Diffstat (limited to 'drivers')
-rw-r--r--  drivers/dma/ioat/dma.c     |   5
-rw-r--r--  drivers/dma/ioat/dma.h     |   4
-rw-r--r--  drivers/dma/ioat/dma_v2.c  |   1
-rw-r--r--  drivers/dma/ioat/dma_v3.c  | 275
4 files changed, 282 insertions(+), 3 deletions(-)
diff --git a/drivers/dma/ioat/dma.c b/drivers/dma/ioat/dma.c
index cb08f8108496..32a757be75c1 100644
--- a/drivers/dma/ioat/dma.c
+++ b/drivers/dma/ioat/dma.c
@@ -799,7 +799,7 @@ static void __devinit ioat_dma_test_callback(void *dma_async_param)
  * ioat_dma_self_test - Perform a IOAT transaction to verify the HW works.
  * @device: device to be tested
  */
-static int __devinit ioat_dma_self_test(struct ioatdma_device *device)
+int __devinit ioat_dma_self_test(struct ioatdma_device *device)
 {
 	int i;
 	u8 *src;
@@ -1039,7 +1039,7 @@ int __devinit ioat_probe(struct ioatdma_device *device)
 	if (err)
 		goto err_setup_interrupts;
 
-	err = ioat_dma_self_test(device);
+	err = device->self_test(device);
 	if (err)
 		goto err_self_test;
 
@@ -1197,6 +1197,7 @@ int __devinit ioat1_dma_probe(struct ioatdma_device *device, int dca)
 
 	device->intr_quirk = ioat1_intr_quirk;
 	device->enumerate_channels = ioat1_enumerate_channels;
+	device->self_test = ioat_dma_self_test;
 	dma = &device->common;
 	dma->device_prep_dma_memcpy = ioat1_dma_prep_memcpy;
 	dma->device_issue_pending = ioat1_dma_memcpy_issue_pending;
diff --git a/drivers/dma/ioat/dma.h b/drivers/dma/ioat/dma.h
index c2939b289185..0e37e426c729 100644
--- a/drivers/dma/ioat/dma.h
+++ b/drivers/dma/ioat/dma.h
@@ -62,10 +62,10 @@
  * @enumerate_channels: hw version specific channel enumeration
  * @cleanup_tasklet: select between the v2 and v3 cleanup routines
  * @timer_fn: select between the v2 and v3 timer watchdog routines
+ * @self_test: hardware version specific self test for each supported op type
  *
  * Note: the v3 cleanup routine supports raid operations
  */
-
 struct ioatdma_device {
 	struct pci_dev *pdev;
 	void __iomem *reg_base;
@@ -80,6 +80,7 @@ struct ioatdma_device {
 	int (*enumerate_channels)(struct ioatdma_device *device);
 	void (*cleanup_tasklet)(unsigned long data);
 	void (*timer_fn)(unsigned long data);
+	int (*self_test)(struct ioatdma_device *device);
 };
 
 struct ioat_chan_common {
@@ -313,6 +314,7 @@ static inline void ioat_unmap(struct pci_dev *pdev, dma_addr_t addr, size_t len,
 int __devinit ioat_probe(struct ioatdma_device *device);
 int __devinit ioat_register(struct ioatdma_device *device);
 int __devinit ioat1_dma_probe(struct ioatdma_device *dev, int dca);
+int __devinit ioat_dma_self_test(struct ioatdma_device *device);
 void __devexit ioat_dma_remove(struct ioatdma_device *device);
 struct dca_provider * __devinit ioat_dca_init(struct pci_dev *pdev,
 					      void __iomem *iobase);
diff --git a/drivers/dma/ioat/dma_v2.c b/drivers/dma/ioat/dma_v2.c
index ee295d48ba2c..12c64e1a7e31 100644
--- a/drivers/dma/ioat/dma_v2.c
+++ b/drivers/dma/ioat/dma_v2.c
@@ -836,6 +836,7 @@ int __devinit ioat2_dma_probe(struct ioatdma_device *device, int dca)
 	device->enumerate_channels = ioat2_enumerate_channels;
 	device->cleanup_tasklet = ioat2_cleanup_tasklet;
 	device->timer_fn = ioat2_timer_event;
+	device->self_test = ioat_dma_self_test;
 	dma = &device->common;
 	dma->device_prep_dma_memcpy = ioat2_dma_prep_memcpy_lock;
 	dma->device_issue_pending = ioat2_issue_pending;
diff --git a/drivers/dma/ioat/dma_v3.c b/drivers/dma/ioat/dma_v3.c
index 957c205f91d0..927c08b08861 100644
--- a/drivers/dma/ioat/dma_v3.c
+++ b/drivers/dma/ioat/dma_v3.c
@@ -513,6 +513,280 @@ ioat3_prep_xor_val(struct dma_chan *chan, dma_addr_t *src,
 			       src_cnt - 1, len, flags);
 }
 
+static void __devinit ioat3_dma_test_callback(void *dma_async_param)
+{
+	struct completion *cmp = dma_async_param;
+
+	complete(cmp);
+}
+
+#define IOAT_NUM_SRC_TEST 6 /* must be <= 8 */
+static int __devinit ioat_xor_val_self_test(struct ioatdma_device *device)
+{
+	int i, src_idx;
+	struct page *dest;
+	struct page *xor_srcs[IOAT_NUM_SRC_TEST];
+	struct page *xor_val_srcs[IOAT_NUM_SRC_TEST + 1];
+	dma_addr_t dma_srcs[IOAT_NUM_SRC_TEST + 1];
+	dma_addr_t dma_addr, dest_dma;
+	struct dma_async_tx_descriptor *tx;
+	struct dma_chan *dma_chan;
+	dma_cookie_t cookie;
+	u8 cmp_byte = 0;
+	u32 cmp_word;
+	u32 xor_val_result;
+	int err = 0;
+	struct completion cmp;
+	unsigned long tmo;
+	struct device *dev = &device->pdev->dev;
+	struct dma_device *dma = &device->common;
+
+	dev_dbg(dev, "%s\n", __func__);
+
+	if (!dma_has_cap(DMA_XOR, dma->cap_mask))
+		return 0;
+
+	for (src_idx = 0; src_idx < IOAT_NUM_SRC_TEST; src_idx++) {
+		xor_srcs[src_idx] = alloc_page(GFP_KERNEL);
+		if (!xor_srcs[src_idx]) {
+			while (src_idx--)
+				__free_page(xor_srcs[src_idx]);
+			return -ENOMEM;
+		}
+	}
+
+	dest = alloc_page(GFP_KERNEL);
+	if (!dest) {
+		while (src_idx--)
+			__free_page(xor_srcs[src_idx]);
+		return -ENOMEM;
+	}
+
+	/* Fill in src buffers */
+	for (src_idx = 0; src_idx < IOAT_NUM_SRC_TEST; src_idx++) {
+		u8 *ptr = page_address(xor_srcs[src_idx]);
+		for (i = 0; i < PAGE_SIZE; i++)
+			ptr[i] = (1 << src_idx);
+	}
+
+	for (src_idx = 0; src_idx < IOAT_NUM_SRC_TEST; src_idx++)
+		cmp_byte ^= (u8) (1 << src_idx);
+
+	cmp_word = (cmp_byte << 24) | (cmp_byte << 16) |
+			(cmp_byte << 8) | cmp_byte;
+
+	memset(page_address(dest), 0, PAGE_SIZE);
+
+	dma_chan = container_of(dma->channels.next, struct dma_chan,
+				device_node);
+	if (dma->device_alloc_chan_resources(dma_chan) < 1) {
+		err = -ENODEV;
+		goto out;
+	}
+
+	/* test xor */
+	dest_dma = dma_map_page(dev, dest, 0, PAGE_SIZE, DMA_FROM_DEVICE);
+	for (i = 0; i < IOAT_NUM_SRC_TEST; i++)
+		dma_srcs[i] = dma_map_page(dev, xor_srcs[i], 0, PAGE_SIZE,
+					   DMA_TO_DEVICE);
+	tx = dma->device_prep_dma_xor(dma_chan, dest_dma, dma_srcs,
+				      IOAT_NUM_SRC_TEST, PAGE_SIZE,
+				      DMA_PREP_INTERRUPT);
+
+	if (!tx) {
+		dev_err(dev, "Self-test xor prep failed\n");
+		err = -ENODEV;
+		goto free_resources;
+	}
+
+	async_tx_ack(tx);
+	init_completion(&cmp);
+	tx->callback = ioat3_dma_test_callback;
+	tx->callback_param = &cmp;
+	cookie = tx->tx_submit(tx);
+	if (cookie < 0) {
+		dev_err(dev, "Self-test xor setup failed\n");
+		err = -ENODEV;
+		goto free_resources;
+	}
+	dma->device_issue_pending(dma_chan);
+
+	tmo = wait_for_completion_timeout(&cmp, msecs_to_jiffies(3000));
+
+	if (dma->device_is_tx_complete(dma_chan, cookie, NULL, NULL) != DMA_SUCCESS) {
+		dev_err(dev, "Self-test xor timed out\n");
+		err = -ENODEV;
+		goto free_resources;
+	}
+
+	dma_sync_single_for_cpu(dev, dest_dma, PAGE_SIZE, DMA_FROM_DEVICE);
+	for (i = 0; i < (PAGE_SIZE / sizeof(u32)); i++) {
+		u32 *ptr = page_address(dest);
+		if (ptr[i] != cmp_word) {
+			dev_err(dev, "Self-test xor failed compare\n");
+			err = -ENODEV;
+			goto free_resources;
+		}
+	}
+	dma_sync_single_for_device(dev, dest_dma, PAGE_SIZE, DMA_TO_DEVICE);
+
+	/* skip validate if the capability is not present */
+	if (!dma_has_cap(DMA_XOR_VAL, dma_chan->device->cap_mask))
+		goto free_resources;
+
+	/* validate the sources with the destintation page */
+	for (i = 0; i < IOAT_NUM_SRC_TEST; i++)
+		xor_val_srcs[i] = xor_srcs[i];
+	xor_val_srcs[i] = dest;
+
+	xor_val_result = 1;
+
+	for (i = 0; i < IOAT_NUM_SRC_TEST + 1; i++)
+		dma_srcs[i] = dma_map_page(dev, xor_val_srcs[i], 0, PAGE_SIZE,
+					   DMA_TO_DEVICE);
+	tx = dma->device_prep_dma_xor_val(dma_chan, dma_srcs,
+					  IOAT_NUM_SRC_TEST + 1, PAGE_SIZE,
+					  &xor_val_result, DMA_PREP_INTERRUPT);
+	if (!tx) {
+		dev_err(dev, "Self-test zero prep failed\n");
+		err = -ENODEV;
+		goto free_resources;
+	}
+
+	async_tx_ack(tx);
+	init_completion(&cmp);
+	tx->callback = ioat3_dma_test_callback;
+	tx->callback_param = &cmp;
+	cookie = tx->tx_submit(tx);
+	if (cookie < 0) {
+		dev_err(dev, "Self-test zero setup failed\n");
+		err = -ENODEV;
+		goto free_resources;
+	}
+	dma->device_issue_pending(dma_chan);
+
+	tmo = wait_for_completion_timeout(&cmp, msecs_to_jiffies(3000));
+
+	if (dma->device_is_tx_complete(dma_chan, cookie, NULL, NULL) != DMA_SUCCESS) {
+		dev_err(dev, "Self-test validate timed out\n");
+		err = -ENODEV;
+		goto free_resources;
+	}
+
+	if (xor_val_result != 0) {
+		dev_err(dev, "Self-test validate failed compare\n");
+		err = -ENODEV;
+		goto free_resources;
+	}
+
+	/* skip memset if the capability is not present */
+	if (!dma_has_cap(DMA_MEMSET, dma_chan->device->cap_mask))
+		goto free_resources;
+
+	/* test memset */
+	dma_addr = dma_map_page(dev, dest, 0,
+			PAGE_SIZE, DMA_FROM_DEVICE);
+	tx = dma->device_prep_dma_memset(dma_chan, dma_addr, 0, PAGE_SIZE,
+					 DMA_PREP_INTERRUPT);
+	if (!tx) {
+		dev_err(dev, "Self-test memset prep failed\n");
+		err = -ENODEV;
+		goto free_resources;
+	}
+
+	async_tx_ack(tx);
+	init_completion(&cmp);
+	tx->callback = ioat3_dma_test_callback;
+	tx->callback_param = &cmp;
+	cookie = tx->tx_submit(tx);
+	if (cookie < 0) {
+		dev_err(dev, "Self-test memset setup failed\n");
+		err = -ENODEV;
+		goto free_resources;
+	}
+	dma->device_issue_pending(dma_chan);
+
+	tmo = wait_for_completion_timeout(&cmp, msecs_to_jiffies(3000));
+
+	if (dma->device_is_tx_complete(dma_chan, cookie, NULL, NULL) != DMA_SUCCESS) {
+		dev_err(dev, "Self-test memset timed out\n");
+		err = -ENODEV;
+		goto free_resources;
+	}
+
+	for (i = 0; i < PAGE_SIZE/sizeof(u32); i++) {
+		u32 *ptr = page_address(dest);
+		if (ptr[i]) {
+			dev_err(dev, "Self-test memset failed compare\n");
+			err = -ENODEV;
+			goto free_resources;
+		}
+	}
+
+	/* test for non-zero parity sum */
+	xor_val_result = 0;
+	for (i = 0; i < IOAT_NUM_SRC_TEST + 1; i++)
+		dma_srcs[i] = dma_map_page(dev, xor_val_srcs[i], 0, PAGE_SIZE,
+					   DMA_TO_DEVICE);
+	tx = dma->device_prep_dma_xor_val(dma_chan, dma_srcs,
+					  IOAT_NUM_SRC_TEST + 1, PAGE_SIZE,
+					  &xor_val_result, DMA_PREP_INTERRUPT);
+	if (!tx) {
+		dev_err(dev, "Self-test 2nd zero prep failed\n");
+		err = -ENODEV;
+		goto free_resources;
+	}
+
+	async_tx_ack(tx);
+	init_completion(&cmp);
+	tx->callback = ioat3_dma_test_callback;
+	tx->callback_param = &cmp;
+	cookie = tx->tx_submit(tx);
+	if (cookie < 0) {
+		dev_err(dev, "Self-test 2nd zero setup failed\n");
+		err = -ENODEV;
+		goto free_resources;
+	}
+	dma->device_issue_pending(dma_chan);
+
+	tmo = wait_for_completion_timeout(&cmp, msecs_to_jiffies(3000));
+
+	if (dma->device_is_tx_complete(dma_chan, cookie, NULL, NULL) != DMA_SUCCESS) {
+		dev_err(dev, "Self-test 2nd validate timed out\n");
+		err = -ENODEV;
+		goto free_resources;
+	}
+
+	if (xor_val_result != SUM_CHECK_P_RESULT) {
+		dev_err(dev, "Self-test validate failed compare\n");
+		err = -ENODEV;
+		goto free_resources;
+	}
+
+free_resources:
+	dma->device_free_chan_resources(dma_chan);
+out:
+	src_idx = IOAT_NUM_SRC_TEST;
+	while (src_idx--)
+		__free_page(xor_srcs[src_idx]);
+	__free_page(dest);
+	return err;
+}
+
+static int __devinit ioat3_dma_self_test(struct ioatdma_device *device)
+{
+	int rc = ioat_dma_self_test(device);
+
+	if (rc)
+		return rc;
+
+	rc = ioat_xor_val_self_test(device);
+	if (rc)
+		return rc;
+
+	return 0;
+}
+
 int __devinit ioat3_dma_probe(struct ioatdma_device *device, int dca)
 {
 	struct pci_dev *pdev = device->pdev;
@@ -526,6 +800,7 @@ int __devinit ioat3_dma_probe(struct ioatdma_device *device, int dca)
 	device->enumerate_channels = ioat2_enumerate_channels;
 	device->cleanup_tasklet = ioat3_cleanup_tasklet;
 	device->timer_fn = ioat3_timer_event;
+	device->self_test = ioat3_dma_self_test;
 	dma = &device->common;
 	dma->device_prep_dma_memcpy = ioat2_dma_prep_memcpy_lock;
 	dma->device_issue_pending = ioat2_issue_pending;
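
For reference, the sketch below is not part of the patch: it is a minimal, standalone C program (all names except the ->self_test member are hypothetical stand-ins) illustrating the dispatch pattern the patch introduces. Each hardware version registers its own self_test callback, and the common probe path calls whatever was registered, so v1/v2 keep the memcpy-only test while v3 can layer the xor/validate/memset checks on top.

/* Illustrative sketch of the per-version self_test dispatch; only the
 * function-pointer member mirrors the real struct ioatdma_device field.
 */
#include <stdio.h>

struct ioatdma_device {
	const char *name;
	/* hw version specific self test, as added to struct ioatdma_device */
	int (*self_test)(struct ioatdma_device *device);
};

/* stand-in for ioat_dma_self_test(): the memcpy test shared by v1/v2 */
static int generic_self_test(struct ioatdma_device *device)
{
	printf("%s: memcpy self test\n", device->name);
	return 0;
}

/* stand-in for ioat3_dma_self_test(): generic test first, then raid ops */
static int v3_self_test(struct ioatdma_device *device)
{
	int err = generic_self_test(device);

	if (err)
		return err;
	printf("%s: xor/xor_val/memset self test\n", device->name);
	return 0;
}

/* stand-in for ioat_probe(): invokes whichever test the version hooked up */
static int probe(struct ioatdma_device *device)
{
	return device->self_test(device);
}

int main(void)
{
	struct ioatdma_device v2 = { "ioat v2", generic_self_test };
	struct ioatdma_device v3 = { "ioat v3", v3_self_test };

	return probe(&v2) || probe(&v3);
}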