author		Dan Williams <dan.j.williams@intel.com>	2009-09-08 20:42:58 -0400
committer	Dan Williams <dan.j.williams@intel.com>	2009-09-08 20:42:58 -0400
commit		9de6fc717bdc574cf5faf9d46ce0f9d6265c7952 (patch)
tree		55cb18ecfae85033d61f730b5f32d2ac1fb572bc /drivers/dma/ioat/dma_v3.c
parent		b094ad3be564e7cc59cca4ff0256550d3a55dd3b (diff)
ioat3: xor self test
This adds a hardware specific self test to be called from ioat_probe.
In the ioat3 case we will have tests for all the different raid
operations, while ioat1 and ioat2 will continue to just test memcpy.

Signed-off-by: Dan Williams <dan.j.williams@intel.com>
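[Editor's note] For context, the dispatch this message refers to follows the pattern sketched below. The self_test member and its ioat3_dma_self_test assignment are visible in the second hunk of the diff; the body of ioat_probe() here is paraphrased for illustration and is not part of this commit.

/*
 * Rough sketch of the hook-dispatch pattern (illustrative, not taken
 * from this commit): ioat_probe() invokes a per-hardware-version hook
 * instead of calling the generic memcpy test directly.
 */
struct ioatdma_device {
	struct pci_dev *pdev;
	struct dma_device common;
	/* hardware specific self test, set by the version probe routine */
	int (*self_test)(struct ioatdma_device *device);
	/* ... remaining members elided ... */
};

static int ioat_probe(struct ioatdma_device *device)
{
	int err;

	/* ... channel enumeration and resource setup elided ... */

	/* ioat1/ioat2 leave this pointing at the memcpy-only test */
	err = device->self_test(device);
	if (err)
		return err;

	return 0;
}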
Diffstat (limited to 'drivers/dma/ioat/dma_v3.c')
-rw-r--r--	drivers/dma/ioat/dma_v3.c	275
1 files changed, 275 insertions, 0 deletions
diff --git a/drivers/dma/ioat/dma_v3.c b/drivers/dma/ioat/dma_v3.c
index 957c205f91d0..927c08b08861 100644
--- a/drivers/dma/ioat/dma_v3.c
+++ b/drivers/dma/ioat/dma_v3.c
@@ -513,6 +513,280 @@ ioat3_prep_xor_val(struct dma_chan *chan, dma_addr_t *src,
 			      src_cnt - 1, len, flags);
 }
 
+static void __devinit ioat3_dma_test_callback(void *dma_async_param)
+{
+	struct completion *cmp = dma_async_param;
+
+	complete(cmp);
+}
+
+#define IOAT_NUM_SRC_TEST 6 /* must be <= 8 */
+static int __devinit ioat_xor_val_self_test(struct ioatdma_device *device)
+{
+	int i, src_idx;
+	struct page *dest;
+	struct page *xor_srcs[IOAT_NUM_SRC_TEST];
+	struct page *xor_val_srcs[IOAT_NUM_SRC_TEST + 1];
+	dma_addr_t dma_srcs[IOAT_NUM_SRC_TEST + 1];
+	dma_addr_t dma_addr, dest_dma;
+	struct dma_async_tx_descriptor *tx;
+	struct dma_chan *dma_chan;
+	dma_cookie_t cookie;
+	u8 cmp_byte = 0;
+	u32 cmp_word;
+	u32 xor_val_result;
+	int err = 0;
+	struct completion cmp;
+	unsigned long tmo;
+	struct device *dev = &device->pdev->dev;
+	struct dma_device *dma = &device->common;
+
+	dev_dbg(dev, "%s\n", __func__);
+
+	if (!dma_has_cap(DMA_XOR, dma->cap_mask))
+		return 0;
+
+	for (src_idx = 0; src_idx < IOAT_NUM_SRC_TEST; src_idx++) {
+		xor_srcs[src_idx] = alloc_page(GFP_KERNEL);
+		if (!xor_srcs[src_idx]) {
+			while (src_idx--)
+				__free_page(xor_srcs[src_idx]);
+			return -ENOMEM;
+		}
+	}
+
+	dest = alloc_page(GFP_KERNEL);
+	if (!dest) {
+		while (src_idx--)
+			__free_page(xor_srcs[src_idx]);
+		return -ENOMEM;
+	}
+
+	/* Fill in src buffers */
+	for (src_idx = 0; src_idx < IOAT_NUM_SRC_TEST; src_idx++) {
+		u8 *ptr = page_address(xor_srcs[src_idx]);
+		for (i = 0; i < PAGE_SIZE; i++)
+			ptr[i] = (1 << src_idx);
+	}
+
+	for (src_idx = 0; src_idx < IOAT_NUM_SRC_TEST; src_idx++)
+		cmp_byte ^= (u8) (1 << src_idx);
+
+	cmp_word = (cmp_byte << 24) | (cmp_byte << 16) |
+			(cmp_byte << 8) | cmp_byte;
+
+	memset(page_address(dest), 0, PAGE_SIZE);
+
+	dma_chan = container_of(dma->channels.next, struct dma_chan,
+				device_node);
+	if (dma->device_alloc_chan_resources(dma_chan) < 1) {
+		err = -ENODEV;
+		goto out;
+	}
+
+	/* test xor */
+	dest_dma = dma_map_page(dev, dest, 0, PAGE_SIZE, DMA_FROM_DEVICE);
+	for (i = 0; i < IOAT_NUM_SRC_TEST; i++)
+		dma_srcs[i] = dma_map_page(dev, xor_srcs[i], 0, PAGE_SIZE,
+					   DMA_TO_DEVICE);
+	tx = dma->device_prep_dma_xor(dma_chan, dest_dma, dma_srcs,
+				      IOAT_NUM_SRC_TEST, PAGE_SIZE,
+				      DMA_PREP_INTERRUPT);
+
+	if (!tx) {
+		dev_err(dev, "Self-test xor prep failed\n");
+		err = -ENODEV;
+		goto free_resources;
+	}
+
+	async_tx_ack(tx);
+	init_completion(&cmp);
+	tx->callback = ioat3_dma_test_callback;
+	tx->callback_param = &cmp;
+	cookie = tx->tx_submit(tx);
+	if (cookie < 0) {
+		dev_err(dev, "Self-test xor setup failed\n");
+		err = -ENODEV;
+		goto free_resources;
+	}
+	dma->device_issue_pending(dma_chan);
+
+	tmo = wait_for_completion_timeout(&cmp, msecs_to_jiffies(3000));
+
+	if (dma->device_is_tx_complete(dma_chan, cookie, NULL, NULL) != DMA_SUCCESS) {
+		dev_err(dev, "Self-test xor timed out\n");
+		err = -ENODEV;
+		goto free_resources;
+	}
+
+	dma_sync_single_for_cpu(dev, dest_dma, PAGE_SIZE, DMA_FROM_DEVICE);
+	for (i = 0; i < (PAGE_SIZE / sizeof(u32)); i++) {
+		u32 *ptr = page_address(dest);
+		if (ptr[i] != cmp_word) {
+			dev_err(dev, "Self-test xor failed compare\n");
+			err = -ENODEV;
+			goto free_resources;
+		}
+	}
+	dma_sync_single_for_device(dev, dest_dma, PAGE_SIZE, DMA_TO_DEVICE);
+
+	/* skip validate if the capability is not present */
+	if (!dma_has_cap(DMA_XOR_VAL, dma_chan->device->cap_mask))
+		goto free_resources;
+
+	/* validate the sources with the destination page */
+	for (i = 0; i < IOAT_NUM_SRC_TEST; i++)
+		xor_val_srcs[i] = xor_srcs[i];
+	xor_val_srcs[i] = dest;
+
+	xor_val_result = 1;
+
+	for (i = 0; i < IOAT_NUM_SRC_TEST + 1; i++)
+		dma_srcs[i] = dma_map_page(dev, xor_val_srcs[i], 0, PAGE_SIZE,
+					   DMA_TO_DEVICE);
+	tx = dma->device_prep_dma_xor_val(dma_chan, dma_srcs,
+					  IOAT_NUM_SRC_TEST + 1, PAGE_SIZE,
+					  &xor_val_result, DMA_PREP_INTERRUPT);
+	if (!tx) {
+		dev_err(dev, "Self-test zero prep failed\n");
+		err = -ENODEV;
+		goto free_resources;
+	}
+
+	async_tx_ack(tx);
+	init_completion(&cmp);
+	tx->callback = ioat3_dma_test_callback;
+	tx->callback_param = &cmp;
+	cookie = tx->tx_submit(tx);
+	if (cookie < 0) {
+		dev_err(dev, "Self-test zero setup failed\n");
+		err = -ENODEV;
+		goto free_resources;
+	}
+	dma->device_issue_pending(dma_chan);
+
+	tmo = wait_for_completion_timeout(&cmp, msecs_to_jiffies(3000));
+
+	if (dma->device_is_tx_complete(dma_chan, cookie, NULL, NULL) != DMA_SUCCESS) {
+		dev_err(dev, "Self-test validate timed out\n");
+		err = -ENODEV;
+		goto free_resources;
+	}
+
+	if (xor_val_result != 0) {
+		dev_err(dev, "Self-test validate failed compare\n");
+		err = -ENODEV;
+		goto free_resources;
+	}
+
+	/* skip memset if the capability is not present */
+	if (!dma_has_cap(DMA_MEMSET, dma_chan->device->cap_mask))
+		goto free_resources;
+
+	/* test memset */
+	dma_addr = dma_map_page(dev, dest, 0,
+			PAGE_SIZE, DMA_FROM_DEVICE);
+	tx = dma->device_prep_dma_memset(dma_chan, dma_addr, 0, PAGE_SIZE,
+					 DMA_PREP_INTERRUPT);
+	if (!tx) {
+		dev_err(dev, "Self-test memset prep failed\n");
+		err = -ENODEV;
+		goto free_resources;
+	}
+
+	async_tx_ack(tx);
+	init_completion(&cmp);
+	tx->callback = ioat3_dma_test_callback;
+	tx->callback_param = &cmp;
+	cookie = tx->tx_submit(tx);
+	if (cookie < 0) {
+		dev_err(dev, "Self-test memset setup failed\n");
+		err = -ENODEV;
+		goto free_resources;
+	}
+	dma->device_issue_pending(dma_chan);
+
+	tmo = wait_for_completion_timeout(&cmp, msecs_to_jiffies(3000));
+
+	if (dma->device_is_tx_complete(dma_chan, cookie, NULL, NULL) != DMA_SUCCESS) {
+		dev_err(dev, "Self-test memset timed out\n");
+		err = -ENODEV;
+		goto free_resources;
+	}
+
+	for (i = 0; i < PAGE_SIZE/sizeof(u32); i++) {
+		u32 *ptr = page_address(dest);
+		if (ptr[i]) {
+			dev_err(dev, "Self-test memset failed compare\n");
+			err = -ENODEV;
+			goto free_resources;
+		}
+	}
+
+	/* test for non-zero parity sum */
+	xor_val_result = 0;
+	for (i = 0; i < IOAT_NUM_SRC_TEST + 1; i++)
+		dma_srcs[i] = dma_map_page(dev, xor_val_srcs[i], 0, PAGE_SIZE,
+					   DMA_TO_DEVICE);
+	tx = dma->device_prep_dma_xor_val(dma_chan, dma_srcs,
+					  IOAT_NUM_SRC_TEST + 1, PAGE_SIZE,
+					  &xor_val_result, DMA_PREP_INTERRUPT);
+	if (!tx) {
+		dev_err(dev, "Self-test 2nd zero prep failed\n");
+		err = -ENODEV;
+		goto free_resources;
+	}
+
+	async_tx_ack(tx);
+	init_completion(&cmp);
+	tx->callback = ioat3_dma_test_callback;
+	tx->callback_param = &cmp;
+	cookie = tx->tx_submit(tx);
+	if (cookie < 0) {
+		dev_err(dev, "Self-test 2nd zero setup failed\n");
+		err = -ENODEV;
+		goto free_resources;
+	}
+	dma->device_issue_pending(dma_chan);
+
+	tmo = wait_for_completion_timeout(&cmp, msecs_to_jiffies(3000));
+
+	if (dma->device_is_tx_complete(dma_chan, cookie, NULL, NULL) != DMA_SUCCESS) {
+		dev_err(dev, "Self-test 2nd validate timed out\n");
+		err = -ENODEV;
+		goto free_resources;
+	}
+
+	if (xor_val_result != SUM_CHECK_P_RESULT) {
+		dev_err(dev, "Self-test validate failed compare\n");
+		err = -ENODEV;
+		goto free_resources;
+	}
+
+free_resources:
+	dma->device_free_chan_resources(dma_chan);
+out:
+	src_idx = IOAT_NUM_SRC_TEST;
+	while (src_idx--)
+		__free_page(xor_srcs[src_idx]);
+	__free_page(dest);
+	return err;
+}
+
+static int __devinit ioat3_dma_self_test(struct ioatdma_device *device)
+{
+	int rc = ioat_dma_self_test(device);
+
+	if (rc)
+		return rc;
+
+	rc = ioat_xor_val_self_test(device);
+	if (rc)
+		return rc;
+
+	return 0;
+}
+
 int __devinit ioat3_dma_probe(struct ioatdma_device *device, int dca)
 {
 	struct pci_dev *pdev = device->pdev;
@@ -526,6 +800,7 @@ int __devinit ioat3_dma_probe(struct ioatdma_device *device, int dca)
 	device->enumerate_channels = ioat2_enumerate_channels;
 	device->cleanup_tasklet = ioat3_cleanup_tasklet;
 	device->timer_fn = ioat3_timer_event;
+	device->self_test = ioat3_dma_self_test;
 	dma = &device->common;
 	dma->device_prep_dma_memcpy = ioat2_dma_prep_memcpy_lock;
 	dma->device_issue_pending = ioat2_issue_pending;
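
[Editor's note] Each sub-test in the patch (xor, both validates, memset) repeats the same submit-and-wait sequence against the 2009-era dmaengine API shown in the diff. As a reading aid only, that sequence condenses to the hypothetical helper below; the commit itself keeps the steps inline, and ioat_submit_and_wait() does not exist in the tree.

/*
 * Hypothetical condensation of the submit-and-wait pattern used by each
 * sub-test above (sketch, not part of this commit).
 */
static int ioat_submit_and_wait(struct dma_device *dma, struct dma_chan *chan,
				struct dma_async_tx_descriptor *tx)
{
	struct completion cmp;
	dma_cookie_t cookie;

	async_tx_ack(tx);			/* the test owns this descriptor */
	init_completion(&cmp);
	tx->callback = ioat3_dma_test_callback;	/* completes &cmp */
	tx->callback_param = &cmp;

	cookie = tx->tx_submit(tx);		/* queue the descriptor */
	if (cookie < 0)
		return -ENODEV;

	dma->device_issue_pending(chan);	/* kick the hardware */

	/* wait up to 3s, then confirm completion via the cookie */
	wait_for_completion_timeout(&cmp, msecs_to_jiffies(3000));
	if (dma->device_is_tx_complete(chan, cookie, NULL, NULL) != DMA_SUCCESS)
		return -ENODEV;

	return 0;
}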