aboutsummaryrefslogtreecommitdiffstats
path: root/drivers/dma/ioat
diff options
context:
space:
mode:
authorBartlomiej Zolnierkiewicz <b.zolnierkie@samsung.com>2012-11-05 05:00:19 -0500
committerVinod Koul <vinod.koul@intel.com>2013-01-08 01:05:08 -0500
commit7369f56e3e7193583576ec705d95647b04838b05 (patch)
treed6fade0f7097f11c38f9074a00aebf0f1f300419 /drivers/dma/ioat
parent5e034f7b659be9d94e64aaaa985ab530dd847fdb (diff)
ioat3: add missing DMA unmap to ioat_xor_val_self_test()
Make ioat_xor_val_self_test() do DMA unmapping itself and fix handling of failure cases.

Cc: Dan Williams <djbw@fb.com>
Cc: Tomasz Figa <t.figa@samsung.com>
Signed-off-by: Bartlomiej Zolnierkiewicz <b.zolnierkie@samsung.com>
Signed-off-by: Kyungmin Park <kyungmin.park@samsung.com>
Signed-off-by: Dan Williams <djbw@fb.com>
Diffstat (limited to 'drivers/dma/ioat')
-rw-r--r--drivers/dma/ioat/dma_v3.c76
1 file changed, 59 insertions(+), 17 deletions(-)
diff --git a/drivers/dma/ioat/dma_v3.c b/drivers/dma/ioat/dma_v3.c
index f7f1dc62c15c..6456f7d38e13 100644
--- a/drivers/dma/ioat/dma_v3.c
+++ b/drivers/dma/ioat/dma_v3.c
@@ -863,6 +863,7 @@ static int __devinit ioat_xor_val_self_test(struct ioatdma_device *device)
863 unsigned long tmo; 863 unsigned long tmo;
864 struct device *dev = &device->pdev->dev; 864 struct device *dev = &device->pdev->dev;
865 struct dma_device *dma = &device->common; 865 struct dma_device *dma = &device->common;
866 u8 op = 0;
866 867
867 dev_dbg(dev, "%s\n", __func__); 868 dev_dbg(dev, "%s\n", __func__);
868 869
@@ -908,18 +909,22 @@ static int __devinit ioat_xor_val_self_test(struct ioatdma_device *device)
908 } 909 }
909 910
910 /* test xor */ 911 /* test xor */
912 op = IOAT_OP_XOR;
913
911 dest_dma = dma_map_page(dev, dest, 0, PAGE_SIZE, DMA_FROM_DEVICE); 914 dest_dma = dma_map_page(dev, dest, 0, PAGE_SIZE, DMA_FROM_DEVICE);
912 for (i = 0; i < IOAT_NUM_SRC_TEST; i++) 915 for (i = 0; i < IOAT_NUM_SRC_TEST; i++)
913 dma_srcs[i] = dma_map_page(dev, xor_srcs[i], 0, PAGE_SIZE, 916 dma_srcs[i] = dma_map_page(dev, xor_srcs[i], 0, PAGE_SIZE,
914 DMA_TO_DEVICE); 917 DMA_TO_DEVICE);
915 tx = dma->device_prep_dma_xor(dma_chan, dest_dma, dma_srcs, 918 tx = dma->device_prep_dma_xor(dma_chan, dest_dma, dma_srcs,
916 IOAT_NUM_SRC_TEST, PAGE_SIZE, 919 IOAT_NUM_SRC_TEST, PAGE_SIZE,
917 DMA_PREP_INTERRUPT); 920 DMA_PREP_INTERRUPT |
921 DMA_COMPL_SKIP_SRC_UNMAP |
922 DMA_COMPL_SKIP_DEST_UNMAP);
918 923
919 if (!tx) { 924 if (!tx) {
920 dev_err(dev, "Self-test xor prep failed\n"); 925 dev_err(dev, "Self-test xor prep failed\n");
921 err = -ENODEV; 926 err = -ENODEV;
922 goto free_resources; 927 goto dma_unmap;
923 } 928 }
924 929
925 async_tx_ack(tx); 930 async_tx_ack(tx);
@@ -930,7 +935,7 @@ static int __devinit ioat_xor_val_self_test(struct ioatdma_device *device)
930 if (cookie < 0) { 935 if (cookie < 0) {
931 dev_err(dev, "Self-test xor setup failed\n"); 936 dev_err(dev, "Self-test xor setup failed\n");
932 err = -ENODEV; 937 err = -ENODEV;
933 goto free_resources; 938 goto dma_unmap;
934 } 939 }
935 dma->device_issue_pending(dma_chan); 940 dma->device_issue_pending(dma_chan);
936 941
@@ -939,9 +944,13 @@ static int __devinit ioat_xor_val_self_test(struct ioatdma_device *device)
939 if (dma->device_tx_status(dma_chan, cookie, NULL) != DMA_SUCCESS) { 944 if (dma->device_tx_status(dma_chan, cookie, NULL) != DMA_SUCCESS) {
940 dev_err(dev, "Self-test xor timed out\n"); 945 dev_err(dev, "Self-test xor timed out\n");
941 err = -ENODEV; 946 err = -ENODEV;
942 goto free_resources; 947 goto dma_unmap;
943 } 948 }
944 949
950 dma_unmap_page(dev, dest_dma, PAGE_SIZE, DMA_FROM_DEVICE);
951 for (i = 0; i < IOAT_NUM_SRC_TEST; i++)
952 dma_unmap_page(dev, dma_srcs[i], PAGE_SIZE, DMA_TO_DEVICE);
953
945 dma_sync_single_for_cpu(dev, dest_dma, PAGE_SIZE, DMA_FROM_DEVICE); 954 dma_sync_single_for_cpu(dev, dest_dma, PAGE_SIZE, DMA_FROM_DEVICE);
946 for (i = 0; i < (PAGE_SIZE / sizeof(u32)); i++) { 955 for (i = 0; i < (PAGE_SIZE / sizeof(u32)); i++) {
947 u32 *ptr = page_address(dest); 956 u32 *ptr = page_address(dest);
@@ -957,6 +966,8 @@ static int __devinit ioat_xor_val_self_test(struct ioatdma_device *device)
957 if (!dma_has_cap(DMA_XOR_VAL, dma_chan->device->cap_mask)) 966 if (!dma_has_cap(DMA_XOR_VAL, dma_chan->device->cap_mask))
958 goto free_resources; 967 goto free_resources;
959 968
969 op = IOAT_OP_XOR_VAL;
970
960 /* validate the sources with the destintation page */ 971 /* validate the sources with the destintation page */
961 for (i = 0; i < IOAT_NUM_SRC_TEST; i++) 972 for (i = 0; i < IOAT_NUM_SRC_TEST; i++)
962 xor_val_srcs[i] = xor_srcs[i]; 973 xor_val_srcs[i] = xor_srcs[i];
@@ -969,11 +980,13 @@ static int __devinit ioat_xor_val_self_test(struct ioatdma_device *device)
969 DMA_TO_DEVICE); 980 DMA_TO_DEVICE);
970 tx = dma->device_prep_dma_xor_val(dma_chan, dma_srcs, 981 tx = dma->device_prep_dma_xor_val(dma_chan, dma_srcs,
971 IOAT_NUM_SRC_TEST + 1, PAGE_SIZE, 982 IOAT_NUM_SRC_TEST + 1, PAGE_SIZE,
972 &xor_val_result, DMA_PREP_INTERRUPT); 983 &xor_val_result, DMA_PREP_INTERRUPT |
984 DMA_COMPL_SKIP_SRC_UNMAP |
985 DMA_COMPL_SKIP_DEST_UNMAP);
973 if (!tx) { 986 if (!tx) {
974 dev_err(dev, "Self-test zero prep failed\n"); 987 dev_err(dev, "Self-test zero prep failed\n");
975 err = -ENODEV; 988 err = -ENODEV;
976 goto free_resources; 989 goto dma_unmap;
977 } 990 }
978 991
979 async_tx_ack(tx); 992 async_tx_ack(tx);
@@ -984,7 +997,7 @@ static int __devinit ioat_xor_val_self_test(struct ioatdma_device *device)
984 if (cookie < 0) { 997 if (cookie < 0) {
985 dev_err(dev, "Self-test zero setup failed\n"); 998 dev_err(dev, "Self-test zero setup failed\n");
986 err = -ENODEV; 999 err = -ENODEV;
987 goto free_resources; 1000 goto dma_unmap;
988 } 1001 }
989 dma->device_issue_pending(dma_chan); 1002 dma->device_issue_pending(dma_chan);
990 1003
@@ -993,9 +1006,12 @@ static int __devinit ioat_xor_val_self_test(struct ioatdma_device *device)
993 if (dma->device_tx_status(dma_chan, cookie, NULL) != DMA_SUCCESS) { 1006 if (dma->device_tx_status(dma_chan, cookie, NULL) != DMA_SUCCESS) {
994 dev_err(dev, "Self-test validate timed out\n"); 1007 dev_err(dev, "Self-test validate timed out\n");
995 err = -ENODEV; 1008 err = -ENODEV;
996 goto free_resources; 1009 goto dma_unmap;
997 } 1010 }
998 1011
1012 for (i = 0; i < IOAT_NUM_SRC_TEST + 1; i++)
1013 dma_unmap_page(dev, dma_srcs[i], PAGE_SIZE, DMA_TO_DEVICE);
1014
999 if (xor_val_result != 0) { 1015 if (xor_val_result != 0) {
1000 dev_err(dev, "Self-test validate failed compare\n"); 1016 dev_err(dev, "Self-test validate failed compare\n");
1001 err = -ENODEV; 1017 err = -ENODEV;
@@ -1007,14 +1023,18 @@ static int __devinit ioat_xor_val_self_test(struct ioatdma_device *device)
1007 goto free_resources; 1023 goto free_resources;
1008 1024
1009 /* test memset */ 1025 /* test memset */
1026 op = IOAT_OP_FILL;
1027
1010 dma_addr = dma_map_page(dev, dest, 0, 1028 dma_addr = dma_map_page(dev, dest, 0,
1011 PAGE_SIZE, DMA_FROM_DEVICE); 1029 PAGE_SIZE, DMA_FROM_DEVICE);
1012 tx = dma->device_prep_dma_memset(dma_chan, dma_addr, 0, PAGE_SIZE, 1030 tx = dma->device_prep_dma_memset(dma_chan, dma_addr, 0, PAGE_SIZE,
1013 DMA_PREP_INTERRUPT); 1031 DMA_PREP_INTERRUPT |
1032 DMA_COMPL_SKIP_SRC_UNMAP |
1033 DMA_COMPL_SKIP_DEST_UNMAP);
1014 if (!tx) { 1034 if (!tx) {
1015 dev_err(dev, "Self-test memset prep failed\n"); 1035 dev_err(dev, "Self-test memset prep failed\n");
1016 err = -ENODEV; 1036 err = -ENODEV;
1017 goto free_resources; 1037 goto dma_unmap;
1018 } 1038 }
1019 1039
1020 async_tx_ack(tx); 1040 async_tx_ack(tx);
@@ -1025,7 +1045,7 @@ static int __devinit ioat_xor_val_self_test(struct ioatdma_device *device)
1025 if (cookie < 0) { 1045 if (cookie < 0) {
1026 dev_err(dev, "Self-test memset setup failed\n"); 1046 dev_err(dev, "Self-test memset setup failed\n");
1027 err = -ENODEV; 1047 err = -ENODEV;
1028 goto free_resources; 1048 goto dma_unmap;
1029 } 1049 }
1030 dma->device_issue_pending(dma_chan); 1050 dma->device_issue_pending(dma_chan);
1031 1051
@@ -1034,9 +1054,11 @@ static int __devinit ioat_xor_val_self_test(struct ioatdma_device *device)
1034 if (dma->device_tx_status(dma_chan, cookie, NULL) != DMA_SUCCESS) { 1054 if (dma->device_tx_status(dma_chan, cookie, NULL) != DMA_SUCCESS) {
1035 dev_err(dev, "Self-test memset timed out\n"); 1055 dev_err(dev, "Self-test memset timed out\n");
1036 err = -ENODEV; 1056 err = -ENODEV;
1037 goto free_resources; 1057 goto dma_unmap;
1038 } 1058 }
1039 1059
1060 dma_unmap_page(dev, dma_addr, PAGE_SIZE, DMA_FROM_DEVICE);
1061
1040 for (i = 0; i < PAGE_SIZE/sizeof(u32); i++) { 1062 for (i = 0; i < PAGE_SIZE/sizeof(u32); i++) {
1041 u32 *ptr = page_address(dest); 1063 u32 *ptr = page_address(dest);
1042 if (ptr[i]) { 1064 if (ptr[i]) {
@@ -1047,17 +1069,21 @@ static int __devinit ioat_xor_val_self_test(struct ioatdma_device *device)
1047 } 1069 }
1048 1070
1049 /* test for non-zero parity sum */ 1071 /* test for non-zero parity sum */
1072 op = IOAT_OP_XOR_VAL;
1073
1050 xor_val_result = 0; 1074 xor_val_result = 0;
1051 for (i = 0; i < IOAT_NUM_SRC_TEST + 1; i++) 1075 for (i = 0; i < IOAT_NUM_SRC_TEST + 1; i++)
1052 dma_srcs[i] = dma_map_page(dev, xor_val_srcs[i], 0, PAGE_SIZE, 1076 dma_srcs[i] = dma_map_page(dev, xor_val_srcs[i], 0, PAGE_SIZE,
1053 DMA_TO_DEVICE); 1077 DMA_TO_DEVICE);
1054 tx = dma->device_prep_dma_xor_val(dma_chan, dma_srcs, 1078 tx = dma->device_prep_dma_xor_val(dma_chan, dma_srcs,
1055 IOAT_NUM_SRC_TEST + 1, PAGE_SIZE, 1079 IOAT_NUM_SRC_TEST + 1, PAGE_SIZE,
1056 &xor_val_result, DMA_PREP_INTERRUPT); 1080 &xor_val_result, DMA_PREP_INTERRUPT |
1081 DMA_COMPL_SKIP_SRC_UNMAP |
1082 DMA_COMPL_SKIP_DEST_UNMAP);
1057 if (!tx) { 1083 if (!tx) {
1058 dev_err(dev, "Self-test 2nd zero prep failed\n"); 1084 dev_err(dev, "Self-test 2nd zero prep failed\n");
1059 err = -ENODEV; 1085 err = -ENODEV;
1060 goto free_resources; 1086 goto dma_unmap;
1061 } 1087 }
1062 1088
1063 async_tx_ack(tx); 1089 async_tx_ack(tx);
@@ -1068,7 +1094,7 @@ static int __devinit ioat_xor_val_self_test(struct ioatdma_device *device)
1068 if (cookie < 0) { 1094 if (cookie < 0) {
1069 dev_err(dev, "Self-test 2nd zero setup failed\n"); 1095 dev_err(dev, "Self-test 2nd zero setup failed\n");
1070 err = -ENODEV; 1096 err = -ENODEV;
1071 goto free_resources; 1097 goto dma_unmap;
1072 } 1098 }
1073 dma->device_issue_pending(dma_chan); 1099 dma->device_issue_pending(dma_chan);
1074 1100
@@ -1077,15 +1103,31 @@ static int __devinit ioat_xor_val_self_test(struct ioatdma_device *device)
1077 if (dma->device_tx_status(dma_chan, cookie, NULL) != DMA_SUCCESS) { 1103 if (dma->device_tx_status(dma_chan, cookie, NULL) != DMA_SUCCESS) {
1078 dev_err(dev, "Self-test 2nd validate timed out\n"); 1104 dev_err(dev, "Self-test 2nd validate timed out\n");
1079 err = -ENODEV; 1105 err = -ENODEV;
1080 goto free_resources; 1106 goto dma_unmap;
1081 } 1107 }
1082 1108
1083 if (xor_val_result != SUM_CHECK_P_RESULT) { 1109 if (xor_val_result != SUM_CHECK_P_RESULT) {
1084 dev_err(dev, "Self-test validate failed compare\n"); 1110 dev_err(dev, "Self-test validate failed compare\n");
1085 err = -ENODEV; 1111 err = -ENODEV;
1086 goto free_resources; 1112 goto dma_unmap;
1087 } 1113 }
1088 1114
1115 for (i = 0; i < IOAT_NUM_SRC_TEST + 1; i++)
1116 dma_unmap_page(dev, dma_srcs[i], PAGE_SIZE, DMA_TO_DEVICE);
1117
1118 goto free_resources;
1119dma_unmap:
1120 if (op == IOAT_OP_XOR) {
1121 dma_unmap_page(dev, dest_dma, PAGE_SIZE, DMA_FROM_DEVICE);
1122 for (i = 0; i < IOAT_NUM_SRC_TEST; i++)
1123 dma_unmap_page(dev, dma_srcs[i], PAGE_SIZE,
1124 DMA_TO_DEVICE);
1125 } else if (op == IOAT_OP_XOR_VAL) {
1126 for (i = 0; i < IOAT_NUM_SRC_TEST + 1; i++)
1127 dma_unmap_page(dev, dma_srcs[i], PAGE_SIZE,
1128 DMA_TO_DEVICE);
1129 } else if (op == IOAT_OP_FILL)
1130 dma_unmap_page(dev, dma_addr, PAGE_SIZE, DMA_FROM_DEVICE);
1089free_resources: 1131free_resources:
1090 dma->device_free_chan_resources(dma_chan); 1132 dma->device_free_chan_resources(dma_chan);
1091out: 1133out: