author     Vinod Koul <vinod.koul@intel.com>    2017-09-06 12:25:10 -0400
committer  Vinod Koul <vinod.koul@intel.com>    2017-09-06 12:25:10 -0400
commit     41bd0314fa3a458bee7ad768d079e681316332e7 (patch)
tree       09a6bb5a1d83b99e004fc6d798882e0bf014d261 /drivers/dma/dmaengine.c
parent     346ea25e811b1e260fde7a446aa18833ca7e7b79 (diff)
parent     3eeb5156362bd756859e8c84ceb2c22e1d4ef652 (diff)
Merge branch 'topic/dmatest' into for-linus
Diffstat (limited to 'drivers/dma/dmaengine.c')
-rw-r--r--  drivers/dma/dmaengine.c  103
1 file changed, 79 insertions(+), 24 deletions(-)
```diff
diff --git a/drivers/dma/dmaengine.c b/drivers/dma/dmaengine.c
index d9118ec23025..b451354735d3 100644
--- a/drivers/dma/dmaengine.c
+++ b/drivers/dma/dmaengine.c
@@ -923,30 +923,85 @@ int dma_async_device_register(struct dma_device *device)
 		return -ENODEV;
 
 	/* validate device routines */
-	BUG_ON(dma_has_cap(DMA_MEMCPY, device->cap_mask) &&
-		!device->device_prep_dma_memcpy);
-	BUG_ON(dma_has_cap(DMA_XOR, device->cap_mask) &&
-		!device->device_prep_dma_xor);
-	BUG_ON(dma_has_cap(DMA_XOR_VAL, device->cap_mask) &&
-		!device->device_prep_dma_xor_val);
-	BUG_ON(dma_has_cap(DMA_PQ, device->cap_mask) &&
-		!device->device_prep_dma_pq);
-	BUG_ON(dma_has_cap(DMA_PQ_VAL, device->cap_mask) &&
-		!device->device_prep_dma_pq_val);
-	BUG_ON(dma_has_cap(DMA_MEMSET, device->cap_mask) &&
-		!device->device_prep_dma_memset);
-	BUG_ON(dma_has_cap(DMA_INTERRUPT, device->cap_mask) &&
-		!device->device_prep_dma_interrupt);
-	BUG_ON(dma_has_cap(DMA_SG, device->cap_mask) &&
-		!device->device_prep_dma_sg);
-	BUG_ON(dma_has_cap(DMA_CYCLIC, device->cap_mask) &&
-		!device->device_prep_dma_cyclic);
-	BUG_ON(dma_has_cap(DMA_INTERLEAVE, device->cap_mask) &&
-		!device->device_prep_interleaved_dma);
-
-	BUG_ON(!device->device_tx_status);
-	BUG_ON(!device->device_issue_pending);
-	BUG_ON(!device->dev);
+	if (!device->dev) {
+		pr_err("DMAdevice must have dev\n");
+		return -EIO;
+	}
+
+	if (dma_has_cap(DMA_MEMCPY, device->cap_mask) && !device->device_prep_dma_memcpy) {
+		dev_err(device->dev,
+			"Device claims capability %s, but op is not defined\n",
+			"DMA_MEMCPY");
+		return -EIO;
+	}
+
+	if (dma_has_cap(DMA_XOR, device->cap_mask) && !device->device_prep_dma_xor) {
+		dev_err(device->dev,
+			"Device claims capability %s, but op is not defined\n",
+			"DMA_XOR");
+		return -EIO;
+	}
+
+	if (dma_has_cap(DMA_XOR_VAL, device->cap_mask) && !device->device_prep_dma_xor_val) {
+		dev_err(device->dev,
+			"Device claims capability %s, but op is not defined\n",
+			"DMA_XOR_VAL");
+		return -EIO;
+	}
+
+	if (dma_has_cap(DMA_PQ, device->cap_mask) && !device->device_prep_dma_pq) {
+		dev_err(device->dev,
+			"Device claims capability %s, but op is not defined\n",
+			"DMA_PQ");
+		return -EIO;
+	}
+
+	if (dma_has_cap(DMA_PQ_VAL, device->cap_mask) && !device->device_prep_dma_pq_val) {
+		dev_err(device->dev,
+			"Device claims capability %s, but op is not defined\n",
+			"DMA_PQ_VAL");
+		return -EIO;
+	}
+
+	if (dma_has_cap(DMA_MEMSET, device->cap_mask) && !device->device_prep_dma_memset) {
+		dev_err(device->dev,
+			"Device claims capability %s, but op is not defined\n",
+			"DMA_MEMSET");
+		return -EIO;
+	}
+
+	if (dma_has_cap(DMA_INTERRUPT, device->cap_mask) && !device->device_prep_dma_interrupt) {
+		dev_err(device->dev,
+			"Device claims capability %s, but op is not defined\n",
+			"DMA_INTERRUPT");
+		return -EIO;
+	}
+
+	if (dma_has_cap(DMA_CYCLIC, device->cap_mask) && !device->device_prep_dma_cyclic) {
+		dev_err(device->dev,
+			"Device claims capability %s, but op is not defined\n",
+			"DMA_CYCLIC");
+		return -EIO;
+	}
+
+	if (dma_has_cap(DMA_INTERLEAVE, device->cap_mask) && !device->device_prep_interleaved_dma) {
+		dev_err(device->dev,
+			"Device claims capability %s, but op is not defined\n",
+			"DMA_INTERLEAVE");
+		return -EIO;
+	}
+
+
+	if (!device->device_tx_status) {
+		dev_err(device->dev, "Device tx_status is not defined\n");
+		return -EIO;
+	}
+
+
+	if (!device->device_issue_pending) {
+		dev_err(device->dev, "Device issue_pending is not defined\n");
+		return -EIO;
+	}
 
 	/* note: this only matters in the
 	 * CONFIG_ASYNC_TX_ENABLE_CHANNEL_SWITCH=n case
```
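
The practical effect of the hunk above is that a capability/callback mismatch is now reported with dev_err() and a -EIO return instead of a kernel-halting BUG_ON(). The sketch below is not part of the patch or any in-tree driver: it is a minimal, hypothetical probe function (foo_dma_probe and its platform device are invented names) showing how a caller of dma_async_device_register() can propagate that error cleanly.

```c
/* Hypothetical example only -- not from this commit or any in-tree driver. */
#include <linux/dmaengine.h>
#include <linux/platform_device.h>

static int foo_dma_probe(struct platform_device *pdev)
{
	struct dma_device *dd;
	int ret;

	dd = devm_kzalloc(&pdev->dev, sizeof(*dd), GFP_KERNEL);
	if (!dd)
		return -ENOMEM;

	dd->dev = &pdev->dev;			/* registration now fails without this */
	INIT_LIST_HEAD(&dd->channels);		/* a real driver adds its channels here */
	dma_cap_set(DMA_MEMCPY, dd->cap_mask);
	/*
	 * The matching callbacks (device_prep_dma_memcpy, device_tx_status,
	 * device_issue_pending, ...) must be assigned here; if any is
	 * missing, registration returns -EIO instead of crashing.
	 */

	ret = dma_async_device_register(dd);
	if (ret) {
		dev_err(&pdev->dev, "DMA device registration failed: %d\n", ret);
		return ret;
	}

	return 0;
}
```

With the old code, the same driver mistake would have triggered BUG_ON() during registration and stopped the machine; after this change, probe simply fails and the reason is visible in the kernel log.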