Diffstat (limited to 'drivers/dma/ioat/dma_v3.c')
 drivers/dma/ioat/dma_v3.c | 25 ++++++++++++++++++++++---
 1 file changed, 22 insertions(+), 3 deletions(-)
diff --git a/drivers/dma/ioat/dma_v3.c b/drivers/dma/ioat/dma_v3.c
index 32eae38291e5..77a6dcf25b98 100644
--- a/drivers/dma/ioat/dma_v3.c
+++ b/drivers/dma/ioat/dma_v3.c
@@ -214,6 +214,11 @@ static bool is_bwd_ioat(struct pci_dev *pdev)
 	case PCI_DEVICE_ID_INTEL_IOAT_BWD1:
 	case PCI_DEVICE_ID_INTEL_IOAT_BWD2:
 	case PCI_DEVICE_ID_INTEL_IOAT_BWD3:
+	/* even though not Atom, BDX-DE has same DMA silicon */
+	case PCI_DEVICE_ID_INTEL_IOAT_BDXDE0:
+	case PCI_DEVICE_ID_INTEL_IOAT_BDXDE1:
+	case PCI_DEVICE_ID_INTEL_IOAT_BDXDE2:
+	case PCI_DEVICE_ID_INTEL_IOAT_BDXDE3:
 		return true;
 	default:
 		return false;
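
The hunk above extends is_bwd_ioat() so the BDX-DE device IDs take the same paths as the BWD (Atom) parts; per the in-diff comment, BDX-DE is not an Atom CPU but carries the same DMA silicon. As a minimal sketch of the helper pattern, assuming a made-up placeholder ID (the real PCI_DEVICE_ID_INTEL_IOAT_* constants are defined elsewhere in the driver):

#include <linux/pci.h>

/* Sketch only: 0x1234 is a hypothetical placeholder, not a real IOAT
 * device ID.  The driver's is_*_ioat() helpers all follow this shape:
 * switch on pdev->device and return a bool that gates the quirks for
 * one silicon family. */
static bool is_example_ioat(struct pci_dev *pdev)
{
	switch (pdev->device) {
	case 0x1234:	/* hypothetical device ID */
		return true;
	default:
		return false;
	}
}
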
@@ -489,6 +494,7 @@ static void ioat3_eh(struct ioat2_dma_chan *ioat)
 	struct ioat_chan_common *chan = &ioat->base;
 	struct pci_dev *pdev = to_pdev(chan);
 	struct ioat_dma_descriptor *hw;
+	struct dma_async_tx_descriptor *tx;
 	u64 phys_complete;
 	struct ioat_ring_ent *desc;
 	u32 err_handled = 0;
@@ -534,6 +540,16 @@ static void ioat3_eh(struct ioat2_dma_chan *ioat)
 		dev_err(to_dev(chan), "%s: fatal error (%x:%x)\n",
 			__func__, chanerr, err_handled);
 		BUG();
+	} else { /* cleanup the faulty descriptor */
+		tx = &desc->txd;
+		if (tx->cookie) {
+			dma_cookie_complete(tx);
+			dma_descriptor_unmap(tx);
+			if (tx->callback) {
+				tx->callback(tx->callback_param);
+				tx->callback = NULL;
+			}
+		}
 	}
 
 	writel(chanerr, chan->reg_base + IOAT_CHANERR_OFFSET);
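
The two hunks above work as a pair: the new tx declaration feeds the new else branch, which makes ioat3_eh() complete a faulted descriptor once every error bit has been handled. The cookie is completed, the DMA mappings are unmapped, and the client callback fires exactly once before being cleared; without this, a client sleeping on that callback would only ever wake by timeout. A hedged sketch of the client-side contract this preserves, using only generic dmaengine calls (the helper names are hypothetical, not from this patch):

#include <linux/completion.h>
#include <linux/dmaengine.h>
#include <linux/errno.h>
#include <linux/jiffies.h>

/* Hypothetical client helper: the descriptor callback is the only
 * thing that wakes the waiter, so the channel's error handler must
 * invoke it even for a descriptor that faulted. */
static void example_dma_done(void *param)
{
	complete(param);
}

static int example_submit_and_wait(struct dma_chan *chan,
				   struct dma_async_tx_descriptor *tx)
{
	struct completion cmp;
	dma_cookie_t cookie;

	init_completion(&cmp);
	tx->callback = example_dma_done;
	tx->callback_param = &cmp;
	cookie = tx->tx_submit(tx);
	if (dma_submit_error(cookie))
		return -EIO;

	dma_async_issue_pending(chan);
	if (!wait_for_completion_timeout(&cmp, msecs_to_jiffies(3000)))
		return -ETIMEDOUT;
	return 0;
}
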
@@ -1300,7 +1316,8 @@ static int ioat_xor_val_self_test(struct ioatdma_device *device)
 
 	tmo = wait_for_completion_timeout(&cmp, msecs_to_jiffies(3000));
 
-	if (dma->device_tx_status(dma_chan, cookie, NULL) != DMA_COMPLETE) {
+	if (tmo == 0 ||
+	    dma->device_tx_status(dma_chan, cookie, NULL) != DMA_COMPLETE) {
 		dev_err(dev, "Self-test xor timed out\n");
 		err = -ENODEV;
 		goto dma_unmap;
@@ -1366,7 +1383,8 @@ static int ioat_xor_val_self_test(struct ioatdma_device *device)
 
 	tmo = wait_for_completion_timeout(&cmp, msecs_to_jiffies(3000));
 
-	if (dma->device_tx_status(dma_chan, cookie, NULL) != DMA_COMPLETE) {
+	if (tmo == 0 ||
+	    dma->device_tx_status(dma_chan, cookie, NULL) != DMA_COMPLETE) {
 		dev_err(dev, "Self-test validate timed out\n");
 		err = -ENODEV;
 		goto dma_unmap;
@@ -1418,7 +1436,8 @@ static int ioat_xor_val_self_test(struct ioatdma_device *device)
 
 	tmo = wait_for_completion_timeout(&cmp, msecs_to_jiffies(3000));
 
-	if (dma->device_tx_status(dma_chan, cookie, NULL) != DMA_COMPLETE) {
+	if (tmo == 0 ||
+	    dma->device_tx_status(dma_chan, cookie, NULL) != DMA_COMPLETE) {
 		dev_err(dev, "Self-test 2nd validate timed out\n");
 		err = -ENODEV;
 		goto dma_unmap;
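
The three hunks above apply one fix at each wait site in ioat_xor_val_self_test(): wait_for_completion_timeout() returns the remaining jiffies, or 0 on timeout, and the old code ignored that return value, so a test whose completion never fired in time could still pass if the descriptor happened to finish late and device_tx_status() read back DMA_COMPLETE. A condensed, self-contained sketch of the corrected idiom, assuming the self-test context supplies the channel, cookie, and completion as in the function above:

#include <linux/dmaengine.h>
#include <linux/errno.h>
#include <linux/jiffies.h>

/* Hypothetical helper mirroring the fixed checks: a 0 return from
 * wait_for_completion_timeout() means the wait itself expired and
 * must fail the test, even if the engine's status later reads
 * DMA_COMPLETE. */
static int example_wait_self_test(struct dma_device *dma,
				  struct dma_chan *chan,
				  dma_cookie_t cookie,
				  struct completion *cmp)
{
	unsigned long tmo;

	tmo = wait_for_completion_timeout(cmp, msecs_to_jiffies(3000));
	if (tmo == 0 ||
	    dma->device_tx_status(chan, cookie, NULL) != DMA_COMPLETE)
		return -ENODEV;	/* timed out or never completed */
	return 0;
}
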