path: root/drivers/dma/ioat/dma_v2.c
Diffstat (limited to 'drivers/dma/ioat/dma_v2.c')
-rw-r--r--  drivers/dma/ioat/dma_v2.c |  70 +++++++++++++++++++++-------------------------------------------------
1 file changed, 21 insertions(+), 49 deletions(-)
diff --git a/drivers/dma/ioat/dma_v2.c b/drivers/dma/ioat/dma_v2.c
index 5cc37afe2bc1..1ed5d66d7dca 100644
--- a/drivers/dma/ioat/dma_v2.c
+++ b/drivers/dma/ioat/dma_v2.c
@@ -51,48 +51,40 @@ MODULE_PARM_DESC(ioat_ring_max_alloc_order,
 
 void __ioat2_issue_pending(struct ioat2_dma_chan *ioat)
 {
-	void * __iomem reg_base = ioat->base.reg_base;
+	struct ioat_chan_common *chan = &ioat->base;
 
-	ioat->pending = 0;
 	ioat->dmacount += ioat2_ring_pending(ioat);
 	ioat->issued = ioat->head;
 	/* make descriptor updates globally visible before notifying channel */
 	wmb();
-	writew(ioat->dmacount, reg_base + IOAT_CHAN_DMACOUNT_OFFSET);
-	dev_dbg(to_dev(&ioat->base),
+	writew(ioat->dmacount, chan->reg_base + IOAT_CHAN_DMACOUNT_OFFSET);
+	dev_dbg(to_dev(chan),
 		"%s: head: %#x tail: %#x issued: %#x count: %#x\n",
 		__func__, ioat->head, ioat->tail, ioat->issued, ioat->dmacount);
 }
 
-void ioat2_issue_pending(struct dma_chan *chan)
+void ioat2_issue_pending(struct dma_chan *c)
 {
-	struct ioat2_dma_chan *ioat = to_ioat2_chan(chan);
+	struct ioat2_dma_chan *ioat = to_ioat2_chan(c);
 
-	spin_lock_bh(&ioat->ring_lock);
-	if (ioat->pending == 1)
-		__ioat2_issue_pending(ioat);
-	spin_unlock_bh(&ioat->ring_lock);
+	if (ioat2_ring_pending(ioat)) {
+		spin_lock_bh(&ioat->ring_lock);
+		__ioat2_issue_pending(ioat);
+		spin_unlock_bh(&ioat->ring_lock);
+	}
 }
 
 /**
  * ioat2_update_pending - log pending descriptors
  * @ioat: ioat2+ channel
  *
- * set pending to '1' unless pending is already set to '2', pending == 2
- * indicates that submission is temporarily blocked due to an in-flight
- * reset. If we are already above the ioat_pending_level threshold then
- * just issue pending.
- *
- * called with ring_lock held
+ * Check if the number of unsubmitted descriptors has exceeded the
+ * watermark. Called with ring_lock held
  */
 static void ioat2_update_pending(struct ioat2_dma_chan *ioat)
 {
-	if (unlikely(ioat->pending == 2))
-		return;
-	else if (ioat2_ring_pending(ioat) > ioat_pending_level)
+	if (ioat2_ring_pending(ioat) > ioat_pending_level)
 		__ioat2_issue_pending(ioat);
-	else
-		ioat->pending = 1;
 }
 
 static void __ioat2_start_null_desc(struct ioat2_dma_chan *ioat)
@@ -166,7 +158,7 @@ static void __cleanup(struct ioat2_dma_chan *ioat, unsigned long phys_complete)
 		seen_current = true;
 	}
 	ioat->tail += i;
-	BUG_ON(!seen_current); /* no active descs have written a completion? */
+	BUG_ON(active && !seen_current); /* no active descs have written a completion? */
 
 	chan->last_completion = phys_complete;
 	if (ioat->head == ioat->tail) {
@@ -207,9 +199,9 @@ static void ioat2_cleanup(struct ioat2_dma_chan *ioat)
 	spin_unlock_bh(&chan->cleanup_lock);
 }
 
-void ioat2_cleanup_tasklet(unsigned long data)
+void ioat2_cleanup_event(unsigned long data)
 {
-	struct ioat2_dma_chan *ioat = (void *) data;
+	struct ioat2_dma_chan *ioat = to_ioat2_chan((void *) data);
 
 	ioat2_cleanup(ioat);
 	writew(IOAT_CHANCTRL_RUN, ioat->base.reg_base + IOAT_CHANCTRL_OFFSET);
@@ -291,7 +283,7 @@ static void ioat2_restart_channel(struct ioat2_dma_chan *ioat)
 
 void ioat2_timer_event(unsigned long data)
 {
-	struct ioat2_dma_chan *ioat = (void *) data;
+	struct ioat2_dma_chan *ioat = to_ioat2_chan((void *) data);
 	struct ioat_chan_common *chan = &ioat->base;
 
 	spin_lock_bh(&chan->cleanup_lock);
@@ -397,10 +389,7 @@ int ioat2_enumerate_channels(struct ioatdma_device *device)
 		if (!ioat)
 			break;
 
-		ioat_init_channel(device, &ioat->base, i,
-				  device->timer_fn,
-				  device->cleanup_tasklet,
-				  (unsigned long) ioat);
+		ioat_init_channel(device, &ioat->base, i);
 		ioat->xfercap_log = xfercap_log;
 		spin_lock_init(&ioat->ring_lock);
 		if (device->reset_hw(&ioat->base)) {
@@ -546,7 +535,6 @@ int ioat2_alloc_chan_resources(struct dma_chan *c)
 	ioat->head = 0;
 	ioat->issued = 0;
 	ioat->tail = 0;
-	ioat->pending = 0;
 	ioat->alloc_order = order;
 	spin_unlock_bh(&ioat->ring_lock);
 
@@ -701,7 +689,7 @@ int ioat2_alloc_and_lock(u16 *idx, struct ioat2_dma_chan *ioat, int num_descs)
 
 		mod_timer(&chan->timer, jiffies + COMPLETION_TIMEOUT);
 		spin_unlock_bh(&chan->cleanup_lock);
-		device->timer_fn((unsigned long) ioat);
+		device->timer_fn((unsigned long) &chan->common);
 	} else
 		spin_unlock_bh(&chan->cleanup_lock);
 	return -ENOMEM;
@@ -785,7 +773,7 @@ void ioat2_free_chan_resources(struct dma_chan *c)
 
 	tasklet_disable(&chan->cleanup_task);
 	del_timer_sync(&chan->timer);
-	device->cleanup_tasklet((unsigned long) ioat);
+	device->cleanup_fn((unsigned long) c);
 	device->reset_hw(chan);
 
 	spin_lock_bh(&ioat->ring_lock);
@@ -815,25 +803,9 @@ void ioat2_free_chan_resources(struct dma_chan *c)
 
 	chan->last_completion = 0;
 	chan->completion_dma = 0;
-	ioat->pending = 0;
 	ioat->dmacount = 0;
 }
 
-enum dma_status
-ioat2_is_complete(struct dma_chan *c, dma_cookie_t cookie,
-		  dma_cookie_t *done, dma_cookie_t *used)
-{
-	struct ioat2_dma_chan *ioat = to_ioat2_chan(c);
-	struct ioatdma_device *device = ioat->base.device;
-
-	if (ioat_is_complete(c, cookie, done, used) == DMA_SUCCESS)
-		return DMA_SUCCESS;
-
-	device->cleanup_tasklet((unsigned long) ioat);
-
-	return ioat_is_complete(c, cookie, done, used);
-}
-
 static ssize_t ring_size_show(struct dma_chan *c, char *page)
 {
 	struct ioat2_dma_chan *ioat = to_ioat2_chan(c);
@@ -874,7 +846,7 @@ int __devinit ioat2_dma_probe(struct ioatdma_device *device, int dca)
 
 	device->enumerate_channels = ioat2_enumerate_channels;
 	device->reset_hw = ioat2_reset_hw;
-	device->cleanup_tasklet = ioat2_cleanup_tasklet;
+	device->cleanup_fn = ioat2_cleanup_event;
 	device->timer_fn = ioat2_timer_event;
 	device->self_test = ioat_dma_self_test;
 	dma = &device->common;
@@ -882,7 +854,7 @@ int __devinit ioat2_dma_probe(struct ioatdma_device *device, int dca)
 	dma->device_issue_pending = ioat2_issue_pending;
 	dma->device_alloc_chan_resources = ioat2_alloc_chan_resources;
 	dma->device_free_chan_resources = ioat2_free_chan_resources;
-	dma->device_is_tx_complete = ioat2_is_complete;
+	dma->device_is_tx_complete = ioat_is_dma_complete;
 
 	err = ioat_probe(device);
 	if (err)