author     Per Forlin <per.forlin@linaro.org>      2011-08-17 05:03:40 -0400
committer  Felipe Balbi <balbi@ti.com>             2011-12-12 04:51:39 -0500
commit     be18a251892ab85d1bc10d87d336ee25f8dba615 (patch)
tree       f49d961002ab75e4f3af1833aa1470b85337485d /drivers/usb/musb
parent     dc47ce90c3a822cd7c9e9339fe4d5f61dcb26b50 (diff)
usb: musb: ux500: optimize DMA callback routine
Skip the use of a work queue and call musb_dma_completion() directly from
DMA callback context. Here follow measurements on a Snowball board with
the ondemand governor active.

Performance using the work queue:
(105 MB) copied, 6.23758 s, 16.8 MB/s
(105 MB) copied, 5.7151 s, 18.3 MB/s
(105 MB) copied, 5.83583 s, 18.0 MB/s
(105 MB) copied, 5.93611 s, 17.7 MB/s

Performance without the work queue:
(105 MB) copied, 5.62173 s, 18.7 MB/s
(105 MB) copied, 5.61811 s, 18.7 MB/s
(105 MB) copied, 5.57817 s, 18.8 MB/s
(105 MB) copied, 5.58549 s, 18.8 MB/s

Signed-off-by: Per Forlin <per.forlin@linaro.org>
Acked-by: Mian Yousaf Kaukab <mian.yousaf.kaukab@stericsson.com>
Signed-off-by: Felipe Balbi <balbi@ti.com>
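The change folds the separate TX and RX work functions into a single callback
that runs straight from DMA callback context: it takes musb->lock with the
irqsave variant and calls musb_dma_completion() while holding it, so no
deferral to a work queue is needed. A condensed sketch of the resulting
callback, simplified from the diff below (the dev_dbg() trace is omitted):

void ux500_dma_callback(void *private_data)
{
	struct dma_channel *channel = private_data;
	struct ux500_dma_channel *ux500_channel = channel->private_data;
	struct musb_hw_ep *hw_ep = ux500_channel->hw_ep;
	struct musb *musb = hw_ep->musb;
	unsigned long flags;

	/* Runs in DMA callback (interrupt) context, hence the irqsave lock. */
	spin_lock_irqsave(&musb->lock, flags);
	ux500_channel->channel.actual_len = ux500_channel->cur_len;
	ux500_channel->channel.status = MUSB_DMA_STATUS_FREE;
	musb_dma_completion(musb, hw_ep->epnum, ux500_channel->is_tx);
	spin_unlock_irqrestore(&musb->lock, flags);
}

Dropping the work queue removes one scheduling hop per completed transfer,
which is where the throughput gain in the measurements above comes from.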
Diffstat (limited to 'drivers/usb/musb')
-rw-r--r--  drivers/usb/musb/ux500_dma.c  39
1 file changed, 3 insertions(+), 36 deletions(-)
diff --git a/drivers/usb/musb/ux500_dma.c b/drivers/usb/musb/ux500_dma.c
index ef4333f4bbe0..a163632877af 100644
--- a/drivers/usb/musb/ux500_dma.c
+++ b/drivers/usb/musb/ux500_dma.c
@@ -37,7 +37,6 @@ struct ux500_dma_channel {
 	struct dma_channel channel;
 	struct ux500_dma_controller *controller;
 	struct musb_hw_ep *hw_ep;
-	struct work_struct channel_work;
 	struct dma_chan *dma_chan;
 	unsigned int cur_len;
 	dma_cookie_t cookie;
@@ -56,31 +55,11 @@ struct ux500_dma_controller {
 	dma_addr_t phy_base;
 };
 
-/* Work function invoked from DMA callback to handle tx transfers. */
-static void ux500_tx_work(struct work_struct *data)
-{
-	struct ux500_dma_channel *ux500_channel = container_of(data,
-		struct ux500_dma_channel, channel_work);
-	struct musb_hw_ep *hw_ep = ux500_channel->hw_ep;
-	struct musb *musb = hw_ep->musb;
-	unsigned long flags;
-
-	dev_dbg(musb->controller, "DMA tx transfer done on hw_ep=%d\n",
-		hw_ep->epnum);
-
-	spin_lock_irqsave(&musb->lock, flags);
-	ux500_channel->channel.actual_len = ux500_channel->cur_len;
-	ux500_channel->channel.status = MUSB_DMA_STATUS_FREE;
-	musb_dma_completion(musb, hw_ep->epnum,
-		ux500_channel->is_tx);
-	spin_unlock_irqrestore(&musb->lock, flags);
-}
-
 /* Work function invoked from DMA callback to handle rx transfers. */
-static void ux500_rx_work(struct work_struct *data)
+void ux500_dma_callback(void *private_data)
 {
-	struct ux500_dma_channel *ux500_channel = container_of(data,
-		struct ux500_dma_channel, channel_work);
+	struct dma_channel *channel = private_data;
+	struct ux500_dma_channel *ux500_channel = channel->private_data;
 	struct musb_hw_ep *hw_ep = ux500_channel->hw_ep;
 	struct musb *musb = hw_ep->musb;
 	unsigned long flags;
@@ -94,14 +73,7 @@ static void ux500_rx_work(struct work_struct *data)
 	musb_dma_completion(musb, hw_ep->epnum,
 		ux500_channel->is_tx);
 	spin_unlock_irqrestore(&musb->lock, flags);
-}
-
-void ux500_dma_callback(void *private_data)
-{
-	struct dma_channel *channel = (struct dma_channel *)private_data;
-	struct ux500_dma_channel *ux500_channel = channel->private_data;
 
-	schedule_work(&ux500_channel->channel_work);
 }
 
 static bool ux500_configure_channel(struct dma_channel *channel,
@@ -330,7 +302,6 @@ static int ux500_dma_controller_start(struct dma_controller *c)
 	void **param_array;
 	struct ux500_dma_channel *channel_array;
 	u32 ch_count;
-	void (*musb_channel_work)(struct work_struct *);
 	dma_cap_mask_t mask;
 
 	if ((data->num_rx_channels > UX500_MUSB_DMA_NUM_RX_CHANNELS) ||
@@ -347,7 +318,6 @@ static int ux500_dma_controller_start(struct dma_controller *c)
 	channel_array = controller->rx_channel;
 	ch_count = data->num_rx_channels;
 	param_array = data->dma_rx_param_array;
-	musb_channel_work = ux500_rx_work;
 
 	for (dir = 0; dir < 2; dir++) {
 		for (ch_num = 0; ch_num < ch_count; ch_num++) {
@@ -374,15 +344,12 @@ static int ux500_dma_controller_start(struct dma_controller *c)
 				return -EBUSY;
 			}
 
-			INIT_WORK(&ux500_channel->channel_work,
-				musb_channel_work);
 		}
 
 		/* Prepare the loop for TX channels */
 		channel_array = controller->tx_channel;
 		ch_count = data->num_tx_channels;
 		param_array = data->dma_tx_param_array;
-		musb_channel_work = ux500_tx_work;
 		is_tx = 1;
 	}
 
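For context, the registration of this callback is not part of the diff: the
descriptor is prepared in ux500_configure_channel() (unchanged here), where
the dmaengine descriptor's completion callback is pointed at
ux500_dma_callback() and the struct dma_channel is passed as the callback
parameter, which is what arrives as private_data above. A hedged sketch of
that wiring using the dmaengine API of the time; the exact code in the driver
may differ:

/* Sketch (assumed shape, not taken from this diff) of how the callback
 * gets attached when a transfer is set up in ux500_configure_channel(). */
struct dma_async_tx_descriptor *dma_desc;

dma_desc = dma_chan->device->device_prep_slave_sg(dma_chan, &sg, 1,
					direction, DMA_PREP_INTERRUPT);
if (!dma_desc)
	return false;

dma_desc->callback = ux500_dma_callback;
dma_desc->callback_param = channel;	/* shows up as private_data above */
ux500_channel->cookie = dma_desc->tx_submit(dma_desc);

dma_async_issue_pending(dma_chan);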