Diffstat (limited to 'drivers/media/video/ivtv/ivtv-irq.c')
 drivers/media/video/ivtv/ivtv-irq.c | 26 ++++++----------------------
 1 file changed, 6 insertions(+), 20 deletions(-)
diff --git a/drivers/media/video/ivtv/ivtv-irq.c b/drivers/media/video/ivtv/ivtv-irq.c
index bf7d99c6ffaf..fd1688e4757d 100644
--- a/drivers/media/video/ivtv/ivtv-irq.c
+++ b/drivers/media/video/ivtv/ivtv-irq.c
@@ -42,7 +42,6 @@ static void ivtv_pio_work_handler(struct ivtv *itv)
 {
 	struct ivtv_stream *s = &itv->streams[itv->cur_pio_stream];
 	struct ivtv_buffer *buf;
-	struct list_head *p;
 	int i = 0;
 
 	IVTV_DEBUG_HI_DMA("ivtv_pio_work_handler\n");
@@ -54,9 +53,7 @@ static void ivtv_pio_work_handler(struct ivtv *itv)
 		return;
 	}
 	IVTV_DEBUG_HI_DMA("Process PIO %s\n", s->name);
-	buf = list_entry(s->q_dma.list.next, struct ivtv_buffer, list);
-	list_for_each(p, &s->q_dma.list) {
-		struct ivtv_buffer *buf = list_entry(p, struct ivtv_buffer, list);
+	list_for_each_entry(buf, &s->q_dma.list, list) {
 		u32 size = s->sg_processing[i].size & 0x3ffff;
 
 		/* Copy the data from the card to the buffer */
@@ -97,7 +94,6 @@ static int stream_enc_dma_append(struct ivtv_stream *s, u32 data[CX2341X_MBOX_MA
 {
 	struct ivtv *itv = s->itv;
 	struct ivtv_buffer *buf;
-	struct list_head *p;
 	u32 bytes_needed = 0;
 	u32 offset, size;
 	u32 UVoffset = 0, UVsize = 0;
@@ -202,9 +198,7 @@ static int stream_enc_dma_append(struct ivtv_stream *s, u32 data[CX2341X_MBOX_MA
 	/* got the buffers, now fill in sg_pending */
 	buf = list_entry(s->q_predma.list.next, struct ivtv_buffer, list);
 	memset(buf->buf, 0, 128);
-	list_for_each(p, &s->q_predma.list) {
-		struct ivtv_buffer *buf = list_entry(p, struct ivtv_buffer, list);
-
+	list_for_each_entry(buf, &s->q_predma.list, list) {
 		if (skip_bufs-- > 0)
 			continue;
 		s->sg_pending[idx].dst = buf->dma_handle;
@@ -289,9 +283,7 @@ static void dma_post(struct ivtv_stream *s)
 	if (buf)
 		buf->bytesused += s->dma_last_offset;
 	if (buf && s->type == IVTV_DEC_STREAM_TYPE_VBI) {
-		list_for_each(p, &s->q_dma.list) {
-			buf = list_entry(p, struct ivtv_buffer, list);
-
+		list_for_each_entry(buf, &s->q_dma.list, list) {
 			/* Parse and Groom VBI Data */
 			s->q_dma.bytesused -= buf->bytesused;
 			ivtv_process_vbi_data(itv, buf, 0, s->type);
@@ -311,7 +303,6 @@ void ivtv_dma_stream_dec_prepare(struct ivtv_stream *s, u32 offset, int lock)
 {
 	struct ivtv *itv = s->itv;
 	struct ivtv_buffer *buf;
-	struct list_head *p;
 	u32 y_size = itv->params.height * itv->params.width;
 	u32 uv_offset = offset + IVTV_YUV_BUFFER_UV_OFFSET;
 	int y_done = 0;
@@ -320,10 +311,7 @@ void ivtv_dma_stream_dec_prepare(struct ivtv_stream *s, u32 offset, int lock)
 	int idx = 0;
 
 	IVTV_DEBUG_HI_DMA("DEC PREPARE DMA %s: %08x %08x\n", s->name, s->q_predma.bytesused, offset);
-	buf = list_entry(s->q_predma.list.next, struct ivtv_buffer, list);
-	list_for_each(p, &s->q_predma.list) {
-		struct ivtv_buffer *buf = list_entry(p, struct ivtv_buffer, list);
-
+	list_for_each_entry(buf, &s->q_predma.list, list) {
 		/* YUV UV Offset from Y Buffer */
 		if (s->type == IVTV_DEC_STREAM_TYPE_YUV && !y_done && bytes_written >= y_size) {
 			offset = uv_offset;
@@ -677,11 +665,9 @@ static void ivtv_irq_enc_vbi_cap(struct ivtv *itv)
 	   we just drop the old requests when there are already three
 	   requests queued. */
 	if (s->sg_pending_size > 2) {
-		struct list_head *p;
-		list_for_each(p, &s->q_predma.list) {
-			struct ivtv_buffer *buf = list_entry(p, struct ivtv_buffer, list);
+		struct ivtv_buffer *buf;
+		list_for_each_entry(buf, &s->q_predma.list, list)
 			ivtv_buf_sync_for_cpu(s, buf);
-		}
 		ivtv_queue_move(s, &s->q_predma, NULL, &s->q_free, 0);
 		s->sg_pending_size = 0;
 	}
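
For context, the change above replaces the open-coded list_for_each() + list_entry() pattern with list_for_each_entry(), which hands the loop body the containing structure directly and makes the extra struct list_head *p cursor unnecessary. Below is a minimal, self-contained userspace sketch of that idiom; struct item, the demo queue and main() are illustrative only (not part of the ivtv driver), and the list macros are simplified rewrites of the ones in <linux/list.h>.

/*
 * Sketch of the conversion: list_for_each_entry() folds the
 * list_for_each() + list_entry() pair into a single macro, so the
 * loop variable is the containing structure, not a bare list_head.
 */
#include <stdio.h>
#include <stddef.h>

struct list_head {
	struct list_head *next, *prev;
};

#define LIST_HEAD_INIT(name) { &(name), &(name) }

static void list_add_tail(struct list_head *new, struct list_head *head)
{
	/* insert 'new' just before 'head', i.e. at the tail of the list */
	new->prev = head->prev;
	new->next = head;
	head->prev->next = new;
	head->prev = new;
}

#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

#define list_entry(ptr, type, member) container_of(ptr, type, member)

/* open-coded form: what the patch removes */
#define list_for_each(pos, head) \
	for (pos = (head)->next; pos != (head); pos = pos->next)

/* combined form: what the patch uses instead */
#define list_for_each_entry(pos, head, member)				\
	for (pos = list_entry((head)->next, typeof(*pos), member);	\
	     &pos->member != (head);					\
	     pos = list_entry(pos->member.next, typeof(*pos), member))

struct item {
	int value;
	struct list_head list;	/* embedded node, like ivtv_buffer's */
};

int main(void)
{
	struct list_head q = LIST_HEAD_INIT(q);
	struct item a = { .value = 1 }, b = { .value = 2 };
	struct item *it;
	struct list_head *p;

	list_add_tail(&a.list, &q);
	list_add_tail(&b.list, &q);

	/* before: iterate over list_head pointers, convert by hand */
	list_for_each(p, &q) {
		struct item *cur = list_entry(p, struct item, list);
		printf("open-coded: %d\n", cur->value);
	}

	/* after: the macro hands back the containing struct directly */
	list_for_each_entry(it, &q, list)
		printf("combined:   %d\n", it->value);

	return 0;
}

The driver loops above follow the "after" shape: buf is declared once as struct ivtv_buffer * and each iteration sees it already pointing at the current buffer, which is why the separate list_entry() call at the top of each old loop body could be dropped.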