Diffstat (limited to 'drivers/media/platform/xilinx/xilinx-dma.c')
-rw-r--r--  drivers/media/platform/xilinx/xilinx-dma.c  24
1 file changed, 13 insertions(+), 11 deletions(-)
diff --git a/drivers/media/platform/xilinx/xilinx-dma.c b/drivers/media/platform/xilinx/xilinx-dma.c
index d9dcd4be2792..5af66c20475b 100644
--- a/drivers/media/platform/xilinx/xilinx-dma.c
+++ b/drivers/media/platform/xilinx/xilinx-dma.c
@@ -285,7 +285,7 @@ done:
  * @dma: DMA channel that uses the buffer
  */
 struct xvip_dma_buffer {
-	struct vb2_buffer buf;
+	struct vb2_v4l2_buffer buf;
 	struct list_head queue;
 	struct xvip_dma *dma;
 };
@@ -301,11 +301,11 @@ static void xvip_dma_complete(void *param)
 	list_del(&buf->queue);
 	spin_unlock(&dma->queued_lock);
 
-	buf->buf.v4l2_buf.field = V4L2_FIELD_NONE;
-	buf->buf.v4l2_buf.sequence = dma->sequence++;
-	v4l2_get_timestamp(&buf->buf.v4l2_buf.timestamp);
-	vb2_set_plane_payload(&buf->buf, 0, dma->format.sizeimage);
-	vb2_buffer_done(&buf->buf, VB2_BUF_STATE_DONE);
+	buf->buf.field = V4L2_FIELD_NONE;
+	buf->buf.sequence = dma->sequence++;
+	v4l2_get_timestamp(&buf->buf.timestamp);
+	vb2_set_plane_payload(&buf->buf.vb2_buf, 0, dma->format.sizeimage);
+	vb2_buffer_done(&buf->buf.vb2_buf, VB2_BUF_STATE_DONE);
 }
 
 static int
@@ -329,8 +329,9 @@ xvip_dma_queue_setup(struct vb2_queue *vq, const struct v4l2_format *fmt,
 
 static int xvip_dma_buffer_prepare(struct vb2_buffer *vb)
 {
+	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
 	struct xvip_dma *dma = vb2_get_drv_priv(vb->vb2_queue);
-	struct xvip_dma_buffer *buf = to_xvip_dma_buffer(vb);
+	struct xvip_dma_buffer *buf = to_xvip_dma_buffer(vbuf);
 
 	buf->dma = dma;
 
@@ -339,8 +340,9 @@ static int xvip_dma_buffer_prepare(struct vb2_buffer *vb)
 
 static void xvip_dma_buffer_queue(struct vb2_buffer *vb)
 {
+	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
 	struct xvip_dma *dma = vb2_get_drv_priv(vb->vb2_queue);
-	struct xvip_dma_buffer *buf = to_xvip_dma_buffer(vb);
+	struct xvip_dma_buffer *buf = to_xvip_dma_buffer(vbuf);
 	struct dma_async_tx_descriptor *desc;
 	dma_addr_t addr = vb2_dma_contig_plane_dma_addr(vb, 0);
 	u32 flags;
@@ -367,7 +369,7 @@ static void xvip_dma_buffer_queue(struct vb2_buffer *vb)
 	desc = dmaengine_prep_interleaved_dma(dma->dma, &dma->xt, flags);
 	if (!desc) {
 		dev_err(dma->xdev->dev, "Failed to prepare DMA transfer\n");
-		vb2_buffer_done(&buf->buf, VB2_BUF_STATE_ERROR);
+		vb2_buffer_done(&buf->buf.vb2_buf, VB2_BUF_STATE_ERROR);
 		return;
 	}
 	desc->callback = xvip_dma_complete;
@@ -434,7 +436,7 @@ error:
 	/* Give back all queued buffers to videobuf2. */
 	spin_lock_irq(&dma->queued_lock);
 	list_for_each_entry_safe(buf, nbuf, &dma->queued_bufs, queue) {
-		vb2_buffer_done(&buf->buf, VB2_BUF_STATE_QUEUED);
+		vb2_buffer_done(&buf->buf.vb2_buf, VB2_BUF_STATE_QUEUED);
 		list_del(&buf->queue);
 	}
 	spin_unlock_irq(&dma->queued_lock);
@@ -461,7 +463,7 @@ static void xvip_dma_stop_streaming(struct vb2_queue *vq)
 	/* Give back all queued buffers to videobuf2. */
 	spin_lock_irq(&dma->queued_lock);
 	list_for_each_entry_safe(buf, nbuf, &dma->queued_bufs, queue) {
-		vb2_buffer_done(&buf->buf, VB2_BUF_STATE_ERROR);
+		vb2_buffer_done(&buf->buf.vb2_buf, VB2_BUF_STATE_ERROR);
 		list_del(&buf->queue);
 	}
 	spin_unlock_irq(&dma->queued_lock);
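
Note: the hunks above only touch the struct member type and the call sites; they rely on the driver's to_xvip_dma_buffer() helper also being switched to take a struct vb2_v4l2_buffer *. That helper is not part of the hunks shown here, so the following is a minimal sketch of how it is expected to look after this change, assuming the usual container_of() pattern used by vb2 drivers. to_vb2_v4l2_buffer() itself is the wrapper provided by <media/videobuf2-v4l2.h>; vb2_set_plane_payload() and vb2_buffer_done() still take a plain struct vb2_buffer *, which is why the call sites now pass &buf->buf.vb2_buf.

/* Sketch (not part of the hunks above): the driver-private buffer now
 * embeds struct vb2_v4l2_buffer, which in turn embeds struct vb2_buffer
 * as its vb2_buf member. Helpers walk back up via container_of().
 */
#include <media/videobuf2-v4l2.h>

static inline struct xvip_dma_buffer *
to_xvip_dma_buffer(struct vb2_v4l2_buffer *vbuf)
{
	return container_of(vbuf, struct xvip_dma_buffer, buf);
}

/* Queue ops still receive a plain struct vb2_buffer *, so call sites
 * convert in two steps, as in the prepare/queue hunks:
 *
 *	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
 *	struct xvip_dma_buffer *buf = to_xvip_dma_buffer(vbuf);
 */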