/* linux/arch/arm/plat-samsung/dma-ops.c
 *
 * Copyright (c) 2011 Samsung Electronics Co., Ltd.
 *		http://www.samsung.com
 *
 * Samsung DMA Operations
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#include <linux/kernel.h>
#include <linux/errno.h>
#include <linux/amba/pl330.h>
#include <linux/scatterlist.h>

#include <mach/dma.h>

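/*
 * Filter callback for dma_request_channel(): accept the PL330 channel
 * whose peripheral id matches the requested Samsung DMA channel number.
 */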
static inline bool pl330_filter(struct dma_chan *chan, void *param)
{
	struct dma_pl330_peri *peri = chan->private;
	return peri->peri_id == (unsigned)param;
}

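/*
 * Acquire a dmaengine channel for @dma_ch and, for device transfers,
 * program the slave configuration (FIFO address, bus width, burst size)
 * taken from @info.  The channel pointer is handed back to callers as an
 * opaque 'unsigned' handle.
 */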
static unsigned samsung_dmadev_request(enum dma_ch dma_ch,
				struct samsung_dma_info *info)
{
	struct dma_chan *chan;
	dma_cap_mask_t mask;
	struct dma_slave_config slave_config;

	dma_cap_zero(mask);
	dma_cap_set(info->cap, mask);

	chan = dma_request_channel(mask, pl330_filter, (void *)dma_ch);
	if (!chan)
		return 0;	/* no matching channel available */

	if (info->direction == DMA_FROM_DEVICE) {
		memset(&slave_config, 0, sizeof(struct dma_slave_config));
		slave_config.direction = info->direction;
		slave_config.src_addr = info->fifo;
		slave_config.src_addr_width = info->width;
		slave_config.src_maxburst = 1;
		dmaengine_slave_config(chan, &slave_config);
	} else if (info->direction == DMA_TO_DEVICE) {
		memset(&slave_config, 0, sizeof(struct dma_slave_config));
		slave_config.direction = info->direction;
		slave_config.dst_addr = info->fifo;
		slave_config.dst_addr_width = info->width;
		slave_config.dst_maxburst = 1;
		dmaengine_slave_config(chan, &slave_config);
	}

	return (unsigned)chan;
}

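/* Release a channel previously acquired with samsung_dmadev_request(). */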
static int samsung_dmadev_release(unsigned ch,
			struct s3c2410_dma_client *client)
{
	dma_release_channel((struct dma_chan *)ch);

	return 0;
}

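/*
 * Build a slave (single buffer) or cyclic transfer descriptor from @info,
 * attach the completion callback and submit it to the dmaengine queue.
 * The transfer itself is started later via the trigger operation.
 */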
static int samsung_dmadev_prepare(unsigned ch,
			struct samsung_dma_prep_info *info)
{
	struct scatterlist sg;
	struct dma_chan *chan = (struct dma_chan *)ch;
	struct dma_async_tx_descriptor *desc;

	switch (info->cap) {
	case DMA_SLAVE:
		sg_init_table(&sg, 1);
		sg_dma_len(&sg) = info->len;
		sg_set_page(&sg, pfn_to_page(PFN_DOWN(info->buf)),
			    info->len, offset_in_page(info->buf));
		sg_dma_address(&sg) = info->buf;

		desc = chan->device->device_prep_slave_sg(chan,
			&sg, 1, info->direction, DMA_PREP_INTERRUPT);
		break;
	case DMA_CYCLIC:
		desc = chan->device->device_prep_dma_cyclic(chan,
			info->buf, info->len, info->period, info->direction);
		break;
	default:
		dev_err(&chan->dev->device, "unsupported transfer type\n");
		return -EFAULT;
	}

	if (!desc) {
		dev_err(&chan->dev->device, "cannot prepare dma transfer\n");
		return -EFAULT;
	}

	desc->callback = info->fp;
	desc->callback_param = info->fp_param;

	dmaengine_submit(desc);

	return 0;
}

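/* Start execution of the descriptors pending on the channel. */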
static inline int samsung_dmadev_trigger(unsigned ch)
{
	dma_async_issue_pending((struct dma_chan *)ch);

	return 0;
}

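/* Abort all descriptors submitted to the channel. */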
static inline int samsung_dmadev_flush(unsigned ch)
{
	return dmaengine_terminate_all((struct dma_chan *)ch);
}

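/* dmaengine-backed implementation of the generic Samsung DMA operations. */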
struct samsung_dma_ops dmadev_ops = {
	.request	= samsung_dmadev_request,
	.release	= samsung_dmadev_release,
	.prepare	= samsung_dmadev_prepare,
	.trigger	= samsung_dmadev_trigger,
	.started	= NULL,
	.flush		= samsung_dmadev_flush,
	.stop		= samsung_dmadev_flush,
};

void *samsung_dmadev_get_ops(void)
{
	return &dmadev_ops;
}
EXPORT_SYMBOL(samsung_dmadev_get_ops);
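
/*
 * Illustrative usage sketch (not part of the build): a peripheral driver
 * would typically reach this ops table through a wrapper such as
 * samsung_dma_get_ops() and drive a transfer roughly as below.  The names
 * MY_DMACH_TX, my_fifo_phys, my_buf, my_len and my_done_cb are
 * placeholders, not symbols defined in this file.
 *
 *	struct samsung_dma_ops *ops = samsung_dma_get_ops();
 *	struct samsung_dma_info info;
 *	struct samsung_dma_prep_info prep;
 *	unsigned ch;
 *
 *	info.cap = DMA_SLAVE;
 *	info.direction = DMA_TO_DEVICE;
 *	info.fifo = my_fifo_phys;
 *	info.width = DMA_SLAVE_BUSWIDTH_4_BYTES;
 *	ch = ops->request(MY_DMACH_TX, &info);
 *
 *	prep.cap = DMA_SLAVE;
 *	prep.direction = DMA_TO_DEVICE;
 *	prep.buf = my_buf;
 *	prep.len = my_len;
 *	prep.fp = my_done_cb;
 *	prep.fp_param = NULL;
 *	ops->prepare(ch, &prep);
 *	ops->trigger(ch);
 */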