authorPierre-Yves MORDRET <pierre-yves.mordret@st.com>2017-09-28 11:36:41 -0400
committerVinod Koul <vinod.koul@intel.com>2017-10-08 05:15:34 -0400
commita4ffb13c8946abc4b92621275de6718e19db860d (patch)
tree7005d0c1aa302a5c6843472190599e192f982ff6
parent18d59893eb2e836c920856772ead554eb461ef72 (diff)
dmaengine: Add STM32 MDMA driver
This patch adds the driver for the STM32 MDMA controller.

Master Direct Memory Access (MDMA) provides high-speed data transfer between memory and memory, or between peripherals and memory. The MDMA controller provides a master AXI interface for main memory and peripheral register access (system access port), and a master AHB interface only for Cortex-M7 TCM memory access (TCM access port).

MDMA works in conjunction with the standard DMA controllers (DMA1 or DMA2). It offers up to 64 channels, each dedicated to managing memory access requests from one of the DMA stream memory buffers or other peripherals (with integrated FIFO).

Signed-off-by: M'boumba Cedric Madianga <cedric.madianga@gmail.com>
Signed-off-by: Pierre-Yves MORDRET <pierre-yves.mordret@st.com>
Signed-off-by: Vinod Koul <vinod.koul@intel.com>
-rw-r--r--drivers/dma/Kconfig12
-rw-r--r--drivers/dma/Makefile1
-rw-r--r--drivers/dma/stm32-mdma.c1666
3 files changed, 1679 insertions, 0 deletions
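For context, here is a minimal sketch (not part of the patch) of how a client driver could use an MDMA channel through the generic dmaengine API this driver implements; the "tx" channel name and the destination register address are illustrative assumptions:

#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>

static int example_mdma_tx(struct device *dev, dma_addr_t buf, size_t len)
{
	struct dma_slave_config cfg = {
		.direction = DMA_MEM_TO_DEV,
		.dst_addr = 0x40011028,		/* assumed peripheral data register */
		.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES,
		.dst_maxburst = 16,		/* MDMA requires a power-of-2 burst */
	};
	struct dma_async_tx_descriptor *desc;
	struct dma_chan *chan;
	dma_cookie_t cookie;

	/* Resolved through stm32_mdma_of_xlate() from the client's DT node */
	chan = dma_request_chan(dev, "tx");
	if (IS_ERR(chan))
		return PTR_ERR(chan);

	dmaengine_slave_config(chan, &cfg);	/* -> stm32_mdma_slave_config() */

	desc = dmaengine_prep_slave_single(chan, buf, len, DMA_MEM_TO_DEV,
					   DMA_PREP_INTERRUPT);
	if (!desc) {
		dma_release_channel(chan);
		return -EINVAL;
	}

	cookie = dmaengine_submit(desc);
	dma_async_issue_pending(chan);		/* -> stm32_mdma_issue_pending() */

	return dma_submit_error(cookie);
}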
diff --git a/drivers/dma/Kconfig b/drivers/dma/Kconfig
index 04e381b522b4..73446622ccc9 100644
--- a/drivers/dma/Kconfig
+++ b/drivers/dma/Kconfig
@@ -492,6 +492,18 @@ config STM32_DMAMUX
492	  If you have a board based on such a MCU and wish to use DMAMUX say Y
493	  here.
494
495config STM32_MDMA
496 bool "STMicroelectronics STM32 master dma support"
497 depends on ARCH_STM32 || COMPILE_TEST
498 select DMA_ENGINE
499 select DMA_OF
500 select DMA_VIRTUAL_CHANNELS
501 help
502 Enable support for the on-chip MDMA controller on STMicroelectronics
503 STM32 platforms.
504 If you have a board based on STM32 SoC and wish to use the master DMA
505 say Y here.
506
507config S3C24XX_DMAC
508	bool "Samsung S3C24XX DMA support"
509	depends on ARCH_S3C24XX || COMPILE_TEST
diff --git a/drivers/dma/Makefile b/drivers/dma/Makefile
index a145ad1426bc..4d2376fdea01 100644
--- a/drivers/dma/Makefile
+++ b/drivers/dma/Makefile
@@ -60,6 +60,7 @@ obj-$(CONFIG_SIRF_DMA) += sirf-dma.o
60obj-$(CONFIG_STE_DMA40) += ste_dma40.o ste_dma40_ll.o
61obj-$(CONFIG_STM32_DMA) += stm32-dma.o
62obj-$(CONFIG_STM32_DMAMUX) += stm32-dmamux.o
63obj-$(CONFIG_STM32_MDMA) += stm32-mdma.o
64obj-$(CONFIG_S3C24XX_DMAC) += s3c24xx-dma.o
65obj-$(CONFIG_TXX9_DMAC) += txx9dmac.o
66obj-$(CONFIG_TEGRA20_APB_DMA) += tegra20-apb-dma.o
diff --git a/drivers/dma/stm32-mdma.c b/drivers/dma/stm32-mdma.c
new file mode 100644
index 000000000000..a9cb341c8ee0
--- /dev/null
+++ b/drivers/dma/stm32-mdma.c
@@ -0,0 +1,1666 @@
1/*
2 *
3 * Copyright (C) STMicroelectronics SA 2017
4 * Author(s): M'boumba Cedric Madianga <cedric.madianga@gmail.com>
5 * Pierre-Yves Mordret <pierre-yves.mordret@st.com>
6 *
7 * License terms: GPL V2.0.
8 *
9 * This program is free software; you can redistribute it and/or modify it
10 * under the terms of the GNU General Public License version 2 as published by
11 * the Free Software Foundation.
12 *
13 * This program is distributed in the hope that it will be useful, but
14 * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
16 * details.
17 *
18 * Driver for STM32 MDMA controller
19 *
20 * Inspired by stm32-dma.c and dma-jz4780.c
21 *
22 */
23
24#include <linux/clk.h>
25#include <linux/delay.h>
26#include <linux/dmaengine.h>
27#include <linux/dma-mapping.h>
28#include <linux/dmapool.h>
29#include <linux/err.h>
30#include <linux/init.h>
31#include <linux/iopoll.h>
32#include <linux/jiffies.h>
33#include <linux/list.h>
34#include <linux/log2.h>
35#include <linux/module.h>
36#include <linux/of.h>
37#include <linux/of_device.h>
38#include <linux/of_dma.h>
39#include <linux/platform_device.h>
40#include <linux/reset.h>
41#include <linux/slab.h>
42
43#include "virt-dma.h"
44
45/* MDMA Generic getter/setter */
46#define STM32_MDMA_SHIFT(n) (ffs(n) - 1)
47#define STM32_MDMA_SET(n, mask) (((n) << STM32_MDMA_SHIFT(mask)) & \
48 (mask))
49#define STM32_MDMA_GET(n, mask) (((n) & (mask)) >> \
50 STM32_MDMA_SHIFT(mask))
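/*
 * Example: with STM32_MDMA_CCR_PL_MASK = GENMASK(7, 6), STM32_MDMA_SHIFT()
 * gives 6, so STM32_MDMA_SET(2, mask) yields 0x80 and
 * STM32_MDMA_GET(0x80, mask) yields 2 again.
 */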
51
52#define STM32_MDMA_GISR0 0x0000 /* MDMA Int Status Reg 1 */
53#define STM32_MDMA_GISR1 0x0004 /* MDMA Int Status Reg 2 */
54
55/* MDMA Channel x interrupt/status register */
56#define STM32_MDMA_CISR(x) (0x40 + 0x40 * (x)) /* x = 0..62 */
57#define STM32_MDMA_CISR_CRQA BIT(16)
58#define STM32_MDMA_CISR_TCIF BIT(4)
59#define STM32_MDMA_CISR_BTIF BIT(3)
60#define STM32_MDMA_CISR_BRTIF BIT(2)
61#define STM32_MDMA_CISR_CTCIF BIT(1)
62#define STM32_MDMA_CISR_TEIF BIT(0)
63
64/* MDMA Channel x interrupt flag clear register */
65#define STM32_MDMA_CIFCR(x) (0x44 + 0x40 * (x))
66#define STM32_MDMA_CIFCR_CLTCIF BIT(4)
67#define STM32_MDMA_CIFCR_CBTIF BIT(3)
68#define STM32_MDMA_CIFCR_CBRTIF BIT(2)
69#define STM32_MDMA_CIFCR_CCTCIF BIT(1)
70#define STM32_MDMA_CIFCR_CTEIF BIT(0)
71#define STM32_MDMA_CIFCR_CLEAR_ALL (STM32_MDMA_CIFCR_CLTCIF \
72 | STM32_MDMA_CIFCR_CBTIF \
73 | STM32_MDMA_CIFCR_CBRTIF \
74 | STM32_MDMA_CIFCR_CCTCIF \
75 | STM32_MDMA_CIFCR_CTEIF)
76
77/* MDMA Channel x error status register */
78#define STM32_MDMA_CESR(x) (0x48 + 0x40 * (x))
79#define STM32_MDMA_CESR_BSE BIT(11)
80#define STM32_MDMA_CESR_ASR BIT(10)
81#define STM32_MDMA_CESR_TEMD BIT(9)
82#define STM32_MDMA_CESR_TELD BIT(8)
83#define STM32_MDMA_CESR_TED BIT(7)
84#define STM32_MDMA_CESR_TEA_MASK GENMASK(6, 0)
85
86/* MDMA Channel x control register */
87#define STM32_MDMA_CCR(x) (0x4C + 0x40 * (x))
88#define STM32_MDMA_CCR_SWRQ BIT(16)
89#define STM32_MDMA_CCR_WEX BIT(14)
90#define STM32_MDMA_CCR_HEX BIT(13)
91#define STM32_MDMA_CCR_BEX BIT(12)
92#define STM32_MDMA_CCR_PL_MASK GENMASK(7, 6)
93#define STM32_MDMA_CCR_PL(n) STM32_MDMA_SET(n, \
94 STM32_MDMA_CCR_PL_MASK)
95#define STM32_MDMA_CCR_TCIE BIT(5)
96#define STM32_MDMA_CCR_BTIE BIT(4)
97#define STM32_MDMA_CCR_BRTIE BIT(3)
98#define STM32_MDMA_CCR_CTCIE BIT(2)
99#define STM32_MDMA_CCR_TEIE BIT(1)
100#define STM32_MDMA_CCR_EN BIT(0)
101#define STM32_MDMA_CCR_IRQ_MASK (STM32_MDMA_CCR_TCIE \
102 | STM32_MDMA_CCR_BTIE \
103 | STM32_MDMA_CCR_BRTIE \
104 | STM32_MDMA_CCR_CTCIE \
105 | STM32_MDMA_CCR_TEIE)
106
107/* MDMA Channel x transfer configuration register */
108#define STM32_MDMA_CTCR(x) (0x50 + 0x40 * (x))
109#define STM32_MDMA_CTCR_BWM BIT(31)
110#define STM32_MDMA_CTCR_SWRM BIT(30)
111#define STM32_MDMA_CTCR_TRGM_MSK GENMASK(29, 28)
112#define STM32_MDMA_CTCR_TRGM(n) STM32_MDMA_SET((n), \
113 STM32_MDMA_CTCR_TRGM_MSK)
114#define STM32_MDMA_CTCR_TRGM_GET(n) STM32_MDMA_GET((n), \
115 STM32_MDMA_CTCR_TRGM_MSK)
116#define STM32_MDMA_CTCR_PAM_MASK GENMASK(27, 26)
117#define STM32_MDMA_CTCR_PAM(n) STM32_MDMA_SET(n, \
118 STM32_MDMA_CTCR_PAM_MASK)
119#define STM32_MDMA_CTCR_PKE BIT(25)
120#define STM32_MDMA_CTCR_TLEN_MSK GENMASK(24, 18)
121#define STM32_MDMA_CTCR_TLEN(n) STM32_MDMA_SET((n), \
122 STM32_MDMA_CTCR_TLEN_MSK)
123#define STM32_MDMA_CTCR_TLEN_GET(n) STM32_MDMA_GET((n), \
124 STM32_MDMA_CTCR_TLEN_MSK)
125#define STM32_MDMA_CTCR_LEN2_MSK GENMASK(25, 18)
126#define STM32_MDMA_CTCR_LEN2(n) STM32_MDMA_SET((n), \
127 STM32_MDMA_CTCR_LEN2_MSK)
128#define STM32_MDMA_CTCR_LEN2_GET(n) STM32_MDMA_GET((n), \
129 STM32_MDMA_CTCR_LEN2_MSK)
130#define STM32_MDMA_CTCR_DBURST_MASK GENMASK(17, 15)
131#define STM32_MDMA_CTCR_DBURST(n) STM32_MDMA_SET(n, \
132 STM32_MDMA_CTCR_DBURST_MASK)
133#define STM32_MDMA_CTCR_SBURST_MASK GENMASK(14, 12)
134#define STM32_MDMA_CTCR_SBURST(n) STM32_MDMA_SET(n, \
135 STM32_MDMA_CTCR_SBURST_MASK)
136#define STM32_MDMA_CTCR_DINCOS_MASK GENMASK(11, 10)
137#define STM32_MDMA_CTCR_DINCOS(n) STM32_MDMA_SET((n), \
138 STM32_MDMA_CTCR_DINCOS_MASK)
139#define STM32_MDMA_CTCR_SINCOS_MASK GENMASK(9, 8)
140#define STM32_MDMA_CTCR_SINCOS(n) STM32_MDMA_SET((n), \
141 STM32_MDMA_CTCR_SINCOS_MASK)
142#define STM32_MDMA_CTCR_DSIZE_MASK GENMASK(7, 6)
143#define STM32_MDMA_CTCR_DSIZE(n) STM32_MDMA_SET(n, \
144 STM32_MDMA_CTCR_DSIZE_MASK)
145#define STM32_MDMA_CTCR_SSIZE_MASK GENMASK(5, 4)
146#define STM32_MDMA_CTCR_SSIZE(n) STM32_MDMA_SET(n, \
147 STM32_MDMA_CTCR_SSIZE_MASK)
148#define STM32_MDMA_CTCR_DINC_MASK GENMASK(3, 2)
149#define STM32_MDMA_CTCR_DINC(n) STM32_MDMA_SET((n), \
150 STM32_MDMA_CTCR_DINC_MASK)
151#define STM32_MDMA_CTCR_SINC_MASK GENMASK(1, 0)
152#define STM32_MDMA_CTCR_SINC(n) STM32_MDMA_SET((n), \
153 STM32_MDMA_CTCR_SINC_MASK)
154#define STM32_MDMA_CTCR_CFG_MASK (STM32_MDMA_CTCR_SINC_MASK \
155 | STM32_MDMA_CTCR_DINC_MASK \
156 | STM32_MDMA_CTCR_SINCOS_MASK \
157 | STM32_MDMA_CTCR_DINCOS_MASK \
158 | STM32_MDMA_CTCR_LEN2_MSK \
159 | STM32_MDMA_CTCR_TRGM_MSK)
160
161/* MDMA Channel x block number of data register */
162#define STM32_MDMA_CBNDTR(x) (0x54 + 0x40 * (x))
163#define STM32_MDMA_CBNDTR_BRC_MK GENMASK(31, 20)
164#define STM32_MDMA_CBNDTR_BRC(n) STM32_MDMA_SET(n, \
165 STM32_MDMA_CBNDTR_BRC_MK)
166#define STM32_MDMA_CBNDTR_BRC_GET(n) STM32_MDMA_GET((n), \
167 STM32_MDMA_CBNDTR_BRC_MK)
168
169#define STM32_MDMA_CBNDTR_BRDUM BIT(19)
170#define STM32_MDMA_CBNDTR_BRSUM BIT(18)
171#define STM32_MDMA_CBNDTR_BNDT_MASK GENMASK(16, 0)
172#define STM32_MDMA_CBNDTR_BNDT(n) STM32_MDMA_SET(n, \
173 STM32_MDMA_CBNDTR_BNDT_MASK)
174
175/* MDMA Channel x source address register */
176#define STM32_MDMA_CSAR(x) (0x58 + 0x40 * (x))
177
178/* MDMA Channel x destination address register */
179#define STM32_MDMA_CDAR(x) (0x5C + 0x40 * (x))
180
181/* MDMA Channel x block repeat address update register */
182#define STM32_MDMA_CBRUR(x) (0x60 + 0x40 * (x))
183#define STM32_MDMA_CBRUR_DUV_MASK GENMASK(31, 16)
184#define STM32_MDMA_CBRUR_DUV(n) STM32_MDMA_SET(n, \
185 STM32_MDMA_CBRUR_DUV_MASK)
186#define STM32_MDMA_CBRUR_SUV_MASK GENMASK(15, 0)
187#define STM32_MDMA_CBRUR_SUV(n) STM32_MDMA_SET(n, \
188 STM32_MDMA_CBRUR_SUV_MASK)
189
190/* MDMA Channel x link address register */
191#define STM32_MDMA_CLAR(x) (0x64 + 0x40 * (x))
192
193/* MDMA Channel x trigger and bus selection register */
194#define STM32_MDMA_CTBR(x) (0x68 + 0x40 * (x))
195#define STM32_MDMA_CTBR_DBUS BIT(17)
196#define STM32_MDMA_CTBR_SBUS BIT(16)
197#define STM32_MDMA_CTBR_TSEL_MASK GENMASK(7, 0)
198#define STM32_MDMA_CTBR_TSEL(n) STM32_MDMA_SET(n, \
199 STM32_MDMA_CTBR_TSEL_MASK)
200
201/* MDMA Channel x mask address register */
202#define STM32_MDMA_CMAR(x) (0x70 + 0x40 * (x))
203
204/* MDMA Channel x mask data register */
205#define STM32_MDMA_CMDR(x) (0x74 + 0x40 * (x))
206
207#define STM32_MDMA_MAX_BUF_LEN 128
208#define STM32_MDMA_MAX_BLOCK_LEN 65536
209#define STM32_MDMA_MAX_CHANNELS 63
210#define STM32_MDMA_MAX_REQUESTS 256
211#define STM32_MDMA_MAX_BURST 128
212#define STM32_MDMA_VERY_HIGH_PRIORITY 0x11
213
214enum stm32_mdma_trigger_mode {
215 STM32_MDMA_BUFFER,
216 STM32_MDMA_BLOCK,
217 STM32_MDMA_BLOCK_REP,
218 STM32_MDMA_LINKED_LIST,
219};
220
221enum stm32_mdma_width {
222 STM32_MDMA_BYTE,
223 STM32_MDMA_HALF_WORD,
224 STM32_MDMA_WORD,
225 STM32_MDMA_DOUBLE_WORD,
226};
227
228enum stm32_mdma_inc_mode {
229 STM32_MDMA_FIXED = 0,
230 STM32_MDMA_INC = 2,
231 STM32_MDMA_DEC = 3,
232};
233
234struct stm32_mdma_chan_config {
235 u32 request;
236 u32 priority_level;
237 u32 transfer_config;
238 u32 mask_addr;
239 u32 mask_data;
240};
241
242struct stm32_mdma_hwdesc {
243 u32 ctcr;
244 u32 cbndtr;
245 u32 csar;
246 u32 cdar;
247 u32 cbrur;
248 u32 clar;
249 u32 ctbr;
250 u32 dummy;
251 u32 cmar;
252 u32 cmdr;
253} __aligned(64);
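/*
 * This layout mirrors the per-channel register block from CTCR (0x50)
 * through CMDR (0x74); the dummy word stands in for the reserved offset
 * 0x6C, so in linked-list mode the controller can fetch the next transfer
 * context directly from the address programmed in CLAR.
 */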
254
255struct stm32_mdma_desc {
256 struct virt_dma_desc vdesc;
257 u32 ccr;
258 struct stm32_mdma_hwdesc *hwdesc;
259 dma_addr_t hwdesc_phys;
260 bool cyclic;
261 u32 count;
262};
263
264struct stm32_mdma_chan {
265 struct virt_dma_chan vchan;
266 struct dma_pool *desc_pool;
267 u32 id;
268 struct stm32_mdma_desc *desc;
269 u32 curr_hwdesc;
270 struct dma_slave_config dma_config;
271 struct stm32_mdma_chan_config chan_config;
272 bool busy;
273 u32 mem_burst;
274 u32 mem_width;
275};
276
277struct stm32_mdma_device {
278 struct dma_device ddev;
279 void __iomem *base;
280 struct clk *clk;
281 int irq;
282 struct reset_control *rst;
283 u32 nr_channels;
284 u32 nr_requests;
285 u32 nr_ahb_addr_masks;
286 struct stm32_mdma_chan chan[STM32_MDMA_MAX_CHANNELS];
287 u32 ahb_addr_masks[];
288};
289
290static struct stm32_mdma_device *stm32_mdma_get_dev(
291 struct stm32_mdma_chan *chan)
292{
293 return container_of(chan->vchan.chan.device, struct stm32_mdma_device,
294 ddev);
295}
296
297static struct stm32_mdma_chan *to_stm32_mdma_chan(struct dma_chan *c)
298{
299 return container_of(c, struct stm32_mdma_chan, vchan.chan);
300}
301
302static struct stm32_mdma_desc *to_stm32_mdma_desc(struct virt_dma_desc *vdesc)
303{
304 return container_of(vdesc, struct stm32_mdma_desc, vdesc);
305}
306
307static struct device *chan2dev(struct stm32_mdma_chan *chan)
308{
309 return &chan->vchan.chan.dev->device;
310}
311
312static struct device *mdma2dev(struct stm32_mdma_device *mdma_dev)
313{
314 return mdma_dev->ddev.dev;
315}
316
317static u32 stm32_mdma_read(struct stm32_mdma_device *dmadev, u32 reg)
318{
319 return readl_relaxed(dmadev->base + reg);
320}
321
322static void stm32_mdma_write(struct stm32_mdma_device *dmadev, u32 reg, u32 val)
323{
324 writel_relaxed(val, dmadev->base + reg);
325}
326
327static void stm32_mdma_set_bits(struct stm32_mdma_device *dmadev, u32 reg,
328 u32 mask)
329{
330 void __iomem *addr = dmadev->base + reg;
331
332 writel_relaxed(readl_relaxed(addr) | mask, addr);
333}
334
335static void stm32_mdma_clr_bits(struct stm32_mdma_device *dmadev, u32 reg,
336 u32 mask)
337{
338 void __iomem *addr = dmadev->base + reg;
339
340 writel_relaxed(readl_relaxed(addr) & ~mask, addr);
341}
342
343static struct stm32_mdma_desc *stm32_mdma_alloc_desc(
344 struct stm32_mdma_chan *chan, u32 count)
345{
346 struct stm32_mdma_desc *desc;
347
348 desc = kzalloc(sizeof(*desc), GFP_NOWAIT);
349 if (!desc)
350 return NULL;
351
352 desc->hwdesc = dma_pool_alloc(chan->desc_pool, GFP_NOWAIT,
353 &desc->hwdesc_phys);
354 if (!desc->hwdesc) {
355 dev_err(chan2dev(chan), "Failed to allocate descriptor\n");
356 kfree(desc);
357 return NULL;
358 }
359
360 desc->count = count;
361
362 return desc;
363}
364
365static void stm32_mdma_desc_free(struct virt_dma_desc *vdesc)
366{
367 struct stm32_mdma_desc *desc = to_stm32_mdma_desc(vdesc);
368 struct stm32_mdma_chan *chan = to_stm32_mdma_chan(vdesc->tx.chan);
369
370 dma_pool_free(chan->desc_pool, desc->hwdesc, desc->hwdesc_phys);
371 kfree(desc);
372}
373
374static int stm32_mdma_get_width(struct stm32_mdma_chan *chan,
375 enum dma_slave_buswidth width)
376{
377 switch (width) {
378 case DMA_SLAVE_BUSWIDTH_1_BYTE:
379 case DMA_SLAVE_BUSWIDTH_2_BYTES:
380 case DMA_SLAVE_BUSWIDTH_4_BYTES:
381 case DMA_SLAVE_BUSWIDTH_8_BYTES:
382 return ffs(width) - 1;
383 default:
384 dev_err(chan2dev(chan), "Dma bus width %i not supported\n",
385 width);
386 return -EINVAL;
387 }
388}
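/* e.g. DMA_SLAVE_BUSWIDTH_4_BYTES (4) -> ffs(4) - 1 = 2 = STM32_MDMA_WORD */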
389
390static enum dma_slave_buswidth stm32_mdma_get_max_width(u32 buf_len, u32 tlen)
391{
392 enum dma_slave_buswidth max_width = DMA_SLAVE_BUSWIDTH_8_BYTES;
393
394 for (max_width = DMA_SLAVE_BUSWIDTH_8_BYTES;
395 max_width > DMA_SLAVE_BUSWIDTH_1_BYTE;
396 max_width >>= 1) {
397 if (((buf_len % max_width) == 0) && (tlen >= max_width))
398 break;
399 }
400
401 return max_width;
402}
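/*
 * e.g. buf_len = 96 with tlen = 128 gives 8 bytes (96 is a multiple of 8),
 * while buf_len = 6 steps down to 2 bytes.
 */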
403
404static u32 stm32_mdma_get_best_burst(u32 buf_len, u32 tlen, u32 max_burst,
405 enum dma_slave_buswidth width)
406{
407 u32 best_burst = max_burst;
408 u32 burst_len = best_burst * width;
409
410 while ((burst_len > 0) && (tlen % burst_len)) {
411 best_burst = best_burst >> 1;
412 burst_len = best_burst * width;
413 }
414
415 return (best_burst > 0) ? best_burst : 1;
416}
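/*
 * e.g. tlen = 128, width = 4 and max_burst = 16 keep the full 16-beat burst
 * (64 bytes divides 128 evenly); a tlen not divisible by the burst length
 * halves the burst until it fits.
 */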
417
418static int stm32_mdma_disable_chan(struct stm32_mdma_chan *chan)
419{
420 struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
421 u32 ccr, cisr, id, reg;
422 int ret;
423
424 id = chan->id;
425 reg = STM32_MDMA_CCR(id);
426
427 /* Disable interrupts */
428 stm32_mdma_clr_bits(dmadev, reg, STM32_MDMA_CCR_IRQ_MASK);
429
430 ccr = stm32_mdma_read(dmadev, reg);
431 if (ccr & STM32_MDMA_CCR_EN) {
432 stm32_mdma_clr_bits(dmadev, reg, STM32_MDMA_CCR_EN);
433
434 /* Ensure that any ongoing transfer has been completed */
435 ret = readl_relaxed_poll_timeout_atomic(
436 dmadev->base + STM32_MDMA_CISR(id), cisr,
437 (cisr & STM32_MDMA_CISR_CTCIF), 10, 1000);
438 if (ret) {
439 dev_err(chan2dev(chan), "%s: timeout!\n", __func__);
440 return -EBUSY;
441 }
442 }
443
444 return 0;
445}
446
447static void stm32_mdma_stop(struct stm32_mdma_chan *chan)
448{
449 struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
450 u32 status;
451 int ret;
452
453 /* Disable DMA */
454 ret = stm32_mdma_disable_chan(chan);
455 if (ret < 0)
456 return;
457
458 /* Clear interrupt status if it is there */
459 status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(chan->id));
460 if (status) {
461 dev_dbg(chan2dev(chan), "%s(): clearing interrupt: 0x%08x\n",
462 __func__, status);
463 stm32_mdma_set_bits(dmadev, STM32_MDMA_CIFCR(chan->id), status);
464 }
465
466 chan->busy = false;
467}
468
469static void stm32_mdma_set_bus(struct stm32_mdma_device *dmadev, u32 *ctbr,
470 u32 ctbr_mask, u32 src_addr)
471{
472 u32 mask;
473 int i;
474
475 /* Check if memory device is on AHB or AXI */
476 *ctbr &= ~ctbr_mask;
477 mask = src_addr & 0xF0000000;
478 for (i = 0; i < dmadev->nr_ahb_addr_masks; i++) {
479 if (mask == dmadev->ahb_addr_masks[i]) {
480 *ctbr |= ctbr_mask;
481 break;
482 }
483 }
484}
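/*
 * e.g. assuming the DT provides st,ahb-addr-masks = <0x20000000>, a buffer
 * at 0x20001000 matches (0x20001000 & 0xF0000000 == 0x20000000) and the
 * AHB/TCM port is selected by setting the bus bit in CTBR.
 */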
485
486static int stm32_mdma_set_xfer_param(struct stm32_mdma_chan *chan,
487 enum dma_transfer_direction direction,
488 u32 *mdma_ccr, u32 *mdma_ctcr,
489 u32 *mdma_ctbr, u32 buf_len)
490{
491 struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
492 struct stm32_mdma_chan_config *chan_config = &chan->chan_config;
493 enum dma_slave_buswidth src_addr_width, dst_addr_width;
494 phys_addr_t src_addr, dst_addr;
495 int src_bus_width, dst_bus_width;
496 u32 src_maxburst, dst_maxburst, src_best_burst, dst_best_burst;
497 u32 ccr, ctcr, ctbr, tlen;
498
499 src_addr_width = chan->dma_config.src_addr_width;
500 dst_addr_width = chan->dma_config.dst_addr_width;
501 src_maxburst = chan->dma_config.src_maxburst;
502 dst_maxburst = chan->dma_config.dst_maxburst;
503
504 ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id));
505 ctcr = stm32_mdma_read(dmadev, STM32_MDMA_CTCR(chan->id));
506 ctbr = stm32_mdma_read(dmadev, STM32_MDMA_CTBR(chan->id));
507
508 /* Enable HW request mode */
509 ctcr &= ~STM32_MDMA_CTCR_SWRM;
510
511	/* Set DINC, SINC, DINCOS, SINCOS, TRGM and TLEN retrieved from DT */
512 ctcr &= ~STM32_MDMA_CTCR_CFG_MASK;
513 ctcr |= chan_config->transfer_config & STM32_MDMA_CTCR_CFG_MASK;
514
515 /*
516	 * For the buffer transfer length (TLEN), the CTCR register
517	 * must be programmed with the number of bytes minus 1
518 */
519 tlen = STM32_MDMA_CTCR_LEN2_GET(ctcr);
520 ctcr &= ~STM32_MDMA_CTCR_LEN2_MSK;
521 ctcr |= STM32_MDMA_CTCR_TLEN((tlen - 1));
522
523 /* Check burst size constraints */
524 if (src_maxburst * src_addr_width > STM32_MDMA_MAX_BURST ||
525 dst_maxburst * dst_addr_width > STM32_MDMA_MAX_BURST) {
526 dev_err(chan2dev(chan),
527 "burst size * bus width higher than %d bytes\n",
528 STM32_MDMA_MAX_BURST);
529 return -EINVAL;
530 }
531
532 if ((!is_power_of_2(src_maxburst) && src_maxburst > 0) ||
533 (!is_power_of_2(dst_maxburst) && dst_maxburst > 0)) {
534 dev_err(chan2dev(chan), "burst size must be a power of 2\n");
535 return -EINVAL;
536 }
537
538 /*
539 * Configure channel control:
540 * - Clear SW request as in this case this is a HW one
541 * - Clear WEX, HEX and BEX bits
542 * - Set priority level
543 */
544 ccr &= ~(STM32_MDMA_CCR_SWRQ | STM32_MDMA_CCR_WEX | STM32_MDMA_CCR_HEX |
545 STM32_MDMA_CCR_BEX | STM32_MDMA_CCR_PL_MASK);
546 ccr |= STM32_MDMA_CCR_PL(chan_config->priority_level);
547
548 /* Configure Trigger selection */
549 ctbr &= ~STM32_MDMA_CTBR_TSEL_MASK;
550 ctbr |= STM32_MDMA_CTBR_TSEL(chan_config->request);
551
552 switch (direction) {
553 case DMA_MEM_TO_DEV:
554 /* Set device data size */
555 dst_bus_width = stm32_mdma_get_width(chan, dst_addr_width);
556 if (dst_bus_width < 0)
557 return dst_bus_width;
558 ctcr &= ~STM32_MDMA_CTCR_DSIZE_MASK;
559 ctcr |= STM32_MDMA_CTCR_DSIZE(dst_bus_width);
560
561 /* Set device burst value */
562 dst_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
563 dst_maxburst,
564 dst_addr_width);
565 chan->mem_burst = dst_best_burst;
566 ctcr &= ~STM32_MDMA_CTCR_DBURST_MASK;
567 ctcr |= STM32_MDMA_CTCR_DBURST((ilog2(dst_best_burst)));
568
569 /* Set memory data size */
570 src_addr_width = stm32_mdma_get_max_width(buf_len, tlen);
571 chan->mem_width = src_addr_width;
572 src_bus_width = stm32_mdma_get_width(chan, src_addr_width);
573 if (src_bus_width < 0)
574 return src_bus_width;
575		ctcr &= ~(STM32_MDMA_CTCR_SSIZE_MASK |
576			  STM32_MDMA_CTCR_SINCOS_MASK);
577 ctcr |= STM32_MDMA_CTCR_SSIZE(src_bus_width) |
578 STM32_MDMA_CTCR_SINCOS(src_bus_width);
579
580 /* Set memory burst value */
581 src_maxburst = STM32_MDMA_MAX_BUF_LEN / src_addr_width;
582 src_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
583 src_maxburst,
584 src_addr_width);
585 chan->mem_burst = src_best_burst;
586 ctcr &= ~STM32_MDMA_CTCR_SBURST_MASK;
587 ctcr |= STM32_MDMA_CTCR_SBURST((ilog2(src_best_burst)));
588
589 /* Select bus */
590 dst_addr = chan->dma_config.dst_addr;
591 stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS,
592 dst_addr);
593
594 /* Set destination address */
595 stm32_mdma_write(dmadev, STM32_MDMA_CDAR(chan->id), dst_addr);
596 break;
597
598 case DMA_DEV_TO_MEM:
599 /* Set device data size */
600 src_bus_width = stm32_mdma_get_width(chan, src_addr_width);
601 if (src_bus_width < 0)
602 return src_bus_width;
603 ctcr &= ~STM32_MDMA_CTCR_SSIZE_MASK;
604 ctcr |= STM32_MDMA_CTCR_SSIZE(src_bus_width);
605
606 /* Set device burst value */
607 src_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
608 src_maxburst,
609 src_addr_width);
610 ctcr &= ~STM32_MDMA_CTCR_SBURST_MASK;
611 ctcr |= STM32_MDMA_CTCR_SBURST((ilog2(src_best_burst)));
612
613 /* Set memory data size */
614 dst_addr_width = stm32_mdma_get_max_width(buf_len, tlen);
615 chan->mem_width = dst_addr_width;
616 dst_bus_width = stm32_mdma_get_width(chan, dst_addr_width);
617 if (dst_bus_width < 0)
618 return dst_bus_width;
619 ctcr &= ~(STM32_MDMA_CTCR_DSIZE_MASK |
620 STM32_MDMA_CTCR_DINCOS_MASK);
621 ctcr |= STM32_MDMA_CTCR_DSIZE(dst_bus_width) |
622 STM32_MDMA_CTCR_DINCOS(dst_bus_width);
623
624 /* Set memory burst value */
625 dst_maxburst = STM32_MDMA_MAX_BUF_LEN / dst_addr_width;
626 dst_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
627 dst_maxburst,
628 dst_addr_width);
629 ctcr &= ~STM32_MDMA_CTCR_DBURST_MASK;
630 ctcr |= STM32_MDMA_CTCR_DBURST((ilog2(dst_best_burst)));
631
632 /* Select bus */
633 src_addr = chan->dma_config.src_addr;
634 stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS,
635 src_addr);
636
637 /* Set source address */
638 stm32_mdma_write(dmadev, STM32_MDMA_CSAR(chan->id), src_addr);
639 break;
640
641 default:
642 dev_err(chan2dev(chan), "Dma direction is not supported\n");
643 return -EINVAL;
644 }
645
646 *mdma_ccr = ccr;
647 *mdma_ctcr = ctcr;
648 *mdma_ctbr = ctbr;
649
650 return 0;
651}
652
653static void stm32_mdma_dump_hwdesc(struct stm32_mdma_chan *chan,
654 struct stm32_mdma_hwdesc *hwdesc)
655{
656	dev_dbg(chan2dev(chan), "hwdesc: %p\n", hwdesc);
657 dev_dbg(chan2dev(chan), "CTCR: 0x%08x\n", hwdesc->ctcr);
658 dev_dbg(chan2dev(chan), "CBNDTR: 0x%08x\n", hwdesc->cbndtr);
659 dev_dbg(chan2dev(chan), "CSAR: 0x%08x\n", hwdesc->csar);
660 dev_dbg(chan2dev(chan), "CDAR: 0x%08x\n", hwdesc->cdar);
661 dev_dbg(chan2dev(chan), "CBRUR: 0x%08x\n", hwdesc->cbrur);
662 dev_dbg(chan2dev(chan), "CLAR: 0x%08x\n", hwdesc->clar);
663 dev_dbg(chan2dev(chan), "CTBR: 0x%08x\n", hwdesc->ctbr);
664 dev_dbg(chan2dev(chan), "CMAR: 0x%08x\n", hwdesc->cmar);
665 dev_dbg(chan2dev(chan), "CMDR: 0x%08x\n\n", hwdesc->cmdr);
666}
667
668static void stm32_mdma_setup_hwdesc(struct stm32_mdma_chan *chan,
669 struct stm32_mdma_desc *desc,
670 enum dma_transfer_direction dir, u32 count,
671 dma_addr_t src_addr, dma_addr_t dst_addr,
672 u32 len, u32 ctcr, u32 ctbr, bool is_last,
673 bool is_first, bool is_cyclic)
674{
675 struct stm32_mdma_chan_config *config = &chan->chan_config;
676 struct stm32_mdma_hwdesc *hwdesc;
677 u32 next = count + 1;
678
679 hwdesc = &desc->hwdesc[count];
680 hwdesc->ctcr = ctcr;
681 hwdesc->cbndtr &= ~(STM32_MDMA_CBNDTR_BRC_MK |
682 STM32_MDMA_CBNDTR_BRDUM |
683 STM32_MDMA_CBNDTR_BRSUM |
684 STM32_MDMA_CBNDTR_BNDT_MASK);
685 hwdesc->cbndtr |= STM32_MDMA_CBNDTR_BNDT(len);
686 hwdesc->csar = src_addr;
687 hwdesc->cdar = dst_addr;
688 hwdesc->cbrur = 0;
689 hwdesc->clar = desc->hwdesc_phys + next * sizeof(*hwdesc);
690 hwdesc->ctbr = ctbr;
691 hwdesc->cmar = config->mask_addr;
692 hwdesc->cmdr = config->mask_data;
693
694 if (is_last) {
695 if (is_cyclic)
696 hwdesc->clar = desc->hwdesc_phys;
697 else
698 hwdesc->clar = 0;
699 }
700
701 stm32_mdma_dump_hwdesc(chan, hwdesc);
702}
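/*
 * For a 3-element transfer this builds hwdesc[0].clar -> hwdesc[1],
 * hwdesc[1].clar -> hwdesc[2], and hwdesc[2].clar = 0 (or back to
 * hwdesc[0] when cyclic), forming the hardware linked list.
 */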
703
704static int stm32_mdma_setup_xfer(struct stm32_mdma_chan *chan,
705 struct stm32_mdma_desc *desc,
706 struct scatterlist *sgl, u32 sg_len,
707 enum dma_transfer_direction direction)
708{
709 struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
710 struct dma_slave_config *dma_config = &chan->dma_config;
711 struct scatterlist *sg;
712 dma_addr_t src_addr, dst_addr;
713 u32 ccr, ctcr, ctbr;
714 int i, ret = 0;
715
716 for_each_sg(sgl, sg, sg_len, i) {
717 if (sg_dma_len(sg) > STM32_MDMA_MAX_BLOCK_LEN) {
718 dev_err(chan2dev(chan), "Invalid block len\n");
719 return -EINVAL;
720 }
721
722 ret = stm32_mdma_set_xfer_param(chan, direction, &ccr, &ctcr,
723 &ctbr, sg_dma_len(sg));
724 if (ret < 0)
725 return ret;
726
727 if (direction == DMA_MEM_TO_DEV) {
728 src_addr = sg_dma_address(sg);
729 dst_addr = dma_config->dst_addr;
730 stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS,
731 src_addr);
732 } else {
733 src_addr = dma_config->src_addr;
734 dst_addr = sg_dma_address(sg);
735 stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS,
736 dst_addr);
737 }
738
739 stm32_mdma_setup_hwdesc(chan, desc, direction, i, src_addr,
740 dst_addr, sg_dma_len(sg), ctcr, ctbr,
741 i == sg_len - 1, i == 0, false);
742 }
743
744 /* Enable interrupts */
745 ccr &= ~STM32_MDMA_CCR_IRQ_MASK;
746 ccr |= STM32_MDMA_CCR_TEIE | STM32_MDMA_CCR_CTCIE;
747 if (sg_len > 1)
748 ccr |= STM32_MDMA_CCR_BTIE;
749 desc->ccr = ccr;
750
751 return 0;
752}
753
754static struct dma_async_tx_descriptor *
755stm32_mdma_prep_slave_sg(struct dma_chan *c, struct scatterlist *sgl,
756 u32 sg_len, enum dma_transfer_direction direction,
757 unsigned long flags, void *context)
758{
759 struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
760 struct stm32_mdma_desc *desc;
761 int ret;
762
763 /*
764	 * Once the DMA channel is set up in cyclic mode, it cannot be
765	 * reused for another request. The channel must first be aborted
766	 * or terminated.
767 */
768 if (chan->desc && chan->desc->cyclic) {
769 dev_err(chan2dev(chan),
770 "Request not allowed when dma in cyclic mode\n");
771 return NULL;
772 }
773
774 desc = stm32_mdma_alloc_desc(chan, sg_len);
775 if (!desc)
776 return NULL;
777
778 ret = stm32_mdma_setup_xfer(chan, desc, sgl, sg_len, direction);
779 if (ret < 0)
780 goto xfer_setup_err;
781
782 desc->cyclic = false;
783
784 return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);
785
786xfer_setup_err:
787	dma_pool_free(chan->desc_pool, desc->hwdesc, desc->hwdesc_phys);
788 kfree(desc);
789 return NULL;
790}
791
792static struct dma_async_tx_descriptor *
793stm32_mdma_prep_dma_cyclic(struct dma_chan *c, dma_addr_t buf_addr,
794 size_t buf_len, size_t period_len,
795 enum dma_transfer_direction direction,
796 unsigned long flags)
797{
798 struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
799 struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
800 struct dma_slave_config *dma_config = &chan->dma_config;
801 struct stm32_mdma_desc *desc;
802 dma_addr_t src_addr, dst_addr;
803 u32 ccr, ctcr, ctbr, count;
804 int i, ret;
805
806 /*
807	 * Once the DMA channel is set up in cyclic mode, it cannot be
808	 * reused for another request. The channel must first be aborted
809	 * or terminated.
810 */
811 if (chan->desc && chan->desc->cyclic) {
812 dev_err(chan2dev(chan),
813 "Request not allowed when dma in cyclic mode\n");
814 return NULL;
815 }
816
817 if (!buf_len || !period_len || period_len > STM32_MDMA_MAX_BLOCK_LEN) {
818 dev_err(chan2dev(chan), "Invalid buffer/period len\n");
819 return NULL;
820 }
821
822 if (buf_len % period_len) {
823 dev_err(chan2dev(chan), "buf_len not multiple of period_len\n");
824 return NULL;
825 }
826
827 count = buf_len / period_len;
828
829 desc = stm32_mdma_alloc_desc(chan, count);
830 if (!desc)
831 return NULL;
832
833 ret = stm32_mdma_set_xfer_param(chan, direction, &ccr, &ctcr, &ctbr,
834 period_len);
835 if (ret < 0)
836 goto xfer_setup_err;
837
838 /* Enable interrupts */
839 ccr &= ~STM32_MDMA_CCR_IRQ_MASK;
840 ccr |= STM32_MDMA_CCR_TEIE | STM32_MDMA_CCR_CTCIE | STM32_MDMA_CCR_BTIE;
841 desc->ccr = ccr;
842
843 /* Select bus */
844 if (direction == DMA_MEM_TO_DEV) {
845 src_addr = buf_addr;
846 stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS,
847 src_addr);
848 } else {
849 dst_addr = buf_addr;
850 stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS,
851 dst_addr);
852 }
853
854 /* Configure hwdesc list */
855 for (i = 0; i < count; i++) {
856 if (direction == DMA_MEM_TO_DEV) {
857 src_addr = buf_addr + i * period_len;
858 dst_addr = dma_config->dst_addr;
859 } else {
860 src_addr = dma_config->src_addr;
861 dst_addr = buf_addr + i * period_len;
862 }
863
864 stm32_mdma_setup_hwdesc(chan, desc, direction, i, src_addr,
865 dst_addr, period_len, ctcr, ctbr,
866 i == count - 1, i == 0, true);
867 }
868
869 desc->cyclic = true;
870
871 return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);
872
873xfer_setup_err:
874	dma_pool_free(chan->desc_pool, desc->hwdesc, desc->hwdesc_phys);
875 kfree(desc);
876 return NULL;
877}
878
879static struct dma_async_tx_descriptor *
880stm32_mdma_prep_dma_memcpy(struct dma_chan *c, dma_addr_t dest, dma_addr_t src,
881 size_t len, unsigned long flags)
882{
883 struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
884 struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
885 enum dma_slave_buswidth max_width;
886 struct stm32_mdma_desc *desc;
887 struct stm32_mdma_hwdesc *hwdesc;
888 u32 ccr, ctcr, ctbr, cbndtr, count, max_burst, mdma_burst;
889 u32 best_burst, tlen;
890 size_t xfer_count, offset;
891 int src_bus_width, dst_bus_width;
892 int i;
893
894 /*
895	 * Once the DMA channel is set up in cyclic mode, it cannot be
896	 * reused for another request. The channel must first be aborted
897	 * or terminated.
898 */
899 if (chan->desc && chan->desc->cyclic) {
900 dev_err(chan2dev(chan),
901 "Request not allowed when dma in cyclic mode\n");
902 return NULL;
903 }
904
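	/* e.g. len = 200000 bytes -> DIV_ROUND_UP gives 4 blocks of up to 64 KiB */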
905 count = DIV_ROUND_UP(len, STM32_MDMA_MAX_BLOCK_LEN);
906 desc = stm32_mdma_alloc_desc(chan, count);
907 if (!desc)
908 return NULL;
909
910 ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id));
911 ctcr = stm32_mdma_read(dmadev, STM32_MDMA_CTCR(chan->id));
912 ctbr = stm32_mdma_read(dmadev, STM32_MDMA_CTBR(chan->id));
913 cbndtr = stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id));
914
915 /* Enable sw req, some interrupts and clear other bits */
916 ccr &= ~(STM32_MDMA_CCR_WEX | STM32_MDMA_CCR_HEX |
917 STM32_MDMA_CCR_BEX | STM32_MDMA_CCR_PL_MASK |
918 STM32_MDMA_CCR_IRQ_MASK);
919 ccr |= STM32_MDMA_CCR_TEIE;
920
921 /* Enable SW request mode, dest/src inc and clear other bits */
922 ctcr &= ~(STM32_MDMA_CTCR_BWM | STM32_MDMA_CTCR_TRGM_MSK |
923 STM32_MDMA_CTCR_PAM_MASK | STM32_MDMA_CTCR_PKE |
924 STM32_MDMA_CTCR_TLEN_MSK | STM32_MDMA_CTCR_DBURST_MASK |
925 STM32_MDMA_CTCR_SBURST_MASK | STM32_MDMA_CTCR_DINCOS_MASK |
926 STM32_MDMA_CTCR_SINCOS_MASK | STM32_MDMA_CTCR_DSIZE_MASK |
927 STM32_MDMA_CTCR_SSIZE_MASK | STM32_MDMA_CTCR_DINC_MASK |
928 STM32_MDMA_CTCR_SINC_MASK);
929 ctcr |= STM32_MDMA_CTCR_SWRM | STM32_MDMA_CTCR_SINC(STM32_MDMA_INC) |
930 STM32_MDMA_CTCR_DINC(STM32_MDMA_INC);
931
932 /* Reset HW request */
933 ctbr &= ~STM32_MDMA_CTBR_TSEL_MASK;
934
935 /* Select bus */
936 stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS, src);
937 stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS, dest);
938
939 /* Clear CBNDTR registers */
940 cbndtr &= ~(STM32_MDMA_CBNDTR_BRC_MK | STM32_MDMA_CBNDTR_BRDUM |
941 STM32_MDMA_CBNDTR_BRSUM | STM32_MDMA_CBNDTR_BNDT_MASK);
942
943 if (len <= STM32_MDMA_MAX_BLOCK_LEN) {
944 cbndtr |= STM32_MDMA_CBNDTR_BNDT(len);
945 if (len <= STM32_MDMA_MAX_BUF_LEN) {
946 /* Setup a buffer transfer */
947 ccr |= STM32_MDMA_CCR_TCIE | STM32_MDMA_CCR_CTCIE;
948 ctcr |= STM32_MDMA_CTCR_TRGM(STM32_MDMA_BUFFER);
949 } else {
950 /* Setup a block transfer */
951 ccr |= STM32_MDMA_CCR_BTIE | STM32_MDMA_CCR_CTCIE;
952 ctcr |= STM32_MDMA_CTCR_TRGM(STM32_MDMA_BLOCK);
953 }
954
955 tlen = STM32_MDMA_MAX_BUF_LEN;
956 ctcr |= STM32_MDMA_CTCR_TLEN((tlen - 1));
957
958 /* Set source best burst size */
959 max_width = stm32_mdma_get_max_width(len, tlen);
960 if (src % max_width)
961 max_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
962 src_bus_width = stm32_mdma_get_width(chan, max_width);
963
964 max_burst = tlen / max_width;
965 best_burst = stm32_mdma_get_best_burst(len, tlen, max_burst,
966 max_width);
967 mdma_burst = ilog2(best_burst);
968
969 ctcr |= STM32_MDMA_CTCR_SBURST(mdma_burst) |
970 STM32_MDMA_CTCR_SSIZE(src_bus_width) |
971 STM32_MDMA_CTCR_SINCOS(src_bus_width);
972
973 /* Set destination best burst size */
974 max_width = stm32_mdma_get_max_width(len, tlen);
975 if (dest % max_width)
976 max_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
977 dst_bus_width = stm32_mdma_get_width(chan, max_width);
978
979 max_burst = tlen / max_width;
980 best_burst = stm32_mdma_get_best_burst(len, tlen, max_burst,
981 max_width);
982 mdma_burst = ilog2(best_burst);
983
984 ctcr |= STM32_MDMA_CTCR_DBURST(mdma_burst) |
985 STM32_MDMA_CTCR_DSIZE(dst_bus_width) |
986 STM32_MDMA_CTCR_DINCOS(dst_bus_width);
987
988 if (dst_bus_width != src_bus_width)
989 ctcr |= STM32_MDMA_CTCR_PKE;
990
991 /* Prepare hardware descriptor */
992 hwdesc = desc->hwdesc;
993 hwdesc->ctcr = ctcr;
994 hwdesc->cbndtr = cbndtr;
995 hwdesc->csar = src;
996 hwdesc->cdar = dest;
997 hwdesc->cbrur = 0;
998 hwdesc->clar = 0;
999 hwdesc->ctbr = ctbr;
1000 hwdesc->cmar = 0;
1001 hwdesc->cmdr = 0;
1002
1003 stm32_mdma_dump_hwdesc(chan, hwdesc);
1004 } else {
1005 /* Setup a LLI transfer */
1006 ctcr |= STM32_MDMA_CTCR_TRGM(STM32_MDMA_LINKED_LIST) |
1007 STM32_MDMA_CTCR_TLEN((STM32_MDMA_MAX_BUF_LEN - 1));
1008 ccr |= STM32_MDMA_CCR_BTIE | STM32_MDMA_CCR_CTCIE;
1009 tlen = STM32_MDMA_MAX_BUF_LEN;
1010
1011 for (i = 0, offset = 0; offset < len;
1012 i++, offset += xfer_count) {
1013 xfer_count = min_t(size_t, len - offset,
1014 STM32_MDMA_MAX_BLOCK_LEN);
1015
1016 /* Set source best burst size */
1017 max_width = stm32_mdma_get_max_width(len, tlen);
1018 if (src % max_width)
1019 max_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
1020 src_bus_width = stm32_mdma_get_width(chan, max_width);
1021
1022 max_burst = tlen / max_width;
1023 best_burst = stm32_mdma_get_best_burst(len, tlen,
1024 max_burst,
1025 max_width);
1026 mdma_burst = ilog2(best_burst);
1027
1028 ctcr |= STM32_MDMA_CTCR_SBURST(mdma_burst) |
1029 STM32_MDMA_CTCR_SSIZE(src_bus_width) |
1030 STM32_MDMA_CTCR_SINCOS(src_bus_width);
1031
1032 /* Set destination best burst size */
1033 max_width = stm32_mdma_get_max_width(len, tlen);
1034 if (dest % max_width)
1035 max_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
1036 dst_bus_width = stm32_mdma_get_width(chan, max_width);
1037
1038 max_burst = tlen / max_width;
1039 best_burst = stm32_mdma_get_best_burst(len, tlen,
1040 max_burst,
1041 max_width);
1042 mdma_burst = ilog2(best_burst);
1043
1044 ctcr |= STM32_MDMA_CTCR_DBURST(mdma_burst) |
1045 STM32_MDMA_CTCR_DSIZE(dst_bus_width) |
1046 STM32_MDMA_CTCR_DINCOS(dst_bus_width);
1047
1048 if (dst_bus_width != src_bus_width)
1049 ctcr |= STM32_MDMA_CTCR_PKE;
1050
1051 /* Prepare hardware descriptor */
1052 stm32_mdma_setup_hwdesc(chan, desc, DMA_MEM_TO_MEM, i,
1053 src + offset, dest + offset,
1054 xfer_count, ctcr, ctbr,
1055 i == count - 1, i == 0, false);
1056 }
1057 }
1058
1059 desc->ccr = ccr;
1060
1061 desc->cyclic = false;
1062
1063 return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);
1064}
1065
1066static void stm32_mdma_dump_reg(struct stm32_mdma_chan *chan)
1067{
1068 struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
1069
1070 dev_dbg(chan2dev(chan), "CCR: 0x%08x\n",
1071 stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id)));
1072 dev_dbg(chan2dev(chan), "CTCR: 0x%08x\n",
1073 stm32_mdma_read(dmadev, STM32_MDMA_CTCR(chan->id)));
1074 dev_dbg(chan2dev(chan), "CBNDTR: 0x%08x\n",
1075 stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id)));
1076 dev_dbg(chan2dev(chan), "CSAR: 0x%08x\n",
1077 stm32_mdma_read(dmadev, STM32_MDMA_CSAR(chan->id)));
1078 dev_dbg(chan2dev(chan), "CDAR: 0x%08x\n",
1079 stm32_mdma_read(dmadev, STM32_MDMA_CDAR(chan->id)));
1080 dev_dbg(chan2dev(chan), "CBRUR: 0x%08x\n",
1081 stm32_mdma_read(dmadev, STM32_MDMA_CBRUR(chan->id)));
1082 dev_dbg(chan2dev(chan), "CLAR: 0x%08x\n",
1083 stm32_mdma_read(dmadev, STM32_MDMA_CLAR(chan->id)));
1084 dev_dbg(chan2dev(chan), "CTBR: 0x%08x\n",
1085 stm32_mdma_read(dmadev, STM32_MDMA_CTBR(chan->id)));
1086 dev_dbg(chan2dev(chan), "CMAR: 0x%08x\n",
1087 stm32_mdma_read(dmadev, STM32_MDMA_CMAR(chan->id)));
1088 dev_dbg(chan2dev(chan), "CMDR: 0x%08x\n",
1089 stm32_mdma_read(dmadev, STM32_MDMA_CMDR(chan->id)));
1090}
1091
1092static void stm32_mdma_start_transfer(struct stm32_mdma_chan *chan)
1093{
1094 struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
1095 struct virt_dma_desc *vdesc;
1096 struct stm32_mdma_hwdesc *hwdesc;
1097 u32 id = chan->id;
1098 u32 status, reg;
1099
1100 vdesc = vchan_next_desc(&chan->vchan);
1101 if (!vdesc) {
1102 chan->desc = NULL;
1103 return;
1104 }
1105
1106 chan->desc = to_stm32_mdma_desc(vdesc);
1107 hwdesc = chan->desc->hwdesc;
1108 chan->curr_hwdesc = 0;
1109
1110 stm32_mdma_write(dmadev, STM32_MDMA_CCR(id), chan->desc->ccr);
1111 stm32_mdma_write(dmadev, STM32_MDMA_CTCR(id), hwdesc->ctcr);
1112 stm32_mdma_write(dmadev, STM32_MDMA_CBNDTR(id), hwdesc->cbndtr);
1113 stm32_mdma_write(dmadev, STM32_MDMA_CSAR(id), hwdesc->csar);
1114 stm32_mdma_write(dmadev, STM32_MDMA_CDAR(id), hwdesc->cdar);
1115 stm32_mdma_write(dmadev, STM32_MDMA_CBRUR(id), hwdesc->cbrur);
1116 stm32_mdma_write(dmadev, STM32_MDMA_CLAR(id), hwdesc->clar);
1117 stm32_mdma_write(dmadev, STM32_MDMA_CTBR(id), hwdesc->ctbr);
1118 stm32_mdma_write(dmadev, STM32_MDMA_CMAR(id), hwdesc->cmar);
1119 stm32_mdma_write(dmadev, STM32_MDMA_CMDR(id), hwdesc->cmdr);
1120
1121 /* Clear interrupt status if it is there */
1122 status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(id));
1123 if (status)
1124 stm32_mdma_set_bits(dmadev, STM32_MDMA_CIFCR(id), status);
1125
1126 stm32_mdma_dump_reg(chan);
1127
1128 /* Start DMA */
1129 stm32_mdma_set_bits(dmadev, STM32_MDMA_CCR(id), STM32_MDMA_CCR_EN);
1130
1131 /* Set SW request in case of MEM2MEM transfer */
1132 if (hwdesc->ctcr & STM32_MDMA_CTCR_SWRM) {
1133 reg = STM32_MDMA_CCR(id);
1134 stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CCR_SWRQ);
1135 }
1136
1137 chan->busy = true;
1138
1139 dev_dbg(chan2dev(chan), "vchan %p: started\n", &chan->vchan);
1140}
1141
1142static void stm32_mdma_issue_pending(struct dma_chan *c)
1143{
1144 struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1145 unsigned long flags;
1146
1147 spin_lock_irqsave(&chan->vchan.lock, flags);
1148
1149 if (!vchan_issue_pending(&chan->vchan))
1150 goto end;
1151
1152 dev_dbg(chan2dev(chan), "vchan %p: issued\n", &chan->vchan);
1153
1154 if (!chan->desc && !chan->busy)
1155 stm32_mdma_start_transfer(chan);
1156
1157end:
1158 spin_unlock_irqrestore(&chan->vchan.lock, flags);
1159}
1160
1161static int stm32_mdma_pause(struct dma_chan *c)
1162{
1163 struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1164 unsigned long flags;
1165 int ret;
1166
1167 spin_lock_irqsave(&chan->vchan.lock, flags);
1168 ret = stm32_mdma_disable_chan(chan);
1169 spin_unlock_irqrestore(&chan->vchan.lock, flags);
1170
1171 if (!ret)
1172 dev_dbg(chan2dev(chan), "vchan %p: pause\n", &chan->vchan);
1173
1174 return ret;
1175}
1176
1177static int stm32_mdma_resume(struct dma_chan *c)
1178{
1179 struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1180 struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
1181 struct stm32_mdma_hwdesc *hwdesc;
1182 unsigned long flags;
1183 u32 status, reg;
1184
1185 hwdesc = &chan->desc->hwdesc[chan->curr_hwdesc];
1186
1187 spin_lock_irqsave(&chan->vchan.lock, flags);
1188
1189 /* Re-configure control register */
1190 stm32_mdma_write(dmadev, STM32_MDMA_CCR(chan->id), chan->desc->ccr);
1191
1192 /* Clear interrupt status if it is there */
1193 status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(chan->id));
1194 if (status)
1195 stm32_mdma_set_bits(dmadev, STM32_MDMA_CIFCR(chan->id), status);
1196
1197 stm32_mdma_dump_reg(chan);
1198
1199 /* Re-start DMA */
1200 reg = STM32_MDMA_CCR(chan->id);
1201 stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CCR_EN);
1202
1203 /* Set SW request in case of MEM2MEM transfer */
1204 if (hwdesc->ctcr & STM32_MDMA_CTCR_SWRM)
1205 stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CCR_SWRQ);
1206
1207 spin_unlock_irqrestore(&chan->vchan.lock, flags);
1208
1209 dev_dbg(chan2dev(chan), "vchan %p: resume\n", &chan->vchan);
1210
1211 return 0;
1212}
1213
1214static int stm32_mdma_terminate_all(struct dma_chan *c)
1215{
1216 struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1217 unsigned long flags;
1218 LIST_HEAD(head);
1219
1220 spin_lock_irqsave(&chan->vchan.lock, flags);
1221 if (chan->busy) {
1222 stm32_mdma_stop(chan);
1223 chan->desc = NULL;
1224 }
1225 vchan_get_all_descriptors(&chan->vchan, &head);
1226 spin_unlock_irqrestore(&chan->vchan.lock, flags);
1227
1228 vchan_dma_desc_free_list(&chan->vchan, &head);
1229
1230 return 0;
1231}
1232
1233static void stm32_mdma_synchronize(struct dma_chan *c)
1234{
1235 struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1236
1237 vchan_synchronize(&chan->vchan);
1238}
1239
1240static int stm32_mdma_slave_config(struct dma_chan *c,
1241 struct dma_slave_config *config)
1242{
1243 struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1244
1245 memcpy(&chan->dma_config, config, sizeof(*config));
1246
1247 return 0;
1248}
1249
1250static size_t stm32_mdma_desc_residue(struct stm32_mdma_chan *chan,
1251 struct stm32_mdma_desc *desc,
1252 u32 curr_hwdesc)
1253{
1254 struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
1255 struct stm32_mdma_hwdesc *hwdesc = desc->hwdesc;
1256 u32 cbndtr, residue, modulo, burst_size;
1257 int i;
1258
1259 residue = 0;
1260 for (i = curr_hwdesc + 1; i < desc->count; i++) {
1261 hwdesc = &desc->hwdesc[i];
1262 residue += STM32_MDMA_CBNDTR_BNDT(hwdesc->cbndtr);
1263 }
1264 cbndtr = stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id));
1265 residue += cbndtr & STM32_MDMA_CBNDTR_BNDT_MASK;
1266
1267 if (!chan->mem_burst)
1268 return residue;
1269
1270 burst_size = chan->mem_burst * chan->mem_width;
1271 modulo = residue % burst_size;
1272 if (modulo)
1273 residue = residue - modulo + burst_size;
1274
1275 return residue;
1276}
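/*
 * Rounding example: residue = 100 bytes with mem_burst = 16 and
 * mem_width = 4 gives burst_size = 64, so the reported residue is
 * rounded up to 128.
 */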
1277
1278static enum dma_status stm32_mdma_tx_status(struct dma_chan *c,
1279 dma_cookie_t cookie,
1280 struct dma_tx_state *state)
1281{
1282 struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1283 struct virt_dma_desc *vdesc;
1284 enum dma_status status;
1285 unsigned long flags;
1286 u32 residue = 0;
1287
1288 status = dma_cookie_status(c, cookie, state);
1289 if ((status == DMA_COMPLETE) || (!state))
1290 return status;
1291
1292 spin_lock_irqsave(&chan->vchan.lock, flags);
1293
1294 vdesc = vchan_find_desc(&chan->vchan, cookie);
1295 if (chan->desc && cookie == chan->desc->vdesc.tx.cookie)
1296 residue = stm32_mdma_desc_residue(chan, chan->desc,
1297 chan->curr_hwdesc);
1298 else if (vdesc)
1299 residue = stm32_mdma_desc_residue(chan,
1300 to_stm32_mdma_desc(vdesc), 0);
1301 dma_set_residue(state, residue);
1302
1303 spin_unlock_irqrestore(&chan->vchan.lock, flags);
1304
1305 return status;
1306}
1307
1308static void stm32_mdma_xfer_end(struct stm32_mdma_chan *chan)
1309{
1310 list_del(&chan->desc->vdesc.node);
1311 vchan_cookie_complete(&chan->desc->vdesc);
1312 chan->desc = NULL;
1313 chan->busy = false;
1314
1315 /* Start the next transfer if this driver has a next desc */
1316 stm32_mdma_start_transfer(chan);
1317}
1318
1319static irqreturn_t stm32_mdma_irq_handler(int irq, void *devid)
1320{
1321 struct stm32_mdma_device *dmadev = devid;
1322	struct stm32_mdma_chan *chan;
1323 u32 reg, id, ien, status, flag;
1324
1325 /* Find out which channel generates the interrupt */
1326 status = readl_relaxed(dmadev->base + STM32_MDMA_GISR0);
1327 if (status) {
1328 id = __ffs(status);
1329 } else {
1330 status = readl_relaxed(dmadev->base + STM32_MDMA_GISR1);
1331 if (!status) {
1332 dev_dbg(mdma2dev(dmadev), "spurious it\n");
1333 return IRQ_NONE;
1334 }
1335 id = __ffs(status);
1336 /*
1337	 * GISR0 reports status for channels 0 to 31, while GISR1
1338	 * reports status for channels 32 to 62
1339 */
1340 id += 32;
1341 }
1342
1343 chan = &dmadev->chan[id];
1344 if (!chan) {
1345 dev_err(chan2dev(chan), "MDMA channel not initialized\n");
1346 goto exit;
1347 }
1348
1349 /* Handle interrupt for the channel */
1350 spin_lock(&chan->vchan.lock);
1351 status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(chan->id));
1352 ien = stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id));
1353 ien &= STM32_MDMA_CCR_IRQ_MASK;
1354 ien >>= 1;
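	/*
	 * CCR enable bits TCIE..TEIE (bits 5..1) now line up with the
	 * CISR status flags TCIF..TEIF (bits 4..0)
	 */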
1355
1356 if (!(status & ien)) {
1357 spin_unlock(&chan->vchan.lock);
1358 dev_dbg(chan2dev(chan),
1359 "spurious it (status=0x%04x, ien=0x%04x)\n",
1360 status, ien);
1361 return IRQ_NONE;
1362 }
1363
1364 flag = __ffs(status & ien);
1365 reg = STM32_MDMA_CIFCR(chan->id);
1366
1367 switch (1 << flag) {
1368 case STM32_MDMA_CISR_TEIF:
1369 id = chan->id;
1370 status = readl_relaxed(dmadev->base + STM32_MDMA_CESR(id));
1371 dev_err(chan2dev(chan), "Transfer Err: stat=0x%08x\n", status);
1372 stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CTEIF);
1373 break;
1374
1375 case STM32_MDMA_CISR_CTCIF:
1376 stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CCTCIF);
1377 stm32_mdma_xfer_end(chan);
1378 break;
1379
1380 case STM32_MDMA_CISR_BRTIF:
1381 stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CBRTIF);
1382 break;
1383
1384 case STM32_MDMA_CISR_BTIF:
1385 stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CBTIF);
1386 chan->curr_hwdesc++;
1387 if (chan->desc && chan->desc->cyclic) {
1388 if (chan->curr_hwdesc == chan->desc->count)
1389 chan->curr_hwdesc = 0;
1390 vchan_cyclic_callback(&chan->desc->vdesc);
1391 }
1392 break;
1393
1394 case STM32_MDMA_CISR_TCIF:
1395 stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CLTCIF);
1396 break;
1397
1398 default:
1399 dev_err(chan2dev(chan), "it %d unhandled (status=0x%04x)\n",
1400 1 << flag, status);
1401 }
1402
1403 spin_unlock(&chan->vchan.lock);
1404
1405exit:
1406 return IRQ_HANDLED;
1407}
1408
1409static int stm32_mdma_alloc_chan_resources(struct dma_chan *c)
1410{
1411 struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1412 struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
1413 int ret;
1414
1415 chan->desc_pool = dmam_pool_create(dev_name(&c->dev->device),
1416 c->device->dev,
1417 sizeof(struct stm32_mdma_hwdesc),
1418 __alignof__(struct stm32_mdma_hwdesc),
1419 0);
1420 if (!chan->desc_pool) {
1421 dev_err(chan2dev(chan), "failed to allocate descriptor pool\n");
1422 return -ENOMEM;
1423 }
1424
1425 ret = clk_prepare_enable(dmadev->clk);
1426 if (ret < 0) {
1427 dev_err(chan2dev(chan), "clk_prepare_enable failed: %d\n", ret);
1428 return ret;
1429 }
1430
1431 ret = stm32_mdma_disable_chan(chan);
1432 if (ret < 0)
1433 clk_disable_unprepare(dmadev->clk);
1434
1435 return ret;
1436}
1437
1438static void stm32_mdma_free_chan_resources(struct dma_chan *c)
1439{
1440 struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1441 struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
1442 unsigned long flags;
1443
1444 dev_dbg(chan2dev(chan), "Freeing channel %d\n", chan->id);
1445
1446 if (chan->busy) {
1447 spin_lock_irqsave(&chan->vchan.lock, flags);
1448 stm32_mdma_stop(chan);
1449 chan->desc = NULL;
1450 spin_unlock_irqrestore(&chan->vchan.lock, flags);
1451 }
1452
1453 clk_disable_unprepare(dmadev->clk);
1454 vchan_free_chan_resources(to_virt_chan(c));
1455 dmam_pool_destroy(chan->desc_pool);
1456 chan->desc_pool = NULL;
1457}
1458
1459static struct dma_chan *stm32_mdma_of_xlate(struct of_phandle_args *dma_spec,
1460 struct of_dma *ofdma)
1461{
1462 struct stm32_mdma_device *dmadev = ofdma->of_dma_data;
1463 struct stm32_mdma_chan *chan;
1464 struct dma_chan *c;
1465 struct stm32_mdma_chan_config config;
1466
1467 if (dma_spec->args_count < 5) {
1468 dev_err(mdma2dev(dmadev), "Bad number of args\n");
1469 return NULL;
1470 }
1471
1472 config.request = dma_spec->args[0];
1473 config.priority_level = dma_spec->args[1];
1474 config.transfer_config = dma_spec->args[2];
1475 config.mask_addr = dma_spec->args[3];
1476 config.mask_data = dma_spec->args[4];
1477
1478 if (config.request >= dmadev->nr_requests) {
1479 dev_err(mdma2dev(dmadev), "Bad request line\n");
1480 return NULL;
1481 }
1482
1483 if (config.priority_level > STM32_MDMA_VERY_HIGH_PRIORITY) {
1484 dev_err(mdma2dev(dmadev), "Priority level not supported\n");
1485 return NULL;
1486 }
1487
1488 c = dma_get_any_slave_channel(&dmadev->ddev);
1489 if (!c) {
1490		dev_err(mdma2dev(dmadev), "No more channels available\n");
1491 return NULL;
1492 }
1493
1494 chan = to_stm32_mdma_chan(c);
1495 chan->chan_config = config;
1496
1497 return c;
1498}
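/*
 * A client node would reference a channel with five cells matching the
 * fields parsed above, e.g. (values are purely illustrative):
 *	dmas = <&mdma1 26 0x2 0x1200502 0x4000b008 0x1>;
 * i.e. request line, priority level, CTCR transfer config, mask address
 * and mask data.
 */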
1499
1500static const struct of_device_id stm32_mdma_of_match[] = {
1501 { .compatible = "st,stm32h7-mdma", },
1502 { /* sentinel */ },
1503};
1504MODULE_DEVICE_TABLE(of, stm32_mdma_of_match);
1505
1506static int stm32_mdma_probe(struct platform_device *pdev)
1507{
1508 struct stm32_mdma_chan *chan;
1509 struct stm32_mdma_device *dmadev;
1510 struct dma_device *dd;
1511 struct device_node *of_node;
1512 struct resource *res;
1513 u32 nr_channels, nr_requests;
1514 int i, count, ret;
1515
1516 of_node = pdev->dev.of_node;
1517 if (!of_node)
1518 return -ENODEV;
1519
1520 ret = device_property_read_u32(&pdev->dev, "dma-channels",
1521 &nr_channels);
1522 if (ret) {
1523 nr_channels = STM32_MDMA_MAX_CHANNELS;
1524		dev_warn(&pdev->dev, "MDMA defaulting to %i channels\n",
1525 nr_channels);
1526 }
1527
1528 ret = device_property_read_u32(&pdev->dev, "dma-requests",
1529 &nr_requests);
1530 if (ret) {
1531 nr_requests = STM32_MDMA_MAX_REQUESTS;
1532		dev_warn(&pdev->dev, "MDMA defaulting to %i request lines\n",
1533 nr_requests);
1534 }
1535
1536 count = device_property_read_u32_array(&pdev->dev, "st,ahb-addr-masks",
1537 NULL, 0);
1538 if (count < 0)
1539 count = 0;
1540
1541 dmadev = devm_kzalloc(&pdev->dev, sizeof(*dmadev) + sizeof(u32) * count,
1542 GFP_KERNEL);
1543 if (!dmadev)
1544 return -ENOMEM;
1545
1546 dmadev->nr_channels = nr_channels;
1547 dmadev->nr_requests = nr_requests;
1548 device_property_read_u32_array(&pdev->dev, "st,ahb-addr-masks",
1549 dmadev->ahb_addr_masks,
1550 count);
1551 dmadev->nr_ahb_addr_masks = count;
1552
1553 res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
1554 dmadev->base = devm_ioremap_resource(&pdev->dev, res);
1555 if (IS_ERR(dmadev->base))
1556 return PTR_ERR(dmadev->base);
1557
1558 dmadev->clk = devm_clk_get(&pdev->dev, NULL);
1559 if (IS_ERR(dmadev->clk)) {
1560 ret = PTR_ERR(dmadev->clk);
1561 if (ret == -EPROBE_DEFER)
1562 dev_info(&pdev->dev, "Missing controller clock\n");
1563 return ret;
1564 }
1565
1566 dmadev->rst = devm_reset_control_get(&pdev->dev, NULL);
1567 if (!IS_ERR(dmadev->rst)) {
1568 reset_control_assert(dmadev->rst);
1569 udelay(2);
1570 reset_control_deassert(dmadev->rst);
1571 }
1572
1573 dd = &dmadev->ddev;
1574 dma_cap_set(DMA_SLAVE, dd->cap_mask);
1575 dma_cap_set(DMA_PRIVATE, dd->cap_mask);
1576 dma_cap_set(DMA_CYCLIC, dd->cap_mask);
1577 dma_cap_set(DMA_MEMCPY, dd->cap_mask);
1578 dd->device_alloc_chan_resources = stm32_mdma_alloc_chan_resources;
1579 dd->device_free_chan_resources = stm32_mdma_free_chan_resources;
1580 dd->device_tx_status = stm32_mdma_tx_status;
1581 dd->device_issue_pending = stm32_mdma_issue_pending;
1582 dd->device_prep_slave_sg = stm32_mdma_prep_slave_sg;
1583 dd->device_prep_dma_cyclic = stm32_mdma_prep_dma_cyclic;
1584 dd->device_prep_dma_memcpy = stm32_mdma_prep_dma_memcpy;
1585 dd->device_config = stm32_mdma_slave_config;
1586 dd->device_pause = stm32_mdma_pause;
1587 dd->device_resume = stm32_mdma_resume;
1588 dd->device_terminate_all = stm32_mdma_terminate_all;
1589 dd->device_synchronize = stm32_mdma_synchronize;
1590 dd->src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) |
1591 BIT(DMA_SLAVE_BUSWIDTH_2_BYTES) |
1592 BIT(DMA_SLAVE_BUSWIDTH_4_BYTES) |
1593 BIT(DMA_SLAVE_BUSWIDTH_8_BYTES);
1594 dd->dst_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) |
1595 BIT(DMA_SLAVE_BUSWIDTH_2_BYTES) |
1596 BIT(DMA_SLAVE_BUSWIDTH_4_BYTES) |
1597 BIT(DMA_SLAVE_BUSWIDTH_8_BYTES);
1598 dd->directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV) |
1599 BIT(DMA_MEM_TO_MEM);
1600 dd->residue_granularity = DMA_RESIDUE_GRANULARITY_BURST;
1601 dd->max_burst = STM32_MDMA_MAX_BURST;
1602 dd->dev = &pdev->dev;
1603 INIT_LIST_HEAD(&dd->channels);
1604
1605 for (i = 0; i < dmadev->nr_channels; i++) {
1606 chan = &dmadev->chan[i];
1607 chan->id = i;
1608 chan->vchan.desc_free = stm32_mdma_desc_free;
1609 vchan_init(&chan->vchan, dd);
1610 }
1611
1612 dmadev->irq = platform_get_irq(pdev, 0);
1613 if (dmadev->irq < 0) {
1614 dev_err(&pdev->dev, "failed to get IRQ\n");
1615 return dmadev->irq;
1616 }
1617
1618 ret = devm_request_irq(&pdev->dev, dmadev->irq, stm32_mdma_irq_handler,
1619 0, dev_name(&pdev->dev), dmadev);
1620 if (ret) {
1621 dev_err(&pdev->dev, "failed to request IRQ\n");
1622 return ret;
1623 }
1624
1625 ret = dma_async_device_register(dd);
1626 if (ret)
1627 return ret;
1628
1629 ret = of_dma_controller_register(of_node, stm32_mdma_of_xlate, dmadev);
1630 if (ret < 0) {
1631 dev_err(&pdev->dev,
1632 "STM32 MDMA DMA OF registration failed %d\n", ret);
1633 goto err_unregister;
1634 }
1635
1636 platform_set_drvdata(pdev, dmadev);
1637
1638 dev_info(&pdev->dev, "STM32 MDMA driver registered\n");
1639
1640 return 0;
1641
1642err_unregister:
1643 dma_async_device_unregister(dd);
1644
1645 return ret;
1646}
1647
1648static struct platform_driver stm32_mdma_driver = {
1649 .probe = stm32_mdma_probe,
1650 .driver = {
1651 .name = "stm32-mdma",
1652 .of_match_table = stm32_mdma_of_match,
1653 },
1654};
1655
1656static int __init stm32_mdma_init(void)
1657{
1658 return platform_driver_register(&stm32_mdma_driver);
1659}
1660
1661subsys_initcall(stm32_mdma_init);
1662
1663MODULE_DESCRIPTION("Driver for STM32 MDMA controller");
1664MODULE_AUTHOR("M'boumba Cedric Madianga <cedric.madianga@gmail.com>");
1665MODULE_AUTHOR("Pierre-Yves Mordret <pierre-yves.mordret@st.com>");
1666MODULE_LICENSE("GPL v2");