Merge tag 'backlight-next-4.15' of git://git.kernel.org/pub/scm/linux/kernel/git...
[linux-2.6-microblaze.git] / drivers / dma / stm32-mdma.c
1 /*
2  *
3  * Copyright (C) STMicroelectronics SA 2017
4  * Author(s): M'boumba Cedric Madianga <cedric.madianga@gmail.com>
5  *            Pierre-Yves Mordret <pierre-yves.mordret@st.com>
6  *
7  * License terms: GPL V2.0.
8  *
9  * This program is free software; you can redistribute it and/or modify it
10  * under the terms of the GNU General Public License version 2 as published by
11  * the Free Software Foundation.
12  *
13  * This program is distributed in the hope that it will be useful, but
14  * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
15  * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
16  * details.
17  *
18  * Driver for STM32 MDMA controller
19  *
20  * Inspired by stm32-dma.c and dma-jz4780.c
21  *
22  */
23
24 #include <linux/clk.h>
25 #include <linux/delay.h>
26 #include <linux/dmaengine.h>
27 #include <linux/dma-mapping.h>
28 #include <linux/dmapool.h>
29 #include <linux/err.h>
30 #include <linux/init.h>
31 #include <linux/iopoll.h>
32 #include <linux/jiffies.h>
33 #include <linux/list.h>
34 #include <linux/log2.h>
35 #include <linux/module.h>
36 #include <linux/of.h>
37 #include <linux/of_device.h>
38 #include <linux/of_dma.h>
39 #include <linux/platform_device.h>
40 #include <linux/reset.h>
41 #include <linux/slab.h>
42
43 #include "virt-dma.h"
44
45 /*  MDMA Generic getter/setter */
46 #define STM32_MDMA_SHIFT(n)             (ffs(n) - 1)
47 #define STM32_MDMA_SET(n, mask)         (((n) << STM32_MDMA_SHIFT(mask)) & \
48                                          (mask))
49 #define STM32_MDMA_GET(n, mask)         (((n) & (mask)) >> \
50                                          STM32_MDMA_SHIFT(mask))
51
52 #define STM32_MDMA_GISR0                0x0000 /* MDMA Int Status Reg 1 */
53 #define STM32_MDMA_GISR1                0x0004 /* MDMA Int Status Reg 2 */
54
55 /* MDMA Channel x interrupt/status register */
56 #define STM32_MDMA_CISR(x)              (0x40 + 0x40 * (x)) /* x = 0..62 */
57 #define STM32_MDMA_CISR_CRQA            BIT(16)
58 #define STM32_MDMA_CISR_TCIF            BIT(4)
59 #define STM32_MDMA_CISR_BTIF            BIT(3)
60 #define STM32_MDMA_CISR_BRTIF           BIT(2)
61 #define STM32_MDMA_CISR_CTCIF           BIT(1)
62 #define STM32_MDMA_CISR_TEIF            BIT(0)
63
64 /* MDMA Channel x interrupt flag clear register */
65 #define STM32_MDMA_CIFCR(x)             (0x44 + 0x40 * (x))
66 #define STM32_MDMA_CIFCR_CLTCIF         BIT(4)
67 #define STM32_MDMA_CIFCR_CBTIF          BIT(3)
68 #define STM32_MDMA_CIFCR_CBRTIF         BIT(2)
69 #define STM32_MDMA_CIFCR_CCTCIF         BIT(1)
70 #define STM32_MDMA_CIFCR_CTEIF          BIT(0)
71 #define STM32_MDMA_CIFCR_CLEAR_ALL      (STM32_MDMA_CIFCR_CLTCIF \
72                                         | STM32_MDMA_CIFCR_CBTIF \
73                                         | STM32_MDMA_CIFCR_CBRTIF \
74                                         | STM32_MDMA_CIFCR_CCTCIF \
75                                         | STM32_MDMA_CIFCR_CTEIF)
76
77 /* MDMA Channel x error status register */
78 #define STM32_MDMA_CESR(x)              (0x48 + 0x40 * (x))
79 #define STM32_MDMA_CESR_BSE             BIT(11)
80 #define STM32_MDMA_CESR_ASR             BIT(10)
81 #define STM32_MDMA_CESR_TEMD            BIT(9)
82 #define STM32_MDMA_CESR_TELD            BIT(8)
83 #define STM32_MDMA_CESR_TED             BIT(7)
84 #define STM32_MDMA_CESR_TEA_MASK        GENMASK(6, 0)
85
86 /* MDMA Channel x control register */
87 #define STM32_MDMA_CCR(x)               (0x4C + 0x40 * (x))
88 #define STM32_MDMA_CCR_SWRQ             BIT(16)
89 #define STM32_MDMA_CCR_WEX              BIT(14)
90 #define STM32_MDMA_CCR_HEX              BIT(13)
91 #define STM32_MDMA_CCR_BEX              BIT(12)
92 #define STM32_MDMA_CCR_PL_MASK          GENMASK(7, 6)
93 #define STM32_MDMA_CCR_PL(n)            STM32_MDMA_SET(n, \
94                                                        STM32_MDMA_CCR_PL_MASK)
95 #define STM32_MDMA_CCR_TCIE             BIT(5)
96 #define STM32_MDMA_CCR_BTIE             BIT(4)
97 #define STM32_MDMA_CCR_BRTIE            BIT(3)
98 #define STM32_MDMA_CCR_CTCIE            BIT(2)
99 #define STM32_MDMA_CCR_TEIE             BIT(1)
100 #define STM32_MDMA_CCR_EN               BIT(0)
101 #define STM32_MDMA_CCR_IRQ_MASK         (STM32_MDMA_CCR_TCIE \
102                                         | STM32_MDMA_CCR_BTIE \
103                                         | STM32_MDMA_CCR_BRTIE \
104                                         | STM32_MDMA_CCR_CTCIE \
105                                         | STM32_MDMA_CCR_TEIE)
106
107 /* MDMA Channel x transfer configuration register */
108 #define STM32_MDMA_CTCR(x)              (0x50 + 0x40 * (x))
109 #define STM32_MDMA_CTCR_BWM             BIT(31)
110 #define STM32_MDMA_CTCR_SWRM            BIT(30)
111 #define STM32_MDMA_CTCR_TRGM_MSK        GENMASK(29, 28)
112 #define STM32_MDMA_CTCR_TRGM(n)         STM32_MDMA_SET((n), \
113                                                        STM32_MDMA_CTCR_TRGM_MSK)
114 #define STM32_MDMA_CTCR_TRGM_GET(n)     STM32_MDMA_GET((n), \
115                                                        STM32_MDMA_CTCR_TRGM_MSK)
116 #define STM32_MDMA_CTCR_PAM_MASK        GENMASK(27, 26)
117 #define STM32_MDMA_CTCR_PAM(n)          STM32_MDMA_SET(n, \
118                                                        STM32_MDMA_CTCR_PAM_MASK)
119 #define STM32_MDMA_CTCR_PKE             BIT(25)
120 #define STM32_MDMA_CTCR_TLEN_MSK        GENMASK(24, 18)
121 #define STM32_MDMA_CTCR_TLEN(n)         STM32_MDMA_SET((n), \
122                                                        STM32_MDMA_CTCR_TLEN_MSK)
123 #define STM32_MDMA_CTCR_TLEN_GET(n)     STM32_MDMA_GET((n), \
124                                                        STM32_MDMA_CTCR_TLEN_MSK)
125 #define STM32_MDMA_CTCR_LEN2_MSK        GENMASK(25, 18)
126 #define STM32_MDMA_CTCR_LEN2(n)         STM32_MDMA_SET((n), \
127                                                        STM32_MDMA_CTCR_LEN2_MSK)
128 #define STM32_MDMA_CTCR_LEN2_GET(n)     STM32_MDMA_GET((n), \
129                                                        STM32_MDMA_CTCR_LEN2_MSK)
130 #define STM32_MDMA_CTCR_DBURST_MASK     GENMASK(17, 15)
131 #define STM32_MDMA_CTCR_DBURST(n)       STM32_MDMA_SET(n, \
132                                                     STM32_MDMA_CTCR_DBURST_MASK)
133 #define STM32_MDMA_CTCR_SBURST_MASK     GENMASK(14, 12)
134 #define STM32_MDMA_CTCR_SBURST(n)       STM32_MDMA_SET(n, \
135                                                     STM32_MDMA_CTCR_SBURST_MASK)
136 #define STM32_MDMA_CTCR_DINCOS_MASK     GENMASK(11, 10)
137 #define STM32_MDMA_CTCR_DINCOS(n)       STM32_MDMA_SET((n), \
138                                                     STM32_MDMA_CTCR_DINCOS_MASK)
139 #define STM32_MDMA_CTCR_SINCOS_MASK     GENMASK(9, 8)
140 #define STM32_MDMA_CTCR_SINCOS(n)       STM32_MDMA_SET((n), \
141                                                     STM32_MDMA_CTCR_SINCOS_MASK)
142 #define STM32_MDMA_CTCR_DSIZE_MASK      GENMASK(7, 6)
143 #define STM32_MDMA_CTCR_DSIZE(n)        STM32_MDMA_SET(n, \
144                                                      STM32_MDMA_CTCR_DSIZE_MASK)
145 #define STM32_MDMA_CTCR_SSIZE_MASK      GENMASK(5, 4)
146 #define STM32_MDMA_CTCR_SSIZE(n)        STM32_MDMA_SET(n, \
147                                                      STM32_MDMA_CTCR_SSIZE_MASK)
148 #define STM32_MDMA_CTCR_DINC_MASK       GENMASK(3, 2)
149 #define STM32_MDMA_CTCR_DINC(n)         STM32_MDMA_SET((n), \
150                                                       STM32_MDMA_CTCR_DINC_MASK)
151 #define STM32_MDMA_CTCR_SINC_MASK       GENMASK(1, 0)
152 #define STM32_MDMA_CTCR_SINC(n)         STM32_MDMA_SET((n), \
153                                                       STM32_MDMA_CTCR_SINC_MASK)
154 #define STM32_MDMA_CTCR_CFG_MASK        (STM32_MDMA_CTCR_SINC_MASK \
155                                         | STM32_MDMA_CTCR_DINC_MASK \
156                                         | STM32_MDMA_CTCR_SINCOS_MASK \
157                                         | STM32_MDMA_CTCR_DINCOS_MASK \
158                                         | STM32_MDMA_CTCR_LEN2_MSK \
159                                         | STM32_MDMA_CTCR_TRGM_MSK)
160
161 /* MDMA Channel x block number of data register */
162 #define STM32_MDMA_CBNDTR(x)            (0x54 + 0x40 * (x))
163 #define STM32_MDMA_CBNDTR_BRC_MK        GENMASK(31, 20)
164 #define STM32_MDMA_CBNDTR_BRC(n)        STM32_MDMA_SET(n, \
165                                                        STM32_MDMA_CBNDTR_BRC_MK)
166 #define STM32_MDMA_CBNDTR_BRC_GET(n)    STM32_MDMA_GET((n), \
167                                                        STM32_MDMA_CBNDTR_BRC_MK)
168
169 #define STM32_MDMA_CBNDTR_BRDUM         BIT(19)
170 #define STM32_MDMA_CBNDTR_BRSUM         BIT(18)
171 #define STM32_MDMA_CBNDTR_BNDT_MASK     GENMASK(16, 0)
172 #define STM32_MDMA_CBNDTR_BNDT(n)       STM32_MDMA_SET(n, \
173                                                     STM32_MDMA_CBNDTR_BNDT_MASK)
174
175 /* MDMA Channel x source address register */
176 #define STM32_MDMA_CSAR(x)              (0x58 + 0x40 * (x))
177
178 /* MDMA Channel x destination address register */
179 #define STM32_MDMA_CDAR(x)              (0x5C + 0x40 * (x))
180
181 /* MDMA Channel x block repeat address update register */
182 #define STM32_MDMA_CBRUR(x)             (0x60 + 0x40 * (x))
183 #define STM32_MDMA_CBRUR_DUV_MASK       GENMASK(31, 16)
184 #define STM32_MDMA_CBRUR_DUV(n)         STM32_MDMA_SET(n, \
185                                                       STM32_MDMA_CBRUR_DUV_MASK)
186 #define STM32_MDMA_CBRUR_SUV_MASK       GENMASK(15, 0)
187 #define STM32_MDMA_CBRUR_SUV(n)         STM32_MDMA_SET(n, \
188                                                       STM32_MDMA_CBRUR_SUV_MASK)
189
190 /* MDMA Channel x link address register */
191 #define STM32_MDMA_CLAR(x)              (0x64 + 0x40 * (x))
192
193 /* MDMA Channel x trigger and bus selection register */
194 #define STM32_MDMA_CTBR(x)              (0x68 + 0x40 * (x))
195 #define STM32_MDMA_CTBR_DBUS            BIT(17)
196 #define STM32_MDMA_CTBR_SBUS            BIT(16)
197 #define STM32_MDMA_CTBR_TSEL_MASK       GENMASK(7, 0)
198 #define STM32_MDMA_CTBR_TSEL(n)         STM32_MDMA_SET(n, \
199                                                       STM32_MDMA_CTBR_TSEL_MASK)
200
201 /* MDMA Channel x mask address register */
202 #define STM32_MDMA_CMAR(x)              (0x70 + 0x40 * (x))
203
204 /* MDMA Channel x mask data register */
205 #define STM32_MDMA_CMDR(x)              (0x74 + 0x40 * (x))
206
207 #define STM32_MDMA_MAX_BUF_LEN          128
208 #define STM32_MDMA_MAX_BLOCK_LEN        65536
209 #define STM32_MDMA_MAX_CHANNELS         63
210 #define STM32_MDMA_MAX_REQUESTS         256
211 #define STM32_MDMA_MAX_BURST            128
212 #define STM32_MDMA_VERY_HIGH_PRIORITY   0x11
213
214 enum stm32_mdma_trigger_mode {
215         STM32_MDMA_BUFFER,
216         STM32_MDMA_BLOCK,
217         STM32_MDMA_BLOCK_REP,
218         STM32_MDMA_LINKED_LIST,
219 };
220
221 enum stm32_mdma_width {
222         STM32_MDMA_BYTE,
223         STM32_MDMA_HALF_WORD,
224         STM32_MDMA_WORD,
225         STM32_MDMA_DOUBLE_WORD,
226 };
227
228 enum stm32_mdma_inc_mode {
229         STM32_MDMA_FIXED = 0,
230         STM32_MDMA_INC = 2,
231         STM32_MDMA_DEC = 3,
232 };
233
234 struct stm32_mdma_chan_config {
235         u32 request;
236         u32 priority_level;
237         u32 transfer_config;
238         u32 mask_addr;
239         u32 mask_data;
240 };
241
242 struct stm32_mdma_hwdesc {
243         u32 ctcr;
244         u32 cbndtr;
245         u32 csar;
246         u32 cdar;
247         u32 cbrur;
248         u32 clar;
249         u32 ctbr;
250         u32 dummy;
251         u32 cmar;
252         u32 cmdr;
253 } __aligned(64);
254
255 struct stm32_mdma_desc {
256         struct virt_dma_desc vdesc;
257         u32 ccr;
258         struct stm32_mdma_hwdesc *hwdesc;
259         dma_addr_t hwdesc_phys;
260         bool cyclic;
261         u32 count;
262 };
263
264 struct stm32_mdma_chan {
265         struct virt_dma_chan vchan;
266         struct dma_pool *desc_pool;
267         u32 id;
268         struct stm32_mdma_desc *desc;
269         u32 curr_hwdesc;
270         struct dma_slave_config dma_config;
271         struct stm32_mdma_chan_config chan_config;
272         bool busy;
273         u32 mem_burst;
274         u32 mem_width;
275 };
276
277 struct stm32_mdma_device {
278         struct dma_device ddev;
279         void __iomem *base;
280         struct clk *clk;
281         int irq;
282         struct reset_control *rst;
283         u32 nr_channels;
284         u32 nr_requests;
285         u32 nr_ahb_addr_masks;
286         struct stm32_mdma_chan chan[STM32_MDMA_MAX_CHANNELS];
287         u32 ahb_addr_masks[];
288 };
289
290 static struct stm32_mdma_device *stm32_mdma_get_dev(
291         struct stm32_mdma_chan *chan)
292 {
293         return container_of(chan->vchan.chan.device, struct stm32_mdma_device,
294                             ddev);
295 }
296
297 static struct stm32_mdma_chan *to_stm32_mdma_chan(struct dma_chan *c)
298 {
299         return container_of(c, struct stm32_mdma_chan, vchan.chan);
300 }
301
302 static struct stm32_mdma_desc *to_stm32_mdma_desc(struct virt_dma_desc *vdesc)
303 {
304         return container_of(vdesc, struct stm32_mdma_desc, vdesc);
305 }
306
307 static struct device *chan2dev(struct stm32_mdma_chan *chan)
308 {
309         return &chan->vchan.chan.dev->device;
310 }
311
312 static struct device *mdma2dev(struct stm32_mdma_device *mdma_dev)
313 {
314         return mdma_dev->ddev.dev;
315 }
316
317 static u32 stm32_mdma_read(struct stm32_mdma_device *dmadev, u32 reg)
318 {
319         return readl_relaxed(dmadev->base + reg);
320 }
321
322 static void stm32_mdma_write(struct stm32_mdma_device *dmadev, u32 reg, u32 val)
323 {
324         writel_relaxed(val, dmadev->base + reg);
325 }
326
327 static void stm32_mdma_set_bits(struct stm32_mdma_device *dmadev, u32 reg,
328                                 u32 mask)
329 {
330         void __iomem *addr = dmadev->base + reg;
331
332         writel_relaxed(readl_relaxed(addr) | mask, addr);
333 }
334
335 static void stm32_mdma_clr_bits(struct stm32_mdma_device *dmadev, u32 reg,
336                                 u32 mask)
337 {
338         void __iomem *addr = dmadev->base + reg;
339
340         writel_relaxed(readl_relaxed(addr) & ~mask, addr);
341 }
342
343 static struct stm32_mdma_desc *stm32_mdma_alloc_desc(
344                 struct stm32_mdma_chan *chan, u32 count)
345 {
346         struct stm32_mdma_desc *desc;
347
348         desc = kzalloc(sizeof(*desc), GFP_NOWAIT);
349         if (!desc)
350                 return NULL;
351
352         desc->hwdesc = dma_pool_alloc(chan->desc_pool, GFP_NOWAIT,
353                                       &desc->hwdesc_phys);
354         if (!desc->hwdesc) {
355                 dev_err(chan2dev(chan), "Failed to allocate descriptor\n");
356                 kfree(desc);
357                 return NULL;
358         }
359
360         desc->count = count;
361
362         return desc;
363 }
364
365 static void stm32_mdma_desc_free(struct virt_dma_desc *vdesc)
366 {
367         struct stm32_mdma_desc *desc = to_stm32_mdma_desc(vdesc);
368         struct stm32_mdma_chan *chan = to_stm32_mdma_chan(vdesc->tx.chan);
369
370         dma_pool_free(chan->desc_pool, desc->hwdesc, desc->hwdesc_phys);
371         kfree(desc);
372 }
373
374 static int stm32_mdma_get_width(struct stm32_mdma_chan *chan,
375                                 enum dma_slave_buswidth width)
376 {
377         switch (width) {
378         case DMA_SLAVE_BUSWIDTH_1_BYTE:
379         case DMA_SLAVE_BUSWIDTH_2_BYTES:
380         case DMA_SLAVE_BUSWIDTH_4_BYTES:
381         case DMA_SLAVE_BUSWIDTH_8_BYTES:
382                 return ffs(width) - 1;
383         default:
384                 dev_err(chan2dev(chan), "Dma bus width %i not supported\n",
385                         width);
386                 return -EINVAL;
387         }
388 }
389
390 static enum dma_slave_buswidth stm32_mdma_get_max_width(dma_addr_t addr,
391                                                         u32 buf_len, u32 tlen)
392 {
393         enum dma_slave_buswidth max_width = DMA_SLAVE_BUSWIDTH_8_BYTES;
394
395         for (max_width = DMA_SLAVE_BUSWIDTH_8_BYTES;
396              max_width > DMA_SLAVE_BUSWIDTH_1_BYTE;
397              max_width >>= 1) {
398                 /*
399                  * Address and buffer length both have to be aligned on
400                  * bus width
401                  */
402                 if ((((buf_len | addr) & (max_width - 1)) == 0) &&
403                     tlen >= max_width)
404                         break;
405         }
406
407         return max_width;
408 }
409
410 static u32 stm32_mdma_get_best_burst(u32 buf_len, u32 tlen, u32 max_burst,
411                                      enum dma_slave_buswidth width)
412 {
413         u32 best_burst = max_burst;
414         u32 burst_len = best_burst * width;
415
416         while ((burst_len > 0) && (tlen % burst_len)) {
417                 best_burst = best_burst >> 1;
418                 burst_len = best_burst * width;
419         }
420
421         return (best_burst > 0) ? best_burst : 1;
422 }
423
424 static int stm32_mdma_disable_chan(struct stm32_mdma_chan *chan)
425 {
426         struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
427         u32 ccr, cisr, id, reg;
428         int ret;
429
430         id = chan->id;
431         reg = STM32_MDMA_CCR(id);
432
433         /* Disable interrupts */
434         stm32_mdma_clr_bits(dmadev, reg, STM32_MDMA_CCR_IRQ_MASK);
435
436         ccr = stm32_mdma_read(dmadev, reg);
437         if (ccr & STM32_MDMA_CCR_EN) {
438                 stm32_mdma_clr_bits(dmadev, reg, STM32_MDMA_CCR_EN);
439
440                 /* Ensure that any ongoing transfer has been completed */
441                 ret = readl_relaxed_poll_timeout_atomic(
442                                 dmadev->base + STM32_MDMA_CISR(id), cisr,
443                                 (cisr & STM32_MDMA_CISR_CTCIF), 10, 1000);
444                 if (ret) {
445                         dev_err(chan2dev(chan), "%s: timeout!\n", __func__);
446                         return -EBUSY;
447                 }
448         }
449
450         return 0;
451 }
452
453 static void stm32_mdma_stop(struct stm32_mdma_chan *chan)
454 {
455         struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
456         u32 status;
457         int ret;
458
459         /* Disable DMA */
460         ret = stm32_mdma_disable_chan(chan);
461         if (ret < 0)
462                 return;
463
464         /* Clear interrupt status if it is there */
465         status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(chan->id));
466         if (status) {
467                 dev_dbg(chan2dev(chan), "%s(): clearing interrupt: 0x%08x\n",
468                         __func__, status);
469                 stm32_mdma_set_bits(dmadev, STM32_MDMA_CIFCR(chan->id), status);
470         }
471
472         chan->busy = false;
473 }
474
475 static void stm32_mdma_set_bus(struct stm32_mdma_device *dmadev, u32 *ctbr,
476                                u32 ctbr_mask, u32 src_addr)
477 {
478         u32 mask;
479         int i;
480
481         /* Check if memory device is on AHB or AXI */
482         *ctbr &= ~ctbr_mask;
483         mask = src_addr & 0xF0000000;
484         for (i = 0; i < dmadev->nr_ahb_addr_masks; i++) {
485                 if (mask == dmadev->ahb_addr_masks[i]) {
486                         *ctbr |= ctbr_mask;
487                         break;
488                 }
489         }
490 }
491
492 static int stm32_mdma_set_xfer_param(struct stm32_mdma_chan *chan,
493                                      enum dma_transfer_direction direction,
494                                      u32 *mdma_ccr, u32 *mdma_ctcr,
495                                      u32 *mdma_ctbr, dma_addr_t addr,
496                                      u32 buf_len)
497 {
498         struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
499         struct stm32_mdma_chan_config *chan_config = &chan->chan_config;
500         enum dma_slave_buswidth src_addr_width, dst_addr_width;
501         phys_addr_t src_addr, dst_addr;
502         int src_bus_width, dst_bus_width;
503         u32 src_maxburst, dst_maxburst, src_best_burst, dst_best_burst;
504         u32 ccr, ctcr, ctbr, tlen;
505
506         src_addr_width = chan->dma_config.src_addr_width;
507         dst_addr_width = chan->dma_config.dst_addr_width;
508         src_maxburst = chan->dma_config.src_maxburst;
509         dst_maxburst = chan->dma_config.dst_maxburst;
510
511         ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id));
512         ctcr = stm32_mdma_read(dmadev, STM32_MDMA_CTCR(chan->id));
513         ctbr = stm32_mdma_read(dmadev, STM32_MDMA_CTBR(chan->id));
514
515         /* Enable HW request mode */
516         ctcr &= ~STM32_MDMA_CTCR_SWRM;
517
518         /* Set DINC, SINC, DINCOS, SINCOS, TRGM and TLEN retrieve from DT */
519         ctcr &= ~STM32_MDMA_CTCR_CFG_MASK;
520         ctcr |= chan_config->transfer_config & STM32_MDMA_CTCR_CFG_MASK;
521
522         /*
523          * For buffer transfer length (TLEN) we have to set
524          * the number of bytes - 1 in CTCR register
525          */
526         tlen = STM32_MDMA_CTCR_LEN2_GET(ctcr);
527         ctcr &= ~STM32_MDMA_CTCR_LEN2_MSK;
528         ctcr |= STM32_MDMA_CTCR_TLEN((tlen - 1));
529
530         /* Disable Pack Enable */
531         ctcr &= ~STM32_MDMA_CTCR_PKE;
532
533         /* Check burst size constraints */
534         if (src_maxburst * src_addr_width > STM32_MDMA_MAX_BURST ||
535             dst_maxburst * dst_addr_width > STM32_MDMA_MAX_BURST) {
536                 dev_err(chan2dev(chan),
537                         "burst size * bus width higher than %d bytes\n",
538                         STM32_MDMA_MAX_BURST);
539                 return -EINVAL;
540         }
541
542         if ((!is_power_of_2(src_maxburst) && src_maxburst > 0) ||
543             (!is_power_of_2(dst_maxburst) && dst_maxburst > 0)) {
544                 dev_err(chan2dev(chan), "burst size must be a power of 2\n");
545                 return -EINVAL;
546         }
547
548         /*
549          * Configure channel control:
550          * - Clear SW request as in this case this is a HW one
551          * - Clear WEX, HEX and BEX bits
552          * - Set priority level
553          */
554         ccr &= ~(STM32_MDMA_CCR_SWRQ | STM32_MDMA_CCR_WEX | STM32_MDMA_CCR_HEX |
555                  STM32_MDMA_CCR_BEX | STM32_MDMA_CCR_PL_MASK);
556         ccr |= STM32_MDMA_CCR_PL(chan_config->priority_level);
557
558         /* Configure Trigger selection */
559         ctbr &= ~STM32_MDMA_CTBR_TSEL_MASK;
560         ctbr |= STM32_MDMA_CTBR_TSEL(chan_config->request);
561
562         switch (direction) {
563         case DMA_MEM_TO_DEV:
564                 dst_addr = chan->dma_config.dst_addr;
565
566                 /* Set device data size */
567                 dst_bus_width = stm32_mdma_get_width(chan, dst_addr_width);
568                 if (dst_bus_width < 0)
569                         return dst_bus_width;
570                 ctcr &= ~STM32_MDMA_CTCR_DSIZE_MASK;
571                 ctcr |= STM32_MDMA_CTCR_DSIZE(dst_bus_width);
572
573                 /* Set device burst value */
574                 dst_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
575                                                            dst_maxburst,
576                                                            dst_addr_width);
577                 chan->mem_burst = dst_best_burst;
578                 ctcr &= ~STM32_MDMA_CTCR_DBURST_MASK;
579                 ctcr |= STM32_MDMA_CTCR_DBURST((ilog2(dst_best_burst)));
580
581                 /* Set memory data size */
582                 src_addr_width = stm32_mdma_get_max_width(addr, buf_len, tlen);
583                 chan->mem_width = src_addr_width;
584                 src_bus_width = stm32_mdma_get_width(chan, src_addr_width);
585                 if (src_bus_width < 0)
586                         return src_bus_width;
587                 ctcr &= ~STM32_MDMA_CTCR_SSIZE_MASK |
588                         STM32_MDMA_CTCR_SINCOS_MASK;
589                 ctcr |= STM32_MDMA_CTCR_SSIZE(src_bus_width) |
590                         STM32_MDMA_CTCR_SINCOS(src_bus_width);
591
592                 /* Set memory burst value */
593                 src_maxburst = STM32_MDMA_MAX_BUF_LEN / src_addr_width;
594                 src_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
595                                                            src_maxburst,
596                                                            src_addr_width);
597                 chan->mem_burst = src_best_burst;
598                 ctcr &= ~STM32_MDMA_CTCR_SBURST_MASK;
599                 ctcr |= STM32_MDMA_CTCR_SBURST((ilog2(src_best_burst)));
600
601                 /* Select bus */
602                 stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS,
603                                    dst_addr);
604
605                 if (dst_bus_width != src_bus_width)
606                         ctcr |= STM32_MDMA_CTCR_PKE;
607
608                 /* Set destination address */
609                 stm32_mdma_write(dmadev, STM32_MDMA_CDAR(chan->id), dst_addr);
610                 break;
611
612         case DMA_DEV_TO_MEM:
613                 src_addr = chan->dma_config.src_addr;
614
615                 /* Set device data size */
616                 src_bus_width = stm32_mdma_get_width(chan, src_addr_width);
617                 if (src_bus_width < 0)
618                         return src_bus_width;
619                 ctcr &= ~STM32_MDMA_CTCR_SSIZE_MASK;
620                 ctcr |= STM32_MDMA_CTCR_SSIZE(src_bus_width);
621
622                 /* Set device burst value */
623                 src_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
624                                                            src_maxburst,
625                                                            src_addr_width);
626                 ctcr &= ~STM32_MDMA_CTCR_SBURST_MASK;
627                 ctcr |= STM32_MDMA_CTCR_SBURST((ilog2(src_best_burst)));
628
629                 /* Set memory data size */
630                 dst_addr_width = stm32_mdma_get_max_width(addr, buf_len, tlen);
631                 chan->mem_width = dst_addr_width;
632                 dst_bus_width = stm32_mdma_get_width(chan, dst_addr_width);
633                 if (dst_bus_width < 0)
634                         return dst_bus_width;
635                 ctcr &= ~(STM32_MDMA_CTCR_DSIZE_MASK |
636                         STM32_MDMA_CTCR_DINCOS_MASK);
637                 ctcr |= STM32_MDMA_CTCR_DSIZE(dst_bus_width) |
638                         STM32_MDMA_CTCR_DINCOS(dst_bus_width);
639
640                 /* Set memory burst value */
641                 dst_maxburst = STM32_MDMA_MAX_BUF_LEN / dst_addr_width;
642                 dst_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
643                                                            dst_maxburst,
644                                                            dst_addr_width);
645                 ctcr &= ~STM32_MDMA_CTCR_DBURST_MASK;
646                 ctcr |= STM32_MDMA_CTCR_DBURST((ilog2(dst_best_burst)));
647
648                 /* Select bus */
649                 stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS,
650                                    src_addr);
651
652                 if (dst_bus_width != src_bus_width)
653                         ctcr |= STM32_MDMA_CTCR_PKE;
654
655                 /* Set source address */
656                 stm32_mdma_write(dmadev, STM32_MDMA_CSAR(chan->id), src_addr);
657                 break;
658
659         default:
660                 dev_err(chan2dev(chan), "Dma direction is not supported\n");
661                 return -EINVAL;
662         }
663
664         *mdma_ccr = ccr;
665         *mdma_ctcr = ctcr;
666         *mdma_ctbr = ctbr;
667
668         return 0;
669 }
670
671 static void stm32_mdma_dump_hwdesc(struct stm32_mdma_chan *chan,
672                                    struct stm32_mdma_hwdesc *hwdesc)
673 {
674         dev_dbg(chan2dev(chan), "hwdesc:  0x%p\n", hwdesc);
675         dev_dbg(chan2dev(chan), "CTCR:    0x%08x\n", hwdesc->ctcr);
676         dev_dbg(chan2dev(chan), "CBNDTR:  0x%08x\n", hwdesc->cbndtr);
677         dev_dbg(chan2dev(chan), "CSAR:    0x%08x\n", hwdesc->csar);
678         dev_dbg(chan2dev(chan), "CDAR:    0x%08x\n", hwdesc->cdar);
679         dev_dbg(chan2dev(chan), "CBRUR:   0x%08x\n", hwdesc->cbrur);
680         dev_dbg(chan2dev(chan), "CLAR:    0x%08x\n", hwdesc->clar);
681         dev_dbg(chan2dev(chan), "CTBR:    0x%08x\n", hwdesc->ctbr);
682         dev_dbg(chan2dev(chan), "CMAR:    0x%08x\n", hwdesc->cmar);
683         dev_dbg(chan2dev(chan), "CMDR:    0x%08x\n\n", hwdesc->cmdr);
684 }
685
686 static void stm32_mdma_setup_hwdesc(struct stm32_mdma_chan *chan,
687                                     struct stm32_mdma_desc *desc,
688                                     enum dma_transfer_direction dir, u32 count,
689                                     dma_addr_t src_addr, dma_addr_t dst_addr,
690                                     u32 len, u32 ctcr, u32 ctbr, bool is_last,
691                                     bool is_first, bool is_cyclic)
692 {
693         struct stm32_mdma_chan_config *config = &chan->chan_config;
694         struct stm32_mdma_hwdesc *hwdesc;
695         u32 next = count + 1;
696
697         hwdesc = &desc->hwdesc[count];
698         hwdesc->ctcr = ctcr;
699         hwdesc->cbndtr &= ~(STM32_MDMA_CBNDTR_BRC_MK |
700                         STM32_MDMA_CBNDTR_BRDUM |
701                         STM32_MDMA_CBNDTR_BRSUM |
702                         STM32_MDMA_CBNDTR_BNDT_MASK);
703         hwdesc->cbndtr |= STM32_MDMA_CBNDTR_BNDT(len);
704         hwdesc->csar = src_addr;
705         hwdesc->cdar = dst_addr;
706         hwdesc->cbrur = 0;
707         hwdesc->clar = desc->hwdesc_phys + next * sizeof(*hwdesc);
708         hwdesc->ctbr = ctbr;
709         hwdesc->cmar = config->mask_addr;
710         hwdesc->cmdr = config->mask_data;
711
712         if (is_last) {
713                 if (is_cyclic)
714                         hwdesc->clar = desc->hwdesc_phys;
715                 else
716                         hwdesc->clar = 0;
717         }
718
719         stm32_mdma_dump_hwdesc(chan, hwdesc);
720 }
721
722 static int stm32_mdma_setup_xfer(struct stm32_mdma_chan *chan,
723                                  struct stm32_mdma_desc *desc,
724                                  struct scatterlist *sgl, u32 sg_len,
725                                  enum dma_transfer_direction direction)
726 {
727         struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
728         struct dma_slave_config *dma_config = &chan->dma_config;
729         struct scatterlist *sg;
730         dma_addr_t src_addr, dst_addr;
731         u32 ccr, ctcr, ctbr;
732         int i, ret = 0;
733
734         for_each_sg(sgl, sg, sg_len, i) {
735                 if (sg_dma_len(sg) > STM32_MDMA_MAX_BLOCK_LEN) {
736                         dev_err(chan2dev(chan), "Invalid block len\n");
737                         return -EINVAL;
738                 }
739
740                 if (direction == DMA_MEM_TO_DEV) {
741                         src_addr = sg_dma_address(sg);
742                         dst_addr = dma_config->dst_addr;
743                         ret = stm32_mdma_set_xfer_param(chan, direction, &ccr,
744                                                         &ctcr, &ctbr, src_addr,
745                                                         sg_dma_len(sg));
746                         stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS,
747                                            src_addr);
748                 } else {
749                         src_addr = dma_config->src_addr;
750                         dst_addr = sg_dma_address(sg);
751                         ret = stm32_mdma_set_xfer_param(chan, direction, &ccr,
752                                                         &ctcr, &ctbr, dst_addr,
753                                                         sg_dma_len(sg));
754                         stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS,
755                                            dst_addr);
756                 }
757
758                 if (ret < 0)
759                         return ret;
760
761                 stm32_mdma_setup_hwdesc(chan, desc, direction, i, src_addr,
762                                         dst_addr, sg_dma_len(sg), ctcr, ctbr,
763                                         i == sg_len - 1, i == 0, false);
764         }
765
766         /* Enable interrupts */
767         ccr &= ~STM32_MDMA_CCR_IRQ_MASK;
768         ccr |= STM32_MDMA_CCR_TEIE | STM32_MDMA_CCR_CTCIE;
769         if (sg_len > 1)
770                 ccr |= STM32_MDMA_CCR_BTIE;
771         desc->ccr = ccr;
772
773         return 0;
774 }
775
776 static struct dma_async_tx_descriptor *
777 stm32_mdma_prep_slave_sg(struct dma_chan *c, struct scatterlist *sgl,
778                          u32 sg_len, enum dma_transfer_direction direction,
779                          unsigned long flags, void *context)
780 {
781         struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
782         struct stm32_mdma_desc *desc;
783         int ret;
784
785         /*
786          * Once DMA is in setup cyclic mode the channel we cannot assign this
787          * channel anymore. The DMA channel needs to be aborted or terminated
788          * for allowing another request.
789          */
790         if (chan->desc && chan->desc->cyclic) {
791                 dev_err(chan2dev(chan),
792                         "Request not allowed when dma in cyclic mode\n");
793                 return NULL;
794         }
795
796         desc = stm32_mdma_alloc_desc(chan, sg_len);
797         if (!desc)
798                 return NULL;
799
800         ret = stm32_mdma_setup_xfer(chan, desc, sgl, sg_len, direction);
801         if (ret < 0)
802                 goto xfer_setup_err;
803
804         desc->cyclic = false;
805
806         return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);
807
808 xfer_setup_err:
809         dma_pool_free(chan->desc_pool, &desc->hwdesc, desc->hwdesc_phys);
810         kfree(desc);
811         return NULL;
812 }
813
814 static struct dma_async_tx_descriptor *
815 stm32_mdma_prep_dma_cyclic(struct dma_chan *c, dma_addr_t buf_addr,
816                            size_t buf_len, size_t period_len,
817                            enum dma_transfer_direction direction,
818                            unsigned long flags)
819 {
820         struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
821         struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
822         struct dma_slave_config *dma_config = &chan->dma_config;
823         struct stm32_mdma_desc *desc;
824         dma_addr_t src_addr, dst_addr;
825         u32 ccr, ctcr, ctbr, count;
826         int i, ret;
827
828         /*
829          * Once DMA is in setup cyclic mode the channel we cannot assign this
830          * channel anymore. The DMA channel needs to be aborted or terminated
831          * for allowing another request.
832          */
833         if (chan->desc && chan->desc->cyclic) {
834                 dev_err(chan2dev(chan),
835                         "Request not allowed when dma in cyclic mode\n");
836                 return NULL;
837         }
838
839         if (!buf_len || !period_len || period_len > STM32_MDMA_MAX_BLOCK_LEN) {
840                 dev_err(chan2dev(chan), "Invalid buffer/period len\n");
841                 return NULL;
842         }
843
844         if (buf_len % period_len) {
845                 dev_err(chan2dev(chan), "buf_len not multiple of period_len\n");
846                 return NULL;
847         }
848
849         count = buf_len / period_len;
850
851         desc = stm32_mdma_alloc_desc(chan, count);
852         if (!desc)
853                 return NULL;
854
855         /* Select bus */
856         if (direction == DMA_MEM_TO_DEV) {
857                 src_addr = buf_addr;
858                 ret = stm32_mdma_set_xfer_param(chan, direction, &ccr, &ctcr,
859                                                 &ctbr, src_addr, period_len);
860                 stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS,
861                                    src_addr);
862         } else {
863                 dst_addr = buf_addr;
864                 ret = stm32_mdma_set_xfer_param(chan, direction, &ccr, &ctcr,
865                                                 &ctbr, dst_addr, period_len);
866                 stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS,
867                                    dst_addr);
868         }
869
870         if (ret < 0)
871                 goto xfer_setup_err;
872
873         /* Enable interrupts */
874         ccr &= ~STM32_MDMA_CCR_IRQ_MASK;
875         ccr |= STM32_MDMA_CCR_TEIE | STM32_MDMA_CCR_CTCIE | STM32_MDMA_CCR_BTIE;
876         desc->ccr = ccr;
877
878         /* Configure hwdesc list */
879         for (i = 0; i < count; i++) {
880                 if (direction == DMA_MEM_TO_DEV) {
881                         src_addr = buf_addr + i * period_len;
882                         dst_addr = dma_config->dst_addr;
883                 } else {
884                         src_addr = dma_config->src_addr;
885                         dst_addr = buf_addr + i * period_len;
886                 }
887
888                 stm32_mdma_setup_hwdesc(chan, desc, direction, i, src_addr,
889                                         dst_addr, period_len, ctcr, ctbr,
890                                         i == count - 1, i == 0, true);
891         }
892
893         desc->cyclic = true;
894
895         return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);
896
897 xfer_setup_err:
898         dma_pool_free(chan->desc_pool, &desc->hwdesc, desc->hwdesc_phys);
899         kfree(desc);
900         return NULL;
901 }
902
903 static struct dma_async_tx_descriptor *
904 stm32_mdma_prep_dma_memcpy(struct dma_chan *c, dma_addr_t dest, dma_addr_t src,
905                            size_t len, unsigned long flags)
906 {
907         struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
908         struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
909         enum dma_slave_buswidth max_width;
910         struct stm32_mdma_desc *desc;
911         struct stm32_mdma_hwdesc *hwdesc;
912         u32 ccr, ctcr, ctbr, cbndtr, count, max_burst, mdma_burst;
913         u32 best_burst, tlen;
914         size_t xfer_count, offset;
915         int src_bus_width, dst_bus_width;
916         int i;
917
918         /*
919          * Once DMA is in setup cyclic mode the channel we cannot assign this
920          * channel anymore. The DMA channel needs to be aborted or terminated
921          * to allow another request
922          */
923         if (chan->desc && chan->desc->cyclic) {
924                 dev_err(chan2dev(chan),
925                         "Request not allowed when dma in cyclic mode\n");
926                 return NULL;
927         }
928
929         count = DIV_ROUND_UP(len, STM32_MDMA_MAX_BLOCK_LEN);
930         desc = stm32_mdma_alloc_desc(chan, count);
931         if (!desc)
932                 return NULL;
933
934         ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id));
935         ctcr = stm32_mdma_read(dmadev, STM32_MDMA_CTCR(chan->id));
936         ctbr = stm32_mdma_read(dmadev, STM32_MDMA_CTBR(chan->id));
937         cbndtr = stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id));
938
939         /* Enable sw req, some interrupts and clear other bits */
940         ccr &= ~(STM32_MDMA_CCR_WEX | STM32_MDMA_CCR_HEX |
941                  STM32_MDMA_CCR_BEX | STM32_MDMA_CCR_PL_MASK |
942                  STM32_MDMA_CCR_IRQ_MASK);
943         ccr |= STM32_MDMA_CCR_TEIE;
944
945         /* Enable SW request mode, dest/src inc and clear other bits */
946         ctcr &= ~(STM32_MDMA_CTCR_BWM | STM32_MDMA_CTCR_TRGM_MSK |
947                   STM32_MDMA_CTCR_PAM_MASK | STM32_MDMA_CTCR_PKE |
948                   STM32_MDMA_CTCR_TLEN_MSK | STM32_MDMA_CTCR_DBURST_MASK |
949                   STM32_MDMA_CTCR_SBURST_MASK | STM32_MDMA_CTCR_DINCOS_MASK |
950                   STM32_MDMA_CTCR_SINCOS_MASK | STM32_MDMA_CTCR_DSIZE_MASK |
951                   STM32_MDMA_CTCR_SSIZE_MASK | STM32_MDMA_CTCR_DINC_MASK |
952                   STM32_MDMA_CTCR_SINC_MASK);
953         ctcr |= STM32_MDMA_CTCR_SWRM | STM32_MDMA_CTCR_SINC(STM32_MDMA_INC) |
954                 STM32_MDMA_CTCR_DINC(STM32_MDMA_INC);
955
956         /* Reset HW request */
957         ctbr &= ~STM32_MDMA_CTBR_TSEL_MASK;
958
959         /* Select bus */
960         stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS, src);
961         stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS, dest);
962
963         /* Clear CBNDTR registers */
964         cbndtr &= ~(STM32_MDMA_CBNDTR_BRC_MK | STM32_MDMA_CBNDTR_BRDUM |
965                         STM32_MDMA_CBNDTR_BRSUM | STM32_MDMA_CBNDTR_BNDT_MASK);
966
967         if (len <= STM32_MDMA_MAX_BLOCK_LEN) {
968                 cbndtr |= STM32_MDMA_CBNDTR_BNDT(len);
969                 if (len <= STM32_MDMA_MAX_BUF_LEN) {
970                         /* Setup a buffer transfer */
971                         ccr |= STM32_MDMA_CCR_TCIE | STM32_MDMA_CCR_CTCIE;
972                         ctcr |= STM32_MDMA_CTCR_TRGM(STM32_MDMA_BUFFER);
973                 } else {
974                         /* Setup a block transfer */
975                         ccr |= STM32_MDMA_CCR_BTIE | STM32_MDMA_CCR_CTCIE;
976                         ctcr |= STM32_MDMA_CTCR_TRGM(STM32_MDMA_BLOCK);
977                 }
978
979                 tlen = STM32_MDMA_MAX_BUF_LEN;
980                 ctcr |= STM32_MDMA_CTCR_TLEN((tlen - 1));
981
982                 /* Set source best burst size */
983                 max_width = stm32_mdma_get_max_width(src, len, tlen);
984                 src_bus_width = stm32_mdma_get_width(chan, max_width);
985
986                 max_burst = tlen / max_width;
987                 best_burst = stm32_mdma_get_best_burst(len, tlen, max_burst,
988                                                        max_width);
989                 mdma_burst = ilog2(best_burst);
990
991                 ctcr |= STM32_MDMA_CTCR_SBURST(mdma_burst) |
992                         STM32_MDMA_CTCR_SSIZE(src_bus_width) |
993                         STM32_MDMA_CTCR_SINCOS(src_bus_width);
994
995                 /* Set destination best burst size */
996                 max_width = stm32_mdma_get_max_width(dest, len, tlen);
997                 dst_bus_width = stm32_mdma_get_width(chan, max_width);
998
999                 max_burst = tlen / max_width;
1000                 best_burst = stm32_mdma_get_best_burst(len, tlen, max_burst,
1001                                                        max_width);
1002                 mdma_burst = ilog2(best_burst);
1003
1004                 ctcr |= STM32_MDMA_CTCR_DBURST(mdma_burst) |
1005                         STM32_MDMA_CTCR_DSIZE(dst_bus_width) |
1006                         STM32_MDMA_CTCR_DINCOS(dst_bus_width);
1007
1008                 if (dst_bus_width != src_bus_width)
1009                         ctcr |= STM32_MDMA_CTCR_PKE;
1010
1011                 /* Prepare hardware descriptor */
1012                 hwdesc = desc->hwdesc;
1013                 hwdesc->ctcr = ctcr;
1014                 hwdesc->cbndtr = cbndtr;
1015                 hwdesc->csar = src;
1016                 hwdesc->cdar = dest;
1017                 hwdesc->cbrur = 0;
1018                 hwdesc->clar = 0;
1019                 hwdesc->ctbr = ctbr;
1020                 hwdesc->cmar = 0;
1021                 hwdesc->cmdr = 0;
1022
1023                 stm32_mdma_dump_hwdesc(chan, hwdesc);
1024         } else {
1025                 /* Setup a LLI transfer */
1026                 ctcr |= STM32_MDMA_CTCR_TRGM(STM32_MDMA_LINKED_LIST) |
1027                         STM32_MDMA_CTCR_TLEN((STM32_MDMA_MAX_BUF_LEN - 1));
1028                 ccr |= STM32_MDMA_CCR_BTIE | STM32_MDMA_CCR_CTCIE;
1029                 tlen = STM32_MDMA_MAX_BUF_LEN;
1030
1031                 for (i = 0, offset = 0; offset < len;
1032                      i++, offset += xfer_count) {
1033                         xfer_count = min_t(size_t, len - offset,
1034                                            STM32_MDMA_MAX_BLOCK_LEN);
1035
1036                         /* Set source best burst size */
1037                         max_width = stm32_mdma_get_max_width(src, len, tlen);
1038                         src_bus_width = stm32_mdma_get_width(chan, max_width);
1039
1040                         max_burst = tlen / max_width;
1041                         best_burst = stm32_mdma_get_best_burst(len, tlen,
1042                                                                max_burst,
1043                                                                max_width);
1044                         mdma_burst = ilog2(best_burst);
1045
1046                         ctcr |= STM32_MDMA_CTCR_SBURST(mdma_burst) |
1047                                 STM32_MDMA_CTCR_SSIZE(src_bus_width) |
1048                                 STM32_MDMA_CTCR_SINCOS(src_bus_width);
1049
1050                         /* Set destination best burst size */
1051                         max_width = stm32_mdma_get_max_width(dest, len, tlen);
1052                         dst_bus_width = stm32_mdma_get_width(chan, max_width);
1053
1054                         max_burst = tlen / max_width;
1055                         best_burst = stm32_mdma_get_best_burst(len, tlen,
1056                                                                max_burst,
1057                                                                max_width);
1058                         mdma_burst = ilog2(best_burst);
1059
1060                         ctcr |= STM32_MDMA_CTCR_DBURST(mdma_burst) |
1061                                 STM32_MDMA_CTCR_DSIZE(dst_bus_width) |
1062                                 STM32_MDMA_CTCR_DINCOS(dst_bus_width);
1063
1064                         if (dst_bus_width != src_bus_width)
1065                                 ctcr |= STM32_MDMA_CTCR_PKE;
1066
1067                         /* Prepare hardware descriptor */
1068                         stm32_mdma_setup_hwdesc(chan, desc, DMA_MEM_TO_MEM, i,
1069                                                 src + offset, dest + offset,
1070                                                 xfer_count, ctcr, ctbr,
1071                                                 i == count - 1, i == 0, false);
1072                 }
1073         }
1074
1075         desc->ccr = ccr;
1076
1077         desc->cyclic = false;
1078
1079         return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);
1080 }
1081
1082 static void stm32_mdma_dump_reg(struct stm32_mdma_chan *chan)
1083 {
1084         struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
1085
1086         dev_dbg(chan2dev(chan), "CCR:     0x%08x\n",
1087                 stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id)));
1088         dev_dbg(chan2dev(chan), "CTCR:    0x%08x\n",
1089                 stm32_mdma_read(dmadev, STM32_MDMA_CTCR(chan->id)));
1090         dev_dbg(chan2dev(chan), "CBNDTR:  0x%08x\n",
1091                 stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id)));
1092         dev_dbg(chan2dev(chan), "CSAR:    0x%08x\n",
1093                 stm32_mdma_read(dmadev, STM32_MDMA_CSAR(chan->id)));
1094         dev_dbg(chan2dev(chan), "CDAR:    0x%08x\n",
1095                 stm32_mdma_read(dmadev, STM32_MDMA_CDAR(chan->id)));
1096         dev_dbg(chan2dev(chan), "CBRUR:   0x%08x\n",
1097                 stm32_mdma_read(dmadev, STM32_MDMA_CBRUR(chan->id)));
1098         dev_dbg(chan2dev(chan), "CLAR:    0x%08x\n",
1099                 stm32_mdma_read(dmadev, STM32_MDMA_CLAR(chan->id)));
1100         dev_dbg(chan2dev(chan), "CTBR:    0x%08x\n",
1101                 stm32_mdma_read(dmadev, STM32_MDMA_CTBR(chan->id)));
1102         dev_dbg(chan2dev(chan), "CMAR:    0x%08x\n",
1103                 stm32_mdma_read(dmadev, STM32_MDMA_CMAR(chan->id)));
1104         dev_dbg(chan2dev(chan), "CMDR:    0x%08x\n",
1105                 stm32_mdma_read(dmadev, STM32_MDMA_CMDR(chan->id)));
1106 }
1107
1108 static void stm32_mdma_start_transfer(struct stm32_mdma_chan *chan)
1109 {
1110         struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
1111         struct virt_dma_desc *vdesc;
1112         struct stm32_mdma_hwdesc *hwdesc;
1113         u32 id = chan->id;
1114         u32 status, reg;
1115
1116         vdesc = vchan_next_desc(&chan->vchan);
1117         if (!vdesc) {
1118                 chan->desc = NULL;
1119                 return;
1120         }
1121
1122         chan->desc = to_stm32_mdma_desc(vdesc);
1123         hwdesc = chan->desc->hwdesc;
1124         chan->curr_hwdesc = 0;
1125
1126         stm32_mdma_write(dmadev, STM32_MDMA_CCR(id), chan->desc->ccr);
1127         stm32_mdma_write(dmadev, STM32_MDMA_CTCR(id), hwdesc->ctcr);
1128         stm32_mdma_write(dmadev, STM32_MDMA_CBNDTR(id), hwdesc->cbndtr);
1129         stm32_mdma_write(dmadev, STM32_MDMA_CSAR(id), hwdesc->csar);
1130         stm32_mdma_write(dmadev, STM32_MDMA_CDAR(id), hwdesc->cdar);
1131         stm32_mdma_write(dmadev, STM32_MDMA_CBRUR(id), hwdesc->cbrur);
1132         stm32_mdma_write(dmadev, STM32_MDMA_CLAR(id), hwdesc->clar);
1133         stm32_mdma_write(dmadev, STM32_MDMA_CTBR(id), hwdesc->ctbr);
1134         stm32_mdma_write(dmadev, STM32_MDMA_CMAR(id), hwdesc->cmar);
1135         stm32_mdma_write(dmadev, STM32_MDMA_CMDR(id), hwdesc->cmdr);
1136
1137         /* Clear interrupt status if it is there */
1138         status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(id));
1139         if (status)
1140                 stm32_mdma_set_bits(dmadev, STM32_MDMA_CIFCR(id), status);
1141
1142         stm32_mdma_dump_reg(chan);
1143
1144         /* Start DMA */
1145         stm32_mdma_set_bits(dmadev, STM32_MDMA_CCR(id), STM32_MDMA_CCR_EN);
1146
1147         /* Set SW request in case of MEM2MEM transfer */
1148         if (hwdesc->ctcr & STM32_MDMA_CTCR_SWRM) {
1149                 reg = STM32_MDMA_CCR(id);
1150                 stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CCR_SWRQ);
1151         }
1152
1153         chan->busy = true;
1154
1155         dev_dbg(chan2dev(chan), "vchan %p: started\n", &chan->vchan);
1156 }
1157
1158 static void stm32_mdma_issue_pending(struct dma_chan *c)
1159 {
1160         struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1161         unsigned long flags;
1162
1163         spin_lock_irqsave(&chan->vchan.lock, flags);
1164
1165         if (!vchan_issue_pending(&chan->vchan))
1166                 goto end;
1167
1168         dev_dbg(chan2dev(chan), "vchan %p: issued\n", &chan->vchan);
1169
1170         if (!chan->desc && !chan->busy)
1171                 stm32_mdma_start_transfer(chan);
1172
1173 end:
1174         spin_unlock_irqrestore(&chan->vchan.lock, flags);
1175 }
1176
1177 static int stm32_mdma_pause(struct dma_chan *c)
1178 {
1179         struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1180         unsigned long flags;
1181         int ret;
1182
1183         spin_lock_irqsave(&chan->vchan.lock, flags);
1184         ret = stm32_mdma_disable_chan(chan);
1185         spin_unlock_irqrestore(&chan->vchan.lock, flags);
1186
1187         if (!ret)
1188                 dev_dbg(chan2dev(chan), "vchan %p: pause\n", &chan->vchan);
1189
1190         return ret;
1191 }
1192
1193 static int stm32_mdma_resume(struct dma_chan *c)
1194 {
1195         struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1196         struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
1197         struct stm32_mdma_hwdesc *hwdesc;
1198         unsigned long flags;
1199         u32 status, reg;
1200
1201         hwdesc = &chan->desc->hwdesc[chan->curr_hwdesc];
1202
1203         spin_lock_irqsave(&chan->vchan.lock, flags);
1204
1205         /* Re-configure control register */
1206         stm32_mdma_write(dmadev, STM32_MDMA_CCR(chan->id), chan->desc->ccr);
1207
1208         /* Clear interrupt status if it is there */
1209         status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(chan->id));
1210         if (status)
1211                 stm32_mdma_set_bits(dmadev, STM32_MDMA_CIFCR(chan->id), status);
1212
1213         stm32_mdma_dump_reg(chan);
1214
1215         /* Re-start DMA */
1216         reg = STM32_MDMA_CCR(chan->id);
1217         stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CCR_EN);
1218
1219         /* Set SW request in case of MEM2MEM transfer */
1220         if (hwdesc->ctcr & STM32_MDMA_CTCR_SWRM)
1221                 stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CCR_SWRQ);
1222
1223         spin_unlock_irqrestore(&chan->vchan.lock, flags);
1224
1225         dev_dbg(chan2dev(chan), "vchan %p: resume\n", &chan->vchan);
1226
1227         return 0;
1228 }
1229
1230 static int stm32_mdma_terminate_all(struct dma_chan *c)
1231 {
1232         struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1233         unsigned long flags;
1234         LIST_HEAD(head);
1235
1236         spin_lock_irqsave(&chan->vchan.lock, flags);
1237         if (chan->busy) {
1238                 stm32_mdma_stop(chan);
1239                 chan->desc = NULL;
1240         }
1241         vchan_get_all_descriptors(&chan->vchan, &head);
1242         spin_unlock_irqrestore(&chan->vchan.lock, flags);
1243
1244         vchan_dma_desc_free_list(&chan->vchan, &head);
1245
1246         return 0;
1247 }
1248
1249 static void stm32_mdma_synchronize(struct dma_chan *c)
1250 {
1251         struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1252
1253         vchan_synchronize(&chan->vchan);
1254 }
1255
1256 static int stm32_mdma_slave_config(struct dma_chan *c,
1257                                    struct dma_slave_config *config)
1258 {
1259         struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1260
1261         memcpy(&chan->dma_config, config, sizeof(*config));
1262
1263         return 0;
1264 }
1265
1266 static size_t stm32_mdma_desc_residue(struct stm32_mdma_chan *chan,
1267                                       struct stm32_mdma_desc *desc,
1268                                       u32 curr_hwdesc)
1269 {
1270         struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
1271         u32 cbndtr, residue, modulo, burst_size;
1272         int i;
1273
1274         residue = 0;
1275         for (i = curr_hwdesc + 1; i < desc->count; i++) {
1276                 struct stm32_mdma_hwdesc *hwdesc = &desc->hwdesc[i];
1277
1278                 residue += STM32_MDMA_CBNDTR_BNDT(hwdesc->cbndtr);
1279         }
1280         cbndtr = stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id));
1281         residue += cbndtr & STM32_MDMA_CBNDTR_BNDT_MASK;
1282
1283         if (!chan->mem_burst)
1284                 return residue;
1285
1286         burst_size = chan->mem_burst * chan->mem_width;
1287         modulo = residue % burst_size;
1288         if (modulo)
1289                 residue = residue - modulo + burst_size;
1290
1291         return residue;
1292 }
1293
1294 static enum dma_status stm32_mdma_tx_status(struct dma_chan *c,
1295                                             dma_cookie_t cookie,
1296                                             struct dma_tx_state *state)
1297 {
1298         struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1299         struct virt_dma_desc *vdesc;
1300         enum dma_status status;
1301         unsigned long flags;
1302         u32 residue = 0;
1303
1304         status = dma_cookie_status(c, cookie, state);
1305         if ((status == DMA_COMPLETE) || (!state))
1306                 return status;
1307
1308         spin_lock_irqsave(&chan->vchan.lock, flags);
1309
1310         vdesc = vchan_find_desc(&chan->vchan, cookie);
1311         if (chan->desc && cookie == chan->desc->vdesc.tx.cookie)
1312                 residue = stm32_mdma_desc_residue(chan, chan->desc,
1313                                                   chan->curr_hwdesc);
1314         else if (vdesc)
1315                 residue = stm32_mdma_desc_residue(chan,
1316                                                   to_stm32_mdma_desc(vdesc), 0);
1317         dma_set_residue(state, residue);
1318
1319         spin_unlock_irqrestore(&chan->vchan.lock, flags);
1320
1321         return status;
1322 }
1323
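/*
 * Illustrative sketch only, not compiled into this driver: how a client
 * would poll transfer progress (the function name is hypothetical).
 * dmaengine_tx_status() ends up in stm32_mdma_tx_status() above, and the
 * returned residue is rounded up to burst granularity.
 */
#if 0
static u32 example_client_residue(struct dma_chan *chan, dma_cookie_t cookie)
{
        struct dma_tx_state state;

        dmaengine_tx_status(chan, cookie, &state);

        return state.residue;
}
#endif
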
1324 static void stm32_mdma_xfer_end(struct stm32_mdma_chan *chan)
1325 {
1326         list_del(&chan->desc->vdesc.node);
1327         vchan_cookie_complete(&chan->desc->vdesc);
1328         chan->desc = NULL;
1329         chan->busy = false;
1330
1331         /* Start the next transfer if another descriptor is pending */
1332         stm32_mdma_start_transfer(chan);
1333 }
1334
1335 static irqreturn_t stm32_mdma_irq_handler(int irq, void *devid)
1336 {
1337         struct stm32_mdma_device *dmadev = devid;
1338         struct stm32_mdma_chan *chan;
1339         u32 reg, id, ien, status, flag;
1340
1341         /* Find out which channel generates the interrupt */
1342         status = readl_relaxed(dmadev->base + STM32_MDMA_GISR0);
1343         if (status) {
1344                 id = __ffs(status);
1345         } else {
1346                 status = readl_relaxed(dmadev->base + STM32_MDMA_GISR1);
1347                 if (!status) {
1348                         dev_dbg(mdma2dev(dmadev), "spurious interrupt\n");
1349                         return IRQ_NONE;
1350                 }
1351                 id = __ffs(status);
1352                 /*
1353                  * GISR0 reports status for channels 0 to 31, while
1354                  * GISR1 reports status for channels 32 to 62
1355                  */
1356                 id += 32;
1357         }
1358
1359         if (id >= dmadev->nr_channels) {
1360                 dev_err(mdma2dev(dmadev), "MDMA channel %d not initialized\n", id);
1361                 goto exit;
1362         }
1363         chan = &dmadev->chan[id];
1364
1365         /* Handle interrupt for the channel */
1366         spin_lock(&chan->vchan.lock);
1367         status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(chan->id));
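        /*
         * In CCR the interrupt enable bits sit one position above the
         * matching flag bits in CISR (e.g. TEIE is bit 1 while TEIF is
         * bit 0), hence the mask and shift below before comparing the
         * enables against the status flags.
         */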
1368         ien = stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id));
1369         ien &= STM32_MDMA_CCR_IRQ_MASK;
1370         ien >>= 1;
1371
1372         if (!(status & ien)) {
1373                 spin_unlock(&chan->vchan.lock);
1374                 dev_dbg(chan2dev(chan),
1375                         "spurious interrupt (status=0x%04x, ien=0x%04x)\n",
1376                         status, ien);
1377                 return IRQ_NONE;
1378         }
1379
1380         flag = __ffs(status & ien);
1381         reg = STM32_MDMA_CIFCR(chan->id);
1382
1383         switch (1 << flag) {
1384         case STM32_MDMA_CISR_TEIF:
1385                 id = chan->id;
1386                 status = readl_relaxed(dmadev->base + STM32_MDMA_CESR(id));
1387                 dev_err(chan2dev(chan), "Transfer Err: stat=0x%08x\n", status);
1388                 stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CTEIF);
1389                 break;
1390
1391         case STM32_MDMA_CISR_CTCIF:
1392                 stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CCTCIF);
1393                 stm32_mdma_xfer_end(chan);
1394                 break;
1395
1396         case STM32_MDMA_CISR_BRTIF:
1397                 stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CBRTIF);
1398                 break;
1399
1400         case STM32_MDMA_CISR_BTIF:
1401                 stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CBTIF);
1402                 chan->curr_hwdesc++;
1403                 if (chan->desc && chan->desc->cyclic) {
1404                         if (chan->curr_hwdesc == chan->desc->count)
1405                                 chan->curr_hwdesc = 0;
1406                         vchan_cyclic_callback(&chan->desc->vdesc);
1407                 }
1408                 break;
1409
1410         case STM32_MDMA_CISR_TCIF:
1411                 stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CLTCIF);
1412                 break;
1413
1414         default:
1415                 dev_err(chan2dev(chan), "interrupt %d unhandled (status=0x%04x)\n",
1416                         1 << flag, status);
1417         }
1418
1419         spin_unlock(&chan->vchan.lock);
1420
1421 exit:
1422         return IRQ_HANDLED;
1423 }
1424
1425 static int stm32_mdma_alloc_chan_resources(struct dma_chan *c)
1426 {
1427         struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1428         struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
1429         int ret;
1430
1431         chan->desc_pool = dmam_pool_create(dev_name(&c->dev->device),
1432                                            c->device->dev,
1433                                            sizeof(struct stm32_mdma_hwdesc),
1434                                           __alignof__(struct stm32_mdma_hwdesc),
1435                                            0);
1436         if (!chan->desc_pool) {
1437                 dev_err(chan2dev(chan), "failed to allocate descriptor pool\n");
1438                 return -ENOMEM;
1439         }
1440
1441         ret = clk_prepare_enable(dmadev->clk);
1442         if (ret < 0) {
1443                 dev_err(chan2dev(chan), "clk_prepare_enable failed: %d\n", ret);
1444                 return ret;
1445         }
1446
1447         ret = stm32_mdma_disable_chan(chan);
1448         if (ret < 0)
1449                 clk_disable_unprepare(dmadev->clk);
1450
1451         return ret;
1452 }
1453
1454 static void stm32_mdma_free_chan_resources(struct dma_chan *c)
1455 {
1456         struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1457         struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
1458         unsigned long flags;
1459
1460         dev_dbg(chan2dev(chan), "Freeing channel %d\n", chan->id);
1461
1462         if (chan->busy) {
1463                 spin_lock_irqsave(&chan->vchan.lock, flags);
1464                 stm32_mdma_stop(chan);
1465                 chan->desc = NULL;
1466                 spin_unlock_irqrestore(&chan->vchan.lock, flags);
1467         }
1468
1469         clk_disable_unprepare(dmadev->clk);
1470         vchan_free_chan_resources(to_virt_chan(c));
1471         dmam_pool_destroy(chan->desc_pool);
1472         chan->desc_pool = NULL;
1473 }
1474
1475 static struct dma_chan *stm32_mdma_of_xlate(struct of_phandle_args *dma_spec,
1476                                             struct of_dma *ofdma)
1477 {
1478         struct stm32_mdma_device *dmadev = ofdma->of_dma_data;
1479         struct stm32_mdma_chan *chan;
1480         struct dma_chan *c;
1481         struct stm32_mdma_chan_config config;
1482
1483         if (dma_spec->args_count < 5) {
1484                 dev_err(mdma2dev(dmadev), "Bad number of args\n");
1485                 return NULL;
1486         }
1487
1488         config.request = dma_spec->args[0];
1489         config.priority_level = dma_spec->args[1];
1490         config.transfer_config = dma_spec->args[2];
1491         config.mask_addr = dma_spec->args[3];
1492         config.mask_data = dma_spec->args[4];
1493
1494         if (config.request >= dmadev->nr_requests) {
1495                 dev_err(mdma2dev(dmadev), "Bad request line\n");
1496                 return NULL;
1497         }
1498
1499         if (config.priority_level > STM32_MDMA_VERY_HIGH_PRIORITY) {
1500                 dev_err(mdma2dev(dmadev), "Priority level not supported\n");
1501                 return NULL;
1502         }
1503
1504         c = dma_get_any_slave_channel(&dmadev->ddev);
1505         if (!c) {
1506                 dev_err(mdma2dev(dmadev), "No more channels available\n");
1507                 return NULL;
1508         }
1509
1510         chan = to_stm32_mdma_chan(c);
1511         chan->chan_config = config;
1512
1513         return c;
1514 }
1515
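/*
 * Illustrative devicetree usage (node names and values are hypothetical,
 * not taken from a real board): a client requests a channel with the
 * five cells parsed above, in this order: request line, priority level,
 * transfer configuration, mask address, mask data.
 *
 *      client: device@40000000 {
 *              dmas = <&mdma1 0 0x2 0x0 0x0 0x0>;
 *              dma-names = "rx";
 *      };
 */
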
1516 static const struct of_device_id stm32_mdma_of_match[] = {
1517         { .compatible = "st,stm32h7-mdma", },
1518         { /* sentinel */ },
1519 };
1520 MODULE_DEVICE_TABLE(of, stm32_mdma_of_match);
1521
1522 static int stm32_mdma_probe(struct platform_device *pdev)
1523 {
1524         struct stm32_mdma_chan *chan;
1525         struct stm32_mdma_device *dmadev;
1526         struct dma_device *dd;
1527         struct device_node *of_node;
1528         struct resource *res;
1529         u32 nr_channels, nr_requests;
1530         int i, count, ret;
1531
1532         of_node = pdev->dev.of_node;
1533         if (!of_node)
1534                 return -ENODEV;
1535
1536         ret = device_property_read_u32(&pdev->dev, "dma-channels",
1537                                        &nr_channels);
1538         if (ret) {
1539                 nr_channels = STM32_MDMA_MAX_CHANNELS;
1540                 dev_warn(&pdev->dev, "MDMA defaulting to %i channels\n",
1541                          nr_channels);
1542         }
1543
1544         ret = device_property_read_u32(&pdev->dev, "dma-requests",
1545                                        &nr_requests);
1546         if (ret) {
1547                 nr_requests = STM32_MDMA_MAX_REQUESTS;
1548                 dev_warn(&pdev->dev, "MDMA defaulting to %i request lines\n",
1549                          nr_requests);
1550         }
1551
1552         count = device_property_read_u32_array(&pdev->dev, "st,ahb-addr-masks",
1553                                                NULL, 0);
1554         if (count < 0)
1555                 count = 0;
1556
1557         dmadev = devm_kzalloc(&pdev->dev, sizeof(*dmadev) + sizeof(u32) * count,
1558                               GFP_KERNEL);
1559         if (!dmadev)
1560                 return -ENOMEM;
1561
1562         dmadev->nr_channels = nr_channels;
1563         dmadev->nr_requests = nr_requests;
1564         device_property_read_u32_array(&pdev->dev, "st,ahb-addr-masks",
1565                                        dmadev->ahb_addr_masks,
1566                                        count);
1567         dmadev->nr_ahb_addr_masks = count;
1568
1569         res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
1570         dmadev->base = devm_ioremap_resource(&pdev->dev, res);
1571         if (IS_ERR(dmadev->base))
1572                 return PTR_ERR(dmadev->base);
1573
1574         dmadev->clk = devm_clk_get(&pdev->dev, NULL);
1575         if (IS_ERR(dmadev->clk)) {
1576                 ret = PTR_ERR(dmadev->clk);
1577                 if (ret == -EPROBE_DEFER)
1578                         dev_info(&pdev->dev, "Missing controller clock\n");
1579                 return ret;
1580         }
1581
1582         dmadev->rst = devm_reset_control_get(&pdev->dev, NULL);
1583         if (!IS_ERR(dmadev->rst)) {
1584                 reset_control_assert(dmadev->rst);
1585                 udelay(2);
1586                 reset_control_deassert(dmadev->rst);
1587         }
1588
1589         dd = &dmadev->ddev;
1590         dma_cap_set(DMA_SLAVE, dd->cap_mask);
1591         dma_cap_set(DMA_PRIVATE, dd->cap_mask);
1592         dma_cap_set(DMA_CYCLIC, dd->cap_mask);
1593         dma_cap_set(DMA_MEMCPY, dd->cap_mask);
1594         dd->device_alloc_chan_resources = stm32_mdma_alloc_chan_resources;
1595         dd->device_free_chan_resources = stm32_mdma_free_chan_resources;
1596         dd->device_tx_status = stm32_mdma_tx_status;
1597         dd->device_issue_pending = stm32_mdma_issue_pending;
1598         dd->device_prep_slave_sg = stm32_mdma_prep_slave_sg;
1599         dd->device_prep_dma_cyclic = stm32_mdma_prep_dma_cyclic;
1600         dd->device_prep_dma_memcpy = stm32_mdma_prep_dma_memcpy;
1601         dd->device_config = stm32_mdma_slave_config;
1602         dd->device_pause = stm32_mdma_pause;
1603         dd->device_resume = stm32_mdma_resume;
1604         dd->device_terminate_all = stm32_mdma_terminate_all;
1605         dd->device_synchronize = stm32_mdma_synchronize;
1606         dd->src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) |
1607                 BIT(DMA_SLAVE_BUSWIDTH_2_BYTES) |
1608                 BIT(DMA_SLAVE_BUSWIDTH_4_BYTES) |
1609                 BIT(DMA_SLAVE_BUSWIDTH_8_BYTES);
1610         dd->dst_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) |
1611                 BIT(DMA_SLAVE_BUSWIDTH_2_BYTES) |
1612                 BIT(DMA_SLAVE_BUSWIDTH_4_BYTES) |
1613                 BIT(DMA_SLAVE_BUSWIDTH_8_BYTES);
1614         dd->directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV) |
1615                 BIT(DMA_MEM_TO_MEM);
1616         dd->residue_granularity = DMA_RESIDUE_GRANULARITY_BURST;
1617         dd->max_burst = STM32_MDMA_MAX_BURST;
1618         dd->dev = &pdev->dev;
1619         INIT_LIST_HEAD(&dd->channels);
1620
1621         for (i = 0; i < dmadev->nr_channels; i++) {
1622                 chan = &dmadev->chan[i];
1623                 chan->id = i;
1624                 chan->vchan.desc_free = stm32_mdma_desc_free;
1625                 vchan_init(&chan->vchan, dd);
1626         }
1627
1628         dmadev->irq = platform_get_irq(pdev, 0);
1629         if (dmadev->irq < 0) {
1630                 dev_err(&pdev->dev, "failed to get IRQ\n");
1631                 return dmadev->irq;
1632         }
1633
1634         ret = devm_request_irq(&pdev->dev, dmadev->irq, stm32_mdma_irq_handler,
1635                                0, dev_name(&pdev->dev), dmadev);
1636         if (ret) {
1637                 dev_err(&pdev->dev, "failed to request IRQ\n");
1638                 return ret;
1639         }
1640
1641         ret = dma_async_device_register(dd);
1642         if (ret)
1643                 return ret;
1644
1645         ret = of_dma_controller_register(of_node, stm32_mdma_of_xlate, dmadev);
1646         if (ret < 0) {
1647                 dev_err(&pdev->dev,
1648                         "STM32 MDMA OF registration failed %d\n", ret);
1649                 goto err_unregister;
1650         }
1651
1652         platform_set_drvdata(pdev, dmadev);
1653
1654         dev_info(&pdev->dev, "STM32 MDMA driver registered\n");
1655
1656         return 0;
1657
1658 err_unregister:
1659         dma_async_device_unregister(dd);
1660
1661         return ret;
1662 }
1663
1664 static struct platform_driver stm32_mdma_driver = {
1665         .probe = stm32_mdma_probe,
1666         .driver = {
1667                 .name = "stm32-mdma",
1668                 .of_match_table = stm32_mdma_of_match,
1669         },
1670 };
1671
1672 static int __init stm32_mdma_init(void)
1673 {
1674         return platform_driver_register(&stm32_mdma_driver);
1675 }
1676
1677 subsys_initcall(stm32_mdma_init);
1678
1679 MODULE_DESCRIPTION("Driver for STM32 MDMA controller");
1680 MODULE_AUTHOR("M'boumba Cedric Madianga <cedric.madianga@gmail.com>");
1681 MODULE_AUTHOR("Pierre-Yves Mordret <pierre-yves.mordret@st.com>");
1682 MODULE_LICENSE("GPL v2");