1 // SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (c) 2023 Cai Huoqing
 * Synopsys DesignWare HDMA v0 core
 */
7 #include <linux/bitfield.h>
8 #include <linux/irqreturn.h>
9 #include <linux/io-64-nonatomic-lo-hi.h>
11 #include "dw-edma-core.h"
12 #include "dw-hdma-v0-core.h"
13 #include "dw-hdma-v0-regs.h"
14 #include "dw-hdma-v0-debugfs.h"
16 enum dw_hdma_control {
17 DW_HDMA_V0_CB = BIT(0),
18 DW_HDMA_V0_TCB = BIT(1),
19 DW_HDMA_V0_LLP = BIT(2),
20 DW_HDMA_V0_LIE = BIT(3),
21 DW_HDMA_V0_RIE = BIT(4),
22 DW_HDMA_V0_CCS = BIT(8),
23 DW_HDMA_V0_LLE = BIT(9),
26 static inline struct dw_hdma_v0_regs __iomem *__dw_regs(struct dw_edma *dw)
28 return dw->chip->reg_base;
31 static inline struct dw_hdma_v0_ch_regs __iomem *
32 __dw_ch_regs(struct dw_edma *dw, enum dw_edma_dir dir, u16 ch)
34 if (dir == EDMA_DIR_WRITE)
35 return &(__dw_regs(dw)->ch[ch].wr);
37 return &(__dw_regs(dw)->ch[ch].rd);
/* Write 32-bit channel register @name of channel @ch in direction @dir */
#define SET_CH_32(dw, dir, ch, name, value) \
	writel(value, &(__dw_ch_regs(dw, dir, ch)->name))
/* Read 32-bit channel register @name of channel @ch in direction @dir */
#define GET_CH_32(dw, dir, ch, name) \
	readl(&(__dw_ch_regs(dw, dir, ch)->name))
/*
 * Write 32-bit register @name of channel @ch in BOTH directions.
 * Wrapped in do { } while (0) so the two statements stay a single
 * unit when used under an un-braced if/else.
 */
#define SET_BOTH_CH_32(dw, ch, name, value)				\
	do {								\
		writel(value, &(__dw_ch_regs(dw, EDMA_DIR_WRITE, ch)->name)); \
		writel(value, &(__dw_ch_regs(dw, EDMA_DIR_READ, ch)->name)); \
	} while (0)
52 /* HDMA management callbacks */
53 static void dw_hdma_v0_core_off(struct dw_edma *dw)
57 for (id = 0; id < HDMA_V0_MAX_NR_CH; id++) {
58 SET_BOTH_CH_32(dw, id, int_setup,
59 HDMA_V0_STOP_INT_MASK | HDMA_V0_ABORT_INT_MASK);
60 SET_BOTH_CH_32(dw, id, int_clear,
61 HDMA_V0_STOP_INT_MASK | HDMA_V0_ABORT_INT_MASK);
62 SET_BOTH_CH_32(dw, id, ch_en, 0);
66 static u16 dw_hdma_v0_core_ch_count(struct dw_edma *dw, enum dw_edma_dir dir)
71 for (id = 0; id < HDMA_V0_MAX_NR_CH; id++) {
72 if (GET_CH_32(dw, id, dir, ch_en) & BIT(0))
76 if (num_ch > HDMA_V0_MAX_NR_CH)
77 num_ch = HDMA_V0_MAX_NR_CH;
82 static enum dma_status dw_hdma_v0_core_ch_status(struct dw_edma_chan *chan)
84 struct dw_edma *dw = chan->dw;
87 tmp = FIELD_GET(HDMA_V0_CH_STATUS_MASK,
88 GET_CH_32(dw, chan->id, chan->dir, ch_stat));
91 return DMA_IN_PROGRESS;
98 static void dw_hdma_v0_core_clear_done_int(struct dw_edma_chan *chan)
100 struct dw_edma *dw = chan->dw;
102 SET_CH_32(dw, chan->dir, chan->id, int_clear, HDMA_V0_STOP_INT_MASK);
105 static void dw_hdma_v0_core_clear_abort_int(struct dw_edma_chan *chan)
107 struct dw_edma *dw = chan->dw;
109 SET_CH_32(dw, chan->dir, chan->id, int_clear, HDMA_V0_ABORT_INT_MASK);
112 static u32 dw_hdma_v0_core_status_int(struct dw_edma_chan *chan)
114 struct dw_edma *dw = chan->dw;
116 return GET_CH_32(dw, chan->dir, chan->id, int_stat);
120 dw_hdma_v0_core_handle_int(struct dw_edma_irq *dw_irq, enum dw_edma_dir dir,
121 dw_edma_handler_t done, dw_edma_handler_t abort)
123 struct dw_edma *dw = dw_irq->dw;
124 unsigned long total, pos, val;
125 irqreturn_t ret = IRQ_NONE;
126 struct dw_edma_chan *chan;
127 unsigned long off, mask;
129 if (dir == EDMA_DIR_WRITE) {
130 total = dw->wr_ch_cnt;
132 mask = dw_irq->wr_mask;
134 total = dw->rd_ch_cnt;
136 mask = dw_irq->rd_mask;
139 for_each_set_bit(pos, &mask, total) {
140 chan = &dw->chan[pos + off];
142 val = dw_hdma_v0_core_status_int(chan);
143 if (FIELD_GET(HDMA_V0_STOP_INT_MASK, val)) {
144 dw_hdma_v0_core_clear_done_int(chan);
150 if (FIELD_GET(HDMA_V0_ABORT_INT_MASK, val)) {
151 dw_hdma_v0_core_clear_abort_int(chan);
161 static void dw_hdma_v0_write_ll_data(struct dw_edma_chunk *chunk, int i,
162 u32 control, u32 size, u64 sar, u64 dar)
164 ptrdiff_t ofs = i * sizeof(struct dw_hdma_v0_lli);
166 if (chunk->chan->dw->chip->flags & DW_EDMA_CHIP_LOCAL) {
167 struct dw_hdma_v0_lli *lli = chunk->ll_region.vaddr.mem + ofs;
169 lli->control = control;
170 lli->transfer_size = size;
174 struct dw_hdma_v0_lli __iomem *lli = chunk->ll_region.vaddr.io + ofs;
176 writel(control, &lli->control);
177 writel(size, &lli->transfer_size);
178 writeq(sar, &lli->sar.reg);
179 writeq(dar, &lli->dar.reg);
183 static void dw_hdma_v0_write_ll_link(struct dw_edma_chunk *chunk,
184 int i, u32 control, u64 pointer)
186 ptrdiff_t ofs = i * sizeof(struct dw_hdma_v0_lli);
188 if (chunk->chan->dw->chip->flags & DW_EDMA_CHIP_LOCAL) {
189 struct dw_hdma_v0_llp *llp = chunk->ll_region.vaddr.mem + ofs;
191 llp->control = control;
192 llp->llp.reg = pointer;
194 struct dw_hdma_v0_llp __iomem *llp = chunk->ll_region.vaddr.io + ofs;
196 writel(control, &llp->control);
197 writeq(pointer, &llp->llp.reg);
201 static void dw_hdma_v0_core_write_chunk(struct dw_edma_chunk *chunk)
203 struct dw_edma_burst *child;
204 struct dw_edma_chan *chan = chunk->chan;
205 u32 control = 0, i = 0;
209 control = DW_HDMA_V0_CB;
211 j = chunk->bursts_alloc;
212 list_for_each_entry(child, &chunk->burst->list, list) {
215 control |= DW_HDMA_V0_LIE;
216 if (!(chan->dw->chip->flags & DW_EDMA_CHIP_LOCAL))
217 control |= DW_HDMA_V0_RIE;
220 dw_hdma_v0_write_ll_data(chunk, i++, control, child->sz,
221 child->sar, child->dar);
224 control = DW_HDMA_V0_LLP | DW_HDMA_V0_TCB;
226 control |= DW_HDMA_V0_CB;
228 dw_hdma_v0_write_ll_link(chunk, i, control, chunk->ll_region.paddr);
231 static void dw_hdma_v0_core_start(struct dw_edma_chunk *chunk, bool first)
233 struct dw_edma_chan *chan = chunk->chan;
234 struct dw_edma *dw = chan->dw;
237 dw_hdma_v0_core_write_chunk(chunk);
241 SET_CH_32(dw, chan->dir, chan->id, ch_en, BIT(0));
242 /* Interrupt enable&unmask - done, abort */
243 tmp = GET_CH_32(dw, chan->dir, chan->id, int_setup) |
244 HDMA_V0_STOP_INT_MASK | HDMA_V0_ABORT_INT_MASK |
245 HDMA_V0_LOCAL_STOP_INT_EN | HDMA_V0_LOCAL_STOP_INT_EN;
246 SET_CH_32(dw, chan->dir, chan->id, int_setup, tmp);
247 /* Channel control */
248 SET_CH_32(dw, chan->dir, chan->id, control1, HDMA_V0_LINKLIST_EN);
250 /* llp is not aligned on 64bit -> keep 32bit accesses */
251 SET_CH_32(dw, chan->dir, chan->id, llp.lsb,
252 lower_32_bits(chunk->ll_region.paddr));
253 SET_CH_32(dw, chan->dir, chan->id, llp.msb,
254 upper_32_bits(chunk->ll_region.paddr));
256 /* Set consumer cycle */
257 SET_CH_32(dw, chan->dir, chan->id, cycle_sync,
258 HDMA_V0_CONSUMER_CYCLE_STAT | HDMA_V0_CONSUMER_CYCLE_BIT);
260 SET_CH_32(dw, chan->dir, chan->id, doorbell, HDMA_V0_DOORBELL_START);
263 static void dw_hdma_v0_core_ch_config(struct dw_edma_chan *chan)
265 struct dw_edma *dw = chan->dw;
267 /* MSI done addr - low, high */
268 SET_CH_32(dw, chan->dir, chan->id, msi_stop.lsb, chan->msi.address_lo);
269 SET_CH_32(dw, chan->dir, chan->id, msi_stop.msb, chan->msi.address_hi);
270 /* MSI abort addr - low, high */
271 SET_CH_32(dw, chan->dir, chan->id, msi_abort.lsb, chan->msi.address_lo);
272 SET_CH_32(dw, chan->dir, chan->id, msi_abort.msb, chan->msi.address_hi);
273 /* config MSI data */
274 SET_CH_32(dw, chan->dir, chan->id, msi_msgdata, chan->msi.data);
277 /* HDMA debugfs callbacks */
/* Thin adapter so the ops table can expose the debugfs hook */
static void dw_hdma_v0_core_debugfs_on(struct dw_edma *dw)
{
	dw_hdma_v0_debugfs_on(dw);
}
283 static const struct dw_edma_core_ops dw_hdma_v0_core = {
284 .off = dw_hdma_v0_core_off,
285 .ch_count = dw_hdma_v0_core_ch_count,
286 .ch_status = dw_hdma_v0_core_ch_status,
287 .handle_int = dw_hdma_v0_core_handle_int,
288 .start = dw_hdma_v0_core_start,
289 .ch_config = dw_hdma_v0_core_ch_config,
290 .debugfs_on = dw_hdma_v0_core_debugfs_on,
293 void dw_hdma_v0_core_register(struct dw_edma *dw)
295 dw->core = &dw_hdma_v0_core;