1 From 9026efd53eae4b320941e581f141ddc87f2adc20 Mon Sep 17 00:00:00 2001
2 From: Martin Sperl <kernel@martin.sperl.org>
3 Date: Wed, 16 Mar 2016 12:25:01 -0700
4 Subject: [PATCH] dmaengine: bcm2835: add slave_sg support to bcm2835-dma
6 Add slave_sg support to bcm2835-dma using shared allocation
7 code for bcm2835_desc and DMA-control blocks already used by
10 Note that bcm2835_dma_callback had to get modified to support
11 both modes of operation (cyclic and non-cyclic).
13 Tested with:
14 * Hifiberry I2S card (using cyclic DMA)
15 * fb_st7735r SPI-framebuffer (using slave_sg DMA via spi-bcm2835)
16 playing BigBuckBunny for audio and video.
18 Signed-off-by: Martin Sperl <kernel@martin.sperl.org>
19 Reviewed-by: Eric Anholt <eric@anholt.net>
20 Signed-off-by: Eric Anholt <eric@anholt.net>
21 Signed-off-by: Vinod Koul <vinod.koul@intel.com>
23 drivers/dma/bcm2835-dma.c | 113 ++++++++++++++++++++++++++++++++++++++++++++--
24 1 file changed, 108 insertions(+), 5 deletions(-)
26 --- a/drivers/dma/bcm2835-dma.c
27 +++ b/drivers/dma/bcm2835-dma.c
28 @@ -260,6 +260,23 @@ static void bcm2835_dma_create_cb_set_le
29 control_block->info |= finalextrainfo;
32 +static inline size_t bcm2835_dma_count_frames_for_sg(
33 + struct bcm2835_chan *c,
34 + struct scatterlist *sgl,
35 + unsigned int sg_len)
38 + struct scatterlist *sgent;
40 + size_t plength = bcm2835_dma_max_frame_length(c);
42 + for_each_sg(sgl, sgent, sg_len, i)
43 + frames += bcm2835_dma_frames_for_length(
44 + sg_dma_len(sgent), plength);
50 * bcm2835_dma_create_cb_chain - create a control block and fills data in
52 @@ -361,6 +378,32 @@ error_cb:
56 +static void bcm2835_dma_fill_cb_chain_with_sg(
57 + struct dma_chan *chan,
58 + enum dma_transfer_direction direction,
59 + struct bcm2835_cb_entry *cb,
60 + struct scatterlist *sgl,
61 + unsigned int sg_len)
63 + struct bcm2835_chan *c = to_bcm2835_dma_chan(chan);
64 + size_t max_len = bcm2835_dma_max_frame_length(c);
65 + unsigned int i, len;
67 + struct scatterlist *sgent;
69 + for_each_sg(sgl, sgent, sg_len, i) {
70 + for (addr = sg_dma_address(sgent), len = sg_dma_len(sgent);
72 + addr += cb->cb->length, len -= cb->cb->length, cb++) {
73 + if (direction == DMA_DEV_TO_MEM)
77 + cb->cb->length = min(len, max_len);
82 static int bcm2835_dma_abort(void __iomem *chan_base)
85 @@ -428,13 +471,19 @@ static irqreturn_t bcm2835_dma_callback(
89 - /* TODO Only works for cyclic DMA */
90 - vchan_cyclic_callback(&d->vd);
92 + /* call the cyclic callback */
93 + vchan_cyclic_callback(&d->vd);
95 + /* Keep the DMA engine running */
96 + writel(BCM2835_DMA_ACTIVE,
97 + c->chan_base + BCM2835_DMA_CS);
99 + vchan_cookie_complete(&c->desc->vd);
100 + bcm2835_dma_start_desc(c);
104 - /* Keep the DMA engine running */
105 - writel(BCM2835_DMA_ACTIVE, c->chan_base + BCM2835_DMA_CS);
107 spin_unlock_irqrestore(&c->vc.lock, flags);
110 @@ -548,6 +597,58 @@ static void bcm2835_dma_issue_pending(st
111 spin_unlock_irqrestore(&c->vc.lock, flags);
114 +static struct dma_async_tx_descriptor *bcm2835_dma_prep_slave_sg(
115 + struct dma_chan *chan,
116 + struct scatterlist *sgl, unsigned int sg_len,
117 + enum dma_transfer_direction direction,
118 + unsigned long flags, void *context)
120 + struct bcm2835_chan *c = to_bcm2835_dma_chan(chan);
121 + struct bcm2835_desc *d;
122 + dma_addr_t src = 0, dst = 0;
123 + u32 info = BCM2835_DMA_WAIT_RESP;
124 + u32 extra = BCM2835_DMA_INT_EN;
127 + if (!is_slave_direction(direction)) {
128 + dev_err(chan->device->dev,
129 + "%s: bad direction?\n", __func__);
134 + info |= BCM2835_DMA_PER_MAP(c->dreq);
136 + if (direction == DMA_DEV_TO_MEM) {
137 + if (c->cfg.src_addr_width != DMA_SLAVE_BUSWIDTH_4_BYTES)
139 + src = c->cfg.src_addr;
140 + info |= BCM2835_DMA_S_DREQ | BCM2835_DMA_D_INC;
142 + if (c->cfg.dst_addr_width != DMA_SLAVE_BUSWIDTH_4_BYTES)
144 + dst = c->cfg.dst_addr;
145 + info |= BCM2835_DMA_D_DREQ | BCM2835_DMA_S_INC;
148 + /* count frames in sg list */
149 + frames = bcm2835_dma_count_frames_for_sg(c, sgl, sg_len);
151 + /* allocate the CB chain */
152 + d = bcm2835_dma_create_cb_chain(chan, direction, false,
154 + frames, src, dst, 0, 0,
159 + /* fill in frames with scatterlist pointers */
160 + bcm2835_dma_fill_cb_chain_with_sg(chan, direction, d->cb_list,
163 + return vchan_tx_prep(&c->vc, &d->vd, flags);
166 static struct dma_async_tx_descriptor *bcm2835_dma_prep_dma_cyclic(
167 struct dma_chan *chan, dma_addr_t buf_addr, size_t buf_len,
168 size_t period_len, enum dma_transfer_direction direction,
169 @@ -778,11 +879,13 @@ static int bcm2835_dma_probe(struct plat
170 dma_cap_set(DMA_SLAVE, od->ddev.cap_mask);
171 dma_cap_set(DMA_PRIVATE, od->ddev.cap_mask);
172 dma_cap_set(DMA_CYCLIC, od->ddev.cap_mask);
173 + dma_cap_set(DMA_SLAVE, od->ddev.cap_mask);
174 od->ddev.device_alloc_chan_resources = bcm2835_dma_alloc_chan_resources;
175 od->ddev.device_free_chan_resources = bcm2835_dma_free_chan_resources;
176 od->ddev.device_tx_status = bcm2835_dma_tx_status;
177 od->ddev.device_issue_pending = bcm2835_dma_issue_pending;
178 od->ddev.device_prep_dma_cyclic = bcm2835_dma_prep_dma_cyclic;
179 + od->ddev.device_prep_slave_sg = bcm2835_dma_prep_slave_sg;
180 od->ddev.device_config = bcm2835_dma_slave_config;
181 od->ddev.device_terminate_all = bcm2835_dma_terminate_all;
182 od->ddev.src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_4_BYTES);