/*
 * Dmaengine driver base library for DMA controllers, found on SH-based SoCs
 *
 * extracted from shdma.c and headers
 *
 * Copyright (C) 2011-2012 Guennadi Liakhovetski <g.liakhovetski@gmx.de>
 * Copyright (C) 2009 Nobuhiro Iwamatsu <iwamatsu.nobuhiro@renesas.com>
 * Copyright (C) 2009 Renesas Solutions, Inc. All rights reserved.
 * Copyright (C) 2007 Freescale Semiconductor, Inc. All rights reserved.
 *
 * This is free software; you can redistribute it and/or modify
 * it under the terms of version 2 of the GNU General Public License as
 * published by the Free Software Foundation.
 */

#ifndef SHDMA_BASE_H
#define SHDMA_BASE_H

#include <linux/dmaengine.h>
#include <linux/interrupt.h>
#include <linux/list.h>
#include <linux/types.h>
/**
 * enum shdma_pm_state - DMA channel PM state
 * @SHDMA_PM_ESTABLISHED: either idle or during data transfer
 * @SHDMA_PM_BUSY: during the transfer preparation, when we have to
 *		   drop the lock temporarily
 * @SHDMA_PM_PENDING: transfers pending
 */
enum shdma_pm_state {
	SHDMA_PM_ESTABLISHED,
	SHDMA_PM_BUSY,
	SHDMA_PM_PENDING,
};

struct device;

/*
 * Drivers using this library are expected to embed struct shdma_dev,
 * struct shdma_chan, struct shdma_desc, and struct shdma_slave
 * in their respective device, channel, descriptor, and slave objects.
 */
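
/*
 * For example, a driver might wrap struct shdma_chan as below and recover
 * its own channel object with container_of(). This is a minimal sketch:
 * the my_dmae_chan type and the to_my_chan() helper are hypothetical, not
 * part of this API.
 *
 *	struct my_dmae_chan {
 *		struct shdma_chan shdma_chan;
 *		void __iomem *base;
 *	};
 *
 *	static struct my_dmae_chan *to_my_chan(struct shdma_chan *schan)
 *	{
 *		return container_of(schan, struct my_dmae_chan, shdma_chan);
 *	}
 */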

struct shdma_slave {
	int slave_id;
};

struct shdma_desc {
	struct list_head node;
	struct dma_async_tx_descriptor async_tx;
	enum dma_transfer_direction direction;
	size_t partial;
	dma_cookie_t cookie;
	int chunks;
	int mark;
	bool cyclic;			/* used as cyclic transfer */
};

struct shdma_chan {
	spinlock_t chan_lock;		/* Channel operation lock */
	struct list_head ld_queue;	/* Link descriptors queue */
	struct list_head ld_free;	/* Free link descriptors */
	struct dma_chan dma_chan;	/* DMA channel */
	struct device *dev;		/* Channel device */
	void *desc;			/* buffer for descriptor array */
	int desc_num;			/* desc count */
	size_t max_xfer_len;		/* max transfer length */
	int id;				/* Raw id of this channel */
	int irq;			/* Channel IRQ */
	int slave_id;			/* Client ID for slave DMA */
	int real_slave_id;		/* argument passed to filter function */
	int hw_req;			/* DMA request line for slave DMA - same
					 * as MID/RID, used with DT */
	enum shdma_pm_state pm_state;
};

/**
 * struct shdma_ops - simple DMA driver operations
 * @desc_completed: return true if this descriptor has just completed (atomic)
 * @halt_channel: stop DMA channel operation (atomic)
 * @channel_busy: return true if the channel is busy (atomic)
 * @slave_addr: return the slave DMA address
 * @desc_setup: set up the hardware-specific descriptor portion (atomic)
 * @set_slave: bind the channel to a slave
 * @setup_xfer: configure channel hardware for operation (atomic)
 * @start_xfer: start the DMA transfer (atomic)
 * @embedded_desc: return the Nth struct shdma_desc pointer from the
 *		   descriptor array
 * @chan_irq: process a channel IRQ, return true if a transfer has
 *	      completed (atomic)
 * @get_partial: return the residue of a partially completed descriptor
 */
struct shdma_ops {
	bool (*desc_completed)(struct shdma_chan *, struct shdma_desc *);
	void (*halt_channel)(struct shdma_chan *);
	bool (*channel_busy)(struct shdma_chan *);
	dma_addr_t (*slave_addr)(struct shdma_chan *);
	int (*desc_setup)(struct shdma_chan *, struct shdma_desc *,
			  dma_addr_t, dma_addr_t, size_t *);
	int (*set_slave)(struct shdma_chan *, int, dma_addr_t, bool);
	void (*setup_xfer)(struct shdma_chan *, int);
	void (*start_xfer)(struct shdma_chan *, struct shdma_desc *);
	struct shdma_desc *(*embedded_desc)(void *, int);
	bool (*chan_irq)(struct shdma_chan *, int);
	size_t (*get_partial)(struct shdma_chan *, struct shdma_desc *);
};
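
/*
 * A sketch of how a driver could populate these operations; the my_*
 * callback names are hypothetical. Callbacks marked (atomic) above may
 * be called in atomic context and must not sleep.
 *
 *	static const struct shdma_ops my_shdma_ops = {
 *		.desc_completed	= my_desc_completed,
 *		.halt_channel	= my_halt_channel,
 *		.channel_busy	= my_channel_busy,
 *		.slave_addr	= my_slave_addr,
 *		.desc_setup	= my_desc_setup,
 *		.set_slave	= my_set_slave,
 *		.setup_xfer	= my_setup_xfer,
 *		.start_xfer	= my_start_xfer,
 *		.embedded_desc	= my_embedded_desc,
 *		.chan_irq	= my_chan_irq,
 *		.get_partial	= my_get_partial,
 *	};
 */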

struct shdma_dev {
	struct dma_device dma_dev;
	struct shdma_chan **schan;
	const struct shdma_ops *ops;
	size_t desc_size;
};

#define shdma_for_each_chan(c, d, i) for (i = 0, c = (d)->schan[0]; \
				i < (d)->dma_dev.chancnt; c = (d)->schan[++i])
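
/*
 * Example use of the iterator above (a sketch; schan, sdev and i are the
 * caller's own variables):
 *
 *	struct shdma_chan *schan;
 *	int i;
 *
 *	shdma_for_each_chan(schan, sdev, i)
 *		shdma_chan_remove(schan);
 */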

int shdma_request_irq(struct shdma_chan *, int,
		      unsigned long, const char *);
bool shdma_reset(struct shdma_dev *sdev);
void shdma_chan_probe(struct shdma_dev *sdev,
		      struct shdma_chan *schan, int id);
void shdma_chan_remove(struct shdma_chan *schan);
int shdma_init(struct device *dev, struct shdma_dev *sdev,
	       int chan_num);
void shdma_cleanup(struct shdma_dev *sdev);
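
/*
 * Typical probe-time sequence (a sketch with error handling omitted;
 * pdev, sdev, and MY_NR_CHANNELS are the caller's own):
 *
 *	sdev->ops = &my_shdma_ops;
 *	sdev->desc_size = sizeof(struct my_dmae_desc);
 *	err = shdma_init(&pdev->dev, sdev, MY_NR_CHANNELS);
 *
 *	for each channel "id" with per-channel object "schan":
 *		shdma_chan_probe(sdev, schan, id);
 *		err = shdma_request_irq(schan, irq, IRQF_SHARED,
 *					dev_name(&pdev->dev));
 *
 *	err = dma_async_device_register(&sdev->dma_dev);
 *
 * Removal reverses the order: dma_async_device_unregister(), then
 * shdma_chan_remove() for each channel, then shdma_cleanup().
 */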
#if IS_ENABLED(CONFIG_SH_DMAE_BASE)
bool shdma_chan_filter(struct dma_chan *chan, void *arg);
#else
static inline bool shdma_chan_filter(struct dma_chan *chan, void *arg)
{
	return false;
}
#endif
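
/*
 * Clients can pass the filter to dma_request_channel() to pick a channel
 * by slave ID (a sketch; the slave_id variable is the caller's own):
 *
 *	dma_cap_mask_t mask;
 *
 *	dma_cap_zero(mask);
 *	dma_cap_set(DMA_SLAVE, mask);
 *	chan = dma_request_channel(mask, shdma_chan_filter,
 *				   (void *)(unsigned long)slave_id);
 */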

#endif