blob: 886326ee6f6c373c36d16fddef48b9ce58d5f7c9 [file] [log] [blame]
/* linux/arch/arm/plat-samsung/dma-ops.c
 *
 * Copyright (c) 2011 Samsung Electronics Co., Ltd.
 *		http://www.samsung.com
 *
 * Samsung DMA Operations
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */
12
13#include <linux/kernel.h>
14#include <linux/errno.h>
15#include <linux/amba/pl330.h>
16#include <linux/scatterlist.h>
Paul Gortmaker0c073e32011-10-08 23:24:48 -040017#include <linux/export.h>
Boojin Kimc4e16622011-09-02 09:44:35 +090018
19#include <mach/dma.h>
20
Tomasz Figa1db02872013-10-16 21:10:54 +020021#if defined(CONFIG_PL330_DMA)
22#define dma_filter pl330_filter
23#elif defined(CONFIG_S3C64XX_PL080)
24#define dma_filter pl08x_filter_id
25#endif
26
Boojin Kimc4e16622011-09-02 09:44:35 +090027static unsigned samsung_dmadev_request(enum dma_ch dma_ch,
Padmavathi Vennae7ba5f12013-01-18 17:17:02 +053028 struct samsung_dma_req *param,
29 struct device *dev, char *ch_name)
Boojin Kimc4e16622011-09-02 09:44:35 +090030{
Boojin Kimc4e16622011-09-02 09:44:35 +090031 dma_cap_mask_t mask;
Boojin Kimc4e16622011-09-02 09:44:35 +090032
33 dma_cap_zero(mask);
Boojin Kimfbb20e82012-06-19 13:26:53 +090034 dma_cap_set(param->cap, mask);
Boojin Kimc4e16622011-09-02 09:44:35 +090035
Padmavathi Vennae7ba5f12013-01-18 17:17:02 +053036 if (dev->of_node)
37 return (unsigned)dma_request_slave_channel(dev, ch_name);
38 else
Tomasz Figa1db02872013-10-16 21:10:54 +020039 return (unsigned)dma_request_channel(mask, dma_filter,
Padmavathi Vennaba7a9a72013-04-08 21:42:10 +090040 (void *)dma_ch);
Boojin Kimc4e16622011-09-02 09:44:35 +090041}
42
/*
 * Release a channel previously obtained from samsung_dmadev_request().
 * @param is unused; it exists to satisfy the samsung_dma_ops signature.
 * Always returns 0.
 */
static int samsung_dmadev_release(unsigned ch, void *param)
{
	struct dma_chan *chan = (struct dma_chan *)ch;

	dma_release_channel(chan);
	return 0;
}
49
Boojin Kimfbb20e82012-06-19 13:26:53 +090050static int samsung_dmadev_config(unsigned ch,
51 struct samsung_dma_config *param)
52{
53 struct dma_chan *chan = (struct dma_chan *)ch;
54 struct dma_slave_config slave_config;
55
56 if (param->direction == DMA_DEV_TO_MEM) {
57 memset(&slave_config, 0, sizeof(struct dma_slave_config));
58 slave_config.direction = param->direction;
59 slave_config.src_addr = param->fifo;
60 slave_config.src_addr_width = param->width;
61 slave_config.src_maxburst = 1;
62 dmaengine_slave_config(chan, &slave_config);
63 } else if (param->direction == DMA_MEM_TO_DEV) {
64 memset(&slave_config, 0, sizeof(struct dma_slave_config));
65 slave_config.direction = param->direction;
66 slave_config.dst_addr = param->fifo;
67 slave_config.dst_addr_width = param->width;
68 slave_config.dst_maxburst = 1;
69 dmaengine_slave_config(chan, &slave_config);
70 } else {
71 pr_warn("unsupported direction\n");
72 return -EINVAL;
73 }
74
75 return 0;
76}
77
Boojin Kimc4e16622011-09-02 09:44:35 +090078static int samsung_dmadev_prepare(unsigned ch,
Boojin Kimfbb20e82012-06-19 13:26:53 +090079 struct samsung_dma_prep *param)
Boojin Kimc4e16622011-09-02 09:44:35 +090080{
81 struct scatterlist sg;
82 struct dma_chan *chan = (struct dma_chan *)ch;
83 struct dma_async_tx_descriptor *desc;
84
Boojin Kimfbb20e82012-06-19 13:26:53 +090085 switch (param->cap) {
Boojin Kimc4e16622011-09-02 09:44:35 +090086 case DMA_SLAVE:
87 sg_init_table(&sg, 1);
Boojin Kimfbb20e82012-06-19 13:26:53 +090088 sg_dma_len(&sg) = param->len;
89 sg_set_page(&sg, pfn_to_page(PFN_DOWN(param->buf)),
90 param->len, offset_in_page(param->buf));
91 sg_dma_address(&sg) = param->buf;
Boojin Kimc4e16622011-09-02 09:44:35 +090092
Alexandre Bounine16052822012-03-08 16:11:18 -050093 desc = dmaengine_prep_slave_sg(chan,
Boojin Kimfbb20e82012-06-19 13:26:53 +090094 &sg, 1, param->direction, DMA_PREP_INTERRUPT);
Boojin Kimc4e16622011-09-02 09:44:35 +090095 break;
96 case DMA_CYCLIC:
Boojin Kimfbb20e82012-06-19 13:26:53 +090097 desc = dmaengine_prep_dma_cyclic(chan, param->buf,
Peter Ujfalusib7ef37d2012-09-24 10:58:05 +030098 param->len, param->period, param->direction,
99 DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
Boojin Kimc4e16622011-09-02 09:44:35 +0900100 break;
101 default:
102 dev_err(&chan->dev->device, "unsupported format\n");
103 return -EFAULT;
104 }
105
106 if (!desc) {
107 dev_err(&chan->dev->device, "cannot prepare cyclic dma\n");
108 return -EFAULT;
109 }
110
Boojin Kimfbb20e82012-06-19 13:26:53 +0900111 desc->callback = param->fp;
112 desc->callback_param = param->fp_param;
Boojin Kimc4e16622011-09-02 09:44:35 +0900113
114 dmaengine_submit((struct dma_async_tx_descriptor *)desc);
115
116 return 0;
117}
118
/*
 * Start execution of all descriptors previously submitted on @ch.
 * Always returns 0.
 */
static inline int samsung_dmadev_trigger(unsigned ch)
{
	struct dma_chan *chan = (struct dma_chan *)ch;

	dma_async_issue_pending(chan);
	return 0;
}
125
/*
 * Abort every in-flight and pending transfer on @ch. Serves as both the
 * .flush and .stop operation in dmadev_ops.
 */
static inline int samsung_dmadev_flush(unsigned ch)
{
	struct dma_chan *chan = (struct dma_chan *)ch;

	return dmaengine_terminate_all(chan);
}
130
/*
 * dmaengine-backed implementation of the samsung_dma_ops interface.
 * .started is intentionally unset; .stop reuses the flush handler, since
 * the dmaengine API offers only terminate-all for stopping a channel.
 */
static struct samsung_dma_ops dmadev_ops = {
	.request	= samsung_dmadev_request,
	.release	= samsung_dmadev_release,
	.config		= samsung_dmadev_config,
	.prepare	= samsung_dmadev_prepare,
	.trigger	= samsung_dmadev_trigger,
	.started	= NULL,
	.flush		= samsung_dmadev_flush,
	.stop		= samsung_dmadev_flush,
};
141
142void *samsung_dmadev_get_ops(void)
143{
144 return &dmadev_ops;
145}
146EXPORT_SYMBOL(samsung_dmadev_get_ops);