blob: d088afa034e87bf2d4df760a690e1bdb33bc0281 [file] [log] [blame]
Boojin Kimc4e16622011-09-02 09:44:35 +09001/* linux/arch/arm/plat-samsung/dma-ops.c
2 *
3 * Copyright (c) 2011 Samsung Electronics Co., Ltd.
4 * http://www.samsung.com
5 *
6 * Samsung DMA Operations
7 *
8 * This program is free software; you can redistribute it and/or modify
9 * it under the terms of the GNU General Public License version 2 as
10 * published by the Free Software Foundation.
11 */
12
13#include <linux/kernel.h>
14#include <linux/errno.h>
15#include <linux/amba/pl330.h>
16#include <linux/scatterlist.h>
Paul Gortmaker0c073e32011-10-08 23:24:48 -040017#include <linux/export.h>
Boojin Kimc4e16622011-09-02 09:44:35 +090018
19#include <mach/dma.h>
20
Boojin Kimc4e16622011-09-02 09:44:35 +090021static unsigned samsung_dmadev_request(enum dma_ch dma_ch,
Boojin Kimfbb20e82012-06-19 13:26:53 +090022 struct samsung_dma_req *param)
Boojin Kimc4e16622011-09-02 09:44:35 +090023{
Boojin Kimc4e16622011-09-02 09:44:35 +090024 dma_cap_mask_t mask;
Thomas Abraham4972a802011-10-24 11:43:38 +020025 void *filter_param;
Boojin Kimc4e16622011-09-02 09:44:35 +090026
27 dma_cap_zero(mask);
Boojin Kimfbb20e82012-06-19 13:26:53 +090028 dma_cap_set(param->cap, mask);
Boojin Kimc4e16622011-09-02 09:44:35 +090029
Thomas Abraham4972a802011-10-24 11:43:38 +020030 /*
31 * If a dma channel property of a device node from device tree is
32 * specified, use that as the fliter parameter.
33 */
Boojin Kimfbb20e82012-06-19 13:26:53 +090034 filter_param = (dma_ch == DMACH_DT_PROP) ?
35 (void *)param->dt_dmach_prop : (void *)dma_ch;
36 return (unsigned)dma_request_channel(mask, pl330_filter, filter_param);
Boojin Kimc4e16622011-09-02 09:44:35 +090037}
38
/* Release a channel handle previously returned by samsung_dmadev_request(). */
static int samsung_dmadev_release(unsigned ch, void *param)
{
	struct dma_chan *chan = (struct dma_chan *)ch;

	dma_release_channel(chan);

	return 0;
}
45
Boojin Kimfbb20e82012-06-19 13:26:53 +090046static int samsung_dmadev_config(unsigned ch,
47 struct samsung_dma_config *param)
48{
49 struct dma_chan *chan = (struct dma_chan *)ch;
50 struct dma_slave_config slave_config;
51
52 if (param->direction == DMA_DEV_TO_MEM) {
53 memset(&slave_config, 0, sizeof(struct dma_slave_config));
54 slave_config.direction = param->direction;
55 slave_config.src_addr = param->fifo;
56 slave_config.src_addr_width = param->width;
57 slave_config.src_maxburst = 1;
58 dmaengine_slave_config(chan, &slave_config);
59 } else if (param->direction == DMA_MEM_TO_DEV) {
60 memset(&slave_config, 0, sizeof(struct dma_slave_config));
61 slave_config.direction = param->direction;
62 slave_config.dst_addr = param->fifo;
63 slave_config.dst_addr_width = param->width;
64 slave_config.dst_maxburst = 1;
65 dmaengine_slave_config(chan, &slave_config);
66 } else {
67 pr_warn("unsupported direction\n");
68 return -EINVAL;
69 }
70
71 return 0;
72}
73
Boojin Kimc4e16622011-09-02 09:44:35 +090074static int samsung_dmadev_prepare(unsigned ch,
Boojin Kimfbb20e82012-06-19 13:26:53 +090075 struct samsung_dma_prep *param)
Boojin Kimc4e16622011-09-02 09:44:35 +090076{
77 struct scatterlist sg;
78 struct dma_chan *chan = (struct dma_chan *)ch;
79 struct dma_async_tx_descriptor *desc;
80
Boojin Kimfbb20e82012-06-19 13:26:53 +090081 switch (param->cap) {
Boojin Kimc4e16622011-09-02 09:44:35 +090082 case DMA_SLAVE:
83 sg_init_table(&sg, 1);
Boojin Kimfbb20e82012-06-19 13:26:53 +090084 sg_dma_len(&sg) = param->len;
85 sg_set_page(&sg, pfn_to_page(PFN_DOWN(param->buf)),
86 param->len, offset_in_page(param->buf));
87 sg_dma_address(&sg) = param->buf;
Boojin Kimc4e16622011-09-02 09:44:35 +090088
Alexandre Bounine16052822012-03-08 16:11:18 -050089 desc = dmaengine_prep_slave_sg(chan,
Boojin Kimfbb20e82012-06-19 13:26:53 +090090 &sg, 1, param->direction, DMA_PREP_INTERRUPT);
Boojin Kimc4e16622011-09-02 09:44:35 +090091 break;
92 case DMA_CYCLIC:
Boojin Kimfbb20e82012-06-19 13:26:53 +090093 desc = dmaengine_prep_dma_cyclic(chan, param->buf,
Peter Ujfalusib7ef37d2012-09-24 10:58:05 +030094 param->len, param->period, param->direction,
95 DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
Boojin Kimc4e16622011-09-02 09:44:35 +090096 break;
97 default:
98 dev_err(&chan->dev->device, "unsupported format\n");
99 return -EFAULT;
100 }
101
102 if (!desc) {
103 dev_err(&chan->dev->device, "cannot prepare cyclic dma\n");
104 return -EFAULT;
105 }
106
Boojin Kimfbb20e82012-06-19 13:26:53 +0900107 desc->callback = param->fp;
108 desc->callback_param = param->fp_param;
Boojin Kimc4e16622011-09-02 09:44:35 +0900109
110 dmaengine_submit((struct dma_async_tx_descriptor *)desc);
111
112 return 0;
113}
114
/* Kick the engine: start executing previously submitted descriptors. */
static inline int samsung_dmadev_trigger(unsigned ch)
{
	struct dma_chan *chan = (struct dma_chan *)ch;

	dma_async_issue_pending(chan);

	return 0;
}
121
/* Abort all outstanding transfers on @ch; also backs the .stop op below. */
static inline int samsung_dmadev_flush(unsigned ch)
{
	struct dma_chan *chan = (struct dma_chan *)ch;

	return dmaengine_terminate_all(chan);
}
126
Kukjin Kim6d259a22012-01-21 12:00:13 +0900127static struct samsung_dma_ops dmadev_ops = {
Boojin Kimc4e16622011-09-02 09:44:35 +0900128 .request = samsung_dmadev_request,
129 .release = samsung_dmadev_release,
Boojin Kimfbb20e82012-06-19 13:26:53 +0900130 .config = samsung_dmadev_config,
Boojin Kimc4e16622011-09-02 09:44:35 +0900131 .prepare = samsung_dmadev_prepare,
132 .trigger = samsung_dmadev_trigger,
133 .started = NULL,
134 .flush = samsung_dmadev_flush,
135 .stop = samsung_dmadev_flush,
136};
137
138void *samsung_dmadev_get_ops(void)
139{
140 return &dmadev_ops;
141}
142EXPORT_SYMBOL(samsung_dmadev_get_ops);