/*
 * Copyright (C) 2005 Stephen Street / StreetFire Sound Labs
 * Copyright (C) 2013, Intel Corporation
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#ifndef SPI_PXA2XX_H
#define SPI_PXA2XX_H

#include <linux/atomic.h>
#include <linux/dmaengine.h>
#include <linux/errno.h>
#include <linux/io.h>
#include <linux/interrupt.h>
#include <linux/platform_device.h>
#include <linux/pxa2xx_ssp.h>
#include <linux/scatterlist.h>
#include <linux/sizes.h>
#include <linux/spi/spi.h>
#include <linux/spi/pxa2xx_spi.h>

struct driver_data {
	/* Driver model hookup */
	struct platform_device *pdev;

	/* SSP Info */
	struct ssp_device *ssp;

	/* SPI framework hookup */
	enum pxa_ssp_type ssp_type;
	struct spi_master *master;

	/* PXA hookup */
	struct pxa2xx_spi_master *master_info;

	/* SSP register addresses */
	void __iomem *ioaddr;
	u32 ssdr_physical;

	/* SSP masks */
	u32 dma_cr1;
	u32 int_cr1;
	u32 clear_sr;
	u32 mask_sr;

	/* Message Transfer pump */
	struct tasklet_struct pump_transfers;

	/* DMA engine support */
	struct dma_chan *rx_chan;
	struct dma_chan *tx_chan;
	struct sg_table rx_sgt;
	struct sg_table tx_sgt;
	int rx_nents;
	int tx_nents;
	void *dummy;
	atomic_t dma_running;

	/* Current message transfer state info */
	struct spi_message *cur_msg;
	struct spi_transfer *cur_transfer;
	struct chip_data *cur_chip;
	size_t len;
	void *tx;
	void *tx_end;
	void *rx;
	void *rx_end;
	int dma_mapped;
	dma_addr_t rx_dma;
	dma_addr_t tx_dma;
	size_t rx_map_len;
	size_t tx_map_len;
	u8 n_bytes;
	int (*write)(struct driver_data *drv_data);
	int (*read)(struct driver_data *drv_data);
	irqreturn_t (*transfer_handler)(struct driver_data *drv_data);
	void (*cs_control)(u32 command);

	void __iomem *lpss_base;
};

struct chip_data {
	u32 cr1;
	u32 dds_rate;
	u32 timeout;
	u8 n_bytes;
	u32 dma_burst_size;
	u32 threshold;
	u32 dma_threshold;
	u16 lpss_rx_threshold;
	u16 lpss_tx_threshold;
	u8 enable_dma;
	union {
		int gpio_cs;
		unsigned int frm;
	};
	int gpio_cs_inverted;
	int (*write)(struct driver_data *drv_data);
	int (*read)(struct driver_data *drv_data);
	void (*cs_control)(u32 command);
};

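/*
 * MMIO register accessors: the SSP register block is mapped at
 * drv_data->ioaddr, so a register is read or written at its byte
 * offset from that base.
 */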
static inline u32 pxa2xx_spi_read(const struct driver_data *drv_data,
				  unsigned reg)
{
	return __raw_readl(drv_data->ioaddr + reg);
}

static inline void pxa2xx_spi_write(const struct driver_data *drv_data,
				    unsigned reg, u32 val)
{
	__raw_writel(val, drv_data->ioaddr + reg);
}

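/*
 * Message pump states, stored as opaque pointer values in
 * spi_message->state while a message is being processed.
 */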
#define START_STATE ((void *)0)
#define RUNNING_STATE ((void *)1)
#define DONE_STATE ((void *)2)
#define ERROR_STATE ((void *)-1)

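/* DMA-capable buffers must be 8-byte aligned */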
#define IS_DMA_ALIGNED(x)	IS_ALIGNED((unsigned long)(x), DMA_ALIGNMENT)
#define DMA_ALIGNMENT		8

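/*
 * True for SSP ports the driver treats as PXA25x compatible: the PXA25x
 * SSP itself plus the CE4100 and Quark X1000 SSPs.
 */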
static inline int pxa25x_ssp_comp(struct driver_data *drv_data)
{
	switch (drv_data->ssp_type) {
	case PXA25x_SSP:
	case CE4100_SSP:
	case QUARK_X1000_SSP:
		return 1;
	default:
		return 0;
	}
}

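/*
 * Write SSSR, preserving the alternate frame (chip select) bits on
 * CE4100 and Quark X1000, where they live in the same register.
 */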
static inline void write_SSSR_CS(struct driver_data *drv_data, u32 val)
{
	if (drv_data->ssp_type == CE4100_SSP ||
	    drv_data->ssp_type == QUARK_X1000_SSP)
		val |= pxa2xx_spi_read(drv_data, SSSR) & SSSR_ALT_FRM_MASK;

	pxa2xx_spi_write(drv_data, SSSR, val);
}

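/* Core transfer helpers, implemented in spi-pxa2xx.c */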
extern int pxa2xx_spi_flush(struct driver_data *drv_data);
extern void *pxa2xx_spi_next_transfer(struct driver_data *drv_data);

/*
 * Select the right DMA implementation.
 */
#if defined(CONFIG_SPI_PXA2XX_DMA)
#define SPI_PXA2XX_USE_DMA	1
#define MAX_DMA_LEN		SZ_64K
#define DEFAULT_DMA_CR1		(SSCR1_TSRE | SSCR1_RSRE | SSCR1_TRAIL)
#else
#undef SPI_PXA2XX_USE_DMA
#define MAX_DMA_LEN		0
#define DEFAULT_DMA_CR1		0
#endif

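/*
 * DMA helpers: real dmaengine-based implementations when
 * CONFIG_SPI_PXA2XX_DMA is enabled, no-op stubs otherwise so that the
 * core driver builds unchanged.
 */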
#ifdef SPI_PXA2XX_USE_DMA
extern bool pxa2xx_spi_dma_is_possible(size_t len);
extern int pxa2xx_spi_map_dma_buffers(struct driver_data *drv_data);
extern irqreturn_t pxa2xx_spi_dma_transfer(struct driver_data *drv_data);
extern int pxa2xx_spi_dma_prepare(struct driver_data *drv_data, u32 dma_burst);
extern void pxa2xx_spi_dma_start(struct driver_data *drv_data);
extern int pxa2xx_spi_dma_setup(struct driver_data *drv_data);
extern void pxa2xx_spi_dma_release(struct driver_data *drv_data);
extern int pxa2xx_spi_set_dma_burst_and_threshold(struct chip_data *chip,
						  struct spi_device *spi,
						  u8 bits_per_word,
						  u32 *burst_code,
						  u32 *threshold);
#else
static inline bool pxa2xx_spi_dma_is_possible(size_t len) { return false; }
static inline int pxa2xx_spi_map_dma_buffers(struct driver_data *drv_data)
{
	return 0;
}
#define pxa2xx_spi_dma_transfer NULL
static inline void pxa2xx_spi_dma_prepare(struct driver_data *drv_data,
					  u32 dma_burst) {}
static inline void pxa2xx_spi_dma_start(struct driver_data *drv_data) {}
static inline int pxa2xx_spi_dma_setup(struct driver_data *drv_data)
{
	return 0;
}
static inline void pxa2xx_spi_dma_release(struct driver_data *drv_data) {}
static inline int pxa2xx_spi_set_dma_burst_and_threshold(struct chip_data *chip,
							 struct spi_device *spi,
							 u8 bits_per_word,
							 u32 *burst_code,
							 u32 *threshold)
{
	return -ENODEV;
}
#endif

#endif /* SPI_PXA2XX_H */