Merge branch 'for-linus-update' of git://git.kernel.org/pub/scm/linux/kernel/git...
[pandora-kernel.git] / drivers / spi / spi-dw-mid.c
1 /*
2  * Special handling for DW core on Intel MID platform
3  *
4  * Copyright (c) 2009, 2014 Intel Corporation.
5  *
6  * This program is free software; you can redistribute it and/or modify it
7  * under the terms and conditions of the GNU General Public License,
8  * version 2, as published by the Free Software Foundation.
9  *
10  * This program is distributed in the hope it will be useful, but WITHOUT
11  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
12  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
13  * more details.
14  */
15
#include <linux/dma-mapping.h>
#include <linux/dmaengine.h>
#include <linux/interrupt.h>
#include <linux/slab.h>
#include <linux/spi/spi.h>
#include <linux/string.h>
#include <linux/types.h>

#include "spi-dw.h"
24
25 #ifdef CONFIG_SPI_DW_MID_DMA
26 #include <linux/intel_mid_dma.h>
27 #include <linux/pci.h>
28
/*
 * Per-direction slave parameters handed to the Intel MID DMA driver
 * through each channel's ->private pointer (see mid_spi_dma_init()).
 */
struct mid_dma {
	struct intel_mid_dma_slave	dmas_tx;
	struct intel_mid_dma_slave	dmas_rx;
};
33
34 static bool mid_spi_dma_chan_filter(struct dma_chan *chan, void *param)
35 {
36         struct dw_spi *dws = param;
37
38         return dws->dma_dev == chan->device->dev;
39 }
40
41 static int mid_spi_dma_init(struct dw_spi *dws)
42 {
43         struct mid_dma *dw_dma = dws->dma_priv;
44         struct pci_dev *dma_dev;
45         struct intel_mid_dma_slave *rxs, *txs;
46         dma_cap_mask_t mask;
47
48         /*
49          * Get pci device for DMA controller, currently it could only
50          * be the DMA controller of Medfield
51          */
52         dma_dev = pci_get_device(PCI_VENDOR_ID_INTEL, 0x0827, NULL);
53         if (!dma_dev)
54                 return -ENODEV;
55
56         dws->dma_dev = &dma_dev->dev;
57
58         dma_cap_zero(mask);
59         dma_cap_set(DMA_SLAVE, mask);
60
61         /* 1. Init rx channel */
62         dws->rxchan = dma_request_channel(mask, mid_spi_dma_chan_filter, dws);
63         if (!dws->rxchan)
64                 goto err_exit;
65         rxs = &dw_dma->dmas_rx;
66         rxs->hs_mode = LNW_DMA_HW_HS;
67         rxs->cfg_mode = LNW_DMA_PER_TO_MEM;
68         dws->rxchan->private = rxs;
69
70         /* 2. Init tx channel */
71         dws->txchan = dma_request_channel(mask, mid_spi_dma_chan_filter, dws);
72         if (!dws->txchan)
73                 goto free_rxchan;
74         txs = &dw_dma->dmas_tx;
75         txs->hs_mode = LNW_DMA_HW_HS;
76         txs->cfg_mode = LNW_DMA_MEM_TO_PER;
77         dws->txchan->private = txs;
78
79         dws->dma_inited = 1;
80         return 0;
81
82 free_rxchan:
83         dma_release_channel(dws->rxchan);
84 err_exit:
85         return -EBUSY;
86 }
87
88 static void mid_spi_dma_exit(struct dw_spi *dws)
89 {
90         if (!dws->dma_inited)
91                 return;
92
93         dmaengine_terminate_all(dws->txchan);
94         dma_release_channel(dws->txchan);
95
96         dmaengine_terminate_all(dws->rxchan);
97         dma_release_channel(dws->rxchan);
98 }
99
100 /*
101  * dws->dma_chan_done is cleared before the dma transfer starts,
102  * callback for rx/tx channel will each increment it by 1.
103  * Reaching 2 means the whole spi transaction is done.
104  */
105 static void dw_spi_dma_done(void *arg)
106 {
107         struct dw_spi *dws = arg;
108
109         if (++dws->dma_chan_done != 2)
110                 return;
111         dw_spi_xfer_done(dws);
112 }
113
114 static int mid_spi_dma_transfer(struct dw_spi *dws, int cs_change)
115 {
116         struct dma_async_tx_descriptor *txdesc, *rxdesc;
117         struct dma_slave_config txconf, rxconf;
118         u16 dma_ctrl = 0;
119
120         /* 1. setup DMA related registers */
121         if (cs_change) {
122                 spi_enable_chip(dws, 0);
123                 dw_writew(dws, DW_SPI_DMARDLR, 0xf);
124                 dw_writew(dws, DW_SPI_DMATDLR, 0x10);
125                 if (dws->tx_dma)
126                         dma_ctrl |= SPI_DMA_TDMAE;
127                 if (dws->rx_dma)
128                         dma_ctrl |= SPI_DMA_RDMAE;
129                 dw_writew(dws, DW_SPI_DMACR, dma_ctrl);
130                 spi_enable_chip(dws, 1);
131         }
132
133         dws->dma_chan_done = 0;
134
135         /* 2. Prepare the TX dma transfer */
136         txconf.direction = DMA_MEM_TO_DEV;
137         txconf.dst_addr = dws->dma_addr;
138         txconf.dst_maxburst = LNW_DMA_MSIZE_16;
139         txconf.src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
140         txconf.dst_addr_width = dws->dma_width;
141         txconf.device_fc = false;
142
143         dmaengine_slave_config(dws->txchan, &txconf);
144
145         memset(&dws->tx_sgl, 0, sizeof(dws->tx_sgl));
146         dws->tx_sgl.dma_address = dws->tx_dma;
147         dws->tx_sgl.length = dws->len;
148
149         txdesc = dmaengine_prep_slave_sg(dws->txchan,
150                                 &dws->tx_sgl,
151                                 1,
152                                 DMA_MEM_TO_DEV,
153                                 DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
154         txdesc->callback = dw_spi_dma_done;
155         txdesc->callback_param = dws;
156
157         /* 3. Prepare the RX dma transfer */
158         rxconf.direction = DMA_DEV_TO_MEM;
159         rxconf.src_addr = dws->dma_addr;
160         rxconf.src_maxburst = LNW_DMA_MSIZE_16;
161         rxconf.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
162         rxconf.src_addr_width = dws->dma_width;
163         rxconf.device_fc = false;
164
165         dmaengine_slave_config(dws->rxchan, &rxconf);
166
167         memset(&dws->rx_sgl, 0, sizeof(dws->rx_sgl));
168         dws->rx_sgl.dma_address = dws->rx_dma;
169         dws->rx_sgl.length = dws->len;
170
171         rxdesc = dmaengine_prep_slave_sg(dws->rxchan,
172                                 &dws->rx_sgl,
173                                 1,
174                                 DMA_DEV_TO_MEM,
175                                 DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
176         rxdesc->callback = dw_spi_dma_done;
177         rxdesc->callback_param = dws;
178
179         /* rx must be started before tx due to spi instinct */
180         dmaengine_submit(rxdesc);
181         dma_async_issue_pending(dws->rxchan);
182
183         dmaengine_submit(txdesc);
184         dma_async_issue_pending(dws->txchan);
185
186         return 0;
187 }
188
/* DMA operations plugged into the generic DW SPI core via dws->dma_ops */
static struct dw_spi_dma_ops mid_dma_ops = {
	.dma_init	= mid_spi_dma_init,
	.dma_exit	= mid_spi_dma_exit,
	.dma_transfer	= mid_spi_dma_transfer,
};
194 #endif
195
196 /* Some specific info for SPI0 controller on Intel MID */
197
198 /* HW info for MRST CLk Control Unit, one 32b reg */
199 #define MRST_SPI_CLK_BASE       100000000       /* 100m */
200 #define MRST_CLK_SPI0_REG       0xff11d86c
201 #define CLK_SPI_BDIV_OFFSET     0
202 #define CLK_SPI_BDIV_MASK       0x00000007
203 #define CLK_SPI_CDIV_OFFSET     9
204 #define CLK_SPI_CDIV_MASK       0x00000e00
205 #define CLK_SPI_DISABLE_OFFSET  8
206
207 int dw_spi_mid_init(struct dw_spi *dws)
208 {
209         void __iomem *clk_reg;
210         u32 clk_cdiv;
211
212         clk_reg = ioremap_nocache(MRST_CLK_SPI0_REG, 16);
213         if (!clk_reg)
214                 return -ENOMEM;
215
216         /* get SPI controller operating freq info */
217         clk_cdiv  = (readl(clk_reg) & CLK_SPI_CDIV_MASK) >> CLK_SPI_CDIV_OFFSET;
218         dws->max_freq = MRST_SPI_CLK_BASE / (clk_cdiv + 1);
219         iounmap(clk_reg);
220
221         dws->num_cs = 16;
222         dws->fifo_len = 40;     /* FIFO has 40 words buffer */
223
224 #ifdef CONFIG_SPI_DW_MID_DMA
225         dws->dma_priv = kzalloc(sizeof(struct mid_dma), GFP_KERNEL);
226         if (!dws->dma_priv)
227                 return -ENOMEM;
228         dws->dma_ops = &mid_dma_ops;
229 #endif
230         return 0;
231 }