// SPDX-License-Identifier: GPL-2.0+
/*
 * (C) Copyright 2018 Xilinx
 *
 * Xilinx ZynqMP Generic Quad-SPI (QSPI) controller driver (master mode only)
 */

#include <common.h>
#include <cpu_func.h>
#include <log.h>
#include <asm/arch/sys_proto.h>
#include <asm/cache.h>
#include <asm/io.h>
#include <clk.h>
#include <dm.h>
#include <malloc.h>
#include <memalign.h>
#include <spi.h>
#include <ubi_uboot.h>
#include <wait_bit.h>
#include <dm/device_compat.h>
#include <linux/bitops.h>
#include <linux/err.h>

#define GQSPI_GFIFO_STRT_MODE_MASK	BIT(29)
#define GQSPI_CONFIG_MODE_EN_MASK	(3 << 30)
#define GQSPI_CONFIG_DMA_MODE		(2 << 30)
#define GQSPI_CONFIG_CPHA_MASK		BIT(2)
#define GQSPI_CONFIG_CPOL_MASK		BIT(1)

/*
 * QSPI Interrupt Registers bit Masks
 *
 * All the four interrupt registers (Status/Mask/Enable/Disable) have the same
 * bit definitions.
 */
#define GQSPI_IXR_TXNFULL_MASK		0x00000004 /* QSPI TX FIFO not full */
#define GQSPI_IXR_TXFULL_MASK		0x00000008 /* QSPI TX FIFO is full */
#define GQSPI_IXR_RXNEMTY_MASK		0x00000010 /* QSPI RX FIFO Not Empty */
#define GQSPI_IXR_GFEMTY_MASK		0x00000080 /* QSPI Generic FIFO Empty */
#define GQSPI_IXR_ALL_MASK		(GQSPI_IXR_TXNFULL_MASK | \
					 GQSPI_IXR_RXNEMTY_MASK)

/*
 * QSPI Enable Register bit Masks
 *
 * This register is used to enable or disable the QSPI controller
 */
#define GQSPI_ENABLE_ENABLE_MASK	0x00000001 /* QSPI Enable Bit Mask */

#define GQSPI_GFIFO_LOW_BUS		BIT(14)
#define GQSPI_GFIFO_CS_LOWER		BIT(12)
#define GQSPI_GFIFO_UP_BUS		BIT(15)
#define GQSPI_GFIFO_CS_UPPER		BIT(13)
#define GQSPI_SPI_MODE_QSPI		(3 << 10)
#define GQSPI_SPI_MODE_SPI		BIT(10)
#define GQSPI_SPI_MODE_DUAL_SPI		(2 << 10)
#define GQSPI_IMD_DATA_CS_ASSERT	5
#define GQSPI_IMD_DATA_CS_DEASSERT	5
#define GQSPI_GFIFO_TX			BIT(16)
#define GQSPI_GFIFO_RX			BIT(17)
#define GQSPI_GFIFO_STRIPE_MASK		BIT(18)
#define GQSPI_GFIFO_IMD_MASK		0xFF
#define GQSPI_GFIFO_EXP_MASK		BIT(9)
#define GQSPI_GFIFO_DATA_XFR_MASK	BIT(8)
#define GQSPI_STRT_GEN_FIFO		BIT(28)
#define GQSPI_GEN_FIFO_STRT_MOD		BIT(29)
#define GQSPI_GFIFO_WP_HOLD		BIT(19)
#define GQSPI_BAUD_DIV_MASK		(7 << 3)
#define GQSPI_DFLT_BAUD_RATE_DIV	BIT(3)
#define GQSPI_GFIFO_ALL_INT_MASK	0xFBE
#define GQSPI_DMA_DST_I_STS_DONE	BIT(1)
#define GQSPI_DMA_DST_I_STS_MASK	0xFE
#define MODEBITS			0x6

#define GQSPI_GFIFO_SELECT		BIT(0)
#define GQSPI_FIFO_THRESHOLD		1

#define SPI_XFER_ON_BOTH		0
#define SPI_XFER_ON_LOWER		1
#define SPI_XFER_ON_UPPER		2

#define GQSPI_DMA_ALIGN			0x4
#define GQSPI_MAX_BAUD_RATE_VAL		7
#define GQSPI_DFLT_BAUD_RATE_VAL	2

#define GQSPI_TIMEOUT			100000000

#define GQSPI_BAUD_DIV_SHIFT		2
#define GQSPI_LPBK_DLY_ADJ_LPBK_SHIFT	5
#define GQSPI_LPBK_DLY_ADJ_DLY_1	0x2
#define GQSPI_LPBK_DLY_ADJ_DLY_1_SHIFT	3
#define GQSPI_LPBK_DLY_ADJ_DLY_0	0x3
#define GQSPI_USE_DATA_DLY		0x1
#define GQSPI_USE_DATA_DLY_SHIFT	31
#define GQSPI_DATA_DLY_ADJ_VALUE	0x2
#define GQSPI_DATA_DLY_ADJ_SHIFT	28
#define TAP_DLY_BYPASS_LQSPI_RX_VALUE	0x1
#define TAP_DLY_BYPASS_LQSPI_RX_SHIFT	2
#define GQSPI_DATA_DLY_ADJ_OFST		0x000001F8
#define IOU_TAPDLY_BYPASS_OFST		0xFF180390
#define GQSPI_LPBK_DLY_ADJ_LPBK_MASK	0x00000020
#define GQSPI_FREQ_40MHZ		40000000
#define GQSPI_FREQ_100MHZ		100000000
#define GQSPI_FREQ_150MHZ		150000000
#define IOU_TAPDLY_BYPASS_MASK		0x7

#define GQSPI_REG_OFFSET		0x100
#define GQSPI_DMA_REG_OFFSET		0x800

/* QSPI register offsets */
struct zynqmp_qspi_regs {
	u32 confr;	/* 0x00 */
	u32 isr;	/* 0x04 */
	u32 ier;	/* 0x08 */
	u32 idisr;	/* 0x0C */
	u32 imaskr;	/* 0x10 */
	u32 enbr;	/* 0x14 */
	u32 dr;		/* 0x18 */
	u32 txd0r;	/* 0x1C */
	u32 drxr;	/* 0x20 */
	u32 sicr;	/* 0x24 */
	u32 txftr;	/* 0x28 */
	u32 rxftr;	/* 0x2C */
	u32 gpior;	/* 0x30 */
	u32 reserved0;	/* 0x34 */
	u32 lpbkdly;	/* 0x38 */
	u32 reserved1;	/* 0x3C */
	u32 genfifo;	/* 0x40 */
	u32 gqspisel;	/* 0x44 */
	u32 reserved2;	/* 0x48 */
	u32 gqfifoctrl;	/* 0x4C */
	u32 gqfthr;	/* 0x50 */
	u32 gqpollcfg;	/* 0x54 */
	u32 gqpollto;	/* 0x58 */
	u32 gqxfersts;	/* 0x5C */
	u32 gqfifosnap;	/* 0x60 */
	u32 gqrxcpy;	/* 0x64 */
	u32 reserved3[36];	/* 0x68 */
	u32 gqspidlyadj;	/* 0xF8 */
};

struct zynqmp_qspi_dma_regs {
	u32 dmadst;	/* 0x00 */
	u32 dmasize;	/* 0x04 */
	u32 dmasts;	/* 0x08 */
	u32 dmactrl;	/* 0x0C */
	u32 reserved0;	/* 0x10 */
	u32 dmaisr;	/* 0x14 */
	u32 dmaier;	/* 0x18 */
	u32 dmaidr;	/* 0x1C */
	u32 dmaimr;	/* 0x20 */
	u32 dmactrl2;	/* 0x24 */
	u32 dmadstmsb;	/* 0x28 */
};

DECLARE_GLOBAL_DATA_PTR;

struct zynqmp_qspi_plat {
	struct zynqmp_qspi_regs *regs;
	struct zynqmp_qspi_dma_regs *dma_regs;
	u32 frequency;
	u32 speed_hz;
};

struct zynqmp_qspi_priv {
	struct zynqmp_qspi_regs *regs;
	struct zynqmp_qspi_dma_regs *dma_regs;
	const void *tx_buf;
	void *rx_buf;
	unsigned int len;
	int bytes_to_transfer;
	int bytes_to_receive;
	unsigned int is_inst;
	unsigned int cs_change:1;
};

static int zynqmp_qspi_of_to_plat(struct udevice *bus)
{
	struct zynqmp_qspi_plat *plat = dev_get_plat(bus);

	debug("%s\n", __func__);

	plat->regs = (struct zynqmp_qspi_regs *)(dev_read_addr(bus) +
						 GQSPI_REG_OFFSET);
	plat->dma_regs = (struct zynqmp_qspi_dma_regs *)
			  (dev_read_addr(bus) + GQSPI_DMA_REG_OFFSET);

	return 0;
}

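/*
 * zynqmp_qspi_init_hw - One-time controller setup
 *
 * Select the Generic Quad-SPI path, disable and clear all interrupts, set
 * the TX/RX FIFO thresholds, switch the controller to DMA mode with write
 * protect hold and the default baud-rate divisor, then enable the controller.
 */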
static void zynqmp_qspi_init_hw(struct zynqmp_qspi_priv *priv)
{
	u32 config_reg;
	struct zynqmp_qspi_regs *regs = priv->regs;

	writel(GQSPI_GFIFO_SELECT, &regs->gqspisel);
	writel(GQSPI_GFIFO_ALL_INT_MASK, &regs->idisr);
	writel(GQSPI_FIFO_THRESHOLD, &regs->txftr);
	writel(GQSPI_FIFO_THRESHOLD, &regs->rxftr);
	writel(GQSPI_GFIFO_ALL_INT_MASK, &regs->isr);

	config_reg = readl(&regs->confr);
	config_reg &= ~(GQSPI_GFIFO_STRT_MODE_MASK |
			GQSPI_CONFIG_MODE_EN_MASK);
	config_reg |= GQSPI_CONFIG_DMA_MODE |
		      GQSPI_GFIFO_WP_HOLD |
		      GQSPI_DFLT_BAUD_RATE_DIV;
	writel(config_reg, &regs->confr);

	writel(GQSPI_ENABLE_ENABLE_MASK, &regs->enbr);
}

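/*
 * zynqmp_qspi_bus_select - Build the bus/chip-select part of a GENFIFO entry
 *
 * This driver only drives the lower data bus with the lower chip select, so
 * every generic FIFO entry starts from GQSPI_GFIFO_LOW_BUS | GQSPI_GFIFO_CS_LOWER.
 */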
static u32 zynqmp_qspi_bus_select(struct zynqmp_qspi_priv *priv)
{
	u32 gqspi_fifo_reg = 0;

	gqspi_fifo_reg = GQSPI_GFIFO_LOW_BUS |
			 GQSPI_GFIFO_CS_LOWER;

	return gqspi_fifo_reg;
}

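/*
 * zynqmp_qspi_fill_gen_fifo - Push one entry into the generic FIFO
 *
 * Wait (with a timeout) until the generic FIFO reports empty, then write the
 * prepared command word so the controller can start executing it.
 */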
static void zynqmp_qspi_fill_gen_fifo(struct zynqmp_qspi_priv *priv,
				      u32 gqspi_fifo_reg)
{
	struct zynqmp_qspi_regs *regs = priv->regs;
	int ret = 0;

	ret = wait_for_bit_le32(&regs->isr, GQSPI_IXR_GFEMTY_MASK, 1,
				GQSPI_TIMEOUT, 1);
	if (ret)
		printf("%s Timeout\n", __func__);

	writel(gqspi_fifo_reg, &regs->genfifo);
}

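/*
 * zynqmp_qspi_chipselect - Assert or de-assert the (lower) chip select
 *
 * Queue a GENFIFO entry that switches the lower chip select; the immediate
 * value of 5 gives the select line a few clock cycles of setup/hold time
 * around the transfer.
 */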
static void zynqmp_qspi_chipselect(struct zynqmp_qspi_priv *priv, int is_on)
{
	u32 gqspi_fifo_reg = 0;

	if (is_on) {
		gqspi_fifo_reg = zynqmp_qspi_bus_select(priv);
		gqspi_fifo_reg |= GQSPI_SPI_MODE_SPI |
				  GQSPI_IMD_DATA_CS_ASSERT;
	} else {
		gqspi_fifo_reg = GQSPI_GFIFO_LOW_BUS;
		gqspi_fifo_reg |= GQSPI_IMD_DATA_CS_DEASSERT;
	}

	debug("GFIFO_CMD_CS: 0x%x\n", gqspi_fifo_reg);

	zynqmp_qspi_fill_gen_fifo(priv, gqspi_fifo_reg);
}

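/*
 * zynqmp_qspi_set_tapdelay - Program RX tap delays for the selected divisor
 *
 * Derive the resulting SCLK frequency from the reference clock and the baud
 * rate divisor, then set the IOU tap-delay bypass, loopback clock delay and
 * data delay registers according to the < 40 MHz, <= 100 MHz and <= 150 MHz
 * frequency bands.
 */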
void zynqmp_qspi_set_tapdelay(struct udevice *bus, u32 baudrateval)
{
	struct zynqmp_qspi_plat *plat = dev_get_plat(bus);
	struct zynqmp_qspi_priv *priv = dev_get_priv(bus);
	struct zynqmp_qspi_regs *regs = priv->regs;
	u32 tapdlybypass = 0, lpbkdlyadj = 0, datadlyadj = 0, clk_rate;
	u32 reqhz = 0;

	clk_rate = plat->frequency;
	reqhz = (clk_rate / (GQSPI_BAUD_DIV_SHIFT << baudrateval));

	debug("%s, req_hz:%d, clk_rate:%d, baudrateval:%d\n",
	      __func__, reqhz, clk_rate, baudrateval);

	if (reqhz < GQSPI_FREQ_40MHZ) {
		zynqmp_mmio_read(IOU_TAPDLY_BYPASS_OFST, &tapdlybypass);
		tapdlybypass |= (TAP_DLY_BYPASS_LQSPI_RX_VALUE <<
				 TAP_DLY_BYPASS_LQSPI_RX_SHIFT);
	} else if (reqhz <= GQSPI_FREQ_100MHZ) {
		zynqmp_mmio_read(IOU_TAPDLY_BYPASS_OFST, &tapdlybypass);
		tapdlybypass |= (TAP_DLY_BYPASS_LQSPI_RX_VALUE <<
				 TAP_DLY_BYPASS_LQSPI_RX_SHIFT);
		lpbkdlyadj = readl(&regs->lpbkdly);
		lpbkdlyadj |= (GQSPI_LPBK_DLY_ADJ_LPBK_MASK);
		datadlyadj = readl(&regs->gqspidlyadj);
		datadlyadj |= ((GQSPI_USE_DATA_DLY << GQSPI_USE_DATA_DLY_SHIFT)
				| (GQSPI_DATA_DLY_ADJ_VALUE <<
				   GQSPI_DATA_DLY_ADJ_SHIFT));
	} else if (reqhz <= GQSPI_FREQ_150MHZ) {
		lpbkdlyadj = readl(&regs->lpbkdly);
		lpbkdlyadj |= ((GQSPI_LPBK_DLY_ADJ_LPBK_MASK) |
				GQSPI_LPBK_DLY_ADJ_DLY_0);
	}

	zynqmp_mmio_write(IOU_TAPDLY_BYPASS_OFST, IOU_TAPDLY_BYPASS_MASK,
			  tapdlybypass);
	writel(lpbkdlyadj, &regs->lpbkdly);
	writel(datadlyadj, &regs->gqspidlyadj);
}

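/*
 * zynqmp_qspi_set_speed - Set the SCLK frequency for the bus
 *
 * Pick the smallest divisor (2 << baud_rate_val) that brings SCLK at or
 * below the requested speed, write it to the configuration register and
 * update the tap delays to match.
 */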
static int zynqmp_qspi_set_speed(struct udevice *bus, uint speed)
{
	struct zynqmp_qspi_plat *plat = dev_get_plat(bus);
	struct zynqmp_qspi_priv *priv = dev_get_priv(bus);
	struct zynqmp_qspi_regs *regs = priv->regs;
	u32 confr;
	u8 baud_rate_val = 0;

	debug("%s\n", __func__);
	if (speed > plat->frequency)
		speed = plat->frequency;

	/* Set the clock frequency */
	confr = readl(&regs->confr);
	if (speed == 0) {
		/* Use the default divisor (/8) if the requested speed is 0 */
		baud_rate_val = GQSPI_DFLT_BAUD_RATE_VAL;
	} else if (plat->speed_hz != speed) {
		while ((baud_rate_val < 8) &&
		       ((plat->frequency /
			 (2 << baud_rate_val)) > speed))
			baud_rate_val++;

		if (baud_rate_val > GQSPI_MAX_BAUD_RATE_VAL)
			baud_rate_val = GQSPI_DFLT_BAUD_RATE_VAL;

		plat->speed_hz = plat->frequency / (2 << baud_rate_val);
	}
	confr &= ~GQSPI_BAUD_DIV_MASK;
	confr |= (baud_rate_val << 3);
	writel(confr, &regs->confr);

	zynqmp_qspi_set_tapdelay(bus, baud_rate_val);
	debug("regs=%p, speed=%d\n", priv->regs, plat->speed_hz);

	return 0;
}

static int zynqmp_qspi_probe(struct udevice *bus)
{
	struct zynqmp_qspi_plat *plat = dev_get_plat(bus);
	struct zynqmp_qspi_priv *priv = dev_get_priv(bus);
	struct clk clk;
	unsigned long clock;
	int ret;

	debug("%s: bus:%p, priv:%p\n", __func__, bus, priv);

	priv->regs = plat->regs;
	priv->dma_regs = plat->dma_regs;

	ret = clk_get_by_index(bus, 0, &clk);
	if (ret < 0) {
		dev_err(bus, "failed to get clock\n");
		return ret;
	}

	clock = clk_get_rate(&clk);
	if (IS_ERR_VALUE(clock)) {
		dev_err(bus, "failed to get rate\n");
		return clock;
	}
	debug("%s: CLK %ld\n", __func__, clock);

	ret = clk_enable(&clk);
	if (ret && ret != -ENOSYS) {
		dev_err(bus, "failed to enable clock\n");
		return ret;
	}
	plat->frequency = clock;
	plat->speed_hz = plat->frequency / 2;

	/* Init the ZynqMP QSPI controller */
	zynqmp_qspi_init_hw(priv);

	return 0;
}

static int zynqmp_qspi_set_mode(struct udevice *bus, uint mode)
{
	struct zynqmp_qspi_priv *priv = dev_get_priv(bus);
	struct zynqmp_qspi_regs *regs = priv->regs;
	u32 confr;

	debug("%s\n", __func__);
	/* Set the SPI Clock phase and polarities */
	confr = readl(&regs->confr);
	confr &= ~(GQSPI_CONFIG_CPHA_MASK |
		   GQSPI_CONFIG_CPOL_MASK);

	if (mode & SPI_CPHA)
		confr |= GQSPI_CONFIG_CPHA_MASK;
	if (mode & SPI_CPOL)
		confr |= GQSPI_CONFIG_CPOL_MASK;

	writel(confr, &regs->confr);

	return 0;
}

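/*
 * zynqmp_qspi_fill_tx_fifo - Copy 'size' bytes from the TX buffer to the FIFO
 *
 * Data is written to the TX FIFO one 32-bit word at a time; a trailing 1-3
 * byte remainder is padded with ones in the unused upper bits. Writes are
 * throttled by waiting on the "TX FIFO not full" status bit.
 */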
static int zynqmp_qspi_fill_tx_fifo(struct zynqmp_qspi_priv *priv, u32 size)
{
	u32 data;
	int ret = 0;
	struct zynqmp_qspi_regs *regs = priv->regs;
	u32 *buf = (u32 *)priv->tx_buf;
	u32 len = size;

	debug("TxFIFO: 0x%x, size: 0x%x\n", readl(&regs->isr),
	      size);

	while (size) {
		ret = wait_for_bit_le32(&regs->isr, GQSPI_IXR_TXNFULL_MASK, 1,
					GQSPI_TIMEOUT, 1);
		if (ret) {
			printf("%s: Timeout\n", __func__);
			return ret;
		}

		if (size >= 4) {
			writel(*buf, &regs->txd0r);
			buf++;
			size -= 4;
		} else {
			switch (size) {
			case 1:
				data = *((u8 *)buf);
				buf += 1;
				data |= GENMASK(31, 8);
				break;
			case 2:
				data = *((u16 *)buf);
				buf += 2;
				data |= GENMASK(31, 16);
				break;
			case 3:
				data = *buf;
				buf += 3;
				data |= GENMASK(31, 24);
				break;
			}
			writel(data, &regs->txd0r);
			size = 0;
		}
	}

	priv->tx_buf += len;
	return 0;
}

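/*
 * zynqmp_qspi_genfifo_cmd - Send the command/opcode phase
 *
 * Each command byte is placed in the immediate data field of its own GENFIFO
 * entry and transmitted in single (x1) SPI mode.
 */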
static void zynqmp_qspi_genfifo_cmd(struct zynqmp_qspi_priv *priv)
{
	u32 gen_fifo_cmd;
	u32 bytecount = 0;

	while (priv->len) {
		gen_fifo_cmd = zynqmp_qspi_bus_select(priv);
		gen_fifo_cmd |= GQSPI_GFIFO_TX | GQSPI_SPI_MODE_SPI;
		gen_fifo_cmd |= *(u8 *)priv->tx_buf;
		bytecount++;
		priv->len--;
		priv->tx_buf = (u8 *)priv->tx_buf + 1;

		debug("GFIFO_CMD_Cmd = 0x%x\n", gen_fifo_cmd);

		zynqmp_qspi_fill_gen_fifo(priv, gen_fifo_cmd);
	}
}

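/*
 * zynqmp_qspi_calc_exp - Split the remaining length into one GENFIFO chunk
 *
 * The GENFIFO immediate field is only 8 bits wide, so lengths above 255 are
 * sent as power-of-two chunks using the exponent mode: for example, a 4 KiB
 * transfer becomes a single entry with EXP set and an immediate value of 12
 * (1 << 12 bytes). For 255 bytes or less the length is placed directly in
 * the immediate field. The chosen chunk size is subtracted from priv->len.
 */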
static u32 zynqmp_qspi_calc_exp(struct zynqmp_qspi_priv *priv,
				u32 *gen_fifo_cmd)
{
	u32 expval = 8;
	u32 len;

	while (1) {
		if (priv->len > 255) {
			if (priv->len & (1 << expval)) {
				*gen_fifo_cmd &= ~GQSPI_GFIFO_IMD_MASK;
				*gen_fifo_cmd |= GQSPI_GFIFO_EXP_MASK;
				*gen_fifo_cmd |= expval;
				priv->len -= (1 << expval);
				return expval;
			}
			expval++;
		} else {
			*gen_fifo_cmd &= ~(GQSPI_GFIFO_IMD_MASK |
					   GQSPI_GFIFO_EXP_MASK);
			*gen_fifo_cmd |= (u8)priv->len;
			len = (u8)priv->len;
			priv->len = 0;
			return len;
		}
	}
}

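/*
 * zynqmp_qspi_genfifo_fill_tx - Transmit the data phase of a write
 *
 * Queue TX GENFIFO entries chunk by chunk and keep the TX FIFO topped up with
 * the corresponding payload bytes until the whole buffer has been sent.
 */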
static int zynqmp_qspi_genfifo_fill_tx(struct zynqmp_qspi_priv *priv)
{
	u32 gen_fifo_cmd;
	u32 len;
	int ret = 0;

	gen_fifo_cmd = zynqmp_qspi_bus_select(priv);
	gen_fifo_cmd |= GQSPI_GFIFO_TX |
			GQSPI_GFIFO_DATA_XFR_MASK;

	gen_fifo_cmd |= GQSPI_SPI_MODE_SPI;

	while (priv->len) {
		len = zynqmp_qspi_calc_exp(priv, &gen_fifo_cmd);
		zynqmp_qspi_fill_gen_fifo(priv, gen_fifo_cmd);

		debug("GFIFO_CMD_TX:0x%x\n", gen_fifo_cmd);

		if (gen_fifo_cmd & GQSPI_GFIFO_EXP_MASK)
			ret = zynqmp_qspi_fill_tx_fifo(priv,
						       1 << len);
		else
			ret = zynqmp_qspi_fill_tx_fifo(priv,
						       len);

		if (ret)
			return ret;
	}
	return ret;
}

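/*
 * zynqmp_qspi_start_dma - Receive data via the QSPI DMA destination channel
 *
 * Program the DMA destination address and (cache-line rounded) size, flush
 * the destination range from the data cache, queue the RX GENFIFO entries,
 * then poll for the DMA "done" status. If a bounce buffer was used, the data
 * is copied back into the caller's receive buffer afterwards.
 */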
static int zynqmp_qspi_start_dma(struct zynqmp_qspi_priv *priv,
				 u32 gen_fifo_cmd, u32 *buf)
{
	u32 addr;
	u32 size, len;
	u32 actuallen = priv->len;
	int ret = 0;
	struct zynqmp_qspi_dma_regs *dma_regs = priv->dma_regs;

	writel((unsigned long)buf, &dma_regs->dmadst);
	writel(roundup(priv->len, ARCH_DMA_MINALIGN), &dma_regs->dmasize);
	writel(GQSPI_DMA_DST_I_STS_MASK, &dma_regs->dmaier);
	addr = (unsigned long)buf;
	size = roundup(priv->len, ARCH_DMA_MINALIGN);
	flush_dcache_range(addr, addr + size);

	while (priv->len) {
		len = zynqmp_qspi_calc_exp(priv, &gen_fifo_cmd);
		if (!(gen_fifo_cmd & GQSPI_GFIFO_EXP_MASK) &&
		    (len % ARCH_DMA_MINALIGN)) {
			gen_fifo_cmd &= ~GENMASK(7, 0);
			gen_fifo_cmd |= roundup(len, ARCH_DMA_MINALIGN);
		}
		zynqmp_qspi_fill_gen_fifo(priv, gen_fifo_cmd);

		debug("GFIFO_CMD_RX:0x%x\n", gen_fifo_cmd);
	}

	ret = wait_for_bit_le32(&dma_regs->dmaisr, GQSPI_DMA_DST_I_STS_DONE,
				1, GQSPI_TIMEOUT, 1);
	if (ret) {
		printf("DMA Timeout:0x%x\n", readl(&dma_regs->dmaisr));
		return -ETIMEDOUT;
	}

	writel(GQSPI_DMA_DST_I_STS_DONE, &dma_regs->dmaisr);

	debug("buf:0x%lx, rxbuf:0x%lx, *buf:0x%x len: 0x%x\n",
	      (unsigned long)buf, (unsigned long)priv->rx_buf, *buf,
	      actuallen);

	if (buf != priv->rx_buf)
		memcpy(priv->rx_buf, buf, actuallen);

	return 0;
}

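/*
 * zynqmp_qspi_genfifo_fill_rx - Receive the data phase of a read
 *
 * If the caller's buffer is 4-byte aligned and a multiple of 4 bytes long it
 * is used directly as the DMA destination; otherwise a cache-aligned bounce
 * buffer is allocated on the stack and the data is copied out after the DMA
 * completes.
 */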
static int zynqmp_qspi_genfifo_fill_rx(struct zynqmp_qspi_priv *priv)
{
	u32 gen_fifo_cmd;
	u32 *buf;
	u32 actuallen = priv->len;

	gen_fifo_cmd = zynqmp_qspi_bus_select(priv);
	gen_fifo_cmd |= GQSPI_GFIFO_RX |
			GQSPI_GFIFO_DATA_XFR_MASK;

	gen_fifo_cmd |= GQSPI_SPI_MODE_SPI;

	/*
	 * Check if the receive buffer is aligned to 4 bytes and the length
	 * is a multiple of 4 bytes, as we use DMA to receive.
	 */
	if (!((unsigned long)priv->rx_buf & (GQSPI_DMA_ALIGN - 1)) &&
	    !(actuallen % GQSPI_DMA_ALIGN)) {
		buf = (u32 *)priv->rx_buf;
		return zynqmp_qspi_start_dma(priv, gen_fifo_cmd, buf);
	}

	ALLOC_CACHE_ALIGN_BUFFER(u8, tmp, roundup(priv->len,
						  GQSPI_DMA_ALIGN));
	buf = (u32 *)tmp;
	return zynqmp_qspi_start_dma(priv, gen_fifo_cmd, buf);
}

static int zynqmp_qspi_start_transfer(struct zynqmp_qspi_priv *priv)
{
	int ret = 0;

	if (priv->is_inst) {
		if (priv->tx_buf)
			zynqmp_qspi_genfifo_cmd(priv);
		else
			return -EINVAL;
	} else {
		if (priv->tx_buf)
			ret = zynqmp_qspi_genfifo_fill_tx(priv);
		else if (priv->rx_buf)
			ret = zynqmp_qspi_genfifo_fill_rx(priv);
		else
			return -EINVAL;
	}
	return ret;
}

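/*
 * zynqmp_qspi_transfer - Run one transfer with chip-select handling
 *
 * Assert the chip select when the previous transfer ended with a CS change,
 * start the command or data phase, and de-assert the chip select again when
 * this transfer is marked as the end of the message.
 */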
static int zynqmp_qspi_transfer(struct zynqmp_qspi_priv *priv)
{
	static unsigned int cs_change = 1;
	int status = 0;

	debug("%s\n", __func__);

	while (1) {
		/* Select the chip if required */
		if (cs_change)
			zynqmp_qspi_chipselect(priv, 1);

		cs_change = priv->cs_change;

		if (!priv->tx_buf && !priv->rx_buf && priv->len) {
			status = -EINVAL;
			break;
		}

		/* Request the transfer */
		if (priv->len) {
			status = zynqmp_qspi_start_transfer(priv);
			priv->is_inst = 0;
			if (status < 0)
				break;
		}

		if (cs_change)
			/* Deselect the chip */
			zynqmp_qspi_chipselect(priv, 0);
		break;
	}

	return status;
}

static int zynqmp_qspi_claim_bus(struct udevice *dev)
{
	struct udevice *bus = dev->parent;
	struct zynqmp_qspi_priv *priv = dev_get_priv(bus);
	struct zynqmp_qspi_regs *regs = priv->regs;

	writel(GQSPI_ENABLE_ENABLE_MASK, &regs->enbr);

	return 0;
}

static int zynqmp_qspi_release_bus(struct udevice *dev)
{
	struct udevice *bus = dev->parent;
	struct zynqmp_qspi_priv *priv = dev_get_priv(bus);
	struct zynqmp_qspi_regs *regs = priv->regs;

	writel(~GQSPI_ENABLE_ENABLE_MASK, &regs->enbr);

	return 0;
}

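/*
 * zynqmp_qspi_xfer - DM SPI transfer hook
 *
 * Convert the bit length to bytes, treat a transfer that starts with
 * SPI_XFER_BEGIN and carries TX data as the command phase, record whether the
 * chip select should be released at SPI_XFER_END, and hand the transfer to
 * zynqmp_qspi_transfer().
 */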
int zynqmp_qspi_xfer(struct udevice *dev, unsigned int bitlen, const void *dout,
		     void *din, unsigned long flags)
{
	struct udevice *bus = dev->parent;
	struct zynqmp_qspi_priv *priv = dev_get_priv(bus);

	debug("%s: priv: 0x%08lx bitlen: %d dout: 0x%08lx ", __func__,
	      (unsigned long)priv, bitlen, (unsigned long)dout);
	debug("din: 0x%08lx flags: 0x%lx\n", (unsigned long)din, flags);

	priv->tx_buf = dout;
	priv->rx_buf = din;
	priv->len = bitlen / 8;

	/*
	 * Assume that the beginning of a transfer with bits to
	 * transmit must contain a device command.
	 */
	if (dout && flags & SPI_XFER_BEGIN)
		priv->is_inst = 1;
	else
		priv->is_inst = 0;

	if (flags & SPI_XFER_END)
		priv->cs_change = 1;
	else
		priv->cs_change = 0;

	zynqmp_qspi_transfer(priv);

	return 0;
}

static const struct dm_spi_ops zynqmp_qspi_ops = {
	.claim_bus = zynqmp_qspi_claim_bus,
	.release_bus = zynqmp_qspi_release_bus,
	.xfer = zynqmp_qspi_xfer,
	.set_speed = zynqmp_qspi_set_speed,
	.set_mode = zynqmp_qspi_set_mode,
};

static const struct udevice_id zynqmp_qspi_ids[] = {
	{ .compatible = "xlnx,zynqmp-qspi-1.0" },
	{ .compatible = "xlnx,versal-qspi-1.0" },
	{ }
};

U_BOOT_DRIVER(zynqmp_qspi) = {
	.name = "zynqmp_qspi",
	.id = UCLASS_SPI,
	.of_match = zynqmp_qspi_ids,
	.ops = &zynqmp_qspi_ops,
	.of_to_plat = zynqmp_qspi_of_to_plat,
	.plat_auto = sizeof(struct zynqmp_qspi_plat),
	.priv_auto = sizeof(struct zynqmp_qspi_priv),
	.probe = zynqmp_qspi_probe,
};