// SPDX-License-Identifier: GPL-2.0+
/*
 * (C) Copyright 2018 Xilinx
 *
 * Xilinx ZynqMP Generic Quad-SPI (QSPI) controller driver (master mode only)
 */

#define LOG_CATEGORY UCLASS_SPI

#include <common.h>
#include <cpu_func.h>
#include <log.h>
#include <asm/arch/sys_proto.h>
#include <asm/cache.h>
#include <asm/io.h>
#include <clk.h>
#include <dm.h>
#include <malloc.h>
#include <memalign.h>
#include <spi.h>
#include <spi-mem.h>
#include <ubi_uboot.h>
#include <wait_bit.h>
#include <dm/device_compat.h>
#include <linux/bitops.h>
#include <linux/delay.h>
#include <linux/err.h>
#include <linux/sizes.h>
#include <zynqmp_firmware.h>

#define GQSPI_GFIFO_STRT_MODE_MASK	BIT(29)
#define GQSPI_CONFIG_MODE_EN_MASK	(3 << 30)
#define GQSPI_CONFIG_DMA_MODE		(2 << 30)
#define GQSPI_CONFIG_CPHA_MASK		BIT(2)
#define GQSPI_CONFIG_CPOL_MASK		BIT(1)

/*
 * QSPI Interrupt Registers bit Masks
 *
 * All the four interrupt registers (Status/Mask/Enable/Disable) have the same
 * bit definitions.
 */
#define GQSPI_IXR_TXNFULL_MASK		0x00000004 /* QSPI TX FIFO not full */
#define GQSPI_IXR_TXFULL_MASK		0x00000008 /* QSPI TX FIFO is full */
#define GQSPI_IXR_TXFIFOEMPTY_MASK	0x00000100 /* QSPI TX FIFO is empty */
#define GQSPI_IXR_RXNEMTY_MASK		0x00000010 /* QSPI RX FIFO not empty */
#define GQSPI_IXR_GFEMTY_MASK		0x00000080 /* QSPI generic FIFO empty */
#define GQSPI_IXR_GFNFULL_MASK		0x00000200 /* QSPI GENFIFO not full */
#define GQSPI_IXR_ALL_MASK		(GQSPI_IXR_TXNFULL_MASK | \
					 GQSPI_IXR_RXNEMTY_MASK)

/*
 * QSPI Enable Register bit Masks
 *
 * This register is used to enable or disable the QSPI controller.
 */
#define GQSPI_ENABLE_ENABLE_MASK	0x00000001 /* QSPI Enable Bit Mask */

#define GQSPI_GFIFO_LOW_BUS		BIT(14)
#define GQSPI_GFIFO_CS_LOWER		BIT(12)
#define GQSPI_GFIFO_UP_BUS		BIT(15)
#define GQSPI_GFIFO_CS_UPPER		BIT(13)
#define GQSPI_SPI_MODE_QSPI		(3 << 10)
#define GQSPI_SPI_MODE_SPI		BIT(10)
#define GQSPI_SPI_MODE_DUAL_SPI		(2 << 10)
#define GQSPI_IMD_DATA_CS_ASSERT	5
#define GQSPI_IMD_DATA_CS_DEASSERT	5
#define GQSPI_GFIFO_TX			BIT(16)
#define GQSPI_GFIFO_RX			BIT(17)
#define GQSPI_GFIFO_STRIPE_MASK		BIT(18)
#define GQSPI_GFIFO_IMD_MASK		0xFF
#define GQSPI_GFIFO_EXP_MASK		BIT(9)
#define GQSPI_GFIFO_DATA_XFR_MASK	BIT(8)
#define GQSPI_STRT_GEN_FIFO		BIT(28)
#define GQSPI_GEN_FIFO_STRT_MOD		BIT(29)
#define GQSPI_GFIFO_WP_HOLD		BIT(19)
#define GQSPI_BAUD_DIV_MASK		(7 << 3)
#define GQSPI_DFLT_BAUD_RATE_DIV	BIT(3)
#define GQSPI_GFIFO_ALL_INT_MASK	0xFBE
#define GQSPI_DMA_DST_I_STS_DONE	BIT(1)
#define GQSPI_DMA_DST_I_STS_MASK	0xFE
#define MODEBITS			0x6

#define GQSPI_GFIFO_SELECT		BIT(0)
#define GQSPI_FIFO_THRESHOLD		1
#define GQSPI_GENFIFO_THRESHOLD		31

#define SPI_XFER_ON_BOTH		0
#define SPI_XFER_ON_LOWER		1
#define SPI_XFER_ON_UPPER		2

#define GQSPI_DMA_ALIGN			0x4
#define GQSPI_MAX_BAUD_RATE_VAL		7
#define GQSPI_DFLT_BAUD_RATE_VAL	2

#define GQSPI_TIMEOUT			100000000

#define GQSPI_BAUD_DIV_SHIFT		2
#define GQSPI_LPBK_DLY_ADJ_LPBK_SHIFT	5
#define GQSPI_LPBK_DLY_ADJ_DLY_1	0x1
#define GQSPI_LPBK_DLY_ADJ_DLY_1_SHIFT	3
#define GQSPI_LPBK_DLY_ADJ_DLY_0	0x3
#define GQSPI_USE_DATA_DLY		0x1
#define GQSPI_USE_DATA_DLY_SHIFT	31
#define GQSPI_DATA_DLY_ADJ_VALUE	0x2
#define GQSPI_DATA_DLY_ADJ_SHIFT	28
#define TAP_DLY_BYPASS_LQSPI_RX_VALUE	0x1
#define TAP_DLY_BYPASS_LQSPI_RX_SHIFT	2
#define GQSPI_DATA_DLY_ADJ_OFST		0x000001F8
#define IOU_TAPDLY_BYPASS_OFST		(!(IS_ENABLED(CONFIG_ARCH_VERSAL) || \
					   IS_ENABLED(CONFIG_ARCH_VERSAL_NET)) ? \
					 0xFF180390 : 0xF103003C)
#define GQSPI_LPBK_DLY_ADJ_LPBK_MASK	0x00000020
#define GQSPI_FREQ_37_5MHZ		37500000
#define GQSPI_FREQ_40MHZ		40000000
#define GQSPI_FREQ_100MHZ		100000000
#define GQSPI_FREQ_150MHZ		150000000
#define IOU_TAPDLY_BYPASS_MASK		0x7

#define GQSPI_REG_OFFSET		0x100
#define GQSPI_DMA_REG_OFFSET		0x800

/* QSPI register offsets */
struct zynqmp_qspi_regs {
	u32 confr;	/* 0x00 */
	u32 isr;	/* 0x04 */
	u32 ier;	/* 0x08 */
	u32 idisr;	/* 0x0C */
	u32 imaskr;	/* 0x10 */
	u32 enbr;	/* 0x14 */
	u32 dr;		/* 0x18 */
	u32 txd0r;	/* 0x1C */
	u32 drxr;	/* 0x20 */
	u32 sicr;	/* 0x24 */
	u32 txftr;	/* 0x28 */
	u32 rxftr;	/* 0x2C */
	u32 gpior;	/* 0x30 */
	u32 reserved0;	/* 0x34 */
	u32 lpbkdly;	/* 0x38 */
	u32 reserved1;	/* 0x3C */
	u32 genfifo;	/* 0x40 */
	u32 gqspisel;	/* 0x44 */
	u32 reserved2;	/* 0x48 */
	u32 gqfifoctrl;	/* 0x4C */
	u32 gqfthr;	/* 0x50 */
	u32 gqpollcfg;	/* 0x54 */
	u32 gqpollto;	/* 0x58 */
	u32 gqxfersts;	/* 0x5C */
	u32 gqfifosnap;	/* 0x60 */
	u32 gqrxcpy;	/* 0x64 */
	u32 reserved3[36];	/* 0x68 */
	u32 gqspidlyadj;	/* 0xF8 */
};

struct zynqmp_qspi_dma_regs {
	u32 dmadst;	/* 0x00 */
	u32 dmasize;	/* 0x04 */
	u32 dmasts;	/* 0x08 */
	u32 dmactrl;	/* 0x0C */
	u32 reserved0;	/* 0x10 */
	u32 dmaisr;	/* 0x14 */
	u32 dmaier;	/* 0x18 */
	u32 dmaidr;	/* 0x1C */
	u32 dmaimr;	/* 0x20 */
	u32 dmactrl2;	/* 0x24 */
	u32 dmadstmsb;	/* 0x28 */
};

struct zynqmp_qspi_plat {
	struct zynqmp_qspi_regs *regs;
	struct zynqmp_qspi_dma_regs *dma_regs;
	u32 frequency;
	u32 speed_hz;
	unsigned int io_mode;
};

struct zynqmp_qspi_priv {
	struct zynqmp_qspi_regs *regs;
	struct zynqmp_qspi_dma_regs *dma_regs;
	const void *tx_buf;
	void *rx_buf;
	unsigned int len;
	unsigned int io_mode;
	int bytes_to_transfer;
	int bytes_to_receive;
	const struct spi_mem_op *op;
};

/* Weak stub; the ZynqMP platform code provides the real implementation */
__weak int zynqmp_mmio_write(const u32 address, const u32 mask, const u32 value)
{
	return 0;
}

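/*
 * Configuration is taken from the device tree. A minimal, illustrative node
 * is sketched below (the 0xff0f0000 base and 0x1000 size are the usual
 * ZynqMP values, but the real node lives in the board/SoC .dtsi); the
 * optional "has-io-mode" property selects polled IO transfers instead of DMA:
 *
 *	qspi: spi@ff0f0000 {
 *		compatible = "xlnx,zynqmp-qspi-1.0";
 *		reg = <0x0 0xff0f0000 0x0 0x1000>;
 *		has-io-mode;
 *	};
 */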
static int zynqmp_qspi_of_to_plat(struct udevice *bus)
{
	struct zynqmp_qspi_plat *plat = dev_get_plat(bus);

	plat->regs = (struct zynqmp_qspi_regs *)(dev_read_addr(bus) +
						 GQSPI_REG_OFFSET);
	plat->dma_regs = (struct zynqmp_qspi_dma_regs *)
			 (dev_read_addr(bus) + GQSPI_DMA_REG_OFFSET);

	plat->io_mode = dev_read_bool(bus, "has-io-mode");

	return 0;
}

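/*
 * One-time controller setup: select the generic quad-SPI path, mask and
 * clear all interrupts, program the FIFO thresholds, and enable manual
 * start mode, using DMA for data transfer unless "has-io-mode" was set.
 */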
static void zynqmp_qspi_init_hw(struct zynqmp_qspi_priv *priv)
{
	u32 config_reg;
	struct zynqmp_qspi_regs *regs = priv->regs;

	writel(GQSPI_GFIFO_SELECT, &regs->gqspisel);
	writel(GQSPI_GFIFO_ALL_INT_MASK, &regs->idisr);
	writel(GQSPI_FIFO_THRESHOLD, &regs->txftr);
	writel(GQSPI_FIFO_THRESHOLD, &regs->rxftr);
	writel(GQSPI_GENFIFO_THRESHOLD, &regs->gqfthr);
	writel(GQSPI_GFIFO_ALL_INT_MASK, &regs->isr);
	writel(~GQSPI_ENABLE_ENABLE_MASK, &regs->enbr);

	config_reg = readl(&regs->confr);
	config_reg &= ~(GQSPI_GFIFO_STRT_MODE_MASK |
			GQSPI_CONFIG_MODE_EN_MASK);
	config_reg |= GQSPI_GFIFO_WP_HOLD | GQSPI_DFLT_BAUD_RATE_DIV;
	config_reg |= GQSPI_GFIFO_STRT_MODE_MASK;
	if (!priv->io_mode)
		config_reg |= GQSPI_CONFIG_DMA_MODE;

	writel(config_reg, &regs->confr);

	writel(GQSPI_ENABLE_ENABLE_MASK, &regs->enbr);
}

static u32 zynqmp_qspi_bus_select(struct zynqmp_qspi_priv *priv)
{
	u32 gqspi_fifo_reg = 0;

	gqspi_fifo_reg = GQSPI_GFIFO_LOW_BUS |
			 GQSPI_GFIFO_CS_LOWER;

	return gqspi_fifo_reg;
}

static u32 zynqmp_qspi_genfifo_mode(u8 buswidth)
{
	switch (buswidth) {
	case 1:
		return GQSPI_SPI_MODE_SPI;
	case 2:
		return GQSPI_SPI_MODE_DUAL_SPI;
	case 4:
		return GQSPI_SPI_MODE_QSPI;
	default:
		log_warning("Unsupported bus width %u\n", buswidth);
		return GQSPI_SPI_MODE_SPI;
	}
}

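/*
 * Each 32-bit GENFIFO entry describes one phase of a transfer: bus and
 * chip-select bits (GQSPI_GFIFO_LOW_BUS, GQSPI_GFIFO_CS_LOWER), the SPI
 * mode (x1/x2/x4), TX/RX direction flags, and an 8-bit immediate field
 * carrying a data byte, a CS delay, or a length (see
 * zynqmp_qspi_calc_exp() for the length encoding).
 */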
static void zynqmp_qspi_fill_gen_fifo(struct zynqmp_qspi_priv *priv,
				      u32 gqspi_fifo_reg)
{
	struct zynqmp_qspi_regs *regs = priv->regs;
	u32 config_reg, ier;
	int ret = 0;

	log_content("%s, GFIFO_CMD: 0x%X\n", __func__, gqspi_fifo_reg);

	writel(gqspi_fifo_reg, &regs->genfifo);

	config_reg = readl(&regs->confr);
	/* Manual start if needed */
	config_reg |= GQSPI_STRT_GEN_FIFO;
	writel(config_reg, &regs->confr);

	/* Enable interrupts */
	ier = readl(&regs->ier);
	ier |= GQSPI_IXR_GFEMTY_MASK;
	writel(ier, &regs->ier);

	/* Wait until the GENFIFO is empty to write the new command */
	ret = wait_for_bit_le32(&regs->isr, GQSPI_IXR_GFEMTY_MASK, 1,
				GQSPI_TIMEOUT, 1);
	if (ret)
		log_warning("%s, Timeout\n", __func__);
}

static void zynqmp_qspi_chipselect(struct zynqmp_qspi_priv *priv, int is_on)
{
	u32 gqspi_fifo_reg = 0;

	log_debug("%s, assert: %d\n", __func__, is_on);

	if (is_on) {
		gqspi_fifo_reg = zynqmp_qspi_bus_select(priv);
		gqspi_fifo_reg |= GQSPI_SPI_MODE_SPI |
				  GQSPI_IMD_DATA_CS_ASSERT;
	} else {
		gqspi_fifo_reg = GQSPI_GFIFO_LOW_BUS;
		gqspi_fifo_reg |= GQSPI_IMD_DATA_CS_DEASSERT;
	}

	zynqmp_qspi_fill_gen_fifo(priv, gqspi_fifo_reg);
}

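/*
 * Tune the receive capture path for the requested bus clock. Up to 40 MHz
 * (37.5 MHz on Versal) the tap-delay bypass alone is sufficient; faster
 * clocks additionally need the loopback clock and/or data delay
 * adjustments programmed below, partly in the controller and partly in
 * the platform-level IOU_TAPDLY_BYPASS register.
 */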
static void zynqmp_qspi_set_tapdelay(struct udevice *bus, u32 baudrateval)
{
	struct zynqmp_qspi_plat *plat = dev_get_plat(bus);
	struct zynqmp_qspi_priv *priv = dev_get_priv(bus);
	struct zynqmp_qspi_regs *regs = priv->regs;
	u32 tapdlybypass = 0, lpbkdlyadj = 0, datadlyadj = 0, clk_rate;
	u32 reqhz = 0;

	clk_rate = plat->frequency;
	reqhz = (clk_rate / (GQSPI_BAUD_DIV_SHIFT << baudrateval));

	log_debug("%s, clk_rate: %d, baudrateval: %d, bus_clk: %d\n",
		  __func__, clk_rate, baudrateval, reqhz);

	if (!(IS_ENABLED(CONFIG_ARCH_VERSAL) ||
	      IS_ENABLED(CONFIG_ARCH_VERSAL_NET))) {
		if (reqhz <= GQSPI_FREQ_40MHZ) {
			tapdlybypass = TAP_DLY_BYPASS_LQSPI_RX_VALUE <<
				       TAP_DLY_BYPASS_LQSPI_RX_SHIFT;
		} else if (reqhz <= GQSPI_FREQ_100MHZ) {
			tapdlybypass = TAP_DLY_BYPASS_LQSPI_RX_VALUE <<
				       TAP_DLY_BYPASS_LQSPI_RX_SHIFT;
			lpbkdlyadj = GQSPI_LPBK_DLY_ADJ_LPBK_MASK;
			datadlyadj = (GQSPI_USE_DATA_DLY <<
				      GQSPI_USE_DATA_DLY_SHIFT) |
				     (GQSPI_DATA_DLY_ADJ_VALUE <<
				      GQSPI_DATA_DLY_ADJ_SHIFT);
		} else if (reqhz <= GQSPI_FREQ_150MHZ) {
			lpbkdlyadj = GQSPI_LPBK_DLY_ADJ_LPBK_MASK |
				     GQSPI_LPBK_DLY_ADJ_DLY_0;
		}
		zynqmp_mmio_write(IOU_TAPDLY_BYPASS_OFST,
				  IOU_TAPDLY_BYPASS_MASK, tapdlybypass);
	} else {
		if (reqhz <= GQSPI_FREQ_37_5MHZ) {
			tapdlybypass = TAP_DLY_BYPASS_LQSPI_RX_VALUE <<
				       TAP_DLY_BYPASS_LQSPI_RX_SHIFT;
		} else if (reqhz <= GQSPI_FREQ_100MHZ) {
			tapdlybypass = TAP_DLY_BYPASS_LQSPI_RX_VALUE <<
				       TAP_DLY_BYPASS_LQSPI_RX_SHIFT;
			lpbkdlyadj = GQSPI_LPBK_DLY_ADJ_LPBK_MASK;
			datadlyadj = GQSPI_USE_DATA_DLY <<
				     GQSPI_USE_DATA_DLY_SHIFT;
		} else if (reqhz <= GQSPI_FREQ_150MHZ) {
			lpbkdlyadj = GQSPI_LPBK_DLY_ADJ_LPBK_MASK |
				     (GQSPI_LPBK_DLY_ADJ_DLY_1 <<
				      GQSPI_LPBK_DLY_ADJ_DLY_1_SHIFT);
		}
		writel(tapdlybypass, IOU_TAPDLY_BYPASS_OFST);
	}
	writel(lpbkdlyadj, &regs->lpbkdly);
	writel(datadlyadj, &regs->gqspidlyadj);
}

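/*
 * The controller divides the reference clock by 2^(baud_rate_val + 1),
 * e.g. a 300 MHz reference with baud_rate_val = 2 gives a 37.5 MHz bus
 * clock. The loop below picks the smallest divider whose resulting
 * frequency does not exceed the requested speed.
 */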
359static int zynqmp_qspi_set_speed(struct udevice *bus, uint speed)
360{
Simon Glass0fd3d912020-12-22 19:30:28 -0700361 struct zynqmp_qspi_plat *plat = dev_get_plat(bus);
Siva Durga Prasad Paladugu22cca172018-07-04 17:31:23 +0530362 struct zynqmp_qspi_priv *priv = dev_get_priv(bus);
363 struct zynqmp_qspi_regs *regs = priv->regs;
364 u32 confr;
365 u8 baud_rate_val = 0;
366
Ibai Erkiaga3e891442023-10-13 13:37:27 +0100367 log_debug("%s, Speed: %d, Max: %d\n", __func__, speed, plat->frequency);
368
Siva Durga Prasad Paladugu22cca172018-07-04 17:31:23 +0530369 if (speed > plat->frequency)
370 speed = plat->frequency;
371
Brandon Maierd9aa19e2021-01-20 14:28:30 -0600372 if (plat->speed_hz != speed) {
373 /* Set the clock frequency */
374 /* If speed == 0, default to lowest speed */
Siva Durga Prasad Paladugu22cca172018-07-04 17:31:23 +0530375 while ((baud_rate_val < 8) &&
376 ((plat->frequency /
377 (2 << baud_rate_val)) > speed))
378 baud_rate_val++;
379
380 if (baud_rate_val > GQSPI_MAX_BAUD_RATE_VAL)
381 baud_rate_val = GQSPI_DFLT_BAUD_RATE_VAL;
382
383 plat->speed_hz = plat->frequency / (2 << baud_rate_val);
Siva Durga Prasad Paladugu22cca172018-07-04 17:31:23 +0530384
Brandon Maierd9aa19e2021-01-20 14:28:30 -0600385 confr = readl(&regs->confr);
386 confr &= ~GQSPI_BAUD_DIV_MASK;
387 confr |= (baud_rate_val << 3);
388 writel(confr, &regs->confr);
Brandon Maierd9aa19e2021-01-20 14:28:30 -0600389
Ibai Erkiaga3e891442023-10-13 13:37:27 +0100390 zynqmp_qspi_set_tapdelay(bus, baud_rate_val);
Brandon Maierd9aa19e2021-01-20 14:28:30 -0600391 }
Siva Durga Prasad Paladugu22cca172018-07-04 17:31:23 +0530392
393 return 0;
394}
395
static int zynqmp_qspi_probe(struct udevice *bus)
{
	struct zynqmp_qspi_plat *plat = dev_get_plat(bus);
	struct zynqmp_qspi_priv *priv = dev_get_priv(bus);
	struct clk clk;
	unsigned long clock;
	int ret;

	priv->regs = plat->regs;
	priv->dma_regs = plat->dma_regs;
	priv->io_mode = plat->io_mode;

	ret = clk_get_by_index(bus, 0, &clk);
	if (ret < 0) {
		dev_err(bus, "failed to get clock\n");
		return ret;
	}

	clock = clk_get_rate(&clk);
	if (IS_ERR_VALUE(clock)) {
		dev_err(bus, "failed to get rate\n");
		return clock;
	}

	ret = clk_enable(&clk);
	if (ret) {
		dev_err(bus, "failed to enable clock\n");
		return ret;
	}
	plat->frequency = clock;
	plat->speed_hz = plat->frequency / 2;

	/* Init the ZynqMP QSPI hardware */
	zynqmp_qspi_init_hw(priv);

	log_debug("%s, Reference clock frequency: %lu\n", __func__, clock);

	return 0;
}

static int zynqmp_qspi_set_mode(struct udevice *bus, uint mode)
{
	struct zynqmp_qspi_priv *priv = dev_get_priv(bus);
	struct zynqmp_qspi_regs *regs = priv->regs;
	u32 confr;

	log_debug("%s, 0x%X\n", __func__, mode);

	/* Set the SPI clock phase and polarity */
	confr = readl(&regs->confr);
	confr &= ~(GQSPI_CONFIG_CPHA_MASK | GQSPI_CONFIG_CPOL_MASK);

	if (mode & SPI_CPHA)
		confr |= GQSPI_CONFIG_CPHA_MASK;
	if (mode & SPI_CPOL)
		confr |= GQSPI_CONFIG_CPOL_MASK;

	writel(confr, &regs->confr);

	return 0;
}

static int zynqmp_qspi_fill_tx_fifo(struct zynqmp_qspi_priv *priv, u32 size)
{
	u32 data;
	int ret = 0;
	struct zynqmp_qspi_regs *regs = priv->regs;
	u32 *buf = (u32 *)priv->tx_buf;
	u32 len = size;

	while (size) {
		ret = wait_for_bit_le32(&regs->isr, GQSPI_IXR_TXNFULL_MASK, 1,
					GQSPI_TIMEOUT, 1);
		if (ret)
			return log_msg_ret("Timeout\n", ret);

		if (size >= 4) {
			writel(*buf, &regs->txd0r);
			buf++;
			size -= 4;
		} else {
			/* Pad the unused lanes of the last word with 1s */
			switch (size) {
			case 1:
				data = *((u8 *)buf);
				buf += 1;
				data |= GENMASK(31, 8);
				break;
			case 2:
				data = *((u16 *)buf);
				buf += 2;
				data |= GENMASK(31, 16);
				break;
			case 3:
				data = *buf;
				buf += 3;
				data |= GENMASK(31, 24);
				break;
			}
			writel(data, &regs->txd0r);
			size = 0;
		}
	}

	ret = wait_for_bit_le32(&regs->isr, GQSPI_IXR_TXFIFOEMPTY_MASK, 1,
				GQSPI_TIMEOUT, 1);
	if (ret)
		return log_msg_ret("Timeout\n", ret);

	priv->tx_buf += len;
	return 0;
}

static void zynqmp_qspi_genfifo_cmd(struct zynqmp_qspi_priv *priv)
{
	const struct spi_mem_op *op = priv->op;
	u32 gen_fifo_cmd;
	u8 i, dummy_cycles, addr;

	log_debug("%s, opcode: 0x%X, addr.nbytes: %d, dummy.nbytes: %d\n",
		  __func__, op->cmd.opcode, op->addr.nbytes, op->dummy.nbytes);

	/* Send opcode */
	gen_fifo_cmd = zynqmp_qspi_bus_select(priv);
	gen_fifo_cmd |= zynqmp_qspi_genfifo_mode(op->cmd.buswidth);
	gen_fifo_cmd |= GQSPI_GFIFO_TX;
	gen_fifo_cmd |= op->cmd.opcode;
	zynqmp_qspi_fill_gen_fifo(priv, gen_fifo_cmd);

	/* Send address */
	for (i = 0; i < op->addr.nbytes; i++) {
		addr = op->addr.val >> (8 * (op->addr.nbytes - i - 1));

		gen_fifo_cmd = zynqmp_qspi_bus_select(priv);
		gen_fifo_cmd |= zynqmp_qspi_genfifo_mode(op->addr.buswidth);
		gen_fifo_cmd |= GQSPI_GFIFO_TX;
		gen_fifo_cmd |= addr;

		zynqmp_qspi_fill_gen_fifo(priv, gen_fifo_cmd);
	}

	/* Send dummy cycles (dummy bytes converted to clock cycles) */
	if (op->dummy.nbytes) {
		dummy_cycles = op->dummy.nbytes * 8 / op->dummy.buswidth;

		gen_fifo_cmd = zynqmp_qspi_bus_select(priv);
		gen_fifo_cmd |= zynqmp_qspi_genfifo_mode(op->dummy.buswidth);
		gen_fifo_cmd &= ~(GQSPI_GFIFO_TX | GQSPI_GFIFO_RX);
		gen_fifo_cmd |= GQSPI_GFIFO_DATA_XFR_MASK;
		gen_fifo_cmd |= dummy_cycles;
		zynqmp_qspi_fill_gen_fifo(priv, gen_fifo_cmd);
	}
}

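/*
 * The GENFIFO immediate field is only 8 bits wide, so transfers longer
 * than 255 bytes are split into power-of-two chunks using the EXP bit
 * (immediate = exponent, chunk length = 2^immediate). A 4100-byte read,
 * for example, becomes one 4096-byte entry (exponent 12) followed by a
 * 4-byte immediate entry. Each call consumes one chunk from priv->len
 * and returns the exponent when EXP is set, the byte count otherwise.
 */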
static u32 zynqmp_qspi_calc_exp(struct zynqmp_qspi_priv *priv,
				u32 *gen_fifo_cmd)
{
	u32 expval = 8;
	u32 len;

	while (1) {
		if (priv->len > 255) {
			if (priv->len & (1 << expval)) {
				*gen_fifo_cmd &= ~GQSPI_GFIFO_IMD_MASK;
				*gen_fifo_cmd |= GQSPI_GFIFO_EXP_MASK;
				*gen_fifo_cmd |= expval;
				priv->len -= (1 << expval);
				return expval;
			}
			expval++;
		} else {
			*gen_fifo_cmd &= ~(GQSPI_GFIFO_IMD_MASK |
					   GQSPI_GFIFO_EXP_MASK);
			*gen_fifo_cmd |= (u8)priv->len;
			len = (u8)priv->len;
			priv->len = 0;
			return len;
		}
	}
}

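/*
 * TX data path: queue one GENFIFO entry per chunk, then push the payload
 * through the 32-bit TXD register via zynqmp_qspi_fill_tx_fifo(), which
 * also waits for the TX FIFO to drain before returning.
 */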
static int zynqmp_qspi_genfifo_fill_tx(struct zynqmp_qspi_priv *priv)
{
	u32 gen_fifo_cmd;
	u32 len;
	int ret = 0;

	log_debug("%s, length: %d\n", __func__, priv->len);

	gen_fifo_cmd = zynqmp_qspi_bus_select(priv);
	gen_fifo_cmd |= zynqmp_qspi_genfifo_mode(priv->op->data.buswidth);
	gen_fifo_cmd |= GQSPI_GFIFO_TX | GQSPI_GFIFO_DATA_XFR_MASK;

	while (priv->len) {
		len = zynqmp_qspi_calc_exp(priv, &gen_fifo_cmd);
		zynqmp_qspi_fill_gen_fifo(priv, gen_fifo_cmd);

		if (gen_fifo_cmd & GQSPI_GFIFO_EXP_MASK)
			ret = zynqmp_qspi_fill_tx_fifo(priv, 1 << len);
		else
			ret = zynqmp_qspi_fill_tx_fifo(priv, len);

		if (ret)
			return ret;
	}
	return ret;
}

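/*
 * IO (polled) receive path: for each GENFIFO chunk, manually start the
 * transfer and drain the RX FIFO word by word, busy-waiting in 1 us steps
 * until the chunk completes or the timeout expires.
 */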
static int zynqmp_qspi_start_io(struct zynqmp_qspi_priv *priv,
				u32 gen_fifo_cmd, u32 *buf)
{
	u32 len;
	u32 config_reg, ier, isr;
	u32 timeout = GQSPI_TIMEOUT;
	struct zynqmp_qspi_regs *regs = priv->regs;
	u32 last_bits;
	u32 *traverse = buf;

	while (priv->len) {
		len = zynqmp_qspi_calc_exp(priv, &gen_fifo_cmd);
		/* If exponent bit is set, reset immediate to be 2^len */
		if (gen_fifo_cmd & GQSPI_GFIFO_EXP_MASK)
			priv->bytes_to_receive = (1 << len);
		else
			priv->bytes_to_receive = len;
		zynqmp_qspi_fill_gen_fifo(priv, gen_fifo_cmd);

		/* Manual start */
		config_reg = readl(&regs->confr);
		config_reg |= GQSPI_STRT_GEN_FIFO;
		writel(config_reg, &regs->confr);
		/* Enable RX interrupts for IO mode */
		ier = readl(&regs->ier);
		ier |= GQSPI_IXR_ALL_MASK;
		writel(ier, &regs->ier);
		while (priv->bytes_to_receive && timeout) {
			isr = readl(&regs->isr);
			if (isr & GQSPI_IXR_RXNEMTY_MASK) {
				if (priv->bytes_to_receive >= 4) {
					*traverse = readl(&regs->drxr);
					traverse++;
					priv->bytes_to_receive -= 4;
				} else {
					last_bits = readl(&regs->drxr);
					memcpy(traverse, &last_bits,
					       priv->bytes_to_receive);
					priv->bytes_to_receive = 0;
				}
				timeout = GQSPI_TIMEOUT;
			} else {
				udelay(1);
				timeout--;
			}
		}

		if (!timeout) {
			log_warning("%s, Timeout\n", __func__);
			return -ETIMEDOUT;
		}
	}

	return 0;
}

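/*
 * DMA receive path. Requests of 512 MiB or more are split into 256 MiB
 * chunks, presumably to stay within the DMA size register's range. The
 * destination range is invalidated before the transfer, so that no dirty
 * cache line can be evicted over the incoming DMA data, and again after
 * completion before the CPU reads it. 'buf' must be 4-byte aligned;
 * unaligned callers bounce through zynqmp_qspi_genfifo_fill_rx().
 */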
static int zynqmp_qspi_start_dma(struct zynqmp_qspi_priv *priv,
				 u32 gen_fifo_cmd, u32 *buf)
{
	unsigned long addr;
	u32 size;
	u32 actuallen = priv->len;
	u32 totallen = priv->len;
	int ret = 0;
	struct zynqmp_qspi_dma_regs *dma_regs = priv->dma_regs;

	while (totallen) {
		if (totallen >= SZ_512M)
			priv->len = SZ_256M;
		else
			priv->len = totallen;

		totallen -= priv->len; /* Remaining bytes left to read */
		actuallen = priv->len; /* Bytes read in this chunk */

		writel(lower_32_bits((unsigned long)buf), &dma_regs->dmadst);
		writel(upper_32_bits((unsigned long)buf) & GENMASK(11, 0),
		       &dma_regs->dmadstmsb);
		writel(roundup(priv->len, GQSPI_DMA_ALIGN), &dma_regs->dmasize);
		writel(GQSPI_DMA_DST_I_STS_MASK, &dma_regs->dmaier);
		addr = (unsigned long)buf;
		size = roundup(priv->len, GQSPI_DMA_ALIGN);
		invalidate_dcache_range(addr, addr + size);

		while (priv->len) {
			zynqmp_qspi_calc_exp(priv, &gen_fifo_cmd);
			zynqmp_qspi_fill_gen_fifo(priv, gen_fifo_cmd);
		}

		ret = wait_for_bit_le32(&dma_regs->dmaisr,
					GQSPI_DMA_DST_I_STS_DONE, 1,
					GQSPI_TIMEOUT, 1);
		if (ret)
			return log_msg_ret("Timeout\n", ret);

		invalidate_dcache_range(addr, addr + size);

		writel(GQSPI_DMA_DST_I_STS_DONE, &dma_regs->dmaisr);

		if (buf != priv->rx_buf)
			memcpy(priv->rx_buf, buf, actuallen);

		buf = (u32 *)((u8 *)buf + actuallen);
		priv->rx_buf = (u8 *)priv->rx_buf + actuallen;
	}

	return 0;
}

static int zynqmp_qspi_genfifo_fill_rx(struct zynqmp_qspi_priv *priv)
{
	u32 gen_fifo_cmd;
	u32 *buf;
	u32 actuallen = priv->len;

	log_debug("%s, length: %d\n", __func__, priv->len);

	gen_fifo_cmd = zynqmp_qspi_bus_select(priv);
	gen_fifo_cmd |= zynqmp_qspi_genfifo_mode(priv->op->data.buswidth);
	gen_fifo_cmd |= GQSPI_GFIFO_RX | GQSPI_GFIFO_DATA_XFR_MASK;

	/*
	 * Use the caller's buffer directly if it is 4-byte aligned and the
	 * length is a multiple of 4 bytes (both required by the DMA engine),
	 * or if IO mode does the transfer anyway.
	 */
	if ((!((unsigned long)priv->rx_buf & (GQSPI_DMA_ALIGN - 1)) &&
	     !(actuallen % GQSPI_DMA_ALIGN)) || priv->io_mode) {
		buf = (u32 *)priv->rx_buf;
		if (priv->io_mode)
			return zynqmp_qspi_start_io(priv, gen_fifo_cmd, buf);
		else
			return zynqmp_qspi_start_dma(priv, gen_fifo_cmd, buf);
	}

	/* Otherwise bounce through a cache-aligned temporary buffer */
	ALLOC_CACHE_ALIGN_BUFFER(u8, tmp, roundup(priv->len, GQSPI_DMA_ALIGN));
	buf = (u32 *)tmp;
	return zynqmp_qspi_start_dma(priv, gen_fifo_cmd, buf);
}

static int zynqmp_qspi_claim_bus(struct udevice *dev)
{
	struct udevice *bus = dev->parent;
	struct zynqmp_qspi_priv *priv = dev_get_priv(bus);
	struct zynqmp_qspi_regs *regs = priv->regs;

	writel(GQSPI_ENABLE_ENABLE_MASK, &regs->enbr);

	return 0;
}

static int zynqmp_qspi_release_bus(struct udevice *dev)
{
	struct udevice *bus = dev->parent;
	struct zynqmp_qspi_priv *priv = dev_get_priv(bus);
	struct zynqmp_qspi_regs *regs = priv->regs;

	writel(~GQSPI_ENABLE_ENABLE_MASK, &regs->enbr);

	return 0;
}

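/*
 * spi-mem entry point: a single exec_op() call runs one complete flash
 * operation - assert CS, emit the opcode/address/dummy phases, move the
 * data phase in the requested direction, then deassert CS.
 */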
static int zynqmp_qspi_exec_op(struct spi_slave *slave,
			       const struct spi_mem_op *op)
{
	struct zynqmp_qspi_priv *priv = dev_get_priv(slave->dev->parent);
	int ret = 0;

	priv->op = op;
	priv->tx_buf = op->data.buf.out;
	priv->rx_buf = op->data.buf.in;
	priv->len = op->data.nbytes;

	zynqmp_qspi_chipselect(priv, 1);

	/* Send opcode, addr, dummy */
	zynqmp_qspi_genfifo_cmd(priv);

	/* Request the transfer */
	if (op->data.dir == SPI_MEM_DATA_IN)
		ret = zynqmp_qspi_genfifo_fill_rx(priv);
	else if (op->data.dir == SPI_MEM_DATA_OUT)
		ret = zynqmp_qspi_genfifo_fill_tx(priv);

	zynqmp_qspi_chipselect(priv, 0);

	return ret;
}

static const struct spi_controller_mem_ops zynqmp_qspi_mem_ops = {
	.exec_op = zynqmp_qspi_exec_op,
};

static const struct dm_spi_ops zynqmp_qspi_ops = {
	.claim_bus = zynqmp_qspi_claim_bus,
	.release_bus = zynqmp_qspi_release_bus,
	.set_speed = zynqmp_qspi_set_speed,
	.set_mode = zynqmp_qspi_set_mode,
	.mem_ops = &zynqmp_qspi_mem_ops,
};

static const struct udevice_id zynqmp_qspi_ids[] = {
	{ .compatible = "xlnx,zynqmp-qspi-1.0" },
	{ .compatible = "xlnx,versal-qspi-1.0" },
	{ }
};

U_BOOT_DRIVER(zynqmp_qspi) = {
	.name = "zynqmp_qspi",
	.id = UCLASS_SPI,
	.of_match = zynqmp_qspi_ids,
	.ops = &zynqmp_qspi_ops,
	.of_to_plat = zynqmp_qspi_of_to_plat,
	.plat_auto = sizeof(struct zynqmp_qspi_plat),
	.priv_auto = sizeof(struct zynqmp_qspi_priv),
	.probe = zynqmp_qspi_probe,
};