// SPDX-License-Identifier: GPL-2.0+
/*
 * Copyright (C) 2012
 * Altera Corporation <www.altera.com>
 */

#include <common.h>
#include <clk.h>
#include <log.h>
#include <dm.h>
#include <fdtdec.h>
#include <malloc.h>
#include <reset.h>
#include <spi.h>
#include <spi-mem.h>
#include <dm/device_compat.h>
#include <linux/err.h>
#include <linux/errno.h>
#include <linux/io.h>
#include <linux/sizes.h>
#include <linux/time.h>
#include <zynqmp_firmware.h>
#include "cadence_qspi.h"
#include <dt-bindings/power/xlnx-versal-power.h>

#define CQSPI_STIG_READ			0
#define CQSPI_STIG_WRITE		1
#define CQSPI_READ			2
#define CQSPI_WRITE			3

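/*
 * Default implementations of the platform hooks below. SoC-specific glue
 * (e.g. the Versal OSPI support) can override these weak symbols to provide
 * DMA reads, a flash reset sequence or a different flash subnode lookup.
 */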
__weak int cadence_qspi_apb_dma_read(struct cadence_spi_priv *priv,
				     const struct spi_mem_op *op)
{
	return 0;
}

__weak int cadence_qspi_versal_flash_reset(struct udevice *dev)
{
	return 0;
}

__weak ofnode cadence_qspi_get_subnode(struct udevice *dev)
{
	return dev_read_first_subnode(dev);
}

static int cadence_spi_write_speed(struct udevice *bus, uint hz)
{
	struct cadence_spi_priv *priv = dev_get_priv(bus);

	cadence_qspi_apb_config_baudrate_div(priv->regbase,
					     priv->ref_clk_hz, hz);

	/* Reconfigure delay timing if speed is changed. */
	cadence_qspi_apb_delay(priv->regbase, priv->ref_clk_hz, hz,
			       priv->tshsl_ns, priv->tsd2d_ns,
			       priv->tchsh_ns, priv->tslch_ns);

	return 0;
}

static int cadence_spi_read_id(struct cadence_spi_priv *priv, u8 len,
			       u8 *idcode)
{
	int err;

	struct spi_mem_op op = SPI_MEM_OP(SPI_MEM_OP_CMD(0x9F, 1),
					  SPI_MEM_OP_NO_ADDR,
					  SPI_MEM_OP_NO_DUMMY,
					  SPI_MEM_OP_DATA_IN(len, idcode, 1));

	err = cadence_qspi_apb_command_read_setup(priv, &op);
	if (!err)
		err = cadence_qspi_apb_command_read(priv, &op);

	return err;
}

/* Calibration sequence to determine the read data capture delay register */
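/*
 * The idea, as implemented below: read the flash ID at a slow, known-good
 * 1 MHz clock to obtain a golden value, then switch to the requested clock
 * and sweep the capture delay from 0 to CQSPI_READ_CAPTURE_MAX_DELAY - 1,
 * re-reading the ID at each step. The delay settings that return the golden
 * ID form a contiguous window, and the midpoint of that window is programmed
 * as the final read data capture delay.
 */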
static int spi_calibration(struct udevice *bus, uint hz)
{
	struct cadence_spi_priv *priv = dev_get_priv(bus);
	void *base = priv->regbase;
	unsigned int idcode = 0, temp = 0;
	int err = 0, i, range_lo = -1, range_hi = -1;

	/* Start with the slowest clock (1 MHz) */
	cadence_spi_write_speed(bus, 1000000);

	/* Configure the read data capture delay register to 0 */
	cadence_qspi_apb_readdata_capture(base, 1, 0);

	/* Enable QSPI */
	cadence_qspi_apb_controller_enable(base);

	/* Read the ID, which will be our golden value */
	err = cadence_spi_read_id(priv, 3, (u8 *)&idcode);
	if (err) {
		puts("SF: Calibration failed (read)\n");
		return err;
	}

	/* Switch back to the intended clock and find the valid delay range */
	cadence_spi_write_speed(bus, hz);
	for (i = 0; i < CQSPI_READ_CAPTURE_MAX_DELAY; i++) {
		/* Disable QSPI */
		cadence_qspi_apb_controller_disable(base);

		/* Reconfigure the read data capture delay register */
		cadence_qspi_apb_readdata_capture(base, 1, i);

		/* Re-enable QSPI */
		cadence_qspi_apb_controller_enable(base);

		/* Issue an RDID to get the ID value */
		err = cadence_spi_read_id(priv, 3, (u8 *)&temp);
		if (err) {
			puts("SF: Calibration failed (read)\n");
			return err;
		}

		/* Search for the low end of the range */
		if (range_lo == -1 && temp == idcode) {
			range_lo = i;
			continue;
		}

		/* Search for the high end of the range */
		if (range_lo != -1 && temp != idcode) {
			range_hi = i - 1;
			break;
		}
		range_hi = i;
	}

	if (range_lo == -1) {
		puts("SF: Calibration failed (low range)\n");
		return err;
	}

	/* Disable QSPI for subsequent initialization */
	cadence_qspi_apb_controller_disable(base);

	/* Configure the final value for the read data capture delay register */
	cadence_qspi_apb_readdata_capture(base, 1, (range_hi + range_lo) / 2);
	debug("SF: Read data capture delay calibrated to %i (%i - %i)\n",
	      (range_hi + range_lo) / 2, range_lo, range_hi);

	/* Remember settings so we recalibrate only when speed or CS changes */
	priv->qspi_calibrated_hz = hz;
	priv->qspi_calibrated_cs = spi_chip_select(bus);

	return 0;
}

static int cadence_spi_set_speed(struct udevice *bus, uint hz)
{
	struct cadence_spi_priv *priv = dev_get_priv(bus);
	int err;

	if (!hz || hz > priv->max_hz)
		hz = priv->max_hz;
	/* Disable QSPI */
	cadence_qspi_apb_controller_disable(priv->regbase);

	/*
	 * If the device tree already provides a read delay value, use that
	 * instead of calibrating.
	 */
	if (priv->read_delay >= 0) {
		cadence_spi_write_speed(bus, hz);
		cadence_qspi_apb_readdata_capture(priv->regbase, 1,
						  priv->read_delay);
	} else if (priv->previous_hz != hz ||
		   priv->qspi_calibrated_hz != hz ||
		   priv->qspi_calibrated_cs != spi_chip_select(bus)) {
		/*
		 * Calibration is required when the current SCLK speed, the
		 * requested SCLK speed or the chip select changes.
		 */
		err = spi_calibration(bus, hz);
		if (err)
			return err;

		/* Prevent a calibration run when the request matches the previous one */
		priv->previous_hz = hz;
	}

	/* Enable QSPI */
	cadence_qspi_apb_controller_enable(priv->regbase);

	debug("%s: speed=%d\n", __func__, hz);

	return 0;
}

static int cadence_spi_probe(struct udevice *bus)
{
	struct cadence_spi_plat *plat = dev_get_plat(bus);
	struct cadence_spi_priv *priv = dev_get_priv(bus);
	struct clk clk;
	int ret;

	priv->regbase = plat->regbase;
	priv->ahbbase = plat->ahbbase;
	priv->is_dma = plat->is_dma;
	priv->is_decoded_cs = plat->is_decoded_cs;
	priv->fifo_depth = plat->fifo_depth;
	priv->fifo_width = plat->fifo_width;
	priv->trigger_address = plat->trigger_address;
	priv->read_delay = plat->read_delay;
	priv->ahbsize = plat->ahbsize;
	priv->max_hz = plat->max_hz;

	priv->page_size = plat->page_size;
	priv->block_size = plat->block_size;
	priv->tshsl_ns = plat->tshsl_ns;
	priv->tsd2d_ns = plat->tsd2d_ns;
	priv->tchsh_ns = plat->tchsh_ns;
	priv->tslch_ns = plat->tslch_ns;

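	/*
	 * On platforms with the Xilinx firmware interface, request the OSPI
	 * device node from the platform firmware before touching the
	 * controller clocks and registers.
	 */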
	if (IS_ENABLED(CONFIG_ZYNQMP_FIRMWARE))
		xilinx_pm_request(PM_REQUEST_NODE, PM_DEV_OSPI,
				  ZYNQMP_PM_CAPABILITY_ACCESS, ZYNQMP_PM_MAX_QOS,
				  ZYNQMP_PM_REQUEST_ACK_NO, NULL);

	if (priv->ref_clk_hz == 0) {
		ret = clk_get_by_index(bus, 0, &clk);
		if (ret) {
#ifdef CONFIG_HAS_CQSPI_REF_CLK
			priv->ref_clk_hz = CONFIG_CQSPI_REF_CLK;
#elif defined(CONFIG_ARCH_SOCFPGA)
			priv->ref_clk_hz = cm_get_qspi_controller_clk_hz();
#else
			return ret;
#endif
		} else {
			priv->ref_clk_hz = clk_get_rate(&clk);
			if (IS_ERR_VALUE(priv->ref_clk_hz))
				return priv->ref_clk_hz;
		}
	}

	priv->resets = devm_reset_bulk_get_optional(bus);
	if (priv->resets)
		reset_deassert_bulk(priv->resets);

	if (!priv->qspi_is_init) {
		cadence_qspi_apb_controller_init(priv);
		priv->qspi_is_init = 1;
	}

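	/*
	 * DIV_ROUND_UP(NSEC_PER_SEC, ref_clk_hz) is one reference clock
	 * period in nanoseconds (rounded up), so wr_delay works out to
	 * roughly 50 reference clock cycles expressed in ns.
	 */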
	priv->wr_delay = 50 * DIV_ROUND_UP(NSEC_PER_SEC, priv->ref_clk_hz);

	/* Versal and Versal-NET use SPI calibration to set the read delay */
	if (CONFIG_IS_ENABLED(ARCH_VERSAL) ||
	    CONFIG_IS_ENABLED(ARCH_VERSAL_NET))
		if (priv->read_delay >= 0)
			priv->read_delay = -1;

	/* Reset the OSPI flash device */
	return cadence_qspi_versal_flash_reset(bus);
}

static int cadence_spi_remove(struct udevice *dev)
{
	struct cadence_spi_priv *priv = dev_get_priv(dev);
	int ret = 0;

	if (priv->resets)
		ret = reset_release_bulk(priv->resets);

	return ret;
}

static int cadence_spi_set_mode(struct udevice *bus, uint mode)
{
	struct cadence_spi_priv *priv = dev_get_priv(bus);

	/* Disable QSPI */
	cadence_qspi_apb_controller_disable(priv->regbase);

	/* Set SPI mode */
	cadence_qspi_apb_set_clk_mode(priv->regbase, mode);

	/* Enable the Direct Access Controller */
	if (priv->use_dac_mode)
		cadence_qspi_apb_dac_mode_enable(priv->regbase);

	/* Enable QSPI */
	cadence_qspi_apb_controller_enable(priv->regbase);

	return 0;
}

static int cadence_spi_mem_exec_op(struct spi_slave *spi,
				   const struct spi_mem_op *op)
{
	struct udevice *bus = spi->dev->parent;
	struct cadence_spi_priv *priv = dev_get_priv(bus);
	void *base = priv->regbase;
	int err = 0;
	u32 mode;

	/* Set chip select */
	cadence_qspi_apb_chipselect(base, spi_chip_select(spi->dev),
				    priv->is_decoded_cs);

	if (op->data.dir == SPI_MEM_DATA_IN && op->data.buf.in) {
		/*
		 * Reads in DAC mode are forced to a minimum of 4 bytes, which
		 * some flash devices do not support for register reads, so
		 * prefer STIG mode for such small reads.
		 */
		if (op->data.nbytes <= CQSPI_STIG_DATA_LEN_MAX)
			mode = CQSPI_STIG_READ;
		else
			mode = CQSPI_READ;
	} else {
		if (op->data.nbytes <= CQSPI_STIG_DATA_LEN_MAX)
			mode = CQSPI_STIG_WRITE;
		else
			mode = CQSPI_WRITE;
	}

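	/*
	 * Small transfers (up to CQSPI_STIG_DATA_LEN_MAX bytes) go through
	 * the STIG (software triggered instruction generator) command
	 * registers; anything larger uses the indirect/DAC read and write
	 * paths, with DMA used for reads when the platform provides it.
	 */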
	switch (mode) {
	case CQSPI_STIG_READ:
		err = cadence_qspi_apb_command_read_setup(priv, op);
		if (!err)
			err = cadence_qspi_apb_command_read(priv, op);
		break;
	case CQSPI_STIG_WRITE:
		err = cadence_qspi_apb_command_write_setup(priv, op);
		if (!err)
			err = cadence_qspi_apb_command_write(priv, op);
		break;
	case CQSPI_READ:
		err = cadence_qspi_apb_read_setup(priv, op);
		if (!err) {
			if (priv->is_dma)
				err = cadence_qspi_apb_dma_read(priv, op);
			else
				err = cadence_qspi_apb_read_execute(priv, op);
		}
		break;
	case CQSPI_WRITE:
		err = cadence_qspi_apb_write_setup(priv, op);
		if (!err)
			err = cadence_qspi_apb_write_execute(priv, op);
		break;
	default:
		err = -1;
		break;
	}

	return err;
}

static bool cadence_spi_mem_supports_op(struct spi_slave *slave,
					const struct spi_mem_op *op)
{
	bool all_true, all_false;

	/*
	 * op->dummy.dtr is required for converting nbytes into ncycles.
	 * Also, don't check the dtr field of any op phase with zero nbytes.
	 */
	all_true = op->cmd.dtr &&
		   (!op->addr.nbytes || op->addr.dtr) &&
		   (!op->dummy.nbytes || op->dummy.dtr) &&
		   (!op->data.nbytes || op->data.dtr);

	all_false = !op->cmd.dtr && !op->addr.dtr && !op->dummy.dtr &&
		    !op->data.dtr;

	/* Mixed DTR modes are not supported. */
	if (!(all_true || all_false))
		return false;

	if (all_true)
		return spi_mem_dtr_supports_op(slave, op);
	else
		return spi_mem_default_supports_op(slave, op);
}

static int cadence_spi_of_to_plat(struct udevice *bus)
{
	struct cadence_spi_plat *plat = dev_get_plat(bus);
	struct cadence_spi_priv *priv = dev_get_priv(bus);
	ofnode subnode;

	plat->regbase = devfdt_get_addr_index_ptr(bus, 0);
	plat->ahbbase = devfdt_get_addr_size_index_ptr(bus, 1, &plat->ahbsize);
	plat->is_decoded_cs = dev_read_bool(bus, "cdns,is-decoded-cs");
	plat->fifo_depth = dev_read_u32_default(bus, "cdns,fifo-depth", 128);
	plat->fifo_width = dev_read_u32_default(bus, "cdns,fifo-width", 4);
	plat->trigger_address = dev_read_u32_default(bus,
						     "cdns,trigger-address",
						     0);
	/* Use DAC mode only when the MMIO window is at least 8M wide */
	if (plat->ahbsize >= SZ_8M)
		priv->use_dac_mode = true;

	plat->is_dma = dev_read_bool(bus, "cdns,is-dma");

	/* All other parameters are embedded in the child node */
	subnode = cadence_qspi_get_subnode(bus);
	if (!ofnode_valid(subnode)) {
		printf("Error: subnode with SPI flash config missing!\n");
		return -ENODEV;
	}

	/* Use 500 kHz as a suitable default */
	plat->max_hz = ofnode_read_u32_default(subnode, "spi-max-frequency",
					       500000);

	/* Read other parameters from DT */
	plat->page_size = ofnode_read_u32_default(subnode, "page-size", 256);
	plat->block_size = ofnode_read_u32_default(subnode, "block-size", 16);
	plat->tshsl_ns = ofnode_read_u32_default(subnode, "cdns,tshsl-ns",
						 200);
	plat->tsd2d_ns = ofnode_read_u32_default(subnode, "cdns,tsd2d-ns",
						 255);
	plat->tchsh_ns = ofnode_read_u32_default(subnode, "cdns,tchsh-ns", 20);
	plat->tslch_ns = ofnode_read_u32_default(subnode, "cdns,tslch-ns", 20);
	/*
	 * The read delay should be an unsigned value, but we use a signed
	 * integer so that a negative value can indicate that the device tree
	 * did not specify one and the calibration sequence must be run to
	 * determine it.
	 */
	plat->read_delay = ofnode_read_s32_default(subnode, "cdns,read-delay",
						   -1);

	debug("%s: regbase=%p ahbbase=%p max-frequency=%d page-size=%d\n",
	      __func__, plat->regbase, plat->ahbbase, plat->max_hz,
	      plat->page_size);

	return 0;
}
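
/*
 * Illustrative device tree fragment for the properties consumed above; the
 * node names, addresses and values are placeholders, not taken from any
 * particular board:
 *
 *	spi@ff705000 {
 *		compatible = "cdns,qspi-nor";
 *		reg = <0xff705000 0x1000>, <0xffa00000 0x1000000>;
 *		cdns,fifo-depth = <128>;
 *		cdns,fifo-width = <4>;
 *		cdns,trigger-address = <0x0>;
 *		#address-cells = <1>;
 *		#size-cells = <0>;
 *
 *		flash@0 {
 *			compatible = "jedec,spi-nor";
 *			reg = <0>;
 *			spi-max-frequency = <50000000>;
 *			cdns,read-delay = <4>;
 *			cdns,tshsl-ns = <200>;
 *			cdns,tsd2d-ns = <255>;
 *			cdns,tchsh-ns = <20>;
 *			cdns,tslch-ns = <20>;
 *		};
 *	};
 */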

static const struct spi_controller_mem_ops cadence_spi_mem_ops = {
	.exec_op = cadence_spi_mem_exec_op,
	.supports_op = cadence_spi_mem_supports_op,
};

static const struct dm_spi_ops cadence_spi_ops = {
	.set_speed = cadence_spi_set_speed,
	.set_mode = cadence_spi_set_mode,
	.mem_ops = &cadence_spi_mem_ops,
	/*
	 * cs_info is not needed, since we require all chip selects to be
	 * in the device tree explicitly
	 */
};

static const struct udevice_id cadence_spi_ids[] = {
	{ .compatible = "cdns,qspi-nor" },
	{ .compatible = "ti,am654-ospi" },
	{ }
};

U_BOOT_DRIVER(cadence_spi) = {
	.name = "cadence_spi",
	.id = UCLASS_SPI,
	.of_match = cadence_spi_ids,
	.ops = &cadence_spi_ops,
	.of_to_plat = cadence_spi_of_to_plat,
	.plat_auto = sizeof(struct cadence_spi_plat),
	.priv_auto = sizeof(struct cadence_spi_priv),
	.probe = cadence_spi_probe,
	.remove = cadence_spi_remove,
	.flags = DM_FLAG_OS_PREPARE,
};