// SPDX-License-Identifier: GPL-2.0+
/*
 * Copyright (C) 2020 Amit Singh Tomar <amittomer25@gmail.com>
 *
 * Driver for the SD/MMC controller present on Actions Semi S700/S900 SoCs,
 * based on the Linux driver "drivers/mmc/host/owl-mmc.c".
 *
 * The BSEL bit (Bus or DMA Special Channel Selection) selects whether data
 * is transferred through the SDx_DAT register over the CPU AHB bus or via a
 * DMA channel. In practice only the external DMA channel works reliably, so
 * the DMA configuration bits used in this driver are taken from the vendor
 * source and are specific to MMC/SD.
 */
#include <common.h>
#include <clk.h>
#include <cpu_func.h>
#include <dm.h>
#include <errno.h>
#include <log.h>
#include <mmc.h>
#include <asm/io.h>
#include <linux/bitops.h>
#include <linux/delay.h>
#include <linux/err.h>
#include <linux/iopoll.h>

/*
 * SDC registers
 */
#define OWL_REG_SD_EN            0x0000
#define OWL_REG_SD_CTL           0x0004
#define OWL_REG_SD_STATE         0x0008
#define OWL_REG_SD_CMD           0x000c
#define OWL_REG_SD_ARG           0x0010
#define OWL_REG_SD_RSPBUF0       0x0014
#define OWL_REG_SD_RSPBUF1       0x0018
#define OWL_REG_SD_RSPBUF2       0x001c
#define OWL_REG_SD_RSPBUF3       0x0020
#define OWL_REG_SD_RSPBUF4       0x0024
#define OWL_REG_SD_DAT           0x0028
#define OWL_REG_SD_BLK_SIZE      0x002c
#define OWL_REG_SD_BLK_NUM       0x0030
#define OWL_REG_SD_BUF_SIZE      0x0034

/* SD_EN Bits */
#define OWL_SD_EN_RANE           BIT(31)
#define OWL_SD_EN_RESE           BIT(10)
#define OWL_SD_ENABLE            BIT(7)
#define OWL_SD_EN_BSEL           BIT(6)
#define OWL_SD_EN_DATAWID(x)     (((x) & 0x3) << 0)
#define OWL_SD_EN_DATAWID_MASK   0x03

/* SD_CTL Bits */
#define OWL_SD_CTL_TOUTEN        BIT(31)
#define OWL_SD_CTL_DELAY_MSK     GENMASK(23, 16)
#define OWL_SD_CTL_RDELAY(x)     (((x) & 0xf) << 20)
#define OWL_SD_CTL_WDELAY(x)     (((x) & 0xf) << 16)
#define OWL_SD_CTL_TS            BIT(7)
#define OWL_SD_CTL_LBE           BIT(6)
#define OWL_SD_CTL_TM(x)         (((x) & 0xf) << 0)

#define OWL_SD_DELAY_LOW_CLK     0x0f
#define OWL_SD_DELAY_MID_CLK     0x0a
#define OWL_SD_RDELAY_HIGH       0x08
#define OWL_SD_WDELAY_HIGH       0x09

/* SD_STATE Bits */
#define OWL_SD_STATE_DAT0S       BIT(7)
#define OWL_SD_STATE_CLNR        BIT(4)
#define OWL_SD_STATE_CRC7ER      BIT(0)

#define OWL_MMC_OCR              (MMC_VDD_32_33 | MMC_VDD_33_34 | \
                                  MMC_VDD_165_195)

#define DATA_TRANSFER_TIMEOUT    3000000
#define DMA_TRANSFER_TIMEOUT     5000000

/*
 * Simple DMA transfer operation defines for the MMC/SD card
 */
#define SD_DMA_CHANNEL(base, channel)   ((base) + 0x100 + 0x100 * (channel))

#define DMA_MODE                 0x0000
#define DMA_SOURCE               0x0004
#define DMA_DESTINATION          0x0008
#define DMA_FRAME_LEN            0x000C
#define DMA_FRAME_CNT            0x0010
#define DMA_START                0x0024

/* DMAx_MODE */
#define DMA_MODE_ST(x)           (((x) & 0x3) << 8)
#define DMA_MODE_ST_DEV          DMA_MODE_ST(0)
#define DMA_MODE_DT(x)           (((x) & 0x3) << 10)
#define DMA_MODE_DT_DCU          DMA_MODE_DT(2)
#define DMA_MODE_SAM(x)          (((x) & 0x3) << 16)
#define DMA_MODE_SAM_CONST       DMA_MODE_SAM(0)
#define DMA_MODE_DAM(x)          (((x) & 0x3) << 18)
#define DMA_MODE_DAM_INC         DMA_MODE_DAM(1)

#define DMA_ENABLE               0x1

struct owl_mmc_plat {
        struct mmc_config cfg;
        struct mmc mmc;
};

struct owl_mmc_priv {
        void *reg_base;
        void *dma_channel;
        struct clk clk;
        unsigned int clock;             /* Current clock */
        unsigned int dma_drq;           /* Trigger Source */
};

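/*
 * Program DMA channel 0 for a single frame of 'len' bytes between 'src' and
 * 'dst', using the trigger source stored in priv->dma_drq. The channel is
 * only configured here; the caller starts it via the DMA_START register.
 */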
static void owl_dma_config(struct owl_mmc_priv *priv, unsigned int src,
                           unsigned int dst, unsigned int len)
{
        unsigned int mode = priv->dma_drq;

        /* Set source and destination address modes */
        mode |= (DMA_MODE_ST_DEV | DMA_MODE_SAM_CONST | DMA_MODE_DT_DCU |
                 DMA_MODE_DAM_INC);

        writel(mode, SD_DMA_CHANNEL(priv->dma_channel, 0) + DMA_MODE);
        writel(src, SD_DMA_CHANNEL(priv->dma_channel, 0) + DMA_SOURCE);
        writel(dst, SD_DMA_CHANNEL(priv->dma_channel, 0) + DMA_DESTINATION);
        writel(len, SD_DMA_CHANNEL(priv->dma_channel, 0) + DMA_FRAME_LEN);
        writel(0x1, SD_DMA_CHANNEL(priv->dma_channel, 0) + DMA_FRAME_CNT);
}

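/*
 * Route data through the external DMA channel (BSEL), program block count,
 * block size and buffer size in the controller, then set up the DMA transfer
 * for the requested direction (including cache maintenance) and start it.
 */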
static void owl_mmc_prepare_data(struct owl_mmc_priv *priv,
                                 struct mmc_data *data)
{
        unsigned int total;
        u32 buf = 0;

        setbits_le32(priv->reg_base + OWL_REG_SD_EN, OWL_SD_EN_BSEL);

        writel(data->blocks, priv->reg_base + OWL_REG_SD_BLK_NUM);
        writel(data->blocksize, priv->reg_base + OWL_REG_SD_BLK_SIZE);
        total = data->blocksize * data->blocks;

        if (total < 512)
                writel(total, priv->reg_base + OWL_REG_SD_BUF_SIZE);
        else
                writel(512, priv->reg_base + OWL_REG_SD_BUF_SIZE);

        /* DMA STOP */
        writel(0x0, SD_DMA_CHANNEL(priv->dma_channel, 0) + DMA_START);

        if (data) {
                if (data->flags == MMC_DATA_READ) {
                        buf = (ulong) (data->dest);
                        owl_dma_config(priv, (ulong) priv->reg_base +
                                       OWL_REG_SD_DAT, buf, total);
                        invalidate_dcache_range(buf, buf + total);
                } else {
                        buf = (ulong) (data->src);
                        owl_dma_config(priv, buf, (ulong) priv->reg_base +
                                       OWL_REG_SD_DAT, total);
                        flush_dcache_range(buf, buf + total);
                }
                /* DMA START */
                writel(0x1, SD_DMA_CHANNEL(priv->dma_channel, 0) + DMA_START);
        }
}

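/*
 * Send a command, and optionally a data transfer, to the card: program the
 * transfer mode and command registers, wait for the transfer to finish,
 * check the error status and read back the response if one is expected.
 */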
static int owl_mmc_send_cmd(struct udevice *dev, struct mmc_cmd *cmd,
                            struct mmc_data *data)
{
        struct owl_mmc_priv *priv = dev_get_priv(dev);
        unsigned int cmd_rsp_mask = 0, mode, reg;
        int ret;

        setbits_le32(priv->reg_base + OWL_REG_SD_EN, OWL_SD_ENABLE);

        /* setup response */
        mode = 0;
        if (cmd->resp_type != MMC_RSP_NONE)
                cmd_rsp_mask = OWL_SD_STATE_CLNR | OWL_SD_STATE_CRC7ER;
        if (cmd->resp_type == MMC_RSP_R1) {
                if (data) {
                        if (data->flags == MMC_DATA_READ)
                                mode |= OWL_SD_CTL_TM(4);
                        else
                                mode |= OWL_SD_CTL_TM(5);
                } else {
                        mode |= OWL_SD_CTL_TM(1);
                }
        } else if (cmd->resp_type == MMC_RSP_R2) {
                mode = OWL_SD_CTL_TM(2);
        } else if (cmd->resp_type == MMC_RSP_R1b) {
                mode = OWL_SD_CTL_TM(3);
        } else if (cmd->resp_type == MMC_RSP_R3) {
                cmd_rsp_mask = OWL_SD_STATE_CLNR;
                mode = OWL_SD_CTL_TM(1);
        }

        /* Keep the current read/write delay settings */
        mode |= (readl(priv->reg_base + OWL_REG_SD_CTL) & (0xff << 16));

        /* setup command */
        writel(cmd->cmdidx, priv->reg_base + OWL_REG_SD_CMD);
        writel(cmd->cmdarg, priv->reg_base + OWL_REG_SD_ARG);

        /* Set LBE to send the clock at the end of the last read block */
        if (data)
                mode |= (OWL_SD_CTL_TS | OWL_SD_CTL_LBE | 0xE4000000);
        else
                mode |= OWL_SD_CTL_TS;

        if (data)
                owl_mmc_prepare_data(priv, data);

        /* Start transfer */
        writel(mode, priv->reg_base + OWL_REG_SD_CTL);

        ret = readl_poll_timeout(priv->reg_base + OWL_REG_SD_CTL, reg,
                                 !(reg & OWL_SD_CTL_TS), DATA_TRANSFER_TIMEOUT);

        if (ret == -ETIMEDOUT) {
                debug("error: data transfer timeout\n");
                return ret;
        }

        reg = readl(priv->reg_base + OWL_REG_SD_STATE) & cmd_rsp_mask;
        if (cmd->resp_type & MMC_RSP_PRESENT) {
                if (reg & OWL_SD_STATE_CLNR) {
                        printf("Error CMD_NO_RSP\n");
                        return -1;
                }

                if (reg & OWL_SD_STATE_CRC7ER) {
                        printf("Error CMD_RSP_CRC\n");
                        return -1;
                }

                if (cmd->resp_type & MMC_RSP_136) {
                        cmd->response[3] = readl(priv->reg_base + OWL_REG_SD_RSPBUF0);
                        cmd->response[2] = readl(priv->reg_base + OWL_REG_SD_RSPBUF1);
                        cmd->response[1] = readl(priv->reg_base + OWL_REG_SD_RSPBUF2);
                        cmd->response[0] = readl(priv->reg_base + OWL_REG_SD_RSPBUF3);
                } else {
                        u32 rsp[2];

                        rsp[0] = readl(priv->reg_base + OWL_REG_SD_RSPBUF0);
                        rsp[1] = readl(priv->reg_base + OWL_REG_SD_RSPBUF1);
                        cmd->response[0] = rsp[1] << 24 | rsp[0] >> 8;
                        cmd->response[1] = rsp[1] >> 8;
                }
        }

        if (data) {
                ret = readl_poll_timeout(SD_DMA_CHANNEL(priv->dma_channel, 0) + DMA_START,
                                         reg, !(reg & DMA_ENABLE), DMA_TRANSFER_TIMEOUT);

                if (ret == -ETIMEDOUT) {
                        debug("error: DMA transfer timeout\n");
                        return ret;
                }

                /* DMA STOP */
                writel(0x0, SD_DMA_CHANNEL(priv->dma_channel, 0) + DMA_START);
                /* Transmission STOP */
                while (readl(priv->reg_base + OWL_REG_SD_CTL) & OWL_SD_CTL_TS)
                        clrbits_le32(priv->reg_base + OWL_REG_SD_CTL,
                                     OWL_SD_CTL_TS);
        }

        return 0;
}

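/*
 * Select the RDELAY/WDELAY values appropriate for the requested clock rate.
 */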
static int owl_mmc_clk_set(struct owl_mmc_priv *priv, int rate)
{
        u32 reg, wdelay, rdelay;

        reg = readl(priv->reg_base + OWL_REG_SD_CTL);
        reg &= ~OWL_SD_CTL_DELAY_MSK;

        /* Set RDELAY and WDELAY based on the clock */
        if (rate <= 1000000) {
                rdelay = wdelay = OWL_SD_DELAY_LOW_CLK;
        } else if ((rate > 1000000) && (rate <= 26000000)) {
                rdelay = wdelay = OWL_SD_DELAY_MID_CLK;
        } else if ((rate > 26000000) && (rate <= 52000000)) {
                rdelay = OWL_SD_RDELAY_HIGH;
                wdelay = OWL_SD_WDELAY_HIGH;
        } else {
                debug("SD clock rate not supported\n");
                return -EINVAL;
        }

        writel(reg | OWL_SD_CTL_RDELAY(rdelay) | OWL_SD_CTL_WDELAY(wdelay),
               priv->reg_base + OWL_REG_SD_CTL);

        return 0;
}

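/*
 * Apply the MMC core settings: update the delay chains and module clock when
 * the clock changes, gate or ungate the clock and program the bus width.
 */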
static int owl_mmc_set_ios(struct udevice *dev)
{
        struct owl_mmc_priv *priv = dev_get_priv(dev);
        struct owl_mmc_plat *plat = dev_get_plat(dev);
        struct mmc *mmc = &plat->mmc;
        u32 reg, ret;

        if (mmc->clock != priv->clock) {
                priv->clock = mmc->clock;
                ret = owl_mmc_clk_set(priv, mmc->clock);
                if (IS_ERR_VALUE(ret))
                        return ret;

                ret = clk_set_rate(&priv->clk, mmc->clock);
                if (IS_ERR_VALUE(ret))
                        return ret;
        }

        if (mmc->clk_disable)
                ret = clk_disable(&priv->clk);
        else
                ret = clk_enable(&priv->clk);
        if (ret)
                return ret;

        /* Set the bus width */
        reg = readl(priv->reg_base + OWL_REG_SD_EN);
        reg &= ~OWL_SD_EN_DATAWID_MASK;
        if (mmc->bus_width == 8)
                reg |= OWL_SD_EN_DATAWID(2);
        else if (mmc->bus_width == 4)
                reg |= OWL_SD_EN_DATAWID(1);

        writel(reg, priv->reg_base + OWL_REG_SD_EN);

        return 0;
}

static const struct dm_mmc_ops owl_mmc_ops = {
        .send_cmd       = owl_mmc_send_cmd,
        .set_ios        = owl_mmc_set_ios,
};

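/*
 * Fill in the host configuration and parse the register base, the "dmas"
 * phandle (DMA base address and trigger source) and the module clock from
 * the device tree.
 */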
static int owl_mmc_probe(struct udevice *dev)
{
        struct mmc_uclass_priv *upriv = dev_get_uclass_priv(dev);
        struct owl_mmc_plat *plat = dev_get_plat(dev);
        struct owl_mmc_priv *priv = dev_get_priv(dev);
        struct mmc_config *cfg = &plat->cfg;
        struct ofnode_phandle_args args;
        int ret;
        fdt_addr_t addr;

        cfg->name = dev->name;
        cfg->voltages = OWL_MMC_OCR;
        cfg->f_min = 400000;
        cfg->f_max = 52000000;
        cfg->b_max = 512;
        cfg->host_caps = MMC_MODE_HS | MMC_MODE_HS_52MHz;

        ret = mmc_of_parse(dev, cfg);
        if (ret)
                return ret;

        addr = dev_read_addr(dev);
        if (addr == FDT_ADDR_T_NONE)
                return -EINVAL;

        priv->reg_base = (void *)addr;

        ret = dev_read_phandle_with_args(dev, "dmas", "#dma-cells", 0, 0,
                                         &args);
        if (ret)
                return ret;

        priv->dma_channel = (void *)ofnode_get_addr(args.node);
        priv->dma_drq = args.args[0];

        ret = clk_get_by_index(dev, 0, &priv->clk);
        if (ret) {
                debug("clk_get_by_index() failed: %d\n", ret);
                return ret;
        }

        upriv->mmc = &plat->mmc;

        return 0;
}

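/* Bind the generic MMC device for this controller instance. */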
static int owl_mmc_bind(struct udevice *dev)
{
        struct owl_mmc_plat *plat = dev_get_plat(dev);

        return mmc_bind(dev, &plat->mmc, &plat->cfg);
}

static const struct udevice_id owl_mmc_ids[] = {
        { .compatible = "actions,s700-mmc" },
        { .compatible = "actions,owl-mmc" },
        { }
};

U_BOOT_DRIVER(owl_mmc_drv) = {
        .name = "owl_mmc",
        .id = UCLASS_MMC,
        .of_match = owl_mmc_ids,
        .bind = owl_mmc_bind,
        .probe = owl_mmc_probe,
        .ops = &owl_mmc_ops,
        .plat_auto = sizeof(struct owl_mmc_plat),
        .priv_auto = sizeof(struct owl_mmc_priv),
};