// SPDX-License-Identifier: GPL-2.0+
/*
 * Direct Memory Access U-Class Simulation driver
 *
 * Copyright (C) 2018 Texas Instruments Incorporated <www.ti.com>
 *
 * Author: Grygorii Strashko <grygorii.strashko@ti.com>
 */

#include <common.h>
#include <dm.h>
#include <log.h>
#include <malloc.h>
#include <dm/read.h>
#include <dma-uclass.h>
#include <dt-structs.h>
#include <errno.h>

#define SANDBOX_DMA_CH_CNT	3
#define SANDBOX_DMA_BUF_SIZE	1024

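/* Per-channel state of the simulated DMA controller */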
struct sandbox_dma_chan {
	struct sandbox_dma_dev *ud;
	char name[20];
	u32 id;
	enum dma_direction dir;
	bool in_use;
	bool enabled;
};

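/*
 * Device-wide state: a single internal buffer stands in for the "hardware",
 * so data written by sandbox_dma_send() is handed back by
 * sandbox_dma_receive().
 */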
struct sandbox_dma_dev {
	struct device *dev;
	u32 ch_count;
	struct sandbox_dma_chan channels[SANDBOX_DMA_CH_CNT];
	uchar buf[SANDBOX_DMA_BUF_SIZE];
	uchar *buf_rx;
	size_t data_len;
	u32 meta;
};

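/* Plain memory-to-memory copy; the direction argument is not used here */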
static int sandbox_dma_transfer(struct udevice *dev, int direction,
				void *dst, void *src, size_t len)
{
	memcpy(dst, src, len);

	return 0;
}

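/*
 * Translate a device-tree phandle argument into a channel id and assign a
 * fixed direction per channel: 0 = MEM_TO_MEM, 1 = MEM_TO_DEV, 2 = DEV_TO_MEM.
 */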
static int sandbox_dma_of_xlate(struct dma *dma,
				struct ofnode_phandle_args *args)
{
	struct sandbox_dma_dev *ud = dev_get_priv(dma->dev);
	struct sandbox_dma_chan *uc;

	debug("%s(dma id=%u)\n", __func__, args->args[0]);

	if (args->args[0] >= SANDBOX_DMA_CH_CNT)
		return -EINVAL;

	dma->id = args->args[0];

	uc = &ud->channels[dma->id];

	if (dma->id == 1)
		uc->dir = DMA_MEM_TO_DEV;
	else if (dma->id == 2)
		uc->dir = DMA_DEV_TO_MEM;
	else
		uc->dir = DMA_MEM_TO_MEM;
	debug("%s(dma id=%lu dir=%d)\n", __func__, dma->id, uc->dir);

	return 0;
}

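/* Reserve a channel; a channel already in use is rejected with -EBUSY */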
static int sandbox_dma_request(struct dma *dma)
{
	struct sandbox_dma_dev *ud = dev_get_priv(dma->dev);
	struct sandbox_dma_chan *uc;

	if (dma->id >= SANDBOX_DMA_CH_CNT)
		return -EINVAL;

	uc = &ud->channels[dma->id];
	if (uc->in_use)
		return -EBUSY;

	uc->in_use = true;
	debug("%s(dma id=%lu in_use=%d)\n", __func__, dma->id, uc->in_use);

	return 0;
}

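/* Release a channel and drop any pending receive buffer and data */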
static int sandbox_dma_rfree(struct dma *dma)
{
	struct sandbox_dma_dev *ud = dev_get_priv(dma->dev);
	struct sandbox_dma_chan *uc;

	if (dma->id >= SANDBOX_DMA_CH_CNT)
		return -EINVAL;

	uc = &ud->channels[dma->id];
	if (!uc->in_use)
		return -EINVAL;

	uc->in_use = false;
	ud->buf_rx = NULL;
	ud->data_len = 0;
	debug("%s(dma id=%lu in_use=%d)\n", __func__, dma->id, uc->in_use);

	return 0;
}

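/* Enable a requested channel; enabling an already enabled channel fails */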
static int sandbox_dma_enable(struct dma *dma)
{
	struct sandbox_dma_dev *ud = dev_get_priv(dma->dev);
	struct sandbox_dma_chan *uc;

	if (dma->id >= SANDBOX_DMA_CH_CNT)
		return -EINVAL;

	uc = &ud->channels[dma->id];
	if (!uc->in_use)
		return -EINVAL;
	if (uc->enabled)
		return -EINVAL;

	uc->enabled = true;
	debug("%s(dma id=%lu enabled=%d)\n", __func__, dma->id, uc->enabled);

	return 0;
}

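/* Disable an enabled channel */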
static int sandbox_dma_disable(struct dma *dma)
{
	struct sandbox_dma_dev *ud = dev_get_priv(dma->dev);
	struct sandbox_dma_chan *uc;

	if (dma->id >= SANDBOX_DMA_CH_CNT)
		return -EINVAL;

	uc = &ud->channels[dma->id];
	if (!uc->in_use)
		return -EINVAL;
	if (!uc->enabled)
		return -EINVAL;

	uc->enabled = false;
	debug("%s(dma id=%lu enabled=%d)\n", __func__, dma->id, uc->enabled);

	return 0;
}

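/*
 * Simulate a MEM_TO_DEV transfer: copy the caller's data and metadata word
 * into the internal buffer, where a later receive() can pick them up.
 */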
static int sandbox_dma_send(struct dma *dma,
			    void *src, size_t len, void *metadata)
{
	struct sandbox_dma_dev *ud = dev_get_priv(dma->dev);
	struct sandbox_dma_chan *uc;

	if (dma->id >= SANDBOX_DMA_CH_CNT)
		return -EINVAL;
	if (!src || !metadata)
		return -EINVAL;

	debug("%s(dma id=%lu)\n", __func__, dma->id);

	uc = &ud->channels[dma->id];
	if (uc->dir != DMA_MEM_TO_DEV)
		return -EINVAL;
	if (!uc->in_use)
		return -EINVAL;
	if (!uc->enabled)
		return -EINVAL;
	if (len >= SANDBOX_DMA_BUF_SIZE)
		return -EINVAL;

	memcpy(ud->buf, src, len);
	ud->data_len = len;
	ud->meta = *((u32 *)metadata);

	debug("%s(dma id=%lu len=%zu meta=%08x)\n",
	      __func__, dma->id, len, ud->meta);

	return 0;
}

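/*
 * Simulate a DEV_TO_MEM transfer: return the previously "sent" data, either
 * through a buffer registered via prepare_rcv_buf() or into the caller's
 * *dst. Returns the number of bytes received, or 0 if nothing is pending.
 */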
static int sandbox_dma_receive(struct dma *dma, void **dst, void *metadata)
{
	struct sandbox_dma_dev *ud = dev_get_priv(dma->dev);
	struct sandbox_dma_chan *uc;

	if (dma->id >= SANDBOX_DMA_CH_CNT)
		return -EINVAL;
	if (!dst || !metadata)
		return -EINVAL;

	uc = &ud->channels[dma->id];
	if (uc->dir != DMA_DEV_TO_MEM)
		return -EINVAL;
	if (!uc->in_use)
		return -EINVAL;
	if (!uc->enabled)
		return -EINVAL;
	if (!ud->data_len)
		return 0;

	if (ud->buf_rx) {
		memcpy(ud->buf_rx, ud->buf, ud->data_len);
		*dst = ud->buf_rx;
	} else {
		memcpy(*dst, ud->buf, ud->data_len);
	}

	*((u32 *)metadata) = ud->meta;

	debug("%s(dma id=%lu len=%zu meta=%08x %p)\n",
	      __func__, dma->id, ud->data_len, ud->meta, *dst);

	return ud->data_len;
}

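/* Register a receive buffer for the next receive() call to fill */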
static int sandbox_dma_prepare_rcv_buf(struct dma *dma, void *dst, size_t size)
{
	struct sandbox_dma_dev *ud = dev_get_priv(dma->dev);

	ud->buf_rx = dst;

	return 0;
}

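/* DMA uclass operations implemented by this simulation driver */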
static const struct dma_ops sandbox_dma_ops = {
	.transfer = sandbox_dma_transfer,
	.of_xlate = sandbox_dma_of_xlate,
	.request = sandbox_dma_request,
	.rfree = sandbox_dma_rfree,
	.enable = sandbox_dma_enable,
	.disable = sandbox_dma_disable,
	.send = sandbox_dma_send,
	.receive = sandbox_dma_receive,
	.prepare_rcv_buf = sandbox_dma_prepare_rcv_buf,
};

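/* Advertise the supported directions and initialise the simulated channels */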
static int sandbox_dma_probe(struct udevice *dev)
{
	struct dma_dev_priv *uc_priv = dev_get_uclass_priv(dev);
	struct sandbox_dma_dev *ud = dev_get_priv(dev);
	int i, ret = 0;

	uc_priv->supported = DMA_SUPPORTS_MEM_TO_MEM |
			     DMA_SUPPORTS_MEM_TO_DEV |
			     DMA_SUPPORTS_DEV_TO_MEM;

	ud->ch_count = SANDBOX_DMA_CH_CNT;
	ud->buf_rx = NULL;
	ud->meta = 0;
	ud->data_len = 0;

	pr_err("Number of channels: %u\n", ud->ch_count);

	for (i = 0; i < ud->ch_count; i++) {
		struct sandbox_dma_chan *uc = &ud->channels[i];

		uc->ud = ud;
		uc->id = i;
		sprintf(uc->name, "DMA chan%d", i);
		uc->in_use = false;
		uc->enabled = false;
	}

	return ret;
}

static const struct udevice_id sandbox_dma_ids[] = {
	{ .compatible = "sandbox,dma" },
	{ }
};

U_BOOT_DRIVER(sandbox_dma) = {
	.name = "sandbox-dma",
	.id = UCLASS_DMA,
	.of_match = sandbox_dma_ids,
	.ops = &sandbox_dma_ops,
	.probe = sandbox_dma_probe,
	.priv_auto = sizeof(struct sandbox_dma_dev),
};