/*
 * Copyright (c) 2016 Linaro Limited.
 * Copyright (c) 2019 Song Qiang <songqiang1304521@gmail.com>
 *
 * SPDX-License-Identifier: Apache-2.0
 */

/**
 * @brief Common part of DMA drivers for stm32.
 * @note  Functions named with stm32_dma_* are SoC-specific functions
 *        implemented in dma_stm32_v*.c
 */

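/*
 * Illustrative sketch (not part of the driver): applications normally reach
 * this code through the generic Zephyr DMA API rather than by calling the
 * dma_stm32_* entry points directly. Assuming a device handle for one of the
 * st,stm32-dma instances, a hypothetical peripheral data register and a
 * request line number, a memory-to-peripheral transfer could look roughly
 * like:
 *
 *	struct dma_block_config blk = {
 *		.source_address = (uint32_t)tx_buf,
 *		.dest_address = (uint32_t)periph_dr_addr,   // hypothetical
 *		.block_size = sizeof(tx_buf),
 *	};
 *	struct dma_config cfg = {
 *		.channel_direction = MEMORY_TO_PERIPHERAL,
 *		.source_data_size = 1,
 *		.dest_data_size = 1,
 *		.dma_slot = request_id,                     // hypothetical
 *		.head_block = &blk,
 *		.dma_callback = my_done_cb,
 *	};
 *	dma_config(dma_dev, channel, &cfg);   // dispatched to dma_stm32_configure()
 *	dma_start(dma_dev, channel);          // dispatched to dma_stm32_start()
 *
 * "channel" is the stream id as expected by this driver, i.e. offset by
 * STM32_DMA_STREAM_OFFSET (see below).
 */
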
#include "dma_stm32.h"

#include <zephyr/init.h>
#include <zephyr/drivers/clock_control.h>
#include <zephyr/drivers/dma/dma_stm32.h>

#include <zephyr/logging/log.h>
LOG_MODULE_REGISTER(dma_stm32, CONFIG_DMA_LOG_LEVEL);

#if DT_HAS_COMPAT_STATUS_OKAY(st_stm32_dma_v1)
#define DT_DRV_COMPAT st_stm32_dma_v1
#elif DT_HAS_COMPAT_STATUS_OKAY(st_stm32_dma_v2)
#define DT_DRV_COMPAT st_stm32_dma_v2
#elif DT_HAS_COMPAT_STATUS_OKAY(st_stm32_dma_v2bis)
#define DT_DRV_COMPAT st_stm32_dma_v2bis
#endif

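/*
 * The number of streams available on an instance is not encoded directly in
 * the devicetree; it is inferred from how many interrupt lines the instance
 * declares (one per stream), hence the cascaded checks below.
 */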
#if DT_NODE_HAS_STATUS(DT_DRV_INST(0), okay)
#if DT_INST_IRQ_HAS_IDX(0, 7)
#define DMA_STM32_0_STREAM_COUNT 8
#elif DT_INST_IRQ_HAS_IDX(0, 6)
#define DMA_STM32_0_STREAM_COUNT 7
#elif DT_INST_IRQ_HAS_IDX(0, 5)
#define DMA_STM32_0_STREAM_COUNT 6
#else
#define DMA_STM32_0_STREAM_COUNT 5
#endif
#endif /* DT_NODE_HAS_STATUS(DT_DRV_INST(0), okay) */

#if DT_NODE_HAS_STATUS(DT_DRV_INST(1), okay)
#if DT_INST_IRQ_HAS_IDX(1, 7)
#define DMA_STM32_1_STREAM_COUNT 8
#elif DT_INST_IRQ_HAS_IDX(1, 6)
#define DMA_STM32_1_STREAM_COUNT 7
#elif DT_INST_IRQ_HAS_IDX(1, 5)
#define DMA_STM32_1_STREAM_COUNT 6
#else
#define DMA_STM32_1_STREAM_COUNT 5
#endif
#endif /* DT_NODE_HAS_STATUS(DT_DRV_INST(1), okay) */

static uint32_t table_m_size[] = {
	LL_DMA_MDATAALIGN_BYTE,
	LL_DMA_MDATAALIGN_HALFWORD,
	LL_DMA_MDATAALIGN_WORD,
};

static uint32_t table_p_size[] = {
	LL_DMA_PDATAALIGN_BYTE,
	LL_DMA_PDATAALIGN_HALFWORD,
	LL_DMA_PDATAALIGN_WORD,
};

static void dma_stm32_dump_stream_irq(const struct device *dev, uint32_t id)
{
	const struct dma_stm32_config *config = dev->config;
	DMA_TypeDef *dma = (DMA_TypeDef *)(config->base);

	stm32_dma_dump_stream_irq(dma, id);
}

static void dma_stm32_clear_stream_irq(const struct device *dev, uint32_t id)
{
	const struct dma_stm32_config *config = dev->config;
	DMA_TypeDef *dma = (DMA_TypeDef *)(config->base);

	dma_stm32_clear_tc(dma, id);
	dma_stm32_clear_ht(dma, id);
	stm32_dma_clear_stream_irq(dma, id);
}

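/*
 * Per-stream interrupt service: half-transfer and transfer-complete events
 * are reported to the user callback with status 0; any other active flag is
 * treated as an error and reported with -EIO after the stream IRQ flags have
 * been dumped and cleared.
 */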
static void dma_stm32_irq_handler(const struct device *dev, uint32_t id)
{
	const struct dma_stm32_config *config = dev->config;
	DMA_TypeDef *dma = (DMA_TypeDef *)(config->base);
	struct dma_stm32_stream *stream;
	uint32_t callback_arg;

	__ASSERT_NO_MSG(id < config->max_streams);

	stream = &config->streams[id];

#ifdef CONFIG_DMAMUX_STM32
	callback_arg = stream->mux_channel;
#else
	callback_arg = id + STM32_DMA_STREAM_OFFSET;
#endif /* CONFIG_DMAMUX_STM32 */
	if (!IS_ENABLED(CONFIG_DMAMUX_STM32)) {
		stream->busy = false;
	}

	/* The dma stream id is in the range STM32_DMA_STREAM_OFFSET..<dma-requests> */
	if (stm32_dma_is_ht_irq_active(dma, id)) {
		/* Let HAL DMA handle flags on its own */
		if (!stream->hal_override) {
			dma_stm32_clear_ht(dma, id);
		}
		stream->dma_callback(dev, stream->user_data, callback_arg, 0);
	} else if (stm32_dma_is_tc_irq_active(dma, id)) {
#ifdef CONFIG_DMAMUX_STM32
		stream->busy = false;
#endif
		/* Let HAL DMA handle flags on its own */
		if (!stream->hal_override) {
			dma_stm32_clear_tc(dma, id);
		}
		stream->dma_callback(dev, stream->user_data, callback_arg, 0);
	} else if (stm32_dma_is_unexpected_irq_happened(dma, id)) {
		LOG_ERR("Unexpected irq happened.");
		stream->dma_callback(dev, stream->user_data,
				     callback_arg, -EIO);
	} else {
		LOG_ERR("Transfer Error.");
		dma_stm32_dump_stream_irq(dev, id);
		dma_stm32_clear_stream_irq(dev, id);
		stream->dma_callback(dev, stream->user_data,
				     callback_arg, -EIO);
	}
}

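/*
 * On series where several DMA streams share a single interrupt line
 * (CONFIG_DMA_STM32_SHARED_IRQS), one handler walks every stream of every
 * enabled instance and services those whose interrupt flag is active.
 */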
#ifdef CONFIG_DMA_STM32_SHARED_IRQS

#define HANDLE_IRQS(index)							\
	static const struct device *dev_##index = DEVICE_DT_INST_GET(index);	\
	const struct dma_stm32_config *cfg_##index = dev_##index->config;	\
	DMA_TypeDef *dma_##index = (DMA_TypeDef *)(cfg_##index->base);		\
										\
	for (id = 0; id < cfg_##index->max_streams; ++id) {			\
		if (stm32_dma_is_irq_active(dma_##index, id)) {			\
			dma_stm32_irq_handler(dev_##index, id);			\
		}								\
	}

static void dma_stm32_shared_irq_handler(const struct device *dev)
{
	ARG_UNUSED(dev);
	uint32_t id = 0;

	DT_INST_FOREACH_STATUS_OKAY(HANDLE_IRQS)
}

#endif /* CONFIG_DMA_STM32_SHARED_IRQS */

static int dma_stm32_get_priority(uint8_t priority, uint32_t *ll_priority)
{
	switch (priority) {
	case 0x0:
		*ll_priority = LL_DMA_PRIORITY_LOW;
		break;
	case 0x1:
		*ll_priority = LL_DMA_PRIORITY_MEDIUM;
		break;
	case 0x2:
		*ll_priority = LL_DMA_PRIORITY_HIGH;
		break;
	case 0x3:
		*ll_priority = LL_DMA_PRIORITY_VERYHIGH;
		break;
	default:
		LOG_ERR("Priority error. %d", priority);
		return -EINVAL;
	}

	return 0;
}

static int dma_stm32_get_direction(enum dma_channel_direction direction,
				   uint32_t *ll_direction)
{
	switch (direction) {
	case MEMORY_TO_MEMORY:
		*ll_direction = LL_DMA_DIRECTION_MEMORY_TO_MEMORY;
		break;
	case MEMORY_TO_PERIPHERAL:
		*ll_direction = LL_DMA_DIRECTION_MEMORY_TO_PERIPH;
		break;
	case PERIPHERAL_TO_MEMORY:
		*ll_direction = LL_DMA_DIRECTION_PERIPH_TO_MEMORY;
		break;
	default:
		LOG_ERR("Direction error. %d", direction);
		return -EINVAL;
	}

	return 0;
}

static int dma_stm32_get_memory_increment(enum dma_addr_adj increment,
					  uint32_t *ll_increment)
{
	switch (increment) {
	case DMA_ADDR_ADJ_INCREMENT:
		*ll_increment = LL_DMA_MEMORY_INCREMENT;
		break;
	case DMA_ADDR_ADJ_NO_CHANGE:
		*ll_increment = LL_DMA_MEMORY_NOINCREMENT;
		break;
	case DMA_ADDR_ADJ_DECREMENT:
		return -ENOTSUP;
	default:
		LOG_ERR("Memory increment error. %d", increment);
		return -EINVAL;
	}

	return 0;
}

static int dma_stm32_get_periph_increment(enum dma_addr_adj increment,
					  uint32_t *ll_increment)
{
	switch (increment) {
	case DMA_ADDR_ADJ_INCREMENT:
		*ll_increment = LL_DMA_PERIPH_INCREMENT;
		break;
	case DMA_ADDR_ADJ_NO_CHANGE:
		*ll_increment = LL_DMA_PERIPH_NOINCREMENT;
		break;
	case DMA_ADDR_ADJ_DECREMENT:
		return -ENOTSUP;
	default:
		LOG_ERR("Periph increment error. %d", increment);
		return -EINVAL;
	}

	return 0;
}

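/*
 * Ask the LL layer to disable the stream and poll until the hardware
 * confirms it, sleeping 1 ms between attempts and giving up after roughly
 * 5 seconds.
 */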
static int dma_stm32_disable_stream(DMA_TypeDef *dma, uint32_t id)
{
	int count = 0;

	for (;;) {
		if (stm32_dma_disable_stream(dma, id) == 0) {
			return 0;
		}
		/* After trying for 5 seconds, give up */
		if (count++ > (5 * 1000)) {
			return -EBUSY;
		}
		k_sleep(K_MSEC(1));
	}

	return 0;
}

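/*
 * Channel ids handed to the public API start at STM32_DMA_STREAM_OFFSET;
 * they are converted to 0-based stream indexes before touching the hardware.
 */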
DMA_STM32_EXPORT_API int dma_stm32_configure(const struct device *dev,
					     uint32_t id,
					     struct dma_config *config)
{
	const struct dma_stm32_config *dev_config = dev->config;
	struct dma_stm32_stream *stream =
				&dev_config->streams[id - STM32_DMA_STREAM_OFFSET];
	DMA_TypeDef *dma = (DMA_TypeDef *)dev_config->base;
	LL_DMA_InitTypeDef DMA_InitStruct;
	int ret;

	LL_DMA_StructInit(&DMA_InitStruct);

	/* give channel from index 0 */
	id = id - STM32_DMA_STREAM_OFFSET;

	if (id >= dev_config->max_streams) {
		LOG_ERR("cannot configure the dma stream %d.", id);
		return -EINVAL;
	}

	if (stream->busy) {
		LOG_ERR("dma stream %d is busy.", id);
		return -EBUSY;
	}

	if (dma_stm32_disable_stream(dma, id) != 0) {
		LOG_ERR("could not disable dma stream %d.", id);
		return -EBUSY;
	}

	dma_stm32_clear_stream_irq(dev, id);

	/* Check potential DMA override (if id parameters and stream are valid) */
	if (config->linked_channel == STM32_DMA_HAL_OVERRIDE) {
		/* DMA channel is overridden by HAL DMA
		 * Retain that the channel is busy and proceed to the minimal
		 * configuration to properly route the IRQ
		 */
		stream->busy = true;
		stream->hal_override = true;
		stream->dma_callback = config->dma_callback;
		stream->user_data = config->user_data;
		return 0;
	}

	if (config->head_block->block_size > DMA_STM32_MAX_DATA_ITEMS) {
		LOG_ERR("Data size too big: %d\n",
			config->head_block->block_size);
		return -EINVAL;
	}

#ifdef CONFIG_DMA_STM32_V1
	if ((config->channel_direction == MEMORY_TO_MEMORY) &&
	    (!dev_config->support_m2m)) {
		LOG_ERR("Memcopy not supported for device %s",
			dev->name);
		return -ENOTSUP;
	}
#endif /* CONFIG_DMA_STM32_V1 */

	/* support only the same data width for source and dest */
	if (config->dest_data_size != config->source_data_size) {
		LOG_ERR("source and dest data size differ.");
		return -EINVAL;
	}

	if (config->source_data_size != 4U &&
	    config->source_data_size != 2U &&
	    config->source_data_size != 1U) {
		LOG_ERR("source and dest unit size error, %d",
			config->source_data_size);
		return -EINVAL;
	}

	/*
	 * STM32's circular mode will auto reset both source address
	 * counter and destination address counter.
	 */
	if (config->head_block->source_reload_en !=
	    config->head_block->dest_reload_en) {
		LOG_ERR("source_reload_en and dest_reload_en must "
			"be the same.");
		return -EINVAL;
	}

	stream->busy = true;
	stream->dma_callback = config->dma_callback;
	stream->direction = config->channel_direction;
	stream->user_data = config->user_data;
	stream->src_size = config->source_data_size;
	stream->dst_size = config->dest_data_size;

	/* check dest or source memory address, warn if 0 */
	if (config->head_block->source_address == 0) {
		LOG_WRN("source_buffer address is null.");
	}

	if (config->head_block->dest_address == 0) {
		LOG_WRN("dest_buffer address is null.");
	}

	if (stream->direction == MEMORY_TO_PERIPHERAL) {
		DMA_InitStruct.MemoryOrM2MDstAddress =
					config->head_block->source_address;
		DMA_InitStruct.PeriphOrM2MSrcAddress =
					config->head_block->dest_address;
	} else {
		DMA_InitStruct.PeriphOrM2MSrcAddress =
					config->head_block->source_address;
		DMA_InitStruct.MemoryOrM2MDstAddress =
					config->head_block->dest_address;
	}

	uint16_t memory_addr_adj = 0, periph_addr_adj = 0;

	ret = dma_stm32_get_priority(config->channel_priority,
				     &DMA_InitStruct.Priority);
	if (ret < 0) {
		return ret;
	}

	ret = dma_stm32_get_direction(config->channel_direction,
				      &DMA_InitStruct.Direction);
	if (ret < 0) {
		return ret;
	}

	switch (config->channel_direction) {
	case MEMORY_TO_MEMORY:
	case PERIPHERAL_TO_MEMORY:
		memory_addr_adj = config->head_block->dest_addr_adj;
		periph_addr_adj = config->head_block->source_addr_adj;
		break;
	case MEMORY_TO_PERIPHERAL:
		memory_addr_adj = config->head_block->source_addr_adj;
		periph_addr_adj = config->head_block->dest_addr_adj;
		break;
	/* Direction has been asserted in dma_stm32_get_direction. */
	default:
		LOG_ERR("Channel direction error (%d).",
			config->channel_direction);
		return -EINVAL;
	}

	ret = dma_stm32_get_memory_increment(memory_addr_adj,
					&DMA_InitStruct.MemoryOrM2MDstIncMode);
	if (ret < 0) {
		return ret;
	}
	ret = dma_stm32_get_periph_increment(periph_addr_adj,
					&DMA_InitStruct.PeriphOrM2MSrcIncMode);
	if (ret < 0) {
		return ret;
	}

	if (config->head_block->source_reload_en) {
		DMA_InitStruct.Mode = LL_DMA_MODE_CIRCULAR;
	} else {
		DMA_InitStruct.Mode = LL_DMA_MODE_NORMAL;
	}

	stream->source_periph = (stream->direction == PERIPHERAL_TO_MEMORY);

	/* set the data width, when source_data_size equals dest_data_size */
	int index = find_lsb_set(config->source_data_size) - 1;
	DMA_InitStruct.PeriphOrM2MSrcDataSize = table_p_size[index];
	index = find_lsb_set(config->dest_data_size) - 1;
	DMA_InitStruct.MemoryOrM2MDstDataSize = table_m_size[index];

#if defined(CONFIG_DMA_STM32_V1)
	DMA_InitStruct.MemBurst = stm32_dma_get_mburst(config,
						       stream->source_periph);
	DMA_InitStruct.PeriphBurst = stm32_dma_get_pburst(config,
							  stream->source_periph);

#if !defined(CONFIG_SOC_SERIES_STM32H7X)
	if (config->channel_direction != MEMORY_TO_MEMORY) {
		if (config->dma_slot >= 8) {
			LOG_ERR("dma slot error.");
			return -EINVAL;
		}
	} else {
		if (config->dma_slot >= 8) {
			LOG_ERR("dma slot is too big, using 0 as default.");
			config->dma_slot = 0;
		}
	}

	DMA_InitStruct.Channel = dma_stm32_slot_to_channel(config->dma_slot);
#endif

	DMA_InitStruct.FIFOThreshold = stm32_dma_get_fifo_threshold(
					config->head_block->fifo_mode_control);

	if (stm32_dma_check_fifo_mburst(&DMA_InitStruct)) {
		DMA_InitStruct.FIFOMode = LL_DMA_FIFOMODE_ENABLE;
	} else {
		DMA_InitStruct.FIFOMode = LL_DMA_FIFOMODE_DISABLE;
	}
#endif
	if (stream->source_periph) {
		DMA_InitStruct.NbData = config->head_block->block_size /
					config->source_data_size;
	} else {
		DMA_InitStruct.NbData = config->head_block->block_size /
					config->dest_data_size;
	}

#if DT_HAS_COMPAT_STATUS_OKAY(st_stm32_dma_v2) || DT_HAS_COMPAT_STATUS_OKAY(st_stm32_dmamux)
	/*
	 * With DMA V2 and the DMA MUX, the request ID is stored
	 * in the dma_slot.
	 */
	DMA_InitStruct.PeriphRequest = config->dma_slot;
#endif
	LL_DMA_Init(dma, dma_stm32_id_to_stream(id), &DMA_InitStruct);

	LL_DMA_EnableIT_TC(dma, dma_stm32_id_to_stream(id));

	/* Enable Half-Transfer irq if circular mode is enabled */
	if (config->head_block->source_reload_en) {
		LL_DMA_EnableIT_HT(dma, dma_stm32_id_to_stream(id));
	}

#if defined(CONFIG_DMA_STM32_V1)
	if (DMA_InitStruct.FIFOMode == LL_DMA_FIFOMODE_ENABLE) {
		LL_DMA_EnableFifoMode(dma, dma_stm32_id_to_stream(id));
		LL_DMA_EnableIT_FE(dma, dma_stm32_id_to_stream(id));
	} else {
		LL_DMA_DisableFifoMode(dma, dma_stm32_id_to_stream(id));
		LL_DMA_DisableIT_FE(dma, dma_stm32_id_to_stream(id));
	}
#endif
	return ret;
}

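/*
 * Reload a previously configured stream: the stream is first disabled, the
 * source/destination addresses and item count are reprogrammed according to
 * the stored direction and data width, then the stream is enabled again.
 */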
DMA_STM32_EXPORT_API int dma_stm32_reload(const struct device *dev, uint32_t id,
					  uint32_t src, uint32_t dst,
					  size_t size)
{
	const struct dma_stm32_config *config = dev->config;
	DMA_TypeDef *dma = (DMA_TypeDef *)(config->base);
	struct dma_stm32_stream *stream;

	/* give channel from index 0 */
	id = id - STM32_DMA_STREAM_OFFSET;

	if (id >= config->max_streams) {
		return -EINVAL;
	}

	stream = &config->streams[id];

	if (dma_stm32_disable_stream(dma, id) != 0) {
		return -EBUSY;
	}

	switch (stream->direction) {
	case MEMORY_TO_PERIPHERAL:
		LL_DMA_SetMemoryAddress(dma, dma_stm32_id_to_stream(id), src);
		LL_DMA_SetPeriphAddress(dma, dma_stm32_id_to_stream(id), dst);
		break;
	case MEMORY_TO_MEMORY:
	case PERIPHERAL_TO_MEMORY:
		LL_DMA_SetPeriphAddress(dma, dma_stm32_id_to_stream(id), src);
		LL_DMA_SetMemoryAddress(dma, dma_stm32_id_to_stream(id), dst);
		break;
	default:
		return -EINVAL;
	}

	if (stream->source_periph) {
		LL_DMA_SetDataLength(dma, dma_stm32_id_to_stream(id),
				     size / stream->src_size);
	} else {
		LL_DMA_SetDataLength(dma, dma_stm32_id_to_stream(id),
				     size / stream->dst_size);
	}

	stm32_dma_enable_stream(dma, id);

	return 0;
}

DMA_STM32_EXPORT_API int dma_stm32_start(const struct device *dev, uint32_t id)
{
	const struct dma_stm32_config *config = dev->config;
	DMA_TypeDef *dma = (DMA_TypeDef *)(config->base);

	/* give channel from index 0 */
	id = id - STM32_DMA_STREAM_OFFSET;

	/* Only M2P or M2M mode can be started manually. */
	if (id >= config->max_streams) {
		return -EINVAL;
	}

	dma_stm32_clear_stream_irq(dev, id);

	stm32_dma_enable_stream(dma, id);

	return 0;
}

DMA_STM32_EXPORT_API int dma_stm32_stop(const struct device *dev, uint32_t id)
{
	const struct dma_stm32_config *config = dev->config;
	struct dma_stm32_stream *stream = &config->streams[id - STM32_DMA_STREAM_OFFSET];
	DMA_TypeDef *dma = (DMA_TypeDef *)(config->base);

	/* give channel from index 0 */
	id = id - STM32_DMA_STREAM_OFFSET;

	if (id >= config->max_streams) {
		return -EINVAL;
	}

#if !defined(CONFIG_DMAMUX_STM32) || defined(CONFIG_SOC_SERIES_STM32H7X)
	LL_DMA_DisableIT_TC(dma, dma_stm32_id_to_stream(id));
#endif /* CONFIG_DMAMUX_STM32 */

#if defined(CONFIG_DMA_STM32_V1)
	stm32_dma_disable_fifo_irq(dma, id);
#endif
	dma_stm32_disable_stream(dma, id);
	dma_stm32_clear_stream_irq(dev, id);

	/* Finally, flag stream as free */
	stream->busy = false;

	return 0;
}

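/*
 * Common init: enable the controller clock, hook up the per-instance
 * interrupts and mark every stream as free. With a DMAMUX, each stream is
 * also bound to its fixed mux channel here.
 */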
static int dma_stm32_init(const struct device *dev)
{
	const struct dma_stm32_config *config = dev->config;
	const struct device *clk = DEVICE_DT_GET(STM32_CLOCK_CONTROL_NODE);

	if (clock_control_on(clk,
		(clock_control_subsys_t *) &config->pclken) != 0) {
		LOG_ERR("clock op failed\n");
		return -EIO;
	}

	config->config_irq(dev);

	for (uint32_t i = 0; i < config->max_streams; i++) {
		config->streams[i].busy = false;
#ifdef CONFIG_DMAMUX_STM32
		/* each further stream->mux_channel is fixed here */
		config->streams[i].mux_channel = i + config->offset;
#endif /* CONFIG_DMAMUX_STM32 */
	}

	((struct dma_stm32_data *)dev->data)->dma_ctx.magic = 0;
	((struct dma_stm32_data *)dev->data)->dma_ctx.dma_channels = 0;
	((struct dma_stm32_data *)dev->data)->dma_ctx.atomic = 0;

	return 0;
}

DMA_STM32_EXPORT_API int dma_stm32_get_status(const struct device *dev,
					      uint32_t id, struct dma_status *stat)
{
	const struct dma_stm32_config *config = dev->config;
	DMA_TypeDef *dma = (DMA_TypeDef *)(config->base);
	struct dma_stm32_stream *stream;

	/* give channel from index 0 */
	id = id - STM32_DMA_STREAM_OFFSET;
	if (id >= config->max_streams) {
		return -EINVAL;
	}

	stream = &config->streams[id];
	stat->pending_length = LL_DMA_GetDataLength(dma, dma_stm32_id_to_stream(id));
	stat->dir = stream->direction;
	stat->busy = stream->busy;

	return 0;
}

static const struct dma_driver_api dma_funcs = {
	.reload = dma_stm32_reload,
	.config = dma_stm32_configure,
	.start = dma_stm32_start,
	.stop = dma_stm32_stop,
	.get_status = dma_stm32_get_status,
};

#ifdef CONFIG_DMAMUX_STM32
#define DMA_STM32_OFFSET_INIT(index)			\
	.offset = DT_INST_PROP(index, dma_offset),
#else
#define DMA_STM32_OFFSET_INIT(index)
#endif /* CONFIG_DMAMUX_STM32 */

#ifdef CONFIG_DMA_STM32_V1
#define DMA_STM32_MEM2MEM_INIT(index)			\
	.support_m2m = DT_INST_PROP(index, st_mem2mem),
#else
#define DMA_STM32_MEM2MEM_INIT(index)
#endif /* CONFIG_DMA_STM32_V1 */

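/*
 * Per-instance boilerplate: DMA_STM32_INIT_DEV(index) declares the stream
 * array sized from DMA_STM32_<index>_STREAM_COUNT, the constant config
 * (clock, base address, IRQ setup hook) and the runtime data, then defines
 * the Zephyr device with DEVICE_DT_INST_DEFINE.
 */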
#define DMA_STM32_INIT_DEV(index)					\
static struct dma_stm32_stream						\
	dma_stm32_streams_##index[DMA_STM32_##index##_STREAM_COUNT];	\
									\
const struct dma_stm32_config dma_stm32_config_##index = {		\
	.pclken = { .bus = DT_INST_CLOCKS_CELL(index, bus),		\
		    .enr = DT_INST_CLOCKS_CELL(index, bits) },		\
	.config_irq = dma_stm32_config_irq_##index,			\
	.base = DT_INST_REG_ADDR(index),				\
	DMA_STM32_MEM2MEM_INIT(index)					\
	.max_streams = DMA_STM32_##index##_STREAM_COUNT,		\
	.streams = dma_stm32_streams_##index,				\
	DMA_STM32_OFFSET_INIT(index)					\
};									\
									\
static struct dma_stm32_data dma_stm32_data_##index = {		\
};									\
									\
DEVICE_DT_INST_DEFINE(index,						\
		    &dma_stm32_init,					\
		    NULL,						\
		    &dma_stm32_data_##index, &dma_stm32_config_##index,	\
		    PRE_KERNEL_1, CONFIG_DMA_INIT_PRIORITY,		\
		    &dma_funcs)

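/*
 * IRQ wiring: with shared IRQs a single dma_stm32_shared_irq_handler is
 * connected to every line; otherwise a dedicated dma_stm32_irq_<dma>_<chan>
 * handler is generated and connected per stream.
 */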
#ifdef CONFIG_DMA_STM32_SHARED_IRQS

#define DMA_STM32_DEFINE_IRQ_HANDLER(dma, chan) /* nothing */

#define DMA_STM32_IRQ_CONNECT(dma, chan)				\
	do {								\
		IRQ_CONNECT(DT_INST_IRQ_BY_IDX(dma, chan, irq),		\
			    DT_INST_IRQ_BY_IDX(dma, chan, priority),	\
			    dma_stm32_shared_irq_handler,		\
			    DEVICE_DT_INST_GET(dma), 0);		\
		irq_enable(DT_INST_IRQ_BY_IDX(dma, chan, irq));		\
	} while (0)


#else /* CONFIG_DMA_STM32_SHARED_IRQS */

#define DMA_STM32_DEFINE_IRQ_HANDLER(dma, chan)				\
static void dma_stm32_irq_##dma##_##chan(const struct device *dev)	\
{									\
	dma_stm32_irq_handler(dev, chan);				\
}


#define DMA_STM32_IRQ_CONNECT(dma, chan)				\
	do {								\
		IRQ_CONNECT(DT_INST_IRQ_BY_IDX(dma, chan, irq),		\
			    DT_INST_IRQ_BY_IDX(dma, chan, priority),	\
			    dma_stm32_irq_##dma##_##chan,		\
			    DEVICE_DT_INST_GET(dma), 0);		\
		irq_enable(DT_INST_IRQ_BY_IDX(dma, chan, irq));		\
	} while (0)

#endif /* CONFIG_DMA_STM32_SHARED_IRQS */

#if DT_NODE_HAS_STATUS(DT_DRV_INST(0), okay)

DMA_STM32_DEFINE_IRQ_HANDLER(0, 0);
DMA_STM32_DEFINE_IRQ_HANDLER(0, 1);
DMA_STM32_DEFINE_IRQ_HANDLER(0, 2);
DMA_STM32_DEFINE_IRQ_HANDLER(0, 3);
DMA_STM32_DEFINE_IRQ_HANDLER(0, 4);
#if DT_INST_IRQ_HAS_IDX(0, 5)
DMA_STM32_DEFINE_IRQ_HANDLER(0, 5);
#if DT_INST_IRQ_HAS_IDX(0, 6)
DMA_STM32_DEFINE_IRQ_HANDLER(0, 6);
#if DT_INST_IRQ_HAS_IDX(0, 7)
DMA_STM32_DEFINE_IRQ_HANDLER(0, 7);
#endif /* DT_INST_IRQ_HAS_IDX(0, 7) */
#endif /* DT_INST_IRQ_HAS_IDX(0, 6) */
#endif /* DT_INST_IRQ_HAS_IDX(0, 5) */

static void dma_stm32_config_irq_0(const struct device *dev)
{
	ARG_UNUSED(dev);

	DMA_STM32_IRQ_CONNECT(0, 0);
	DMA_STM32_IRQ_CONNECT(0, 1);
#ifndef CONFIG_DMA_STM32_SHARED_IRQS
	DMA_STM32_IRQ_CONNECT(0, 2);
#endif /* CONFIG_DMA_STM32_SHARED_IRQS */
	DMA_STM32_IRQ_CONNECT(0, 3);
#ifndef CONFIG_DMA_STM32_SHARED_IRQS
	DMA_STM32_IRQ_CONNECT(0, 4);
#if DT_INST_IRQ_HAS_IDX(0, 5)
	DMA_STM32_IRQ_CONNECT(0, 5);
#if DT_INST_IRQ_HAS_IDX(0, 6)
	DMA_STM32_IRQ_CONNECT(0, 6);
#if DT_INST_IRQ_HAS_IDX(0, 7)
	DMA_STM32_IRQ_CONNECT(0, 7);
#endif /* DT_INST_IRQ_HAS_IDX(0, 7) */
#endif /* DT_INST_IRQ_HAS_IDX(0, 6) */
#endif /* DT_INST_IRQ_HAS_IDX(0, 5) */
#endif /* CONFIG_DMA_STM32_SHARED_IRQS */
/* Either 5 or 6 or 7 or 8 channels for DMA across all stm32 series. */
}

DMA_STM32_INIT_DEV(0);

#endif /* DT_NODE_HAS_STATUS(DT_DRV_INST(0), okay) */

#if DT_NODE_HAS_STATUS(DT_DRV_INST(1), okay)

DMA_STM32_DEFINE_IRQ_HANDLER(1, 0);
DMA_STM32_DEFINE_IRQ_HANDLER(1, 1);
DMA_STM32_DEFINE_IRQ_HANDLER(1, 2);
DMA_STM32_DEFINE_IRQ_HANDLER(1, 3);
#if DT_INST_IRQ_HAS_IDX(1, 4)
DMA_STM32_DEFINE_IRQ_HANDLER(1, 4);
#if DT_INST_IRQ_HAS_IDX(1, 5)
DMA_STM32_DEFINE_IRQ_HANDLER(1, 5);
#if DT_INST_IRQ_HAS_IDX(1, 6)
DMA_STM32_DEFINE_IRQ_HANDLER(1, 6);
#if DT_INST_IRQ_HAS_IDX(1, 7)
DMA_STM32_DEFINE_IRQ_HANDLER(1, 7);
#endif /* DT_INST_IRQ_HAS_IDX(1, 7) */
#endif /* DT_INST_IRQ_HAS_IDX(1, 6) */
#endif /* DT_INST_IRQ_HAS_IDX(1, 5) */
#endif /* DT_INST_IRQ_HAS_IDX(1, 4) */

static void dma_stm32_config_irq_1(const struct device *dev)
{
	ARG_UNUSED(dev);

#ifndef CONFIG_DMA_STM32_SHARED_IRQS
	DMA_STM32_IRQ_CONNECT(1, 0);
	DMA_STM32_IRQ_CONNECT(1, 1);
	DMA_STM32_IRQ_CONNECT(1, 2);
	DMA_STM32_IRQ_CONNECT(1, 3);
#if DT_INST_IRQ_HAS_IDX(1, 4)
	DMA_STM32_IRQ_CONNECT(1, 4);
#if DT_INST_IRQ_HAS_IDX(1, 5)
	DMA_STM32_IRQ_CONNECT(1, 5);
#if DT_INST_IRQ_HAS_IDX(1, 6)
	DMA_STM32_IRQ_CONNECT(1, 6);
#if DT_INST_IRQ_HAS_IDX(1, 7)
	DMA_STM32_IRQ_CONNECT(1, 7);
#endif /* DT_INST_IRQ_HAS_IDX(1, 7) */
#endif /* DT_INST_IRQ_HAS_IDX(1, 6) */
#endif /* DT_INST_IRQ_HAS_IDX(1, 5) */
#endif /* DT_INST_IRQ_HAS_IDX(1, 4) */
#endif /* CONFIG_DMA_STM32_SHARED_IRQS */
/*
 * Either 5 or 6 or 7 or 8 channels for DMA across all stm32 series.
 * STM32F0 and STM32G0: if dma2 exists, its channel interrupts overlap with dma1.
 */
}

DMA_STM32_INIT_DEV(1);

#endif /* DT_NODE_HAS_STATUS(DT_DRV_INST(1), okay) */