/*
 * Copyright (c) 2016 Linaro Limited.
 * Copyright (c) 2019 Song Qiang <songqiang1304521@gmail.com>
 * Copyright (c) 2022 STMicroelectronics
 *
 * SPDX-License-Identifier: Apache-2.0
 */

/**
 * @brief Common part of the DMA drivers for the STM32U5 series (GPDMA).
 * @note  Functions named stm32_dma_* are SoC-specific helpers.
 */
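
/*
 * Clients do not call into this file directly: they use the generic Zephyr DMA
 * API (dma_config()/dma_start()/dma_reload()/dma_stop()) and usually pick up
 * the controller and channel from their devicetree "dmas" property. A minimal
 * sketch, assuming a client node MY_NODE with a "dmas" phandle to the GPDMA
 * (MY_NODE and my_dma_callback are placeholders, not defined in this driver):
 *
 *	const struct device *dma_dev =
 *		DEVICE_DT_GET(DT_DMAS_CTLR_BY_IDX(MY_NODE, 0));
 *	uint32_t channel = DT_DMAS_CELL_BY_IDX(MY_NODE, 0, channel);
 *
 *	dma_config(dma_dev, channel, &cfg);	// cfg: see dma_stm32_configure()
 *	dma_start(dma_dev, channel);		// callback runs on TC/HT or error
 */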

#include "dma_stm32.h"

#include <zephyr/init.h>
#include <zephyr/drivers/clock_control.h>
#include <zephyr/drivers/dma/dma_stm32.h>

#include <zephyr/logging/log.h>
LOG_MODULE_REGISTER(dma_stm32, CONFIG_DMA_LOG_LEVEL);

#define DT_DRV_COMPAT st_stm32u5_dma

/* The STM32U5 SoC has a single GPDMA instance with 16 channels */
#if DT_NODE_HAS_STATUS(DT_DRV_INST(0), okay)
#define DMA_STM32_0_STREAM_COUNT 16
#endif /* DT_NODE_HAS_STATUS(DT_DRV_INST(0), okay) */

static uint32_t table_m_size[] = {
	LL_DMA_SRC_DATAWIDTH_BYTE,
	LL_DMA_SRC_DATAWIDTH_HALFWORD,
	LL_DMA_SRC_DATAWIDTH_WORD,
};

static uint32_t table_p_size[] = {
	LL_DMA_DEST_DATAWIDTH_BYTE,
	LL_DMA_DEST_DATAWIDTH_HALFWORD,
	LL_DMA_DEST_DATAWIDTH_WORD,
};
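
/*
 * Indexed by the Zephyr dma_config channel_priority value (0..3): the three
 * low-priority arbitration weights of the GPDMA followed by the high-priority
 * level (see dma_stm32_get_priority() below).
 */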
static uint32_t table_priority[4] = {
	LL_DMA_LOW_PRIORITY_LOW_WEIGHT,
	LL_DMA_LOW_PRIORITY_MID_WEIGHT,
	LL_DMA_LOW_PRIORITY_HIGH_WEIGHT,
	LL_DMA_HIGH_PRIORITY,
};

static void dma_stm32_dump_stream_irq(const struct device *dev, uint32_t id)
{
	const struct dma_stm32_config *config = dev->config;
	DMA_TypeDef *dma = (DMA_TypeDef *)(config->base);

	stm32_dma_dump_stream_irq(dma, id);
}

static void dma_stm32_clear_stream_irq(const struct device *dev, uint32_t id)
{
	const struct dma_stm32_config *config = dev->config;
	DMA_TypeDef *dma = (DMA_TypeDef *)(config->base);

	dma_stm32_clear_tc(dma, id);
	dma_stm32_clear_ht(dma, id);
	stm32_dma_clear_stream_irq(dma, id);
}

uint32_t dma_stm32_id_to_stream(uint32_t id)
{
	static const uint32_t stream_nr[] = {
		LL_DMA_CHANNEL_0,
		LL_DMA_CHANNEL_1,
		LL_DMA_CHANNEL_2,
		LL_DMA_CHANNEL_3,
		LL_DMA_CHANNEL_4,
		LL_DMA_CHANNEL_5,
		LL_DMA_CHANNEL_6,
		LL_DMA_CHANNEL_7,
		LL_DMA_CHANNEL_8,
		LL_DMA_CHANNEL_9,
		LL_DMA_CHANNEL_10,
		LL_DMA_CHANNEL_11,
		LL_DMA_CHANNEL_12,
		LL_DMA_CHANNEL_13,
		LL_DMA_CHANNEL_14,
		LL_DMA_CHANNEL_15,
	};

	__ASSERT_NO_MSG(id < ARRAY_SIZE(stream_nr));

	return stream_nr[id];
}

bool dma_stm32_is_tc_active(DMA_TypeDef *DMAx, uint32_t id)
{
	return LL_DMA_IsActiveFlag_TC(DMAx, dma_stm32_id_to_stream(id));
}

void dma_stm32_clear_tc(DMA_TypeDef *DMAx, uint32_t id)
{
	LL_DMA_ClearFlag_TC(DMAx, dma_stm32_id_to_stream(id));
}

/* A transfer error is any of: data transfer, user setting, or link transfer error */
bool dma_stm32_is_te_active(DMA_TypeDef *DMAx, uint32_t id)
{
	return (
		LL_DMA_IsActiveFlag_DTE(DMAx, dma_stm32_id_to_stream(id)) ||
		LL_DMA_IsActiveFlag_ULE(DMAx, dma_stm32_id_to_stream(id)) ||
		LL_DMA_IsActiveFlag_USE(DMAx, dma_stm32_id_to_stream(id))
	);
}

/* Clear all transfer error flags: data transfer, user setting, and link transfer */
void dma_stm32_clear_te(DMA_TypeDef *DMAx, uint32_t id)
{
	LL_DMA_ClearFlag_DTE(DMAx, dma_stm32_id_to_stream(id));
	LL_DMA_ClearFlag_ULE(DMAx, dma_stm32_id_to_stream(id));
	LL_DMA_ClearFlag_USE(DMAx, dma_stm32_id_to_stream(id));
}

bool dma_stm32_is_ht_active(DMA_TypeDef *DMAx, uint32_t id)
{
	return LL_DMA_IsActiveFlag_HT(DMAx, dma_stm32_id_to_stream(id));
}

void dma_stm32_clear_ht(DMA_TypeDef *DMAx, uint32_t id)
{
	LL_DMA_ClearFlag_HT(DMAx, dma_stm32_id_to_stream(id));
}

void stm32_dma_dump_stream_irq(DMA_TypeDef *dma, uint32_t id)
{
	LOG_INF("tc: %d, ht: %d, te: %d",
		dma_stm32_is_tc_active(dma, id),
		dma_stm32_is_ht_active(dma, id),
		dma_stm32_is_te_active(dma, id));
}

/* Check whether the non-secure masked interrupt is active on the channel */
bool stm32_dma_is_tc_irq_active(DMA_TypeDef *dma, uint32_t id)
{
	return (LL_DMA_IsEnabledIT_TC(dma, dma_stm32_id_to_stream(id)) &&
		LL_DMA_IsActiveFlag_TC(dma, dma_stm32_id_to_stream(id)));
}

bool stm32_dma_is_ht_irq_active(DMA_TypeDef *dma, uint32_t id)
{
	return (LL_DMA_IsEnabledIT_HT(dma, dma_stm32_id_to_stream(id)) &&
		LL_DMA_IsActiveFlag_HT(dma, dma_stm32_id_to_stream(id)));
}

static inline bool stm32_dma_is_te_irq_active(DMA_TypeDef *dma, uint32_t id)
{
	return (
		(LL_DMA_IsEnabledIT_DTE(dma, dma_stm32_id_to_stream(id)) &&
		 LL_DMA_IsActiveFlag_DTE(dma, dma_stm32_id_to_stream(id))) ||
		(LL_DMA_IsEnabledIT_ULE(dma, dma_stm32_id_to_stream(id)) &&
		 LL_DMA_IsActiveFlag_ULE(dma, dma_stm32_id_to_stream(id))) ||
		(LL_DMA_IsEnabledIT_USE(dma, dma_stm32_id_to_stream(id)) &&
		 LL_DMA_IsActiveFlag_USE(dma, dma_stm32_id_to_stream(id)))
	);
}

/* Check whether an IRQ of any type occurred on the channel */
#define stm32_dma_is_irq_active LL_DMA_IsActiveFlag_MIS

void stm32_dma_clear_stream_irq(DMA_TypeDef *dma, uint32_t id)
{
	dma_stm32_clear_te(dma, id);

	LL_DMA_ClearFlag_TO(dma, dma_stm32_id_to_stream(id));
	LL_DMA_ClearFlag_SUSP(dma, dma_stm32_id_to_stream(id));
}

bool stm32_dma_is_irq_happened(DMA_TypeDef *dma, uint32_t id)
{
	if (dma_stm32_is_te_active(dma, id)) {
		return true;
	}

	return false;
}

void stm32_dma_enable_stream(DMA_TypeDef *dma, uint32_t id)
{
	LL_DMA_EnableChannel(dma, dma_stm32_id_to_stream(id));
}

int stm32_dma_disable_stream(DMA_TypeDef *dma, uint32_t id)
{
	/* GPDMA channel abort sequence: suspend the channel first */
	LL_DMA_SuspendChannel(dma, dma_stm32_id_to_stream(id));

	/* Resetting the channel also disables it */
	LL_DMA_ResetChannel(dma, dma_stm32_id_to_stream(id));

	if (!LL_DMA_IsEnabledChannel(dma, dma_stm32_id_to_stream(id))) {
		return 0;
	}

	return -EAGAIN;
}

void stm32_dma_set_mem_periph_address(DMA_TypeDef *dma,
				      uint32_t channel,
				      uint32_t src_addr,
				      uint32_t dest_addr)
{
	LL_DMA_ConfigAddresses(dma, channel, src_addr, dest_addr);
}

/* The same function is used to set either the periph or the mem addresses */
void stm32_dma_set_periph_mem_address(DMA_TypeDef *dma,
				      uint32_t channel,
				      uint32_t src_addr,
				      uint32_t dest_addr)
{
	LL_DMA_ConfigAddresses(dma, channel, src_addr, dest_addr);
}

static void dma_stm32_irq_handler(const struct device *dev, uint32_t id)
{
	const struct dma_stm32_config *config = dev->config;
	DMA_TypeDef *dma = (DMA_TypeDef *)(config->base);
	struct dma_stm32_stream *stream;
	uint32_t callback_arg;

	__ASSERT_NO_MSG(id < config->max_streams);

	stream = &config->streams[id];
	/* Exit if the stream is no longer busy */
	if (stream->busy == false) {
		dma_stm32_clear_stream_irq(dev, id);
		return;
	}
	/* The dma stream id ranges from STM32_DMA_STREAM_OFFSET to <dma-requests> */
	callback_arg = id + STM32_DMA_STREAM_OFFSET;
	stream->busy = false;

	if (stm32_dma_is_ht_irq_active(dma, id)) {
		/* Let HAL DMA handle flags on its own */
		if (!stream->hal_override) {
			dma_stm32_clear_ht(dma, id);
		}
		stream->dma_callback(dev, stream->user_data, callback_arg, 0);
	} else if (stm32_dma_is_tc_irq_active(dma, id)) {
#ifdef CONFIG_DMAMUX_STM32
		stream->busy = false;
#endif
		/* Let HAL DMA handle flags on its own */
		if (!stream->hal_override) {
			dma_stm32_clear_tc(dma, id);
		}
		stream->dma_callback(dev, stream->user_data, callback_arg, 0);
	} else {
		LOG_ERR("Transfer Error.");
		dma_stm32_dump_stream_irq(dev, id);
		dma_stm32_clear_stream_irq(dev, id);
		stream->dma_callback(dev, stream->user_data,
				     callback_arg, -EIO);
	}
}

static int dma_stm32_get_priority(uint8_t priority, uint32_t *ll_priority)
{
	if (priority >= ARRAY_SIZE(table_priority)) {
		LOG_ERR("Priority error. %d", priority);
		return -EINVAL;
	}

	*ll_priority = table_priority[priority];
	return 0;
}

static int dma_stm32_get_direction(enum dma_channel_direction direction,
				   uint32_t *ll_direction)
{
	switch (direction) {
	case MEMORY_TO_MEMORY:
		*ll_direction = LL_DMA_DIRECTION_MEMORY_TO_MEMORY;
		break;
	case MEMORY_TO_PERIPHERAL:
		*ll_direction = LL_DMA_DIRECTION_MEMORY_TO_PERIPH;
		break;
	case PERIPHERAL_TO_MEMORY:
		*ll_direction = LL_DMA_DIRECTION_PERIPH_TO_MEMORY;
		break;
	default:
		LOG_ERR("Direction error. %d", direction);
		return -EINVAL;
	}

	return 0;
}

static int dma_stm32_disable_stream(DMA_TypeDef *dma, uint32_t id)
{
	int count = 0;

	for (;;) {
		if (stm32_dma_disable_stream(dma, id) == 0) {
			return 0;
		}
		/* After trying for 5 seconds, give up */
		if (count++ > (5 * 1000)) {
			return -EBUSY;
		}
		k_sleep(K_MSEC(1));
	}

	return 0;
}

static int dma_stm32_configure(const struct device *dev,
			       uint32_t id,
			       struct dma_config *config)
{
	const struct dma_stm32_config *dev_config = dev->config;
	struct dma_stm32_stream *stream =
		&dev_config->streams[id - STM32_DMA_STREAM_OFFSET];
	DMA_TypeDef *dma = (DMA_TypeDef *)dev_config->base;
	LL_DMA_InitTypeDef DMA_InitStruct;
	int ret;

	LL_DMA_StructInit(&DMA_InitStruct);

	/* Rebase the channel id so that internal indexing starts at 0 */
	id = id - STM32_DMA_STREAM_OFFSET;

	if (id >= dev_config->max_streams) {
		LOG_ERR("cannot configure the dma stream %d.", id);
		return -EINVAL;
	}

	if (stream->busy) {
		LOG_ERR("dma stream %d is busy.", id);
		return -EBUSY;
	}

	if (dma_stm32_disable_stream(dma, id) != 0) {
		LOG_ERR("could not disable dma stream %d.", id);
		return -EBUSY;
	}

	dma_stm32_clear_stream_irq(dev, id);

	/* Check for a potential DMA override (only if id and stream are valid) */
	if (config->linked_channel == STM32_DMA_HAL_OVERRIDE) {
		/* The DMA channel is driven by the STM32 HAL DMA.
		 * Mark the channel as busy and do only the minimal
		 * configuration needed to route the IRQ properly.
		 */
		stream->busy = true;
		stream->hal_override = true;
		stream->dma_callback = config->dma_callback;
		stream->user_data = config->user_data;
		return 0;
	}

	if (config->head_block->block_size > DMA_STM32_MAX_DATA_ITEMS) {
		LOG_ERR("Data size too big: %d\n",
			config->head_block->block_size);
		return -EINVAL;
	}

	/* Only the same data width for source and dest is supported */
	if (config->dest_data_size != config->source_data_size) {
		LOG_ERR("source and dest data size differ.");
		return -EINVAL;
	}

	if (config->source_data_size != 4U &&
	    config->source_data_size != 2U &&
	    config->source_data_size != 1U) {
		LOG_ERR("source and dest unit size error, %d",
			config->source_data_size);
		return -EINVAL;
	}

	/*
	 * STM32's circular mode automatically reloads both the source and the
	 * destination address counters, so both flags must match.
	 */
	if (config->head_block->source_reload_en !=
		config->head_block->dest_reload_en) {
		LOG_ERR("source_reload_en and dest_reload_en must "
			"be the same.");
		return -EINVAL;
	}

	stream->busy		= true;
	stream->dma_callback	= config->dma_callback;
	stream->direction	= config->channel_direction;
	stream->user_data	= config->user_data;
	stream->src_size	= config->source_data_size;
	stream->dst_size	= config->dest_data_size;

	/* Check the dest and source memory addresses; warn if either is NULL */
	if (config->head_block->source_address == 0) {
		LOG_WRN("source_buffer address is null.");
	}

	if (config->head_block->dest_address == 0) {
		LOG_WRN("dest_buffer address is null.");
	}

	DMA_InitStruct.SrcAddress = config->head_block->source_address;
	DMA_InitStruct.DestAddress = config->head_block->dest_address;
	DMA_InitStruct.BlkHWRequest = LL_DMA_HWREQUEST_SINGLEBURST;
	DMA_InitStruct.DataAlignment = LL_DMA_DATA_ALIGN_ZEROPADD;

	ret = dma_stm32_get_priority(config->channel_priority,
				     &DMA_InitStruct.Priority);
	if (ret < 0) {
		return ret;
	}

	ret = dma_stm32_get_direction(config->channel_direction,
				      &DMA_InitStruct.Direction);
	if (ret < 0) {
		return ret;
	}

	/* This part is for the source */
	switch (config->head_block->source_addr_adj) {
	case DMA_ADDR_ADJ_INCREMENT:
		DMA_InitStruct.SrcIncMode = LL_DMA_SRC_INCREMENT;
		break;
	case DMA_ADDR_ADJ_NO_CHANGE:
		DMA_InitStruct.SrcIncMode = LL_DMA_SRC_FIXED;
		break;
	case DMA_ADDR_ADJ_DECREMENT:
		return -ENOTSUP;
	default:
		LOG_ERR("Source increment error. %d",
			config->head_block->source_addr_adj);
		return -EINVAL;
	}
	LOG_DBG("Channel (%d) src inc (%x).",
		id, DMA_InitStruct.SrcIncMode);

	/* This part is for the dest */
	switch (config->head_block->dest_addr_adj) {
	case DMA_ADDR_ADJ_INCREMENT:
		DMA_InitStruct.DestIncMode = LL_DMA_DEST_INCREMENT;
		break;
	case DMA_ADDR_ADJ_NO_CHANGE:
		DMA_InitStruct.DestIncMode = LL_DMA_DEST_FIXED;
		break;
	case DMA_ADDR_ADJ_DECREMENT:
		return -ENOTSUP;
	default:
		LOG_ERR("Dest increment error. %d",
			config->head_block->dest_addr_adj);
		return -EINVAL;
	}
	LOG_DBG("Channel (%d) dest inc (%x).",
		id, DMA_InitStruct.DestIncMode);

	stream->source_periph = (stream->direction == PERIPHERAL_TO_MEMORY);

	/* Set the data width (source_data_size equals dest_data_size here) */
	int index = find_lsb_set(config->source_data_size) - 1;

	DMA_InitStruct.SrcDataWidth = table_m_size[index];

	index = find_lsb_set(config->dest_data_size) - 1;
	DMA_InitStruct.DestDataWidth = table_p_size[index];

	DMA_InitStruct.SrcBurstLength = 1;
	DMA_InitStruct.DestBurstLength = 1;
	DMA_InitStruct.TransferEventMode = LL_DMA_TCEM_BLK_TRANSFER;
	DMA_InitStruct.SrcAllocatedPort = LL_DMA_SRC_ALLOCATED_PORT0;
	DMA_InitStruct.DestAllocatedPort = LL_DMA_DEST_ALLOCATED_PORT0;
	DMA_InitStruct.LinkAllocatedPort = LL_DMA_LINK_ALLOCATED_PORT1;
	DMA_InitStruct.LinkStepMode = LL_DMA_LSM_FULL_EXECUTION;
	DMA_InitStruct.TriggerMode = LL_DMA_TRIGM_BLK_TRANSFER;
	DMA_InitStruct.TriggerPolarity = LL_DMA_TRIG_POLARITY_MASKED;
	DMA_InitStruct.TriggerSelection = 0;

	if (stream->source_periph) {
		DMA_InitStruct.BlkDataLength = config->head_block->block_size /
					config->source_data_size;
	} else {
		DMA_InitStruct.BlkDataLength = config->head_block->block_size /
					config->dest_data_size;
	}

	/* The request ID is stored in the dma_slot */
	DMA_InitStruct.Request = config->dma_slot;

	LL_DMA_Init(dma, dma_stm32_id_to_stream(id), &DMA_InitStruct);

	LL_DMA_EnableIT_TC(dma, dma_stm32_id_to_stream(id));

	/* Enable the Half-Transfer irq if circular mode is enabled */
	if (config->head_block->source_reload_en) {
		LL_DMA_EnableIT_HT(dma, dma_stm32_id_to_stream(id));
	}

	return ret;
}
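
/*
 * A minimal sketch of a dma_config that satisfies the checks above, assuming a
 * peripheral-to-memory transfer; MY_GPDMA_REQUEST, PERIPH_DATA_REG, rx_buf and
 * my_dma_callback are placeholders, not names provided by this driver:
 *
 *	static struct dma_block_config blk = {
 *		.source_address = (uint32_t)&PERIPH_DATA_REG,	// hypothetical
 *		.dest_address = (uint32_t)rx_buf,
 *		.block_size = sizeof(rx_buf),			// in bytes
 *		.source_addr_adj = DMA_ADDR_ADJ_NO_CHANGE,
 *		.dest_addr_adj = DMA_ADDR_ADJ_INCREMENT,
 *	};
 *	static struct dma_config cfg = {
 *		.dma_slot = MY_GPDMA_REQUEST,	// GPDMA request line number
 *		.channel_direction = PERIPHERAL_TO_MEMORY,
 *		.source_data_size = 1,		// must equal dest_data_size
 *		.dest_data_size = 1,		// only 1, 2 or 4 bytes
 *		.block_count = 1,
 *		.head_block = &blk,
 *		.dma_callback = my_dma_callback,
 *	};
 */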

static int dma_stm32_reload(const struct device *dev, uint32_t id,
			    uint32_t src, uint32_t dst,
			    size_t size)
{
	const struct dma_stm32_config *config = dev->config;
	DMA_TypeDef *dma = (DMA_TypeDef *)(config->base);
	struct dma_stm32_stream *stream;

	/* Rebase the channel id so that internal indexing starts at 0 */
	id = id - STM32_DMA_STREAM_OFFSET;

	if (id >= config->max_streams) {
		return -EINVAL;
	}

	stream = &config->streams[id];

	if (dma_stm32_disable_stream(dma, id) != 0) {
		return -EBUSY;
	}

	if (stream->direction > PERIPHERAL_TO_MEMORY) {
		return -EINVAL;
	}

	LL_DMA_ConfigAddresses(dma,
			       dma_stm32_id_to_stream(id),
			       src, dst);

	if (stream->source_periph) {
		LL_DMA_SetBlkDataLength(dma, dma_stm32_id_to_stream(id),
					size / stream->src_size);
	} else {
		LL_DMA_SetBlkDataLength(dma, dma_stm32_id_to_stream(id),
					size / stream->dst_size);
	}

	stm32_dma_enable_stream(dma, id);

	return 0;
}
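
/*
 * Typical use of the reload path above through the generic API, e.g. from a
 * transfer-complete callback to queue the next buffer (next_buf/next_len are
 * placeholders): the call rewrites the addresses and length and then re-enables
 * the channel, so no separate dma_start() is needed.
 *
 *	dma_reload(dma_dev, channel, (uint32_t)next_buf, dest, next_len);
 */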

static int dma_stm32_start(const struct device *dev, uint32_t id)
{
	const struct dma_stm32_config *config = dev->config;
	DMA_TypeDef *dma = (DMA_TypeDef *)(config->base);

	/* Rebase the channel id so that internal indexing starts at 0 */
	id = id - STM32_DMA_STREAM_OFFSET;

	/* Only M2P or M2M modes can be started manually. */
	if (id >= config->max_streams) {
		return -EINVAL;
	}

	dma_stm32_clear_stream_irq(dev, id);

	stm32_dma_enable_stream(dma, id);

	return 0;
}

static int dma_stm32_stop(const struct device *dev, uint32_t id)
{
	const struct dma_stm32_config *config = dev->config;
	struct dma_stm32_stream *stream = &config->streams[id - STM32_DMA_STREAM_OFFSET];
	DMA_TypeDef *dma = (DMA_TypeDef *)(config->base);

	/* Rebase the channel id so that internal indexing starts at 0 */
	id = id - STM32_DMA_STREAM_OFFSET;

	if (id >= config->max_streams) {
		return -EINVAL;
	}

	dma_stm32_clear_stream_irq(dev, id);
	dma_stm32_disable_stream(dma, id);

	LL_DMA_DisableIT_TC(dma, dma_stm32_id_to_stream(id));

	/* Finally, flag the stream as free */
	stream->busy = false;

	return 0;
}

static int dma_stm32_init(const struct device *dev)
{
	const struct dma_stm32_config *config = dev->config;
	const struct device *clk = DEVICE_DT_GET(STM32_CLOCK_CONTROL_NODE);

	if (clock_control_on(clk,
		(clock_control_subsys_t *) &config->pclken) != 0) {
		LOG_ERR("clock op failed\n");
		return -EIO;
	}

	config->config_irq(dev);

	for (uint32_t i = 0; i < config->max_streams; i++) {
		config->streams[i].busy = false;
	}

	((struct dma_stm32_data *)dev->data)->dma_ctx.magic = 0;
	((struct dma_stm32_data *)dev->data)->dma_ctx.dma_channels = 0;
	((struct dma_stm32_data *)dev->data)->dma_ctx.atomic = 0;

	return 0;
}

static int dma_stm32_get_status(const struct device *dev,
				uint32_t id, struct dma_status *stat)
{
	const struct dma_stm32_config *config = dev->config;
	DMA_TypeDef *dma = (DMA_TypeDef *)(config->base);
	struct dma_stm32_stream *stream;

	/* Rebase the channel id so that internal indexing starts at 0 */
	id = id - STM32_DMA_STREAM_OFFSET;
	if (id >= config->max_streams) {
		return -EINVAL;
	}

	stream = &config->streams[id];
	stat->pending_length = LL_DMA_GetBlkDataLength(dma, dma_stm32_id_to_stream(id));
	stat->dir = stream->direction;
	stat->busy = stream->busy;

	return 0;
}

static const struct dma_driver_api dma_funcs = {
	.reload		 = dma_stm32_reload,
	.config		 = dma_stm32_configure,
	.start		 = dma_stm32_start,
	.stop		 = dma_stm32_stop,
	.get_status	 = dma_stm32_get_status,
};

#define DMA_STM32_OFFSET_INIT(index)
#define DMA_STM32_MEM2MEM_INIT(index)

#define DMA_STM32_INIT_DEV(index)					\
static struct dma_stm32_stream						\
	dma_stm32_streams_##index[DMA_STM32_##index##_STREAM_COUNT];	\
									\
const struct dma_stm32_config dma_stm32_config_##index = {		\
	.pclken = { .bus = DT_INST_CLOCKS_CELL(index, bus),		\
		    .enr = DT_INST_CLOCKS_CELL(index, bits) },		\
	.config_irq = dma_stm32_config_irq_##index,			\
	.base = DT_INST_REG_ADDR(index),				\
	DMA_STM32_MEM2MEM_INIT(index)					\
	.max_streams = DMA_STM32_##index##_STREAM_COUNT,		\
	.streams = dma_stm32_streams_##index,				\
	DMA_STM32_OFFSET_INIT(index)					\
};									\
									\
static struct dma_stm32_data dma_stm32_data_##index = {		\
};									\
									\
DEVICE_DT_INST_DEFINE(index,						\
		      &dma_stm32_init,					\
		      NULL,						\
		      &dma_stm32_data_##index, &dma_stm32_config_##index, \
		      PRE_KERNEL_1, CONFIG_DMA_INIT_PRIORITY,		\
		      &dma_funcs)

#define DMA_STM32_DEFINE_IRQ_HANDLER(dma, chan)				\
static void dma_stm32_irq_##dma##_##chan(const struct device *dev)	\
{									\
	dma_stm32_irq_handler(dev, chan);				\
}

#define DMA_STM32_IRQ_CONNECT(dma, chan)				\
	do {								\
		IRQ_CONNECT(DT_INST_IRQ_BY_IDX(dma, chan, irq),		\
			    DT_INST_IRQ_BY_IDX(dma, chan, priority),	\
			    dma_stm32_irq_##dma##_##chan,		\
			    DEVICE_DT_INST_GET(dma), 0);		\
		irq_enable(DT_INST_IRQ_BY_IDX(dma, chan, irq));		\
	} while (0)

/* The STM32U5 SoC has a single GPDMA instance with 16 channels */
#if DT_NODE_HAS_STATUS(DT_DRV_INST(0), okay)

DMA_STM32_DEFINE_IRQ_HANDLER(0, 0);
DMA_STM32_DEFINE_IRQ_HANDLER(0, 1);
DMA_STM32_DEFINE_IRQ_HANDLER(0, 2);
DMA_STM32_DEFINE_IRQ_HANDLER(0, 3);
DMA_STM32_DEFINE_IRQ_HANDLER(0, 4);
DMA_STM32_DEFINE_IRQ_HANDLER(0, 5);
DMA_STM32_DEFINE_IRQ_HANDLER(0, 6);
DMA_STM32_DEFINE_IRQ_HANDLER(0, 7);
DMA_STM32_DEFINE_IRQ_HANDLER(0, 8);
DMA_STM32_DEFINE_IRQ_HANDLER(0, 9);
DMA_STM32_DEFINE_IRQ_HANDLER(0, 10);
DMA_STM32_DEFINE_IRQ_HANDLER(0, 11);
DMA_STM32_DEFINE_IRQ_HANDLER(0, 12);
DMA_STM32_DEFINE_IRQ_HANDLER(0, 13);
DMA_STM32_DEFINE_IRQ_HANDLER(0, 14);
DMA_STM32_DEFINE_IRQ_HANDLER(0, 15);

static void dma_stm32_config_irq_0(const struct device *dev)
{
	ARG_UNUSED(dev);

	DMA_STM32_IRQ_CONNECT(0, 0);
	DMA_STM32_IRQ_CONNECT(0, 1);
	DMA_STM32_IRQ_CONNECT(0, 2);
	DMA_STM32_IRQ_CONNECT(0, 3);
	DMA_STM32_IRQ_CONNECT(0, 4);
#if DT_INST_IRQ_HAS_IDX(0, 5)
	DMA_STM32_IRQ_CONNECT(0, 5);
#if DT_INST_IRQ_HAS_IDX(0, 6)
	DMA_STM32_IRQ_CONNECT(0, 6);
#if DT_INST_IRQ_HAS_IDX(0, 7)
	DMA_STM32_IRQ_CONNECT(0, 7);
#endif /* DT_INST_IRQ_HAS_IDX(0, 7) */
#endif /* DT_INST_IRQ_HAS_IDX(0, 6) */
#endif /* DT_INST_IRQ_HAS_IDX(0, 5) */
	/* Other STM32 series expose only 5, 6, 7 or 8 DMA channels,
	 * hence the guards above; the STM32U5 GPDMA provides all 16.
	 */
	DMA_STM32_IRQ_CONNECT(0, 8);
	DMA_STM32_IRQ_CONNECT(0, 9);
	DMA_STM32_IRQ_CONNECT(0, 10);
	DMA_STM32_IRQ_CONNECT(0, 11);
	DMA_STM32_IRQ_CONNECT(0, 12);
	DMA_STM32_IRQ_CONNECT(0, 13);
	DMA_STM32_IRQ_CONNECT(0, 14);
	DMA_STM32_IRQ_CONNECT(0, 15);
}

DMA_STM32_INIT_DEV(0);

#endif /* DT_NODE_HAS_STATUS(DT_DRV_INST(0), okay) */