blob: ab0e98a31ba41cfd8fda7233a7f2152ba75adfa1 [file] [log] [blame]
Neil Armstrong9033fb22016-09-16 15:24:19 +02001/*
2 * Copyright (c) 2016 BayLibre, SAS
3 *
4 * SPDX-License-Identifier: Apache-2.0
5 */
6
Kumar Gala989484b2020-03-24 14:28:48 -05007#define DT_DRV_COMPAT st_stm32_spi
8
Anas Nashif88aa2ca2018-09-22 21:13:18 -05009#define LOG_LEVEL CONFIG_SPI_LOG_LEVEL
10#include <logging/log.h>
11LOG_MODULE_REGISTER(spi_ll_stm32);
Neil Armstrong9033fb22016-09-16 15:24:19 +020012
Anas Nashifa2fd7d72019-06-26 10:33:55 -040013#include <sys/util.h>
Neil Armstrong9033fb22016-09-16 15:24:19 +020014#include <kernel.h>
Kumar Galaaa2bdbe2018-10-31 12:44:45 -050015#include <soc.h>
Neil Armstrong9033fb22016-09-16 15:24:19 +020016#include <errno.h>
Anas Nashifbd70f6f2019-06-25 15:54:01 -040017#include <drivers/spi.h>
Vincent Veronaff88a22017-11-14 17:05:29 +010018#include <toolchain.h>
Francois Ramuce093dc2020-03-19 09:57:20 +010019#ifdef CONFIG_SPI_STM32_DMA
20#include <dt-bindings/dma/stm32_dma.h>
21#include <drivers/dma.h>
22#endif
Peter Bigot0b0d2e642020-01-25 05:34:53 -060023#include <drivers/clock_control/stm32_clock_control.h>
Anas Nashif17ddd172019-06-25 15:53:47 -040024#include <drivers/clock_control.h>
Neil Armstrong9033fb22016-09-16 15:24:19 +020025
Sebastian Bøeb7eaeb92017-10-27 16:11:54 +020026#include "spi_ll_stm32.h"
Neil Armstrong9033fb22016-09-16 15:24:19 +020027
Tomasz Bursztykaea2431f2018-01-30 13:49:01 +010028#define DEV_CFG(dev) \
29(const struct spi_stm32_config * const)(dev->config->config_info)
Neil Armstrong9033fb22016-09-16 15:24:19 +020030
Tomasz Bursztykaea2431f2018-01-30 13:49:01 +010031#define DEV_DATA(dev) \
32(struct spi_stm32_data * const)(dev->driver_data)
Neil Armstrong9033fb22016-09-16 15:24:19 +020033
Yannis Damigosaf0c9fc2017-12-13 13:46:27 +020034/*
35 * Check for SPI_SR_FRE to determine support for TI mode frame format
36 * error flag, because STM32F1 SoCs do not support it and STM32CUBE
37 * for F1 family defines an unused LL_SPI_SR_FRE.
38 */
Yaël Boutreuxe0d65342019-08-02 07:53:07 +020039#ifdef CONFIG_SOC_SERIES_STM32MP1X
40#define SPI_STM32_ERR_MSK (LL_SPI_SR_UDR | LL_SPI_SR_CRCE | LL_SPI_SR_MODF | \
41 LL_SPI_SR_OVR | LL_SPI_SR_TIFRE)
42#else
Yannis Damigosaf0c9fc2017-12-13 13:46:27 +020043#if defined(LL_SPI_SR_UDR)
Marti Bolivar69bc5eb2017-07-17 14:20:13 -040044#define SPI_STM32_ERR_MSK (LL_SPI_SR_UDR | LL_SPI_SR_CRCERR | LL_SPI_SR_MODF | \
45 LL_SPI_SR_OVR | LL_SPI_SR_FRE)
Yannis Damigosaf0c9fc2017-12-13 13:46:27 +020046#elif defined(SPI_SR_FRE)
Marti Bolivar69bc5eb2017-07-17 14:20:13 -040047#define SPI_STM32_ERR_MSK (LL_SPI_SR_CRCERR | LL_SPI_SR_MODF | \
48 LL_SPI_SR_OVR | LL_SPI_SR_FRE)
Yannis Damigosaf0c9fc2017-12-13 13:46:27 +020049#else
50#define SPI_STM32_ERR_MSK (LL_SPI_SR_CRCERR | LL_SPI_SR_MODF | LL_SPI_SR_OVR)
Marti Bolivar69bc5eb2017-07-17 14:20:13 -040051#endif
Yaël Boutreuxe0d65342019-08-02 07:53:07 +020052#endif /* CONFIG_SOC_SERIES_STM32MP1X */
Marti Bolivar69bc5eb2017-07-17 14:20:13 -040053
Francois Ramuce093dc2020-03-19 09:57:20 +010054
55#ifdef CONFIG_SPI_STM32_DMA
56/* dummy value used for transferring NOP when tx buf is null */
57u32_t nop_tx;
58
59/* This function is executed in the interrupt context */
60static void dma_callback(void *arg, u32_t channel, int status)
61{
Francois Ramu469e5052020-04-22 08:49:47 +020062 /* callback_arg directly holds the client data */
63 struct spi_stm32_data *data = arg;
64 u32_t periph_addr;
65
66 if (status != 0) {
67 LOG_ERR("DMA callback error with channel %d.", channel);
68 data->dma_tx.transfer_complete = true;
69 data->dma_rx.transfer_complete = true;
70 return;
71 }
72
73 /* identify the origin of this callback */
74 if (channel == data->dma_tx.channel) {
75 /* spi tx direction has mem as source and periph as dest */
76 if (data->ctx.tx_count <= 1) {
77 /* if it was the last count, then we are done */
78 data->dma_tx.transfer_complete = true;
79 } else {
80 /* this part of the transfer ends */
81 data->dma_tx.transfer_complete = false;
82 /*
83 * Update the current Tx buffer, decreasing length of
84 * data->ctx.tx_count, by its own length
85 */
86 spi_context_update_tx(&data->ctx, 1, data->ctx.tx_len);
87 /* keep the same dest (peripheral) */
88 periph_addr =
89 data->dma_tx.dma_cfg.head_block->dest_address;
90 /* and reload dma with a new source (memory) buffer */
91 dma_reload(data->dev_dma_tx,
92 data->dma_tx.channel,
93 (u32_t)data->ctx.tx_buf,
94 periph_addr,
95 data->ctx.tx_len);
96 }
97 } else if (channel == data->dma_rx.channel) {
98 /* spi rx direction has periph as source and mem as dest */
99 if (data->ctx.rx_count <= 1) {
100 /* if it was the last count, then we are done */
101 data->dma_rx.transfer_complete = true;
102 } else {
103 /* this part of the transfer ends */
104 data->dma_rx.transfer_complete = false;
105 /*
106 * Update the current Rx buffer, decreasing length of
107 * data->ctx.rx_count, by its own length
108 */
109 spi_context_update_rx(&data->ctx, 1, data->ctx.rx_len);
110 /* keep the same source (peripheral) */
111 periph_addr =
112 data->dma_rx.dma_cfg.head_block->dest_address;
113 /* and reload dma with a new dest (memory) buffer */
114 dma_reload(data->dev_dma_rx,
115 data->dma_rx.channel,
116 periph_addr,
117 (u32_t)data->ctx.rx_buf,
118 data->ctx.rx_len);
119 }
120 } else {
121 LOG_ERR("DMA callback channel %d is not valid.", channel);
122 data->dma_tx.transfer_complete = true;
123 data->dma_rx.transfer_complete = true;
124 return;
125 }
Francois Ramuce093dc2020-03-19 09:57:20 +0100126}
127
/* Configure and start the TX DMA channel for one segment.
 *
 * @dev SPI device instance
 * @buf segment source buffer, or NULL to clock out NOP bytes
 * @len segment length in DMA transfer units
 *
 * @return 0 on success, negative errno from dma_config()/dma_start()
 */
static int spi_stm32_dma_tx_load(struct device *dev, const u8_t *buf,
				 size_t len)
{
	const struct spi_stm32_config *cfg = DEV_CFG(dev);
	struct spi_stm32_data *data = DEV_DATA(dev);
	/* NOTE(review): blk_cfg is stack-local but its address is stored in
	 * stream->dma_cfg.head_block and later read by dma_callback() —
	 * confirm the DMA driver copies the block config at dma_config()
	 * time, otherwise this dangles after return.
	 */
	struct dma_block_config blk_cfg;
	int ret;

	/* remember active TX DMA channel (used in callback) */
	struct stream *stream = &data->dma_tx;

	/* prepare the block for this TX DMA channel */
	memset(&blk_cfg, 0, sizeof(blk_cfg));
	blk_cfg.block_size = len;

	/* tx direction has memory as source and periph as dest. */
	if (buf == NULL) {
		nop_tx = 0;
		/* if tx buff is null, then sends NOP on the line. */
		blk_cfg.source_address = (u32_t)&nop_tx;
		blk_cfg.source_addr_adj = DMA_ADDR_ADJ_NO_CHANGE;
	} else {
		blk_cfg.source_address = (u32_t)buf;
		if (data->dma_tx.src_addr_increment) {
			blk_cfg.source_addr_adj = DMA_ADDR_ADJ_INCREMENT;
		} else {
			blk_cfg.source_addr_adj = DMA_ADDR_ADJ_NO_CHANGE;
		}
	}

	/* destination is the SPI data register, address never changes */
	blk_cfg.dest_address = (u32_t)LL_SPI_DMA_GetRegAddr(cfg->spi);
	/* fifo mode NOT USED there */
	if (data->dma_tx.dst_addr_increment) {
		blk_cfg.dest_addr_adj = DMA_ADDR_ADJ_INCREMENT;
	} else {
		blk_cfg.dest_addr_adj = DMA_ADDR_ADJ_NO_CHANGE;
	}

	/* give the fifo mode from the DT */
	blk_cfg.fifo_mode_control = data->dma_tx.fifo_threshold;

	/* direction is given by the DT */
	stream->dma_cfg.head_block = &blk_cfg;
	/* give the client data as arg, as the callback comes from the dma */
	stream->dma_cfg.callback_arg = data;
	/* pass our client origin to the dma: data->dma_tx.dma_channel */
	ret = dma_config(data->dev_dma_tx, data->dma_tx.channel,
			 &stream->dma_cfg);
	/* the channel is the actual stream from 0 */
	if (ret != 0) {
		return ret;
	}

	/* starting this dma transfer */
	data->dma_tx.transfer_complete = false;

	/* gives the request ID to the dma mux */
	return dma_start(data->dev_dma_tx, data->dma_tx.channel);
}
187
188static int spi_stm32_dma_rx_load(struct device *dev, u8_t *buf, size_t len)
189{
190 const struct spi_stm32_config *cfg = DEV_CFG(dev);
191 struct spi_stm32_data *data = DEV_DATA(dev);
192 struct dma_block_config blk_cfg;
193 int ret;
194
195 /* retrieve active RX DMA channel (used in callback) */
196 struct stream *stream = &data->dma_rx;
197
198 /* prepare the block for this RX DMA channel */
199 memset(&blk_cfg, 0, sizeof(blk_cfg));
200 blk_cfg.block_size = len;
201
202 /* rx direction has periph as source and mem as dest. */
203 blk_cfg.dest_address = (buf != NULL) ? (u32_t)buf : (u32_t)NULL;
204 blk_cfg.source_address = (u32_t)LL_SPI_DMA_GetRegAddr(cfg->spi);
205 if (data->dma_rx.src_addr_increment) {
206 blk_cfg.source_addr_adj = DMA_ADDR_ADJ_INCREMENT;
207 } else {
208 blk_cfg.source_addr_adj = DMA_ADDR_ADJ_NO_CHANGE;
209 }
210 if (data->dma_rx.dst_addr_increment) {
211 blk_cfg.dest_addr_adj = DMA_ADDR_ADJ_INCREMENT;
212 } else {
213 blk_cfg.dest_addr_adj = DMA_ADDR_ADJ_NO_CHANGE;
214 }
215
216 /* give the fifo mode from the DT */
217 blk_cfg.fifo_mode_control = data->dma_rx.fifo_threshold;
218
219 /* direction is given by the DT */
220 stream->dma_cfg.head_block = &blk_cfg;
221 stream->dma_cfg.callback_arg = data;
222
223
224 /* pass our client origin to the dma: data->dma_rx.channel */
225 ret = dma_config(data->dev_dma_rx, data->dma_rx.channel,
226 &stream->dma_cfg);
227 /* the channel is the actual stream from 0 */
228 if (ret != 0) {
229 return ret;
230 }
231
232 /* starting this dma transfer */
233 data->dma_rx.transfer_complete = false;
234
235 /* gives the request ID to the dma mux */
236 return dma_start(data->dev_dma_rx, data->dma_rx.channel);
237}
238
239static int spi_dma_move_buffers(struct device *dev)
240{
241 struct spi_stm32_data *data = DEV_DATA(dev);
242 int ret;
243
244 /* the length to transmit depends on the source data size (1,2 4) */
245 data->dma_segment_len = data->ctx.tx_len
246 / data->dma_tx.dma_cfg.source_data_size;
247
248 /* Load receive first, so it can accept transmit data */
249 if (data->ctx.rx_len) {
250 ret = spi_stm32_dma_rx_load(dev, data->ctx.rx_buf,
251 data->dma_segment_len);
252 } else {
253 ret = spi_stm32_dma_rx_load(dev, NULL, data->dma_segment_len);
254 }
255
256 if (ret != 0) {
257 return ret;
258 }
259
260 if (data->ctx.tx_len) {
261 ret = spi_stm32_dma_tx_load(dev, data->ctx.tx_buf,
262 data->dma_segment_len);
263 } else {
264 ret = spi_stm32_dma_tx_load(dev, NULL, data->dma_segment_len);
265 }
266
267 return ret;
268}
269
270static bool spi_stm32_dma_transfer_ongoing(struct spi_stm32_data *data)
271{
272 return ((data->dma_tx.transfer_complete != true)
273 && (data->dma_rx.transfer_complete != true));
274}
275#endif /* CONFIG_SPI_STM32_DMA */
276
Marti Bolivarf3b18bc2017-07-18 13:02:33 -0400277/* Value to shift out when no application data needs transmitting. */
278#define SPI_STM32_TX_NOP 0x00
279
280static bool spi_stm32_transfer_ongoing(struct spi_stm32_data *data)
281{
282 return spi_context_tx_on(&data->ctx) || spi_context_rx_on(&data->ctx);
283}
284
Marti Bolivar69bc5eb2017-07-17 14:20:13 -0400285static int spi_stm32_get_err(SPI_TypeDef *spi)
286{
287 u32_t sr = LL_SPI_ReadReg(spi, SR);
288
Armando Viscontibb9fe422018-05-22 15:00:53 +0200289 if (sr & SPI_STM32_ERR_MSK) {
Anas Nashif88aa2ca2018-09-22 21:13:18 -0500290 LOG_ERR("%s: err=%d", __func__,
Erwan Gouriou85742e32018-10-05 16:35:26 +0200291 sr & (u32_t)SPI_STM32_ERR_MSK);
Armando Viscontic7053642018-05-31 11:15:45 +0200292
293 /* OVR error must be explicitly cleared */
294 if (LL_SPI_IsActiveFlag_OVR(spi)) {
295 LL_SPI_ClearFlag_OVR(spi);
296 }
297
Armando Viscontibb9fe422018-05-22 15:00:53 +0200298 return -EIO;
299 }
300
301 return 0;
Marti Bolivar69bc5eb2017-07-17 14:20:13 -0400302}
303
/* Shift a SPI frame as master.
 *
 * Blocking: busy-waits for TXE before writing one frame, then for RXNE
 * before reading the frame clocked back in. Word size (8 vs 16 bit) is
 * taken from the active context configuration; when the TX buffer is
 * exhausted a NOP frame is shifted out instead.
 */
static void spi_stm32_shift_m(SPI_TypeDef *spi, struct spi_stm32_data *data)
{
	u16_t tx_frame = SPI_STM32_TX_NOP;
	u16_t rx_frame;

	/* wait for the TX register/FIFO to accept a frame */
	while (!ll_func_tx_is_empty(spi)) {
		/* NOP */
	}

#ifdef CONFIG_SOC_SERIES_STM32MP1X
	/* With the STM32MP1, if the device is the SPI master, we need to enable
	 * the start of the transfer with LL_SPI_StartMasterTransfer(spi)
	 */
	if (LL_SPI_GetMode(spi) == LL_SPI_MODE_MASTER) {
		LL_SPI_StartMasterTransfer(spi);
		while (!LL_SPI_IsActiveMasterTransfer(spi)) {
			/* NOP */
		}
	}
#endif

	if (SPI_WORD_SIZE_GET(data->ctx.config->operation) == 8) {
		if (spi_context_tx_buf_on(&data->ctx)) {
			tx_frame = UNALIGNED_GET((u8_t *)(data->ctx.tx_buf));
		}
		LL_SPI_TransmitData8(spi, tx_frame);
		/* The update is ignored if TX is off. */
		spi_context_update_tx(&data->ctx, 1, 1);
	} else {
		if (spi_context_tx_buf_on(&data->ctx)) {
			tx_frame = UNALIGNED_GET((u16_t *)(data->ctx.tx_buf));
		}
		LL_SPI_TransmitData16(spi, tx_frame);
		/* The update is ignored if TX is off. */
		spi_context_update_tx(&data->ctx, 2, 1);
	}

	/* wait for the frame clocked back in (full-duplex) */
	while (!ll_func_rx_is_not_empty(spi)) {
		/* NOP */
	}

	if (SPI_WORD_SIZE_GET(data->ctx.config->operation) == 8) {
		rx_frame = LL_SPI_ReceiveData8(spi);
		if (spi_context_rx_buf_on(&data->ctx)) {
			UNALIGNED_PUT(rx_frame, (u8_t *)data->ctx.rx_buf);
		}
		spi_context_update_rx(&data->ctx, 1, 1);
	} else {
		rx_frame = LL_SPI_ReceiveData16(spi);
		if (spi_context_rx_buf_on(&data->ctx)) {
			UNALIGNED_PUT(rx_frame, (u16_t *)data->ctx.rx_buf);
		}
		spi_context_update_rx(&data->ctx, 2, 1);
	}
}
360
/* Shift a SPI frame as slave.
 *
 * Non-blocking: only acts when the hardware is ready. Writes the next
 * TX frame if TXE is set and data remains (otherwise disables the TXE
 * interrupt), and drains one RX frame if RXNE is set and an RX buffer
 * is available. Word size follows the active context configuration.
 */
static void spi_stm32_shift_s(SPI_TypeDef *spi, struct spi_stm32_data *data)
{
	if (ll_func_tx_is_empty(spi) && spi_context_tx_on(&data->ctx)) {
		u16_t tx_frame;

		if (SPI_WORD_SIZE_GET(data->ctx.config->operation) == 8) {
			tx_frame = UNALIGNED_GET((u8_t *)(data->ctx.tx_buf));
			LL_SPI_TransmitData8(spi, tx_frame);
			spi_context_update_tx(&data->ctx, 1, 1);
		} else {
			tx_frame = UNALIGNED_GET((u16_t *)(data->ctx.tx_buf));
			LL_SPI_TransmitData16(spi, tx_frame);
			spi_context_update_tx(&data->ctx, 2, 1);
		}
	} else {
		/* nothing (more) to send: stop TXE interrupts */
		ll_func_disable_int_tx_empty(spi);
	}

	if (ll_func_rx_is_not_empty(spi) &&
	    spi_context_rx_buf_on(&data->ctx)) {
		u16_t rx_frame;

		if (SPI_WORD_SIZE_GET(data->ctx.config->operation) == 8) {
			rx_frame = LL_SPI_ReceiveData8(spi);
			UNALIGNED_PUT(rx_frame, (u8_t *)data->ctx.rx_buf);
			spi_context_update_rx(&data->ctx, 1, 1);
		} else {
			rx_frame = LL_SPI_ReceiveData16(spi);
			UNALIGNED_PUT(rx_frame, (u16_t *)data->ctx.rx_buf);
			spi_context_update_rx(&data->ctx, 2, 1);
		}
	}
}
395
396/*
397 * Without a FIFO, we can only shift out one frame's worth of SPI
398 * data, and read the response back.
399 *
400 * TODO: support 16-bit data frames.
401 */
402static int spi_stm32_shift_frames(SPI_TypeDef *spi, struct spi_stm32_data *data)
403{
404 u16_t operation = data->ctx.config->operation;
405
406 if (SPI_OP_MODE_GET(operation) == SPI_OP_MODE_MASTER) {
Erwan Gouriouca618472020-01-06 17:39:53 +0100407 spi_stm32_shift_m(spi, data);
Marti Bolivarf3b18bc2017-07-18 13:02:33 -0400408 } else {
409 spi_stm32_shift_s(spi, data);
410 }
411
412 return spi_stm32_get_err(spi);
413}
414
/* Finish a transfer: mask interrupts, drop CS, drain residue, and
 * disable the peripheral.
 *
 * @status final transfer result, propagated to waiters in IRQ mode.
 *
 * The ordering here matters: CS is released before the busy-wait so
 * the peripheral can settle, and MODF is cleared because it holds BSY.
 */
static void spi_stm32_complete(struct spi_stm32_data *data, SPI_TypeDef *spi,
			       int status)
{
#ifdef CONFIG_SPI_STM32_INTERRUPT
	/* stop all SPI interrupt sources before tearing down */
	ll_func_disable_int_tx_empty(spi);
	ll_func_disable_int_rx_not_empty(spi);
	ll_func_disable_int_errors(spi);
#endif

	spi_context_cs_control(&data->ctx, false);

#if DT_HAS_COMPAT(st_stm32_spi_fifo)
	/* Flush RX buffer */
	while (ll_func_rx_is_not_empty(spi)) {
		(void) LL_SPI_ReceiveData8(spi);
	}
#endif

	if (LL_SPI_GetMode(spi) == LL_SPI_MODE_MASTER) {
		/* wait for the last frame to finish shifting */
		while (ll_func_spi_is_busy(spi)) {
			/* NOP */
		}
	}
	/* BSY flag is cleared when MODF flag is raised */
	if (LL_SPI_IsActiveFlag_MODF(spi)) {
		LL_SPI_ClearFlag_MODF(spi);
	}

	ll_func_disable_spi(spi);

#ifdef CONFIG_SPI_STM32_INTERRUPT
	/* wake up the thread blocked in spi_context_wait_for_completion() */
	spi_context_complete(&data->ctx, status);
#endif
}
449
Marti Bolivarb13b2c52017-07-17 14:35:22 -0400450#ifdef CONFIG_SPI_STM32_INTERRUPT
Neil Armstrong9033fb22016-09-16 15:24:19 +0200451static void spi_stm32_isr(void *arg)
452{
453 struct device * const dev = (struct device *) arg;
454 const struct spi_stm32_config *cfg = dev->config->config_info;
455 struct spi_stm32_data *data = dev->driver_data;
456 SPI_TypeDef *spi = cfg->spi;
Marti Bolivar69bc5eb2017-07-17 14:20:13 -0400457 int err;
458
459 err = spi_stm32_get_err(spi);
460 if (err) {
461 spi_stm32_complete(data, spi, err);
462 return;
463 }
Neil Armstrong9033fb22016-09-16 15:24:19 +0200464
Marti Bolivarf3b18bc2017-07-18 13:02:33 -0400465 if (spi_stm32_transfer_ongoing(data)) {
466 err = spi_stm32_shift_frames(spi, data);
Neil Armstrong9033fb22016-09-16 15:24:19 +0200467 }
468
Marti Bolivarf3b18bc2017-07-18 13:02:33 -0400469 if (err || !spi_stm32_transfer_ongoing(data)) {
470 spi_stm32_complete(data, spi, err);
Neil Armstrong9033fb22016-09-16 15:24:19 +0200471 }
472}
473#endif
474
/* Apply @config to the SPI peripheral unless already active.
 *
 * @return 0 on success, -ENOTSUP for unsupported word sizes,
 *         -EIO if the clock rate cannot be read, -EINVAL if the
 *         requested frequency is out of the prescaler range.
 *
 * On success data->ctx.config is updated, which makes subsequent
 * identical calls a no-op via spi_context_configured().
 */
static int spi_stm32_configure(struct device *dev,
			       const struct spi_config *config)
{
	const struct spi_stm32_config *cfg = DEV_CFG(dev);
	struct spi_stm32_data *data = DEV_DATA(dev);
	/* prescaler table: entry i divides the bus clock by 2^(i+1) */
	const u32_t scaler[] = {
		LL_SPI_BAUDRATEPRESCALER_DIV2,
		LL_SPI_BAUDRATEPRESCALER_DIV4,
		LL_SPI_BAUDRATEPRESCALER_DIV8,
		LL_SPI_BAUDRATEPRESCALER_DIV16,
		LL_SPI_BAUDRATEPRESCALER_DIV32,
		LL_SPI_BAUDRATEPRESCALER_DIV64,
		LL_SPI_BAUDRATEPRESCALER_DIV128,
		LL_SPI_BAUDRATEPRESCALER_DIV256
	};
	SPI_TypeDef *spi = cfg->spi;
	u32_t clock;
	int br;

	if (spi_context_configured(&data->ctx, config)) {
		/* Nothing to do */
		return 0;
	}

	/* only 8- and 16-bit frames are supported */
	if ((SPI_WORD_SIZE_GET(config->operation) != 8)
	    && (SPI_WORD_SIZE_GET(config->operation) != 16)) {
		return -ENOTSUP;
	}

	if (clock_control_get_rate(device_get_binding(STM32_CLOCK_CONTROL_NAME),
			(clock_control_subsys_t) &cfg->pclken, &clock) < 0) {
		LOG_ERR("Failed call clock_control_get_rate");
		return -EIO;
	}

	/* pick the smallest divider whose output does not exceed the
	 * requested frequency
	 */
	for (br = 1 ; br <= ARRAY_SIZE(scaler) ; ++br) {
		u32_t clk = clock >> br;

		if (clk <= config->frequency) {
			break;
		}
	}

	if (br > ARRAY_SIZE(scaler)) {
		LOG_ERR("Unsupported frequency %uHz, max %uHz, min %uHz",
			    config->frequency,
			    clock >> 1,
			    clock >> ARRAY_SIZE(scaler));
		return -EINVAL;
	}

	/* peripheral must be disabled while reconfiguring */
	LL_SPI_Disable(spi);
	LL_SPI_SetBaudRatePrescaler(spi, scaler[br - 1]);

	if (SPI_MODE_GET(config->operation) & SPI_MODE_CPOL) {
		LL_SPI_SetClockPolarity(spi, LL_SPI_POLARITY_HIGH);
	} else {
		LL_SPI_SetClockPolarity(spi, LL_SPI_POLARITY_LOW);
	}

	if (SPI_MODE_GET(config->operation) & SPI_MODE_CPHA) {
		LL_SPI_SetClockPhase(spi, LL_SPI_PHASE_2EDGE);
	} else {
		LL_SPI_SetClockPhase(spi, LL_SPI_PHASE_1EDGE);
	}

	LL_SPI_SetTransferDirection(spi, LL_SPI_FULL_DUPLEX);

	if (config->operation & SPI_TRANSFER_LSB) {
		LL_SPI_SetTransferBitOrder(spi, LL_SPI_LSB_FIRST);
	} else {
		LL_SPI_SetTransferBitOrder(spi, LL_SPI_MSB_FIRST);
	}

	LL_SPI_DisableCRC(spi);

	/* GPIO CS (or no HW SS support) -> software NSS management */
	if (config->cs || !IS_ENABLED(CONFIG_SPI_STM32_USE_HW_SS)) {
		LL_SPI_SetNSSMode(spi, LL_SPI_NSS_SOFT);
	} else {
		if (config->operation & SPI_OP_MODE_SLAVE) {
			LL_SPI_SetNSSMode(spi, LL_SPI_NSS_HARD_INPUT);
		} else {
			LL_SPI_SetNSSMode(spi, LL_SPI_NSS_HARD_OUTPUT);
		}
	}

	if (config->operation & SPI_OP_MODE_SLAVE) {
		LL_SPI_SetMode(spi, LL_SPI_MODE_SLAVE);
	} else {
		LL_SPI_SetMode(spi, LL_SPI_MODE_MASTER);
	}

	if (SPI_WORD_SIZE_GET(config->operation) == 8) {
		LL_SPI_SetDataWidth(spi, LL_SPI_DATAWIDTH_8BIT);
	} else {
		LL_SPI_SetDataWidth(spi, LL_SPI_DATAWIDTH_16BIT);
	}

#if DT_HAS_COMPAT(st_stm32_spi_fifo)
	ll_func_set_fifo_threshold_8bit(spi);
#endif

#ifdef CONFIG_SPI_STM32_DMA
	/* with LL_SPI_FULL_DUPLEX mode, both tx and Rx DMA are on */
	if (data->dev_dma_tx) {
		LL_SPI_EnableDMAReq_TX(spi);
	}
	if (data->dev_dma_rx) {
		LL_SPI_EnableDMAReq_RX(spi);
	}
#endif /* CONFIG_SPI_STM32_DMA */

#ifndef CONFIG_SOC_SERIES_STM32F1X
	LL_SPI_SetStandard(spi, LL_SPI_PROTOCOL_MOTOROLA);
#endif

	/* At this point, it's mandatory to set this on the context! */
	data->ctx.config = config;

	spi_context_cs_configure(&data->ctx);

	LOG_DBG("Installed config %p: freq %uHz (div = %u),"
		    " mode %u/%u/%u, slave %u",
		    config, clock >> br, 1 << br,
		    (SPI_MODE_GET(config->operation) & SPI_MODE_CPOL) ? 1 : 0,
		    (SPI_MODE_GET(config->operation) & SPI_MODE_CPHA) ? 1 : 0,
		    (SPI_MODE_GET(config->operation) & SPI_MODE_LOOP) ? 1 : 0,
		    config->slave);

	return 0;
}
606
/* Release the bus lock held by a previous transceive on this device.
 * @config is unused; the lock is dropped unconditionally.
 */
static int spi_stm32_release(struct device *dev,
			       const struct spi_config *config)
{
	struct spi_stm32_data *data = DEV_DATA(dev);

	spi_context_unlock_unconditionally(&data->ctx);

	return 0;
}
616
Tomasz Bursztykaea2431f2018-01-30 13:49:01 +0100617static int transceive(struct device *dev,
618 const struct spi_config *config,
619 const struct spi_buf_set *tx_bufs,
620 const struct spi_buf_set *rx_bufs,
Neil Armstrong9033fb22016-09-16 15:24:19 +0200621 bool asynchronous, struct k_poll_signal *signal)
622{
Tomasz Bursztykaea2431f2018-01-30 13:49:01 +0100623 const struct spi_stm32_config *cfg = DEV_CFG(dev);
624 struct spi_stm32_data *data = DEV_DATA(dev);
Neil Armstrong9033fb22016-09-16 15:24:19 +0200625 SPI_TypeDef *spi = cfg->spi;
626 int ret;
627
Tomasz Bursztykaea2431f2018-01-30 13:49:01 +0100628 if (!tx_bufs && !rx_bufs) {
Neil Armstrong9033fb22016-09-16 15:24:19 +0200629 return 0;
630 }
631
632#ifndef CONFIG_SPI_STM32_INTERRUPT
633 if (asynchronous) {
634 return -ENOTSUP;
635 }
636#endif
637
638 spi_context_lock(&data->ctx, asynchronous, signal);
639
Tomasz Bursztykaea2431f2018-01-30 13:49:01 +0100640 ret = spi_stm32_configure(dev, config);
Neil Armstrong9033fb22016-09-16 15:24:19 +0200641 if (ret) {
642 return ret;
643 }
644
645 /* Set buffers info */
Tomasz Bursztykaea2431f2018-01-30 13:49:01 +0100646 spi_context_buffers_setup(&data->ctx, tx_bufs, rx_bufs, 1);
Neil Armstrong9033fb22016-09-16 15:24:19 +0200647
Kumar Gala59997872020-04-20 09:50:00 -0500648#if DT_HAS_COMPAT(st_stm32_spi_fifo)
Neil Armstrong9033fb22016-09-16 15:24:19 +0200649 /* Flush RX buffer */
Yaël Boutreux876eb632019-08-02 10:02:52 +0200650 while (ll_func_rx_is_not_empty(spi)) {
Neil Armstrong9033fb22016-09-16 15:24:19 +0200651 (void) LL_SPI_ReceiveData8(spi);
652 }
653#endif
654
655 LL_SPI_Enable(spi);
656
Marti Bolivarb13b2c52017-07-17 14:35:22 -0400657 /* This is turned off in spi_stm32_complete(). */
Neil Armstrong9033fb22016-09-16 15:24:19 +0200658 spi_context_cs_control(&data->ctx, true);
659
660#ifdef CONFIG_SPI_STM32_INTERRUPT
Yaël Boutreux876eb632019-08-02 10:02:52 +0200661 ll_func_enable_int_errors(spi);
Marti Bolivar69bc5eb2017-07-17 14:20:13 -0400662
Neil Armstrong9033fb22016-09-16 15:24:19 +0200663 if (rx_bufs) {
Yaël Boutreux876eb632019-08-02 10:02:52 +0200664 ll_func_enable_int_rx_not_empty(spi);
Neil Armstrong9033fb22016-09-16 15:24:19 +0200665 }
666
Yaël Boutreux876eb632019-08-02 10:02:52 +0200667 ll_func_enable_int_tx_empty(spi);
Neil Armstrong9033fb22016-09-16 15:24:19 +0200668
Marti Bolivar69bc5eb2017-07-17 14:20:13 -0400669 ret = spi_context_wait_for_completion(&data->ctx);
Neil Armstrong9033fb22016-09-16 15:24:19 +0200670#else
671 do {
Marti Bolivarf3b18bc2017-07-18 13:02:33 -0400672 ret = spi_stm32_shift_frames(spi, data);
673 } while (!ret && spi_stm32_transfer_ongoing(data));
Neil Armstrong9033fb22016-09-16 15:24:19 +0200674
Marti Bolivarb13b2c52017-07-17 14:35:22 -0400675 spi_stm32_complete(data, spi, ret);
Armando Viscontibb9fe422018-05-22 15:00:53 +0200676
677#ifdef CONFIG_SPI_SLAVE
678 if (spi_context_is_slave(&data->ctx) && !ret) {
679 ret = data->ctx.recv_frames;
680 }
681#endif /* CONFIG_SPI_SLAVE */
682
Neil Armstrong9033fb22016-09-16 15:24:19 +0200683#endif
684
Marti Bolivar42987722017-07-17 15:12:44 -0400685 spi_context_release(&data->ctx, ret);
Neil Armstrong9033fb22016-09-16 15:24:19 +0200686
Armando Viscontibb9fe422018-05-22 15:00:53 +0200687 return ret;
Neil Armstrong9033fb22016-09-16 15:24:19 +0200688}
689
Francois Ramuce093dc2020-03-19 09:57:20 +0100690#ifdef CONFIG_SPI_STM32_DMA
691static int transceive_dma(struct device *dev,
692 const struct spi_config *config,
693 const struct spi_buf_set *tx_bufs,
694 const struct spi_buf_set *rx_bufs,
695 bool asynchronous, struct k_poll_signal *signal)
696{
697 const struct spi_stm32_config *cfg = DEV_CFG(dev);
698 struct spi_stm32_data *data = DEV_DATA(dev);
699 SPI_TypeDef *spi = cfg->spi;
700 int ret;
701
702 if (!tx_bufs && !rx_bufs) {
703 return 0;
704 }
705
706 if (asynchronous) {
707 return -ENOTSUP;
708 }
709
710 spi_context_lock(&data->ctx, asynchronous, signal);
711
712 data->dma_tx.transfer_complete = false;
713 data->dma_rx.transfer_complete = false;
714
715 ret = spi_stm32_configure(dev, config);
716 if (ret) {
717 return ret;
718 }
719
720 /* Set buffers info */
721 spi_context_buffers_setup(&data->ctx, tx_bufs, rx_bufs, 1);
722
723 ret = spi_dma_move_buffers(dev);
724 if (ret) {
725 return ret;
726 }
727
728 LL_SPI_Enable(spi);
729
730 do {
731 } while (spi_stm32_dma_transfer_ongoing(data));
732
733 /* This is turned off in spi_stm32_complete(). */
734 spi_context_cs_control(&data->ctx, true);
735
736 spi_context_release(&data->ctx, ret);
737
738 return ret;
739}
740#endif /* CONFIG_SPI_STM32_DMA */
741
Tomasz Bursztykaea2431f2018-01-30 13:49:01 +0100742static int spi_stm32_transceive(struct device *dev,
743 const struct spi_config *config,
744 const struct spi_buf_set *tx_bufs,
745 const struct spi_buf_set *rx_bufs)
Neil Armstrong9033fb22016-09-16 15:24:19 +0200746{
Francois Ramuce093dc2020-03-19 09:57:20 +0100747#ifdef CONFIG_SPI_STM32_DMA
748 struct spi_stm32_data *data = DEV_DATA(dev);
749
750 if ((data->dma_tx.dma_name != NULL)
751 && (data->dma_rx.dma_name != NULL)) {
752 return transceive_dma(dev, config, tx_bufs, rx_bufs,
753 false, NULL);
754 }
755#endif /* CONFIG_SPI_STM32_DMA */
Tomasz Bursztykaea2431f2018-01-30 13:49:01 +0100756 return transceive(dev, config, tx_bufs, rx_bufs, false, NULL);
Neil Armstrong9033fb22016-09-16 15:24:19 +0200757}
758
Tomasz Bursztykada42c002018-01-29 22:12:50 +0100759#ifdef CONFIG_SPI_ASYNC
/* Asynchronous transceive entry point; completion is reported through
 * @async (IRQ mode only — the polled path rejects async requests).
 */
static int spi_stm32_transceive_async(struct device *dev,
				      const struct spi_config *config,
				      const struct spi_buf_set *tx_bufs,
				      const struct spi_buf_set *rx_bufs,
				      struct k_poll_signal *async)
{
	return transceive(dev, config, tx_bufs, rx_bufs, true, async);
}
Tomasz Bursztykada42c002018-01-29 22:12:50 +0100768#endif /* CONFIG_SPI_ASYNC */
Neil Armstrong9033fb22016-09-16 15:24:19 +0200769
/* SPI driver API vtable registered with the device model. */
static const struct spi_driver_api api_funcs = {
	.transceive = spi_stm32_transceive,
#ifdef CONFIG_SPI_ASYNC
	.transceive_async = spi_stm32_transceive_async,
#endif
	.release = spi_stm32_release,
};
777
/* Device init: enable the peripheral clock, hook the IRQ, resolve the
 * DMA device bindings (when configured), and unlock the context.
 *
 * @return 0 on success, -EIO if the clock cannot be enabled,
 *         -ENODEV if a configured DMA device is missing.
 */
static int spi_stm32_init(struct device *dev)
{
	/* "unused" silences the warning when neither the DMA block nor
	 * the final unlock references data on some configurations
	 */
	struct spi_stm32_data *data __attribute__((unused)) = dev->driver_data;
	const struct spi_stm32_config *cfg = dev->config->config_info;

	__ASSERT_NO_MSG(device_get_binding(STM32_CLOCK_CONTROL_NAME));

	if (clock_control_on(device_get_binding(STM32_CLOCK_CONTROL_NAME),
			       (clock_control_subsys_t) &cfg->pclken) != 0) {
		LOG_ERR("Could not enable SPI clock");
		return -EIO;
	}

#ifdef CONFIG_SPI_STM32_INTERRUPT
	cfg->irq_config(dev);
#endif

#ifdef CONFIG_SPI_STM32_DMA
	if (data->dma_tx.dma_name != NULL) {
		/* Get the binding to the DMA device */
		data->dev_dma_tx = device_get_binding(data->dma_tx.dma_name);
		if (!data->dev_dma_tx) {
			LOG_ERR("%s device not found", data->dma_tx.dma_name);
			return -ENODEV;
		}
	}

	if (data->dma_rx.dma_name != NULL) {
		data->dev_dma_rx = device_get_binding(data->dma_rx.dma_name);
		if (!data->dev_dma_rx) {
			LOG_ERR("%s device not found", data->dma_rx.dma_name);
			return -ENODEV;
		}
	}
#endif /* CONFIG_SPI_STM32_DMA */
	/* make the context usable for the first transceive call */
	spi_context_unlock_unconditionally(&data->ctx);

	return 0;
}
817
#ifdef CONFIG_SPI_STM32_INTERRUPT
/* Forward declaration of the per-instance IRQ configuration function. */
#define STM32_SPI_IRQ_HANDLER_DECL(id)					\
	static void spi_stm32_irq_config_func_##id(struct device *dev)
/* Initializer fragment that wires the function into spi_stm32_config. */
#define STM32_SPI_IRQ_HANDLER_FUNC(id)					\
	.irq_config = spi_stm32_irq_config_func_##id,
/* Definition: connect and enable the instance IRQ from devicetree. */
#define STM32_SPI_IRQ_HANDLER(id)					\
static void spi_stm32_irq_config_func_##id(struct device *dev)		\
{									\
	IRQ_CONNECT(DT_INST_IRQN(id),					\
		    DT_INST_IRQ(id, priority),				\
		    spi_stm32_isr, DEVICE_GET(spi_stm32_##id), 0);	\
	irq_enable(DT_INST_IRQN(id));					\
}
#else
/* Interrupt support disabled: all three helpers expand to nothing. */
#define STM32_SPI_IRQ_HANDLER_DECL(id)
#define STM32_SPI_IRQ_HANDLER_FUNC(id)
#define STM32_SPI_IRQ_HANDLER(id)
#endif
836
/* Shorthand for the 'channel_config' cell of a named DT 'dmas' entry. */
#define DMA_CHANNEL_CONFIG(id, dir)					\
		DT_INST_DMAS_CELL_BY_NAME(id, dir, channel_config)
/* Shorthand for the 'features' cell of a named DT 'dmas' entry. */
#define DMA_FEATURES(id, dir)						\
		DT_INST_DMAS_CELL_BY_NAME(id, dir, features)

/*
 * Initializer for one DMA stream member (dma_rx or dma_tx) of
 * spi_stm32_data, filled from the instance's devicetree 'dmas'
 * property.  src_dev/dest_dev select which side (MEMORY or PERIPHERAL)
 * each STM32_DMA_CONFIG_* accessor decodes from the channel_config
 * cell; dir_cap is currently unused by the expansion.
 */
#define SPI_DMA_CHANNEL_INIT(index, dir, dir_cap, src_dev, dest_dev)	\
.dma_##dir = {								\
	.dma_name = DT_INST_DMAS_LABEL_BY_NAME(index, dir),		\
	.channel =							\
		DT_INST_DMAS_CELL_BY_NAME(index, dir, channel),		\
	.dma_cfg = {							\
		.dma_slot =						\
			DT_INST_DMAS_CELL_BY_NAME(index, dir, slot),	\
		.channel_direction = STM32_DMA_CONFIG_DIRECTION(	\
					DMA_CHANNEL_CONFIG(index, dir)),\
		.source_data_size = STM32_DMA_CONFIG_##src_dev##_DATA_SIZE(\
					DMA_CHANNEL_CONFIG(index, dir)),\
		.dest_data_size = STM32_DMA_CONFIG_##dest_dev##_DATA_SIZE(\
					DMA_CHANNEL_CONFIG(index, dir)),\
		.source_burst_length = 1, /* SINGLE transfer */		\
		.dest_burst_length = 1, /* SINGLE transfer */		\
		.channel_priority = STM32_DMA_CONFIG_PRIORITY(		\
					DMA_CHANNEL_CONFIG(index, dir)),\
		.dma_callback = dma_callback,				\
		.block_count = 2,					\
	},								\
	.src_addr_increment = STM32_DMA_CONFIG_##src_dev##_ADDR_INC(	\
				DMA_CHANNEL_CONFIG(index, dir)),	\
	.dst_addr_increment = STM32_DMA_CONFIG_##dest_dev##_ADDR_INC(	\
				DMA_CHANNEL_CONFIG(index, dir)),	\
	.transfer_complete = false,					\
	.fifo_threshold = STM32_DMA_FEATURES_FIFO_THRESHOLD(		\
				DMA_FEATURES(index, dir))		\
}
871
/*
 * Per-instance boilerplate: IRQ config declaration, const config
 * (register base + clock enable bits from devicetree), runtime data
 * (SPI context plus optional rx/tx DMA channel initializers), the
 * device object itself, and finally the IRQ config function body.
 */
#define STM32_SPI_INIT(id)						\
STM32_SPI_IRQ_HANDLER_DECL(id);						\
									\
static const struct spi_stm32_config spi_stm32_cfg_##id = {		\
	.spi = (SPI_TypeDef *) DT_INST_REG_ADDR(id),			\
	.pclken = {							\
		.enr = DT_INST_CLOCKS_CELL(id, bits),			\
		.bus = DT_INST_CLOCKS_CELL(id, bus)			\
	},								\
	STM32_SPI_IRQ_HANDLER_FUNC(id)					\
};									\
									\
static struct spi_stm32_data spi_stm32_dev_data_##id = {		\
	SPI_CONTEXT_INIT_LOCK(spi_stm32_dev_data_##id, ctx),		\
	SPI_CONTEXT_INIT_SYNC(spi_stm32_dev_data_##id, ctx),		\
	UTIL_AND(DT_INST_DMAS_HAS_NAME(id, rx),				\
		 SPI_DMA_CHANNEL_INIT(id, rx, RX, PERIPHERAL, MEMORY)),	\
	UTIL_AND(DT_INST_DMAS_HAS_NAME(id, tx),				\
		 SPI_DMA_CHANNEL_INIT(id, tx, TX, MEMORY, PERIPHERAL)),	\
};									\
									\
DEVICE_AND_API_INIT(spi_stm32_##id, DT_INST_LABEL(id),			\
		    &spi_stm32_init,					\
		    &spi_stm32_dev_data_##id, &spi_stm32_cfg_##id,	\
		    POST_KERNEL, CONFIG_SPI_INIT_PRIORITY,		\
		    &api_funcs);					\
									\
STM32_SPI_IRQ_HANDLER(id)

/* Instantiate the driver for every enabled st,stm32-spi DT node. */
DT_INST_FOREACH(STM32_SPI_INIT)