Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions drivers/spi/Kconfig.stm32
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,8 @@ config SPI_STM32_DMA
help
Enable the SPI DMA mode for SPI instances
that enable dma channels in their device tree node.
This can be combined with the SPI_RTIO config so that SPI
transactions are carried out using DMA + RTIO.

config SPI_STM32_ERRATA_BUSY
bool
Expand Down
186 changes: 174 additions & 12 deletions drivers/spi/spi_stm32.c
Original file line number Diff line number Diff line change
Expand Up @@ -194,15 +194,56 @@ static uint8_t bits2bytes(spi_operation_t operation)
*/
static __aligned(32) uint32_t dummy_rx_tx_buffer __nocache;

#ifdef CONFIG_SPI_RTIO
static void spi_stm32_iodev_complete(const struct device *dev, int status);

static void spi_rtio_dma_complete_transaction(const struct device *dev)
{
struct spi_stm32_data *data = dev->data;
const struct spi_stm32_config *cfg = dev->config;

#if DT_HAS_COMPAT_STATUS_OKAY(st_stm32h7_spi)
if (LL_SPI_IsActiveFlag_EOT(cfg->spi)) {
LL_SPI_DisableIT_EOT(cfg->spi);
LL_SPI_ClearFlag_EOT(cfg->spi);
}
/* Clear the interrupt flags */
if (cfg->fifo_enabled) {
LL_SPI_ClearFlag_TXTF(cfg->spi);
LL_SPI_ClearFlag_OVR(cfg->spi);
}
#else
LL_SPI_DisableDMAReq_TX(cfg->spi);
LL_SPI_DisableDMAReq_RX(cfg->spi);
#endif /* st_stm32h7_spi */

int err;

err = dma_stop(data->dma_rx.dma_dev, data->dma_rx.channel);
if (err != 0) {
LOG_DBG("Rx dma_stop failed with error %d", err);
}

err = dma_stop(data->dma_tx.dma_dev, data->dma_tx.channel);
if (err != 0) {
LOG_DBG("Tx dma_stop failed with error %d", err);
}

spi_stm32_iodev_complete(dev, 0);
}

#endif /* CONFIG_SPI_RTIO */

/* This function is executed in the interrupt context */
static void dma_callback(const struct device *dma_dev, void *arg, uint32_t channel, int status)
{
ARG_UNUSED(dma_dev);

/* arg holds SPI DMA data
/* arg holds the SPI device
* Passed in spi_stm32_dma_tx/rx_load()
*/
struct spi_stm32_data *data = arg;
const struct device *spi_dev = arg;
struct spi_stm32_data *data = spi_dev->data;

if (status < 0) {
LOG_ERR("DMA callback error with channel %d.", channel);
Expand All @@ -221,6 +262,43 @@ static void dma_callback(const struct device *dma_dev, void *arg, uint32_t chann
}
}

#ifdef CONFIG_SPI_RTIO
/* If both TX and RX are done, complete the transfer */
if ((data->status_flags & SPI_STM32_DMA_TX_DONE_FLAG) != 0 &&
(data->status_flags & SPI_STM32_DMA_RX_DONE_FLAG) != 0) {

const struct spi_stm32_config *cfg = spi_dev->config;

if ((data->status_flags & SPI_STM32_DMA_ERROR_FLAG) != 0U) {
spi_stm32_iodev_complete(spi_dev, -EIO);
return;
}

#ifdef SPI_SR_FTLVL
while (LL_SPI_GetTxFIFOLevel(cfg->spi) > 0) {
}
#endif /* SPI_SR_FTLVL */

#if DT_HAS_COMPAT_STATUS_OKAY(st_stm32h7_spi)
if (cfg->fifo_enabled && !LL_SPI_IsActiveFlag_EOT(cfg->spi)) {
LL_SPI_EnableIT_EOT(cfg->spi);
return;
}
#else
#ifdef CONFIG_SPI_STM32_ERRATA_BUSY
WAIT_FOR(!ll_spi_dma_busy(cfg->spi), CONFIG_SPI_STM32_BUSY_FLAG_TIMEOUT, k_yield());
#else
while (ll_spi_dma_busy(cfg->spi) && LL_SPI_IsEnabled(cfg->spi)) {
if (LL_SPI_HALF_DUPLEX_RX == LL_SPI_GetTransferDirection(cfg->spi)) {
ll_disable_spi(cfg->spi);
}
}
#endif /* CONFIG_SPI_STM32_ERRATA_BUSY */
#endif /* DT_HAS_COMPAT_STATUS_OKAY(st_stm32h7_spi)*/

spi_rtio_dma_complete_transaction(spi_dev);
}
#endif /* CONFIG_SPI_RTIO */
k_sem_give(&data->status_sem);
}

Expand Down Expand Up @@ -271,8 +349,8 @@ static int spi_stm32_dma_tx_load(const struct device *dev, const uint8_t *buf, s

/* direction is given by the DT */
stream->dma_cfg.head_block = blk_cfg;
/* give the dma channel data as arg, as the callback comes from the dma */
stream->dma_cfg.user_data = data;
/* Pass SPI device as callback context */
stream->dma_cfg.user_data = (void *)dev;
/* pass our client origin to the dma: data->dma_tx.dma_channel */
ret = dma_config(data->dma_tx.dma_dev, data->dma_tx.channel, &stream->dma_cfg);
/* the channel is the actual stream from 0 */
Expand Down Expand Up @@ -327,7 +405,7 @@ static int spi_stm32_dma_rx_load(const struct device *dev, uint8_t *buf, size_t

/* direction is given by the DT */
stream->dma_cfg.head_block = blk_cfg;
stream->dma_cfg.user_data = data;
stream->dma_cfg.user_data = (void *)dev;


/* pass our client origin to the dma: data->dma_rx.channel */
Expand Down Expand Up @@ -666,6 +744,8 @@ static void spi_stm32_cs_control(const struct device *dev, bool on __maybe_unuse
#endif /* DT_HAS_COMPAT_STATUS_OKAY(st_stm32_spi_subghz) */
}

#if !(defined(CONFIG_SPI_RTIO) && defined(CONFIG_SPI_STM32_DMA))

static void spi_stm32_msg_start(const struct device *dev, bool is_rx_empty)
{
const struct spi_stm32_config *cfg = dev->config;
Expand Down Expand Up @@ -725,6 +805,7 @@ static void spi_stm32_msg_start(const struct device *dev, bool is_rx_empty)
#endif /* CONFIG_SOC_SERIES_STM32H7X */
#endif /* CONFIG_SPI_STM32_INTERRUPT */
}
#endif /* !(CONFIG_SPI_RTIO && CONFIG_SPI_STM32_DMA) */

#ifdef CONFIG_SPI_RTIO
/* Forward declaration for RTIO handlers convenience */
Expand All @@ -743,7 +824,8 @@ static void spi_stm32_iodev_msg_start(const struct device *dev, struct spi_confi
const uint8_t *tx_buf, uint8_t *rx_buf, uint32_t buf_len)
{
struct spi_stm32_data *data = dev->data;
uint32_t size = buf_len / bits2bytes(config->operation);
const uint8_t dfs = bits2bytes(config->operation);
const uint32_t size = buf_len / dfs;

const struct spi_buf current_tx = {.buf = NULL, .len = size};
const struct spi_buf current_rx = {.buf = NULL, .len = size};
Expand All @@ -764,9 +846,10 @@ static void spi_stm32_iodev_msg_start(const struct device *dev, struct spi_confi
ctx->recv_frames = 0;
#endif /* CONFIG_SPI_SLAVE */

__maybe_unused const struct spi_stm32_config *cfg = dev->config;
__maybe_unused SPI_TypeDef *spi = cfg->spi;

#if DT_HAS_COMPAT_STATUS_OKAY(st_stm32h7_spi)
const struct spi_stm32_config *cfg = dev->config;
SPI_TypeDef *spi = cfg->spi;

if (cfg->fifo_enabled && SPI_OP_MODE_GET(config->operation) == SPI_OP_MODE_MASTER) {
if (LL_SPI_IsEnabled(spi)) {
Expand All @@ -780,7 +863,77 @@ static void spi_stm32_iodev_msg_start(const struct device *dev, struct spi_confi
}
#endif /* DT_HAS_COMPAT_STATUS_OKAY(st_stm32h7_spi) */

#ifdef CONFIG_SPI_STM32_DMA
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This whole block is duplicated, roughly, from elsewhere in the code. Ideally should be extracted and re-used.

This is also missing checks for the buffers being in cached memory, so this silently fails if they are, instead of checking/warning/exiting earlier.

I was able to track that down and get this working as well for my sensor streaming test case, once I disabled DCACHE, but it would be good to have this check (which properly catches this case in transceive_dma).

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Added helper functions and the DCACHE test


#ifdef CONFIG_DCACHE

if ((tx_buf != NULL && !stm32_buf_in_nocache((uintptr_t)tx_buf, buf_len)) ||
(rx_buf != NULL && !stm32_buf_in_nocache((uintptr_t)rx_buf, buf_len))) {
LOG_ERR("SPI DMA transfers not supported on cached memory");
spi_stm32_iodev_complete(dev, -EINVAL);
return;
}
#endif /* CONFIG_DCACHE */

size_t dma_len;
int ret = 0;

struct dma_config *rx_cfg = &data->dma_rx.dma_cfg;
struct dma_config *tx_cfg = &data->dma_tx.dma_cfg;

rx_cfg->source_data_size = rx_cfg->source_burst_length = dfs;
rx_cfg->dest_data_size = rx_cfg->dest_burst_length = dfs;
tx_cfg->source_data_size = tx_cfg->source_burst_length = dfs;
tx_cfg->dest_data_size = tx_cfg->dest_burst_length = dfs;

uint32_t transfer_dir = LL_SPI_GetTransferDirection(spi);
#if DT_HAS_COMPAT_STATUS_OKAY(st_stm32h7_spi)
/* set request before enabling (else SPI CFG1 reg is write protected) */
spi_dma_enable_requests(spi);

LL_SPI_Enable(spi);

/* In half-duplex rx mode, start transfer after
* setting DMA configurations
*/
if (transfer_dir != LL_SPI_HALF_DUPLEX_RX && LL_SPI_GetMode(spi) == LL_SPI_MODE_MASTER) {
LL_SPI_StartMasterTransfer(spi);
}
#else
LL_SPI_Enable(spi);
#endif /* st_stm32h7_spi */

/* Assert CS before enabling transfer */
spi_stm32_cs_control(dev, true);
data->status_flags = 0;

if (transfer_dir == LL_SPI_FULL_DUPLEX) {
dma_len = spi_context_max_continuous_chunk(&data->ctx);
ret = spi_dma_move_buffers(dev, dma_len);
} else if (transfer_dir == LL_SPI_HALF_DUPLEX_TX) {
dma_len = data->ctx.tx_len;
ret = spi_dma_move_tx_buffers(dev, dma_len);
} else {
dma_len = data->ctx.rx_len;
ret = spi_dma_move_rx_buffers(dev, dma_len);
}
if (ret != 0) {
LOG_ERR("Failed to start SPI DMA transfer");
spi_stm32_iodev_complete(dev, -EIO);
}

#if DT_HAS_COMPAT_STATUS_OKAY(st_stm32h7_spi)
if (transfer_dir == LL_SPI_HALF_DUPLEX_RX && LL_SPI_GetMode(spi) == LL_SPI_MODE_MASTER) {
LL_SPI_StartMasterTransfer(spi);
}
#else
/* toggle the DMA request to restart the transfer */
spi_dma_enable_requests(spi);

#endif /* DT_HAS_COMPAT_STATUS_OKAY(st_stm32h7_spi) */
#else
spi_stm32_msg_start(dev, rx_buf == NULL);
#endif /* CONFIG_SPI_STM32_DMA */
}

static void spi_stm32_iodev_start(const struct device *dev)
Expand Down Expand Up @@ -1001,6 +1154,15 @@ static void spi_stm32_isr(const struct device *dev)
}
}

#if DT_HAS_COMPAT_STATUS_OKAY(st_stm32h7_spi) && defined(CONFIG_SPI_RTIO) && \
defined(CONFIG_SPI_STM32_DMA)
if (LL_SPI_IsActiveFlag_EOT(spi)) {
spi_rtio_dma_complete_transaction(dev);
k_sem_give(&data->status_sem);
return;
}
#endif /* st_stm32h7_spi && CONFIG_SPI_RTIO && CONFIG_SPI_STM32_DMA */

err = spi_stm32_get_err(spi);
if (err) {
spi_stm32_complete(dev, err);
Expand Down Expand Up @@ -1464,7 +1626,7 @@ static int transceive(const struct device *dev,
return ret;
}

#ifdef CONFIG_SPI_STM32_DMA
#if defined(CONFIG_SPI_STM32_DMA) && !defined(CONFIG_SPI_RTIO)

static int wait_dma_rx_tx_done(const struct device *dev)
{
Expand Down Expand Up @@ -1762,21 +1924,21 @@ static int transceive_dma(const struct device *dev,

return ret;
}
#endif /* CONFIG_SPI_STM32_DMA */
#endif /* CONFIG_SPI_STM32_DMA && !CONFIG_SPI_RTIO */

/* Synchronous SPI transceive entry point for the Zephyr SPI API.
 *
 * When both TX and RX DMA channels are configured in the device tree —
 * and the RTIO path does not own the DMA machinery (CONFIG_SPI_RTIO) —
 * the transfer is dispatched to the DMA implementation; otherwise it
 * falls back to the interrupt/polling based transceive().
 *
 * Fix: the diff residue left the old `#ifdef CONFIG_SPI_STM32_DMA`
 * guard nested around the new combined guard (and both matching
 * `#endif`s); only the updated guard is kept.
 *
 * @param dev     SPI controller device
 * @param config  transfer configuration
 * @param tx_bufs buffer set to transmit (may be NULL)
 * @param rx_bufs buffer set to receive into (may be NULL)
 *
 * @return 0 on success, negative errno otherwise
 */
static int spi_stm32_transceive(const struct device *dev,
				const struct spi_config *config,
				const struct spi_buf_set *tx_bufs,
				const struct spi_buf_set *rx_bufs)
{
#if defined(CONFIG_SPI_STM32_DMA) && !defined(CONFIG_SPI_RTIO)
	struct spi_stm32_data *data = dev->data;

	/* DMA path only when both directions have a channel configured */
	if ((data->dma_tx.dma_dev != NULL) && (data->dma_rx.dma_dev != NULL)) {
		return transceive_dma(dev, config, tx_bufs, rx_bufs,
				      false, NULL, NULL);
	}
#endif /* CONFIG_SPI_STM32_DMA && !CONFIG_SPI_RTIO */
	return transceive(dev, config, tx_bufs, rx_bufs, false, NULL, NULL);
}

Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
#
# Copyright (c) 2026 STMicroelectronics
#
# SPDX-License-Identifier: Apache-2.0
#
CONFIG_SPI_RTIO=y
CONFIG_SPI_STM32_DMA=y
CONFIG_SPI_ASYNC=n
23 changes: 23 additions & 0 deletions tests/drivers/spi/spi_loopback/testcase.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -142,6 +142,29 @@ tests:
- stm32u083c_dk
integration_platforms:
- stm32h573i_dk
drivers.spi.stm32_spi_rtio_dma.loopback:
extra_args: EXTRA_CONF_FILE="overlay-stm32-spi-rtio-dma.conf"
platform_allow:
- b_u585i_iot02a
- nucleo_c071rb
- nucleo_f207zg
- nucleo_f429zi
- nucleo_f746zg
- nucleo_f767zi
- nucleo_g474re
- nucleo_h743zi
- nucleo_h753zi
- nucleo_h745zi_q/stm32h745xx/m4
- nucleo_h745zi_q/stm32h745xx/m7
- nucleo_l152re
- nucleo_wba55cg
- nucleo_wb55rg
- nucleo_wl55jc
- stm32h573i_dk
- stm32n6570_dk/stm32n657xx/sb
- stm32u083c_dk
integration_platforms:
- stm32h573i_dk
drivers.spi.gd32_spi_interrupt.loopback:
extra_args: EXTRA_CONF_FILE="overlay-gd32-spi-interrupt.conf"
platform_allow:
Expand Down