duplicate emptyline removal (#14027)
[betaflight.git] / src / platform / STM32 / bus_spi_ll.c
blob da00e15d3fb61e59f4e53b5acc45a2ad0bc6da93
/*
 * This file is part of Cleanflight and Betaflight.
 *
 * Cleanflight and Betaflight are free software. You can redistribute
 * this software and/or modify this software under the terms of the
 * GNU General Public License as published by the Free Software
 * Foundation, either version 3 of the License, or (at your option)
 * any later version.
 *
 * Cleanflight and Betaflight are distributed in the hope that they
 * will be useful, but WITHOUT ANY WARRANTY; without even the implied
 * warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
 * See the GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this software.
 *
 * If not, see <http://www.gnu.org/licenses/>.
 */
#include <stdbool.h>
#include <stdint.h>
#include <string.h>

#include "platform.h"

#if defined(USE_SPI)

#include "common/utils.h"
#include "common/maths.h"

#include "drivers/bus.h"
#include "drivers/bus_spi.h"
#include "drivers/bus_spi_impl.h"
#include "drivers/dma.h"
#include "drivers/io.h"
#include "drivers/rcc.h"
// Use DMA, if possible, when at least this many bytes are to be transferred
#define SPI_DMA_THRESHOLD 8
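// Transfers below this threshold normally fall back to the polled path in
// spiSequenceStart(); DMA is still forced for multi-segment lists or when the
// terminating segment leaves CS asserted (see the dmaSafe/segmentCount checks there).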
#ifndef SPI1_NSS_PIN
#define SPI1_NSS_PIN NONE
#endif
#ifndef SPI2_NSS_PIN
#define SPI2_NSS_PIN NONE
#endif
#ifndef SPI3_NSS_PIN
#define SPI3_NSS_PIN NONE
#endif
#ifndef SPI4_NSS_PIN
#define SPI4_NSS_PIN NONE
#endif

#define SPI_DEFAULT_TIMEOUT 10
#ifdef STM32H7
#define IS_DTCM(p) (((uint32_t)p & 0xfffe0000) == 0x20000000)
#elif defined(STM32F7)
#define IS_DTCM(p) (((uint32_t)p & 0xffff0000) == 0x20000000)
#elif defined(STM32G4)
#define IS_CCM(p) ((((uint32_t)p & 0xffff8000) == 0x10000000) || (((uint32_t)p & 0xffff8000) == 0x20018000))
#endif
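// The macros above identify tightly-coupled (DTCM) or CCM RAM purely by address
// range. On the H7 and G4 these regions cannot be reached by the general-purpose
// DMA controllers, while on the F7 DTCM is accessible but uncached, so the code
// below uses them both to reject unsafe DMA buffers and to skip needless D-cache
// maintenance.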
static LL_SPI_InitTypeDef defaultInit =
{
    .TransferDirection = LL_SPI_FULL_DUPLEX,
    .Mode = LL_SPI_MODE_MASTER,
    .DataWidth = LL_SPI_DATAWIDTH_8BIT,
    .NSS = LL_SPI_NSS_SOFT,
    .BaudRate = LL_SPI_BAUDRATEPRESCALER_DIV8,
    .BitOrder = LL_SPI_MSB_FIRST,
    .CRCCalculation = LL_SPI_CRCCALCULATION_DISABLE,
    .ClockPolarity = LL_SPI_POLARITY_HIGH,
    .ClockPhase = LL_SPI_PHASE_2EDGE,
};
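// Map a clock divisor to the baud rate prescaler (BR) bits. The helper below expects
// the (possibly halved) divisor to be a power of two in the range 2..256: ffs()
// returns the 1-based index of the lowest set bit, so ffs(2) - 2 == 0 selects /2 and
// ffs(256) - 2 == 7 selects /256. For example, a requested divisor of 16 on SPI1
// yields the /16 prescaler, while the same divisor on SPI2 (APB1) is first halved
// to 8 to compensate for the slower peripheral clock.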
static uint32_t spiDivisorToBRbits(const SPI_TypeDef *instance, uint16_t divisor)
{
#if !defined(STM32H7)
    // SPI2 and SPI3 are on APB1/AHB1, whose PCLK is half that of APB2/AHB2.
    if (instance == SPI2 || instance == SPI3) {
        divisor /= 2; // Safe for divisor == 0 or 1
    }
#else
    UNUSED(instance);
#endif

    divisor = constrain(divisor, 2, 256);

#if defined(STM32H7)
    const uint32_t baudRatePrescaler[8] = {
        LL_SPI_BAUDRATEPRESCALER_DIV2,
        LL_SPI_BAUDRATEPRESCALER_DIV4,
        LL_SPI_BAUDRATEPRESCALER_DIV8,
        LL_SPI_BAUDRATEPRESCALER_DIV16,
        LL_SPI_BAUDRATEPRESCALER_DIV32,
        LL_SPI_BAUDRATEPRESCALER_DIV64,
        LL_SPI_BAUDRATEPRESCALER_DIV128,
        LL_SPI_BAUDRATEPRESCALER_DIV256,
    };
    int prescalerIndex = ffs(divisor) - 2; // prescaler begins at "/2"

    return baudRatePrescaler[prescalerIndex];
#else
    return (ffs(divisor) - 2) << SPI_CR1_BR_Pos;
#endif
}
void spiInitDevice(SPIDevice device)
{
    spiDevice_t *spi = &spiDevice[device];

    if (!spi->dev) {
        return;
    }

    // Enable SPI clock
    RCC_ClockCmd(spi->rcc, ENABLE);
    RCC_ResetCmd(spi->rcc, ENABLE);

    IOInit(IOGetByTag(spi->sck), OWNER_SPI_SCK, RESOURCE_INDEX(device));
    IOInit(IOGetByTag(spi->miso), OWNER_SPI_SDI, RESOURCE_INDEX(device));
    IOInit(IOGetByTag(spi->mosi), OWNER_SPI_SDO, RESOURCE_INDEX(device));

    IOConfigGPIOAF(IOGetByTag(spi->miso), SPI_IO_AF_SDI_CFG, spi->misoAF);
    IOConfigGPIOAF(IOGetByTag(spi->mosi), SPI_IO_AF_CFG, spi->mosiAF);
    IOConfigGPIOAF(IOGetByTag(spi->sck), SPI_IO_AF_SCK_CFG_HIGH, spi->sckAF);

    LL_SPI_Disable(spi->dev);
    LL_SPI_DeInit(spi->dev);

#if defined(STM32H7)
    // Prevent glitching when SPI is disabled
    LL_SPI_EnableGPIOControl(spi->dev);

    LL_SPI_SetFIFOThreshold(spi->dev, LL_SPI_FIFO_TH_01DATA);
    LL_SPI_Init(spi->dev, &defaultInit);
#else
    LL_SPI_SetRxFIFOThreshold(spi->dev, SPI_RXFIFO_THRESHOLD_QF);

    LL_SPI_Init(spi->dev, &defaultInit);
    LL_SPI_Enable(spi->dev);
#endif
}
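// The LL_DMA_InitTypeDef structures referenced by bus->initTx/initRx are filled in
// once here with everything that does not change between transfers (direction,
// peripheral data register address, byte-wide accesses); only the memory address,
// increment mode and length are updated per segment in spiInternalInitStream().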
void spiInternalResetDescriptors(busDevice_t *bus)
{
    LL_DMA_InitTypeDef *initTx = bus->initTx;

    LL_DMA_StructInit(initTx);
#if defined(STM32G4) || defined(STM32H7)
    initTx->PeriphRequest = bus->dmaTx->channel;
#else
    initTx->Channel = bus->dmaTx->channel;
#endif
    initTx->Mode = LL_DMA_MODE_NORMAL;
    initTx->Direction = LL_DMA_DIRECTION_MEMORY_TO_PERIPH;
#if defined(STM32H7)
    initTx->PeriphOrM2MSrcAddress = (uint32_t)&bus->busType_u.spi.instance->TXDR;
#else
    initTx->PeriphOrM2MSrcAddress = (uint32_t)&bus->busType_u.spi.instance->DR;
#endif
    initTx->Priority = LL_DMA_PRIORITY_LOW;
    initTx->PeriphOrM2MSrcIncMode = LL_DMA_PERIPH_NOINCREMENT;
    initTx->PeriphOrM2MSrcDataSize = LL_DMA_PDATAALIGN_BYTE;
    initTx->MemoryOrM2MDstDataSize = LL_DMA_MDATAALIGN_BYTE;

    if (bus->dmaRx) {
        LL_DMA_InitTypeDef *initRx = bus->initRx;

        LL_DMA_StructInit(initRx);
#if defined(STM32G4) || defined(STM32H7)
        initRx->PeriphRequest = bus->dmaRx->channel;
#else
        initRx->Channel = bus->dmaRx->channel;
#endif
        initRx->Mode = LL_DMA_MODE_NORMAL;
        initRx->Direction = LL_DMA_DIRECTION_PERIPH_TO_MEMORY;
#if defined(STM32H7)
        initRx->PeriphOrM2MSrcAddress = (uint32_t)&bus->busType_u.spi.instance->RXDR;
#else
        initRx->PeriphOrM2MSrcAddress = (uint32_t)&bus->busType_u.spi.instance->DR;
#endif
        initRx->Priority = LL_DMA_PRIORITY_LOW;
        initRx->PeriphOrM2MSrcIncMode = LL_DMA_PERIPH_NOINCREMENT;
        initRx->PeriphOrM2MSrcDataSize = LL_DMA_PDATAALIGN_BYTE;
    }
}
void spiInternalResetStream(dmaChannelDescriptor_t *descriptor)
{
    // Disable the stream
#if defined(STM32G4)
    LL_DMA_DisableChannel(descriptor->dma, descriptor->stream);
    while (LL_DMA_IsEnabledChannel(descriptor->dma, descriptor->stream));
#else
    LL_DMA_DisableStream(descriptor->dma, descriptor->stream);
    while (LL_DMA_IsEnabledStream(descriptor->dma, descriptor->stream));
#endif

    // Clear any pending interrupt flags
    DMA_CLEAR_FLAG(descriptor, DMA_IT_HTIF | DMA_IT_TEIF | DMA_IT_TCIF);
}
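// Polled (non-DMA) transfer. On the H7 the transfer size is programmed up front and the
// peripheral signals completion via EOT. On the other targets the loop below packs bytes
// into 16-bit data-register accesses while at least two bytes remain (switching the RX
// FIFO threshold to 16 bits to match), which roughly halves the number of flag polls,
// then drops back to a single 8-bit access for any trailing odd byte.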
FAST_CODE static bool spiInternalReadWriteBufPolled(SPI_TypeDef *instance, const uint8_t *txData, uint8_t *rxData, int len)
{
#if defined(STM32H7)
    LL_SPI_SetTransferSize(instance, len);
    LL_SPI_Enable(instance);
    LL_SPI_StartMasterTransfer(instance);
    while (len) {
        while (!LL_SPI_IsActiveFlag_TXP(instance));
        uint8_t b = txData ? *(txData++) : 0xFF;
        LL_SPI_TransmitData8(instance, b);

        while (!LL_SPI_IsActiveFlag_RXP(instance));
        b = LL_SPI_ReceiveData8(instance);
        if (rxData) {
            *(rxData++) = b;
        }
        --len;
    }
    while (!LL_SPI_IsActiveFlag_EOT(instance));
    LL_SPI_ClearFlag_TXTF(instance);
    LL_SPI_Disable(instance);
#else
    // set 16-bit transfer
    CLEAR_BIT(instance->CR2, SPI_RXFIFO_THRESHOLD);
    while (len > 1) {
        while (!LL_SPI_IsActiveFlag_TXE(instance));
        uint16_t w;
        if (txData) {
            w = *((uint16_t *)txData);
            txData += 2;
        } else {
            w = 0xFFFF;
        }
        LL_SPI_TransmitData16(instance, w);

        while (!LL_SPI_IsActiveFlag_RXNE(instance));
        w = LL_SPI_ReceiveData16(instance);
        if (rxData) {
            *((uint16_t *)rxData) = w;
            rxData += 2;
        }
        len -= 2;
    }

    // set 8-bit transfer
    SET_BIT(instance->CR2, SPI_RXFIFO_THRESHOLD);
    if (len) {
        while (!LL_SPI_IsActiveFlag_TXE(instance));
        uint8_t b = txData ? *(txData++) : 0xFF;
        LL_SPI_TransmitData8(instance, b);

        while (!LL_SPI_IsActiveFlag_RXNE(instance));
        b = LL_SPI_ReceiveData8(instance);
        if (rxData) {
            *(rxData++) = b;
        }
        --len;
    }
#endif

    return true;
}
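// Prepare the Tx/Rx DMA init structures for a segment. When a segment has no transmit
// or receive buffer, the DMA is pointed at a static dummy byte with memory increment
// disabled, so clocking out 0xFF filler or discarding received bytes needs no real
// buffer. On cached parts the transmit buffer is cleaned to main memory and the
// receive buffer is cleaned-and-invalidated before the transfer, unless it lives in
// DTCM or, on the H7, in the dedicated DMA_RAM region.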
void spiInternalInitStream(const extDevice_t *dev, bool preInit)
{
    STATIC_DMA_DATA_AUTO uint8_t dummyTxByte = 0xff;
    STATIC_DMA_DATA_AUTO uint8_t dummyRxByte;
    busDevice_t *bus = dev->bus;

    busSegment_t *segment = (busSegment_t *)bus->curSegment;

    if (preInit) {
        // Prepare the init structure for the next segment to reduce inter-segment interval
        segment++;
        if (segment->len == 0) {
            // There's no following segment
            return;
        }
    }

    int len = segment->len;

    uint8_t *txData = segment->u.buffers.txData;
    LL_DMA_InitTypeDef *initTx = bus->initTx;

    if (txData) {
#ifdef __DCACHE_PRESENT
#ifdef STM32H7
        if ((txData < &_dmaram_start__) || (txData >= &_dmaram_end__)) {
#else
        // No need to flush DTCM memory
        if (!IS_DTCM(txData)) {
#endif
            // Flush the D cache to ensure the data to be written is in main memory
            SCB_CleanDCache_by_Addr(
                (uint32_t *)((uint32_t)txData & ~CACHE_LINE_MASK),
                (((uint32_t)txData & CACHE_LINE_MASK) + len - 1 + CACHE_LINE_SIZE) & ~CACHE_LINE_MASK);
        }
#endif // __DCACHE_PRESENT
        initTx->MemoryOrM2MDstAddress = (uint32_t)txData;
        initTx->MemoryOrM2MDstIncMode = LL_DMA_MEMORY_INCREMENT;
    } else {
        initTx->MemoryOrM2MDstAddress = (uint32_t)&dummyTxByte;
        initTx->MemoryOrM2MDstIncMode = LL_DMA_MEMORY_NOINCREMENT;
    }
    initTx->NbData = len;

#if !defined(STM32G4) && !defined(STM32H7)
    if (dev->bus->dmaRx) {
#endif
        uint8_t *rxData = segment->u.buffers.rxData;
        LL_DMA_InitTypeDef *initRx = bus->initRx;

        if (rxData) {
            /* Flush the D cache for the start and end of the receive buffer as
             * the cache will be invalidated after the transfer and any valid data
             * just before/after must be in memory at that point
             */
#ifdef __DCACHE_PRESENT
            // No need to flush/invalidate DTCM memory
#ifdef STM32H7
            if ((rxData < &_dmaram_start__) || (rxData >= &_dmaram_end__)) {
#else
            // No need to flush DTCM memory
            if (!IS_DTCM(rxData)) {
#endif
                SCB_CleanInvalidateDCache_by_Addr(
                    (uint32_t *)((uint32_t)rxData & ~CACHE_LINE_MASK),
                    (((uint32_t)rxData & CACHE_LINE_MASK) + len - 1 + CACHE_LINE_SIZE) & ~CACHE_LINE_MASK);
            }
#endif // __DCACHE_PRESENT
            initRx->MemoryOrM2MDstAddress = (uint32_t)rxData;
            initRx->MemoryOrM2MDstIncMode = LL_DMA_MEMORY_INCREMENT;
        } else {
            initRx->MemoryOrM2MDstAddress = (uint32_t)&dummyRxByte;
            initRx->MemoryOrM2MDstIncMode = LL_DMA_MEMORY_NOINCREMENT;
        }
        initRx->NbData = len;
#if !defined(STM32G4) && !defined(STM32H7)
    }
#endif
}
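// Start a DMA transfer for the current segment. Completion is signalled by the Rx
// stream/channel transfer-complete interrupt rather than the Tx one, since Tx
// completes as soon as the FIFO has drained while the SPI clock may still be running.
// The "#if !defined(STM32G4) && !defined(STM32H7)" sandwiches around "if (dmaRx)"
// compile the runtime check out on G4/H7, where an Rx DMA channel is expected to
// always be present; on the remaining targets a Tx-only fallback path exists.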
void spiInternalStartDMA(const extDevice_t *dev)
{
    busDevice_t *bus = dev->bus;

    dmaChannelDescriptor_t *dmaTx = bus->dmaTx;
    dmaChannelDescriptor_t *dmaRx = bus->dmaRx;

#if !defined(STM32G4) && !defined(STM32H7)
    if (dmaRx) {
#endif
        // Use the correct callback argument
        dmaRx->userParam = (uint32_t)dev;

        // Clear transfer flags
        DMA_CLEAR_FLAG(dmaTx, DMA_IT_HTIF | DMA_IT_TEIF | DMA_IT_TCIF);
        DMA_CLEAR_FLAG(dmaRx, DMA_IT_HTIF | DMA_IT_TEIF | DMA_IT_TCIF);

#ifdef STM32G4
        // Disable channels to enable update
        LL_DMA_DisableChannel(dmaTx->dma, dmaTx->stream);
        LL_DMA_DisableChannel(dmaRx->dma, dmaRx->stream);

        /* Use the Rx interrupt as this occurs once the SPI operation is complete whereas the Tx interrupt
         * occurs earlier when the Tx FIFO is empty, but the SPI operation is still in progress
         */
        LL_DMA_EnableIT_TC(dmaRx->dma, dmaRx->stream);

        // Update channels
        LL_DMA_Init(dmaTx->dma, dmaTx->stream, bus->initTx);
        LL_DMA_Init(dmaRx->dma, dmaRx->stream, bus->initRx);

        // Enable channels
        LL_DMA_EnableChannel(dmaTx->dma, dmaTx->stream);
        LL_DMA_EnableChannel(dmaRx->dma, dmaRx->stream);

        SET_BIT(dev->bus->busType_u.spi.instance->CR2, SPI_CR2_TXDMAEN | SPI_CR2_RXDMAEN);
#else
        DMA_Stream_TypeDef *streamRegsTx = (DMA_Stream_TypeDef *)dmaTx->ref;
        DMA_Stream_TypeDef *streamRegsRx = (DMA_Stream_TypeDef *)dmaRx->ref;

        // Disable streams to enable update
        LL_DMA_WriteReg(streamRegsTx, CR, 0U);
        LL_DMA_WriteReg(streamRegsRx, CR, 0U);

        /* Use the Rx interrupt as this occurs once the SPI operation is complete whereas the Tx interrupt
         * occurs earlier when the Tx FIFO is empty, but the SPI operation is still in progress
         */
        LL_EX_DMA_EnableIT_TC(streamRegsRx);

        // Update streams
        LL_DMA_Init(dmaTx->dma, dmaTx->stream, bus->initTx);
        LL_DMA_Init(dmaRx->dma, dmaRx->stream, bus->initRx);

        /* Note from AN4031
         *
         * If the user enables the used peripheral before the corresponding DMA stream, a FEIF
         * (FIFO Error Interrupt Flag) may be set due to the fact the DMA is not ready to provide
         * the first required data to the peripheral (in case of memory-to-peripheral transfer).
         */

        // Enable the SPI DMA Tx & Rx requests
#if defined(STM32H7)
        LL_SPI_SetTransferSize(dev->bus->busType_u.spi.instance, dev->bus->curSegment->len);
        LL_DMA_EnableStream(dmaTx->dma, dmaTx->stream);
        LL_DMA_EnableStream(dmaRx->dma, dmaRx->stream);
        SET_BIT(dev->bus->busType_u.spi.instance->CFG1, SPI_CFG1_RXDMAEN | SPI_CFG1_TXDMAEN);
        LL_SPI_Enable(dev->bus->busType_u.spi.instance);
        LL_SPI_StartMasterTransfer(dev->bus->busType_u.spi.instance);
#else
        // Enable streams
        LL_DMA_EnableStream(dmaTx->dma, dmaTx->stream);
        LL_DMA_EnableStream(dmaRx->dma, dmaRx->stream);

        SET_BIT(dev->bus->busType_u.spi.instance->CR2, SPI_CR2_TXDMAEN | SPI_CR2_RXDMAEN);
#endif
#if !defined(STM32G4) && !defined(STM32H7)
    } else {
        DMA_Stream_TypeDef *streamRegsTx = (DMA_Stream_TypeDef *)dmaTx->ref;

        // Use the correct callback argument
        dmaTx->userParam = (uint32_t)dev;

        // Clear transfer flags
        DMA_CLEAR_FLAG(dmaTx, DMA_IT_HTIF | DMA_IT_TEIF | DMA_IT_TCIF);

        // Disable streams to enable update
        LL_DMA_WriteReg(streamRegsTx, CR, 0U);

        LL_EX_DMA_EnableIT_TC(streamRegsTx);

        // Update streams
        LL_DMA_Init(dmaTx->dma, dmaTx->stream, bus->initTx);

        /* Note from AN4031
         *
         * If the user enables the used peripheral before the corresponding DMA stream, a FEIF
         * (FIFO Error Interrupt Flag) may be set due to the fact the DMA is not ready to provide
         * the first required data to the peripheral (in case of memory-to-peripheral transfer).
         */

        // Enable the SPI DMA Tx request
        // Enable streams
        LL_DMA_EnableStream(dmaTx->dma, dmaTx->stream);

        SET_BIT(dev->bus->busType_u.spi.instance->CR2, SPI_CR2_TXDMAEN);
    }
#endif
#endif
}
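// Stop a DMA transfer. For a full-duplex transfer both DMA streams/channels and the
// SPI DMA requests are simply disabled. On the Tx-only fallback path there is no Rx
// DMA to mark completion, so the SPI BSY flag is polled until the last frame has been
// clocked out and any data left in the receive register is drained before the Tx
// stream is torn down.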
void spiInternalStopDMA(const extDevice_t *dev)
{
    busDevice_t *bus = dev->bus;

    dmaChannelDescriptor_t *dmaTx = bus->dmaTx;
    dmaChannelDescriptor_t *dmaRx = bus->dmaRx;
    SPI_TypeDef *instance = bus->busType_u.spi.instance;

#if !defined(STM32G4) && !defined(STM32H7)
    if (dmaRx) {
#endif
        // Disable the DMA engine and SPI interface
#ifdef STM32G4
        LL_DMA_DisableChannel(dmaTx->dma, dmaTx->stream);
        LL_DMA_DisableChannel(dmaRx->dma, dmaRx->stream);
#else
        LL_DMA_DisableStream(dmaRx->dma, dmaRx->stream);
        LL_DMA_DisableStream(dmaTx->dma, dmaTx->stream);
#endif

        // Clear transfer flags
        DMA_CLEAR_FLAG(dmaRx, DMA_IT_HTIF | DMA_IT_TEIF | DMA_IT_TCIF);

        LL_SPI_DisableDMAReq_TX(instance);
        LL_SPI_DisableDMAReq_RX(instance);
#if defined(STM32H7)
        LL_SPI_ClearFlag_TXTF(dev->bus->busType_u.spi.instance);
        LL_SPI_Disable(dev->bus->busType_u.spi.instance);
#endif
#if !defined(STM32G4) && !defined(STM32H7)
    } else {
        // Ensure the current transmission is complete
        while (LL_SPI_IsActiveFlag_BSY(instance));

        // Drain the RX buffer
        while (LL_SPI_IsActiveFlag_RXNE(instance)) {
            instance->DR;
        }

        // Disable the DMA engine and SPI interface
        LL_DMA_DisableStream(dmaTx->dma, dmaTx->stream);

        DMA_CLEAR_FLAG(dmaTx, DMA_IT_HTIF | DMA_IT_TEIF | DMA_IT_TCIF);

        LL_SPI_DisableDMAReq_TX(instance);
#endif
#if !defined(STM32G4) && !defined(STM32H7)
    }
#endif
}
// DMA transfer setup and start
FAST_CODE void spiSequenceStart(const extDevice_t *dev)
{
    busDevice_t *bus = dev->bus;
    SPI_TypeDef *instance = bus->busType_u.spi.instance;
    spiDevice_t *spi = &spiDevice[spiDeviceByInstance(instance)];
    bool dmaSafe = dev->useDMA;
    uint32_t xferLen = 0;
    uint32_t segmentCount = 0;

    bus->initSegment = true;

    // Switch bus speed
#if !defined(STM32H7)
    LL_SPI_Disable(instance);
#endif

    if (dev->busType_u.spi.speed != bus->busType_u.spi.speed) {
        LL_SPI_SetBaudRatePrescaler(instance, spiDivisorToBRbits(instance, dev->busType_u.spi.speed));
        bus->busType_u.spi.speed = dev->busType_u.spi.speed;
    }

    // Switch SPI clock polarity/phase if necessary
    if (dev->busType_u.spi.leadingEdge != bus->busType_u.spi.leadingEdge) {
        if (dev->busType_u.spi.leadingEdge) {
            IOConfigGPIOAF(IOGetByTag(spi->sck), SPI_IO_AF_SCK_CFG_LOW, spi->sckAF);
            LL_SPI_SetClockPhase(instance, LL_SPI_PHASE_1EDGE);
            LL_SPI_SetClockPolarity(instance, LL_SPI_POLARITY_LOW);
        } else {
            IOConfigGPIOAF(IOGetByTag(spi->sck), SPI_IO_AF_SCK_CFG_HIGH, spi->sckAF);
            LL_SPI_SetClockPhase(instance, LL_SPI_PHASE_2EDGE);
            LL_SPI_SetClockPolarity(instance, LL_SPI_POLARITY_HIGH);
        }

        bus->busType_u.spi.leadingEdge = dev->busType_u.spi.leadingEdge;
    }

#if !defined(STM32H7)
    LL_SPI_Enable(instance);
#endif
    /* Where data is read into a cached buffer whose start or end is not cache-line
     * aligned, there is a risk of corrupting other data sharing those cache lines.
     * After the read completes, the cache lines covering the buffer are invalidated so
     * that the processor sees the read data rather than stale cache contents. Unfortunately,
     * any other data in the area covered by those cache lines, just before or after the
     * buffer, is invalidated too, so anything the processor wrote to those locations during
     * the DMA operation would be lost.
     */
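    // For example (assuming 32-byte cache lines): a 5-byte read into an ordinary SRAM
    // buffer that does not start on a 32-byte boundary is neither aligned nor a whole
    // number of cache lines long, so the checks below clear dmaSafe and the transfer
    // falls back to the polled path instead.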
    // Check that any reads are cache-line aligned and a whole number of cache lines in length
    for (busSegment_t *checkSegment = (busSegment_t *)bus->curSegment; checkSegment->len; checkSegment++) {
        // If a segment expects receive data but no Rx DMA channel is available, DMA can't be used
        if ((checkSegment->u.buffers.rxData) && (bus->dmaRx == (dmaChannelDescriptor_t *)NULL)) {
            dmaSafe = false;
            break;
        }
#ifdef STM32H7
        // Check if RX data can be DMAed
        if ((checkSegment->u.buffers.rxData) &&
            // DTCM can't be accessed by DMA1/2 on the H7
            (IS_DTCM(checkSegment->u.buffers.rxData) ||
            // Memory declared as DMA_RAM will have an address between &_dmaram_start__ and &_dmaram_end__
            (((checkSegment->u.buffers.rxData < &_dmaram_start__) || (checkSegment->u.buffers.rxData >= &_dmaram_end__)) &&
            (((uint32_t)checkSegment->u.buffers.rxData & (CACHE_LINE_SIZE - 1)) || (checkSegment->len & (CACHE_LINE_SIZE - 1)))))) {
            dmaSafe = false;
            break;
        }
        // Check if TX data can be DMAed
        else if ((checkSegment->u.buffers.txData) && IS_DTCM(checkSegment->u.buffers.txData)) {
            dmaSafe = false;
            break;
        }
#elif defined(STM32F7)
        if ((checkSegment->u.buffers.rxData) &&
            // DTCM is accessible and uncached on the F7
            (!IS_DTCM(checkSegment->u.buffers.rxData) &&
            (((uint32_t)checkSegment->u.buffers.rxData & (CACHE_LINE_SIZE - 1)) || (checkSegment->len & (CACHE_LINE_SIZE - 1))))) {
            dmaSafe = false;
            break;
        }
#elif defined(STM32G4)
        // Check if RX data can be DMAed
        if ((checkSegment->u.buffers.rxData) &&
            // CCM can't be accessed by DMA1/2 on the G4
            IS_CCM(checkSegment->u.buffers.rxData)) {
            dmaSafe = false;
            break;
        }
        if ((checkSegment->u.buffers.txData) &&
            // CCM can't be accessed by DMA1/2 on the G4
            IS_CCM(checkSegment->u.buffers.txData)) {
            dmaSafe = false;
            break;
        }
#endif
        // Note that these counts are only valid if dmaSafe is true
        segmentCount++;
        xferLen += checkSegment->len;
    }
    // Use DMA if possible
    // If there is more than one segment, or the list terminator leaves CS asserted (negateCS false),
    // then force DMA irrespective of length
    if (bus->useDMA && dmaSafe && ((segmentCount > 1) ||
                                   (xferLen >= SPI_DMA_THRESHOLD) ||
                                   !bus->curSegment[segmentCount].negateCS)) {
        // Initialise the init structures for the first transfer
        spiInternalInitStream(dev, false);

        // Assert Chip Select
        IOLo(dev->busType_u.spi.csnPin);

        // Start the transfers
        spiInternalStartDMA(dev);
    } else {
        busSegment_t *lastSegment = NULL;
        bool segmentComplete;

        // Manually work through the segment list performing a transfer for each
        while (bus->curSegment->len) {
            if (!lastSegment || lastSegment->negateCS) {
                // Asserting Chip Select is costly, so only do so when the previous segment negated it
                IOLo(dev->busType_u.spi.csnPin);
            }

            spiInternalReadWriteBufPolled(
                bus->busType_u.spi.instance,
                bus->curSegment->u.buffers.txData,
                bus->curSegment->u.buffers.rxData,
                bus->curSegment->len);

            if (bus->curSegment->negateCS) {
                // Negate Chip Select
                IOHi(dev->busType_u.spi.csnPin);
            }

            segmentComplete = true;
            if (bus->curSegment->callback) {
                switch(bus->curSegment->callback(dev->callbackArg)) {
                case BUS_BUSY:
                    // Repeat the last DMA segment
                    segmentComplete = false;
                    break;

                case BUS_ABORT:
                    bus->curSegment = (busSegment_t *)BUS_SPI_FREE;
                    segmentComplete = false;
                    return;

                case BUS_READY:
                default:
                    // Advance to the next DMA segment
                    break;
                }
            }
            if (segmentComplete) {
                lastSegment = (busSegment_t *)bus->curSegment;
                bus->curSegment++;
            }
        }

        // If a following transaction has been linked, start it
        if (bus->curSegment->u.link.dev) {
            busSegment_t *endSegment = (busSegment_t *)bus->curSegment;
            const extDevice_t *nextDev = endSegment->u.link.dev;
            busSegment_t *nextSegments = (busSegment_t *)endSegment->u.link.segments;
            bus->curSegment = nextSegments;
            endSegment->u.link.dev = NULL;
            endSegment->u.link.segments = NULL;
            spiSequenceStart(nextDev);
        } else {
            // The end of the segment list has been reached, so mark transactions as complete
            bus->curSegment = (busSegment_t *)BUS_SPI_FREE;
        }
    }
}
#endif