// src/main/drivers/bus_spi_ll.c
/*
 * This file is part of Cleanflight and Betaflight.
 *
 * Cleanflight and Betaflight are free software. You can redistribute
 * this software and/or modify this software under the terms of the
 * GNU General Public License as published by the Free Software
 * Foundation, either version 3 of the License, or (at your option)
 * any later version.
 *
 * Cleanflight and Betaflight are distributed in the hope that they
 * will be useful, but WITHOUT ANY WARRANTY; without even the implied
 * warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
 * See the GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this software.
 *
 * If not, see <http://www.gnu.org/licenses/>.
 */
#include <stdbool.h>
#include <stdint.h>
#include <string.h>

#include "platform.h"

#if defined(USE_SPI)

#include "common/utils.h"
#include "common/maths.h"

#include "drivers/bus.h"
#include "drivers/bus_spi.h"
#include "drivers/bus_spi_impl.h"
#include "drivers/dma.h"
#include "drivers/io.h"
#include "drivers/rcc.h"
#ifndef SPI2_SCK_PIN
#define SPI2_NSS_PIN PB12
#define SPI2_SCK_PIN PB13
#define SPI2_MISO_PIN PB14
#define SPI2_MOSI_PIN PB15
#endif

#ifndef SPI3_SCK_PIN
#define SPI3_NSS_PIN PA15
#define SPI3_SCK_PIN PB3
#define SPI3_MISO_PIN PB4
#define SPI3_MOSI_PIN PB5
#endif

#ifndef SPI4_SCK_PIN
#define SPI4_NSS_PIN PA15
#define SPI4_SCK_PIN PB3
#define SPI4_MISO_PIN PB4
#define SPI4_MOSI_PIN PB5
#endif

#ifndef SPI1_NSS_PIN
#define SPI1_NSS_PIN NONE
#endif
#ifndef SPI2_NSS_PIN
#define SPI2_NSS_PIN NONE
#endif
#ifndef SPI3_NSS_PIN
#define SPI3_NSS_PIN NONE
#endif
#ifndef SPI4_NSS_PIN
#define SPI4_NSS_PIN NONE
#endif
#define SPI_DEFAULT_TIMEOUT 10

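/*
 * Memory-region checks used by the DMA paths below:
 *  - STM32H7: DTCM cannot be accessed by DMA1/DMA2, so buffers there are not DMA-safe.
 *  - STM32F7: DTCM is accessible by DMA and uncached, so it needs no cache maintenance.
 *  - STM32G4: CCM SRAM cannot be accessed by DMA1/DMA2.
 * These macros are consumed by spiInternalInitStream() and by the DMA-safety
 * checks in spiSequenceStart().
 */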
#ifdef STM32H7
#define IS_DTCM(p) (((uint32_t)p & 0xfffe0000) == 0x20000000)
#elif defined(STM32F7)
#define IS_DTCM(p) (((uint32_t)p & 0xffff0000) == 0x20000000)
#elif defined(STM32G4)
#define IS_CCM(p) ((((uint32_t)p & 0xffff8000) == 0x10000000) || (((uint32_t)p & 0xffff8000) == 0x20018000))
#endif

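// Default master configuration: full duplex, 8-bit frames, MSB first, software NSS,
// /8 prescaler, CPOL = 1 / CPHA = 1 (SPI mode 3).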
static LL_SPI_InitTypeDef defaultInit =
{
    .TransferDirection = LL_SPI_FULL_DUPLEX,
    .Mode = LL_SPI_MODE_MASTER,
    .DataWidth = LL_SPI_DATAWIDTH_8BIT,
    .NSS = LL_SPI_NSS_SOFT,
    .BaudRate = LL_SPI_BAUDRATEPRESCALER_DIV8,
    .BitOrder = LL_SPI_MSB_FIRST,
    .CRCCalculation = LL_SPI_CRCCALCULATION_DISABLE,
    .ClockPolarity = LL_SPI_POLARITY_HIGH,
    .ClockPhase = LL_SPI_PHASE_2EDGE,
};

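/*
 * Map a clock divisor onto the SPI baud-rate prescaler bits. The divisor is
 * clamped to [2, 256] and only its least-significant set bit is significant,
 * so it is effectively treated as a power of two. Worked example: divisor 8
 * -> ffs(8) = 4 -> index 2 -> a /8 prescaler (LL_SPI_BAUDRATEPRESCALER_DIV8 on
 * the H7, BR = 2 on other targets). On non-H7 targets SPI2/SPI3 sit on the
 * slower APB1 bus, so the requested divisor is halved to compensate.
 */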
static uint32_t spiDivisorToBRbits(SPI_TypeDef *instance, uint16_t divisor)
{
#if !defined(STM32H7)
    // SPI2 and SPI3 are on APB1/AHB1, whose PCLK is half that of APB2/AHB2.
    if (instance == SPI2 || instance == SPI3) {
        divisor /= 2; // Safe for divisor == 0 or 1
    }
#else
    UNUSED(instance);
#endif

    divisor = constrain(divisor, 2, 256);

#if defined(STM32H7)
    const uint32_t baudRatePrescaler[8] = {
        LL_SPI_BAUDRATEPRESCALER_DIV2,
        LL_SPI_BAUDRATEPRESCALER_DIV4,
        LL_SPI_BAUDRATEPRESCALER_DIV8,
        LL_SPI_BAUDRATEPRESCALER_DIV16,
        LL_SPI_BAUDRATEPRESCALER_DIV32,
        LL_SPI_BAUDRATEPRESCALER_DIV64,
        LL_SPI_BAUDRATEPRESCALER_DIV128,
        LL_SPI_BAUDRATEPRESCALER_DIV256,
    };
    int prescalerIndex = ffs(divisor) - 2; // prescaler begins at "/2"

    return baudRatePrescaler[prescalerIndex];
#else
    return (ffs(divisor) - 2) << SPI_CR1_BR_Pos;
#endif
}

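// Claim and configure the SCK/MISO/MOSI pins, then reset and initialise the SPI
// peripheral with the default master settings above.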
void spiInitDevice(SPIDevice device)
{
    spiDevice_t *spi = &spiDevice[device];

    if (!spi->dev) {
        return;
    }

    // Enable SPI clock
    RCC_ClockCmd(spi->rcc, ENABLE);
    RCC_ResetCmd(spi->rcc, ENABLE);

    IOInit(IOGetByTag(spi->sck), OWNER_SPI_SCK, RESOURCE_INDEX(device));
    IOInit(IOGetByTag(spi->miso), OWNER_SPI_MISO, RESOURCE_INDEX(device));
    IOInit(IOGetByTag(spi->mosi), OWNER_SPI_MOSI, RESOURCE_INDEX(device));

    IOConfigGPIOAF(IOGetByTag(spi->miso), SPI_IO_AF_MISO_CFG, spi->misoAF);
    IOConfigGPIOAF(IOGetByTag(spi->mosi), SPI_IO_AF_CFG, spi->mosiAF);
    IOConfigGPIOAF(IOGetByTag(spi->sck), SPI_IO_AF_SCK_CFG_HIGH, spi->sckAF);

    LL_SPI_Disable(spi->dev);
    LL_SPI_DeInit(spi->dev);

#if defined(STM32H7)
    // Prevent glitching when SPI is disabled
    LL_SPI_EnableGPIOControl(spi->dev);

    LL_SPI_SetFIFOThreshold(spi->dev, LL_SPI_FIFO_TH_01DATA);
    LL_SPI_Init(spi->dev, &defaultInit);
#else
    LL_SPI_SetRxFIFOThreshold(spi->dev, SPI_RXFIFO_THRESHOLD_QF);

    LL_SPI_Init(spi->dev, &defaultInit);
    LL_SPI_Enable(spi->dev);
#endif
}

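/*
 * Pre-fill the Tx (and, when an Rx DMA channel is assigned, Rx) LL_DMA init
 * structures with the settings that do not change between transfers: request/
 * channel, direction, peripheral data register address, byte-wide accesses and
 * no peripheral increment. Per-segment memory addresses and lengths are filled
 * in later by spiInternalInitStream().
 */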
void spiInternalResetDescriptors(busDevice_t *bus)
{
    LL_DMA_InitTypeDef *initTx = bus->initTx;

    LL_DMA_StructInit(initTx);
#if defined(STM32G4) || defined(STM32H7)
    initTx->PeriphRequest = bus->dmaTx->channel;
#else
    initTx->Channel = bus->dmaTx->channel;
#endif
    initTx->Mode = LL_DMA_MODE_NORMAL;
    initTx->Direction = LL_DMA_DIRECTION_MEMORY_TO_PERIPH;
#if defined(STM32H7)
    initTx->PeriphOrM2MSrcAddress = (uint32_t)&bus->busType_u.spi.instance->TXDR;
#else
    initTx->PeriphOrM2MSrcAddress = (uint32_t)&bus->busType_u.spi.instance->DR;
#endif
    initTx->Priority = LL_DMA_PRIORITY_LOW;
    initTx->PeriphOrM2MSrcIncMode = LL_DMA_PERIPH_NOINCREMENT;
    initTx->PeriphOrM2MSrcDataSize = LL_DMA_PDATAALIGN_BYTE;
    initTx->MemoryOrM2MDstDataSize = LL_DMA_MDATAALIGN_BYTE;

    if (bus->dmaRx) {
        LL_DMA_InitTypeDef *initRx = bus->initRx;

        LL_DMA_StructInit(initRx);
#if defined(STM32G4) || defined(STM32H7)
        initRx->PeriphRequest = bus->dmaRx->channel;
#else
        initRx->Channel = bus->dmaRx->channel;
#endif
        initRx->Mode = LL_DMA_MODE_NORMAL;
        initRx->Direction = LL_DMA_DIRECTION_PERIPH_TO_MEMORY;
#if defined(STM32H7)
        initRx->PeriphOrM2MSrcAddress = (uint32_t)&bus->busType_u.spi.instance->RXDR;
#else
        initRx->PeriphOrM2MSrcAddress = (uint32_t)&bus->busType_u.spi.instance->DR;
#endif
        initRx->Priority = LL_DMA_PRIORITY_LOW;
        initRx->PeriphOrM2MSrcIncMode = LL_DMA_PERIPH_NOINCREMENT;
        initRx->PeriphOrM2MSrcDataSize = LL_DMA_PDATAALIGN_BYTE;
    }
}

void spiInternalResetStream(dmaChannelDescriptor_t *descriptor)
{
    // Disable the stream
#if defined(STM32G4)
    LL_DMA_DisableChannel(descriptor->dma, descriptor->stream);
    while (LL_DMA_IsEnabledChannel(descriptor->dma, descriptor->stream));
#else
    LL_DMA_DisableStream(descriptor->dma, descriptor->stream);
    while (LL_DMA_IsEnabledStream(descriptor->dma, descriptor->stream));
#endif

    // Clear any pending interrupt flags
    DMA_CLEAR_FLAG(descriptor, DMA_IT_HTIF | DMA_IT_TEIF | DMA_IT_TCIF);
}

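/*
 * Polled full-duplex transfer, used when DMA is unavailable or not worthwhile.
 * A NULL txData sends 0xFF filler; a NULL rxData discards received bytes. On
 * the H7 the transfer size is programmed up front and the TXP/RXP FIFO flags
 * are polled; on other targets data is packed into 16-bit frames (reducing
 * flag polling), with any odd trailing byte transferred in 8-bit mode.
 */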
static bool spiInternalReadWriteBufPolled(SPI_TypeDef *instance, const uint8_t *txData, uint8_t *rxData, int len)
{
#if defined(STM32H7)
    int txLen = len;
    int rxLen = len;

    LL_SPI_SetTransferSize(instance, txLen);
    LL_SPI_Enable(instance);
    LL_SPI_StartMasterTransfer(instance);
    while (txLen || rxLen) {
        if (txLen && LL_SPI_IsActiveFlag_TXP(instance)) {
            uint8_t b = txData ? *(txData++) : 0xFF;
            LL_SPI_TransmitData8(instance, b);
            txLen--;
        }

        if (rxLen && LL_SPI_IsActiveFlag_RXP(instance)) {
            uint8_t b = LL_SPI_ReceiveData8(instance);
            if (rxData) {
                *(rxData++) = b;
            }
            rxLen--;
        }
    }
    while (!LL_SPI_IsActiveFlag_EOT(instance));
    LL_SPI_ClearFlag_TXTF(instance);
    LL_SPI_Disable(instance);
#else
    // set 16-bit transfer
    CLEAR_BIT(instance->CR2, SPI_RXFIFO_THRESHOLD);
    while (len > 1) {
        while (!LL_SPI_IsActiveFlag_TXE(instance));
        uint16_t w;
        if (txData) {
            w = *((uint16_t *)txData);
            txData += 2;
        } else {
            w = 0xFFFF;
        }
        LL_SPI_TransmitData16(instance, w);

        while (!LL_SPI_IsActiveFlag_RXNE(instance));
        w = LL_SPI_ReceiveData16(instance);
        if (rxData) {
            *((uint16_t *)rxData) = w;
            rxData += 2;
        }
        len -= 2;
    }

    // set 8-bit transfer
    SET_BIT(instance->CR2, SPI_RXFIFO_THRESHOLD);
    if (len) {
        while (!LL_SPI_IsActiveFlag_TXE(instance));
        uint8_t b = txData ? *(txData++) : 0xFF;
        LL_SPI_TransmitData8(instance, b);

        while (!LL_SPI_IsActiveFlag_RXNE(instance));
        b = LL_SPI_ReceiveData8(instance);
        if (rxData) {
            *(rxData++) = b;
        }
        --len;
    }
#endif

    return true;
}

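/*
 * Fill in the per-segment fields (memory address, increment mode and length) of
 * the DMA init structures. With preInit set this is done for the segment after
 * the current one, to reduce the inter-segment interval. Cached transmit
 * buffers are cleaned and cached receive buffers cleaned/invalidated a cache
 * line at a time; if a segment has no Tx or Rx buffer, a static dummy byte is
 * used with memory increment disabled.
 */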
void spiInternalInitStream(const extDevice_t *dev, bool preInit)
{
    STATIC_DMA_DATA_AUTO uint8_t dummyTxByte = 0xff;
    STATIC_DMA_DATA_AUTO uint8_t dummyRxByte;
    busDevice_t *bus = dev->bus;

    busSegment_t *segment = bus->curSegment;

    if (preInit) {
        // Prepare the init structure for the next segment to reduce inter-segment interval
        segment++;
        if (segment->len == 0) {
            // There's no following segment
            return;
        }
    }

    int len = segment->len;

    uint8_t *txData = segment->u.buffers.txData;
    LL_DMA_InitTypeDef *initTx = bus->initTx;

    if (txData) {
#ifdef __DCACHE_PRESENT
#ifdef STM32H7
        if ((txData < &_dmaram_start__) || (txData >= &_dmaram_end__)) {
#else
        // No need to flush DTCM memory
        if (!IS_DTCM(txData)) {
#endif
            // Flush the D cache to ensure the data to be written is in main memory
            SCB_CleanDCache_by_Addr(
                (uint32_t *)((uint32_t)txData & ~CACHE_LINE_MASK),
                (((uint32_t)txData & CACHE_LINE_MASK) + len - 1 + CACHE_LINE_SIZE) & ~CACHE_LINE_MASK);
        }
#endif // __DCACHE_PRESENT
        initTx->MemoryOrM2MDstAddress = (uint32_t)txData;
        initTx->MemoryOrM2MDstIncMode = LL_DMA_MEMORY_INCREMENT;
    } else {
        initTx->MemoryOrM2MDstAddress = (uint32_t)&dummyTxByte;
        initTx->MemoryOrM2MDstIncMode = LL_DMA_MEMORY_NOINCREMENT;
    }
    initTx->NbData = len;

#if !defined(STM32G4) && !defined(STM32H7)
    if (dev->bus->dmaRx) {
#endif
        uint8_t *rxData = segment->u.buffers.rxData;
        LL_DMA_InitTypeDef *initRx = bus->initRx;

        if (rxData) {
            /* Flush the D cache for the start and end of the receive buffer as
             * the cache will be invalidated after the transfer and any valid data
             * just before/after must be in memory at that point
             */
#ifdef __DCACHE_PRESENT
            // No need to flush/invalidate DTCM memory
#ifdef STM32H7
            if ((rxData < &_dmaram_start__) || (rxData >= &_dmaram_end__)) {
#else
            // No need to flush DTCM memory
            if (!IS_DTCM(rxData)) {
#endif
                SCB_CleanInvalidateDCache_by_Addr(
                    (uint32_t *)((uint32_t)rxData & ~CACHE_LINE_MASK),
                    (((uint32_t)rxData & CACHE_LINE_MASK) + len - 1 + CACHE_LINE_SIZE) & ~CACHE_LINE_MASK);
            }
#endif // __DCACHE_PRESENT
            initRx->MemoryOrM2MDstAddress = (uint32_t)rxData;
            initRx->MemoryOrM2MDstIncMode = LL_DMA_MEMORY_INCREMENT;
        } else {
            initRx->MemoryOrM2MDstAddress = (uint32_t)&dummyRxByte;
            initRx->MemoryOrM2MDstIncMode = LL_DMA_MEMORY_NOINCREMENT;
        }
        initRx->NbData = len;
#if !defined(STM32G4) && !defined(STM32H7)
    }
#endif
}

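/*
 * Program and enable the DMA stream(s)/channel(s) for the current segment,
 * assert chip select and enable the SPI DMA requests. The Rx transfer-complete
 * interrupt is used to signal completion, as it only fires once the SPI
 * transfer has finished. On targets other than the G4 and H7, a Tx-only path
 * is taken when no Rx DMA channel is assigned to the bus.
 */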
void spiInternalStartDMA(const extDevice_t *dev)
{
    busDevice_t *bus = dev->bus;

    // Assert Chip Select
    IOLo(dev->busType_u.spi.csnPin);

    dmaChannelDescriptor_t *dmaTx = bus->dmaTx;
    dmaChannelDescriptor_t *dmaRx = bus->dmaRx;

#if !defined(STM32G4) && !defined(STM32H7)
    if (dmaRx) {
#endif
        // Use the correct callback argument
        dmaRx->userParam = (uint32_t)dev;

        // Clear transfer flags
        DMA_CLEAR_FLAG(dmaTx, DMA_IT_HTIF | DMA_IT_TEIF | DMA_IT_TCIF);
        DMA_CLEAR_FLAG(dmaRx, DMA_IT_HTIF | DMA_IT_TEIF | DMA_IT_TCIF);

#ifdef STM32G4
        // Disable channels to enable update
        LL_DMA_DisableChannel(dmaTx->dma, dmaTx->stream);
        LL_DMA_DisableChannel(dmaRx->dma, dmaRx->stream);

        /* Use the Rx interrupt as this occurs once the SPI operation is complete whereas the Tx interrupt
         * occurs earlier when the Tx FIFO is empty, but the SPI operation is still in progress
         */
        LL_DMA_EnableIT_TC(dmaRx->dma, dmaRx->stream);

        // Update channels
        LL_DMA_Init(dmaTx->dma, dmaTx->stream, bus->initTx);
        LL_DMA_Init(dmaRx->dma, dmaRx->stream, bus->initRx);

        LL_SPI_EnableDMAReq_RX(dev->bus->busType_u.spi.instance);

        // Enable channels
        LL_DMA_EnableChannel(dmaTx->dma, dmaTx->stream);
        LL_DMA_EnableChannel(dmaRx->dma, dmaRx->stream);

        LL_SPI_EnableDMAReq_TX(dev->bus->busType_u.spi.instance);
#else
        DMA_Stream_TypeDef *streamRegsTx = (DMA_Stream_TypeDef *)dmaTx->ref;
        DMA_Stream_TypeDef *streamRegsRx = (DMA_Stream_TypeDef *)dmaRx->ref;

        // Disable streams to enable update
        LL_DMA_WriteReg(streamRegsTx, CR, 0U);
        LL_DMA_WriteReg(streamRegsRx, CR, 0U);

        /* Use the Rx interrupt as this occurs once the SPI operation is complete whereas the Tx interrupt
         * occurs earlier when the Tx FIFO is empty, but the SPI operation is still in progress
         */
        LL_EX_DMA_EnableIT_TC(streamRegsRx);

        // Update streams
        LL_DMA_Init(dmaTx->dma, dmaTx->stream, bus->initTx);
        LL_DMA_Init(dmaRx->dma, dmaRx->stream, bus->initRx);

        /* Note from AN4031
         *
         * If the user enables the used peripheral before the corresponding DMA stream, a “FEIF”
         * (FIFO Error Interrupt Flag) may be set due to the fact the DMA is not ready to provide
         * the first required data to the peripheral (in case of memory-to-peripheral transfer).
         */

        // Enable the SPI DMA Tx & Rx requests
#if defined(STM32H7)
        LL_SPI_SetTransferSize(dev->bus->busType_u.spi.instance, dev->bus->curSegment->len);
        LL_DMA_EnableStream(dmaTx->dma, dmaTx->stream);
        LL_DMA_EnableStream(dmaRx->dma, dmaRx->stream);
        SET_BIT(dev->bus->busType_u.spi.instance->CFG1, SPI_CFG1_RXDMAEN | SPI_CFG1_TXDMAEN);
        LL_SPI_Enable(dev->bus->busType_u.spi.instance);
        LL_SPI_StartMasterTransfer(dev->bus->busType_u.spi.instance);
#else
        // Enable streams
        LL_DMA_EnableStream(dmaTx->dma, dmaTx->stream);
        LL_DMA_EnableStream(dmaRx->dma, dmaRx->stream);

        SET_BIT(dev->bus->busType_u.spi.instance->CR2, SPI_CR2_TXDMAEN | SPI_CR2_RXDMAEN);
#endif
#if !defined(STM32G4) && !defined(STM32H7)
    } else {
        DMA_Stream_TypeDef *streamRegsTx = (DMA_Stream_TypeDef *)dmaTx->ref;

        // Use the correct callback argument
        dmaTx->userParam = (uint32_t)dev;

        // Clear transfer flags
        DMA_CLEAR_FLAG(dmaTx, DMA_IT_HTIF | DMA_IT_TEIF | DMA_IT_TCIF);

        // Disable streams to enable update
        LL_DMA_WriteReg(streamRegsTx, CR, 0U);

        LL_EX_DMA_EnableIT_TC(streamRegsTx);

        // Update streams
        LL_DMA_Init(dmaTx->dma, dmaTx->stream, bus->initTx);

        /* Note from AN4031
         *
         * If the user enables the used peripheral before the corresponding DMA stream, a “FEIF”
         * (FIFO Error Interrupt Flag) may be set due to the fact the DMA is not ready to provide
         * the first required data to the peripheral (in case of memory-to-peripheral transfer).
         */

        // Enable the SPI DMA Tx request
        // Enable streams
        LL_DMA_EnableStream(dmaTx->dma, dmaTx->stream);

        SET_BIT(dev->bus->busType_u.spi.instance->CR2, SPI_CR2_TXDMAEN);
    }
#endif
#endif
}

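/*
 * Stop DMA on the bus: disable the stream(s)/channel(s), clear the transfer
 * flags and disable the SPI DMA requests. In the Tx-only case the current
 * transmission is first allowed to complete and the receive FIFO is drained.
 */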
void spiInternalStopDMA(const extDevice_t *dev)
{
    busDevice_t *bus = dev->bus;

    dmaChannelDescriptor_t *dmaTx = bus->dmaTx;
    dmaChannelDescriptor_t *dmaRx = bus->dmaRx;
    SPI_TypeDef *instance = bus->busType_u.spi.instance;

#if !defined(STM32G4) && !defined(STM32H7)
    if (dmaRx) {
#endif
        // Disable the DMA engine and SPI interface
#ifdef STM32G4
        LL_DMA_DisableChannel(dmaTx->dma, dmaTx->stream);
        LL_DMA_DisableChannel(dmaRx->dma, dmaRx->stream);
#else
        LL_DMA_DisableStream(dmaRx->dma, dmaRx->stream);
        LL_DMA_DisableStream(dmaTx->dma, dmaTx->stream);
#endif

        // Clear transfer flags
        DMA_CLEAR_FLAG(dmaRx, DMA_IT_HTIF | DMA_IT_TEIF | DMA_IT_TCIF);

        LL_SPI_DisableDMAReq_TX(instance);
        LL_SPI_DisableDMAReq_RX(instance);
#if defined(STM32H7)
        LL_SPI_ClearFlag_TXTF(dev->bus->busType_u.spi.instance);
        LL_SPI_Disable(dev->bus->busType_u.spi.instance);
#endif
#if !defined(STM32G4) && !defined(STM32H7)
    } else {
        SPI_TypeDef *instance = bus->busType_u.spi.instance;

        // Ensure the current transmission is complete
        while (LL_SPI_IsActiveFlag_BSY(instance));

        // Drain the RX buffer
        while (LL_SPI_IsActiveFlag_RXNE(instance)) {
            instance->DR;
        }

        // Disable the DMA engine and SPI interface
        LL_DMA_DisableStream(dmaTx->dma, dmaTx->stream);

        DMA_CLEAR_FLAG(dmaTx, DMA_IT_HTIF | DMA_IT_TEIF | DMA_IT_TCIF);

        LL_SPI_DisableDMAReq_TX(instance);
#endif
#if !defined(STM32G4) && !defined(STM32H7)
    }
#endif
}

// DMA transfer setup and start
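/*
 * Apply the device's clock divisor and clock polarity/phase if they differ
 * from the bus's current settings, then walk the segment list checking that
 * every buffer is DMA-safe for this MCU (Rx DMA available, cached receive
 * buffers cache-line aligned, no DTCM/CCM placement that DMA cannot reach).
 * Eligible sequences are handed to the DMA path; otherwise each segment is
 * transferred by polling, honouring per-segment CS negation and callbacks.
 */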
void spiSequenceStart(const extDevice_t *dev)
{
    busDevice_t *bus = dev->bus;
    SPI_TypeDef *instance = bus->busType_u.spi.instance;
    spiDevice_t *spi = &spiDevice[spiDeviceByInstance(instance)];
    bool dmaSafe = dev->useDMA;
    uint32_t xferLen = 0;
    uint32_t segmentCount = 0;

    bus->initSegment = true;

    // Switch bus speed
#if !defined(STM32H7)
    LL_SPI_Disable(instance);
#endif

    if (dev->busType_u.spi.speed != bus->busType_u.spi.speed) {
        LL_SPI_SetBaudRatePrescaler(instance, spiDivisorToBRbits(instance, dev->busType_u.spi.speed));
        bus->busType_u.spi.speed = dev->busType_u.spi.speed;
    }

    // Switch SPI clock polarity/phase if necessary
    if (dev->busType_u.spi.leadingEdge != bus->busType_u.spi.leadingEdge) {
        if (dev->busType_u.spi.leadingEdge) {
            IOConfigGPIOAF(IOGetByTag(spi->sck), SPI_IO_AF_SCK_CFG_LOW, spi->sckAF);
            LL_SPI_SetClockPhase(instance, LL_SPI_PHASE_1EDGE);
            LL_SPI_SetClockPolarity(instance, LL_SPI_POLARITY_LOW);
        } else {
            IOConfigGPIOAF(IOGetByTag(spi->sck), SPI_IO_AF_SCK_CFG_HIGH, spi->sckAF);
            LL_SPI_SetClockPhase(instance, LL_SPI_PHASE_2EDGE);
            LL_SPI_SetClockPolarity(instance, LL_SPI_POLARITY_HIGH);
        }

        bus->busType_u.spi.leadingEdge = dev->busType_u.spi.leadingEdge;
    }

#if !defined(STM32H7)
    LL_SPI_Enable(instance);
#endif

    /* Where data is being read into a buffer which is cached, where the start or end of that
     * buffer is not cache aligned, there is a risk of corruption of other data in that cache line.
     * After the read is complete, the cache lines covering the structure will be invalidated to ensure
     * that the processor sees the read data, not what was in cache previously. Unfortunately if
     * there is any other data in the area covered by those cache lines, at the start or end of the
     * buffer, it too will be invalidated, so had the processor written to those locations during the DMA
     * operation those written values will be lost.
     */

    // Check that any reads are cache aligned and of multiple cache lines in length
    for (busSegment_t *checkSegment = bus->curSegment; checkSegment->len; checkSegment++) {
        // Check there is no receive data as only transmit DMA is available
        if ((checkSegment->u.buffers.rxData) && (bus->dmaRx == (dmaChannelDescriptor_t *)NULL)) {
            dmaSafe = false;
            break;
        }
#ifdef STM32H7
        // Check if RX data can be DMAed
        if ((checkSegment->u.buffers.rxData) &&
            // DTCM can't be accessed by DMA1/2 on the H7
            (IS_DTCM(checkSegment->u.buffers.rxData) ||
            // Memory declared as DMA_RAM will have an address between &_dmaram_start__ and &_dmaram_end__
            (((checkSegment->u.buffers.rxData < &_dmaram_start__) || (checkSegment->u.buffers.rxData >= &_dmaram_end__)) &&
            (((uint32_t)checkSegment->u.buffers.rxData & (CACHE_LINE_SIZE - 1)) || (checkSegment->len & (CACHE_LINE_SIZE - 1)))))) {
            dmaSafe = false;
            break;
        }
        // Check if TX data can be DMAed
        else if ((checkSegment->u.buffers.txData) && IS_DTCM(checkSegment->u.buffers.txData)) {
            dmaSafe = false;
            break;
        }
#elif defined(STM32F7)
        if ((checkSegment->u.buffers.rxData) &&
            // DTCM is accessible and uncached on the F7
            (!IS_DTCM(checkSegment->u.buffers.rxData) &&
            (((uint32_t)checkSegment->u.buffers.rxData & (CACHE_LINE_SIZE - 1)) || (checkSegment->len & (CACHE_LINE_SIZE - 1))))) {
            dmaSafe = false;
            break;
        }
#elif defined(STM32G4)
        // Check if RX data can be DMAed
        if ((checkSegment->u.buffers.rxData) &&
            // CCM can't be accessed by DMA1/2 on the G4
            IS_CCM(checkSegment->u.buffers.rxData)) {
            dmaSafe = false;
            break;
        }
        if ((checkSegment->u.buffers.txData) &&
            // CCM can't be accessed by DMA1/2 on the G4
            IS_CCM(checkSegment->u.buffers.txData)) {
            dmaSafe = false;
            break;
        }
#endif
        // Note that these counts are only valid if dmaSafe is true
        segmentCount++;
        xferLen += checkSegment->len;
    }

    // Use DMA if possible
    if (bus->useDMA && dmaSafe && ((segmentCount > 1) || (xferLen > 8))) {
        // Initialise the init structures for the first transfer
        spiInternalInitStream(dev, false);

        // Start the transfers
        spiInternalStartDMA(dev);
    } else {
        // Manually work through the segment list performing a transfer for each
        while (bus->curSegment->len) {
            // Assert Chip Select
            IOLo(dev->busType_u.spi.csnPin);

            spiInternalReadWriteBufPolled(
                    bus->busType_u.spi.instance,
                    bus->curSegment->u.buffers.txData,
                    bus->curSegment->u.buffers.rxData,
                    bus->curSegment->len);

            if (bus->curSegment->negateCS) {
                // Negate Chip Select
                IOHi(dev->busType_u.spi.csnPin);
            }

            if (bus->curSegment->callback) {
                switch (bus->curSegment->callback(dev->callbackArg)) {
                case BUS_BUSY:
                    // Repeat the last DMA segment
                    bus->curSegment--;
                    break;

                case BUS_ABORT:
                    bus->curSegment = (busSegment_t *)BUS_SPI_FREE;
                    return;

                case BUS_READY:
                default:
                    // Advance to the next DMA segment
                    break;
                }
            }
            bus->curSegment++;
        }

        // If a following transaction has been linked, start it
        if (bus->curSegment->u.link.dev) {
            const extDevice_t *nextDev = bus->curSegment->u.link.dev;
            busSegment_t *nextSegments = bus->curSegment->u.link.segments;
            busSegment_t *endSegment = bus->curSegment;
            bus->curSegment = nextSegments;
            endSegment->u.link.dev = NULL;
            spiSequenceStart(nextDev);
        } else {
            // The end of the segment list has been reached, so mark transactions as complete
            bus->curSegment = (busSegment_t *)BUS_SPI_FREE;
        }
    }
}
#endif // USE_SPI