USART not working on STM32L475 after setup

I work with the B-L475E-IOT01A demo board. On the CN3 connector, pin 1 is PA1 (RX) and pin 2 is PA0 (TX), so I configure UART4:
#define UART4_PORT   GPIOA
#define UART4_TX_PIN LL_GPIO_PIN_0
#define UART4_RX_PIN LL_GPIO_PIN_1

int USART_Setup(USART_TypeDef *USARTx, uint32_t baud_rate)
{
    IRQn_Type irq_num;
    uint32_t timeout = 0;

    LL_AHB2_GRP1_EnableClock(LL_AHB2_GRP1_PERIPH_GPIOA);
    LL_APB2_GRP1_EnableClock(LL_APB2_GRP1_PERIPH_SYSCFG);

    if (USARTx == UART4)
    {
        irq_num = UART4_IRQn;

        /* Configure TX pin: alternate function, high speed, push-pull, pull-up */
        LL_GPIO_SetPinMode(UART4_PORT, UART4_TX_PIN, LL_GPIO_MODE_ALTERNATE);
        LL_GPIO_SetAFPin_8_15(UART4_PORT, UART4_TX_PIN, LL_GPIO_AF_8);
        LL_GPIO_SetPinSpeed(UART4_PORT, UART4_TX_PIN, LL_GPIO_SPEED_FREQ_HIGH);
        LL_GPIO_SetPinOutputType(UART4_PORT, UART4_TX_PIN, LL_GPIO_OUTPUT_PUSHPULL);
        LL_GPIO_SetPinPull(UART4_PORT, UART4_TX_PIN, LL_GPIO_PULL_UP);

        /* Configure RX pin: alternate function, high speed, push-pull, pull-up */
        LL_GPIO_SetPinMode(UART4_PORT, UART4_RX_PIN, LL_GPIO_MODE_ALTERNATE);
        LL_GPIO_SetAFPin_8_15(UART4_PORT, UART4_RX_PIN, LL_GPIO_AF_8);
        LL_GPIO_SetPinSpeed(UART4_PORT, UART4_RX_PIN, LL_GPIO_SPEED_FREQ_HIGH);
        LL_GPIO_SetPinOutputType(UART4_PORT, UART4_RX_PIN, LL_GPIO_OUTPUT_PUSHPULL);
        LL_GPIO_SetPinPull(UART4_PORT, UART4_RX_PIN, LL_GPIO_PULL_UP);

        LL_APB1_GRP1_EnableClock(LL_APB1_GRP1_PERIPH_UART4);
        LL_RCC_SetUSARTClockSource(LL_RCC_UART4_CLKSOURCE_PCLK1);
    }

    NVIC_SetPriority(irq_num, 0);
    NVIC_EnableIRQ(irq_num);

    /* TX/RX direction */
    LL_USART_SetTransferDirection(USARTx, LL_USART_DIRECTION_TX_RX);

    /* 8 data bits, 1 start bit, 1 stop bit, no parity */
    LL_USART_ConfigCharacter(USARTx, LL_USART_DATAWIDTH_8B, LL_USART_PARITY_NONE, LL_USART_STOPBITS_1);

    /* Note: assumes the UART kernel clock (PCLK1) equals SystemCoreClock */
    LL_USART_SetBaudRate(USARTx, SystemCoreClock, LL_USART_OVERSAMPLING_16, baud_rate);

    LL_USART_Enable(USARTx);

    /* Enable the "RX buffer not empty" interrupt */
    USARTx->CR1 |= USART_CR1_RXNEIE;

    /* Poll until the USART acknowledges that TX and RX are enabled */
    while ((!(LL_USART_IsActiveFlag_TEACK(USARTx))) || (!(LL_USART_IsActiveFlag_REACK(USARTx))))
    {
        timeout++;
        if (timeout > 1000000)
        {
            return USART_ERROR;
        }
    }
    return USART_OK;
}
I go line by line in the debugger and everything seems to be OK. However, when I connect a terminal through a TTL-USB converter, there is no transmission and no reception. Both TX/RX LEDs on the TTL-USB converter are on. The TTL-USB converter was tested on other boards and works fine. Am I missing something?

First I enable the port clocks:
LL_AHB2_GRP1_EnableClock(LL_AHB2_GRP1_PERIPH_GPIOA);
LL_AHB2_GRP1_EnableClock(LL_AHB2_GRP1_PERIPH_GPIOB);
LL_AHB2_GRP1_EnableClock(LL_AHB2_GRP1_PERIPH_GPIOC);
LL_AHB2_GRP1_EnableClock(LL_AHB2_GRP1_PERIPH_GPIOD);
LL_AHB2_GRP1_EnableClock(LL_AHB2_GRP1_PERIPH_GPIOE);
LL_AHB2_GRP1_EnableClock(LL_AHB2_GRP1_PERIPH_GPIOH);
LL_APB2_GRP1_EnableClock(LL_APB2_GRP1_PERIPH_SYSCFG);
After that I see:
GPIOA->AFRL - 0x00000000
GPIOA->AFRH - 0x00000000
Then after
LL_GPIO_SetAFPin_8_15(UART4_PORT, UART4_TX_PIN, LL_GPIO_AF_8);
LL_GPIO_SetAFPin_8_15(UART4_PORT, UART4_RX_PIN, LL_GPIO_AF_8);
both registers stay all zeroes:
GPIOA->AFRL - 0x00000000
GPIOA->AFRH - 0x00000000
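One hedged observation based on those readings: PA0 and PA1 are pins in the 0-7 range, whose alternate function selection lives in AFRL, but the code calls LL_GPIO_SetAFPin_8_15(), which only writes AFRH and is effectively a no-op for pins below 8 — which would explain why both registers stay zero. If that diagnosis is right, the _0_7 variant is needed:

/* PA0/PA1 fall in the 0..7 range handled by AFRL, so use the _0_7 variant;
   LL_GPIO_SetAFPin_8_15() silently changes nothing for these pins. */
LL_GPIO_SetAFPin_0_7(UART4_PORT, UART4_TX_PIN, LL_GPIO_AF_8);
LL_GPIO_SetAFPin_0_7(UART4_PORT, UART4_RX_PIN, LL_GPIO_AF_8);

After this, GPIOA->AFRL should read 0x00000088 (AF8 in the nibbles for pins 0 and 1).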

Related

STM32 code not working, while loop, delay problems

Currently, I'm facing a weird problem with the STM32. I just generated code with the STM32Cube IDE for the chosen MCU (STM32L031G6). I changed almost nothing, except configuring one GPIO as an output and trying to blink a connected LED.
Now the problem:
If I run the code, nothing happens, no blink at all.
Stepping through the code, I can turn the LED on once when WritePin is called. Then, just one step further, the LED is off again, although no further WritePin call has executed. The LED never comes back on.
What can be the problem with this code? There is nothing special about it. Did I miss something that is required for generated STM32 code?
For the following code I removed all unused lines and comments.
#include "main.h"
void SystemClock_Config(void);
static void MX_GPIO_Init(void);
int main(void) {
HAL_Init();
SystemClock_Config();
MX_GPIO_Init();
while(1) {
//HAL_GPIO_TogglePin(LED_GPIO_Port, LED_Pin);
HAL_GPIO_WritePin(LED_GPIO_Port, LED_Pin, GPIO_PIN_SET);
HAL_Delay(1000);
HAL_GPIO_WritePin(LED_GPIO_Port, LED_Pin, GPIO_PIN_RESET);
HAL_Delay(1000);
}
}
void SystemClock_Config(void) {
RCC_OscInitTypeDef RCC_OscInitStruct = {0};
RCC_ClkInitTypeDef RCC_ClkInitStruct = {0};
__HAL_PWR_VOLTAGESCALING_CONFIG(PWR_REGULATOR_VOLTAGE_SCALE1);
RCC_OscInitStruct.OscillatorType = RCC_OSCILLATORTYPE_MSI;
RCC_OscInitStruct.MSIState = RCC_MSI_ON;
RCC_OscInitStruct.MSICalibrationValue = 0;
RCC_OscInitStruct.MSIClockRange = RCC_MSIRANGE_6;
RCC_OscInitStruct.PLL.PLLState = RCC_PLL_NONE;
if (HAL_RCC_OscConfig(&RCC_OscInitStruct) != HAL_OK) {
Error_Handler();
}
RCC_ClkInitStruct.ClockType = RCC_CLOCKTYPE_HCLK|RCC_CLOCKTYPE_SYSCLK
|RCC_CLOCKTYPE_PCLK1|RCC_CLOCKTYPE_PCLK2;
RCC_ClkInitStruct.SYSCLKSource = RCC_SYSCLKSOURCE_MSI;
RCC_ClkInitStruct.AHBCLKDivider = RCC_SYSCLK_DIV1;
RCC_ClkInitStruct.APB1CLKDivider = RCC_HCLK_DIV1;
RCC_ClkInitStruct.APB2CLKDivider = RCC_HCLK_DIV1;
if (HAL_RCC_ClockConfig(&RCC_ClkInitStruct, FLASH_LATENCY_0) != HAL_OK) {
Error_Handler();
}
}
static void MX_GPIO_Init(void) {
GPIO_InitTypeDef GPIO_InitStruct = {0};
__HAL_RCC_GPIOA_CLK_ENABLE();
HAL_GPIO_WritePin(LED_GPIO_Port, LED_Pin, GPIO_PIN_RESET);
GPIO_InitStruct.Pin = LED_Pin;
GPIO_InitStruct.Mode = GPIO_MODE_OUTPUT_PP;
GPIO_InitStruct.Pull = GPIO_NOPULL;
GPIO_InitStruct.Speed = GPIO_SPEED_FREQ_LOW;
HAL_GPIO_Init(LED_GPIO_Port, &GPIO_InitStruct);
}
void Error_Handler(void) {
__disable_irq();
while (1) {}
}
Update 1:
As seen in the comments, HAL_Delay is not working properly. But how can that be fixed? And why doesn't the code make the LED flicker when HAL_Delay is removed?
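If the delay itself is the suspect, one common thing to verify (offered as a guess, not a confirmed diagnosis) is that the SysTick interrupt actually fires: HAL_Delay() busy-waits on a tick counter that is only advanced by HAL_IncTick() from the SysTick handler, so a missing or masked handler makes HAL_Delay() hang forever.

/* Normally generated in stm32l0xx_it.c; HAL_Delay() never returns
   if this interrupt doesn't fire (e.g. interrupts disabled or the
   handler missing from the vector table). */
void SysTick_Handler(void)
{
    HAL_IncTick();
}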
Update 2:
Using the loop the following way is not possible either; the LED does not turn on at all.
while (1) {
    HAL_GPIO_TogglePin(LED_GPIO_Port, LED_Pin);
}
See the following images for the configuration.
Update 3:
When executing the code on the STM32L031G6, the debugger stops pretty soon. Stepping through the code works (sometimes). Here is the debug log when clicking "Run" in the STM32Cube IDE.
SEGGER J-Link GDB Server V7.58 Command Line Version
JLinkARM.dll V7.58 (DLL compiled Nov 4 2021 16:23:13)
Command line: -port 2331 -s -device STM32L031G6 -endian little -speed 4000 -if swd -vd
-----GDB Server start settings-----
GDBInit file: none
GDB Server Listening port: 2331
SWO raw output listening port: 2332
Terminal I/O port: 2333
Accept remote connection: localhost only
Generate logfile: off
Verify download: on
Init regs on start: off
Silent mode: off
Single run mode: on
Target connection timeout: 0 ms
------J-Link related settings------
J-Link Host interface: USB
J-Link script: none
J-Link settings file: none
------Target related settings------
Target device: STM32L031G6
Target interface: SWD
Target interface speed: 4000kHz
Target endian: little
Connecting to J-Link...
J-Link is connected.
Firmware: J-Link V11 compiled Dec 9 2021 14:14:49
Hardware: V11.00
S/N: 261014681
OEM: SEGGER-EDU
Feature(s): FlashBP, GDB
Checking target voltage...
Target voltage: 3.34 V
Listening on TCP/IP port 2331
Connecting to target...
Connected to target
Waiting for GDB connection...Connected to 127.0.0.1
GDB closed TCP/IP connection (Socket 1132)
Connected to 127.0.0.1
Reading all registers
Read 4 bytes @ address 0x1FF000FC (Data = 0x89B8D002)
Read 2 bytes @ address 0x1FF000FC (Data = 0xD002)
Received monitor command: WriteDP 0x2 0xF0
O.K.
Received monitor command: ReadAP 0x2
O.K.:0xF0000003
Read 4 bytes @ address 0x1FF000E4 (Data = 0x05408A28)
Read 2 bytes @ address 0x1FF000E4 (Data = 0x8A28)
Read 4 bytes @ address 0x1FF000E4 (Data = 0x05408A28)
Read 2 bytes @ address 0x1FF000E4 (Data = 0x8A28)
Reading 32 bytes @ address 0xF0000FD0
Connected to 127.0.0.1
Reading all registers
Read 4 bytes @ address 0x1FF000FC (Data = 0x89B8D002)
Read 2 bytes @ address 0x1FF000FC (Data = 0xD002)
Received monitor command: reset
Resetting target
Downloading 192 bytes @ address 0x08000000 - Verified OK
Downloading 6072 bytes @ address 0x080000C0 - Verified OK
Downloading 28 bytes @ address 0x08001878 - Verified OK
Downloading 8 bytes @ address 0x08001894 - Verified OK
Downloading 4 bytes @ address 0x0800189C - Verified OK
Downloading 4 bytes @ address 0x080018A0 - Verified OK
Downloading 12 bytes @ address 0x080018A4 - Verified OK
Writing register (PC = 0x 80006d0)
Starting target CPU...
GDB closed TCP/IP connection (Socket 1128)
Debugger requested to halt target...
...Target halted (PC = 0x1FF000E4)
Reading all registers
Read 4 bytes @ address 0x1FF000E4 (Data = 0x05408A28)
Read 2 bytes @ address 0x1FF000E4 (Data = 0x8A28)
GDB closed TCP/IP connection (Socket 1152)
Restoring target state and closing J-Link connection...
Shutting down...
On the other hand, the same code works on the STM32L031K6 of a Nucleo board with the ST-LINK disconnected.
Update 4:
Since I'm using a custom board, there may be a flaw in the schematics. I don't see any issues with the circuit, but maybe you do. There is no crystal, since one shouldn't be required according to the datasheet; internal oscillators are available.
The TOUCH net is just a circuit that connects the pin to GND when a button is pressed.
This is the circuit of the STM32L031G6U6.
This is the circuit of the LEDs that should be controlled. In the previous code I just try to control the LED with the net label STATUS_LED. Since I got the LED to blink while stepping through the code, the MOSFET circuit should work.
I'm currently very confused about why I have so many problems. I tried a second and a third PCB of the same circuit, but the problems are the same.
I figured out that I cannot use every clock configuration, although they are all offered by the STM32Cube IDE. Using the MSI just doesn't work for some frequencies; the code stalls in SystemClock_Config while setting the oscillator or the clock.
Why does HAL_Delay sometimes work and sometimes not?
Why doesn't the system start at all when trying to run the code (even with everything disconnected and just the power supply reconnected)?
Why does stepping through the code work while running the code does not?
Problem solved: the Altium package I downloaded was for the wrong package of the STM32L031. It is for the STM32L031G6U6S and not the STM32L031G6U6.

Transmitting 255 bytes of data using the STM32 I2C DMA low-level driver example

I am currently using the STM32L0538 Discovery board. In my project I have to use the low-level (LL) drivers to interface I2C with a slave device (ST25DV) using DMA.
I ported an LL example to the STM32L0538-DISCO board, referring to the LL example project available for the NUCLEO-L073RZ in the firmware repo (STM32Cube_FW_L0_V1.12.1).
The issue is that I am only able to transmit 4 bytes of data (slave address + 3 bytes of 8-bit data); afterwards the I2C generates a STOP condition, although the number of bytes to be transmitted is larger in both the DMA and I2C registers. I think the issue is with the DMA, as it accepts a uint32_t source memory address, but my data is of type uint8_t. I have tried typecasting as shown in the demo LL example, but it doesn't work.
Can anyone please tell me how I can transmit more than just 4 bytes of data, or where I am going wrong? Thanks in advance.
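One note on the typecast theory, offered tentatively: LL_DMA_ConfigAddresses() takes the buffer's *address* as a uint32_t, not the data itself; with LL_DMA_MDATAALIGN_BYTE the DMA still performs byte-wide reads, so a uint8_t buffer is fine and the cast is unlikely to be the cause of the 4-byte cutoff. A minimal sketch of the cast, reusing the names from the code below:

/* Only the address is cast to uint32_t; the data stays 8-bit because the
   channel is configured with LL_DMA_MDATAALIGN_BYTE / LL_DMA_PDATAALIGN_BYTE. */
LL_DMA_ConfigAddresses(DMA1, LL_DMA_CHANNEL_4,
                       (uint32_t)aLedOn, /* source: uint8_t array */
                       (uint32_t)LL_I2C_DMA_GetRegAddr(I2C2, LL_I2C_DMA_REG_DATA_TRANSMIT),
                       LL_DMA_DIRECTION_MEMORY_TO_PERIPH);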
Here is the sample code, ported from the STM32L0 firmware repo, which only sends 4 bytes of data:
uint8_t aLedOn[5] = {0x12, 0x34, 0x56, 0x77, 88};
__IO uint8_t ubNbDataToTransmit = sizeof(aLedOn);
uint8_t *pTransmitBuffer = (uint8_t *)aLedOn;
__IO uint8_t ubTransferComplete = 0;

#define SLAVE_OWN_ADDRESS 0xAE

int main(void)
{
    /* Reset of all peripherals, initializes the Flash interface and the SysTick. */
    LL_APB2_GRP1_EnableClock(LL_APB2_GRP1_PERIPH_SYSCFG);
    LL_APB1_GRP1_EnableClock(LL_APB1_GRP1_PERIPH_PWR);

    /* Configure the system clock */
    SystemClock_Config();
    MX_GPIO_Init();

    /* USER CODE BEGIN 2 */
    Configure_DMA();
    Configure_I2C_Master();
    LL_mDelay(1000);
    Handle_I2C_Master();
    /* USER CODE END 2 */

    while (1)
    {
        /* USER CODE END WHILE */
        /* USER CODE BEGIN 3 */
    }
}

void Configure_DMA(void)
{
    LL_AHB1_GRP1_EnableClock(LL_AHB1_GRP1_PERIPH_DMA1);

    NVIC_SetPriority(DMA1_Channel4_5_6_7_IRQn, 0);
    NVIC_EnableIRQ(DMA1_Channel4_5_6_7_IRQn);

    LL_DMA_ConfigTransfer(DMA1, LL_DMA_CHANNEL_4, LL_DMA_DIRECTION_MEMORY_TO_PERIPH | \
                                                  LL_DMA_PRIORITY_HIGH              | \
                                                  LL_DMA_MODE_NORMAL                | \
                                                  LL_DMA_PERIPH_NOINCREMENT         | \
                                                  LL_DMA_MEMORY_INCREMENT           | \
                                                  LL_DMA_PDATAALIGN_BYTE            | \
                                                  LL_DMA_MDATAALIGN_BYTE);
    LL_DMA_SetDataLength(DMA1, LL_DMA_CHANNEL_4, ubNbDataToTransmit);
    LL_DMA_ConfigAddresses(DMA1, LL_DMA_CHANNEL_4,
                           (uint32_t)pTransmitBuffer,
                           (uint32_t)LL_I2C_DMA_GetRegAddr(I2C2, LL_I2C_DMA_REG_DATA_TRANSMIT),
                           LL_DMA_GetDataTransferDirection(DMA1, LL_DMA_CHANNEL_4));
    LL_DMA_SetPeriphRequest(DMA1, LL_DMA_CHANNEL_4, LL_DMA_REQUEST_7);

    LL_DMA_EnableIT_TC(DMA1, LL_DMA_CHANNEL_4);
    LL_DMA_EnableIT_TE(DMA1, LL_DMA_CHANNEL_4);
}

void Configure_I2C_Master(void)
{
    LL_I2C_InitTypeDef I2C_InitStruct = {0};
    LL_GPIO_InitTypeDef GPIO_InitStruct = {0};

    /* Enable the peripheral clock of GPIOB */
    LL_IOP_GRP1_EnableClock(LL_IOP_GRP1_PERIPH_GPIOB);

    /* PB13 (I2C2_SCL): alternate function, open-drain, pull-up, AF5 */
    GPIO_InitStruct.Pin = LL_GPIO_PIN_13;
    GPIO_InitStruct.Mode = LL_GPIO_MODE_ALTERNATE;
    GPIO_InitStruct.Speed = LL_GPIO_SPEED_FREQ_VERY_HIGH;
    GPIO_InitStruct.OutputType = LL_GPIO_OUTPUT_OPENDRAIN;
    GPIO_InitStruct.Pull = LL_GPIO_PULL_UP;
    GPIO_InitStruct.Alternate = LL_GPIO_AF_5;
    LL_GPIO_Init(GPIOB, &GPIO_InitStruct);

    /* PB14 (I2C2_SDA): alternate function, open-drain, pull-up, AF5 */
    GPIO_InitStruct.Pin = LL_GPIO_PIN_14;
    GPIO_InitStruct.Mode = LL_GPIO_MODE_ALTERNATE;
    GPIO_InitStruct.Speed = LL_GPIO_SPEED_FREQ_VERY_HIGH;
    GPIO_InitStruct.OutputType = LL_GPIO_OUTPUT_OPENDRAIN;
    GPIO_InitStruct.Pull = LL_GPIO_PULL_UP;
    GPIO_InitStruct.Alternate = LL_GPIO_AF_5;
    LL_GPIO_Init(GPIOB, &GPIO_InitStruct);

    LL_APB1_GRP1_EnableClock(LL_APB1_GRP1_PERIPH_I2C2);

    LL_I2C_SetTiming(I2C2, 0x00100E16);
    LL_I2C_SetOwnAddress1(I2C2, 0x00, LL_I2C_OWNADDRESS1_7BIT);
    LL_I2C_DisableOwnAddress1(I2C2);
    LL_I2C_EnableClockStretching(I2C2);
    LL_I2C_SetDigitalFilter(I2C2, 0x00);
    LL_I2C_EnableAnalogFilter(I2C2);
    LL_I2C_EnableGeneralCall(I2C2);
    LL_I2C_SetOwnAddress2(I2C2, 0x00, LL_I2C_OWNADDRESS2_NOMASK);
    LL_I2C_DisableOwnAddress2(I2C2);
    LL_I2C_SetMasterAddressingMode(I2C2, LL_I2C_ADDRESSING_MODE_7BIT);
    LL_I2C_SetMode(I2C2, LL_I2C_MODE_I2C);

    /* Enable DMA transmission requests */
    LL_I2C_EnableDMAReq_TX(I2C2);

    LL_I2C_Enable(I2C2);
}

void Handle_I2C_Master(void)
{
    ubTransferComplete = 0;

    LL_DMA_EnableChannel(DMA1, LL_DMA_CHANNEL_4);

    LL_I2C_HandleTransfer(I2C2, SLAVE_OWN_ADDRESS, LL_I2C_ADDRSLAVE_7BIT,
                          ubNbDataToTransmit, LL_I2C_MODE_AUTOEND,
                          LL_I2C_GENERATE_START_WRITE);

    /* Loop until the DMA transfer-complete event */
    while (!ubTransferComplete)
    {
    }

    /* Loop until the STOP flag is raised */
    while (!LL_I2C_IsActiveFlag_STOP(I2C2))
    {
    }
    LL_I2C_ClearFlag_STOP(I2C2);
}

Communication between 2 STM32s

I am trying to get two STM32s to communicate over I2C. My configuration is as follows:
7-bit addressing mode (no dual address, only OAR1)
100 kHz speed
ACK enabled (on the slave)
ACK disabled (on the master, since only 1 byte is transferred between master and slave at any time)
On both master and slave, GPIOB PB6 is used as SCL (AF) and PB7 as SDA (AF). Where is the problem?
Master code:
#include "stm32f10x.h" // Device header #include "delay.h"
void pinConfig(void);
void i2c_Master_Config(void);
void sendData(uint8_t data);
int main() {
delay_init();
pinConfig();
i2c_Master_Config();
while(1)
{
uint8_t butonState=GPIOA->IDR & 0x00001000;
sendData(0x68,butonState);
delay_ms(10);
}
}
void pinConfig() {
RCC->APB1ENR |=1<<21;//Enable I2C 1 clock
RCC->APB2ENR |=1<<2;//Enable GPIOA clock
RCC->APB2ENR |=1<<3;//Enable GPIOB clock
RCC->APB2ENR |=1<<0;//Enable AFIO clock
GPIOA->CRL |= 0x00008000; //PA3 button pull-down
GPIOB->CRL = 0xFF000000; //SCL and SDA AF Open Drain SCL => PB6 SDA =>PB7
}
void i2c_Master_Config() {
I2C1->CR2 |=1<<5; //36 Mhz peripheral clock.
I2C1->CR2 |=1<<2; //36 Mhz peripheral clock.
I2C1->CCR =0x28;//100 khz clock
I2C1->TRISE =9;//1/8MHZ= 125 ns => 1000ns/125ns =8 => 8+1 =9
I2C1->CR1 |=(1<<0);//Peripheral enable..
}
void sendData(uint8_t data) {
volatile int temp;
while(I2C1->SR2 &(1<<1));//BUSY bit.
I2C1->CR1 |=1<<8;//START bit.
while(!(I2C1->SR1 & (1<<0))); //wait until start flag is set
I2C1->DR = slaveAdres<<1;//7 bit adress.
while(!(I2C1->SR1 &(1<<1)));//wait until addr flag is set
gecici=I2C1->SR2;//clear addr flag.
I2C1->DR = data;
while (!(I2C1->SR1 & (1<<7))){} //wait until txe is set
while (!(I2C1->SR1 & (1<<2)));//BTF(Byte transfer finished)=1 .
I2C1->CR1 |= 1<<9;//STOP bit.
I2C1->CR1 &=~(1<<0);//Peripheral disable.
}
Slave code:
#include "stm32f10x.h" // Device header
void pinConfig(void);
void i2c_Slave_Config(void);
uint8_t readData(void);
uint8_t data;
int main()
{
pinConfig();
i2c_Slave_Config();
while(1)
{
data=readData();
if(data==0)
GPIOB->BSRR |=1<<3;
else if(data==1)
GPIOB->BRR |=1<<3;
}
}
void pinConfig()
{
RCC->APB1ENR |=1<<21;//I2C 1 Clock Aktif.
RCC->APB2ENR |=1<<2;//Enable GPIOA clock
RCC->APB2ENR |=1<<3;//Enable GPIOB clock
RCC->APB2ENR |=1<<0;//Enable AFIO clock
GPIOA->CRL |= 0x00002000; //PA3 led.
GPIOB->CRL = 0xFF000000; //SCL and SDA AF Open Drain SCL => PB6 SDA =>PB7
GPIOA->BSRR |=1<<3;//Turn off the led.
}
void i2c_Slave_Config() {
RCC->APB1ENR |=1<<21;//I2C 1 Clock Enable.
I2C1->CR2 |=1<<5; //36 Mhz peripheral clock.
I2C1->CR2 |=1<<2; //36 Mhz peripheral clock.
I2C1->CCR =0x28;//100 khz clock
I2C1->OAR1 &=~(1<<15);//7-bit slave adress.
I2C1 ->CR1 |= 1<<10;//ACK enable.
//0x68 Slave Adress Configured.
I2C1 ->OAR1 &=~(1<<1);
I2C1 ->OAR1 &=~(1<<2);
I2C1 ->OAR1 &=~(1<<3);
I2C1 ->OAR1 &=~(1<<5);
I2C1 ->OAR1 |=(1<<4);
I2C1 ->OAR1 |=(1<<6);
I2C1 ->OAR1 |=(1<<7);
//0x68 Slave Adress Configured.
}
uint8_t readData()
{
volatile int temp;
uint8_t data;
I2C1->CR1 |=(1<<0);//Peripheral enable.
while(I2C1->SR2 &(1<<1));//BUSY bit.
I2C1->CR1 |=1<<8;//START bit.
while(!(I2C1->SR1 & (1<<0))); // wait until start flag is set.
while(!(I2C1->SR1 &(1<<1)));// wait until addr flag is set
temp=I2C1->SR2;//clear addr .
while (!(I2C1->SR1 & (1<<6))){} // wait until rxne is set
data=I2C1->DR;
while (!(I2C1->SR1 & (1<<4))){} // wait until STOPF is set
gecici=I2C1->SR1;
I2C1->SR1 |=1<<9;
I2C1->CR1 &=~(1<<0);//Peripheral disable.
return data;
}
The address doesn't match, and I do not see any response on the SDA and SCL signals on the scope. I pulled both SCL and SDA up to 3.3 V with a 4.7 kΩ resistor on each line. (STM32F103C6)
As written, the master I2C module is enabled once at startup in i2c_Master_Config(), then disabled at the end of sendData() and never enabled again.
The slave I2C module is enabled at the beginning and disabled at the end of readData().
That alone is enough for broken communication. The I2C peripheral module should stay enabled the whole time (until you understand when it should be disabled).
P.S. Sorry, I haven't analysed your code deeply — the many "magic numbers" instead of mnemonic bit names make analysis difficult. Perhaps there are other mistakes.
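A minimal sketch of that fix on the master side, using the mnemonic bit names from stm32f10x.h rather than magic numbers (the rest of the configuration is assumed unchanged):

void i2c_Master_Config(void)
{
    /* ... clock, CCR and TRISE setup as before ... */
    I2C1->CR1 |= I2C_CR1_PE; /* enable the peripheral once, here */
}

void sendData(uint8_t slaveAdres, uint8_t data)
{
    volatile int temp;
    while (I2C1->SR2 & I2C_SR2_BUSY);
    I2C1->CR1 |= I2C_CR1_START;
    while (!(I2C1->SR1 & I2C_SR1_SB));
    I2C1->DR = slaveAdres << 1;
    while (!(I2C1->SR1 & I2C_SR1_ADDR));
    temp = I2C1->SR2;            /* clear ADDR */
    I2C1->DR = data;
    while (!(I2C1->SR1 & I2C_SR1_TXE)) {}
    while (!(I2C1->SR1 & I2C_SR1_BTF));
    I2C1->CR1 |= I2C_CR1_STOP;   /* generate STOP, but leave PE set */
    /* no "I2C1->CR1 &= ~I2C_CR1_PE;" here */
}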

How to implement SPI on an STM32F3 board using the Standard Peripheral Library?

I am trying to implement SPI on an STM32F3 Discovery board using the Standard Peripheral Library. I don't want to use the HAL drivers (it is a constraint). I am not able to figure out what I am missing. I am implementing SPI in slave mode. Below is the SPI configuration and the code in the main function.
int main(void)
{
    /* Initialize the SPI communication */
    SPI_I2S_DeInit(SPIx);
    SPI_InitStructure.SPI_Mode = SPI_Mode_Slave;
    SPI_Init(SPIx, &SPI_InitStructure);

    /* Initialize the FIFO threshold */
    SPI_RxFIFOThresholdConfig(SPIx, SPI_RxFIFOThreshold_QF);

    while (1)
    {
        /* Start SPI transfer */

        /* DMA channel Rx of SPI configuration */
        DMA_InitStructure.DMA_BufferSize = NumberOfByte;
        DMA_InitStructure.DMA_PeripheralBaseAddr = (uint32_t)SPIx_DR_ADDRESS;
        DMA_InitStructure.DMA_MemoryBaseAddr = (uint32_t)RxBuffer;
        DMA_InitStructure.DMA_DIR = DMA_DIR_PeripheralSRC;
        DMA_InitStructure.DMA_Priority = DMA_Priority_High;
        DMA_Init(SPIx_RX_DMA_CHANNEL, &DMA_InitStructure);

        /* DMA channel Tx of SPI configuration */
        DMA_InitStructure.DMA_BufferSize = NumberOfByte;
        DMA_InitStructure.DMA_PeripheralBaseAddr = (uint32_t)SPIx_DR_ADDRESS;
        DMA_InitStructure.DMA_MemoryBaseAddr = (uint32_t)TxBuffer;
        DMA_InitStructure.DMA_DIR = DMA_DIR_PeripheralDST;
        DMA_InitStructure.DMA_Priority = DMA_Priority_Low;
        DMA_Init(SPIx_TX_DMA_CHANNEL, &DMA_InitStructure);

        /* Enable the SPI Rx and Tx DMA requests */
        SPI_I2S_DMACmd(SPIx, SPI_I2S_DMAReq_Rx, ENABLE);
        SPI_I2S_DMACmd(SPIx, SPI_I2S_DMAReq_Tx, ENABLE);

        /* Enable the SPI peripheral */
        SPI_Cmd(SPIx, ENABLE);

        /* Enable the DMA channels */
        DMA_Cmd(SPIx_RX_DMA_CHANNEL, ENABLE);
        DMA_Cmd(SPIx_TX_DMA_CHANNEL, ENABLE);

        /* Wait until the SPI DMA transfers complete or time out
           (note: TimeOut is never decremented inside these loops as written) */
        while (DMA_GetFlagStatus(SPIx_RX_DMA_FLAG_TC) == RESET)
        {}

        TimeOut = USER_TIMEOUT;
        while ((DMA_GetFlagStatus(SPIx_TX_DMA_FLAG_TC) == RESET) && (TimeOut != 0x00))
        {}
        if (TimeOut == 0)
        {
            //TimeOut_UserCallback();
            count += 1;
        }

        /* The BSY flag can be monitored to ensure that the SPI communication is
           complete. This is required to avoid corrupting the last transmission
           before disabling the SPI or entering Stop mode. The software must
           first wait until TXE = 1 and then until BSY = 0. */
        TimeOut = USER_TIMEOUT;
        while ((SPI_I2S_GetFlagStatus(SPIx, SPI_I2S_FLAG_TXE) == RESET) && (TimeOut != 0x00))
        {}
        if (TimeOut == 0)
        {
            //TimeOut_UserCallback();
            count += 1;
        }

        TimeOut = USER_TIMEOUT;
        while ((SPI_I2S_GetFlagStatus(SPIx, SPI_I2S_FLAG_BSY) == SET) && (TimeOut != 0x00))
        {}
        if (TimeOut == 0)
        {
            //TimeOut_UserCallback();
            count += 1;
        }

        /* Clear DMA1 global flags */
        DMA_ClearFlag(SPIx_TX_DMA_FLAG_GL);
        DMA_ClearFlag(SPIx_RX_DMA_FLAG_GL);

        /* Disable the DMA channels */
        DMA_Cmd(SPIx_RX_DMA_CHANNEL, DISABLE);
        DMA_Cmd(SPIx_TX_DMA_CHANNEL, DISABLE);

        /* Disable the SPI peripheral */
        SPI_Cmd(SPIx, DISABLE);

        /* Disable the SPI Rx and Tx DMA requests */
        SPI_I2S_DMACmd(SPIx, SPI_I2S_DMAReq_Rx, DISABLE);
        SPI_I2S_DMACmd(SPIx, SPI_I2S_DMAReq_Tx, DISABLE);
    }
}
The SPI configuration is as follows:
static void SPI_Config(void)
{
    GPIO_InitTypeDef GPIO_InitStructure;

    /* Enable the SPI peripheral clock */
    RCC_APB2PeriphClockCmd(SPIx_CLK, ENABLE);
    /* Enable the DMA peripheral clock */
    RCC_AHBPeriphClockCmd(SPI_DMAx_CLK | TIM_DMAx_CLK, ENABLE);
    /* Enable the TIM peripheral clock */
    RCC_APB1PeriphClockCmd(TIMx_CLK, ENABLE);
    /* Enable SCK, MOSI, MISO and NSS GPIO clocks */
    RCC_AHBPeriphClockCmd(SPIx_SCK_GPIO_CLK | SPIx_MISO_GPIO_CLK |
                          SPIx_MOSI_GPIO_CLK | SPIx_NSS_GPIO_CLK, ENABLE);
    /* Enable the TIM DMA trigger clock */
    RCC_AHBPeriphClockCmd(TIMx_TRIGGER_GPIO_CLK, ENABLE);

    /* SPI pin mappings */
    GPIO_PinAFConfig(SPIx_SCK_GPIO_PORT, SPIx_SCK_SOURCE, SPIx_SCK_AF);
    GPIO_PinAFConfig(SPIx_MOSI_GPIO_PORT, SPIx_MOSI_SOURCE, SPIx_MOSI_AF);
    GPIO_PinAFConfig(SPIx_MISO_GPIO_PORT, SPIx_MISO_SOURCE, SPIx_MISO_AF);
    GPIO_PinAFConfig(SPIx_NSS_GPIO_PORT, SPIx_NSS_SOURCE, SPIx_NSS_AF);
    /* TIM capture compare pin mapping */
    GPIO_PinAFConfig(TIMx_TRIGGER_GPIO_PORT, TIMx_TRIGGER_SOURCE, TIMx_TRIGGER_AF);

    GPIO_InitStructure.GPIO_Mode = GPIO_Mode_AF;
    GPIO_InitStructure.GPIO_OType = GPIO_OType_PP;
    GPIO_InitStructure.GPIO_PuPd = GPIO_PuPd_DOWN;
    GPIO_InitStructure.GPIO_Speed = GPIO_Speed_50MHz;

    /* SPI SCK pin configuration */
    GPIO_InitStructure.GPIO_Pin = SPIx_SCK_PIN;
    GPIO_Init(SPIx_SCK_GPIO_PORT, &GPIO_InitStructure);
    /* SPI MOSI pin configuration */
    GPIO_InitStructure.GPIO_Pin = SPIx_MOSI_PIN;
    GPIO_Init(SPIx_MOSI_GPIO_PORT, &GPIO_InitStructure);
    /* SPI MISO pin configuration */
    GPIO_InitStructure.GPIO_Pin = SPIx_MISO_PIN;
    GPIO_Init(SPIx_MISO_GPIO_PORT, &GPIO_InitStructure);
    /* SPI NSS pin configuration */
    GPIO_InitStructure.GPIO_Pin = SPIx_NSS_PIN;
    GPIO_Init(SPIx_NSS_GPIO_PORT, &GPIO_InitStructure);
    /* Configure the TIM channel capture compare pin as DMA trigger */
    GPIO_InitStructure.GPIO_Pin = TIMx_TRIGGER_PIN;
    GPIO_Init(TIMx_TRIGGER_GPIO_PORT, &GPIO_InitStructure);

    /* SPI configuration ------------------------------------------------- */
    SPI_I2S_DeInit(SPIx);
    SPI_InitStructure.SPI_Direction = SPI_Direction_2Lines_FullDuplex;
    SPI_InitStructure.SPI_DataSize = SPI_DATASIZE;
    SPI_InitStructure.SPI_CPOL = SPI_CPOL_Low;
    SPI_InitStructure.SPI_CPHA = SPI_CPHA_1Edge;
    SPI_InitStructure.SPI_NSS = SPI_NSS_Hard;
    SPI_InitStructure.SPI_BaudRatePrescaler = SPI_BaudRatePrescaler_64;
    SPI_InitStructure.SPI_FirstBit = SPI_FirstBit_MSB;
    SPI_InitStructure.SPI_CRCPolynomial = 7;

    /* DMA configuration ------------------------------------------------- */
    DMA_InitStructure.DMA_PeripheralDataSize = DMA_PeripheralDataSize_Byte;
    DMA_InitStructure.DMA_MemoryDataSize = DMA_MemoryDataSize_Byte;
    DMA_InitStructure.DMA_PeripheralInc = DMA_PeripheralInc_Disable;
    DMA_InitStructure.DMA_MemoryInc = DMA_MemoryInc_Enable;
    DMA_InitStructure.DMA_Mode = DMA_Mode_Normal;
    DMA_InitStructure.DMA_M2M = DMA_M2M_Disable;
}
Can anyone tell me where the problem is? I have master code using the HAL drivers on another board, and it works fine (tested), so there is no issue on the master side. The slave side is the problem.
Your code is in the while(1) loop, where you are configuring and initializing the SPI peripheral on every iteration. Move that into a separate function, and initialize and configure it only once. Also, first try it without DMA, and only later put DMA back in.
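For the "try it without DMA first" step, a minimal polled slave transfer might look like the sketch below (assuming 8-bit data size and that SPIx and the GPIOs are already configured as in the question):

/* Exchange one byte as an SPI slave by polling, no DMA involved.
   SPI_SendData8()/SPI_ReceiveData8() are the F3 SPL byte-access calls. */
uint8_t SPI_SlaveExchangeByte(uint8_t tx)
{
    while (SPI_I2S_GetFlagStatus(SPIx, SPI_I2S_FLAG_TXE) == RESET) {}
    SPI_SendData8(SPIx, tx);
    while (SPI_I2S_GetFlagStatus(SPIx, SPI_I2S_FLAG_RXNE) == RESET) {}
    return SPI_ReceiveData8(SPIx);
}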

How to define platform_data in a Linux 3.8 device tree structure (DTS) file

I'm trying to get the at86rf230 kernel driver running on a BeagleBone Black to communicate with my radio. I have confirmed that I am able to interact with the device using some userspace SPI code. Here's the fragment of the DTS file I'm working with:
fragment@0 {
    target = <&am33xx_pinmux>;
    __overlay__ {
        spi1_pins_s0: spi1_pins_s0 {
            pinctrl-single,pins = <
                0x040 0x37 /* DIG2  GPIO_9.15 I_PULLUP | MODE7-GPIO1_16 */
                0x044 0x17 /* SLPTR GPIO_9.23 O_PULLUP | MODE7-GPIO1_17 */
                0x1AC 0x17 /* RSTN  GPIO_9.25 O_PULLUP | MODE7-GPIO3_21 */
                0x1A4 0x37 /* IRQ   GPIO_9.26 I_PULLUP | MODE7-GPIO3_19 */
                0x190 0x33 /* SCLK  mcasp0_aclkx.spi1_sclk,  INPUT_PULLUP | MODE3 */
                0x194 0x33 /* MISO  mcasp0_fsx.spi1_d0,      INPUT_PULLUP | MODE3 */
                0x198 0x13 /* MOSI  mcasp0_axr0.spi1_d1,     OUTPUT_PULLUP | MODE3 */
                0x19c 0x13 /* SCS0  mcasp0_ahclkr.spi1_cs0,  OUTPUT_PULLUP | MODE3 */
            >;
        };
    };
};
fragment@3 {
    target = <&spi1>;
    __overlay__ {
        #address-cells = <1>;
        #size-cells = <0>;
        status = "okay";
        pinctrl-names = "default";
        pinctrl-0 = <&spi1_pins_s0>;

        at86rf230@0 {
            spi-max-frequency = <1000000>;
            reg = <0>;
            compatible = "at86rf230";
            interrupts = <19>;
            interrupt-parent = <&gpio3>;
        };
    };
};
On loading the module I get the following error in dmesg:
[ 352.668833] at86rf230 spi1.0: no platform_data
[ 352.668945] at86rf230: probe of spi1.0 failed with error -22
I am trying to work out the right way to attach platform_data to the SPI overlay. Here's what I'd like to attach:
platform_data {
    rstn = <&gpio3 21 0>;
    slp_tr = <&gpio1 17 0>;
    dig2 = <&gpio1 16 0>;
};
Unfortunately, just sticking it in as-is doesn't work so well when I use dtc to compile the DTS. I get the following error:
syntax error: properties must precede subnodes
FATAL ERROR: Unable to parse input tree
I feel that I'm ridiculously close to solving this, and I just need a little shove in the right direction ;)
First of all, the GPIO names in your excerpt are wrong. According to the latest code in linux-next, there are:
pdata->rstn = of_get_named_gpio(spi->dev.of_node, "reset-gpio", 0);
pdata->slp_tr = of_get_named_gpio(spi->dev.of_node, "sleep-gpio", 0);
There are only two of them.
Second, you have to adjust the DTS for your exact board. The entire DTS has to be considered as the platform data for all devices found on the board (some supported, some possibly not). The section for the specific device should be described as a device node.
So, a good starting point is to check what already exists upstream, namely arch/arm/boot/dts/am335x-boneblack.dts; don't forget to check the included files as well.
And the example for this specific driver is in Documentation/devicetree/bindings/net/ieee802154/at86rf230.txt.
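A tentative sketch of the device node with those property names, reusing the GPIO phandles from your platform_data attempt (verify the exact property names against the binding document above):

at86rf230@0 {
    compatible = "at86rf230";
    spi-max-frequency = <1000000>;
    reg = <0>;
    interrupts = <19>;
    interrupt-parent = <&gpio3>;
    reset-gpio = <&gpio3 21 0>; /* was "rstn" in the platform_data attempt */
    sleep-gpio = <&gpio1 17 0>; /* was "slp_tr" */
};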