I'm using an STM32F4 and I want to communicate with my LSM303 accelerometer. For that I'm using I2C, and plain I2C works fine, but when I try to use DMA with it, it stops working.
When I use HAL_I2C_Master_Transmit_DMA it works and I get into the IRQHandler. But when I then want to use HAL_I2C_Master_Receive_DMA, it says that the state of the I2C is not ready...
I've read that the I2C peripheral is kind of messed up on the STM32Fx parts, but I don't understand why it works fine without DMA.
Also, when it hits the I2C_DMAXferCplt callback for Master_Transmit_DMA, the CurrentState of the I2C_HandleTypeDef is still equal to HAL_I2C_STATE_BUSY_TX, and therefore it never puts the state back to READY. That's why nothing is received when I call Master_Receive_DMA.
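The call sequence is basically this (stripped down to a sketch; the slave address and register value below are placeholders, not my exact ones):

/* Sketch of the failing sequence; 0x19 and 0x28|0x80 are illustrative values only. */
void lsm303_read_sketch(void)
{
    uint8_t reg = 0x28 | 0x80;   /* e.g. OUT_X_L_A with auto-increment, for illustration */
    uint8_t data[6];

    if (HAL_I2C_Master_Transmit_DMA(&I2C_ST_INS, 0x19 << 1, &reg, 1) != HAL_OK)
        Error_Handler();

    HAL_Delay(1);   /* the DMA transfer-complete IRQ fires, but the handle stays in HAL_I2C_STATE_BUSY_TX */

    /* This call then fails because the state never went back to HAL_I2C_STATE_READY. */
    if (HAL_I2C_Master_Receive_DMA(&I2C_ST_INS, 0x19 << 1, data, 6) != HAL_OK)
        Error_Handler();
}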
Here's my I2C init:
void MX_I2C2_Init(void)
{
  I2C_ST_INS.Instance = I2C2;
  I2C_ST_INS.Init.ClockSpeed = 400000;
  I2C_ST_INS.Init.DutyCycle = I2C_DUTYCYCLE_2;
  I2C_ST_INS.Init.OwnAddress1 = 0;
  I2C_ST_INS.Init.AddressingMode = I2C_ADDRESSINGMODE_7BIT;
  I2C_ST_INS.Init.DualAddressMode = I2C_DUALADDRESS_DISABLED;
  I2C_ST_INS.Init.OwnAddress2 = 0;
  I2C_ST_INS.Init.GeneralCallMode = I2C_GENERALCALL_DISABLED;
  I2C_ST_INS.Init.NoStretchMode = I2C_NOSTRETCH_DISABLED;
  HAL_I2C_Init(&I2C_ST_INS);
}
void HAL_I2C_MspInit(I2C_HandleTypeDef* i2cHandle)
{
  GPIO_InitTypeDef GPIO_InitStruct;

  if(i2cHandle->Instance==I2C1)
  {
    //Not useful for this post
  }
  else if(i2cHandle->Instance==I2C2)
  {
    GPIO_InitStruct.Pin = MASTER_IMUB_I2C_SDA_Pin|MASTER_IMUB_I2C_SCL_Pin;
    GPIO_InitStruct.Mode = GPIO_MODE_AF_OD;
    GPIO_InitStruct.Pull = GPIO_PULLUP;
    GPIO_InitStruct.Speed = GPIO_SPEED_FREQ_VERY_HIGH;
    GPIO_InitStruct.Alternate = GPIO_AF4_I2C2;
    HAL_GPIO_Init(GPIOF, &GPIO_InitStruct);

    __HAL_RCC_I2C2_CLK_ENABLE();

    /* DMA controller clock enable */
    __HAL_RCC_DMA1_CLK_ENABLE();

    hdma_i2c2_rx.Instance = DMA1_Stream2;
    hdma_i2c2_rx.Init.Channel = DMA_CHANNEL_7;
    hdma_i2c2_rx.Init.Direction = DMA_PERIPH_TO_MEMORY;
    hdma_i2c2_rx.Init.PeriphInc = DMA_PINC_DISABLE;
    hdma_i2c2_rx.Init.MemInc = DMA_MINC_ENABLE;
    hdma_i2c2_rx.Init.PeriphDataAlignment = DMA_PDATAALIGN_BYTE;
    hdma_i2c2_rx.Init.MemDataAlignment = DMA_MDATAALIGN_BYTE;
    hdma_i2c2_rx.Init.Mode = DMA_NORMAL;
    hdma_i2c2_rx.Init.Priority = DMA_PRIORITY_VERY_HIGH;
    hdma_i2c2_rx.Init.FIFOMode = DMA_FIFOMODE_ENABLE;
    hdma_i2c2_rx.Init.FIFOThreshold = DMA_FIFO_THRESHOLD_FULL;
    hdma_i2c2_rx.Init.MemBurst = DMA_MBURST_SINGLE;
    hdma_i2c2_rx.Init.PeriphBurst = DMA_PBURST_SINGLE;
    if (HAL_DMA_Init(&hdma_i2c2_rx) != HAL_OK)
    {
      Error_Handler();
    }
    __HAL_LINKDMA(i2cHandle,hdmarx,hdma_i2c2_rx);

    HAL_NVIC_SetPriority(DMA1_Stream2_IRQn, 0, 0);
    HAL_NVIC_EnableIRQ(DMA1_Stream2_IRQn);

    hdma_i2c2_tx.Instance = DMA1_Stream7;
    hdma_i2c2_tx.Init.Channel = DMA_CHANNEL_7;
    hdma_i2c2_tx.Init.Direction = DMA_MEMORY_TO_PERIPH;
    hdma_i2c2_tx.Init.PeriphInc = DMA_PINC_DISABLE;
    hdma_i2c2_tx.Init.MemInc = DMA_MINC_ENABLE;
    hdma_i2c2_tx.Init.PeriphDataAlignment = DMA_PDATAALIGN_BYTE;
    hdma_i2c2_tx.Init.MemDataAlignment = DMA_MDATAALIGN_BYTE;
    hdma_i2c2_tx.Init.Mode = DMA_NORMAL;
    hdma_i2c2_tx.Init.Priority = DMA_PRIORITY_VERY_HIGH;
    hdma_i2c2_tx.Init.FIFOMode = DMA_FIFOMODE_ENABLE;
    hdma_i2c2_tx.Init.FIFOThreshold = DMA_FIFO_THRESHOLD_FULL;
    hdma_i2c2_tx.Init.MemBurst = DMA_MBURST_SINGLE;
    hdma_i2c2_tx.Init.PeriphBurst = DMA_PBURST_SINGLE;
    if (HAL_DMA_Init(&hdma_i2c2_tx) != HAL_OK)
    {
      Error_Handler();
    }
    __HAL_LINKDMA(i2cHandle,hdmatx,hdma_i2c2_tx);

    HAL_NVIC_SetPriority(DMA1_Stream7_IRQn, 0, 0);
    HAL_NVIC_EnableIRQ(DMA1_Stream7_IRQn);
  }
}
Do you have any idea why it does not work when I'm using DMA with I2C?
Thanks,
Victor
It worked for me when I enabled the I2C event interrupt on top of the DMA interrupts; see the generated code below (from the CubeMX NVIC configuration).
HAL_NVIC_SetPriority(I2C1_EV_IRQn, 0, 0);
HAL_NVIC_EnableIRQ(I2C1_EV_IRQn);
CubeMX does not automatically check the I2C1 event global interrupt when you select DMA. I think it should (ST, please fix this), as I don't see how it can work without it.
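For the I2C2 setup from the question, that translates roughly to the following (a sketch; I2C_ST_INS is the handle name used above, and CubeMX normally generates the handler in stm32f4xx_it.c once the event interrupt is ticked):

/* In HAL_I2C_MspInit(), after linking the DMA handles, also enable the I2C2 event interrupt: */
HAL_NVIC_SetPriority(I2C2_EV_IRQn, 0, 0);
HAL_NVIC_EnableIRQ(I2C2_EV_IRQn);

/* In stm32f4xx_it.c: without this, the HAL never finishes the DMA transfer and the state stays BUSY. */
extern I2C_HandleTypeDef I2C_ST_INS;

void I2C2_EV_IRQHandler(void)
{
    HAL_I2C_EV_IRQHandler(&I2C_ST_INS);
}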
I've had the same issue. I solved it by lowering the frequency.
The ST errata document says you have to step the I2C frequency down to 88 kHz to work around another problem.
I know it doesn't explain why this error doesn't occur in blocking mode but does with DMA, but I hope it helps.
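In your init that would just be (a sketch; 88 kHz is the figure I remember from the errata, check the document for your exact device):

/* In MX_I2C2_Init(), before HAL_I2C_Init(): drop from Fast-mode 400 kHz to <= 88 kHz. */
I2C_ST_INS.Init.ClockSpeed = 88000;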
I had been struggling with the same problem, on an STM32F407 and I2C1.
After searching for potential bugs in the program flow, I found out that the function HAL_I2C_Master_Transmit_DMA leads to the following line:
dmaxferstatus = HAL_DMA_Start_IT(hi2c->hdmatx, (uint32_t)hi2c->pBuffPtr, (uint32_t)&hi2c->Instance->DR, hi2c->XferSize);
After the first transfer, this no longer returns HAL_OK, which is necessary for the transmission to continue.
So my solution was simply to abort the previous DMA transfer in the callback function that is called after the transmission has completed. The same applies to HAL_I2C_Master_Receive_DMA. To resolve the problem, I added the following callback functions in main.c:
void HAL_I2C_MasterTxCpltCallback(I2C_HandleTypeDef *hi2c)
{
  if (hi2c->Instance==hi2c1.Instance)
  {
    HAL_DMA_Abort_IT(hi2c->hdmatx);
  }
}

void HAL_I2C_MasterRxCpltCallback(I2C_HandleTypeDef *hi2c)
{
  if (hi2c->Instance==hi2c1.Instance)
  {
    HAL_DMA_Abort_IT(hi2c->hdmarx);
  }
}
Please note this is only a workaround. If someone finds out more about the underlying reason for this bug, I would like to know.
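With these callbacks in place, an alternating transfer sequence can look roughly like this (just a sketch; the helper name and the register value are placeholders):

void read_block_sketch(uint16_t dev_addr)             /* hypothetical helper */
{
    uint8_t reg = 0x00;                                /* illustrative register address */
    uint8_t data[6];

    HAL_I2C_Master_Transmit_DMA(&hi2c1, dev_addr, &reg, 1);
    while (HAL_I2C_GetState(&hi2c1) != HAL_I2C_STATE_READY) { }   /* TxCpltCallback aborts the DMA */

    HAL_I2C_Master_Receive_DMA(&hi2c1, dev_addr, data, sizeof data);
    while (HAL_I2C_GetState(&hi2c1) != HAL_I2C_STATE_READY) { }   /* RxCpltCallback aborts the DMA */
}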
I am trying to do UART communication, but the HAL_UART_TxCpltCallback function is never called.
Also, USART1_IRQHandler is not executed. I think there is a setting that enables the UART interrupt, but I don't know what it is.
The source code is as follows. If you have more information, please tell me. Thanks.
/// main source
HW_UART_Init( );
uint8_t init_ment[60] = "\n\rWelcome to RF Test program\n\r";
strcpy(uart1_txbuffer,init_ment);
HAL_UART_Transmit_IT(&huart1, uart1_txbuffer, 30);
HAL_Delay(50);
InitQueue(&queue);
HAL_UART_Receive_IT(&huart1, uart1_rxbuffer, 1);
HAL_Delay(1);
void HW_UART_Init( void )
{
  /* USER CODE END USART1_Init 1 */
  huart1.Instance = USART1;
  huart1.Init.BaudRate = 19200;
  huart1.Init.WordLength = UART_WORDLENGTH_8B;
  huart1.Init.StopBits = UART_STOPBITS_1;
  huart1.Init.Parity = UART_PARITY_NONE;
  huart1.Init.Mode = UART_MODE_TX_RX;
  huart1.Init.HwFlowCtl = UART_HWCONTROL_NONE;
  huart1.Init.OverSampling = UART_OVERSAMPLING_16;
  huart1.Init.OneBitSampling = UART_ONE_BIT_SAMPLE_DISABLE;
  huart1.AdvancedInit.AdvFeatureInit = UART_ADVFEATURE_NO_INIT;
  if (HAL_UART_Init(&huart1) != HAL_OK)
  {
    Error_Handler();
  }
  /* USER CODE BEGIN USART1_Init 2 */
}
If you are using CubeMX to initialize your MCU, there is a tab in the USART section, "NVIC Settings", where you can enable the USART1 global interrupt.
It adds these two lines to your code, in the stm32f4xx_hal_msp.c file.
HAL_NVIC_SetPriority(USART1_IRQn, 0, 0);
HAL_NVIC_EnableIRQ(USART1_IRQn);
I suggest you use CubeMX.
If you do your own configuration without CubeMX, you should first check whether you have added the required UART files to your project. STM32Cube defines the IRQ handlers inside the "xxxx_it.c" file, so possibly you don't have it. The necessary NVIC configuration is also done by CubeMX.
I suggest you use CubeMX; then you can find out what is wrong here. In your code snippet there seems to be nothing wrong.
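For completeness, the pieces the HAL interrupt API relies on look roughly like this (a sketch; huart1 is your handle, and the exact xxxx prefix of the msp/it files depends on your STM32 family):

extern UART_HandleTypeDef huart1;

/* xxxx_hal_msp.c: enable the USART1 interrupt in the NVIC. */
void HAL_UART_MspInit(UART_HandleTypeDef *huart)
{
    if (huart->Instance == USART1)
    {
        /* existing clock and GPIO setup for USART1 goes here too */
        HAL_NVIC_SetPriority(USART1_IRQn, 0, 0);
        HAL_NVIC_EnableIRQ(USART1_IRQn);
    }
}

/* xxxx_it.c: hand every USART1 interrupt to the HAL state machine. */
void USART1_IRQHandler(void)
{
    HAL_UART_IRQHandler(&huart1);
}

/* main.c: once the two pieces above exist, this callback starts firing. */
void HAL_UART_TxCpltCallback(UART_HandleTypeDef *huart)
{
    if (huart->Instance == USART1)
    {
        /* transmission complete */
    }
}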
I'm using an STWINKT1 board with an encoder connected to timer 3.
I want to get an interrupt every time the encoder count reaches 300, so the code in the interrupt is:
void TIM3_IRQHandler(void){
  // do things here

  // reset the encoder so I'll start counting again
  __HAL_TIM_SET_COUNTER(&encoderTimer, 0);

  // clear the interrupt
  HAL_TIM_IRQHandler(&encoderTimer);
}
But no matter how I define the encoder, I get an interrupt for each pulse; this is useless and consumes CPU for no reason.
This is how I define the encoder and start it:
void Init_Encoder_TIM(void){
  TIM_Encoder_InitTypeDef Config = {0};
  TIM_MasterConfigTypeDef MasterConfig = {0};

  //define the encoder clock
  encoderTimer.Instance = TIM_ENCODER;
  encoderTimer.Init.Prescaler = 0;
  encoderTimer.Init.CounterMode = TIM_COUNTERMODE_UP;
  encoderTimer.Init.Period = 0xFFFF;
  encoderTimer.Init.ClockDivision = TIM_CLOCKDIVISION_DIV1;
  encoderTimer.Init.AutoReloadPreload = TIM_AUTORELOAD_PRELOAD_DISABLE;

  Config.EncoderMode = TIM_ENCODERMODE_TI1;
  Config.IC1Polarity = TIM_ICPOLARITY_RISING;
  Config.IC1Selection = TIM_ICSELECTION_DIRECTTI;
  Config.IC1Prescaler = TIM_ICPSC_DIV1;
  Config.IC1Filter = 0;
  Config.IC2Polarity = TIM_ICPOLARITY_RISING;
  Config.IC2Selection = TIM_ICSELECTION_DIRECTTI;
  Config.IC2Prescaler = TIM_ICPSC_DIV1;
  Config.IC2Filter = 0;
  if (HAL_TIM_Encoder_Init(&encoderTimer, &Config) != HAL_OK)
  {
    Error_Handler();
  }

  MasterConfig.MasterOutputTrigger = TIM_TRGO_RESET;
  MasterConfig.MasterSlaveMode = TIM_MASTERSLAVEMODE_DISABLE;
  if (HAL_TIMEx_MasterConfigSynchronization(&encoderTimer, &MasterConfig) != HAL_OK)
  {
    Error_Handler();
  }

  HAL_TIM_Encoder_Start_IT(&encoderTimer, TIM_CHANNEL_ALL);
}
I tried setting the prescaler to 300; it didn't work.
I tried setting the Period to 300; it didn't work either.
I tried setting CCR3 manually to 300 and activating the interrupt channel (roughly as sketched below), and that didn't work either.
To be clear: the interrupt works, but I get it for each encoder pulse instead of every 300 pulses.
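For reference, the CCR3 attempt was roughly along these lines (a sketch, not my exact code; channel 3 should be free because the encoder interface only uses channels 1 and 2):

void Start_Encoder_With_Count_IRQ(void)      /* sketch of the CCR3 attempt */
{
    Init_Encoder_TIM();                       /* encoder init + HAL_TIM_Encoder_Start_IT() from above */

    /* Fire a compare interrupt when the encoder count reaches 300. */
    __HAL_TIM_SET_COMPARE(&encoderTimer, TIM_CHANNEL_3, 300);
    __HAL_TIM_ENABLE_IT(&encoderTimer, TIM_IT_CC3);
}

/* HAL_TIM_IRQHandler() dispatches the CC3 match here while channel 3 stays in output mode. */
void HAL_TIM_OC_DelayElapsedCallback(TIM_HandleTypeDef *htim)
{
    if (htim->Instance == TIM_ENCODER && htim->Channel == HAL_TIM_ACTIVE_CHANNEL_3)
    {
        // do things here
        __HAL_TIM_SET_COUNTER(htim, 0);       /* reset the encoder count */
    }
}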
I hope you'll know how to help me.
Itay
I'm using an STM32F746ZG with five UARTs. All of them are working fine.
Can someone tell me the procedure for changing the baud rate on a USART once it has already been initialized? I'm using USART6, initialized at 9600 baud. After booting there is no communication on that USART. I want to change the baud rate from 9600 to 57600 or 115200. For this change I called HAL_UART_DeInit() and then MX_USART6_UART_Init_57600(), but it doesn't work.
If I don't change the baud rate, it works fine. But if I change it, I can't receive data through the USART.
If somebody knows the solution, please let me know.
The following is my code.
int main(void)
{
  HAL_Init();
  SystemClock_Config();
  MX_UART7_Init();
  MX_UART8_Init();
  MX_USART2_UART_Init();
  MX_USART3_UART_Init();
  MX_USART6_UART_Init();
}
void MX_USART6_UART_Init(void)
{
  huart6.Instance = USART6;
  huart6.Init.BaudRate = 9600;
  huart6.Init.WordLength = UART_WORDLENGTH_8B;
  huart6.Init.StopBits = UART_STOPBITS_1;
  huart6.Init.Parity = UART_PARITY_NONE;
  huart6.Init.Mode = UART_MODE_TX_RX;
  huart6.Init.HwFlowCtl = UART_HWCONTROL_NONE;
  huart6.Init.OverSampling = UART_OVERSAMPLING_16;
  huart6.Init.OneBitSampling = UART_ONE_BIT_SAMPLE_DISABLE;
  huart6.AdvancedInit.AdvFeatureInit = UART_ADVFEATURE_NO_INIT;
  if (HAL_UART_Init(&huart6) != HAL_OK)
  {
    Error_Handler();
  }
}
void MX_USART6_UART_Init_57600(void)
{
  huart6.Instance = USART6;
  huart6.Init.BaudRate = 57600; // change from 9600 to 57600
  huart6.Init.WordLength = UART_WORDLENGTH_8B;
  huart6.Init.StopBits = UART_STOPBITS_1;
  huart6.Init.Parity = UART_PARITY_NONE;
  huart6.Init.Mode = UART_MODE_TX_RX;
  huart6.Init.HwFlowCtl = UART_HWCONTROL_NONE;
  huart6.Init.OverSampling = UART_OVERSAMPLING_16;
  huart6.Init.OneBitSampling = UART_ONE_BIT_SAMPLE_DISABLE;
  huart6.AdvancedInit.AdvFeatureInit = UART_ADVFEATURE_NO_INIT;
  if (HAL_UART_Init(&huart6) != HAL_OK)
  {
    Error_Handler();
  }
}
int Change_UART(void)
{
  HAL_UART_DeInit(&huart6);
  MX_USART6_UART_Init_57600();
}
I called Change_UART() but it doesn't work.
Your question should really be: how do I change the baud rate using the bloatware HAL?
I do not know.
But it can be achieved in three lines of simple code:
USART6 -> CR1 &= ~(USART_CR1_UE);
USART6 -> BRR = NEWVALUE;
USART6 -> CR1 |= USART_CR1_UE;
To change the baud rate you don't need to reset the UART peripheral, just stop any active transfers (polling/IT/DMA). I use a mix of HAL helpers and direct register access:
huart.Instance->BRR = UART_BRR_SAMPLING8(HAL_RCC_GetPCLK2Freq(), new_baudrate);
where UART_BRR_SAMPLING8() is a macro from stm32f4xx_hal_uart.h and the HAL_RCC_GetPCLK2Freq() function comes from _hal_rcc.c.
This way I don't have to calculate BRR values manually, nor run the whole initialization procedure, which actually toggles GPIO states and thus generates noise on the serial line for whatever is sitting on the other end of it.
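Put together (in the STM32F4 context above), it could look roughly like this; treat it as a sketch, and use the SAMPLING16 variant of the macro and the matching PCLK getter for your UART's bus and oversampling setting:

#include "stm32f4xx_hal.h"   /* UART_BRR_SAMPLING8 lives in stm32f4xx_hal_uart.h */

void uart_set_baudrate(UART_HandleTypeDef *huart, uint32_t new_baudrate)   /* hypothetical helper */
{
    HAL_UART_Abort(huart);   /* stop any polling/IT/DMA transfer first (available in recent HAL versions) */

    /* Recompute BRR for the new rate; use UART_BRR_SAMPLING16 if OVERSAMPLING_16 is configured,
     * and HAL_RCC_GetPCLK1Freq() for UARTs that sit on APB1. */
    huart->Instance->BRR = UART_BRR_SAMPLING8(HAL_RCC_GetPCLK2Freq(), new_baudrate);
}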
You have to abort all running HAL_UART functions, then de-initialize the UART, change the baud rate in the init structure, and initialize it again:
HAL_UART_Abort_IT(&huart1);
HAL_UART_DeInit(&huart1);
huart1.Init.BaudRate = 57600;
if (HAL_UART_Init(&huart1) != HAL_OK) {
  Error_Handler();
}
if (HAL_UART_Receive_IT(&huart1, BUFFER, YOUR_BUFFER_SIZE) != HAL_OK) {
  Error_Handler();
}
Originally I was really excited by P__J__'s simple answer, but it turns out you can't just put the desired baud rate into BRR; the register value has to be derived from the oversampling mode and the clock rate.
I used more or less the same method, but with LL_USART_SetBaudRate to fill the register.
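Roughly like this for the USART6 case from the question (a sketch; it assumes the F7 LL USART driver is available and that USART6 is clocked from APB2, so check the LL function's signature for your HAL version):

#include "stm32f7xx_hal.h"
#include "stm32f7xx_ll_usart.h"   /* LL USART driver, assumed to be part of the project */

extern UART_HandleTypeDef huart6;

void USART6_Change_Baud(uint32_t new_baud)   /* hypothetical helper */
{
    /* Make sure no HAL transfer is in flight before touching the registers. */
    HAL_UART_Abort(&huart6);

    LL_USART_Disable(USART6);
    LL_USART_SetBaudRate(USART6,
                         HAL_RCC_GetPCLK2Freq(),      /* USART6 sits on APB2 */
                         LL_USART_OVERSAMPLING_16,    /* must match huart6.Init.OverSampling */
                         new_baud);
    LL_USART_Enable(USART6);
}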
I'm trying to set up one of the LEDs on the STM3210E-EVAL board as a PWM output so that I can vary its brightness.
I am targeting the red LED, which is on port F, pin 8. I have set up timer 13, which should be tied to that pin for PWM output, but I feel like I am missing a step somewhere. Here is the current function that initializes the pin, sets up the timer, and configures the PWM:
void led_init(void)
{
  TIM_OC_InitTypeDef sConfigOC;
  TIM_HandleTypeDef htim13;

  /* Configure GPIO pins : PF8 */
  __HAL_AFIO_REMAP_TIM13_ENABLE();
  __GPIOF_CLK_ENABLE();
  GPIO_InitStruct.Pin = GPIO_PIN_8;
  GPIO_InitStruct.Mode = GPIO_MODE_AF_PP;
  GPIO_InitStruct.Speed = GPIO_SPEED_LOW;
  HAL_GPIO_Init(GPIOF, &GPIO_InitStruct);

  htim13.Instance = TIM13;
  htim13.Init.Prescaler = (uint32_t)(72000000 / 2000000) - 1;
  htim13.Init.CounterMode = TIM_COUNTERMODE_UP;
  htim13.Init.Period = 700;
  htim13.Init.ClockDivision = TIM_CLOCKDIVISION_DIV1;
  HAL_TIM_Base_Init(&htim13);
  HAL_TIM_PWM_Init(&htim13);

  sConfigOC.OCMode = TIM_OCMODE_PWM1;
  sConfigOC.Pulse = 350;
  sConfigOC.OCPolarity = TIM_OCPOLARITY_HIGH;
  sConfigOC.OCFastMode = TIM_OCFAST_DISABLE;
  sConfigOC.OCNPolarity = TIM_OCNPOLARITY_HIGH;
  sConfigOC.OCNIdleState = TIM_OCNIDLESTATE_RESET;
  sConfigOC.OCIdleState = TIM_OCIDLESTATE_RESET;
  HAL_TIM_PWM_ConfigChannel(&htim13, &sConfigOC, TIM_CHANNEL_1);
  HAL_TIM_PWM_Start(&htim13, TIM_CHANNEL_1);
}
It seems you aren't enabling the timer's clock:
__HAL_RCC_TIM13_CLK_ENABLE()
Did you start your project from an example or from STM32CubeMX? Usually init code like this goes into the stm32f1xx_hal_msp.c file. It's also cleaner to put your PWM pin (PF8) init there.
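For example, the missing clock enable and the PF8 setup could live in a HAL_TIM_Base_MspInit() like this (only a sketch, assuming an XL-density F103 where TIM13 exists, and reusing the names from the question):

/* stm32f1xx_hal_msp.c: called by HAL_TIM_Base_Init() before any TIM13 register access. */
void HAL_TIM_Base_MspInit(TIM_HandleTypeDef *htim)
{
    if (htim->Instance == TIM13)
    {
        __HAL_RCC_TIM13_CLK_ENABLE();       /* the missing timer clock */

        __HAL_RCC_GPIOF_CLK_ENABLE();
        __HAL_RCC_AFIO_CLK_ENABLE();        /* AFIO clock must be on before remapping */
        __HAL_AFIO_REMAP_TIM13_ENABLE();    /* route TIM13_CH1 to PF8 */

        GPIO_InitTypeDef GPIO_InitStruct = {0};
        GPIO_InitStruct.Pin = GPIO_PIN_8;
        GPIO_InitStruct.Mode = GPIO_MODE_AF_PP;
        GPIO_InitStruct.Speed = GPIO_SPEED_LOW;
        HAL_GPIO_Init(GPIOF, &GPIO_InitStruct);
    }
}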
I think you should specify which alternate function you use on the GPIO; in this case it is the PWM output. There should be a function like GPIO_PinAFConfig for that.
I'm trying to configure a wakeup event on the STM32F4 Discovery, and I'm using a slightly modified example from CooCox.
NVIC_InitTypeDef NVIC_InitStructure;
EXTI_InitTypeDef EXTI_InitStructure;
RCC_APB1PeriphClockCmd(RCC_APB1Periph_PWR, ENABLE);
PWR_BackupAccessCmd(ENABLE);
while(RCC_GetFlagStatus(RCC_FLAG_HSERDY) == RESET);
RTC_WriteProtectionCmd(DISABLE);
RCC_RTCCLKConfig(RCC_RTCCLKSource_HSE_Div8);
RCC_RTCCLKCmd(ENABLE);
RTC_WaitForSynchro();
RTC_InitTypeDef rtcinit;
rtcinit.RTC_HourFormat = RTC_HourFormat_24;
rtcinit.RTC_AsynchPrediv = 99;
rtcinit.RTC_SynchPrediv = 9999;
RTC_Init(&rtcinit);
EXTI_ClearITPendingBit(EXTI_Line22);
EXTI_InitStructure.EXTI_Line = EXTI_Line22;
EXTI_InitStructure.EXTI_Mode = EXTI_Mode_Interrupt;
EXTI_InitStructure.EXTI_Trigger = EXTI_Trigger_Rising_Falling;
EXTI_InitStructure.EXTI_LineCmd = ENABLE;
EXTI_Init(&EXTI_InitStructure);
NVIC_InitStructure.NVIC_IRQChannel = RTC_WKUP_IRQn;
NVIC_InitStructure.NVIC_IRQChannelPreemptionPriority = 0;
NVIC_InitStructure.NVIC_IRQChannelSubPriority = 0;
NVIC_InitStructure.NVIC_IRQChannelCmd = ENABLE;
NVIC_Init(&NVIC_InitStructure);
RTC_WakeUpClockConfig(RTC_WakeUpClock_CK_SPRE_16bits);
RTC_SetWakeUpCounter(0x0);
RTC_ITConfig(RTC_IT_WUT, ENABLE);
RTC_WakeUpCmd(ENABLE);
RTC_TimeTypeDef time;
RTC_TimeStructInit(&time);
time.RTC_Hours = 12;
RTC_SetTime(RTC_Format_BIN,&time);
The problem I have is that the interrupt never happens. The RTC itself works just fine.
It seems I forgot to use
RTC_ClearFlag(RTC_FLAG_WUTF);
RTC_ClearITPendingBit(RTC_IT_WUT);
after
RTC_WakeUpCmd(ENABLE);
Now it should work, but the weird problems begin here.
I'm using this library for USB VCP:
https://github.com/xenovacivus/STM32DiscoveryVCP
I noticed that when I remove
USBD_Init(&USB_OTG_dev, USB_OTG_FS_CORE_ID, &USR_desc, &USBD_CDC_cb, &USR_cb);
the wakeup works correctly, but when I use this function the interrupt only happens once, twice, or never (judging by the LED blinks), depending on where I put it (I tested it between the initializations of the other peripherals).
In the file usb_bsp.c, comment out the line
RCC_APB1PeriphResetCmd(RCC_APB1Periph_PWR, ENABLE);
It worked for me. Regards.