STM32 Multi-Channel ADC: unexpected behaviour when channels are unpopulated

I have added ADC functionality to my Nucleo-F446RE development board. 4 channels, DMA enabled, scan and continuous conversion mode enabled, DMA continuous requests enabled, varying sample time per channel. I'll post code at the bottom of this post (all HAL, all done in STM32CubeMX).
I have found some strange behaviour when the channels are unpopulated (i.e. the analog channel pin is left open). All four channels hover at around 0.9V with nothing connected. If I add a 3.3V source to channel 0, it shows 3.3V, but CH1 shows 2.5V, CH2 1.9V and CH3 1.6V - a waterfall effect. The waterfall is the same if I move the 3.3V source to CH1 and leave the rest unpopulated, and it loops back around to CH0.
If I give each channel its own source, they all read correctly, but when unpopulated the channels are influenced by the populated channel. Why is this? Some sources say it is because of the sample-and-hold capacitor and that the fix is to adjust the sampling times, but I have played a lot with the sampling times, from very fast to as slow as possible (I only need to sample the data at 1kHz, and the ADC conversion rate seems to be at least an order of magnitude above this), and it makes no difference. I wondered whether changing the analog channel pin configuration to pull-down would help, but again no change.
I am hoping this isn't something to be too concerned about, as the channels read correctly when populated, but perhaps there is some background influence that I am not seeing even when populated that I want to avoid. I am certain I haven't optimised my circuit, so any advice on that would also be great. There are lots of tutorials and examples online for STM32 ADC with DMA on a single channel, but not so many for multi-channel. I also don't find the ST-provided examples very helpful; they often seem quite inefficient.
ADC definitions
(main clock 180 MHz, APB2 prescaler 2 = 90 MHz; I have also dropped it to a prescaler of 16 (11.25 MHz), which didn't help)
/** Configure the global features of the ADC (Clock, Resolution, Data Alignment and number of conversion)
*/
hadc1.Instance = ADC1;
hadc1.Init.ClockPrescaler = ADC_CLOCK_SYNC_PCLK_DIV8;
hadc1.Init.Resolution = ADC_RESOLUTION_12B;
hadc1.Init.ScanConvMode = ENABLE;
hadc1.Init.ContinuousConvMode = ENABLE;
hadc1.Init.DiscontinuousConvMode = DISABLE;
hadc1.Init.ExternalTrigConvEdge = ADC_EXTERNALTRIGCONVEDGE_NONE;
hadc1.Init.ExternalTrigConv = ADC_SOFTWARE_START;
hadc1.Init.DataAlign = ADC_DATAALIGN_RIGHT;
hadc1.Init.NbrOfConversion = 4;
hadc1.Init.DMAContinuousRequests = ENABLE;
hadc1.Init.EOCSelection = ADC_EOC_SINGLE_CONV;
if (HAL_ADC_Init(&hadc1) != HAL_OK)
{
Error_Handler();
}
/** Configure for the selected ADC regular channel its corresponding rank in the sequencer and its sample time.
*/
sConfig.Channel = ADC_CHANNEL_0;
sConfig.Rank = 1;
sConfig.SamplingTime = ADC_SAMPLETIME_480CYCLES;
if (HAL_ADC_ConfigChannel(&hadc1, &sConfig) != HAL_OK)
{
Error_Handler();
}
/** Configure for the selected ADC regular channel its corresponding rank in the sequencer and its sample time.
*/
sConfig.Channel = ADC_CHANNEL_1;
sConfig.Rank = 2;
sConfig.SamplingTime = ADC_SAMPLETIME_112CYCLES;
if (HAL_ADC_ConfigChannel(&hadc1, &sConfig) != HAL_OK)
{
Error_Handler();
}
/** Configure for the selected ADC regular channel its corresponding rank in the sequencer and its sample time.
*/
sConfig.Channel = ADC_CHANNEL_4;
sConfig.Rank = 3;
sConfig.SamplingTime = ADC_SAMPLETIME_56CYCLES;
if (HAL_ADC_ConfigChannel(&hadc1, &sConfig) != HAL_OK)
{
Error_Handler();
}
/** Configure for the selected ADC regular channel its corresponding rank in the sequencer and its sample time.
*/
sConfig.Channel = ADC_CHANNEL_8;
sConfig.Rank = 4;
sConfig.SamplingTime = ADC_SAMPLETIME_15CYCLES;
if (HAL_ADC_ConfigChannel(&hadc1, &sConfig) != HAL_OK)
{
Error_Handler();
}
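As a rough check of the scan rate these settings imply (assuming ADCCLK = APB2/8 = 11.25 MHz and that each 12-bit conversion costs its sampling time plus 12 ADC clock cycles, per the F446 reference manual):
/* one full 4-channel scan:
   (480+12) + (112+12) + (56+12) + (15+12) = 711 ADC clock cycles
   711 / 11.25 MHz ~= 63 us ~= 15.8 kHz scan rate, well above the 1 kHz target */
#define ADCCLK_HZ        11250000UL
#define ADC_SCAN_CYCLES  ((480UL + 12UL) + (112UL + 12UL) + (56UL + 12UL) + (15UL + 12UL))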
GPIO and DMA definitions
__HAL_RCC_ADC1_CLK_ENABLE();
__HAL_RCC_GPIOA_CLK_ENABLE();
__HAL_RCC_GPIOB_CLK_ENABLE();
/**ADC1 GPIO Configuration
PA0-WKUP ------> ADC1_IN0
PA1 ------> ADC1_IN1
PA4 ------> ADC1_IN4
PB0 ------> ADC1_IN8
*/
GPIO_InitStruct.Pin = analog1_Pin|analog2_Pin|analog3_Pin;
GPIO_InitStruct.Mode = GPIO_MODE_ANALOG;
GPIO_InitStruct.Pull = GPIO_NOPULL;
HAL_GPIO_Init(GPIOA, &GPIO_InitStruct);
GPIO_InitStruct.Pin = analog4_Pin;
GPIO_InitStruct.Mode = GPIO_MODE_ANALOG;
GPIO_InitStruct.Pull = GPIO_NOPULL;
HAL_GPIO_Init(analog4_GPIO_Port, &GPIO_InitStruct);
/* ADC1 DMA Init */
/* ADC1 Init */
hdma_adc1.Instance = DMA2_Stream0;
hdma_adc1.Init.Channel = DMA_CHANNEL_0;
hdma_adc1.Init.Direction = DMA_PERIPH_TO_MEMORY;
hdma_adc1.Init.PeriphInc = DMA_PINC_DISABLE;
hdma_adc1.Init.MemInc = DMA_MINC_ENABLE;
hdma_adc1.Init.PeriphDataAlignment = DMA_PDATAALIGN_WORD;
hdma_adc1.Init.MemDataAlignment = DMA_MDATAALIGN_WORD;
hdma_adc1.Init.Mode = DMA_NORMAL;
hdma_adc1.Init.Priority = DMA_PRIORITY_MEDIUM;
hdma_adc1.Init.FIFOMode = DMA_FIFOMODE_DISABLE;
if (HAL_DMA_Init(&hdma_adc1) != HAL_OK)
{
Error_Handler();
}
__HAL_LINKDMA(adcHandle,DMA_Handle,hdma_adc1);
/* ADC1 interrupt Init */
HAL_NVIC_SetPriority(ADC_IRQn, 0, 0);
HAL_NVIC_EnableIRQ(ADC_IRQn);
Analog read code
(analog_scale is called once per channel at 1 kHz)
#include "dma.h"
#include "adc.h"
#include "analog.h"
volatile uint32_t analogBuffer[4];
void analog_init()
{
/* one DMA transfer covers the whole 4-channel scan sequence */
HAL_ADC_Start_DMA(&hadc1, (uint32_t *)&analogBuffer, 4);
}
uint16_t analog_scale(char ch)
{
/* raw 12-bit reading -> millivolts (3.3 V reference) */
return (uint16_t)(((analogBuffer[ch] * 3.3) / 4096.0) * 1000.0);
}
void HAL_ADC_ConvHalfCpltCallback(ADC_HandleTypeDef* hadc)
{
}
void HAL_ADC_ConvCpltCallback(ADC_HandleTypeDef* hadc)
{
/* DMA is in normal mode, so restart the transfer after every completed scan */
HAL_ADC_Start_DMA(&hadc1, (uint32_t *)&analogBuffer, 4);
HAL_GPIO_TogglePin(test4_GPIO_Port, test4_Pin);
}

That's not a software issue; it's normal hardware behaviour.
If ADC pins are left floating, they "gather" stray voltages, e.g. from adjacent sample-and-hold capacitors, from the voltage reference, or from any voltage induced in the PCB traces or attached cables.
The "waterfall" effect you see is simply your input voltage on channel 0 or 1 coupling from one channel to the next through the sample-and-hold capacitor and the multiplexer's parasitic capacitances: a small amount of charge is transferred from one signal path to the next as the sequencer switches through the channels, and with the inputs open that charge has no path to flow except through the ADC, resulting in a pseudo-voltage reading.
To prevent this, tie all unused channels to ground using appropriate pull-down resistors (10 kOhm should be fine), or, if you want a software-only solution, mask the unused channels out (multiply them by 0).
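A minimal sketch of that software-only masking, reusing the analogBuffer from the question; the channel_populated table here is just an example and has to match your actual wiring:
extern volatile uint32_t analogBuffer[4];                   /* from the question's analog code */
static const uint8_t channel_populated[4] = { 1, 0, 0, 0 }; /* example: only rank 1 wired      */
uint16_t analog_read_mv(uint8_t ch)
{
    if (ch >= 4 || !channel_populated[ch])
        return 0;                                            /* unpopulated channel reads as 0 */
    return (uint16_t)((analogBuffer[ch] * 3300UL) / 4096UL); /* raw 12-bit value -> millivolts */
}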

Related

STM32G030F6 starts program in Flash and after a while it ends up unintentionally in System Memory

My board has the PA14-BOOT0 pin connected to SWDCLK and PA13 to SWDIO.
The BOOT0 pin is not pulled up or down.
According to the documentation, when the nBOOT_SEL bit = 1 and the nBOOT0 bit = 1, the BOOT0 pin is ignored and the microcontroller boots from main flash. This is the factory setting.
The flash option bytes (address 0x1FFF7800) have the default value of 0xDFFFE1AA, which I have confirmed (nBOOT_SEL = 1, nBOOT0 = 1).
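For reference, a minimal sketch of how that confirmation can be done at run time, assuming the STM32G0 CMSIS device header (FLASH->OPTR mirrors the loaded option bytes):
#include "stm32g0xx.h"
int boot_pin_should_be_ignored(void)
{
    uint32_t optr = FLASH->OPTR;               /* live copy of the loaded option bytes */
    return (optr & FLASH_OPTR_nBOOT_SEL) != 0  /* nBOOT_SEL = 1                        */
        && (optr & FLASH_OPTR_nBOOT0) != 0;    /* nBOOT0 = 1                           */
}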
I'm running very basic code:
int main(void)
{
/* Configure the system clock */
SystemClock_Config();
volatile int cnt = 0;
while (1)
{
++cnt;
}
}
/**
* @brief System Clock Configuration
* @retval None
*/
void SystemClock_Config(void)
{
RCC_OscInitTypeDef RCC_OscInitStruct = {0};
RCC_ClkInitTypeDef RCC_ClkInitStruct = {0};
/** Configure the main internal regulator output voltage
*/
HAL_PWREx_ControlVoltageScaling(PWR_REGULATOR_VOLTAGE_SCALE1);
/** Initializes the RCC Oscillators according to the specified parameters
* in the RCC_OscInitTypeDef structure.
*/
RCC_OscInitStruct.OscillatorType = RCC_OSCILLATORTYPE_HSI;
RCC_OscInitStruct.HSIState = RCC_HSI_ON;
RCC_OscInitStruct.HSIDiv = RCC_HSI_DIV1;
RCC_OscInitStruct.HSICalibrationValue = RCC_HSICALIBRATION_DEFAULT;
RCC_OscInitStruct.PLL.PLLState = RCC_PLL_NONE;
if (HAL_RCC_OscConfig(&RCC_OscInitStruct) != HAL_OK)
{
Error_Handler();
}
/** Initializes the CPU, AHB and APB buses clocks
*/
RCC_ClkInitStruct.ClockType = RCC_CLOCKTYPE_HCLK|RCC_CLOCKTYPE_SYSCLK
|RCC_CLOCKTYPE_PCLK1;
RCC_ClkInitStruct.SYSCLKSource = RCC_SYSCLKSOURCE_HSI;
RCC_ClkInitStruct.AHBCLKDivider = RCC_SYSCLK_DIV1;
RCC_ClkInitStruct.APB1CLKDivider = RCC_HCLK_DIV1;
if (HAL_RCC_ClockConfig(&RCC_ClkInitStruct, FLASH_LATENCY_0) != HAL_OK)
{
Error_Handler();
}
}
When I step through the code or run to a breakpoint at ++cnt multiple times, everything works fine.
But when I let the program run freely and pause after a second, the program counter shows an address of 0x1FFFxxxx, which is somewhere in System Memory (the bootloader), and the debug session fails at that point.
I have checked the registers for the reset reason, I have set breakpoints in every interrupt handler I could find in the startup file, and I have checked the VTOR for the proper address, but I didn't manage to catch the moment the processor jumps to System Memory.
If I pull the BOOT0 pin down with a 100k resistor, the program starts to work normally. I can run the program freely, then pause, and I will be at the ++cnt position as I expect.
The question is WHY do I need to pull BOOT0 down when the documentation clearly states that the BOOT0 pin is IGNORED when nBOOT_SEL = 1 and nBOOT0 = 1? What kind of sorcery ;) is going on here that makes my program jump to System Memory in the middle of debugging?

HAL_UART_Transmit_IT is not working (no interrupt occurs)

I am trying to do UART communication. However, the HAL_UART_TxCpltCallback function is never called.
Also, USART1_IRQHandler is not executed. I think there is a setting that enables the UART and its interrupt, but
I don't know which one. The source code is as follows. If you need more information, please tell me. Thanks.
/// main source
HW_UART_Init( );
uint8_t init_ment[60] = "\n\rWelcome to RF Test program\n\r";
strcpy(uart1_txbuffer,init_ment);
HAL_UART_Transmit_IT(&huart1, uart1_txbuffer, 30);
HAL_Delay(50);
InitQueue(&queue);
HAL_UART_Receive_IT(&huart1, uart1_rxbuffer, 1);
HAL_Delay(1);
void HW_UART_Init( void )
{
/* USER CODE END USART1_Init 1 */
huart1.Instance = USART1;
huart1.Init.BaudRate = 19200;
huart1.Init.WordLength = UART_WORDLENGTH_8B;
huart1.Init.StopBits = UART_STOPBITS_1;
huart1.Init.Parity = UART_PARITY_NONE;
huart1.Init.Mode = UART_MODE_TX_RX;
huart1.Init.HwFlowCtl = UART_HWCONTROL_NONE;
huart1.Init.OverSampling = UART_OVERSAMPLING_16;
huart1.Init.OneBitSampling = UART_ONE_BIT_SAMPLE_DISABLE;
huart1.AdvancedInit.AdvFeatureInit = UART_ADVFEATURE_NO_INIT;
if (HAL_UART_Init(&huart1) != HAL_OK)
{
Error_Handler();
}
/* USER CODE BEGIN USART1_Init 2 */
}
If you are using CubeMX to initialize your MCU, there is an "NVIC Settings" tab in the USART section where you can enable the USART1 global interrupt.
It adds these two lines to your code, in the stm32f4xx_hal_msp.c file.
HAL_NVIC_SetPriority(USART1_IRQn, 0, 0);
HAL_NVIC_EnableIRQ(USART1_IRQn);
I suggest you use CubeMX.
If you do your own configuration without CubeMX, first check whether you have added the necessary UART files to your project. STM32Cube defines the IRQ handlers inside the "xxxx_it.c" file, so possibly you don't have that. The necessary NVIC configuration is also done by CubeMX.
I suggest you use CubeMX; then you can find what is wrong here. In your code snippet there seems to be nothing wrong.
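For completeness, a minimal sketch of the pieces CubeMX normally generates for interrupt-driven UART, assuming the usual file layout and the huart1 handle from the question:
/* In HAL_UART_MspInit() (stm32xxxx_hal_msp.c): enable the USART1 interrupt in the NVIC */
HAL_NVIC_SetPriority(USART1_IRQn, 0, 0);
HAL_NVIC_EnableIRQ(USART1_IRQn);
/* In stm32xxxx_it.c: without this handler the default handler runs instead
   and HAL_UART_TxCpltCallback() is never reached */
extern UART_HandleTypeDef huart1;   /* defined wherever HW_UART_Init() lives */
void USART1_IRQHandler(void)
{
    HAL_UART_IRQHandler(&huart1);
}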

How to change the UART baud rate at run time on an STM32 board?

I'm using an STM32F746ZG and five UARTs. All the UARTs are working fine.
Can someone tell me the procedure to change the baud rate on a USART once it has already been initialized? I'm using USART6, initialized with a 9600 baud rate. After booting, there is no communication on that USART yet. I want to change the baud rate from 9600 to 57600 or 115200. To do this I called HAL_UART_DeInit() and MX_USART6_UART_Init_57600(), but it doesn't work.
If I don't change the baud rate, it works fine. But if I change it, I can't receive data through the USART.
If somebody knows the solution, please let me know.
The following is my code.
int main(void)
{
HAL_Init();
SystemClock_Config();
MX_UART7_Init();
MX_UART8_Init();
MX_USART2_UART_Init();
MX_USART3_UART_Init();
MX_USART6_UART_Init();
}
void MX_USART6_UART_Init(void)
{
huart6.Instance = USART6;
huart6.Init.BaudRate = 9600;
huart6.Init.WordLength = UART_WORDLENGTH_8B;
huart6.Init.StopBits = UART_STOPBITS_1;
huart6.Init.Parity = UART_PARITY_NONE;
huart6.Init.Mode = UART_MODE_TX_RX;
huart6.Init.HwFlowCtl = UART_HWCONTROL_NONE;
huart6.Init.OverSampling = UART_OVERSAMPLING_16;
huart6.Init.OneBitSampling = UART_ONE_BIT_SAMPLE_DISABLE;
huart6.AdvancedInit.AdvFeatureInit = UART_ADVFEATURE_NO_INIT;
if (HAL_UART_Init(&huart6) != HAL_OK)
{
Error_Handler();
}
}
void MX_USART6_UART_Init_57600(void)
{
huart6.Instance = USART6;
huart6.Init.BaudRate = 57600; // change from 9600 to 57600
huart6.Init.WordLength = UART_WORDLENGTH_8B;
huart6.Init.StopBits = UART_STOPBITS_1;
huart6.Init.Parity = UART_PARITY_NONE;
huart6.Init.Mode = UART_MODE_TX_RX;
huart6.Init.HwFlowCtl = UART_HWCONTROL_NONE;
huart6.Init.OverSampling = UART_OVERSAMPLING_16;
huart6.Init.OneBitSampling = UART_ONE_BIT_SAMPLE_DISABLE;
huart6.AdvancedInit.AdvFeatureInit = UART_ADVFEATURE_NO_INIT;
if (HAL_UART_Init(&huart6) != HAL_OK)
{
Error_Handler();
}
}
int Change_UART(void)
{
HAL_UART_DeInit(&huart6);
MX_USART6_UART_Init_57600();
}
I called Change_UART() but it doesn't work.
Your question should be: how do I change the baud rate using the bloatware HAL?
I do not know.
But it can be achieved in 3 lines of simple code.
USART6 -> CR1 &= ~(USART_CR1_UE);
USART6 -> BRR = NEWVALUE;
USART6 -> CR1 |= USART_CR1_UE;
To change the baud rate you don't need to reset the UART peripheral, just stop any active transfers (polling/IT/DMA). I use a mix of both approaches:
huart.Instance->BRR = UART_BRR_SAMPLING8(HAL_RCC_GetPCLK2Freq(), new_baudrate);
where UART_BRR_SAMPLING8() is a macro from stm32f4xx_hal_uart.h and the HAL_RCC_GetPCLK2Freq() function comes from _hal_rcc.c.
This way I don't have to calculate BRR values manually, nor execute the whole initialization procedure, which actually toggles GPIO states and thus generates noise on the serial line for whatever is sitting on the other end of it.
You have to abort all running HAL_UART functions, then de-initialize the UART, change the baud rate init value and initialize it again:
HAL_UART_Abort_IT(&huart1);
HAL_UART_DeInit(&huart1);
huart1.Init.BaudRate = 57600;
if (HAL_UART_Init(&huart1) != HAL_OK) {
Error_Handler();
}
if (HAL_UART_Receive_IT(&huart1, BUFFER, YOUR_BUFFER_SIZE) != HAL_OK) {
Error_Handler();
}
Originally I was really excited by P__J__'s simple answer, but it turns out you can't just put the desired baud rate into BRR - it has to be a function of the oversampling setting and the clock rate.
I used more or less the same method, but with LL_USART_SetBaudRate to fill the register.
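A minimal sketch of that LL-based approach, assuming an STM32F7 target with USART6 clocked from APB2 (the default) and 16x oversampling; only the baud rate register is touched:
#include "stm32f7xx_hal.h"
#include "stm32f7xx_ll_usart.h"
static void change_usart6_baudrate(uint32_t new_baud)
{
    LL_USART_Disable(USART6);                    /* UE = 0 while BRR is rewritten        */
    LL_USART_SetBaudRate(USART6,
                         HAL_RCC_GetPCLK2Freq(), /* USART6 kernel clock (assumed PCLK2)  */
                         LL_USART_OVERSAMPLING_16,
                         new_baud);
    LL_USART_Enable(USART6);
}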

STM32 SysTick counting twice as fast as it should

I have an STM32L476RC Nucleo board that I am using to learn STM32. I am using the STM32Cube HAL and AC6 System Workbench to develop with. I am trying to stay away from CubeMX, as my goal is more about learning than just getting something to work.
The problem I am having is that when I try to set up the SysTick timer using the code below, it counts twice as fast as it should.
HAL_SYSTICK_Config(HAL_RCC_GetHCLKFreq()/1000);
However, if I just leave it at the default from power-up, it counts at the correct speed.
I have used CubeMX to generate the following clock setup and pasted it directly into a fresh project created in System Workbench; however, the SysTick counter still seems to count twice as fast as it should. The project generated by CubeMX itself seems to run just fine, however.
/** System Clock Configuration
*/
void SystemClock_Config(void)
{
RCC_OscInitTypeDef RCC_OscInitStruct;
RCC_ClkInitTypeDef RCC_ClkInitStruct;
RCC_PeriphCLKInitTypeDef PeriphClkInit;
/**Initializes the CPU, AHB and APB busses clocks
*/
RCC_OscInitStruct.OscillatorType = RCC_OSCILLATORTYPE_HSI;
RCC_OscInitStruct.HSIState = RCC_HSI_ON;
RCC_OscInitStruct.HSICalibrationValue = 16;
RCC_OscInitStruct.PLL.PLLState = RCC_PLL_ON;
RCC_OscInitStruct.PLL.PLLSource = RCC_PLLSOURCE_HSI;
RCC_OscInitStruct.PLL.PLLM = 1;
RCC_OscInitStruct.PLL.PLLN = 10;
RCC_OscInitStruct.PLL.PLLP = RCC_PLLP_DIV7;
RCC_OscInitStruct.PLL.PLLQ = RCC_PLLQ_DIV2;
RCC_OscInitStruct.PLL.PLLR = RCC_PLLR_DIV2;
if (HAL_RCC_OscConfig(&RCC_OscInitStruct) != HAL_OK)
{
//Error_Handler();
}
/**Initializes the CPU, AHB and APB busses clocks
*/
RCC_ClkInitStruct.ClockType = RCC_CLOCKTYPE_HCLK|RCC_CLOCKTYPE_SYSCLK
|RCC_CLOCKTYPE_PCLK1|RCC_CLOCKTYPE_PCLK2;
RCC_ClkInitStruct.SYSCLKSource = RCC_SYSCLKSOURCE_PLLCLK;
RCC_ClkInitStruct.AHBCLKDivider = RCC_SYSCLK_DIV1;
RCC_ClkInitStruct.APB1CLKDivider = RCC_HCLK_DIV1;
RCC_ClkInitStruct.APB2CLKDivider = RCC_HCLK_DIV1;
if (HAL_RCC_ClockConfig(&RCC_ClkInitStruct, FLASH_LATENCY_4) != HAL_OK)
{
//Error_Handler();
}
PeriphClkInit.PeriphClockSelection = RCC_PERIPHCLK_USART2;
PeriphClkInit.Usart2ClockSelection = RCC_USART2CLKSOURCE_PCLK1;
if (HAL_RCCEx_PeriphCLKConfig(&PeriphClkInit) != HAL_OK)
{
//Error_Handler();
}
/**Configure the main internal regulator output voltage
*/
if (HAL_PWREx_ControlVoltageScaling(PWR_REGULATOR_VOLTAGE_SCALE1) != HAL_OK)
{
//Error_Handler();
}
/**Configure the Systick interrupt time
*/
HAL_SYSTICK_Config(HAL_RCC_GetHCLKFreq()/1000);
/**Configure the Systick
*/
HAL_SYSTICK_CLKSourceConfig(SYSTICK_CLKSOURCE_HCLK);
/* SysTick_IRQn interrupt configuration */
HAL_NVIC_SetPriority(SysTick_IRQn, 0, 0);
}
There must be something I am missing. Maybe the tick counter is somehow configured elsewhere, and calling HAL_SYSTICK_Config(HAL_RCC_GetHCLKFreq()/1000); is just setting another tick counter? I have no idea! Please help, I am completely lost as to what is going on!
I am measuring the tick speed by simply flashing an LED and measuring the frequency on a logic analyser:
if (HAL_GetTick() - LEDstopwatch > 1000)
{
// Toggle the LED
//BSP_LED_Toggle(LED2);
HAL_GPIO_TogglePin(GPIOC, GPIO_PIN_10);
// Reset the stopwatch
LEDstopwatch = HAL_GetTick();
}
The full code for my test project is here:
https://github.com/c-herring/STM32L476_Nucleo_FirstTest
Thanks!
In the SystemClock_Config function you copied, STM32CubeMX initializes the system clock to 80 MHz. To do this, it uses the HSI (16 MHz) as the PLL input, divides it by 1 (PLLM), multiplies it by 10 (PLLN) and finally divides it by 2 (PLLR).
When you don't use STM32CubeMX and create a project from scratch, the clock is initialized in SystemInit in system_stm32l4xx.c. SystemInit is called before main in the startup file (startup_stm32l476xx.S). According to the comments, SystemInit initializes the clock to 40 MHz using the MSI (4 MHz) as the PLL input.
The system clock is twice as fast, therefore you see the difference in the SysTick.
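A quick way to confirm this at run time, as a hedged sketch using standard CMSIS/HAL calls, is to recompute the clock just before configuring SysTick:
SystemCoreClockUpdate();               /* refresh CMSIS SystemCoreClock from the RCC registers */
uint32_t hclk = HAL_RCC_GetHCLKFreq(); /* should now report the real HCLK (80 vs 40 MHz here)  */
HAL_SYSTICK_Config(hclk / 1000U);      /* 1 ms tick derived from the measured clock            */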

How to configure the STM32F103 for CAN and remap the pins to PB8 and PB9

I am trying to, at this stage, simply send a transmission using CAN on the STM32F103V8T6. The chip sits on a board that was customized by my company. Looking at the schematic I see that the CAN Tx and Rx pins were remapped to PB9 (Tx) and PB8 (Rx). All that being said, I have been able to use LoopBack mode successfully (using the latest example from ST, "V3.5.0") but have NOT been able to get Normal mode working. Can someone please let me know if they see an obvious flaw in my initial configuration? I have included only the code related to configuration and left out the transmission function call.
int main (void)
{
RCC_APB2PeriphClockCmd(RCC_APB2Periph_AFIO, ENABLE);
RCC_APB2PeriphClockCmd(RCC_APB2Periph_GPIOB, ENABLE);
RCC_APB1PeriphClockCmd(RCC_APB1Periph_CAN1, ENABLE);
NVIC_InitTypeDef NVIC_InitStructure;
NVIC_InitStructure.NVIC_IRQChannel = USB_LP_CAN1_RX0_IRQn;
NVIC_InitStructure.NVIC_IRQChannel = CAN1_RX0_IRQn;
NVIC_InitStructure.NVIC_IRQChannelPreemptionPriority = 0;
NVIC_InitStructure.NVIC_IRQChannelSubPriority = 0;
NVIC_InitStructure.NVIC_IRQChannelCmd = ENABLE;
NVIC_Init(&NVIC_InitStructure);
GPIO_InitTypeDef GPIO_InitStructure;
/* Configure CAN pin: RX */
GPIO_InitStructure.GPIO_Pin = GPIO_Pin_8;
GPIO_InitStructure.GPIO_Mode = GPIO_Mode_IPU;
GPIO_Init(GPIOB, &GPIO_InitStructure);
/* Configure CAN pin: TX */
GPIO_InitStructure.GPIO_Pin = GPIO_Pin_9;
GPIO_InitStructure.GPIO_Mode = GPIO_Mode_AF_PP;
GPIO_Init(GPIOB, &GPIO_InitStructure);
// Remap2 is for PB8 and PB9
GPIO_PinRemapConfig(GPIO_Remap2_CAN1 , ENABLE);
}
Here is my transmit function
void CAN_TransmitMyMsg(void)
{
CAN_InitTypeDef CAN_InitStructure;
CAN_FilterInitTypeDef CAN_FilterInitStructure;
CanTxMsg TxMessage;
uint32_t i = 0;
uint8_t TransmitMailbox = 0;
uint8_t status = 0;
/* CAN register init */
CAN_DeInit(CANx);
CAN_StructInit(&CAN_InitStructure);
/* CAN cell init */
CAN_InitStructure.CAN_TTCM=DISABLE;
CAN_InitStructure.CAN_ABOM=DISABLE;
CAN_InitStructure.CAN_AWUM=DISABLE;
CAN_InitStructure.CAN_NART=DISABLE;
CAN_InitStructure.CAN_RFLM=DISABLE;
CAN_InitStructure.CAN_TXFP=DISABLE;
CAN_InitStructure.CAN_Mode=CAN_Mode_Normal;
/* Baudrate = 125kbps*/
CAN_InitStructure.CAN_SJW=CAN_SJW_1tq;
CAN_InitStructure.CAN_BS1=CAN_BS1_2tq;
CAN_InitStructure.CAN_BS2=CAN_BS2_3tq;
CAN_InitStructure.CAN_Prescaler=48;
CAN_Init(CANx, &CAN_InitStructure);
/* CAN filter init */
CAN_FilterInitStructure.CAN_FilterNumber=0;
CAN_FilterInitStructure.CAN_FilterMode=CAN_FilterMode_IdMask;
CAN_FilterInitStructure.CAN_FilterScale=CAN_FilterScale_32bit;
CAN_FilterInitStructure.CAN_FilterIdHigh=0x0000;
CAN_FilterInitStructure.CAN_FilterIdLow=0x0000;
CAN_FilterInitStructure.CAN_FilterMaskIdHigh=0x0000;
CAN_FilterInitStructure.CAN_FilterMaskIdLow=0x0000;
CAN_FilterInitStructure.CAN_FilterFIFOAssignment=0;
CAN_FilterInitStructure.CAN_FilterActivation=ENABLE;
CAN_FilterInit(&CAN_FilterInitStructure);
/* transmit */
TxMessage.StdId=0x11;
TxMessage.RTR=CAN_RTR_DATA;
TxMessage.IDE=CAN_ID_STD;
TxMessage.DLC=2;
TxMessage.Data[0]=0xCA;
TxMessage.Data[1]=0xFE;
TransmitMailbox=CAN_Transmit(CANx, &TxMessage);
//wait until CAN transmission is OK
i = 0;
while((status != CANTXOK) && (i != 0xFFFF))
{
status = CAN_TransmitStatus(CANx, TransmitMailbox);
i++;
}
}
This resource has been helpful, but ultimately insufficient.
STM32F103 microcontroller CAN messages
Thanks!
Daniel
I had similar problems. My problem was a bad selection of the comm parameters (time quanta and prescaler).
I debugged it this way:
I checked the RS pin of the CAN transceiver (I used an MCP2551 I/P).
I switched it to loopback mode and checked with an oscilloscope the output signal on the TX channel and on CANH to CANL (whether the output is configured properly).
I looked at the RCC configuration - the prescaler of the APB1 clock had been changed to 2 (I expected 4).
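As a quick check of the comm parameters in the question: assuming the bxCAN clock (APB1) runs at the usual 36 MHz, the configured timing works out to the intended bit rate:
/* bit time = sync(1) + BS1(2) + BS2(3) = 6 time quanta; with prescaler 48:
   36 MHz / 48 / 6 = 125 kbit/s, matching the "Baudrate = 125kbps" comment in the init code */
#define CAN_PCLK_HZ  36000000UL  /* assumed APB1 clock */
#define CAN_BITRATE  (CAN_PCLK_HZ / 48UL / (1UL + 2UL + 3UL))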
For a successful transmission, the CAN controller expects an 'acknowledge' bit, which is part of the CAN protocol. If your CAN monitor (by NI) is in listen-only mode, there will be no acknowledge bit(!), and thus you don't get an indication of a successful transmission at the sending end.
GPIO_Remap2_CAN1 remaps the CAN1 peripheral to PD0 and PD1, not PB8 and PB9. You want GPIO_Remap1_CAN1.
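A minimal sketch of the PB8/PB9 variant along those lines (Standard Peripheral Library, same style as the question's code):
RCC_APB2PeriphClockCmd(RCC_APB2Periph_AFIO | RCC_APB2Periph_GPIOB, ENABLE);
GPIO_InitTypeDef GPIO_InitStructure;
/* CAN1_RX on PB8 */
GPIO_InitStructure.GPIO_Pin = GPIO_Pin_8;
GPIO_InitStructure.GPIO_Mode = GPIO_Mode_IPU;
GPIO_Init(GPIOB, &GPIO_InitStructure);
/* CAN1_TX on PB9 */
GPIO_InitStructure.GPIO_Pin = GPIO_Pin_9;
GPIO_InitStructure.GPIO_Mode = GPIO_Mode_AF_PP;
GPIO_InitStructure.GPIO_Speed = GPIO_Speed_50MHz;
GPIO_Init(GPIOB, &GPIO_InitStructure);
/* Remap1 puts CAN1 on PB8/PB9 (Remap2 would be PD0/PD1) */
GPIO_PinRemapConfig(GPIO_Remap1_CAN1, ENABLE);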
The Standard Peripheral Library is deprecated, but since I couldn't find any better examples, I used your non-working code as a base.
Initially I had the same behaviour: it worked only in loopback.
After removing the remapping to pins PB8 and PB9 and setting high-speed GPIO for the TX pin, it started working. Here are my GPIO settings for PA11 and PA12:
RCC_APB2PeriphClockCmd(RCC_APB2Periph_GPIOA, ENABLE);
/* Configure CAN pin: RX */
GPIO_InitTypeDef GPIO_InitStructure;
GPIO_InitStructure.GPIO_Pin = GPIO_Pin_11;
GPIO_InitStructure.GPIO_Mode = GPIO_Mode_IPU;
GPIO_Init(GPIOA, &GPIO_InitStructure);
/* Configure CAN pin: TX */
GPIO_InitStructure.GPIO_Pin = GPIO_Pin_12;
GPIO_InitStructure.GPIO_Mode = GPIO_Mode_AF_PP;
GPIO_InitStructure.GPIO_Speed = GPIO_Speed_50MHz;
GPIO_Init(GPIOA, &GPIO_InitStructure);
The rest I kept the same (except changing CANx to CAN1).