I have a simple program that uses the ADC to convert 2 inputs, with DMA in circular mode. Everything works great. The only problem I have is that in order to reflash the STM32, I first have to erase the chip. This only happens when I set the ADC's DMA to circular mode.
Hopefully someone can help with this. Thanks!
Here is the code for setting up the DMA/ADC:
void HAL_ADC_MspInit(ADC_HandleTypeDef *hadc)
{
GPIO_InitTypeDef GPIO_InitStruct = {0};
if (hadc->Instance == ADC1)
{
__HAL_RCC_ADC1_CLK_ENABLE();
GPIO_InitStruct.Pin = GPIO_PIN_0|GPIO_PIN_1;
GPIO_InitStruct.Mode = GPIO_MODE_ANALOG;
GPIO_InitStruct.Pull = GPIO_NOPULL;
HAL_GPIO_Init(GPIOB, &GPIO_InitStruct);
hdma_adc1.Instance = DMA2_Stream0;
hdma_adc1.Init.Channel = DMA_CHANNEL_0;
hdma_adc1.Init.Direction = DMA_PERIPH_TO_MEMORY;
hdma_adc1.Init.PeriphInc = DMA_PINC_DISABLE;
hdma_adc1.Init.MemInc = DMA_MINC_ENABLE;
hdma_adc1.Init.PeriphDataAlignment = DMA_PDATAALIGN_HALFWORD;
hdma_adc1.Init.MemDataAlignment = DMA_MDATAALIGN_HALFWORD;
hdma_adc1.Init.Mode = DMA_CIRCULAR;
hdma_adc1.Init.Priority = DMA_PRIORITY_LOW;
hdma_adc1.Init.FIFOMode = DMA_FIFOMODE_ENABLE;
hdma_adc1.Init.FIFOThreshold = DMA_FIFO_THRESHOLD_HALFFULL;
if (HAL_DMA_Init(&hdma_adc1) != HAL_OK)
{
Error_Handler();
}
__HAL_LINKDMA(hadc,DMA_Handle,hdma_adc1);
}
}
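The DMA controller clock, the DMA interrupt and the GPIOB clock are not enabled in this function; CubeMX normally generates that elsewhere (MX_GPIO_Init() and a separate MX_DMA_Init()), presumably something like the sketch below, which is not taken from the original post:
static void MX_DMA_Init(void)
{
  /* DMA controller clock enable */
  __HAL_RCC_DMA2_CLK_ENABLE();
  /* DMA2_Stream0 interrupt configuration */
  HAL_NVIC_SetPriority(DMA2_Stream0_IRQn, 0, 0);
  HAL_NVIC_EnableIRQ(DMA2_Stream0_IRQn);
}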
void MX_ADC1_Init(void)
{
ADC_ChannelConfTypeDef sConfig = {0};
hadc1.Instance = ADC1;
hadc1.Init.ClockPrescaler = ADC_CLOCK_SYNC_PCLK_DIV4;
hadc1.Init.Resolution = ADC_RESOLUTION_12B;
hadc1.Init.ScanConvMode = ENABLE;
hadc1.Init.ContinuousConvMode = ENABLE;
hadc1.Init.DiscontinuousConvMode = DISABLE;
hadc1.Init.ExternalTrigConvEdge = ADC_EXTERNALTRIGCONVEDGE_NONE;
hadc1.Init.ExternalTrigConv = ADC_SOFTWARE_START;
hadc1.Init.DataAlign = ADC_DATAALIGN_RIGHT;
hadc1.Init.NbrOfConversion = 2;
hadc1.Init.DMAContinuousRequests = ENABLE;
hadc1.Init.EOCSelection = ADC_EOC_SINGLE_CONV;
if (HAL_ADC_Init(&hadc1) != HAL_OK)
{
Error_Handler();
}
sConfig.Channel = ADC_CHANNEL_8;
sConfig.Rank = 1;
sConfig.SamplingTime = ADC_SAMPLETIME_3CYCLES;
if (HAL_ADC_ConfigChannel(&hadc1, &sConfig) != HAL_OK)
{
Error_Handler();
}
sConfig.Channel = ADC_CHANNEL_9;
sConfig.Rank = 2;
sConfig.SamplingTime = ADC_SAMPLETIME_3CYCLES;
if (HAL_ADC_ConfigChannel(&hadc1, &sConfig) != HAL_OK)
{
Error_Handler();
}
}
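The call that actually starts the conversions is not shown; with this configuration it would typically be kicked off once from main() with something like the following (the buffer name is illustrative):
/* 2 half-word results, one per rank; with DMA_CIRCULAR and
   DMAContinuousRequests enabled the buffer is refreshed continuously */
static uint16_t adcValues[2];

if (HAL_ADC_Start_DMA(&hadc1, (uint32_t *)adcValues, 2) != HAL_OK)
{
  Error_Handler();
}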
And here is the output from the J-Link GDB server. Again, if I erase the chip first, everything works fine.
/Applications/SEGGER/JLink_V686c/JLinkGDBServerCLExe -select USB -device STM32F401RE -endian
little -if SWD -speed auto -ir -noLocalhostOnly
SEGGER J-Link GDB Server V6.86c Command Line Version
JLinkARM.dll V6.86c (DLL compiled Oct 6 2020 14:16:39)
Command line: -select USB -device STM32F401RE -endian little -if SWD -speed auto -ir -
noLocalhostOnly
-----GDB Server start settings-----
GDBInit file: none
GDB Server Listening port: 2331
SWO raw output listening port: 2332
Terminal I/O port: 2333
Accept remote connection: yes
Generate logfile: off
Verify download: off
Init regs on start: on
Silent mode: off
Single run mode: off
Target connection timeout: 0 ms
------J-Link related settings------
J-Link Host interface: USB
J-Link script: none
J-Link settings file: none
------Target related settings------
Target device: STM32F401RE
Target interface: SWD
Target interface speed: auto
Target endian: little
Connecting to J-Link...
J-Link is connected.
Firmware: J-Link EDU Mini V1 compiled Oct 2 2020 17:00:40
Hardware: V1.00
S/N: 801018944
Feature(s): FlashBP, GDB
Checking target voltage...
Target voltage: 3.28 V
Listening on TCP/IP port 2331
Connecting to target...
Connected to target
Waiting for GDB connection...Connected to 127.0.0.1
Reading all registers
Read 2 bytes @ address 0x0800EF10 (Data = 0xB508)
Read 2 bytes @ address 0x080080A0 (Data = 0x4770)
Read 4 bytes @ address 0x00000000 (Data = 0x20018000)
Read 2 bytes @ address 0x00000000 (Data = 0x8000)
Downloading 408 bytes @ address 0x08000000
Downloading 16048 bytes @ address 0x080001C0
Downloading 16144 bytes @ address 0x08004070
Downloading 16048 bytes @ address 0x08007F80
Downloading 15968 bytes @ address 0x0800BE30
Downloading 16000 bytes @ address 0x0800FC90
Downloading 7876 bytes @ address 0x08013B10
Downloading 5908 bytes @ address 0x080159D8
Downloading 956 bytes @ address 0x080170EC
Downloading 1032 bytes @ address 0x080174A8
Downloading 24 bytes @ address 0x080178B0
Downloading 4 bytes @ address 0x080178C8
Downloading 2496 bytes @ address 0x080178CC
ERROR: Failed to prepare for programming.
RAM check failed @ addr 0x20000E6C.
RAM check failed while testing 0x1050 bytes @ addr 0x200006CC.
Writing register (PC = 0x 800c8f0)
Read 2 bytes @ address 0x0800EF10 (Data = 0xB508)
Read 2 bytes @ address 0x080080A0 (Data = 0x4770)
Received monitor command: reset
Resetting target
Resetting target
Debugger connected to 127.0.0.1:2331
Starting target CPU...
Currently, I'm facing a weird problem with an STM32. I just generated code with STM32CubeIDE for the chosen MCU (STM32L031G6). I changed almost nothing, except for configuring one GPIO as an output and trying to let a connected LED blink.
Now the problem:
If I run the code, nothing happens, no blink at all.
Stepping through the code, I can turn the LED on once when WritePin is called. One step further, the LED is off again, although no further WritePin call has been executed. The LED never comes back on.
What can the problem with this code be? There is nothing special about it. Did I miss something that is required for generated STM32 code?
For the following code I removed all unused lines and comments.
#include "main.h"
void SystemClock_Config(void);
static void MX_GPIO_Init(void);
int main(void) {
HAL_Init();
SystemClock_Config();
MX_GPIO_Init();
while(1) {
//HAL_GPIO_TogglePin(LED_GPIO_Port, LED_Pin);
HAL_GPIO_WritePin(LED_GPIO_Port, LED_Pin, GPIO_PIN_SET);
HAL_Delay(1000);
HAL_GPIO_WritePin(LED_GPIO_Port, LED_Pin, GPIO_PIN_RESET);
HAL_Delay(1000);
}
}
void SystemClock_Config(void) {
RCC_OscInitTypeDef RCC_OscInitStruct = {0};
RCC_ClkInitTypeDef RCC_ClkInitStruct = {0};
__HAL_PWR_VOLTAGESCALING_CONFIG(PWR_REGULATOR_VOLTAGE_SCALE1);
RCC_OscInitStruct.OscillatorType = RCC_OSCILLATORTYPE_MSI;
RCC_OscInitStruct.MSIState = RCC_MSI_ON;
RCC_OscInitStruct.MSICalibrationValue = 0;
RCC_OscInitStruct.MSIClockRange = RCC_MSIRANGE_6;
RCC_OscInitStruct.PLL.PLLState = RCC_PLL_NONE;
if (HAL_RCC_OscConfig(&RCC_OscInitStruct) != HAL_OK) {
Error_Handler();
}
RCC_ClkInitStruct.ClockType = RCC_CLOCKTYPE_HCLK|RCC_CLOCKTYPE_SYSCLK
|RCC_CLOCKTYPE_PCLK1|RCC_CLOCKTYPE_PCLK2;
RCC_ClkInitStruct.SYSCLKSource = RCC_SYSCLKSOURCE_MSI;
RCC_ClkInitStruct.AHBCLKDivider = RCC_SYSCLK_DIV1;
RCC_ClkInitStruct.APB1CLKDivider = RCC_HCLK_DIV1;
RCC_ClkInitStruct.APB2CLKDivider = RCC_HCLK_DIV1;
if (HAL_RCC_ClockConfig(&RCC_ClkInitStruct, FLASH_LATENCY_0) != HAL_OK) {
Error_Handler();
}
}
static void MX_GPIO_Init(void) {
GPIO_InitTypeDef GPIO_InitStruct = {0};
__HAL_RCC_GPIOA_CLK_ENABLE();
HAL_GPIO_WritePin(LED_GPIO_Port, LED_Pin, GPIO_PIN_RESET);
GPIO_InitStruct.Pin = LED_Pin;
GPIO_InitStruct.Mode = GPIO_MODE_OUTPUT_PP;
GPIO_InitStruct.Pull = GPIO_NOPULL;
GPIO_InitStruct.Speed = GPIO_SPEED_FREQ_LOW;
HAL_GPIO_Init(LED_GPIO_Port, &GPIO_InitStruct);
}
void Error_Handler(void) {
__disable_irq();
while (1) {}
}
Update 1:
As noted in the comments, HAL_Delay is not working properly. But how do I fix it? And why doesn't the LED at least flicker when HAL_Delay is removed?
Update 2:
Using the loop the following way is also not possible; the LED never turns on at all.
while (1) {
HAL_GPIO_TogglePin(LED_GPIO_Port, LED_Pin);
}
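A toggle loop running at full CPU speed switches the pin far too fast to produce a visible blink, so one diagnostic (purely a sketch; the loop count is arbitrary and not calibrated) is a crude busy-wait that does not depend on SysTick/HAL_Delay at all:
while (1) {
    HAL_GPIO_TogglePin(LED_GPIO_Port, LED_Pin);
    for (volatile uint32_t i = 0; i < 100000; i++) { }  /* rough delay, independent of SysTick */
}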
See the following images for the configuration.
Update 3:
When executing the code on the STM32L031G6, the debugger stops pretty soon. Stepping through the code works (sometimes). Here is the debug log when clicking "Run" in the STM32Cube IDE.
SEGGER J-Link GDB Server V7.58 Command Line Version
JLinkARM.dll V7.58 (DLL compiled Nov 4 2021 16:23:13)
Command line: -port 2331 -s -device STM32L031G6 -endian little -speed 4000 -if swd -vd
-----GDB Server start settings-----
GDBInit file: none
GDB Server Listening port: 2331
SWO raw output listening port: 2332
Terminal I/O port: 2333
Accept remote connection: localhost only
Generate logfile: off
Verify download: on
Init regs on start: off
Silent mode: off
Single run mode: on
Target connection timeout: 0 ms
------J-Link related settings------
J-Link Host interface: USB
J-Link script: none
J-Link settings file: none
------Target related settings------
Target device: STM32L031G6
Target interface: SWD
Target interface speed: 4000kHz
Target endian: little
Connecting to J-Link...
J-Link is connected.
Firmware: J-Link V11 compiled Dec 9 2021 14:14:49
Hardware: V11.00
S/N: 261014681
OEM: SEGGER-EDU
Feature(s): FlashBP, GDB
Checking target voltage...
Target voltage: 3.34 V
Listening on TCP/IP port 2331
Connecting to target...
Connected to target
Waiting for GDB connection...Connected to 127.0.0.1
GDB closed TCP/IP connection (Socket 1132)
Connected to 127.0.0.1
Reading all registers
Read 4 bytes @ address 0x1FF000FC (Data = 0x89B8D002)
Read 2 bytes @ address 0x1FF000FC (Data = 0xD002)
Received monitor command: WriteDP 0x2 0xF0
O.K.
Received monitor command: ReadAP 0x2
O.K.:0xF0000003
Read 4 bytes @ address 0x1FF000E4 (Data = 0x05408A28)
Read 2 bytes @ address 0x1FF000E4 (Data = 0x8A28)
Read 4 bytes @ address 0x1FF000E4 (Data = 0x05408A28)
Read 2 bytes @ address 0x1FF000E4 (Data = 0x8A28)
Reading 32 bytes @ address 0xF0000FD0
Connected to 127.0.0.1
Reading all registers
Read 4 bytes @ address 0x1FF000FC (Data = 0x89B8D002)
Read 2 bytes @ address 0x1FF000FC (Data = 0xD002)
Received monitor command: reset
Resetting target
Downloading 192 bytes @ address 0x08000000 - Verified OK
Downloading 6072 bytes @ address 0x080000C0 - Verified OK
Downloading 28 bytes @ address 0x08001878 - Verified OK
Downloading 8 bytes @ address 0x08001894 - Verified OK
Downloading 4 bytes @ address 0x0800189C - Verified OK
Downloading 4 bytes @ address 0x080018A0 - Verified OK
Downloading 12 bytes @ address 0x080018A4 - Verified OK
Writing register (PC = 0x 80006d0)
Starting target CPU...
GDB closed TCP/IP connection (Socket 1128)
Debugger requested to halt target...
...Target halted (PC = 0x1FF000E4)
Reading all registers
Read 4 bytes @ address 0x1FF000E4 (Data = 0x05408A28)
Read 2 bytes @ address 0x1FF000E4 (Data = 0x8A28)
GDB closed TCP/IP connection (Socket 1152)
Restoring target state and closing J-Link connection...
Shutting down...
On the other hand, the same code works on an STM32L031K6 on the Nucleo board with the ST-Link disconnected.
Update 4:
Since I'm using a custom board, there may be a flaw in the schematics. I don't see any issues with the circuit, but maybe you do. There is no crystal, since one shouldn't be required according to the datasheet; internal oscillators are available.
The TOUCH net is just a circuit which connects the pin to GND when a button is pressed.
This is the circuit of the STM32L031G6U6.
This is the circuit of the LEDs that should be controlled. In the previous code I just try to control the LED with the net label STATUS_LED. Since I got the LED to blink while stepping through the code, the MOSFET circuit should work.
I'm currently very confused about why I'm running into so many problems. I tried a second and a third PCB of the same circuit, but the problems are the same.
I also figured out that I cannot use every clock configuration offered by STM32CubeIDE. Using the MSI just doesn't work for some frequencies; the code stalls in SystemClock_Config while setting the oscillator or the clock.
Why does HAL_Delay sometimes work and sometimes not?
Why doesn't the system start at all when the code is simply run (even with everything disconnected and only the power supply reconnected)?
Why does stepping through the code work while running the code does not?
Problem solved. The Altium package I downloaded was for the wrong package of the STM32L031: it is for the STM32L031G6U6S and not the STM32L031G6U6.
I am currently using the STM32L0538 Discovery board. In my project I have to use the low-level (LL) drivers to interface I2C with a slave device (ST25DV) using DMA.
I ported an LL example to the STM32L0538 DISCO board by referring to the LL example project available for the NUCLEO-L073RZ in the firmware repo (STM32Cube_FW_L0_V1.12.1).
The issue with the example is that I am only able to transmit 4 bytes of data (slave addr. + 3 bytes of 8-bit data); afterwards, I2C generates a STOP condition although the number of bytes to be transmitted is set to more than 4 in both the DMA and I2C registers. I think the issue is with the DMA, as it accepts a uint32_t source memory address but my data is of uint8_t type. I have tried typecasting as shown in the demo LL example, but it doesn't work.
Can anyone please tell me how I can transmit more than just 4 bytes of data, or where I am going wrong? Thanks in advance.
Here is the sample code ported from the STM32L0 firmware repo, which only sends 4 bytes of data:
uint8_t aLedOn[5] = {0x12,0x34,0x56,0x77,88};
__IO uint8_t ubNbDataToTransmit = sizeof(aLedOn);
uint8_t* pTransmitBuffer = (uint8_t*)aLedOn;
__IO uint8_t ubTransferComplete = 0;
#define SLAVE_OWN_ADDRESS 0xAE
int main(void)
{
/* Reset of all peripherals, Initializes the Flash interface and the Systick. */
LL_APB2_GRP1_EnableClock(LL_APB2_GRP1_PERIPH_SYSCFG);
LL_APB1_GRP1_EnableClock(LL_APB1_GRP1_PERIPH_PWR);
/* Configure the system clock */
SystemClock_Config();
MX_GPIO_Init();
/* USER CODE BEGIN 2 */
Configure_DMA();
Configure_I2C_Master();
LL_mDelay(1000);
Handle_I2C_Master();
/* USER CODE END 2 */
while (1)
{
/* USER CODE END WHILE */
/* USER CODE BEGIN 3 */
}
}
void Configure_DMA(void)
{
LL_AHB1_GRP1_EnableClock(LL_AHB1_GRP1_PERIPH_DMA1);
NVIC_SetPriority(DMA1_Channel4_5_6_7_IRQn, 0);
NVIC_EnableIRQ(DMA1_Channel4_5_6_7_IRQn);
LL_DMA_ConfigTransfer(DMA1, LL_DMA_CHANNEL_4, LL_DMA_DIRECTION_MEMORY_TO_PERIPH | \
LL_DMA_PRIORITY_HIGH | \
LL_DMA_MODE_NORMAL | \
LL_DMA_PERIPH_NOINCREMENT | \
LL_DMA_MEMORY_INCREMENT | \
LL_DMA_PDATAALIGN_BYTE | \
LL_DMA_MDATAALIGN_BYTE);
LL_DMA_SetDataLength(DMA1, LL_DMA_CHANNEL_4, ubNbDataToTransmit);
LL_DMA_ConfigAddresses(DMA1, LL_DMA_CHANNEL_4, (uint32_t)pTransmitBuffer, (uint32_t)LL_I2C_DMA_GetRegAddr(I2C2, LL_I2C_DMA_REG_DATA_TRANSMIT), LL_DMA_GetDataTransferDirection(DMA1, LL_DMA_CHANNEL_4));
LL_DMA_SetPeriphRequest(DMA1, LL_DMA_CHANNEL_4, LL_DMA_REQUEST_7);
LL_DMA_EnableIT_TC(DMA1, LL_DMA_CHANNEL_4);
LL_DMA_EnableIT_TE(DMA1, LL_DMA_CHANNEL_4);
}
void Configure_I2C_Master(void)
{
LL_I2C_InitTypeDef I2C_InitStruct = {0};
LL_GPIO_InitTypeDef GPIO_InitStruct = {0};
/* Enable the peripheral clock of GPIOB */
LL_IOP_GRP1_EnableClock(LL_IOP_GRP1_PERIPH_GPIOB);
GPIO_InitStruct.Pin = LL_GPIO_PIN_13;
GPIO_InitStruct.Mode = LL_GPIO_MODE_ALTERNATE;
GPIO_InitStruct.Speed = LL_GPIO_SPEED_FREQ_VERY_HIGH;
GPIO_InitStruct.OutputType = LL_GPIO_OUTPUT_OPENDRAIN;
GPIO_InitStruct.Pull = LL_GPIO_PULL_UP;
GPIO_InitStruct.Alternate = LL_GPIO_AF_5;
LL_GPIO_Init(GPIOB, &GPIO_InitStruct);
GPIO_InitStruct.Pin = LL_GPIO_PIN_14;
GPIO_InitStruct.Mode = LL_GPIO_MODE_ALTERNATE;
GPIO_InitStruct.Speed = LL_GPIO_SPEED_FREQ_VERY_HIGH;
GPIO_InitStruct.OutputType = LL_GPIO_OUTPUT_OPENDRAIN;
GPIO_InitStruct.Pull = LL_GPIO_PULL_UP;
GPIO_InitStruct.Alternate = LL_GPIO_AF_5;
LL_GPIO_Init(GPIOB, &GPIO_InitStruct);
LL_APB1_GRP1_EnableClock(LL_APB1_GRP1_PERIPH_I2C2);
LL_I2C_SetTiming(I2C2, 0x00100E16);
LL_I2C_SetOwnAddress1(I2C2, 0x00, LL_I2C_OWNADDRESS1_7BIT);
LL_I2C_DisableOwnAddress1(I2C2);
LL_I2C_EnableClockStretching(I2C2);
LL_I2C_SetDigitalFilter(I2C2, 0x00);
LL_I2C_EnableAnalogFilter(I2C2);
LL_I2C_EnableGeneralCall(I2C2);
LL_I2C_SetOwnAddress2(I2C2, 0x00, LL_I2C_OWNADDRESS2_NOMASK);
LL_I2C_DisableOwnAddress2(I2C2);
LL_I2C_SetMasterAddressingMode(I2C2, LL_I2C_ADDRESSING_MODE_7BIT);
LL_I2C_SetMode(I2C2, LL_I2C_MODE_I2C);
// (4) Enable DMA transmission requests
LL_I2C_EnableDMAReq_TX(I2C2);
LL_I2C_Enable(I2C2);
}
void Handle_I2C_Master(void)
{
ubTransferComplete = 0;
LL_DMA_EnableChannel(DMA1, LL_DMA_CHANNEL_4);
LL_I2C_HandleTransfer(I2C2, SLAVE_OWN_ADDRESS, LL_I2C_ADDRSLAVE_7BIT, ubNbDataToTransmit, LL_I2C_MODE_AUTOEND, LL_I2C_GENERATE_START_WRITE);
/* Loop until DMA transfer complete event */
while(!ubTransferComplete)
{
}
/* Loop until STOP flag is raised */
while(!LL_I2C_IsActiveFlag_STOP(I2C2))
{
}
LL_I2C_ClearFlag_STOP(I2C2);
}
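The DMA IRQ handler is not shown in the post; in the LL example this was ported from, the transfer-complete interrupt is what sets ubTransferComplete, roughly as in this sketch (assuming channel 4 as configured above):
void DMA1_Channel4_5_6_7_IRQHandler(void)
{
  if (LL_DMA_IsActiveFlag_TC4(DMA1))
  {
    LL_DMA_ClearFlag_GI4(DMA1);   /* clears TC4 (and HT4/TE4) */
    ubTransferComplete = 1;       /* polled in Handle_I2C_Master() */
  }
  else if (LL_DMA_IsActiveFlag_TE4(DMA1))
  {
    LL_DMA_ClearFlag_GI4(DMA1);
    /* transfer error: flag or handle it here */
  }
}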
I have a weight cell which replies to specific frames over RS485 (the cell has its own communication protocol). The manufacturer has software to test the cell, so I connect it to my PC using an RS485-to-USB converter and I am able to connect to the cell and send/receive frames. Here are the parameters I use, and you can see the TX/RX frames below:
Now I want to handle this protocol with an STM32 board, but I am not receiving frames properly. I am using a NUCLEO-F401RE and the RS485 CAN Shield from Waveshare (https://www.waveshare.com/wiki/RS485_CAN_Shield). Here is what I do and my UART configuration:
Main program
int main(void)
{
HAL_Init();
SystemClock_Config();
MX_GPIO_Init();
GPIO_InitTypeDef GPIO_InitStruct = {0};
GPIO_InitStruct.Pin = GPIO_PIN_8;
GPIO_InitStruct.Mode = GPIO_MODE_OUTPUT_PP;
GPIO_InitStruct.Pull = GPIO_NOPULL;
GPIO_InitStruct.Speed = GPIO_SPEED_FREQ_HIGH;
HAL_GPIO_Init(GPIOA, &GPIO_InitStruct);
MX_USART1_UART_Init();
MX_USART2_UART_Init();
printf("NUCLEOF401RE - RS485 RX\r\n");
while (1)
{
// Activate sending status
HAL_GPIO_WritePin (GPIOA, GPIO_PIN_8, 1);
txBuffer[0] = 0x44; // SOF
txBuffer[1] = 0x01; // DIR
txBuffer[2] = 0x00; // ERR
txBuffer[3] = 0x25; // CMD
txBuffer[4] = 0x00; // DATOS0
txBuffer[5] = 0x00; // DATOS1
txBuffer[6] = 0x00; // DATOS2
txBuffer[7] = 0x00; // DATOS3
txBuffer[8] = txBuffer[1] ^ txBuffer[2] ^ txBuffer[3] ^ txBuffer[4] ^ txBuffer[5] ^ txBuffer[6] ^ txBuffer[7]; // CRC
txBuffer[9] = 0x0A; // EOF
printf("TX - Sending frame\r\n");
HAL_UART_Transmit(&UART_RS485, txBuffer, framelength, 1000);
// Activate receiving status
HAL_GPIO_WritePin (GPIOA, GPIO_PIN_8, 0);
HAL_UART_Receive(&UART_RS485, rxBuffer, framelength, 1000);
printf("RX - Reception Completed:\r\n");
for(int j=0; j<framelength; j++)
{
printf("Byte %d: %02X\r\n", j, rxBuffer[j]);
}
HAL_Delay(3000);
}
}
UART Configuration
static void MX_USART1_UART_Init(void)
{
UART_RS485.Instance = USART1;
UART_RS485.Init.BaudRate = 115200;
UART_RS485.Init.WordLength = UART_WORDLENGTH_8B;
UART_RS485.Init.StopBits = UART_STOPBITS_1;
UART_RS485.Init.Parity = UART_PARITY_NONE;
UART_RS485.Init.Mode = UART_MODE_TX_RX;
UART_RS485.Init.HwFlowCtl = UART_HWCONTROL_NONE;
UART_RS485.Init.OverSampling = UART_OVERSAMPLING_16;
if (HAL_UART_Init(&UART_RS485) != HAL_OK)
{
Error_Handler();
}
}
void HAL_UART_MspInit(UART_HandleTypeDef* huart)
{
GPIO_InitTypeDef GPIO_InitStruct = {0};
if(huart->Instance==USART1)
{
__HAL_RCC_USART1_CLK_ENABLE();
__HAL_RCC_GPIOA_CLK_ENABLE();
GPIO_InitStruct.Pin = GPIO_PIN_9;
GPIO_InitStruct.Mode = GPIO_MODE_AF_PP;
GPIO_InitStruct.Pull = GPIO_NOPULL;
GPIO_InitStruct.Speed = GPIO_SPEED_FREQ_HIGH;
GPIO_InitStruct.Alternate = GPIO_AF7_USART1;
HAL_GPIO_Init(GPIOA, &GPIO_InitStruct);
GPIO_InitStruct.Pin = GPIO_PIN_10;
GPIO_InitStruct.Mode = GPIO_MODE_AF_PP;
GPIO_InitStruct.Pull = GPIO_PULLUP;
GPIO_InitStruct.Speed = GPIO_SPEED_FREQ_HIGH;
GPIO_InitStruct.Alternate = GPIO_AF7_USART1;
HAL_GPIO_Init(GPIOA, &GPIO_InitStruct);
}
}
I should be receiving a frame starting with 0x44 and ending with 0x0A based on the cell protocol. However, this is what I receive:
I have tried many different UART parameters and pin configurations, and I have tried 120 Ω and 220 Ω termination resistors between the A and B lines, but nothing works.
What am I missing?
I suggest you look at the UART status register of your STM32 MCU, because it shows UART errors such as parity error, framing error and receive overrun. In my work, UART errors were very often related to buffer overrun, because MCUs usually have a very small receive buffer (for the ATmega32, 2 bytes).
I also suggest you use a mode with a parity bit, because it gives you a basic data check in any case.
Also, have you calculated your UART baud-rate error (it depends on your clock frequency and UART speed)? You could also select a UART speed with 0% error.
You can read an article about the status register and baud rate here: http://www.micromouseonline.com/2009/12/31/stm32-usart-basics/
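As a sketch of that first suggestion (flag names are from the STM32F4 HAL; the handle and buffer names follow the question), you could check the return value of the blocking receive and the error flags in the status register:
HAL_StatusTypeDef st = HAL_UART_Receive(&UART_RS485, rxBuffer, framelength, 1000);
if (st != HAL_OK)   /* HAL_TIMEOUT means fewer bytes arrived than expected */
{
    printf("RX status: %d\r\n", (int)st);
}
if (__HAL_UART_GET_FLAG(&UART_RS485, UART_FLAG_ORE)) { printf("Overrun error\r\n"); }
if (__HAL_UART_GET_FLAG(&UART_RS485, UART_FLAG_FE))  { printf("Framing error\r\n"); }
if (__HAL_UART_GET_FLAG(&UART_RS485, UART_FLAG_PE))  { printf("Parity error\r\n"); }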
I am currently working on a project that involves the STM32F0 Discovery board and this fingerprint scanner, which uses UART communication. To initialize the UART, I do the following:
void init_uart(void){
GPIO_InitTypeDef GPIO_InitStructure;
USART_InitTypeDef USART_InitStructure;
USART_InitStructure.USART_BaudRate = 9600;
USART_InitStructure.USART_WordLength = USART_WordLength_8b;
USART_InitStructure.USART_StopBits = USART_StopBits_1;
USART_InitStructure.USART_Parity = USART_Parity_No;
USART_InitStructure.USART_HardwareFlowControl = USART_HardwareFlowControl_None;
USART_InitStructure.USART_Mode = USART_Mode_Rx | USART_Mode_Tx;
RCC_AHBPeriphClockCmd(RCC_AHBPeriph_GPIOA, ENABLE);
RCC_APB2PeriphClockCmd(RCC_APB2Periph_USART1, ENABLE);
GPIO_PinAFConfig(GPIOA, GPIO_PinSource9, GPIO_AF_1);
GPIO_PinAFConfig(GPIOA, GPIO_PinSource10, GPIO_AF_1);
GPIO_InitStructure.GPIO_Pin = GPIO_Pin_9 | GPIO_Pin_10;
GPIO_InitStructure.GPIO_Mode = GPIO_Mode_AF;
GPIO_InitStructure.GPIO_Speed = GPIO_Speed_50MHz;
GPIO_InitStructure.GPIO_OType = GPIO_OType_PP;
GPIO_InitStructure.GPIO_PuPd = GPIO_PuPd_UP;
GPIO_Init(GPIOA, &GPIO_InitStructure);
USART_Init(USART1, &USART_InitStructure);
USART_Cmd(USART1, ENABLE);
USART_ITConfig(USART1, USART_IT_RXNE, ENABLE);
}
Then to send/receive a byte I do the following:
void writechar(char c) {
while((USART1 -> ISR & USART_ISR_TC) != USART_ISR_TC);
USART1 -> TDR = c;
}
char readchar(void) {
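/* Note: USART_ReceiveData() simply returns the current contents of USART1->RDR; it does not wait for RXNE */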
return USART_ReceiveData(USART1);
}
To test this, I sent the OPEN command byte-by-byte through a for-loop, then the LEN-ON command the same way, with no results. When reading for an ACK after the OPEN command, the result is just an empty array of char bytes.
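That test loop looks roughly like the sketch below (the packet contents here are placeholders, not the scanner's real OPEN command bytes, which come from its datasheet):
char open_cmd[] = { 0x00, 0x00 };   /* placeholder bytes only */
for (unsigned int i = 0; i < sizeof(open_cmd); i++) {
    writechar(open_cmd[i]);
}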
So, my question is, is my initialization the error? Or is the error in the reading/writing? Or could it be something else altogether?
Are you getting the ACK response? If you are not, then there is probably a configuration or wiring problem. I would suggest you try connecting the serial pins to a serial-USB converter (an FTDI adapter, for example) and use an app like PuTTY or YAT to send commands to and receive responses from your STM32. Try using the same configuration the scanner has: if that doesn't work, it's a problem with your config; if it does work, it's a problem on the scanner side.
I'm using MBED on an STM32F437 MCU (my own target) and trying to properly reach STOP mode. This MCU doesn't have an LPTIM (low-power timer), so I'm trying to configure the RTC (or the WKUP pin PA0) to wake the MCU up a few seconds after STOP. So far so good. But there is a problem after wakeup. The MCU is running and serial printf is working, but the MBED Ticker and Timer are not running as expected. Here's the deal: the Ticker doesn't call the attached interrupt, and the Timer apparently overflows after wakeup (the jump to roughly 4,294,783 ms below is suspiciously close to a 32-bit microsecond counter wrapping: 2^32 µs ≈ 4,294,967 ms).
// mcu start
Timer t;
t.start();
Thread::wait(10000); // sleep is enabled 10 seconds after MCU is powered ON
-> t = 10002
// wakeup timer for 10 seconds configured
// stop mode enabled
// this happens after wakeup
-> t = 4294783
-> t = 4295784
-> t = 4296785
-> t = 4297786
-> t = 4298787
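The Ticker mentioned above is attached in the usual MBED way (not shown in the original post; the callback name and period are illustrative):
Ticker ticker;

void onTick(void) {
    // e.g. toggle a debug pin or increment a counter;
    // after wakeup from STOP this callback is no longer called
}

// somewhere during init:
ticker.attach(&onTick, 1.0f);   // expected to fire every second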
For wakeup I'm using this library:
https://os.mbed.com/users/Sissors/code/WakeUp/
And in the Stop routine there is code like this:
/* GPIO Ports Clock Enable */
__HAL_RCC_GPIOA_CLK_ENABLE();
/*Configure GPIO pin : PA0 */
GPIO_InitStruct.Pin = GPIO_PIN_0;
GPIO_InitStruct.Mode = GPIO_MODE_IT_RISING;
GPIO_InitStruct.Pull = GPIO_NOPULL;
HAL_GPIO_Init(GPIOA, &GPIO_InitStruct);
/* Allow access to Backup */
HAL_PWR_EnableBkUpAccess();
/* Disable all used wakeup sources: Pin1(PA.0) */
HAL_PWR_DisableWakeUpPin(PWR_WAKEUP_PIN1);
/* Clear all related wakeup flags */
__HAL_PWR_CLEAR_FLAG(PWR_FLAG_WU);
/* Re-enable all used wakeup sources: Pin1(PA.0) */
HAL_PWR_EnableWakeUpPin(PWR_WAKEUP_PIN1);
/* FLASH Deep Power Down Mode enabled */
HAL_PWREx_EnableFlashPowerDown();
/* Enter Stop Mode */
HAL_PWR_EnterSTOPMode(PWR_LOWPOWERREGULATOR_ON, PWR_STOPENTRY_WFI);
/* Configures system clock after wake-up from STOP: enable HSE, PLL and select
PLL as system clock source (HSE and PLL are disabled in STOP mode) */
SetSysClock();
and this is what my clock init (SetSysClock) looks like. It's called both after MCU start and after wakeup from STOP.
/******************************************************************************/
/* PLL (clocked by HSE) used as System clock source */
/******************************************************************************/
uint8_t SetSysClock_PLL_HSE(uint8_t bypass)
{
RCC_OscInitTypeDef RCC_OscInitStruct;
RCC_ClkInitTypeDef RCC_ClkInitStruct;
/* The voltage scaling allows optimizing the power consumption when the device is
clocked below the maximum system frequency, to update the voltage scaling value
regarding system frequency refer to product datasheet. */
__HAL_RCC_PWR_CLK_ENABLE();
__HAL_PWR_VOLTAGESCALING_CONFIG(PWR_REGULATOR_VOLTAGE_SCALE2);
// Enable HSE oscillator and activate PLL with HSE as source
RCC_OscInitStruct.OscillatorType = RCC_OSCILLATORTYPE_HSE;
if (bypass == 0) {
RCC_OscInitStruct.HSEState = RCC_HSE_ON; // External 8 MHz xtal on OSC_IN/OSC_OUT
} else {
RCC_OscInitStruct.HSEState = RCC_HSE_BYPASS; // External 8 MHz clock on OSC_IN
}
RCC_OscInitStruct.HSIState = RCC_HSI_OFF;
RCC_OscInitStruct.PLL.PLLState = RCC_PLL_ON;
RCC_OscInitStruct.PLL.PLLSource = RCC_PLLSOURCE_HSE;
RCC_OscInitStruct.PLL.PLLM = 25;
#if (CLOCK_SOURCE_USB)
RCC_OscInitStruct.PLL.PLLN = 336;
#else
RCC_OscInitStruct.PLL.PLLN = 336;
#endif
RCC_OscInitStruct.PLL.PLLP = RCC_PLLP_DIV2; // 180 MHz or 168 MHz if CLOCK_SOURCE_USB defined
RCC_OscInitStruct.PLL.PLLQ = 7;
if (HAL_RCC_OscConfig(&RCC_OscInitStruct) != HAL_OK) {
return 0; // FAIL
}
// Activate the OverDrive to reach the 180 MHz Frequency
if (HAL_PWREx_EnableOverDrive() != HAL_OK) {
return 0; // FAIL
}
// Select PLL as system clock source and configure the HCLK, PCLK1 and PCLK2 clocks dividers
RCC_ClkInitStruct.ClockType = RCC_CLOCKTYPE_SYSCLK | RCC_CLOCKTYPE_HCLK | RCC_CLOCKTYPE_PCLK1 | RCC_CLOCKTYPE_PCLK2;
RCC_ClkInitStruct.SYSCLKSource = RCC_SYSCLKSOURCE_PLLCLK;
RCC_ClkInitStruct.AHBCLKDivider = RCC_SYSCLK_DIV1; // 180 or 168 MHz
RCC_ClkInitStruct.APB1CLKDivider = RCC_HCLK_DIV4; // 45 or 42 MHz
RCC_ClkInitStruct.APB2CLKDivider = RCC_HCLK_DIV2; // 90 or 84 MHz
if (HAL_RCC_ClockConfig(&RCC_ClkInitStruct, FLASH_LATENCY_5) != HAL_OK) {
return 0; // FAIL
}
// HAL_RCC_MCOConfig(RCC_MCO2, RCC_MCO2SOURCE_SYSCLK, RCC_MCODIV_3);
return 1;
}
#endif