/* Reflow Oven Controller
 *
 * Copyright (C) 2020 Mario Hüttel <mario.huettel@gmx.net>
 *
 * This file is part of the Reflow Oven Controller Project.
 *
 * The reflow oven controller is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * The Reflow Oven Control Firmware is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with the reflow oven controller project.
 * If not, see <http://www.gnu.org/licenses/>.
 */

#include <stm-periph/uart.h>
#include <stm32/stm32f4xx.h>
#include <stm-periph/rcc-manager.h>
#include <stm-periph/stm32-gpio-macros.h>
#include <stm-periph/dma-ring-buffer.h>
#include <string.h>
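
/* Initialize the UART described by uart: enable its peripheral clock, reset the
 * control registers, program the baud rate and, if RX/TX DMA buffers are
 * configured, set up the corresponding DMA ring buffers.
 * Returns 0 on success, -1000 if uart is NULL, or the error code of the DMA
 * ring buffer initialization.
 */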
int uart_init(struct stm_uart *uart)
{
        int ret_val = 0;
        uint32_t cr3 = 0;
        uint32_t cr1 = 0;

        if (!uart)
                return -1000;

        rcc_manager_enable_clock(uart->rcc_reg, uart->rcc_bit_no);

        /* Reset all config regs */
        uart->uart_dev->CR1 = uart->uart_dev->CR2 = uart->uart_dev->CR3 = 0UL;

        /* Set baud rate */
        uart->uart_dev->BRR = uart->brr_val;

        /* If DMA buffers are present, configure for DMA use */
        if (uart->dma_rx_buff && uart->rx) {
                cr3 |= USART_CR3_DMAR;

                ret_val = dma_ring_buffer_periph_to_mem_initialize(&uart->rx_ring_buff,
                                uart->base_dma_num,
                                uart->dma_rx_stream,
                                uart->rx_buff_count,
                                1U,
                                uart->dma_rx_buff,
                                (char *)&uart->uart_dev->DR,
                                uart->dma_rx_trigger_channel);
                if (ret_val)
                        return ret_val;
        }

        if (uart->dma_tx_buff && uart->tx) {
                ret_val = dma_ring_buffer_mem_to_periph_initialize(&uart->tx_ring_buff,
                                uart->base_dma_num,
                                uart->dma_tx_stream,
                                uart->tx_buff_count,
                                1U,
                                uart->dma_tx_buff,
                                uart->dma_tx_trigger_channel,
                                (void *)&uart->uart_dev->DR);
                if (ret_val)
                        return ret_val;

                cr3 |= USART_CR3_DMAT;
        }
        uart->uart_dev->CR3 = cr3;

        if (uart->tx)
                cr1 |= USART_CR1_TE;
        if (uart->rx)
                cr1 |= USART_CR1_RE;

        /* Enable uart */
        cr1 |= USART_CR1_UE;
        uart->uart_dev->CR1 = cr1;

        return 0;
}
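
/* Set a new baud rate: store the BRR divider value and write it to the UART's
 * BRR register.
 */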
void uart_change_brr(struct stm_uart *uart, uint32_t brr)
{
        if (!uart || !uart->uart_dev)
                return;

        uart->brr_val = brr;
        uart->uart_dev->BRR = brr;
}
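
/* Return the currently configured BRR (baud rate register) value */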
uint32_t uart_get_brr(struct stm_uart *uart)
{
        if (!uart || !uart->uart_dev)
                return 0;

        return uart->brr_val;
}
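
/* Disable the UART: clear all control registers, stop the RX/TX DMA ring
 * buffers if they are in use and disable the peripheral clock again.
 */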
void uart_disable(struct stm_uart *uart)
{
        if (!uart)
                return;

        uart->uart_dev->CR1 = 0;
        uart->uart_dev->CR2 = 0;
        uart->uart_dev->CR3 = 0;

        if (uart->rx && uart->dma_rx_buff)
                dma_ring_buffer_periph_to_mem_stop(&uart->rx_ring_buff);

        if (uart->dma_tx_buff && uart->tx)
                dma_ring_buffer_mem_to_periph_stop(&uart->tx_ring_buff);

        rcc_manager_disable_clock(uart->rcc_reg, uart->rcc_bit_no);
}
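
/* Transmit a single character by polling: busy-wait until the transmit data
 * register is empty (TXE), then write the character.
 */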
void uart_send_char(struct stm_uart *uart, char c)
{
        if (!uart || !uart->uart_dev)
                return;

        while (!(uart->uart_dev->SR & USART_SR_TXE))
                ;

        uart->uart_dev->DR = c;
}
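
/* Transmit len bytes from data using the blocking, polled uart_send_char() */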
void uart_send_array(struct stm_uart *uart, const char *data, uint32_t len)
{
        uint32_t i;

        for (i = 0; i < len; i++)
                uart_send_char(uart, data[i]);
}
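
/* Transmit a NUL-terminated string using the blocking, polled uart_send_char() */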
void uart_send_string(struct stm_uart *uart, const char *string)
{
        int i;

        for (i = 0; string[i] != '\0'; i++)
                uart_send_char(uart, string[i]);
}
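
/* Queue len bytes from data into the TX DMA ring buffer; the DMA stream then
 * transmits them without further CPU polling.
 */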
void uart_send_array_with_dma(struct stm_uart *uart, const char *data, uint32_t len)
{
        if (!uart || !uart->dma_tx_buff)
                return;

        dma_ring_buffer_mem_to_periph_insert_data(&uart->tx_ring_buff, data, len);
}
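
/* Queue a NUL-terminated string into the TX DMA ring buffer */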
void uart_send_string_with_dma(struct stm_uart *uart, const char *string)
{
        size_t len;

        len = strlen(string);
        uart_send_array_with_dma(uart, string, (uint32_t)len);
}
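
/* Fetch data received via the RX DMA ring buffer. On success, *data points to
 * the received bytes and *len holds their count, as reported by the ring
 * buffer implementation. Returns -1000 if uart is NULL, otherwise the ring
 * buffer status code.
 */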
int uart_receive_data_with_dma(struct stm_uart *uart, const char **data, size_t *len)
{
        if (!uart)
                return -1000;

        return dma_ring_buffer_periph_to_mem_get_data(&uart->rx_ring_buff, (const volatile void **)data, len);
}
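
/* Blocking receive: busy-wait until a character is available (RXNE) and
 * return it.
 */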
char uart_get_char(struct stm_uart *uart)
{
        if (!uart)
                return 0;

        /* Wait for data to be available */
        while (!(uart->uart_dev->SR & USART_SR_RXNE))
                ;

        return (char)uart->uart_dev->DR;
}
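
/* Return 1 if a received character is waiting in the data register (RXNE set),
 * 0 otherwise.
 */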
int uart_check_rx_avail(struct stm_uart *uart)
{
        if (!uart)
                return 0;

        if (uart->uart_dev->SR & USART_SR_RXNE)
                return 1;
        else
                return 0;
}
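
/* Forward the TX DMA transfer-complete interrupt to the ring buffer handler;
 * intended to be called from the corresponding DMA stream ISR.
 */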
void uart_tx_dma_complete_int_callback(struct stm_uart *uart)
{
        if (!uart)
                return;

        dma_ring_buffer_mem_to_periph_int_callback(&uart->tx_ring_buff);
}
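
/* Query the fill level of the TX DMA ring buffer (0 if uart is NULL) */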
size_t uart_dma_tx_queue_avail(struct stm_uart *uart)
{
        size_t fill_level = 0UL;

        if (!uart)
                return 0UL;

        (void)dma_ring_buffer_mem_to_periph_fill_level(&uart->tx_ring_buff, &fill_level);

        return fill_level;
}
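
/* Query the fill level of the RX DMA ring buffer (0 if uart is NULL) */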
size_t uart_dma_rx_queue_avail(struct stm_uart *uart)
{
        size_t fill_level = 0UL;

        if (!uart)
                return 0UL;

        (void)dma_ring_buffer_periph_to_mem_fill_level(&uart->rx_ring_buff, &fill_level);

        return fill_level;
}