Document DMA ring buffer

This commit is contained in:
Mario Hüttel 2020-02-12 21:49:28 +01:00
parent 1745a7e2b2
commit 26f8e7ae99
2 changed files with 103 additions and 15 deletions

View File

@ -5,33 +5,103 @@
#include <stm32/stm32f4xx.h> #include <stm32/stm32f4xx.h>
#include <stddef.h> #include <stddef.h>
/**
 * @brief DMA ring buffer for data transfer from a peripheral to memory
 */
struct dma_ring_buffer_to_mem {
	void *data_ptr; /**< @brief Ring buffer memory */
	size_t buffer_count; /**< @brief Size of the buffer in multiples of elements */
	DMA_Stream_TypeDef *dma; /**< @brief DMA stream used to transfer the data */
	size_t get_idx; /**< @brief Get index for SW. Indicates the next element to be read */
	uint8_t base_dma_id; /**< @brief Id of the DMA controller the stream belongs to. Either 1 or 2. */
	size_t element_size; /**< @brief Size of a buffer element (1, 2, or 4 bytes) */
};
/**
 * @brief DMA ring buffer for data transfer from memory to a peripheral
 */
struct dma_ring_buffer_to_periph {
	void *src_buffer; /**< @brief Ring buffer memory */
	size_t buffer_count; /**< @brief Size of the buffer in multiples of elements */
	DMA_Stream_TypeDef *dma; /**< @brief DMA stream used to transfer the data */
	volatile size_t dma_get_idx_current; /**< @brief Current get index of the (possibly not yet) running DMA transfer */
	volatile size_t dma_get_idx_future; /**< @brief Get index in the buffer after the current DMA transfer has finished */
	volatile size_t sw_put_idx; /**< @brief Put index for software */
	uint8_t dma_base_id; /**< @brief Id of the DMA controller the stream belongs to. Either 1 or 2. */
	size_t element_size; /**< @brief Size of a buffer element (1, 2, or 4 bytes) */
};
/**
 * @brief Initialize a ring buffer used for transferring data from a peripheral to memory.
 * @param[in,out] dma_buffer Structure representing the newly initialized ring buffer
 * @param base_dma_id Id of the DMA controller the stream belongs to. Either 1 or 2
 * @param[in] dma_stream DMA stream to use
 * @param buffer_element_count Size of the buffer in elements
 * @param element_size Size of an element. Either 1, 2, or 4 bytes
 * @param[in] data_buffer Buffer to operate on
 * @param[in] src_reg Source register to read data from
 * @param dma_trigger_channel Trigger channel for the DMA stream. Consult the reference manual for a valid number.
 * @note The ring buffers do not have an overrun detection. You have to make sure to empty the buffer in time.
 * @return Status (0 is ok)
 */
int dma_ring_buffer_periph_to_mem_initialize(struct dma_ring_buffer_to_mem *dma_buffer, uint8_t base_dma_id, DMA_Stream_TypeDef *dma_stream, size_t buffer_element_count, size_t element_size, void *data_buffer, void *src_reg, uint8_t dma_trigger_channel);
/**
 * @brief Get data from a peripheral-to-memory ring buffer.
 * @param[in] buff Ring buffer structure
 * @param[out] data_buff Pointer that is set to the new data. The data must not be modified!
 * @param[out] len Length in elements
 * @return 0 if successful (with or without data), -1 on error, and 1 if the data wraps around.
 *         In the wrap-around case, call the function again to retrieve the rest of the data.
 */
int dma_ring_buffer_periph_to_mem_get_data(struct dma_ring_buffer_to_mem *buff, const void **data_buff, size_t *len);
/**
 * @brief Stop the ring buffer operation.
 * @note The ring buffer loses all its configuration after this call.
 * @param buff Ring buffer to stop
 */
void dma_ring_buffer_periph_to_mem_stop(struct dma_ring_buffer_to_mem *buff);
/**
 * @brief Initialize a ring buffer for streaming data from memory to a peripheral.
 * @param[in,out] dma_buffer DMA ring buffer structure
 * @param base_dma_id Id of the DMA controller the stream belongs to. Either 1 or 2
 * @param[in] dma_stream DMA stream to use
 * @param buffer_element_count Size of the ring buffer in elements
 * @param element_size Size of an element. Either 1, 2, or 4 bytes
 * @param[in] data_buffer Ring buffer memory
 * @param dma_trigger_channel Trigger channel to start the DMA. Consult the reference manual for values
 * @param[in] dest_reg Destination register to stream data to
 * @return 0 if successful
 */
int dma_ring_buffer_mem_to_periph_initialize(struct dma_ring_buffer_to_periph *dma_buffer, uint8_t base_dma_id, DMA_Stream_TypeDef *dma_stream, size_t buffer_element_count, size_t element_size, void *data_buffer, uint8_t dma_trigger_channel, void *dest_reg);
/**
 * @brief Insert data into the ring buffer.
 *
 * This function tries to insert data into the ring buffer. If the buffer is full,
 * it waits for the buffer to empty until the data fits. If the data is larger than
 * the whole buffer, it is split: the buffer is filled completely, and the rest is
 * written after the buffer has emptied out, until everything fits in.
 *
 * @param buff Ring buffer structure
 * @param data_to_insert Data to put into the buffer
 * @param count Element count of the data to insert
 * @return 0 if successful.
 */
int dma_ring_buffer_mem_to_periph_insert_data(struct dma_ring_buffer_to_periph *buff, const void *data_to_insert, size_t count);
/**
 * @brief Call this function on a transfer-complete interrupt of the DMA.
 * @note It is mandatory to call this function in order to provide a working ring buffer.
 * @param buff Ring buffer structure
 */
void dma_ring_buffer_mem_to_periph_int_callback(struct dma_ring_buffer_to_periph *buff);
/**
 * @brief Stop the ring buffer operation.
 * @note The ring buffer loses all its configuration after this call.
 * @param buff Ring buffer to stop
 */
void dma_ring_buffer_mem_to_periph_stop(struct dma_ring_buffer_to_periph *buff);

#endif /* __DMA_RING_BUFFER_H__ */

View File

@ -105,6 +105,8 @@ void dma_ring_buffer_periph_to_mem_stop(struct dma_ring_buffer_to_mem *buff)
buff->dma->FCR = 0; buff->dma->FCR = 0;
dma_ring_buffer_switch_clock_enable(buff->base_dma_id, false); dma_ring_buffer_switch_clock_enable(buff->base_dma_id, false);
memset(buff, 0, sizeof(struct dma_ring_buffer_to_mem));
} }
int dma_ring_buffer_mem_to_periph_initialize(struct dma_ring_buffer_to_periph *dma_buffer, uint8_t base_dma_id, DMA_Stream_TypeDef *dma_stream, size_t buffer_element_count, size_t element_size, void *data_buffer, uint8_t dma_trigger_channel, void *dest_reg) int dma_ring_buffer_mem_to_periph_initialize(struct dma_ring_buffer_to_periph *dma_buffer, uint8_t base_dma_id, DMA_Stream_TypeDef *dma_stream, size_t buffer_element_count, size_t element_size, void *data_buffer, uint8_t dma_trigger_channel, void *dest_reg)
@ -206,18 +208,30 @@ int dma_ring_buffer_mem_to_periph_insert_data(struct dma_ring_buffer_to_periph *
/* Fillup buffer (max is buffer end, wrap around afterwards) */ /* Fillup buffer (max is buffer end, wrap around afterwards) */
insert_ptr = (char *)data_to_insert; insert_ptr = (char *)data_to_insert;
dest_ptr = &((char *)buff->src_buffer)[buff->sw_put_idx * buff->element_size]; dest_ptr = &((char *)buff->src_buffer)[buff->sw_put_idx * buff->element_size];
/* Check if data completely fits into memory starting from put index */
if (buff->buffer_count - buff->sw_put_idx >= count) { if (buff->buffer_count - buff->sw_put_idx >= count) {
/* Copy data and move put index */
memcpy(dest_ptr, insert_ptr, buff->element_size * count); memcpy(dest_ptr, insert_ptr, buff->element_size * count);
buff->sw_put_idx += count; buff->sw_put_idx += count;
/* If buffer is used up to last element, set put index to beginning */
if(buff->sw_put_idx >= buff->buffer_count) if(buff->sw_put_idx >= buff->buffer_count)
buff->sw_put_idx = 0; buff->sw_put_idx = 0;
} else { } else {
/* Fill up to end of buffer and fill rest after wrap around */
first_round_count = buff->element_size * (buff->buffer_count - buff->sw_put_idx); first_round_count = buff->element_size * (buff->buffer_count - buff->sw_put_idx);
memcpy(dest_ptr, insert_ptr, first_round_count); memcpy(dest_ptr, insert_ptr, first_round_count);
insert_ptr += first_round_count; insert_ptr += first_round_count;
memcpy(buff->src_buffer, insert_ptr, count - first_round_count); memcpy(buff->src_buffer, insert_ptr, count - first_round_count);
/* Move put index */
buff->sw_put_idx = count - first_round_count; buff->sw_put_idx = count - first_round_count;
} }
/* Queue the DMA transfer. If the DMA is already enabled, this has no effect;
 * the DMA is triggered from the interrupt in that case.
 */
queue_or_start_dma_transfer(buff); queue_or_start_dma_transfer(buff);
return_retval: return_retval:
@ -226,15 +240,19 @@ return_retval:
void dma_ring_buffer_mem_to_periph_int_callback(struct dma_ring_buffer_to_periph *buff)
{
	/* Update the current get index because the DMA transfer is finished */
	buff->dma_get_idx_current = buff->dma_get_idx_future;

	/* Start a new DMA transfer if not all data is transferred yet */
	queue_or_start_dma_transfer(buff);
}
void dma_ring_buffer_mem_to_periph_stop(struct dma_ring_buffer_to_periph *buff)
{
	/* Stop the DMA stream and gate its clock */
	buff->dma->CR = 0;
	dma_ring_buffer_switch_clock_enable(buff->dma_base_id, false);

	/* Reset the structure */
	memset(buff, 0, sizeof(struct dma_ring_buffer_to_periph));
}