mirror of https://github.com/espressif/esp-idf.git
adc: port gh pr to current adc continuous mode driver

DMA EOF may happen for multiple DMA descriptors, instead of only one.

Closes https://github.com/espressif/esp-idf/pull/11500
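For context, a minimal standalone sketch (not the driver code; the struct and function names are made up for illustration) of the idea behind the port: a single in_suc_eof event may cover several linked DMA descriptors, so the reader has to walk the descriptor list until it reaches the descriptor reported by the interrupt, in the spirit of the adc_hal_get_reading_result() changes in the diff below.

/* Hypothetical illustration only, not esp-idf API. */
#include <stdint.h>
#include <stdio.h>

typedef struct demo_desc {
    uint32_t length;          /* bytes filled in by DMA */
    struct demo_desc *next;   /* circular link, like the driver's descriptor list */
} demo_desc_t;

/* Walk from the descriptor after the last-read one up to the descriptor
 * reported by the EOF interrupt, accumulating the received length. */
static uint32_t demo_collect_eof(demo_desc_t *last_read, demo_desc_t *eof, uint32_t *len)
{
    demo_desc_t *d = last_read->next;
    uint32_t count = 0;
    *len = 0;
    while (1) {
        *len += d->length;
        count++;
        if (d == eof) {       /* EOF may land on the 1st, 2nd, ... descriptor */
            break;
        }
        d = d->next;
    }
    return count;
}

int main(void)
{
    demo_desc_t d[3] = {
        { .length = 256, .next = &d[1] },
        { .length = 256, .next = &d[2] },
        { .length = 128, .next = &d[0] },   /* circular list */
    };
    uint32_t len = 0;
    uint32_t n = demo_collect_eof(&d[2], &d[1], &len);  /* EOF arrives after 2 descriptors */
    printf("EOF covered %u descriptors, %u bytes\n", (unsigned)n, (unsigned)len);
    return 0;
}

Running this prints that the example EOF covered two descriptors and 512 bytes, which is the case the single-descriptor assumption in the old code missed.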
@@ -380,7 +380,6 @@ static IRAM_ATTR bool s_adc_dma_intr(adc_digi_context_t *adc_digi_ctx)
         }
 
         ret = xRingbufferSendFromISR(adc_digi_ctx->ringbuf_hdl, finished_buffer, finished_size, &taskAwoken);
-        adc_hal_read_desc_finish (&adc_digi_ctx->hal);
         if (ret == pdFALSE) {
             //ringbuffer overflow
             adc_digi_ctx->ringbuf_overflow_flag = 1;
@@ -61,7 +61,6 @@ static bool s_adc_dma_intr(adc_continuous_ctx_t *adc_digi_ctx);
 
 #if SOC_GDMA_SUPPORTED
 static bool adc_dma_in_suc_eof_callback(gdma_channel_handle_t dma_chan, gdma_event_data_t *event_data, void *user_data);
-static bool adc_dma_descr_err_callback(gdma_channel_handle_t dma_chan, void *user_data);
 #else
 static void adc_dma_intr_handler(void *arg);
 #endif
@@ -175,8 +174,7 @@ esp_err_t adc_continuous_new_handle(const adc_continuous_handle_cfg_t *hdl_confi
     gdma_apply_strategy(adc_ctx->rx_dma_channel, &strategy_config);
 
     gdma_rx_event_callbacks_t cbs = {
-        .on_recv_eof = adc_dma_in_suc_eof_callback,
-        .on_descr_err = adc_dma_descr_err_callback
+        .on_recv_eof = adc_dma_in_suc_eof_callback
     };
     gdma_register_rx_event_callbacks(adc_ctx->rx_dma_channel, &cbs, adc_ctx);
 
@@ -264,12 +262,6 @@ static IRAM_ATTR bool adc_dma_in_suc_eof_callback(gdma_channel_handle_t dma_chan
     return s_adc_dma_intr(user_data);
 }
 
-static bool adc_dma_descr_err_callback(gdma_channel_handle_t dma_chan, void *user_data)
-{
-    ESP_EARLY_LOGE(ADC_TAG, "GDMA descriptor error occurred, probable ADC data loss, CPU load too high?");
-    return false;
-}
-
 #else
 static IRAM_ATTR void adc_dma_intr_handler(void *arg)
 {
@@ -308,7 +300,6 @@ static IRAM_ATTR bool s_adc_dma_intr(adc_continuous_ctx_t *adc_digi_ctx)
         }
 
         ret = xRingbufferSendFromISR(adc_digi_ctx->ringbuf_hdl, finished_buffer, finished_size, &taskAwoken);
-        adc_hal_read_desc_finish (&adc_digi_ctx->hal);
         need_yield |= (taskAwoken == pdTRUE);
 
         if (adc_digi_ctx->cbs.on_conv_done) {
@@ -235,6 +235,7 @@ static void adc_hal_digi_dma_link_descriptors(dma_descriptor_t *desc, uint8_t *d
     HAL_ASSERT(((uint32_t)data_buf % 4) == 0);
    HAL_ASSERT((per_eof_size % 4) == 0);
     uint32_t n = 0;
+    dma_descriptor_t *desc_head = desc;
 
     while (eof_num--) {
         uint32_t eof_size = per_eof_size;
@@ -258,7 +259,7 @@ static void adc_hal_digi_dma_link_descriptors(dma_descriptor_t *desc, uint8_t *d
             n++;
         }
     }
-    desc[n-1].next = desc;
+    desc[n-1].next = desc_head;
 }
 
 void adc_hal_digi_start(adc_hal_dma_ctx_t *hal, uint8_t *data_buf)
@@ -313,15 +314,24 @@ adc_hal_dma_desc_status_t adc_hal_get_reading_result(adc_hal_dma_ctx_t *hal, con
 
     //Find the eof list start
     eof_desc = eof_desc->next;
+    eof_desc->dw0.owner = 1;
     buffer_start = eof_desc->buffer;
     eof_len += eof_desc->dw0.length;
+    if ((intptr_t)eof_desc == eof_desc_addr) {
+        goto valid;
+    }
 
     //Find the eof list end
     for (int i = 1; i < hal->eof_step; i++) {
         eof_desc = eof_desc->next;
+        eof_desc->dw0.owner = 1;
         eof_len += eof_desc->dw0.length;
+        if ((intptr_t)eof_desc == eof_desc_addr) {
+            goto valid;
+        }
     }
 
+valid:
     hal->cur_desc_ptr = eof_desc;
     *buffer = buffer_start;
     *len = eof_len;
@@ -329,11 +339,6 @@ adc_hal_dma_desc_status_t adc_hal_get_reading_result(adc_hal_dma_ctx_t *hal, con
     return ADC_HAL_DMA_DESC_VALID;
 }
 
-void adc_hal_read_desc_finish(adc_hal_dma_ctx_t *hal) {
-    // Allow DMA to re-use descriptor.
-    hal->cur_desc_ptr->dw0.owner = 1;
-}
-
 void adc_hal_digi_clr_intr(adc_hal_dma_ctx_t *hal, uint32_t mask)
 {
     adc_dma_ll_rx_clear_intr(hal->dev, hal->dma_chan, mask);
@@ -173,7 +173,7 @@ bool adc_hal_check_event(adc_hal_dma_ctx_t *hal, uint32_t mask);
 #endif
 
 /**
- * @brief Get the ADC reading result. Call adc_hal_read_desc_finish after using the descriptor.
+ * @brief Get the ADC reading result
  *
  * @param hal Context of the HAL
  * @param eof_desc_addr The last descriptor that is finished by HW. Should be got from DMA
@@ -184,14 +184,6 @@ bool adc_hal_check_event(adc_hal_dma_ctx_t *hal, uint32_t mask);
  */
 adc_hal_dma_desc_status_t adc_hal_get_reading_result(adc_hal_dma_ctx_t *hal, const intptr_t eof_desc_addr, uint8_t **buffer, uint32_t *len);
 
-/**
- * @brief Finishes reading the current descriptor and frees it for repeated usage by DMA.
- *
- * @param hal Context of the HAL
- */
-void adc_hal_read_desc_finish(adc_hal_dma_ctx_t *hal);
-
-
 /**
  * @brief Clear interrupt
  *