Mirror of https://github.com/espressif/esp-idf.git (synced 2025-07-31 11:17:20 +02:00)
fix(i2s): fixed the false buffer concatenation when using preload
Closes https://github.com/espressif/esp-idf/issues/15775
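For context, the scenario this change addresses is preloading TX data around channel enable/disable. A minimal standard-mode sketch is shown below; the pin numbers, sample format, and buffer contents are illustrative assumptions, not part of the commit.

#include <stdint.h>
#include "driver/i2s_std.h"

/* Illustrative pins and format, not part of the commit */
#define EXAMPLE_BCLK_IO   GPIO_NUM_4
#define EXAMPLE_WS_IO     GPIO_NUM_5
#define EXAMPLE_DOUT_IO   GPIO_NUM_18

static int16_t s_clip[1024];    /* assumed to be filled with audio samples elsewhere */

void example_preload_then_enable(void)
{
    i2s_chan_handle_t tx_chan = NULL;
    i2s_chan_config_t chan_cfg = I2S_CHANNEL_DEFAULT_CONFIG(I2S_NUM_AUTO, I2S_ROLE_MASTER);
    ESP_ERROR_CHECK(i2s_new_channel(&chan_cfg, &tx_chan, NULL));

    i2s_std_config_t std_cfg = {
        .clk_cfg  = I2S_STD_CLK_DEFAULT_CONFIG(48000),
        .slot_cfg = I2S_STD_PHILIPS_SLOT_DEFAULT_CONFIG(I2S_DATA_BIT_WIDTH_16BIT, I2S_SLOT_MODE_STEREO),
        .gpio_cfg = {
            .mclk = I2S_GPIO_UNUSED,
            .bclk = EXAMPLE_BCLK_IO,
            .ws   = EXAMPLE_WS_IO,
            .dout = EXAMPLE_DOUT_IO,
            .din  = I2S_GPIO_UNUSED,
        },
    };
    ESP_ERROR_CHECK(i2s_channel_init_std_mode(tx_chan, &std_cfg));

    /* Preload while the channel is still disabled so the first frames
     * clocked out are real data instead of silence */
    size_t loaded = 0;
    ESP_ERROR_CHECK(i2s_channel_preload_data(tx_chan, s_clip, sizeof(s_clip), &loaded));
    ESP_ERROR_CHECK(i2s_channel_enable(tx_chan));

    /* ... stream more data with i2s_channel_write() ... */

    /* Disable, preload the next clip and re-enable: with this fix the second
     * preload starts from the first DMA buffer again instead of being
     * falsely concatenated behind stale state from the previous run */
    ESP_ERROR_CHECK(i2s_channel_disable(tx_chan));
    ESP_ERROR_CHECK(i2s_channel_preload_data(tx_chan, s_clip, sizeof(s_clip), &loaded));
    ESP_ERROR_CHECK(i2s_channel_enable(tx_chan));
}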
@@ -277,7 +277,6 @@ static esp_err_t i2s_register_channel(i2s_controller_t *i2s_obj, i2s_dir_t dir,
     new_chan->callbacks.on_send_q_ovf = NULL;
     new_chan->dma.rw_pos = 0;
     new_chan->dma.curr_ptr = NULL;
-    new_chan->dma.curr_desc = NULL;
     new_chan->start = NULL;
     new_chan->stop = NULL;
     new_chan->reserve_gpio_mask = 0;
@@ -1096,8 +1095,11 @@ esp_err_t i2s_channel_enable(i2s_chan_handle_t handle)
 #endif
     handle->start(handle);
     handle->state = I2S_CHAN_STATE_RUNNING;
-    /* Reset queue */
-    xQueueReset(handle->msg_queue);
+    if (handle->dir == I2S_DIR_RX) {
+        /* RX queue is reset when the channel is enabled
+           In case legacy data received during disable process */
+        xQueueReset(handle->msg_queue);
+    }
     xSemaphoreGive(handle->mutex);
     /* Give the binary semaphore to enable reading / writing task */
     xSemaphoreGive(handle->binary);
@@ -1125,9 +1127,13 @@ esp_err_t i2s_channel_disable(i2s_chan_handle_t handle)
     xSemaphoreTake(handle->binary, portMAX_DELAY);
     /* Reset the descriptor pointer */
     handle->dma.curr_ptr = NULL;
-    handle->dma.curr_desc = NULL;
     handle->dma.rw_pos = 0;
     handle->stop(handle);
+    if (handle->dir == I2S_DIR_TX) {
+        /* TX queue is reset when the channel is disabled
+           In case the queue is wrongly reset after preload the data */
+        xQueueReset(handle->msg_queue);
+    }
 #if CONFIG_PM_ENABLE
     esp_pm_lock_release(handle->pm_lock);
 #endif
@@ -1149,18 +1155,30 @@ esp_err_t i2s_channel_preload_data(i2s_chan_handle_t tx_handle, const void *src,
     uint8_t *data_ptr = (uint8_t *)src;
     size_t remain_bytes = size;
     size_t total_loaded_bytes = 0;
+    esp_err_t ret = ESP_OK;

     xSemaphoreTake(tx_handle->mutex, portMAX_DELAY);

     /* The pre-load data will be loaded from the first descriptor */
-    if (tx_handle->dma.curr_desc == NULL) {
-        tx_handle->dma.curr_desc = tx_handle->dma.desc[0];
+    if (tx_handle->dma.curr_ptr == NULL) {
+        xQueueReset(tx_handle->msg_queue);
+        /* Push the rest of descriptors to the queue */
+        for (int i = 1; i < tx_handle->dma.desc_num; i++) {
+            ESP_GOTO_ON_FALSE(xQueueSend(tx_handle->msg_queue, &(tx_handle->dma.desc[i]->buf), 0) == pdTRUE,
+                              ESP_FAIL, err, TAG, "Failed to push the descriptor to the queue");
+        }
         tx_handle->dma.curr_ptr = (void *)tx_handle->dma.desc[0]->buf;
         tx_handle->dma.rw_pos = 0;
     }

     /* Loop until no bytes in source buff remain or the descriptors are full */
     while (remain_bytes) {
+        if (tx_handle->dma.rw_pos == tx_handle->dma.buf_size) {
+            if (xQueueReceive(tx_handle->msg_queue, &(tx_handle->dma.curr_ptr), 0) == pdFALSE) {
+                break;
+            }
+            tx_handle->dma.rw_pos = 0;
+        }
         size_t bytes_can_load = remain_bytes > (tx_handle->dma.buf_size - tx_handle->dma.rw_pos) ?
                                 (tx_handle->dma.buf_size - tx_handle->dma.rw_pos) : remain_bytes;
         /* When all the descriptors has loaded data, no more bytes can be loaded, break directly */
@@ -1176,25 +1194,13 @@ esp_err_t i2s_channel_preload_data(i2s_chan_handle_t tx_handle, const void *src,
         total_loaded_bytes += bytes_can_load;       // Add to the total loaded bytes
         remain_bytes -= bytes_can_load;             // Update the remaining bytes to be loaded
         tx_handle->dma.rw_pos += bytes_can_load;    // Move forward the dma buffer position
-        /* When the current position reach the end of the dma buffer */
-        if (tx_handle->dma.rw_pos == tx_handle->dma.buf_size) {
-            /* If the next descriptor is not the first descriptor, keep load to the first descriptor
-             * otherwise all descriptor has been loaded, break directly, the dma buffer position
-             * will remain at the end of the last dma buffer */
-            if (STAILQ_NEXT((lldesc_t *)tx_handle->dma.curr_desc, qe) != tx_handle->dma.desc[0]) {
-                tx_handle->dma.curr_desc = STAILQ_NEXT((lldesc_t *)tx_handle->dma.curr_desc, qe);
-                tx_handle->dma.curr_ptr = (void *)(((lldesc_t *)tx_handle->dma.curr_desc)->buf);
-                tx_handle->dma.rw_pos = 0;
-            } else {
-                break;
-            }
-        }
     }
     *bytes_loaded = total_loaded_bytes;

+err:
     xSemaphoreGive(tx_handle->mutex);

-    return ESP_OK;
+    return ret;
 }

 esp_err_t i2s_channel_write(i2s_chan_handle_t handle, const void *src, size_t size, size_t *bytes_written, uint32_t timeout_ms)
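With the rework above, each preload call fills the current DMA buffer and then pulls the next free buffer from the TX message queue, so one call can load at most the total size of the DMA buffers; the remainder is reported through bytes_loaded. A hypothetical caller-side helper (names assumed, not from the commit) that keeps preloading until everything fits or the buffers are full:

#include <stddef.h>
#include <stdint.h>
#include "driver/i2s_common.h"

/* Hypothetical helper, assuming an initialized but not yet enabled TX channel */
static size_t preload_all(i2s_chan_handle_t tx_chan, const uint8_t *data, size_t size)
{
    size_t total = 0;
    while (total < size) {
        size_t loaded = 0;
        if (i2s_channel_preload_data(tx_chan, data + total, size - total, &loaded) != ESP_OK) {
            break;      /* e.g. ESP_FAIL if pushing a descriptor to the queue failed */
        }
        total += loaded;
        if (loaded == 0) {
            break;      /* every DMA buffer is full; send the rest via i2s_channel_write() after enabling */
        }
    }
    return total;
}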
@@ -102,7 +102,6 @@ typedef struct {
     bool            auto_clear_before_cb;  /*!< Set to auto clear DMA TX descriptor before callback, i2s will always send zero automatically if no data to send */
     uint32_t        rw_pos;                /*!< reading/writing pointer position */
     void            *curr_ptr;             /*!< Pointer to current dma buffer */
-    void            *curr_desc;            /*!< Pointer to current dma descriptor used for pre-load */
     lldesc_t        **desc;                /*!< dma descriptor array */
     uint8_t         **bufs;                /*!< dma buffer array */
 } i2s_dma_t;
@@ -186,6 +186,7 @@ esp_err_t i2s_channel_disable(i2s_chan_handle_t handle);
  * @param[out] bytes_loaded    The bytes that successfully been loaded into the TX DMA buffer
  * @return
  *      - ESP_OK                Load data successful
+ *      - ESP_FAIL              Failed to push the message queue
  *      - ESP_ERR_INVALID_ARG   NULL pointer or not TX direction
  *      - ESP_ERR_INVALID_STATE This channel has not stated
  */
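A short sketch of how a caller might branch on the documented return values, including the new ESP_FAIL case (the handle and buffer names are assumptions):

size_t loaded = 0;
esp_err_t err = i2s_channel_preload_data(tx_chan, clip, clip_len, &loaded);
if (err == ESP_ERR_INVALID_STATE) {
    /* Channel is not in a state that allows preloading (preload before enabling) */
} else if (err == ESP_FAIL) {
    /* Pushing a DMA descriptor into the message queue failed */
} else if (err == ESP_OK && loaded < clip_len) {
    /* DMA buffers are full; the remaining clip_len - loaded bytes
     * have to go through i2s_channel_write() once the channel is enabled */
}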