mirror of https://git.kernel.org/pub/scm/linux/kernel/git/stable/linux.git
serial: stm32: group dma pause/resume error handling into single function
Create a new function, stm32_usart_dma_pause_resume, that calls dmaengine_pause/resume and, in case of error, terminates the DMA transaction.

Two other functions are created to facilitate the use of stm32_usart_dma_pause_resume: stm32_usart_tx_dma_pause and stm32_usart_tx_dma_resume. Equivalent functions for RX will be added in a future patch.

Signed-off-by: Valentin Caron <valentin.caron@foss.st.com>
Link: https://lore.kernel.org/r/20230808161906.178996-5-valentin.caron@foss.st.com
Signed-off-by: Greg Kroah-Hartman <gregkh@linuxfoundation.org>
parent 00d1f9c6af
commit 7f28bcea82
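The idea of the patch, in short: instead of open-coding "pause or resume, then log and terminate on failure" at every call site, a single helper checks that the transfer is started and in the expected state, invokes a pause-or-resume callback, and terminates the transaction if that callback fails; thin TX wrappers simply pass in the right callbacks. The sketch below is a standalone, userspace illustration of that control flow only, not the driver code in the diff that follows — all names here (mock_chan, mock_pause, tx_dma_pause, ...) are hypothetical stand-ins for the dmaengine API and the stm32 helpers.

/*
 * Minimal sketch of the "group pause/resume error handling" pattern.
 * Everything dmaengine-related is mocked; only the control flow is real.
 */
#include <stdbool.h>
#include <stdio.h>

enum dma_status { DMA_IN_PROGRESS, DMA_PAUSED, DMA_COMPLETE };

struct mock_chan {
        enum dma_status status;
        bool busy;
};

/* Mocked channel operations (stand-ins for dmaengine_pause/resume/terminate). */
static int mock_pause(struct mock_chan *c)  { c->status = DMA_PAUSED; return 0; }
static int mock_resume(struct mock_chan *c) { c->status = DMA_IN_PROGRESS; return 0; }
static void mock_terminate(struct mock_chan *c) { c->busy = false; }
static bool mock_started(struct mock_chan *c)   { return c->busy; }

/* One place that handles "pause or resume failed": log, then terminate. */
static int dma_pause_resume(struct mock_chan *chan,
                            enum dma_status expected_status,
                            int pause_or_resume(struct mock_chan *),
                            bool started(struct mock_chan *),
                            void terminate(struct mock_chan *))
{
        int ret;

        if (!started(chan))
                return -1;              /* the driver returns -EPERM here */
        if (chan->status != expected_status)
                return -2;              /* the driver returns -EAGAIN here */

        ret = pause_or_resume(chan);
        if (ret) {
                fprintf(stderr, "DMA failed with error code: %d\n", ret);
                terminate(chan);        /* error handling lives in one place */
        }
        return ret;
}

/* Thin wrappers, analogous to stm32_usart_tx_dma_pause()/_resume(). */
static int tx_dma_pause(struct mock_chan *c)
{
        return dma_pause_resume(c, DMA_IN_PROGRESS, mock_pause,
                                mock_started, mock_terminate);
}

static int tx_dma_resume(struct mock_chan *c)
{
        return dma_pause_resume(c, DMA_PAUSED, mock_resume,
                                mock_started, mock_terminate);
}

int main(void)
{
        struct mock_chan tx = { .status = DMA_IN_PROGRESS, .busy = true };

        printf("pause:  %d\n", tx_dma_pause(&tx));      /* 0: now paused */
        printf("resume: %d\n", tx_dma_resume(&tx));     /* 0: running again */
        printf("resume: %d\n", tx_dma_resume(&tx));     /* -2: not paused */
        return 0;
}

Run as a normal C program, this prints 0 for a valid pause and resume and -2 (the stand-in for -EAGAIN) when a resume is attempted while the channel is not paused — mirroring how the driver's transmit path below tolerates -EAGAIN from stm32_usart_tx_dma_resume().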
@@ -290,14 +290,40 @@ static int stm32_usart_init_rs485(struct uart_port *port,
}

static bool stm32_usart_rx_dma_started(struct stm32_port *stm32_port)
{
        return stm32_port->rx_ch ? stm32_port->rx_dma_busy : false;
}

static void stm32_usart_rx_dma_terminate(struct stm32_port *stm32_port)
{
        dmaengine_terminate_async(stm32_port->rx_ch);
        stm32_port->rx_dma_busy = false;
}

static int stm32_usart_dma_pause_resume(struct stm32_port *stm32_port,
                                        struct dma_chan *chan,
                                        enum dma_status expected_status,
                                        int dmaengine_pause_or_resume(struct dma_chan *),
                                        bool stm32_usart_xx_dma_started(struct stm32_port *),
                                        void stm32_usart_xx_dma_terminate(struct stm32_port *))
{
        struct uart_port *port = &stm32_port->port;
        const struct stm32_usart_offsets *ofs = &stm32_port->info->ofs;
        enum dma_status dma_status;
        int ret;

        if (!stm32_port->rx_ch)
                return false;
        if (!stm32_usart_xx_dma_started(stm32_port))
                return -EPERM;

        return !!(readl_relaxed(port->membase + ofs->cr3) & USART_CR3_DMAR);
        dma_status = dmaengine_tx_status(chan, chan->cookie, NULL);
        if (dma_status != expected_status)
                return -EAGAIN;

        ret = dmaengine_pause_or_resume(chan);
        if (ret) {
                dev_err(port->dev, "DMA failed with error code: %d\n", ret);
                stm32_usart_xx_dma_terminate(stm32_port);
        }
        return ret;
}

/* Return true when data is pending (in pio mode), and false when no data is pending. */
@@ -475,7 +501,7 @@ static unsigned int stm32_usart_receive_chars(struct uart_port *port, bool force
                }
        } else {
                /* Disable RX DMA */
                dmaengine_terminate_async(stm32_port->rx_ch);
                stm32_usart_rx_dma_terminate(stm32_port);
                stm32_usart_clr_bits(port, ofs->cr3, USART_CR3_DMAR);
                /* Fall back to interrupt mode */
                dev_dbg(port->dev, "DMA error, fallback to irq mode\n");
@@ -506,6 +532,22 @@ static bool stm32_usart_tx_dma_started(struct stm32_port *stm32_port)
        return stm32_port->tx_dma_busy;
}

static int stm32_usart_tx_dma_pause(struct stm32_port *stm32_port)
{
        return stm32_usart_dma_pause_resume(stm32_port, stm32_port->tx_ch,
                                            DMA_IN_PROGRESS, dmaengine_pause,
                                            stm32_usart_tx_dma_started,
                                            stm32_usart_tx_dma_terminate);
}

static int stm32_usart_tx_dma_resume(struct stm32_port *stm32_port)
{
        return stm32_usart_dma_pause_resume(stm32_port, stm32_port->tx_ch,
                                            DMA_PAUSED, dmaengine_resume,
                                            stm32_usart_tx_dma_started,
                                            stm32_usart_tx_dma_terminate);
}

static void stm32_usart_tx_dma_complete(void *arg)
{
        struct uart_port *port = arg;
@@ -606,13 +648,9 @@ static void stm32_usart_transmit_chars_dma(struct uart_port *port)
        int ret;

        if (stm32_usart_tx_dma_started(stm32port)) {
                if (dmaengine_tx_status(stm32port->tx_ch,
                                        stm32port->tx_ch->cookie,
                                        NULL) == DMA_PAUSED) {
                        ret = dmaengine_resume(stm32port->tx_ch);
                        if (ret < 0)
                                goto dma_err;
                }
                ret = stm32_usart_tx_dma_resume(stm32port);
                if (ret < 0 && ret != -EAGAIN)
                        goto fallback_err;
                return;
        }

@@ -658,8 +696,12 @@ static void stm32_usart_transmit_chars_dma(struct uart_port *port)

        /* Push current DMA TX transaction in the pending queue */
        /* DMA no yet started, safe to free resources */
        if (dma_submit_error(dmaengine_submit(desc)))
                goto dma_err;
        ret = dma_submit_error(dmaengine_submit(desc));
        if (ret) {
                dev_err(port->dev, "DMA failed with error code: %d\n", ret);
                stm32_usart_tx_dma_terminate(stm32port);
                goto fallback_err;
        }

        /* Issue pending DMA TX requests */
        dma_async_issue_pending(stm32port->tx_ch);
@@ -668,10 +710,6 @@ static void stm32_usart_transmit_chars_dma(struct uart_port *port)

        return;

dma_err:
        dev_err(port->dev, "DMA failed with error code: %d\n", ret);
        stm32_usart_tx_dma_terminate(stm32port);

fallback_err:
        stm32_usart_transmit_chars_pio(port);
}
@@ -693,16 +731,9 @@ static void stm32_usart_transmit_chars(struct uart_port *port)
        }

        if (port->x_char) {
                if (stm32_usart_tx_dma_started(stm32_port) &&
                    dmaengine_tx_status(stm32_port->tx_ch,
                                        stm32_port->tx_ch->cookie,
                                        NULL) == DMA_IN_PROGRESS) {
                        ret = dmaengine_pause(stm32_port->tx_ch);
                        if (ret < 0) {
                                dev_err(port->dev, "DMA failed with error code: %d\n", ret);
                                stm32_usart_tx_dma_terminate(stm32_port);
                        }
                }
                /* dma terminate may have been called in case of dma pause failure */
                stm32_usart_tx_dma_pause(stm32_port);

                /* Check that TDR is empty before filling FIFO */
                ret =
                readl_relaxed_poll_timeout_atomic(port->membase + ofs->isr,
@@ -716,13 +747,8 @@ static void stm32_usart_transmit_chars(struct uart_port *port)
                port->x_char = 0;
                port->icount.tx++;

                if (stm32_usart_tx_dma_started(stm32_port)) {
                        ret = dmaengine_resume(stm32_port->tx_ch);
                        if (ret < 0) {
                                dev_err(port->dev, "DMA failed with error code: %d\n", ret);
                                stm32_usart_tx_dma_terminate(stm32_port);
                        }
                }
                /* dma terminate may have been called in case of dma resume failure */
                stm32_usart_tx_dma_resume(stm32_port);
                return;
        }

@@ -855,16 +881,11 @@ static void stm32_usart_disable_ms(struct uart_port *port)
static void stm32_usart_stop_tx(struct uart_port *port)
{
        struct stm32_port *stm32_port = to_stm32_port(port);
        int ret;

        stm32_usart_tx_interrupt_disable(port);
        if (stm32_usart_tx_dma_started(stm32_port)) {
                ret = dmaengine_pause(stm32_port->tx_ch);
                if (ret < 0) {
                        dev_err(port->dev, "DMA failed with error code: %d\n", ret);
                        stm32_usart_tx_dma_terminate(stm32_port);
                }
        }

        /* dma terminate may have been called in case of dma pause failure */
        stm32_usart_tx_dma_pause(stm32_port);

        stm32_usart_rs485_rts_disable(port);
}
@@ -965,8 +986,22 @@ static int stm32_usart_start_rx_dma_cyclic(struct uart_port *port)
        struct stm32_port *stm32_port = to_stm32_port(port);
        const struct stm32_usart_offsets *ofs = &stm32_port->info->ofs;
        struct dma_async_tx_descriptor *desc;
        enum dma_status rx_dma_status;
        int ret;

        if (stm32_port->rx_dma_busy) {
                rx_dma_status = dmaengine_tx_status(stm32_port->rx_ch,
                                                    stm32_port->rx_ch->cookie,
                                                    NULL);
                if (rx_dma_status == DMA_IN_PROGRESS)
                        return 0;

                dev_err(port->dev, "DMA failed : status error.\n");
                stm32_usart_rx_dma_terminate(stm32_port);
        }

        stm32_port->rx_dma_busy = true;

        stm32_port->last_res = RX_BUF_L;
        /* Prepare a DMA cyclic transaction */
        desc = dmaengine_prep_dma_cyclic(stm32_port->rx_ch,
@@ -976,6 +1011,7 @@ static int stm32_usart_start_rx_dma_cyclic(struct uart_port *port)
                                         DMA_PREP_INTERRUPT);
        if (!desc) {
                dev_err(port->dev, "rx dma prep cyclic failed\n");
                stm32_port->rx_dma_busy = false;
                return -ENODEV;
        }

@@ -986,6 +1022,7 @@ static int stm32_usart_start_rx_dma_cyclic(struct uart_port *port)
        ret = dma_submit_error(dmaengine_submit(desc));
        if (ret) {
                dmaengine_terminate_sync(stm32_port->rx_ch);
                stm32_port->rx_dma_busy = false;
                return ret;
        }

@@ -1074,7 +1111,7 @@ static void stm32_usart_shutdown(struct uart_port *port)

        /* Disable RX DMA. */
        if (stm32_port->rx_ch)
                dmaengine_terminate_async(stm32_port->rx_ch);
        stm32_usart_rx_dma_terminate(stm32_port);

        /* flush RX & TX FIFO */
        if (ofs->rqr != UNDEF_REG)
@@ -1988,7 +2025,7 @@ static int __maybe_unused stm32_usart_serial_en_wakeup(struct uart_port *port,
                stm32_usart_clr_bits(port, ofs->cr3, USART_CR3_DMAR);
                /* Poll data from DMA RX buffer if any */
                size = stm32_usart_receive_chars(port, true);
                dmaengine_terminate_async(stm32_port->rx_ch);
                stm32_usart_rx_dma_terminate(stm32_port);
                uart_unlock_and_check_sysrq_irqrestore(port, flags);
                if (size)
                        tty_flip_buffer_push(tport);
@@ -199,6 +199,7 @@ struct stm32_port {
        u32 cr3_irq;            /* USART_CR3_RXFTIE */
        int last_res;
        bool tx_dma_busy;       /* dma tx transaction in progress */
        bool rx_dma_busy;       /* dma rx transaction in progress */
        bool throttled;         /* port throttled */
        bool hw_flow_control;
        bool swap;              /* swap RX & TX pins */