Diffstat (limited to 'drivers/tty')
-rw-r--r--  drivers/tty/serial/sirfsoc_uart.c | 195
-rw-r--r--  drivers/tty/serial/sirfsoc_uart.h |   5
2 files changed, 62 insertions, 138 deletions
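
The patch replaces the driver's private DMA channel-number plumbing (rx_dma_no/tx_dma_no, IS_DMA_CHAN_VALID() and the sirfsoc_dma_filter_id filter) with generic dmaengine slave-channel lookup by name. A minimal sketch of the new request/configure pattern follows; the helper name sirfsoc_uart_sketch_request_dma is illustrative only (the patch open-codes the equivalent in sirfsoc_uart_probe()), while the "rx"/"tx" names and maxburst values are taken from the diff below.

#include <linux/dmaengine.h>

/* Sketch only: request the UART's DMA channels by name instead of by a
 * hard-coded channel number.  A missing channel is not an error; the
 * driver simply stays in PIO mode for that direction. */
static void sirfsoc_uart_sketch_request_dma(struct sirfsoc_uart_port *sirfport)
{
	struct device *dev = sirfport->port.dev;
	struct dma_slave_config rx_cfg = { .src_maxburst = 2 };
	struct dma_slave_config tx_cfg = { .dst_maxburst = 2 };

	sirfport->rx_dma_chan = dma_request_slave_channel(dev, "rx");
	if (sirfport->rx_dma_chan)
		dmaengine_slave_config(sirfport->rx_dma_chan, &rx_cfg);

	sirfport->tx_dma_chan = dma_request_slave_channel(dev, "tx");
	if (sirfport->tx_dma_chan)
		dmaengine_slave_config(sirfport->tx_dma_chan, &tx_cfg);
}

With the channels requested this way, every DMA-versus-PIO decision in the driver reduces to a NULL check on the channel pointer, which is what the IS_DMA_CHAN_VALID() conversions in the hunks below amount to.
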
diff --git a/drivers/tty/serial/sirfsoc_uart.c b/drivers/tty/serial/sirfsoc_uart.c
index b7bfe24d4ebc..68b0fd4b9a6a 100644
--- a/drivers/tty/serial/sirfsoc_uart.c
+++ b/drivers/tty/serial/sirfsoc_uart.c
@@ -24,7 +24,6 @@
 #include <linux/dmaengine.h>
 #include <linux/dma-direction.h>
 #include <linux/dma-mapping.h>
-#include <linux/sirfsoc_dma.h>
 #include <asm/irq.h>
 #include <asm/mach/irq.h>
 
@@ -173,7 +172,7 @@ static void sirfsoc_uart_stop_tx(struct uart_port *port)
 	struct sirfsoc_register *ureg = &sirfport->uart_reg->uart_reg;
 	struct sirfsoc_int_en *uint_en = &sirfport->uart_reg->uart_int_en;
 
-	if (IS_DMA_CHAN_VALID(sirfport->tx_dma_no)) {
+	if (sirfport->tx_dma_chan) {
 		if (sirfport->tx_dma_state == TX_DMA_RUNNING) {
 			dmaengine_pause(sirfport->tx_dma_chan);
 			sirfport->tx_dma_state = TX_DMA_PAUSE;
@@ -288,7 +287,7 @@ static void sirfsoc_uart_start_tx(struct uart_port *port)
 	struct sirfsoc_uart_port *sirfport = to_sirfport(port);
 	struct sirfsoc_register *ureg = &sirfport->uart_reg->uart_reg;
 	struct sirfsoc_int_en *uint_en = &sirfport->uart_reg->uart_int_en;
-	if (IS_DMA_CHAN_VALID(sirfport->tx_dma_no))
+	if (sirfport->tx_dma_chan)
 		sirfsoc_uart_tx_with_dma(sirfport);
 	else {
 		sirfsoc_uart_pio_tx_chars(sirfport, 1);
@@ -310,7 +309,7 @@ static void sirfsoc_uart_stop_rx(struct uart_port *port)
 	struct sirfsoc_int_en *uint_en = &sirfport->uart_reg->uart_int_en;
 
 	wr_regl(port, ureg->sirfsoc_rx_fifo_op, 0);
-	if (IS_DMA_CHAN_VALID(sirfport->rx_dma_no)) {
+	if (sirfport->rx_dma_chan) {
 		if (!sirfport->is_marco)
 			wr_regl(port, ureg->sirfsoc_int_en_reg,
 				rd_regl(port, ureg->sirfsoc_int_en_reg) &
@@ -675,7 +674,7 @@ recv_char:
 		uart_handle_cts_change(port, cts_status);
 		wake_up_interruptible(&state->port.delta_msr_wait);
 	}
-	if (IS_DMA_CHAN_VALID(sirfport->rx_dma_no)) {
+	if (sirfport->rx_dma_chan) {
 		if (intr_status & uint_st->sirfsoc_rx_timeout)
 			sirfsoc_uart_handle_rx_tmo(sirfport);
 		if (intr_status & uint_st->sirfsoc_rx_done)
@@ -686,7 +685,7 @@ recv_char:
 				SIRFSOC_UART_IO_RX_MAX_CNT);
 	}
 	if (intr_status & uint_st->sirfsoc_txfifo_empty) {
-		if (IS_DMA_CHAN_VALID(sirfport->tx_dma_no))
+		if (sirfport->tx_dma_chan)
 			sirfsoc_uart_tx_with_dma(sirfport);
 		else {
 			if (uart_circ_empty(xmit) || uart_tx_stopped(port)) {
@@ -778,7 +777,7 @@ static void sirfsoc_uart_start_rx(struct uart_port *port)
 	wr_regl(port, ureg->sirfsoc_rx_fifo_op, SIRFUART_FIFO_RESET);
 	wr_regl(port, ureg->sirfsoc_rx_fifo_op, 0);
 	wr_regl(port, ureg->sirfsoc_rx_fifo_op, SIRFUART_FIFO_START);
-	if (IS_DMA_CHAN_VALID(sirfport->rx_dma_no))
+	if (sirfport->rx_dma_chan)
 		sirfsoc_uart_start_next_rx_dma(port);
 	else {
 		if (!sirfport->is_marco)
@@ -1014,11 +1013,11 @@ static void sirfsoc_uart_set_termios(struct uart_port *port,
 			(sample_div_reg & SIRFSOC_USP_ASYNC_DIV2_MASK) <<
 			SIRFSOC_USP_ASYNC_DIV2_OFFSET);
 	}
-	if (IS_DMA_CHAN_VALID(sirfport->tx_dma_no))
+	if (sirfport->tx_dma_chan)
 		wr_regl(port, ureg->sirfsoc_tx_dma_io_ctrl, SIRFUART_DMA_MODE);
 	else
 		wr_regl(port, ureg->sirfsoc_tx_dma_io_ctrl, SIRFUART_IO_MODE);
-	if (IS_DMA_CHAN_VALID(sirfport->rx_dma_no))
+	if (sirfport->rx_dma_chan)
 		wr_regl(port, ureg->sirfsoc_rx_dma_io_ctrl, SIRFUART_DMA_MODE);
 	else
 		wr_regl(port, ureg->sirfsoc_rx_dma_io_ctrl, SIRFUART_IO_MODE);
@@ -1049,93 +1048,6 @@ static void sirfsoc_uart_pm(struct uart_port *port, unsigned int state,
 	clk_disable_unprepare(sirfport->clk);
 }
 
-static unsigned int sirfsoc_uart_init_tx_dma(struct uart_port *port)
-{
-	struct sirfsoc_uart_port *sirfport = to_sirfport(port);
-	dma_cap_mask_t dma_mask;
-	struct dma_slave_config tx_slv_cfg = {
-		.dst_maxburst = 2,
-	};
-
-	dma_cap_zero(dma_mask);
-	dma_cap_set(DMA_SLAVE, dma_mask);
-	sirfport->tx_dma_chan = dma_request_channel(dma_mask,
-		(dma_filter_fn)sirfsoc_dma_filter_id,
-		(void *)sirfport->tx_dma_no);
-	if (!sirfport->tx_dma_chan) {
-		dev_err(port->dev, "Uart Request Dma Channel Fail %d\n",
-			sirfport->tx_dma_no);
-		return -EPROBE_DEFER;
-	}
-	dmaengine_slave_config(sirfport->tx_dma_chan, &tx_slv_cfg);
-
-	return 0;
-}
-
-static unsigned int sirfsoc_uart_init_rx_dma(struct uart_port *port)
-{
-	struct sirfsoc_uart_port *sirfport = to_sirfport(port);
-	dma_cap_mask_t dma_mask;
-	int ret;
-	int i, j;
-	struct dma_slave_config slv_cfg = {
-		.src_maxburst = 2,
-	};
-
-	dma_cap_zero(dma_mask);
-	dma_cap_set(DMA_SLAVE, dma_mask);
-	sirfport->rx_dma_chan = dma_request_channel(dma_mask,
-		(dma_filter_fn)sirfsoc_dma_filter_id,
-		(void *)sirfport->rx_dma_no);
-	if (!sirfport->rx_dma_chan) {
-		dev_err(port->dev, "Uart Request Dma Channel Fail %d\n",
-			sirfport->rx_dma_no);
-		ret = -EPROBE_DEFER;
-		goto request_err;
-	}
-	for (i = 0; i < SIRFSOC_RX_LOOP_BUF_CNT; i++) {
-		sirfport->rx_dma_items[i].xmit.buf =
-			dma_alloc_coherent(port->dev, SIRFSOC_RX_DMA_BUF_SIZE,
-			&sirfport->rx_dma_items[i].dma_addr, GFP_KERNEL);
-		if (!sirfport->rx_dma_items[i].xmit.buf) {
-			dev_err(port->dev, "Uart alloc bufa failed\n");
-			ret = -ENOMEM;
-			goto alloc_coherent_err;
-		}
-		sirfport->rx_dma_items[i].xmit.head =
-			sirfport->rx_dma_items[i].xmit.tail = 0;
-	}
-	dmaengine_slave_config(sirfport->rx_dma_chan, &slv_cfg);
-
-	return 0;
-alloc_coherent_err:
-	for (j = 0; j < i; j++)
-		dma_free_coherent(port->dev, SIRFSOC_RX_DMA_BUF_SIZE,
-				sirfport->rx_dma_items[j].xmit.buf,
-				sirfport->rx_dma_items[j].dma_addr);
-	dma_release_channel(sirfport->rx_dma_chan);
-request_err:
-	return ret;
-}
-
-static void sirfsoc_uart_uninit_tx_dma(struct sirfsoc_uart_port *sirfport)
-{
-	dmaengine_terminate_all(sirfport->tx_dma_chan);
-	dma_release_channel(sirfport->tx_dma_chan);
-}
-
-static void sirfsoc_uart_uninit_rx_dma(struct sirfsoc_uart_port *sirfport)
-{
-	int i;
-	struct uart_port *port = &sirfport->port;
-	dmaengine_terminate_all(sirfport->rx_dma_chan);
-	dma_release_channel(sirfport->rx_dma_chan);
-	for (i = 0; i < SIRFSOC_RX_LOOP_BUF_CNT; i++)
-		dma_free_coherent(port->dev, SIRFSOC_RX_DMA_BUF_SIZE,
-			sirfport->rx_dma_items[i].xmit.buf,
-			sirfport->rx_dma_items[i].dma_addr);
-}
-
 static int sirfsoc_uart_startup(struct uart_port *port)
 {
 	struct sirfsoc_uart_port *sirfport = to_sirfport(port);
@@ -1174,18 +1086,12 @@ static int sirfsoc_uart_startup(struct uart_port *port)
 	wr_regl(port, ureg->sirfsoc_rx_fifo_op, 0);
 	wr_regl(port, ureg->sirfsoc_tx_fifo_ctrl, SIRFUART_FIFO_THD(port));
 	wr_regl(port, ureg->sirfsoc_rx_fifo_ctrl, SIRFUART_FIFO_THD(port));
-
-	if (IS_DMA_CHAN_VALID(sirfport->rx_dma_no)) {
-		ret = sirfsoc_uart_init_rx_dma(port);
-		if (ret)
-			goto init_rx_err;
+	if (sirfport->rx_dma_chan)
 		wr_regl(port, ureg->sirfsoc_rx_fifo_level_chk,
 			SIRFUART_RX_FIFO_CHK_SC(port->line, 0x4) |
 			SIRFUART_RX_FIFO_CHK_LC(port->line, 0xe) |
 			SIRFUART_RX_FIFO_CHK_HC(port->line, 0x1b));
-	}
-	if (IS_DMA_CHAN_VALID(sirfport->tx_dma_no)) {
-		sirfsoc_uart_init_tx_dma(port);
+	if (sirfport->tx_dma_chan) {
 		sirfport->tx_dma_state = TX_DMA_IDLE;
 		wr_regl(port, ureg->sirfsoc_tx_fifo_level_chk,
 			SIRFUART_TX_FIFO_CHK_SC(port->line, 0x1b) |
@@ -1232,12 +1138,8 @@ static void sirfsoc_uart_shutdown(struct uart_port *port)
 		gpio_set_value(sirfport->rts_gpio, 1);
 		free_irq(gpio_to_irq(sirfport->cts_gpio), sirfport);
 	}
-	if (IS_DMA_CHAN_VALID(sirfport->rx_dma_no))
-		sirfsoc_uart_uninit_rx_dma(sirfport);
-	if (IS_DMA_CHAN_VALID(sirfport->tx_dma_no)) {
-		sirfsoc_uart_uninit_tx_dma(sirfport);
+	if (sirfport->tx_dma_chan)
 		sirfport->tx_dma_state = TX_DMA_IDLE;
-	}
 }
 
 static const char *sirfsoc_uart_type(struct uart_port *port)
@@ -1313,8 +1215,8 @@ sirfsoc_uart_console_setup(struct console *co, char *options)
 	port->cons = co;
 
 	/* default console tx/rx transfer using io mode */
-	sirfport->rx_dma_no = UNVALID_DMA_CHAN;
-	sirfport->tx_dma_no = UNVALID_DMA_CHAN;
+	sirfport->rx_dma_chan = NULL;
+	sirfport->tx_dma_chan = NULL;
 	return uart_set_options(port, co, baud, parity, bits, flow);
 }
 
@@ -1382,6 +1284,13 @@ static int sirfsoc_uart_probe(struct platform_device *pdev)
 	struct uart_port *port;
 	struct resource *res;
 	int ret;
+	int i, j;
+	struct dma_slave_config slv_cfg = {
+		.src_maxburst = 2,
+	};
+	struct dma_slave_config tx_slv_cfg = {
+		.dst_maxburst = 2,
+	};
 	const struct of_device_id *match;
 
 	match = of_match_node(sirfsoc_uart_ids, pdev->dev.of_node);
@@ -1402,27 +1311,10 @@ static int sirfsoc_uart_probe(struct platform_device *pdev)
 
 	sirfport->hw_flow_ctrl = of_property_read_bool(pdev->dev.of_node,
 		"sirf,uart-has-rtscts");
-	if (of_device_is_compatible(pdev->dev.of_node, "sirf,prima2-uart")) {
+	if (of_device_is_compatible(pdev->dev.of_node, "sirf,prima2-uart"))
 		sirfport->uart_reg->uart_type = SIRF_REAL_UART;
-		if (of_property_read_u32(pdev->dev.of_node,
-			"sirf,uart-dma-rx-channel",
-			&sirfport->rx_dma_no))
-			sirfport->rx_dma_no = UNVALID_DMA_CHAN;
-		if (of_property_read_u32(pdev->dev.of_node,
-			"sirf,uart-dma-tx-channel",
-			&sirfport->tx_dma_no))
-			sirfport->tx_dma_no = UNVALID_DMA_CHAN;
-	}
 	if (of_device_is_compatible(pdev->dev.of_node, "sirf,prima2-usp-uart")) {
 		sirfport->uart_reg->uart_type = SIRF_USP_UART;
-		if (of_property_read_u32(pdev->dev.of_node,
-			"sirf,usp-dma-rx-channel",
-			&sirfport->rx_dma_no))
-			sirfport->rx_dma_no = UNVALID_DMA_CHAN;
-		if (of_property_read_u32(pdev->dev.of_node,
-			"sirf,usp-dma-tx-channel",
-			&sirfport->tx_dma_no))
-			sirfport->tx_dma_no = UNVALID_DMA_CHAN;
 		if (!sirfport->hw_flow_ctrl)
 			goto usp_no_flow_control;
 		if (of_find_property(pdev->dev.of_node, "cts-gpios", NULL))
@@ -1515,8 +1407,32 @@ usp_no_flow_control:
 		goto port_err;
 	}
 
-	return 0;
+	sirfport->rx_dma_chan = dma_request_slave_channel(port->dev, "rx");
+	for (i = 0; sirfport->rx_dma_chan && i < SIRFSOC_RX_LOOP_BUF_CNT; i++) {
+		sirfport->rx_dma_items[i].xmit.buf =
+			dma_alloc_coherent(port->dev, SIRFSOC_RX_DMA_BUF_SIZE,
+			&sirfport->rx_dma_items[i].dma_addr, GFP_KERNEL);
+		if (!sirfport->rx_dma_items[i].xmit.buf) {
+			dev_err(port->dev, "Uart alloc bufa failed\n");
+			ret = -ENOMEM;
+			goto alloc_coherent_err;
+		}
+		sirfport->rx_dma_items[i].xmit.head =
+			sirfport->rx_dma_items[i].xmit.tail = 0;
+	}
+	if (sirfport->rx_dma_chan)
+		dmaengine_slave_config(sirfport->rx_dma_chan, &slv_cfg);
+	sirfport->tx_dma_chan = dma_request_slave_channel(port->dev, "tx");
+	if (sirfport->tx_dma_chan)
+		dmaengine_slave_config(sirfport->tx_dma_chan, &tx_slv_cfg);
 
+	return 0;
+alloc_coherent_err:
+	for (j = 0; j < i; j++)
+		dma_free_coherent(port->dev, SIRFSOC_RX_DMA_BUF_SIZE,
+				sirfport->rx_dma_items[j].xmit.buf,
+				sirfport->rx_dma_items[j].dma_addr);
+	dma_release_channel(sirfport->rx_dma_chan);
 port_err:
 	clk_put(sirfport->clk);
 err:
@@ -1529,6 +1445,19 @@ static int sirfsoc_uart_remove(struct platform_device *pdev)
 	struct uart_port *port = &sirfport->port;
 	clk_put(sirfport->clk);
 	uart_remove_one_port(&sirfsoc_uart_drv, port);
+	if (sirfport->rx_dma_chan) {
+		int i;
+		dmaengine_terminate_all(sirfport->rx_dma_chan);
+		dma_release_channel(sirfport->rx_dma_chan);
+		for (i = 0; i < SIRFSOC_RX_LOOP_BUF_CNT; i++)
+			dma_free_coherent(port->dev, SIRFSOC_RX_DMA_BUF_SIZE,
+					sirfport->rx_dma_items[i].xmit.buf,
+					sirfport->rx_dma_items[i].dma_addr);
+	}
+	if (sirfport->tx_dma_chan) {
+		dmaengine_terminate_all(sirfport->tx_dma_chan);
+		dma_release_channel(sirfport->tx_dma_chan);
+	}
 	return 0;
 }
 
diff --git a/drivers/tty/serial/sirfsoc_uart.h b/drivers/tty/serial/sirfsoc_uart.h
index b7d679c0881b..8a6eddad2f3c 100644
--- a/drivers/tty/serial/sirfsoc_uart.h
+++ b/drivers/tty/serial/sirfsoc_uart.h
@@ -392,9 +392,6 @@ struct sirfsoc_uart_register sirfsoc_uart = {
 /* Indicate how many buffers used */
 #define SIRFSOC_RX_LOOP_BUF_CNT 2
 
-/* Indicate if DMA channel valid */
-#define IS_DMA_CHAN_VALID(x) ((x) != -1)
-#define UNVALID_DMA_CHAN -1
 /* For Fast Baud Rate Calculation */
 struct sirfsoc_baudrate_to_regv {
 	unsigned int baud_rate;
@@ -423,8 +420,6 @@ struct sirfsoc_uart_port {
 	/* for SiRFmarco, there are SET/CLR for UART_INT_EN */
 	bool is_marco;
 	struct sirfsoc_uart_register *uart_reg;
-	int rx_dma_no;
-	int tx_dma_no;
 	struct dma_chan *rx_dma_chan;
 	struct dma_chan *tx_dma_chan;
 	dma_addr_t tx_dma_addr;
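
With the RX loop buffers now allocated in probe rather than in startup, the matching teardown moves to remove. For reference, the cleanup that the patch open-codes in sirfsoc_uart_remove() is condensed here into a hypothetical helper; the helper name is illustrative only, and all symbols are the driver's own.

/* Sketch: undo what probe set up - stop outstanding transfers, return the
 * channels, and free the coherent RX loop buffers. */
static void sirfsoc_uart_sketch_release_dma(struct sirfsoc_uart_port *sirfport)
{
	struct uart_port *port = &sirfport->port;
	int i;

	if (sirfport->rx_dma_chan) {
		dmaengine_terminate_all(sirfport->rx_dma_chan);
		dma_release_channel(sirfport->rx_dma_chan);
		for (i = 0; i < SIRFSOC_RX_LOOP_BUF_CNT; i++)
			dma_free_coherent(port->dev, SIRFSOC_RX_DMA_BUF_SIZE,
					sirfport->rx_dma_items[i].xmit.buf,
					sirfport->rx_dma_items[i].dma_addr);
	}
	if (sirfport->tx_dma_chan) {
		dmaengine_terminate_all(sirfport->tx_dma_chan);
		dma_release_channel(sirfport->tx_dma_chan);
	}
}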