aboutsummaryrefslogtreecommitdiffstats
path: root/drivers/tty/serial/atmel_serial.c
diff options
context:
space:
mode:
authorElen Song <elen.song@atmel.com>2013-07-22 04:30:27 -0400
committerGreg Kroah-Hartman <gregkh@linuxfoundation.org>2013-07-29 16:03:29 -0400
commit34df42f59a602241b416001516d53fcfe35072b2 (patch)
tree2e85c121218d9e4344d252a24b5b516f4aa49a75 /drivers/tty/serial/atmel_serial.c
parent08f738be88bb7a0163afd810a19b9cb13c79808f (diff)
serial: at91: add rx dma support
Request a cyclic DMA channel for RX DMA use. A cyclic transfer is used to prevent receive data overrun. We allocate a cyclic DMA cookie after requesting the channel, and then enable the UART timeout interrupt in the startup stage. When data is successfully received, the timeout callback checks the residual bytes and inserts the received data into the TTY framework during the transfer interval. When the current descriptor finishes, the DMA callback also checks the residual bytes and flips the received data. Signed-off-by: Elen Song <elen.song@atmel.com> Signed-off-by: Ludovic Desroches <ludovic.desroches@atmel.com> Signed-off-by: Greg Kroah-Hartman <gregkh@linuxfoundation.org>
Diffstat (limited to 'drivers/tty/serial/atmel_serial.c')
-rw-r--r--drivers/tty/serial/atmel_serial.c224
1 files changed, 220 insertions, 4 deletions
diff --git a/drivers/tty/serial/atmel_serial.c b/drivers/tty/serial/atmel_serial.c
index 13c1d3170119..eaf7dc7795b8 100644
--- a/drivers/tty/serial/atmel_serial.c
+++ b/drivers/tty/serial/atmel_serial.c
@@ -139,6 +139,7 @@ struct atmel_uart_port {
139 u32 backup_imr; /* IMR saved during suspend */ 139 u32 backup_imr; /* IMR saved during suspend */
140 int break_active; /* break being received */ 140 int break_active; /* break being received */
141 141
142 bool use_dma_rx; /* enable DMA receiver */
142 bool use_pdc_rx; /* enable PDC receiver */ 143 bool use_pdc_rx; /* enable PDC receiver */
143 short pdc_rx_idx; /* current PDC RX buffer */ 144 short pdc_rx_idx; /* current PDC RX buffer */
144 struct atmel_dma_buffer pdc_rx[2]; /* PDC receier */ 145 struct atmel_dma_buffer pdc_rx[2]; /* PDC receier */
@@ -148,10 +149,15 @@ struct atmel_uart_port {
148 struct atmel_dma_buffer pdc_tx; /* PDC transmitter */ 149 struct atmel_dma_buffer pdc_tx; /* PDC transmitter */
149 150
150 spinlock_t lock_tx; /* port lock */ 151 spinlock_t lock_tx; /* port lock */
152 spinlock_t lock_rx; /* port lock */
151 struct dma_chan *chan_tx; 153 struct dma_chan *chan_tx;
154 struct dma_chan *chan_rx;
152 struct dma_async_tx_descriptor *desc_tx; 155 struct dma_async_tx_descriptor *desc_tx;
156 struct dma_async_tx_descriptor *desc_rx;
153 dma_cookie_t cookie_tx; 157 dma_cookie_t cookie_tx;
158 dma_cookie_t cookie_rx;
154 struct scatterlist sg_tx; 159 struct scatterlist sg_tx;
160 struct scatterlist sg_rx;
155 struct tasklet_struct tasklet; 161 struct tasklet_struct tasklet;
156 unsigned int irq_status; 162 unsigned int irq_status;
157 unsigned int irq_status_prev; 163 unsigned int irq_status_prev;
@@ -224,6 +230,13 @@ static bool atmel_use_dma_tx(struct uart_port *port)
224 return atmel_port->use_dma_tx; 230 return atmel_port->use_dma_tx;
225} 231}
226 232
233static bool atmel_use_dma_rx(struct uart_port *port)
234{
235 struct atmel_uart_port *atmel_port = to_atmel_uart_port(port);
236
237 return atmel_port->use_dma_rx;
238}
239
227/* Enable or disable the rs485 support */ 240/* Enable or disable the rs485 support */
228void atmel_config_rs485(struct uart_port *port, struct serial_rs485 *rs485conf) 241void atmel_config_rs485(struct uart_port *port, struct serial_rs485 *rs485conf)
229{ 242{
@@ -758,6 +771,184 @@ chan_err:
758 return -EINVAL; 771 return -EINVAL;
759} 772}
760 773
774static void atmel_flip_buffer_rx_dma(struct uart_port *port,
775 char *buf, size_t count)
776{
777 struct atmel_uart_port *atmel_port = to_atmel_uart_port(port);
778 struct tty_port *tport = &port->state->port;
779
780 dma_sync_sg_for_cpu(port->dev,
781 &atmel_port->sg_rx,
782 1,
783 DMA_DEV_TO_MEM);
784
785 tty_insert_flip_string(tport, buf, count);
786
787 dma_sync_sg_for_device(port->dev,
788 &atmel_port->sg_rx,
789 1,
790 DMA_DEV_TO_MEM);
791 /*
792 * Drop the lock here since it might end up calling
793 * uart_start(), which takes the lock.
794 */
795 spin_unlock(&port->lock);
796 tty_flip_buffer_push(tport);
797 spin_lock(&port->lock);
798}
799
800static void atmel_complete_rx_dma(void *arg)
801{
802 struct uart_port *port = arg;
803 struct atmel_uart_port *atmel_port = to_atmel_uart_port(port);
804
805 tasklet_schedule(&atmel_port->tasklet);
806}
807
808static void atmel_release_rx_dma(struct uart_port *port)
809{
810 struct atmel_uart_port *atmel_port = to_atmel_uart_port(port);
811 struct dma_chan *chan = atmel_port->chan_rx;
812
813 if (chan) {
814 dmaengine_terminate_all(chan);
815 dma_release_channel(chan);
816 dma_unmap_sg(port->dev, &atmel_port->sg_rx, 1,
817 DMA_DEV_TO_MEM);
818 }
819
820 atmel_port->desc_rx = NULL;
821 atmel_port->chan_rx = NULL;
822 atmel_port->cookie_rx = -EINVAL;
823}
824
825static void atmel_rx_from_dma(struct uart_port *port)
826{
827 struct atmel_uart_port *atmel_port = to_atmel_uart_port(port);
828 struct circ_buf *ring = &atmel_port->rx_ring;
829 struct dma_chan *chan = atmel_port->chan_rx;
830 struct dma_tx_state state;
831 enum dma_status dmastat;
832 size_t pending, count;
833
834
835 /* Reset the UART timeout early so that we don't miss one */
836 UART_PUT_CR(port, ATMEL_US_STTTO);
837 dmastat = dmaengine_tx_status(chan,
838 atmel_port->cookie_rx,
839 &state);
840 /* Restart a new tasklet if DMA status is error */
841 if (dmastat == DMA_ERROR) {
842 dev_dbg(port->dev, "Get residue error, restart tasklet\n");
843 UART_PUT_IER(port, ATMEL_US_TIMEOUT);
844 tasklet_schedule(&atmel_port->tasklet);
845 return;
846 }
847 /* current transfer size should no larger than dma buffer */
848 pending = sg_dma_len(&atmel_port->sg_rx) - state.residue;
849 BUG_ON(pending > sg_dma_len(&atmel_port->sg_rx));
850
851 /*
852 * This will take the chars we have so far,
853 * ring->head will record the transfer size, only new bytes come
854 * will insert into the framework.
855 */
856 if (pending > ring->head) {
857 count = pending - ring->head;
858
859 atmel_flip_buffer_rx_dma(port, ring->buf + ring->head, count);
860
861 ring->head += count;
862 if (ring->head == sg_dma_len(&atmel_port->sg_rx))
863 ring->head = 0;
864
865 port->icount.rx += count;
866 }
867
868 UART_PUT_IER(port, ATMEL_US_TIMEOUT);
869}
870
871static int atmel_prepare_rx_dma(struct uart_port *port)
872{
873 struct atmel_uart_port *atmel_port = to_atmel_uart_port(port);
874 struct dma_async_tx_descriptor *desc;
875 dma_cap_mask_t mask;
876 struct dma_slave_config config;
877 struct circ_buf *ring;
878 int ret, nent;
879
880 ring = &atmel_port->rx_ring;
881
882 dma_cap_zero(mask);
883 dma_cap_set(DMA_CYCLIC, mask);
884
885 atmel_port->chan_rx = dma_request_slave_channel(port->dev, "rx");
886 if (atmel_port->chan_rx == NULL)
887 goto chan_err;
888 dev_info(port->dev, "using %s for rx DMA transfers\n",
889 dma_chan_name(atmel_port->chan_rx));
890
891 spin_lock_init(&atmel_port->lock_rx);
892 sg_init_table(&atmel_port->sg_rx, 1);
893 /* UART circular rx buffer is an aligned page. */
894 BUG_ON((int)port->state->xmit.buf & ~PAGE_MASK);
895 sg_set_page(&atmel_port->sg_rx,
896 virt_to_page(ring->buf),
897 ATMEL_SERIAL_RINGSIZE,
898 (int)ring->buf & ~PAGE_MASK);
899 nent = dma_map_sg(port->dev,
900 &atmel_port->sg_rx,
901 1,
902 DMA_DEV_TO_MEM);
903
904 if (!nent) {
905 dev_dbg(port->dev, "need to release resource of dma\n");
906 goto chan_err;
907 } else {
908 dev_dbg(port->dev, "%s: mapped %d@%p to %x\n", __func__,
909 sg_dma_len(&atmel_port->sg_rx),
910 ring->buf,
911 sg_dma_address(&atmel_port->sg_rx));
912 }
913
914 /* Configure the slave DMA */
915 memset(&config, 0, sizeof(config));
916 config.direction = DMA_DEV_TO_MEM;
917 config.src_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
918 config.src_addr = port->mapbase + ATMEL_US_RHR;
919
920 ret = dmaengine_device_control(atmel_port->chan_rx,
921 DMA_SLAVE_CONFIG,
922 (unsigned long)&config);
923 if (ret) {
924 dev_err(port->dev, "DMA rx slave configuration failed\n");
925 goto chan_err;
926 }
927 /*
928 * Prepare a cyclic dma transfer, assign 2 descriptors,
929 * each one is half ring buffer size
930 */
931 desc = dmaengine_prep_dma_cyclic(atmel_port->chan_rx,
932 sg_dma_address(&atmel_port->sg_rx),
933 sg_dma_len(&atmel_port->sg_rx),
934 sg_dma_len(&atmel_port->sg_rx)/2,
935 DMA_DEV_TO_MEM,
936 DMA_PREP_INTERRUPT);
937 desc->callback = atmel_complete_rx_dma;
938 desc->callback_param = port;
939 atmel_port->desc_rx = desc;
940 atmel_port->cookie_rx = dmaengine_submit(desc);
941
942 return 0;
943
944chan_err:
945 dev_err(port->dev, "RX channel not available, switch to pio\n");
946 atmel_port->use_dma_rx = 0;
947 if (atmel_port->chan_rx)
948 atmel_release_rx_dma(port);
949 return -EINVAL;
950}
951
761/* 952/*
762 * receive interrupt handler. 953 * receive interrupt handler.
763 */ 954 */
@@ -785,6 +976,13 @@ atmel_handle_receive(struct uart_port *port, unsigned int pending)
785 atmel_pdc_rxerr(port, pending); 976 atmel_pdc_rxerr(port, pending);
786 } 977 }
787 978
979 if (atmel_use_dma_rx(port)) {
980 if (pending & ATMEL_US_TIMEOUT) {
981 UART_PUT_IDR(port, ATMEL_US_TIMEOUT);
982 tasklet_schedule(&atmel_port->tasklet);
983 }
984 }
985
788 /* Interrupt receive */ 986 /* Interrupt receive */
789 if (pending & ATMEL_US_RXRDY) 987 if (pending & ATMEL_US_RXRDY)
790 atmel_rx_chars(port); 988 atmel_rx_chars(port);
@@ -1176,7 +1374,11 @@ static void atmel_set_ops(struct uart_port *port)
1176{ 1374{
1177 struct atmel_uart_port *atmel_port = to_atmel_uart_port(port); 1375 struct atmel_uart_port *atmel_port = to_atmel_uart_port(port);
1178 1376
1179 if (atmel_use_pdc_rx(port)) { 1377 if (atmel_use_dma_rx(port)) {
1378 atmel_port->prepare_rx = &atmel_prepare_rx_dma;
1379 atmel_port->schedule_rx = &atmel_rx_from_dma;
1380 atmel_port->release_rx = &atmel_release_rx_dma;
1381 } else if (atmel_use_pdc_rx(port)) {
1180 atmel_port->prepare_rx = &atmel_prepare_rx_pdc; 1382 atmel_port->prepare_rx = &atmel_prepare_rx_pdc;
1181 atmel_port->schedule_rx = &atmel_rx_from_pdc; 1383 atmel_port->schedule_rx = &atmel_rx_from_pdc;
1182 atmel_port->release_rx = &atmel_release_rx_pdc; 1384 atmel_port->release_rx = &atmel_release_rx_pdc;
@@ -1272,6 +1474,11 @@ static int atmel_startup(struct uart_port *port)
1272 UART_PUT_IER(port, ATMEL_US_ENDRX | ATMEL_US_TIMEOUT); 1474 UART_PUT_IER(port, ATMEL_US_ENDRX | ATMEL_US_TIMEOUT);
1273 /* enable PDC controller */ 1475 /* enable PDC controller */
1274 UART_PUT_PTCR(port, ATMEL_PDC_RXTEN); 1476 UART_PUT_PTCR(port, ATMEL_PDC_RXTEN);
1477 } else if (atmel_use_dma_rx(port)) {
1478 UART_PUT_RTOR(port, PDC_RX_TIMEOUT);
1479 UART_PUT_CR(port, ATMEL_US_STTTO);
1480
1481 UART_PUT_IER(port, ATMEL_US_TIMEOUT);
1275 } else { 1482 } else {
1276 /* enable receive only */ 1483 /* enable receive only */
1277 UART_PUT_IER(port, ATMEL_US_RXRDY); 1484 UART_PUT_IER(port, ATMEL_US_RXRDY);
@@ -1676,10 +1883,18 @@ static void atmel_of_init_port(struct atmel_uart_port *atmel_port,
1676 u32 rs485_delay[2]; 1883 u32 rs485_delay[2];
1677 1884
1678 /* DMA/PDC usage specification */ 1885 /* DMA/PDC usage specification */
1679 if (of_get_property(np, "atmel,use-dma-rx", NULL)) 1886 if (of_get_property(np, "atmel,use-dma-rx", NULL)) {
1680 atmel_port->use_pdc_rx = true; 1887 if (of_get_property(np, "dmas", NULL)) {
1681 else 1888 atmel_port->use_dma_rx = true;
1889 atmel_port->use_pdc_rx = false;
1890 } else {
1891 atmel_port->use_dma_rx = false;
1892 atmel_port->use_pdc_rx = true;
1893 }
1894 } else {
1895 atmel_port->use_dma_rx = false;
1682 atmel_port->use_pdc_rx = false; 1896 atmel_port->use_pdc_rx = false;
1897 }
1683 1898
1684 if (of_get_property(np, "atmel,use-dma-tx", NULL)) { 1899 if (of_get_property(np, "atmel,use-dma-tx", NULL)) {
1685 if (of_get_property(np, "dmas", NULL)) { 1900 if (of_get_property(np, "dmas", NULL)) {
@@ -1726,6 +1941,7 @@ static int atmel_init_port(struct atmel_uart_port *atmel_port,
1726 } else { 1941 } else {
1727 atmel_port->use_pdc_rx = pdata->use_dma_rx; 1942 atmel_port->use_pdc_rx = pdata->use_dma_rx;
1728 atmel_port->use_pdc_tx = pdata->use_dma_tx; 1943 atmel_port->use_pdc_tx = pdata->use_dma_tx;
1944 atmel_port->use_dma_rx = false;
1729 atmel_port->use_dma_tx = false; 1945 atmel_port->use_dma_tx = false;
1730 atmel_port->rs485 = pdata->rs485; 1946 atmel_port->rs485 = pdata->rs485;
1731 } 1947 }