author		Yuan Yao <yao.yuan@freescale.com>	2014-02-17 00:28:07 -0500
committer	Greg Kroah-Hartman <gregkh@linuxfoundation.org>	2014-02-18 15:50:26 -0500
commit		f1cd8c8792ccc7e9520ef8f27fbdb748448ac482 (patch)
tree		817ef690e38a4b9c8e78fbf7c8c2d0364ea12047 /drivers/tty
parent		c957dd494615ccdb36d4892ef9bfd2287502fbfd (diff)
serial: fsl_lpuart: add DMA support
Add DMA support for lpuart. This feature depends on the DMA driver; enable it by specifying both the dmas and dma-names properties in the DTS node.

Signed-off-by: Yuan Yao <yao.yuan@freescale.com>
Acked-by: Arnd Bergmann <arnd@arndb.de>
Acked-by: Mark Rutland <mark.rutland@arm.com>
Signed-off-by: Greg Kroah-Hartman <gregkh@linuxfoundation.org>
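For context, a board DTS node that enables DMA for this driver would look roughly like the sketch below. This is illustrative only: the unit address, interrupt, clock and eDMA specifiers (lpuart0, edma0 and the channel numbers) are assumptions modelled on typical Vybrid device trees, not part of this patch; the driver only requires that channels named "rx" and "tx" be provided.

	/* illustrative values; addresses, interrupts and eDMA specifiers are board-specific */
	lpuart0: serial@40027000 {
		compatible = "fsl,vf610-lpuart";
		reg = <0x40027000 0x1000>;
		interrupts = <0 61 0x00>;
		clocks = <&clks VF610_CLK_UART0>;
		clock-names = "ipg";
		dmas = <&edma0 0 2>, <&edma0 0 3>;
		dma-names = "rx", "tx";
	};

With both properties present, lpuart_startup() requests the "tx" and "rx" channels at open time and falls back to PIO (lpuart_dma_use = false) if either request fails.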
Diffstat (limited to 'drivers/tty')
-rw-r--r--	drivers/tty/serial/fsl_lpuart.c	430
1 file changed, 418 insertions(+), 12 deletions(-)
diff --git a/drivers/tty/serial/fsl_lpuart.c b/drivers/tty/serial/fsl_lpuart.c
index 8978dc9a58b7..c5eb897de9de 100644
--- a/drivers/tty/serial/fsl_lpuart.c
+++ b/drivers/tty/serial/fsl_lpuart.c
@@ -13,14 +13,19 @@
 #define SUPPORT_SYSRQ
 #endif
 
-#include <linux/module.h>
+#include <linux/clk.h>
+#include <linux/console.h>
+#include <linux/dma-mapping.h>
+#include <linux/dmaengine.h>
+#include <linux/dmapool.h>
 #include <linux/io.h>
 #include <linux/irq.h>
-#include <linux/clk.h>
+#include <linux/module.h>
 #include <linux/of.h>
 #include <linux/of_device.h>
-#include <linux/console.h>
+#include <linux/of_dma.h>
 #include <linux/serial_core.h>
+#include <linux/slab.h>
 #include <linux/tty_flip.h>
 
 /* All registers are 8-bit width */
@@ -112,6 +117,10 @@
 #define UARTSFIFO_TXOF		0x02
 #define UARTSFIFO_RXUF		0x01
 
+#define DMA_MAXBURST		16
+#define DMA_MAXBURST_MASK	(DMA_MAXBURST - 1)
+#define FSL_UART_RX_DMA_BUFFER_SIZE	64
+
 #define DRIVER_NAME	"fsl-lpuart"
 #define DEV_NAME	"ttyLP"
 #define UART_NR		6
@@ -121,6 +130,24 @@ struct lpuart_port {
 	struct clk *clk;
 	unsigned int txfifo_size;
 	unsigned int rxfifo_size;
+
+	bool lpuart_dma_use;
+	struct dma_chan *dma_tx_chan;
+	struct dma_chan *dma_rx_chan;
+	struct dma_async_tx_descriptor *dma_tx_desc;
+	struct dma_async_tx_descriptor *dma_rx_desc;
+	dma_addr_t dma_tx_buf_bus;
+	dma_addr_t dma_rx_buf_bus;
+	dma_cookie_t dma_tx_cookie;
+	dma_cookie_t dma_rx_cookie;
+	unsigned char *dma_tx_buf_virt;
+	unsigned char *dma_rx_buf_virt;
+	unsigned int dma_tx_bytes;
+	unsigned int dma_rx_bytes;
+	int dma_tx_in_progress;
+	int dma_rx_in_progress;
+	unsigned int dma_rx_timeout;
+	struct timer_list lpuart_timer;
 };
 
 static struct of_device_id lpuart_dt_ids[] = {
@@ -131,6 +158,10 @@ static struct of_device_id lpuart_dt_ids[] = {
 };
 MODULE_DEVICE_TABLE(of, lpuart_dt_ids);
 
+/* Forward declare this for the dma callbacks*/
+static void lpuart_dma_tx_complete(void *arg);
+static void lpuart_dma_rx_complete(void *arg);
+
 static void lpuart_stop_tx(struct uart_port *port)
 {
 	unsigned char temp;
@@ -152,6 +183,210 @@ static void lpuart_enable_ms(struct uart_port *port)
 {
 }
 
+static void lpuart_copy_rx_to_tty(struct lpuart_port *sport,
+		struct tty_port *tty, int count)
+{
+	int copied;
+
+	sport->port.icount.rx += count;
+
+	if (!tty) {
+		dev_err(sport->port.dev, "No tty port\n");
+		return;
+	}
+
+	dma_sync_single_for_cpu(sport->port.dev, sport->dma_rx_buf_bus,
+			FSL_UART_RX_DMA_BUFFER_SIZE, DMA_FROM_DEVICE);
+	copied = tty_insert_flip_string(tty,
+			((unsigned char *)(sport->dma_rx_buf_virt)), count);
+
+	if (copied != count) {
+		WARN_ON(1);
+		dev_err(sport->port.dev, "RxData copy to tty layer failed\n");
+	}
+
+	dma_sync_single_for_device(sport->port.dev, sport->dma_rx_buf_bus,
+			FSL_UART_RX_DMA_BUFFER_SIZE, DMA_TO_DEVICE);
+}
+
+static void lpuart_pio_tx(struct lpuart_port *sport)
+{
+	struct circ_buf *xmit = &sport->port.state->xmit;
+	unsigned long flags;
+
+	spin_lock_irqsave(&sport->port.lock, flags);
+
+	while (!uart_circ_empty(xmit) &&
+		readb(sport->port.membase + UARTTCFIFO) < sport->txfifo_size) {
+		writeb(xmit->buf[xmit->tail], sport->port.membase + UARTDR);
+		xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1);
+		sport->port.icount.tx++;
+	}
+
+	if (uart_circ_chars_pending(xmit) < WAKEUP_CHARS)
+		uart_write_wakeup(&sport->port);
+
+	if (uart_circ_empty(xmit))
+		writeb(readb(sport->port.membase + UARTCR5) | UARTCR5_TDMAS,
+			sport->port.membase + UARTCR5);
+
+	spin_unlock_irqrestore(&sport->port.lock, flags);
+}
+
+static int lpuart_dma_tx(struct lpuart_port *sport, unsigned long count)
+{
+	struct circ_buf *xmit = &sport->port.state->xmit;
+	dma_addr_t tx_bus_addr;
+
+	dma_sync_single_for_device(sport->port.dev, sport->dma_tx_buf_bus,
+				UART_XMIT_SIZE, DMA_TO_DEVICE);
+	sport->dma_tx_bytes = count & ~(DMA_MAXBURST_MASK);
+	tx_bus_addr = sport->dma_tx_buf_bus + xmit->tail;
+	sport->dma_tx_desc = dmaengine_prep_slave_single(sport->dma_tx_chan,
+					tx_bus_addr, sport->dma_tx_bytes,
+					DMA_MEM_TO_DEV, DMA_PREP_INTERRUPT);
+
+	if (!sport->dma_tx_desc) {
+		dev_err(sport->port.dev, "Not able to get desc for tx\n");
+		return -EIO;
+	}
+
+	sport->dma_tx_desc->callback = lpuart_dma_tx_complete;
+	sport->dma_tx_desc->callback_param = sport;
+	sport->dma_tx_in_progress = 1;
+	sport->dma_tx_cookie = dmaengine_submit(sport->dma_tx_desc);
+	dma_async_issue_pending(sport->dma_tx_chan);
+
+	return 0;
+}
+
+static void lpuart_prepare_tx(struct lpuart_port *sport)
+{
+	struct circ_buf *xmit = &sport->port.state->xmit;
+	unsigned long count = CIRC_CNT_TO_END(xmit->head,
+					xmit->tail, UART_XMIT_SIZE);
+
+	if (!count)
+		return;
+
+	if (count < DMA_MAXBURST)
+		writeb(readb(sport->port.membase + UARTCR5) & ~UARTCR5_TDMAS,
+			sport->port.membase + UARTCR5);
+	else {
+		writeb(readb(sport->port.membase + UARTCR5) | UARTCR5_TDMAS,
+			sport->port.membase + UARTCR5);
+		lpuart_dma_tx(sport, count);
+	}
+}
+
+static void lpuart_dma_tx_complete(void *arg)
+{
+	struct lpuart_port *sport = arg;
+	struct circ_buf *xmit = &sport->port.state->xmit;
+	unsigned long flags;
+
+	async_tx_ack(sport->dma_tx_desc);
+
+	spin_lock_irqsave(&sport->port.lock, flags);
+
+	xmit->tail = (xmit->tail + sport->dma_tx_bytes) & (UART_XMIT_SIZE - 1);
+	sport->dma_tx_in_progress = 0;
+
+	if (uart_circ_chars_pending(xmit) < WAKEUP_CHARS)
+		uart_write_wakeup(&sport->port);
+
+	lpuart_prepare_tx(sport);
+
+	spin_unlock_irqrestore(&sport->port.lock, flags);
+}
+
+static int lpuart_dma_rx(struct lpuart_port *sport)
+{
+	dma_sync_single_for_device(sport->port.dev, sport->dma_rx_buf_bus,
+			FSL_UART_RX_DMA_BUFFER_SIZE, DMA_TO_DEVICE);
+	sport->dma_rx_desc = dmaengine_prep_slave_single(sport->dma_rx_chan,
+			sport->dma_rx_buf_bus, FSL_UART_RX_DMA_BUFFER_SIZE,
+			DMA_DEV_TO_MEM, DMA_PREP_INTERRUPT);
+
+	if (!sport->dma_rx_desc) {
+		dev_err(sport->port.dev, "Not able to get desc for rx\n");
+		return -EIO;
+	}
+
+	sport->dma_rx_desc->callback = lpuart_dma_rx_complete;
+	sport->dma_rx_desc->callback_param = sport;
+	sport->dma_rx_in_progress = 1;
+	sport->dma_rx_cookie = dmaengine_submit(sport->dma_rx_desc);
+	dma_async_issue_pending(sport->dma_rx_chan);
+
+	return 0;
+}
+
+static void lpuart_dma_rx_complete(void *arg)
+{
+	struct lpuart_port *sport = arg;
+	struct tty_port *port = &sport->port.state->port;
+	unsigned long flags;
+
+	async_tx_ack(sport->dma_rx_desc);
+
+	spin_lock_irqsave(&sport->port.lock, flags);
+
+	sport->dma_rx_in_progress = 0;
+	lpuart_copy_rx_to_tty(sport, port, FSL_UART_RX_DMA_BUFFER_SIZE);
+	tty_flip_buffer_push(port);
+	lpuart_dma_rx(sport);
+
+	spin_unlock_irqrestore(&sport->port.lock, flags);
+}
+
+static void lpuart_timer_func(unsigned long data)
+{
+	struct lpuart_port *sport = (struct lpuart_port *)data;
+	struct tty_port *port = &sport->port.state->port;
+	struct dma_tx_state state;
+	unsigned long flags;
+	unsigned char temp;
+	int count;
+
+	del_timer(&sport->lpuart_timer);
+	dmaengine_pause(sport->dma_rx_chan);
+	dmaengine_tx_status(sport->dma_rx_chan, sport->dma_rx_cookie, &state);
+	dmaengine_terminate_all(sport->dma_rx_chan);
+	count = FSL_UART_RX_DMA_BUFFER_SIZE - state.residue;
+	async_tx_ack(sport->dma_rx_desc);
+
+	spin_lock_irqsave(&sport->port.lock, flags);
+
+	sport->dma_rx_in_progress = 0;
+	lpuart_copy_rx_to_tty(sport, port, count);
+	tty_flip_buffer_push(port);
+	temp = readb(sport->port.membase + UARTCR5);
+	writeb(temp & ~UARTCR5_RDMAS, sport->port.membase + UARTCR5);
+
+	spin_unlock_irqrestore(&sport->port.lock, flags);
+}
+
+static inline void lpuart_prepare_rx(struct lpuart_port *sport)
+{
+	unsigned long flags;
+	unsigned char temp;
+
+	spin_lock_irqsave(&sport->port.lock, flags);
+
+	init_timer(&sport->lpuart_timer);
+	sport->lpuart_timer.function = lpuart_timer_func;
+	sport->lpuart_timer.data = (unsigned long)sport;
+	sport->lpuart_timer.expires = jiffies + sport->dma_rx_timeout;
+	add_timer(&sport->lpuart_timer);
+
+	lpuart_dma_rx(sport);
+	temp = readb(sport->port.membase + UARTCR5);
+	writeb(temp | UARTCR5_RDMAS, sport->port.membase + UARTCR5);
+
+	spin_unlock_irqrestore(&sport->port.lock, flags);
+}
+
 static inline void lpuart_transmit_buffer(struct lpuart_port *sport)
 {
 	struct circ_buf *xmit = &sport->port.state->xmit;
@@ -172,14 +407,21 @@ static inline void lpuart_transmit_buffer(struct lpuart_port *sport)
 
 static void lpuart_start_tx(struct uart_port *port)
 {
-	struct lpuart_port *sport = container_of(port, struct lpuart_port, port);
+	struct lpuart_port *sport = container_of(port,
+			struct lpuart_port, port);
+	struct circ_buf *xmit = &sport->port.state->xmit;
 	unsigned char temp;
 
 	temp = readb(port->membase + UARTCR2);
 	writeb(temp | UARTCR2_TIE, port->membase + UARTCR2);
 
-	if (readb(port->membase + UARTSR1) & UARTSR1_TDRE)
-		lpuart_transmit_buffer(sport);
+	if (sport->lpuart_dma_use) {
+		if (!uart_circ_empty(xmit) && !sport->dma_tx_in_progress)
+			lpuart_prepare_tx(sport);
+	} else {
+		if (readb(port->membase + UARTSR1) & UARTSR1_TDRE)
+			lpuart_transmit_buffer(sport);
+	}
 }
 
 static irqreturn_t lpuart_txint(int irq, void *dev_id)
@@ -279,12 +521,19 @@ static irqreturn_t lpuart_int(int irq, void *dev_id)
 
 	sts = readb(sport->port.membase + UARTSR1);
 
-	if (sts & UARTSR1_RDRF)
-		lpuart_rxint(irq, dev_id);
-
+	if (sts & UARTSR1_RDRF) {
+		if (sport->lpuart_dma_use)
+			lpuart_prepare_rx(sport);
+		else
+			lpuart_rxint(irq, dev_id);
+	}
 	if (sts & UARTSR1_TDRE &&
-		!(readb(sport->port.membase + UARTCR5) & UARTCR5_TDMAS))
-		lpuart_txint(irq, dev_id);
+		!(readb(sport->port.membase + UARTCR5) & UARTCR5_TDMAS)) {
+		if (sport->lpuart_dma_use)
+			lpuart_pio_tx(sport);
+		else
+			lpuart_txint(irq, dev_id);
+	}
 
 	return IRQ_HANDLED;
 }
@@ -366,13 +615,156 @@ static void lpuart_setup_watermark(struct lpuart_port *sport)
 	writeb(UARTCFIFO_TXFLUSH | UARTCFIFO_RXFLUSH,
 			sport->port.membase + UARTCFIFO);
 
-	writeb(2, sport->port.membase + UARTTWFIFO);
+	writeb(0, sport->port.membase + UARTTWFIFO);
 	writeb(1, sport->port.membase + UARTRWFIFO);
 
 	/* Restore cr2 */
 	writeb(cr2_saved, sport->port.membase + UARTCR2);
 }
 
+static int lpuart_dma_tx_request(struct uart_port *port)
+{
+	struct lpuart_port *sport = container_of(port,
+			struct lpuart_port, port);
+	struct dma_chan *tx_chan;
+	struct dma_slave_config dma_tx_sconfig;
+	dma_addr_t dma_bus;
+	unsigned char *dma_buf;
+	int ret;
+
+	tx_chan = dma_request_slave_channel(sport->port.dev, "tx");
+
+	if (!tx_chan) {
+		dev_err(sport->port.dev, "Dma tx channel request failed!\n");
+		return -ENODEV;
+	}
+
+	dma_bus = dma_map_single(tx_chan->device->dev,
+				sport->port.state->xmit.buf,
+				UART_XMIT_SIZE, DMA_TO_DEVICE);
+
+	if (dma_mapping_error(tx_chan->device->dev, dma_bus)) {
+		dev_err(sport->port.dev, "dma_map_single tx failed\n");
+		dma_release_channel(tx_chan);
+		return -ENOMEM;
+	}
+
+	dma_buf = sport->port.state->xmit.buf;
+	dma_tx_sconfig.dst_addr = sport->port.mapbase + UARTDR;
+	dma_tx_sconfig.dst_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
+	dma_tx_sconfig.dst_maxburst = DMA_MAXBURST;
+	dma_tx_sconfig.direction = DMA_MEM_TO_DEV;
+	ret = dmaengine_slave_config(tx_chan, &dma_tx_sconfig);
+
+	if (ret < 0) {
+		dev_err(sport->port.dev,
+				"Dma slave config failed, err = %d\n", ret);
+		dma_release_channel(tx_chan);
+		return ret;
+	}
+
+	sport->dma_tx_chan = tx_chan;
+	sport->dma_tx_buf_virt = dma_buf;
+	sport->dma_tx_buf_bus = dma_bus;
+	sport->dma_tx_in_progress = 0;
+
+	return 0;
+}
+
+static int lpuart_dma_rx_request(struct uart_port *port)
+{
+	struct lpuart_port *sport = container_of(port,
+			struct lpuart_port, port);
+	struct dma_chan *rx_chan;
+	struct dma_slave_config dma_rx_sconfig;
+	dma_addr_t dma_bus;
+	unsigned char *dma_buf;
+	int ret;
+
+	rx_chan = dma_request_slave_channel(sport->port.dev, "rx");
+
+	if (!rx_chan) {
+		dev_err(sport->port.dev, "Dma rx channel request failed!\n");
+		return -ENODEV;
+	}
+
+	dma_buf = devm_kzalloc(sport->port.dev,
+				FSL_UART_RX_DMA_BUFFER_SIZE, GFP_KERNEL);
+
+	if (!dma_buf) {
+		dev_err(sport->port.dev, "Dma rx alloc failed\n");
+		dma_release_channel(rx_chan);
+		return -ENOMEM;
+	}
+
+	dma_bus = dma_map_single(rx_chan->device->dev, dma_buf,
+				FSL_UART_RX_DMA_BUFFER_SIZE, DMA_FROM_DEVICE);
+
+	if (dma_mapping_error(rx_chan->device->dev, dma_bus)) {
+		dev_err(sport->port.dev, "dma_map_single rx failed\n");
+		dma_release_channel(rx_chan);
+		return -ENOMEM;
+	}
+
+	dma_rx_sconfig.src_addr = sport->port.mapbase + UARTDR;
+	dma_rx_sconfig.src_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
+	dma_rx_sconfig.src_maxburst = 1;
+	dma_rx_sconfig.direction = DMA_DEV_TO_MEM;
+	ret = dmaengine_slave_config(rx_chan, &dma_rx_sconfig);
+
+	if (ret < 0) {
+		dev_err(sport->port.dev,
+				"Dma slave config failed, err = %d\n", ret);
+		dma_release_channel(rx_chan);
+		return ret;
+	}
+
+	sport->dma_rx_chan = rx_chan;
+	sport->dma_rx_buf_virt = dma_buf;
+	sport->dma_rx_buf_bus = dma_bus;
+	sport->dma_rx_in_progress = 0;
+
+	sport->dma_rx_timeout = (sport->port.timeout - HZ / 50) *
+				FSL_UART_RX_DMA_BUFFER_SIZE * 3 /
+				sport->rxfifo_size / 2;
+
+	if (sport->dma_rx_timeout < msecs_to_jiffies(20))
+		sport->dma_rx_timeout = msecs_to_jiffies(20);
+
+	return 0;
+}
+
+static void lpuart_dma_tx_free(struct uart_port *port)
+{
+	struct lpuart_port *sport = container_of(port,
+			struct lpuart_port, port);
+	struct dma_chan *dma_chan;
+
+	dma_unmap_single(sport->port.dev, sport->dma_tx_buf_bus,
+			UART_XMIT_SIZE, DMA_TO_DEVICE);
+	dma_chan = sport->dma_tx_chan;
+	sport->dma_tx_chan = NULL;
+	sport->dma_tx_buf_bus = 0;
+	sport->dma_tx_buf_virt = NULL;
+	dma_release_channel(dma_chan);
+}
+
+static void lpuart_dma_rx_free(struct uart_port *port)
+{
+	struct lpuart_port *sport = container_of(port,
+			struct lpuart_port, port);
+	struct dma_chan *dma_chan;
+
+	dma_unmap_single(sport->port.dev, sport->dma_rx_buf_bus,
+			FSL_UART_RX_DMA_BUFFER_SIZE, DMA_FROM_DEVICE);
+
+	dma_chan = sport->dma_rx_chan;
+	sport->dma_rx_chan = NULL;
+	sport->dma_rx_buf_bus = 0;
+	sport->dma_rx_buf_virt = NULL;
+	dma_release_channel(dma_chan);
+}
+
 static int lpuart_startup(struct uart_port *port)
 {
 	struct lpuart_port *sport = container_of(port, struct lpuart_port, port);
@@ -380,6 +772,15 @@ static int lpuart_startup(struct uart_port *port)
 	unsigned long flags;
 	unsigned char temp;
 
+	/*whether use dma support by dma request results*/
+	if (lpuart_dma_tx_request(port) || lpuart_dma_rx_request(port)) {
+		sport->lpuart_dma_use = false;
+	} else {
+		sport->lpuart_dma_use = true;
+		temp = readb(port->membase + UARTCR5);
+		writeb(temp | UARTCR5_TDMAS, port->membase + UARTCR5);
+	}
+
 	ret = devm_request_irq(port->dev, port->irq, lpuart_int, 0,
 				DRIVER_NAME, sport);
 	if (ret)
@@ -414,6 +815,11 @@ static void lpuart_shutdown(struct uart_port *port)
 	spin_unlock_irqrestore(&port->lock, flags);
 
 	devm_free_irq(port->dev, port->irq, sport);
+
+	if (sport->lpuart_dma_use) {
+		lpuart_dma_tx_free(port);
+		lpuart_dma_rx_free(port);
+	}
 }
 
 static void