author    Jeff Garzik <jgarzik@pobox.com>  2005-08-10 13:46:28 -0400
committer Jeff Garzik <jgarzik@pobox.com>  2005-08-10 13:46:28 -0400
commit    2f058256cb64e346f4fb4499ff4e0f1c2791a4b4
tree      91e06602f4d3abb6812ea8c9bc9ba4501e14c84e  /drivers/infiniband/hw/mthca/mthca_cmd.c
parent    0274aa2506fd2fe89a58dd6cd64d3b3f7b976af8
parent    86b3786078d63242d3194ffc58ae8dae1d1bbef3
Merge /spare/repo/linux-2.6/
Diffstat (limited to 'drivers/infiniband/hw/mthca/mthca_cmd.c')
-rw-r--r--  drivers/infiniband/hw/mthca/mthca_cmd.c  531
1 file changed, 227 insertions(+), 304 deletions(-)
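The diff below converts the mthca firmware-command paths from per-command pci_alloc_consistent()/pci_map_single() buffers to mailboxes allocated from a PCI pool set up in mthca_cmd_init(). The patch only touches the buf and dma members of struct mthca_mailbox; the structure itself is declared in mthca_cmd.h, which is not part of this diff, so the sketch below is an assumed minimal shape rather than the real declaration:

	/* Assumed minimal shape of the mailbox object used by this patch;
	 * the actual declaration lives in mthca_cmd.h (not shown here). */
	struct mthca_mailbox {
		dma_addr_t dma;		/* bus address handed to mthca_cmd()/mthca_cmd_box() */
		void      *buf;		/* MTHCA_MAILBOX_SIZE buffer built by the caller */
	};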
diff --git a/drivers/infiniband/hw/mthca/mthca_cmd.c b/drivers/infiniband/hw/mthca/mthca_cmd.c
index cd9ed958d92f..1557a522d831 100644
--- a/drivers/infiniband/hw/mthca/mthca_cmd.c
+++ b/drivers/infiniband/hw/mthca/mthca_cmd.c
@@ -431,6 +431,36 @@ static int mthca_cmd_imm(struct mthca_dev *dev,
 			    timeout, status);
 }
 
+int mthca_cmd_init(struct mthca_dev *dev)
+{
+	sema_init(&dev->cmd.hcr_sem, 1);
+	sema_init(&dev->cmd.poll_sem, 1);
+	dev->cmd.use_events = 0;
+
+	dev->hcr = ioremap(pci_resource_start(dev->pdev, 0) + MTHCA_HCR_BASE,
+			   MTHCA_HCR_SIZE);
+	if (!dev->hcr) {
+		mthca_err(dev, "Couldn't map command register.");
+		return -ENOMEM;
+	}
+
+	dev->cmd.pool = pci_pool_create("mthca_cmd", dev->pdev,
+					MTHCA_MAILBOX_SIZE,
+					MTHCA_MAILBOX_SIZE, 0);
+	if (!dev->cmd.pool) {
+		iounmap(dev->hcr);
+		return -ENOMEM;
+	}
+
+	return 0;
+}
+
+void mthca_cmd_cleanup(struct mthca_dev *dev)
+{
+	pci_pool_destroy(dev->cmd.pool);
+	iounmap(dev->hcr);
+}
+
 /*
  * Switch to using events to issue FW commands (should be called after
  * event queue to command events has been initialized).
@@ -489,6 +519,33 @@ void mthca_cmd_use_polling(struct mthca_dev *dev)
 	up(&dev->cmd.poll_sem);
 }
 
+struct mthca_mailbox *mthca_alloc_mailbox(struct mthca_dev *dev,
+					  unsigned int gfp_mask)
+{
+	struct mthca_mailbox *mailbox;
+
+	mailbox = kmalloc(sizeof *mailbox, gfp_mask);
+	if (!mailbox)
+		return ERR_PTR(-ENOMEM);
+
+	mailbox->buf = pci_pool_alloc(dev->cmd.pool, gfp_mask, &mailbox->dma);
+	if (!mailbox->buf) {
+		kfree(mailbox);
+		return ERR_PTR(-ENOMEM);
+	}
+
+	return mailbox;
+}
+
+void mthca_free_mailbox(struct mthca_dev *dev, struct mthca_mailbox *mailbox)
+{
+	if (!mailbox)
+		return;
+
+	pci_pool_free(dev->cmd.pool, mailbox->buf, mailbox->dma);
+	kfree(mailbox);
+}
+
 int mthca_SYS_EN(struct mthca_dev *dev, u8 *status)
 {
 	u64 out;
@@ -513,20 +570,20 @@ int mthca_SYS_DIS(struct mthca_dev *dev, u8 *status)
 static int mthca_map_cmd(struct mthca_dev *dev, u16 op, struct mthca_icm *icm,
 			 u64 virt, u8 *status)
 {
-	u32 *inbox;
-	dma_addr_t indma;
+	struct mthca_mailbox *mailbox;
 	struct mthca_icm_iter iter;
+	__be64 *pages;
 	int lg;
 	int nent = 0;
 	int i;
 	int err = 0;
 	int ts = 0, tc = 0;
 
-	inbox = pci_alloc_consistent(dev->pdev, PAGE_SIZE, &indma);
-	if (!inbox)
-		return -ENOMEM;
-
-	memset(inbox, 0, PAGE_SIZE);
+	mailbox = mthca_alloc_mailbox(dev, GFP_KERNEL);
+	if (IS_ERR(mailbox))
+		return PTR_ERR(mailbox);
+	memset(mailbox->buf, 0, MTHCA_MAILBOX_SIZE);
+	pages = mailbox->buf;
 
 	for (mthca_icm_first(icm, &iter);
 	     !mthca_icm_last(&iter);
@@ -546,19 +603,17 @@ static int mthca_map_cmd(struct mthca_dev *dev, u16 op, struct mthca_icm *icm,
 		}
 		for (i = 0; i < mthca_icm_size(&iter) / (1 << lg); ++i, ++nent) {
 			if (virt != -1) {
-				*((__be64 *) (inbox + nent * 4)) =
-					cpu_to_be64(virt);
+				pages[nent * 2] = cpu_to_be64(virt);
 				virt += 1 << lg;
 			}
 
-			*((__be64 *) (inbox + nent * 4 + 2)) =
-				cpu_to_be64((mthca_icm_addr(&iter) +
-					     (i << lg)) | (lg - 12));
+			pages[nent * 2 + 1] = cpu_to_be64((mthca_icm_addr(&iter) +
+							   (i << lg)) | (lg - 12));
 			ts += 1 << (lg - 10);
 			++tc;
 
-			if (nent == PAGE_SIZE / 16) {
-				err = mthca_cmd(dev, indma, nent, 0, op,
+			if (nent == MTHCA_MAILBOX_SIZE / 16) {
+				err = mthca_cmd(dev, mailbox->dma, nent, 0, op,
 						CMD_TIME_CLASS_B, status);
 				if (err || *status)
 					goto out;
@@ -568,7 +623,7 @@ static int mthca_map_cmd(struct mthca_dev *dev, u16 op, struct mthca_icm *icm,
 	}
 
 	if (nent)
-		err = mthca_cmd(dev, indma, nent, 0, op,
+		err = mthca_cmd(dev, mailbox->dma, nent, 0, op,
 				CMD_TIME_CLASS_B, status);
 
 	switch (op) {
@@ -585,7 +640,7 @@ static int mthca_map_cmd(struct mthca_dev *dev, u16 op, struct mthca_icm *icm,
 	}
 
 out:
-	pci_free_consistent(dev->pdev, PAGE_SIZE, inbox, indma);
+	mthca_free_mailbox(dev, mailbox);
 	return err;
 }
 
@@ -606,8 +661,8 @@ int mthca_RUN_FW(struct mthca_dev *dev, u8 *status)
 
 int mthca_QUERY_FW(struct mthca_dev *dev, u8 *status)
 {
+	struct mthca_mailbox *mailbox;
 	u32 *outbox;
-	dma_addr_t outdma;
 	int err = 0;
 	u8 lg;
 
@@ -625,12 +680,12 @@ int mthca_QUERY_FW(struct mthca_dev *dev, u8 *status)
 #define QUERY_FW_EQ_ARM_BASE_OFFSET    0x40
 #define QUERY_FW_EQ_SET_CI_BASE_OFFSET 0x48
 
-	outbox = pci_alloc_consistent(dev->pdev, QUERY_FW_OUT_SIZE, &outdma);
-	if (!outbox) {
-		return -ENOMEM;
-	}
+	mailbox = mthca_alloc_mailbox(dev, GFP_KERNEL);
+	if (IS_ERR(mailbox))
+		return PTR_ERR(mailbox);
+	outbox = mailbox->buf;
 
-	err = mthca_cmd_box(dev, 0, outdma, 0, 0, CMD_QUERY_FW,
+	err = mthca_cmd_box(dev, 0, mailbox->dma, 0, 0, CMD_QUERY_FW,
 			    CMD_TIME_CLASS_A, status);
 
 	if (err)
@@ -681,15 +736,15 @@ int mthca_QUERY_FW(struct mthca_dev *dev, u8 *status)
 	}
 
 out:
-	pci_free_consistent(dev->pdev, QUERY_FW_OUT_SIZE, outbox, outdma);
+	mthca_free_mailbox(dev, mailbox);
 	return err;
 }
 
 int mthca_ENABLE_LAM(struct mthca_dev *dev, u8 *status)
 {
+	struct mthca_mailbox *mailbox;
 	u8 info;
 	u32 *outbox;
-	dma_addr_t outdma;
 	int err = 0;
 
 #define ENABLE_LAM_OUT_SIZE 0x100
@@ -700,11 +755,12 @@ int mthca_ENABLE_LAM(struct mthca_dev *dev, u8 *status)
 #define ENABLE_LAM_INFO_HIDDEN_FLAG (1 << 4)
 #define ENABLE_LAM_INFO_ECC_MASK    0x3
 
-	outbox = pci_alloc_consistent(dev->pdev, ENABLE_LAM_OUT_SIZE, &outdma);
-	if (!outbox)
-		return -ENOMEM;
+	mailbox = mthca_alloc_mailbox(dev, GFP_KERNEL);
+	if (IS_ERR(mailbox))
+		return PTR_ERR(mailbox);
+	outbox = mailbox->buf;
 
-	err = mthca_cmd_box(dev, 0, outdma, 0, 0, CMD_ENABLE_LAM,
+	err = mthca_cmd_box(dev, 0, mailbox->dma, 0, 0, CMD_ENABLE_LAM,
 			    CMD_TIME_CLASS_C, status);
 
 	if (err)
@@ -733,7 +789,7 @@ int mthca_ENABLE_LAM(struct mthca_dev *dev, u8 *status)
 			  (unsigned long long) dev->ddr_end);
 
 out:
-	pci_free_consistent(dev->pdev, ENABLE_LAM_OUT_SIZE, outbox, outdma);
+	mthca_free_mailbox(dev, mailbox);
 	return err;
 }
 
@@ -744,9 +800,9 @@ int mthca_DISABLE_LAM(struct mthca_dev *dev, u8 *status)
 
 int mthca_QUERY_DDR(struct mthca_dev *dev, u8 *status)
 {
+	struct mthca_mailbox *mailbox;
 	u8 info;
 	u32 *outbox;
-	dma_addr_t outdma;
 	int err = 0;
 
 #define QUERY_DDR_OUT_SIZE 0x100
@@ -757,11 +813,12 @@ int mthca_QUERY_DDR(struct mthca_dev *dev, u8 *status)
 #define QUERY_DDR_INFO_HIDDEN_FLAG (1 << 4)
 #define QUERY_DDR_INFO_ECC_MASK    0x3
 
-	outbox = pci_alloc_consistent(dev->pdev, QUERY_DDR_OUT_SIZE, &outdma);
-	if (!outbox)
-		return -ENOMEM;
+	mailbox = mthca_alloc_mailbox(dev, GFP_KERNEL);
+	if (IS_ERR(mailbox))
+		return PTR_ERR(mailbox);
+	outbox = mailbox->buf;
 
-	err = mthca_cmd_box(dev, 0, outdma, 0, 0, CMD_QUERY_DDR,
+	err = mthca_cmd_box(dev, 0, mailbox->dma, 0, 0, CMD_QUERY_DDR,
 			    CMD_TIME_CLASS_A, status);
 
 	if (err)
@@ -787,15 +844,15 @@ int mthca_QUERY_DDR(struct mthca_dev *dev, u8 *status)
 			  (unsigned long long) dev->ddr_end);
 
 out:
-	pci_free_consistent(dev->pdev, QUERY_DDR_OUT_SIZE, outbox, outdma);
+	mthca_free_mailbox(dev, mailbox);
 	return err;
 }
 
 int mthca_QUERY_DEV_LIM(struct mthca_dev *dev,
 			struct mthca_dev_lim *dev_lim, u8 *status)
 {
+	struct mthca_mailbox *mailbox;
 	u32 *outbox;
-	dma_addr_t outdma;
 	u8 field;
 	u16 size;
 	int err;
@@ -860,11 +917,12 @@ int mthca_QUERY_DEV_LIM(struct mthca_dev *dev,
 #define QUERY_DEV_LIM_LAMR_OFFSET       0x9f
 #define QUERY_DEV_LIM_MAX_ICM_SZ_OFFSET 0xa0
 
-	outbox = pci_alloc_consistent(dev->pdev, QUERY_DEV_LIM_OUT_SIZE, &outdma);
-	if (!outbox)
-		return -ENOMEM;
+	mailbox = mthca_alloc_mailbox(dev, GFP_KERNEL);
+	if (IS_ERR(mailbox))
+		return PTR_ERR(mailbox);
+	outbox = mailbox->buf;
 
-	err = mthca_cmd_box(dev, 0, outdma, 0, 0, CMD_QUERY_DEV_LIM,
+	err = mthca_cmd_box(dev, 0, mailbox->dma, 0, 0, CMD_QUERY_DEV_LIM,
 			    CMD_TIME_CLASS_A, status);
 
 	if (err)
@@ -1020,15 +1078,15 @@ int mthca_QUERY_DEV_LIM(struct mthca_dev *dev,
 	}
 
 out:
-	pci_free_consistent(dev->pdev, QUERY_DEV_LIM_OUT_SIZE, outbox, outdma);
+	mthca_free_mailbox(dev, mailbox);
 	return err;
 }
 
 int mthca_QUERY_ADAPTER(struct mthca_dev *dev,
 			struct mthca_adapter *adapter, u8 *status)
 {
+	struct mthca_mailbox *mailbox;
 	u32 *outbox;
-	dma_addr_t outdma;
 	int err;
 
 #define QUERY_ADAPTER_OUT_SIZE 0x100
@@ -1037,23 +1095,24 @@ int mthca_QUERY_ADAPTER(struct mthca_dev *dev,
 #define QUERY_ADAPTER_REVISION_ID_OFFSET 0x08
 #define QUERY_ADAPTER_INTA_PIN_OFFSET    0x10
 
-	outbox = pci_alloc_consistent(dev->pdev, QUERY_ADAPTER_OUT_SIZE, &outdma);
-	if (!outbox)
-		return -ENOMEM;
+	mailbox = mthca_alloc_mailbox(dev, GFP_KERNEL);
+	if (IS_ERR(mailbox))
+		return PTR_ERR(mailbox);
+	outbox = mailbox->buf;
 
-	err = mthca_cmd_box(dev, 0, outdma, 0, 0, CMD_QUERY_ADAPTER,
+	err = mthca_cmd_box(dev, 0, mailbox->dma, 0, 0, CMD_QUERY_ADAPTER,
 			    CMD_TIME_CLASS_A, status);
 
 	if (err)
 		goto out;
 
 	MTHCA_GET(adapter->vendor_id, outbox, QUERY_ADAPTER_VENDOR_ID_OFFSET);
 	MTHCA_GET(adapter->device_id, outbox, QUERY_ADAPTER_DEVICE_ID_OFFSET);
 	MTHCA_GET(adapter->revision_id, outbox, QUERY_ADAPTER_REVISION_ID_OFFSET);
 	MTHCA_GET(adapter->inta_pin, outbox, QUERY_ADAPTER_INTA_PIN_OFFSET);
 
 out:
-	pci_free_consistent(dev->pdev, QUERY_DEV_LIM_OUT_SIZE, outbox, outdma);
+	mthca_free_mailbox(dev, mailbox);
 	return err;
 }
 
@@ -1061,8 +1120,8 @@ int mthca_INIT_HCA(struct mthca_dev *dev,
 		   struct mthca_init_hca_param *param,
 		   u8 *status)
 {
+	struct mthca_mailbox *mailbox;
 	u32 *inbox;
-	dma_addr_t indma;
 	int err;
 
 #define INIT_HCA_IN_SIZE 0x200
@@ -1102,9 +1161,10 @@ int mthca_INIT_HCA(struct mthca_dev *dev,
 #define INIT_HCA_UAR_SCATCH_BASE_OFFSET (INIT_HCA_UAR_OFFSET + 0x10)
 #define INIT_HCA_UAR_CTX_BASE_OFFSET    (INIT_HCA_UAR_OFFSET + 0x18)
 
-	inbox = pci_alloc_consistent(dev->pdev, INIT_HCA_IN_SIZE, &indma);
-	if (!inbox)
-		return -ENOMEM;
+	mailbox = mthca_alloc_mailbox(dev, GFP_KERNEL);
+	if (IS_ERR(mailbox))
+		return PTR_ERR(mailbox);
+	inbox = mailbox->buf;
 
 	memset(inbox, 0, INIT_HCA_IN_SIZE);
 
@@ -1167,10 +1227,9 @@ int mthca_INIT_HCA(struct mthca_dev *dev,
 		MTHCA_PUT(inbox, param->uarc_base, INIT_HCA_UAR_CTX_BASE_OFFSET);
 	}
 
-	err = mthca_cmd(dev, indma, 0, 0, CMD_INIT_HCA,
-			HZ, status);
+	err = mthca_cmd(dev, mailbox->dma, 0, 0, CMD_INIT_HCA, HZ, status);
 
-	pci_free_consistent(dev->pdev, INIT_HCA_IN_SIZE, inbox, indma);
+	mthca_free_mailbox(dev, mailbox);
 	return err;
 }
 
@@ -1178,8 +1237,8 @@ int mthca_INIT_IB(struct mthca_dev *dev,
 		  struct mthca_init_ib_param *param,
 		  int port, u8 *status)
 {
+	struct mthca_mailbox *mailbox;
 	u32 *inbox;
-	dma_addr_t indma;
 	int err;
 	u32 flags;
 
@@ -1199,9 +1258,10 @@ int mthca_INIT_IB(struct mthca_dev *dev,
 #define INIT_IB_NODE_GUID_OFFSET 0x18
 #define INIT_IB_SI_GUID_OFFSET   0x20
 
-	inbox = pci_alloc_consistent(dev->pdev, INIT_IB_IN_SIZE, &indma);
-	if (!inbox)
-		return -ENOMEM;
+	mailbox = mthca_alloc_mailbox(dev, GFP_KERNEL);
+	if (IS_ERR(mailbox))
+		return PTR_ERR(mailbox);
+	inbox = mailbox->buf;
 
 	memset(inbox, 0, INIT_IB_IN_SIZE);
 
@@ -1221,10 +1281,10 @@ int mthca_INIT_IB(struct mthca_dev *dev,
 	MTHCA_PUT(inbox, param->node_guid, INIT_IB_NODE_GUID_OFFSET);
 	MTHCA_PUT(inbox, param->si_guid, INIT_IB_SI_GUID_OFFSET);
 
-	err = mthca_cmd(dev, indma, port, 0, CMD_INIT_IB,
+	err = mthca_cmd(dev, mailbox->dma, port, 0, CMD_INIT_IB,
 			CMD_TIME_CLASS_A, status);
 
-	pci_free_consistent(dev->pdev, INIT_HCA_IN_SIZE, inbox, indma);
+	mthca_free_mailbox(dev, mailbox);
 	return err;
 }
 
@@ -1241,8 +1301,8 @@ int mthca_CLOSE_HCA(struct mthca_dev *dev, int panic, u8 *status)
 int mthca_SET_IB(struct mthca_dev *dev, struct mthca_set_ib_param *param,
 		 int port, u8 *status)
 {
+	struct mthca_mailbox *mailbox;
 	u32 *inbox;
-	dma_addr_t indma;
 	int err;
 	u32 flags = 0;
 
@@ -1253,9 +1313,10 @@ int mthca_SET_IB(struct mthca_dev *dev, struct mthca_set_ib_param *param,
 #define SET_IB_CAP_MASK_OFFSET 0x04
 #define SET_IB_SI_GUID_OFFSET  0x08
 
-	inbox = pci_alloc_consistent(dev->pdev, SET_IB_IN_SIZE, &indma);
-	if (!inbox)
-		return -ENOMEM;
+	mailbox = mthca_alloc_mailbox(dev, GFP_KERNEL);
+	if (IS_ERR(mailbox))
+		return PTR_ERR(mailbox);
+	inbox = mailbox->buf;
 
 	memset(inbox, 0, SET_IB_IN_SIZE);
 
@@ -1266,10 +1327,10 @@ int mthca_SET_IB(struct mthca_dev *dev, struct mthca_set_ib_param *param,
 	MTHCA_PUT(inbox, param->cap_mask, SET_IB_CAP_MASK_OFFSET);
 	MTHCA_PUT(inbox, param->si_guid, SET_IB_SI_GUID_OFFSET);
 
-	err = mthca_cmd(dev, indma, port, 0, CMD_SET_IB,
+	err = mthca_cmd(dev, mailbox->dma, port, 0, CMD_SET_IB,
 			CMD_TIME_CLASS_B, status);
 
-	pci_free_consistent(dev->pdev, INIT_HCA_IN_SIZE, inbox, indma);
+	mthca_free_mailbox(dev, mailbox);
 	return err;
 }
 
@@ -1280,20 +1341,22 @@ int mthca_MAP_ICM(struct mthca_dev *dev, struct mthca_icm *icm, u64 virt, u8 *st
 
 int mthca_MAP_ICM_page(struct mthca_dev *dev, u64 dma_addr, u64 virt, u8 *status)
 {
+	struct mthca_mailbox *mailbox;
 	u64 *inbox;
-	dma_addr_t indma;
 	int err;
 
-	inbox = pci_alloc_consistent(dev->pdev, 16, &indma);
-	if (!inbox)
-		return -ENOMEM;
+	mailbox = mthca_alloc_mailbox(dev, GFP_KERNEL);
+	if (IS_ERR(mailbox))
+		return PTR_ERR(mailbox);
+	inbox = mailbox->buf;
 
 	inbox[0] = cpu_to_be64(virt);
 	inbox[1] = cpu_to_be64(dma_addr);
 
-	err = mthca_cmd(dev, indma, 1, 0, CMD_MAP_ICM, CMD_TIME_CLASS_B, status);
+	err = mthca_cmd(dev, mailbox->dma, 1, 0, CMD_MAP_ICM,
+			CMD_TIME_CLASS_B, status);
 
-	pci_free_consistent(dev->pdev, 16, inbox, indma);
+	mthca_free_mailbox(dev, mailbox);
 
 	if (!err)
 		mthca_dbg(dev, "Mapped page at %llx to %llx for ICM.\n",
@@ -1338,69 +1401,26 @@ int mthca_SET_ICM_SIZE(struct mthca_dev *dev, u64 icm_size, u64 *aux_pages,
 	return 0;
 }
 
-int mthca_SW2HW_MPT(struct mthca_dev *dev, void *mpt_entry,
+int mthca_SW2HW_MPT(struct mthca_dev *dev, struct mthca_mailbox *mailbox,
 		    int mpt_index, u8 *status)
 {
-	dma_addr_t indma;
-	int err;
-
-	indma = pci_map_single(dev->pdev, mpt_entry,
-			       MTHCA_MPT_ENTRY_SIZE,
-			       PCI_DMA_TODEVICE);
-	if (pci_dma_mapping_error(indma))
-		return -ENOMEM;
-
-	err = mthca_cmd(dev, indma, mpt_index, 0, CMD_SW2HW_MPT,
-			CMD_TIME_CLASS_B, status);
-
-	pci_unmap_single(dev->pdev, indma,
-			 MTHCA_MPT_ENTRY_SIZE, PCI_DMA_TODEVICE);
-	return err;
+	return mthca_cmd(dev, mailbox->dma, mpt_index, 0, CMD_SW2HW_MPT,
+			 CMD_TIME_CLASS_B, status);
 }
 
-int mthca_HW2SW_MPT(struct mthca_dev *dev, void *mpt_entry,
+int mthca_HW2SW_MPT(struct mthca_dev *dev, struct mthca_mailbox *mailbox,
 		    int mpt_index, u8 *status)
 {
-	dma_addr_t outdma = 0;
-	int err;
-
-	if (mpt_entry) {
-		outdma = pci_map_single(dev->pdev, mpt_entry,
-					MTHCA_MPT_ENTRY_SIZE,
-					PCI_DMA_FROMDEVICE);
-		if (pci_dma_mapping_error(outdma))
-			return -ENOMEM;
-	}
-
-	err = mthca_cmd_box(dev, 0, outdma, mpt_index, !mpt_entry,
-			    CMD_HW2SW_MPT,
-			    CMD_TIME_CLASS_B, status);
-
-	if (mpt_entry)
-		pci_unmap_single(dev->pdev, outdma,
-				 MTHCA_MPT_ENTRY_SIZE,
-				 PCI_DMA_FROMDEVICE);
-	return err;
+	return mthca_cmd_box(dev, 0, mailbox ? mailbox->dma : 0, mpt_index,
+			     !mailbox, CMD_HW2SW_MPT,
+			     CMD_TIME_CLASS_B, status);
 }
 
-int mthca_WRITE_MTT(struct mthca_dev *dev, u64 *mtt_entry,
+int mthca_WRITE_MTT(struct mthca_dev *dev, struct mthca_mailbox *mailbox,
 		    int num_mtt, u8 *status)
 {
-	dma_addr_t indma;
-	int err;
-
-	indma = pci_map_single(dev->pdev, mtt_entry,
-			       (num_mtt + 2) * 8,
-			       PCI_DMA_TODEVICE);
-	if (pci_dma_mapping_error(indma))
-		return -ENOMEM;
-
-	err = mthca_cmd(dev, indma, num_mtt, 0, CMD_WRITE_MTT,
-			CMD_TIME_CLASS_B, status);
-
-	pci_unmap_single(dev->pdev, indma,
-			 (num_mtt + 2) * 8, PCI_DMA_TODEVICE);
-	return err;
+	return mthca_cmd(dev, mailbox->dma, num_mtt, 0, CMD_WRITE_MTT,
+			 CMD_TIME_CLASS_B, status);
 }
 
 int mthca_SYNC_TPT(struct mthca_dev *dev, u8 *status)
@@ -1418,92 +1438,38 @@ int mthca_MAP_EQ(struct mthca_dev *dev, u64 event_mask, int unmap,
 			 0, CMD_MAP_EQ, CMD_TIME_CLASS_B, status);
 }
 
-int mthca_SW2HW_EQ(struct mthca_dev *dev, void *eq_context,
+int mthca_SW2HW_EQ(struct mthca_dev *dev, struct mthca_mailbox *mailbox,
 		   int eq_num, u8 *status)
 {
-	dma_addr_t indma;
-	int err;
-
-	indma = pci_map_single(dev->pdev, eq_context,
-			       MTHCA_EQ_CONTEXT_SIZE,
-			       PCI_DMA_TODEVICE);
-	if (pci_dma_mapping_error(indma))
-		return -ENOMEM;
-
-	err = mthca_cmd(dev, indma, eq_num, 0, CMD_SW2HW_EQ,
-			CMD_TIME_CLASS_A, status);
-
-	pci_unmap_single(dev->pdev, indma,
-			 MTHCA_EQ_CONTEXT_SIZE, PCI_DMA_TODEVICE);
-	return err;
+	return mthca_cmd(dev, mailbox->dma, eq_num, 0, CMD_SW2HW_EQ,
+			 CMD_TIME_CLASS_A, status);
 }
 
-int mthca_HW2SW_EQ(struct mthca_dev *dev, void *eq_context,
+int mthca_HW2SW_EQ(struct mthca_dev *dev, struct mthca_mailbox *mailbox,
 		   int eq_num, u8 *status)
 {
-	dma_addr_t outdma = 0;
-	int err;
-
-	outdma = pci_map_single(dev->pdev, eq_context,
-				MTHCA_EQ_CONTEXT_SIZE,
-				PCI_DMA_FROMDEVICE);
-	if (pci_dma_mapping_error(outdma))
-		return -ENOMEM;
-
-	err = mthca_cmd_box(dev, 0, outdma, eq_num, 0,
-			    CMD_HW2SW_EQ,
-			    CMD_TIME_CLASS_A, status);
-
-	pci_unmap_single(dev->pdev, outdma,
-			 MTHCA_EQ_CONTEXT_SIZE,
-			 PCI_DMA_FROMDEVICE);
-	return err;
+	return mthca_cmd_box(dev, 0, mailbox->dma, eq_num, 0,
+			     CMD_HW2SW_EQ,
+			     CMD_TIME_CLASS_A, status);
 }
 
-int mthca_SW2HW_CQ(struct mthca_dev *dev, void *cq_context,
+int mthca_SW2HW_CQ(struct mthca_dev *dev, struct mthca_mailbox *mailbox,
 		   int cq_num, u8 *status)
 {
-	dma_addr_t indma;
-	int err;
-
-	indma = pci_map_single(dev->pdev, cq_context,
-			       MTHCA_CQ_CONTEXT_SIZE,
-			       PCI_DMA_TODEVICE);
-	if (pci_dma_mapping_error(indma))
-		return -ENOMEM;
-
-	err = mthca_cmd(dev, indma, cq_num, 0, CMD_SW2HW_CQ,
+	return mthca_cmd(dev, mailbox->dma, cq_num, 0, CMD_SW2HW_CQ,
 			 CMD_TIME_CLASS_A, status);
-
-	pci_unmap_single(dev->pdev, indma,
-			 MTHCA_CQ_CONTEXT_SIZE, PCI_DMA_TODEVICE);
-	return err;
 }
 
-int mthca_HW2SW_CQ(struct mthca_dev *dev, void *cq_context,
+int mthca_HW2SW_CQ(struct mthca_dev *dev, struct mthca_mailbox *mailbox,
 		   int cq_num, u8 *status)
 {
-	dma_addr_t outdma = 0;
-	int err;
-
-	outdma = pci_map_single(dev->pdev, cq_context,
-				MTHCA_CQ_CONTEXT_SIZE,
-				PCI_DMA_FROMDEVICE);
-	if (pci_dma_mapping_error(outdma))
-		return -ENOMEM;
-
-	err = mthca_cmd_box(dev, 0, outdma, cq_num, 0,
-			    CMD_HW2SW_CQ,
-			    CMD_TIME_CLASS_A, status);
-
-	pci_unmap_single(dev->pdev, outdma,
-			 MTHCA_CQ_CONTEXT_SIZE,
-			 PCI_DMA_FROMDEVICE);
-	return err;
+	return mthca_cmd_box(dev, 0, mailbox->dma, cq_num, 0,
+			     CMD_HW2SW_CQ,
+			     CMD_TIME_CLASS_A, status);
 }
 
 int mthca_MODIFY_QP(struct mthca_dev *dev, int trans, u32 num,
-		    int is_ee, void *qp_context, u32 optmask,
+		    int is_ee, struct mthca_mailbox *mailbox, u32 optmask,
 		    u8 *status)
 {
 	static const u16 op[] = {
@@ -1520,36 +1486,34 @@ int mthca_MODIFY_QP(struct mthca_dev *dev, int trans, u32 num,
 		[MTHCA_TRANS_ANY2RST]   = CMD_ERR2RST_QPEE
 	};
 	u8 op_mod = 0;
-
-	dma_addr_t indma;
+	int my_mailbox = 0;
 	int err;
 
 	if (trans < 0 || trans >= ARRAY_SIZE(op))
 		return -EINVAL;
 
 	if (trans == MTHCA_TRANS_ANY2RST) {
-		indma = 0;
 		op_mod = 3;	/* don't write outbox, any->reset */
 
 		/* For debugging */
-		qp_context = pci_alloc_consistent(dev->pdev, MTHCA_QP_CONTEXT_SIZE,
-						  &indma);
-		op_mod = 2; /* write outbox, any->reset */
+		if (!mailbox) {
+			mailbox = mthca_alloc_mailbox(dev, GFP_KERNEL);
+			if (!IS_ERR(mailbox)) {
+				my_mailbox = 1;
+				op_mod = 2;	/* write outbox, any->reset */
+			} else
+				mailbox = NULL;
+		}
 	} else {
-		indma = pci_map_single(dev->pdev, qp_context,
-				       MTHCA_QP_CONTEXT_SIZE,
-				       PCI_DMA_TODEVICE);
-		if (pci_dma_mapping_error(indma))
-			return -ENOMEM;
-
 		if (0) {
 			int i;
 			mthca_dbg(dev, "Dumping QP context:\n");
-			printk("  opt param mask: %08x\n", be32_to_cpup(qp_context));
+			printk("  opt param mask: %08x\n", be32_to_cpup(mailbox->buf));
 			for (i = 0; i < 0x100 / 4; ++i) {
 				if (i % 8 == 0)
 					printk("  [%02x] ", i * 4);
-				printk(" %08x", be32_to_cpu(((u32 *) qp_context)[i + 2]));
+				printk(" %08x",
+				       be32_to_cpu(((u32 *) mailbox->buf)[i + 2]));
 				if ((i + 1) % 8 == 0)
 					printk("\n");
 			}
@@ -1557,55 +1521,39 @@ int mthca_MODIFY_QP(struct mthca_dev *dev, int trans, u32 num,
 	}
 
 	if (trans == MTHCA_TRANS_ANY2RST) {
-		err = mthca_cmd_box(dev, 0, indma, (!!is_ee << 24) | num,
-				    op_mod, op[trans], CMD_TIME_CLASS_C, status);
+		err = mthca_cmd_box(dev, 0, mailbox ? mailbox->dma : 0,
+				    (!!is_ee << 24) | num, op_mod,
+				    op[trans], CMD_TIME_CLASS_C, status);
 
-		if (0) {
+		if (0 && mailbox) {
 			int i;
 			mthca_dbg(dev, "Dumping QP context:\n");
-			printk(" %08x\n", be32_to_cpup(qp_context));
+			printk(" %08x\n", be32_to_cpup(mailbox->buf));
 			for (i = 0; i < 0x100 / 4; ++i) {
 				if (i % 8 == 0)
 					printk("[%02x] ", i * 4);
-				printk(" %08x", be32_to_cpu(((u32 *) qp_context)[i + 2]));
+				printk(" %08x",
+				       be32_to_cpu(((u32 *) mailbox->buf)[i + 2]));
 				if ((i + 1) % 8 == 0)
 					printk("\n");
 			}
 		}
 
 	} else
-		err = mthca_cmd(dev, indma, (!!is_ee << 24) | num,
+		err = mthca_cmd(dev, mailbox->dma, (!!is_ee << 24) | num,
 				op_mod, op[trans], CMD_TIME_CLASS_C, status);
 
-	if (trans != MTHCA_TRANS_ANY2RST)
-		pci_unmap_single(dev->pdev, indma,
-				 MTHCA_QP_CONTEXT_SIZE, PCI_DMA_TODEVICE);
-	else
-		pci_free_consistent(dev->pdev, MTHCA_QP_CONTEXT_SIZE,
-				    qp_context, indma);
+	if (my_mailbox)
+		mthca_free_mailbox(dev, mailbox);
+
 	return err;
 }
 
 int mthca_QUERY_QP(struct mthca_dev *dev, u32 num, int is_ee,
-		   void *qp_context, u8 *status)
+		   struct mthca_mailbox *mailbox, u8 *status)
 {
-	dma_addr_t outdma = 0;
-	int err;
-
-	outdma = pci_map_single(dev->pdev, qp_context,
-				MTHCA_QP_CONTEXT_SIZE,
-				PCI_DMA_FROMDEVICE);
-	if (pci_dma_mapping_error(outdma))
-		return -ENOMEM;
-
-	err = mthca_cmd_box(dev, 0, outdma, (!!is_ee << 24) | num, 0,
-			    CMD_QUERY_QPEE,
-			    CMD_TIME_CLASS_A, status);
-
-	pci_unmap_single(dev->pdev, outdma,
-			 MTHCA_QP_CONTEXT_SIZE,
-			 PCI_DMA_FROMDEVICE);
-	return err;
+	return mthca_cmd_box(dev, 0, mailbox->dma, (!!is_ee << 24) | num, 0,
+			     CMD_QUERY_QPEE, CMD_TIME_CLASS_A, status);
 }
 
 int mthca_CONF_SPECIAL_QP(struct mthca_dev *dev, int type, u32 qpn,
@@ -1635,11 +1583,11 @@ int mthca_CONF_SPECIAL_QP(struct mthca_dev *dev, int type, u32 qpn,
 }
 
 int mthca_MAD_IFC(struct mthca_dev *dev, int ignore_mkey, int ignore_bkey,
-		  int port, struct ib_wc* in_wc, struct ib_grh* in_grh,
+		  int port, struct ib_wc *in_wc, struct ib_grh *in_grh,
 		  void *in_mad, void *response_mad, u8 *status)
 {
-	void *box;
-	dma_addr_t dma;
+	struct mthca_mailbox *inmailbox, *outmailbox;
+	void *inbox;
 	int err;
 	u32 in_modifier = port;
 	u8 op_modifier = 0;
@@ -1653,11 +1601,18 @@ int mthca_MAD_IFC(struct mthca_dev *dev, int ignore_mkey, int ignore_bkey,
 #define MAD_IFC_PKEY_OFFSET 0x10e
 #define MAD_IFC_GRH_OFFSET  0x140
 
-	box = pci_alloc_consistent(dev->pdev, MAD_IFC_BOX_SIZE, &dma);
-	if (!box)
-		return -ENOMEM;
+	inmailbox = mthca_alloc_mailbox(dev, GFP_KERNEL);
+	if (IS_ERR(inmailbox))
+		return PTR_ERR(inmailbox);
+	inbox = inmailbox->buf;
 
-	memcpy(box, in_mad, 256);
+	outmailbox = mthca_alloc_mailbox(dev, GFP_KERNEL);
+	if (IS_ERR(outmailbox)) {
+		mthca_free_mailbox(dev, inmailbox);
+		return PTR_ERR(outmailbox);
+	}
+
+	memcpy(inbox, in_mad, 256);
 
 	/*
 	 * Key check traps can't be generated unless we have in_wc to
@@ -1671,97 +1626,65 @@ int mthca_MAD_IFC(struct mthca_dev *dev, int ignore_mkey, int ignore_bkey,
 	if (in_wc) {
 		u8 val;
 
-		memset(box + 256, 0, 256);
+		memset(inbox + 256, 0, 256);
 
-		MTHCA_PUT(box, in_wc->qp_num, MAD_IFC_MY_QPN_OFFSET);
-		MTHCA_PUT(box, in_wc->src_qp, MAD_IFC_RQPN_OFFSET);
+		MTHCA_PUT(inbox, in_wc->qp_num, MAD_IFC_MY_QPN_OFFSET);
+		MTHCA_PUT(inbox, in_wc->src_qp, MAD_IFC_RQPN_OFFSET);
 
 		val = in_wc->sl << 4;
-		MTHCA_PUT(box, val, MAD_IFC_SL_OFFSET);
+		MTHCA_PUT(inbox, val, MAD_IFC_SL_OFFSET);
 
 		val = in_wc->dlid_path_bits |
 			(in_wc->wc_flags & IB_WC_GRH ? 0x80 : 0);
-		MTHCA_PUT(box, val, MAD_IFC_GRH_OFFSET);
+		MTHCA_PUT(inbox, val, MAD_IFC_GRH_OFFSET);
 
-		MTHCA_PUT(box, in_wc->slid, MAD_IFC_RLID_OFFSET);
-		MTHCA_PUT(box, in_wc->pkey_index, MAD_IFC_PKEY_OFFSET);
+		MTHCA_PUT(inbox, in_wc->slid, MAD_IFC_RLID_OFFSET);
+		MTHCA_PUT(inbox, in_wc->pkey_index, MAD_IFC_PKEY_OFFSET);
 
 		if (in_grh)
-			memcpy((u8 *) box + MAD_IFC_GRH_OFFSET, in_grh, 40);
+			memcpy(inbox + MAD_IFC_GRH_OFFSET, in_grh, 40);
 
 		op_modifier |= 0x10;
 
 		in_modifier |= in_wc->slid << 16;
 	}
 
-	err = mthca_cmd_box(dev, dma, dma + 512, in_modifier, op_modifier,
+	err = mthca_cmd_box(dev, inmailbox->dma, outmailbox->dma,
+			    in_modifier, op_modifier,
 			    CMD_MAD_IFC, CMD_TIME_CLASS_C, status);
 
 	if (!err && !*status)
-		memcpy(response_mad, box + 512, 256);
+		memcpy(response_mad, outmailbox->buf, 256);
 
-	pci_free_consistent(dev->pdev, MAD_IFC_BOX_SIZE, box, dma);
+	mthca_free_mailbox(dev, inmailbox);
+	mthca_free_mailbox(dev, outmailbox);
 	return err;
 }
 
-int mthca_READ_MGM(struct mthca_dev *dev, int index, void *mgm,
-		   u8 *status)
+int mthca_READ_MGM(struct mthca_dev *dev, int index,
+		   struct mthca_mailbox *mailbox, u8 *status)
 {
-	dma_addr_t outdma = 0;
-	int err;
-
-	outdma = pci_map_single(dev->pdev, mgm,
-				MTHCA_MGM_ENTRY_SIZE,
-				PCI_DMA_FROMDEVICE);
-	if (pci_dma_mapping_error(outdma))
-		return -ENOMEM;
-
-	err = mthca_cmd_box(dev, 0, outdma, index, 0,
-			    CMD_READ_MGM,
-			    CMD_TIME_CLASS_A, status);
-
-	pci_unmap_single(dev->pdev, outdma,
-			 MTHCA_MGM_ENTRY_SIZE,
-			 PCI_DMA_FROMDEVICE);
-	return err;
+	return mthca_cmd_box(dev, 0, mailbox->dma, index, 0,
+			     CMD_READ_MGM, CMD_TIME_CLASS_A, status);
 }
 
-int mthca_WRITE_MGM(struct mthca_dev *dev, int index, void *mgm,
-		    u8 *status)
+int mthca_WRITE_MGM(struct mthca_dev *dev, int index,
+		    struct mthca_mailbox *mailbox, u8 *status)
 {
-	dma_addr_t indma;
-	int err;
-
-	indma = pci_map_single(dev->pdev, mgm,
-			       MTHCA_MGM_ENTRY_SIZE,
-			       PCI_DMA_TODEVICE);
-	if (pci_dma_mapping_error(indma))
-		return -ENOMEM;
-
-	err = mthca_cmd(dev, indma, index, 0, CMD_WRITE_MGM,
-			CMD_TIME_CLASS_A, status);
-
-	pci_unmap_single(dev->pdev, indma,
-			 MTHCA_MGM_ENTRY_SIZE, PCI_DMA_TODEVICE);
-	return err;
+	return mthca_cmd(dev, mailbox->dma, index, 0, CMD_WRITE_MGM,
+			 CMD_TIME_CLASS_A, status);
 }
 
-int mthca_MGID_HASH(struct mthca_dev *dev, void *gid, u16 *hash,
-		    u8 *status)
+int mthca_MGID_HASH(struct mthca_dev *dev, struct mthca_mailbox *mailbox,
+		    u16 *hash, u8 *status)
 {
-	dma_addr_t indma;
 	u64 imm;
 	int err;
 
-	indma = pci_map_single(dev->pdev, gid, 16, PCI_DMA_TODEVICE);
-	if (pci_dma_mapping_error(indma))
-		return -ENOMEM;
-
-	err = mthca_cmd_imm(dev, indma, &imm, 0, 0, CMD_MGID_HASH,
+	err = mthca_cmd_imm(dev, mailbox->dma, &imm, 0, 0, CMD_MGID_HASH,
 			    CMD_TIME_CLASS_A, status);
-	*hash = imm;
 
-	pci_unmap_single(dev->pdev, indma, 16, PCI_DMA_TODEVICE);
+	*hash = imm;
 	return err;
 }
 
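For reference, a minimal caller-side sketch of the reworked interface, inferred only from the signatures introduced above; the example_sw2hw_cq() wrapper and its error mapping are assumptions for illustration, not part of this commit:

	/* Sketch of the pattern the new API implies: allocate a mailbox,
	 * build the command payload in mailbox->buf, pass the mailbox to
	 * the command wrapper, then free it. */
	static int example_sw2hw_cq(struct mthca_dev *dev, void *cq_context,
				    int cq_num)
	{
		struct mthca_mailbox *mailbox;
		u8 status;
		int err;

		mailbox = mthca_alloc_mailbox(dev, GFP_KERNEL);
		if (IS_ERR(mailbox))
			return PTR_ERR(mailbox);

		memcpy(mailbox->buf, cq_context, MTHCA_CQ_CONTEXT_SIZE);

		err = mthca_SW2HW_CQ(dev, mailbox, cq_num, &status);
		if (!err && status)
			err = -EINVAL;	/* placeholder mapping of the FW status byte */

		mthca_free_mailbox(dev, mailbox);
		return err;
	}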