Diffstat (limited to 'drivers/net/wireless/intel/iwlwifi/pcie/ctxt-info.c')
-rw-r--r--  drivers/net/wireless/intel/iwlwifi/pcie/ctxt-info.c  92
1 file changed, 30 insertions(+), 62 deletions(-)
diff --git a/drivers/net/wireless/intel/iwlwifi/pcie/ctxt-info.c b/drivers/net/wireless/intel/iwlwifi/pcie/ctxt-info.c
index 5ef216f3a60b..6f45a0303ddd 100644
--- a/drivers/net/wireless/intel/iwlwifi/pcie/ctxt-info.c
+++ b/drivers/net/wireless/intel/iwlwifi/pcie/ctxt-info.c
@@ -6,6 +6,7 @@
  * GPL LICENSE SUMMARY
  *
  * Copyright(c) 2017 Intel Deutschland GmbH
+ * Copyright(c) 2018 Intel Corporation
  *
  * This program is free software; you can redistribute it and/or modify
  * it under the terms of version 2 of the GNU General Public License as
@@ -19,6 +20,7 @@
  * BSD LICENSE
  *
  * Copyright(c) 2017 Intel Deutschland GmbH
+ * Copyright(c) 2018 Intel Corporation
  * All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
@@ -55,57 +57,6 @@
 #include "internal.h"
 #include "iwl-prph.h"
 
-static int iwl_pcie_get_num_sections(const struct fw_img *fw,
-				     int start)
-{
-	int i = 0;
-
-	while (start < fw->num_sec &&
-	       fw->sec[start].offset != CPU1_CPU2_SEPARATOR_SECTION &&
-	       fw->sec[start].offset != PAGING_SEPARATOR_SECTION) {
-		start++;
-		i++;
-	}
-
-	return i;
-}
-
-static int iwl_pcie_ctxt_info_alloc_dma(struct iwl_trans *trans,
-					const struct fw_desc *sec,
-					struct iwl_dram_data *dram)
-{
-	dram->block = dma_alloc_coherent(trans->dev, sec->len,
-					 &dram->physical,
-					 GFP_KERNEL);
-	if (!dram->block)
-		return -ENOMEM;
-
-	dram->size = sec->len;
-	memcpy(dram->block, sec->data, sec->len);
-
-	return 0;
-}
-
-static void iwl_pcie_ctxt_info_free_fw_img(struct iwl_trans *trans)
-{
-	struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans);
-	struct iwl_self_init_dram *dram = &trans_pcie->init_dram;
-	int i;
-
-	if (!dram->fw) {
-		WARN_ON(dram->fw_cnt);
-		return;
-	}
-
-	for (i = 0; i < dram->fw_cnt; i++)
-		dma_free_coherent(trans->dev, dram->fw[i].size,
-				  dram->fw[i].block, dram->fw[i].physical);
-
-	kfree(dram->fw);
-	dram->fw_cnt = 0;
-	dram->fw = NULL;
-}
-
 void iwl_pcie_ctxt_info_free_paging(struct iwl_trans *trans)
 {
 	struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans);
@@ -128,13 +79,12 @@ void iwl_pcie_ctxt_info_free_paging(struct iwl_trans *trans)
 	dram->paging = NULL;
 }
 
-static int iwl_pcie_ctxt_info_init_fw_sec(struct iwl_trans *trans,
-					  const struct fw_img *fw,
-					  struct iwl_context_info *ctxt_info)
+int iwl_pcie_init_fw_sec(struct iwl_trans *trans,
+			 const struct fw_img *fw,
+			 struct iwl_context_info_dram *ctxt_dram)
 {
 	struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans);
 	struct iwl_self_init_dram *dram = &trans_pcie->init_dram;
-	struct iwl_context_info_dram *ctxt_dram = &ctxt_info->dram;
 	int i, ret, lmac_cnt, umac_cnt, paging_cnt;
 
 	if (WARN(dram->paging,
@@ -212,7 +162,7 @@ int iwl_pcie_ctxt_info_init(struct iwl_trans *trans,
 	struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans);
 	struct iwl_context_info *ctxt_info;
 	struct iwl_context_info_rbd_cfg *rx_cfg;
-	u32 control_flags = 0;
+	u32 control_flags = 0, rb_size;
 	int ret;
 
 	ctxt_info = dma_alloc_coherent(trans->dev, sizeof(*ctxt_info),
@@ -227,11 +177,29 @@ int iwl_pcie_ctxt_info_init(struct iwl_trans *trans,
 	/* size is in DWs */
 	ctxt_info->version.size = cpu_to_le16(sizeof(*ctxt_info) / 4);
 
+	switch (trans_pcie->rx_buf_size) {
+	case IWL_AMSDU_2K:
+		rb_size = IWL_CTXT_INFO_RB_SIZE_2K;
+		break;
+	case IWL_AMSDU_4K:
+		rb_size = IWL_CTXT_INFO_RB_SIZE_4K;
+		break;
+	case IWL_AMSDU_8K:
+		rb_size = IWL_CTXT_INFO_RB_SIZE_8K;
+		break;
+	case IWL_AMSDU_12K:
+		rb_size = IWL_CTXT_INFO_RB_SIZE_12K;
+		break;
+	default:
+		WARN_ON(1);
+		rb_size = IWL_CTXT_INFO_RB_SIZE_4K;
+	}
+
 	BUILD_BUG_ON(RX_QUEUE_CB_SIZE(MQ_RX_TABLE_SIZE) > 0xF);
-	control_flags = IWL_CTXT_INFO_RB_SIZE_4K |
-			IWL_CTXT_INFO_TFD_FORMAT_LONG |
-			RX_QUEUE_CB_SIZE(MQ_RX_TABLE_SIZE) <<
-			IWL_CTXT_INFO_RB_CB_SIZE_POS;
+	control_flags = IWL_CTXT_INFO_TFD_FORMAT_LONG |
+			(RX_QUEUE_CB_SIZE(MQ_RX_TABLE_SIZE) <<
+			 IWL_CTXT_INFO_RB_CB_SIZE_POS) |
+			(rb_size << IWL_CTXT_INFO_RB_SIZE_POS);
 	ctxt_info->control.control_flags = cpu_to_le32(control_flags);
 
 	/* initialize RX default queue */
@@ -244,10 +212,10 @@ int iwl_pcie_ctxt_info_init(struct iwl_trans *trans,
 	ctxt_info->hcmd_cfg.cmd_queue_addr =
 		cpu_to_le64(trans_pcie->txq[trans_pcie->cmd_queue]->dma_addr);
 	ctxt_info->hcmd_cfg.cmd_queue_size =
-		TFD_QUEUE_CB_SIZE(trans_pcie->tx_cmd_queue_size);
+		TFD_QUEUE_CB_SIZE(TFD_CMD_SLOTS);
 
 	/* allocate ucode sections in dram and set addresses */
-	ret = iwl_pcie_ctxt_info_init_fw_sec(trans, fw, ctxt_info);
+	ret = iwl_pcie_init_fw_sec(trans, fw, &ctxt_info->dram);
 	if (ret) {
 		dma_free_coherent(trans->dev, sizeof(*trans_pcie->ctxt_info),
 				  ctxt_info, trans_pcie->ctxt_info_dma_addr);