diff options
Diffstat (limited to 'drivers/gpu/nvgpu/gp106/acr_gp106.c')
-rw-r--r-- | drivers/gpu/nvgpu/gp106/acr_gp106.c | 133 |
1 file changed, 126 insertions(+), 7 deletions(-)
diff --git a/drivers/gpu/nvgpu/gp106/acr_gp106.c b/drivers/gpu/nvgpu/gp106/acr_gp106.c index 7bb099e5..b1150e29 100644 --- a/drivers/gpu/nvgpu/gp106/acr_gp106.c +++ b/drivers/gpu/nvgpu/gp106/acr_gp106.c | |||
@@ -93,7 +93,7 @@ int gp106_alloc_blob_space(struct gk20a *g, | |||
93 | return 0; | 93 | return 0; |
94 | } | 94 | } |
95 | 95 | ||
96 | g->ops.pmu.get_wpr(g, &wpr_inf); | 96 | g->acr.get_wpr_info(g, &wpr_inf); |
97 | 97 | ||
98 | /* | 98 | /* |
99 | * Even though this mem_desc wouldn't be used, the wpr region needs to | 99 | * Even though this mem_desc wouldn't be used, the wpr region needs to |
@@ -456,7 +456,7 @@ int gp106_prepare_ucode_blob(struct gk20a *g) | |||
456 | memset((void *)plsfm, 0, sizeof(struct ls_flcn_mgr_v1)); | 456 | memset((void *)plsfm, 0, sizeof(struct ls_flcn_mgr_v1)); |
457 | gr_gk20a_init_ctxsw_ucode(g); | 457 | gr_gk20a_init_ctxsw_ucode(g); |
458 | 458 | ||
459 | g->ops.pmu.get_wpr(g, &wpr_inf); | 459 | g->acr.get_wpr_info(g, &wpr_inf); |
460 | gp106_dbg_pmu(g, "wpr carveout base:%llx\n", (wpr_inf.wpr_base)); | 460 | gp106_dbg_pmu(g, "wpr carveout base:%llx\n", (wpr_inf.wpr_base)); |
461 | gp106_dbg_pmu(g, "wpr carveout size :%x\n", (u32)wpr_inf.size); | 461 | gp106_dbg_pmu(g, "wpr carveout size :%x\n", (u32)wpr_inf.size); |
462 | 462 | ||
@@ -479,7 +479,7 @@ int gp106_prepare_ucode_blob(struct gk20a *g) | |||
479 | } | 479 | } |
480 | 480 | ||
481 | /*Alloc memory to hold ucode blob contents*/ | 481 | /*Alloc memory to hold ucode blob contents*/ |
482 | err = g->ops.pmu.alloc_blob_space(g, plsfm->wpr_size | 482 | err = g->acr.alloc_blob_space(g, plsfm->wpr_size |
483 | ,&g->acr.ucode_blob); | 483 | ,&g->acr.ucode_blob); |
484 | if (err) { | 484 | if (err) { |
485 | goto exit_err; | 485 | goto exit_err; |
@@ -557,7 +557,7 @@ int lsfm_discover_ucode_images(struct gk20a *g, | |||
557 | 557 | ||
558 | /*0th index is always PMU which is already handled in earlier | 558 | /*0th index is always PMU which is already handled in earlier |
559 | if condition*/ | 559 | if condition*/ |
560 | for (i = 1; i < (MAX_SUPPORTED_LSFM); i++) { | 560 | for (i = 1; i < g->acr.max_supported_lsfm; i++) { |
561 | memset(&ucode_img, 0, sizeof(ucode_img)); | 561 | memset(&ucode_img, 0, sizeof(ucode_img)); |
562 | if (pmu_acr_supp_ucode_list[i](g, &ucode_img) == 0) { | 562 | if (pmu_acr_supp_ucode_list[i](g, &ucode_img) == 0) { |
563 | if (ucode_img.lsf_desc != NULL) { | 563 | if (ucode_img.lsf_desc != NULL) { |
@@ -626,7 +626,7 @@ int gp106_pmu_populate_loader_cfg(struct gk20a *g, | |||
626 | * physical addresses of each respective segment. | 626 | * physical addresses of each respective segment. |
627 | */ | 627 | */ |
628 | addr_base = p_lsfm->lsb_header.ucode_off; | 628 | addr_base = p_lsfm->lsb_header.ucode_off; |
629 | g->ops.pmu.get_wpr(g, &wpr_inf); | 629 | g->acr.get_wpr_info(g, &wpr_inf); |
630 | addr_base += (wpr_inf.wpr_base); | 630 | addr_base += (wpr_inf.wpr_base); |
631 | 631 | ||
632 | gp106_dbg_pmu(g, "pmu loader cfg addrbase 0x%llx\n", addr_base); | 632 | gp106_dbg_pmu(g, "pmu loader cfg addrbase 0x%llx\n", addr_base); |
@@ -701,7 +701,7 @@ int gp106_flcn_populate_bl_dmem_desc(struct gk20a *g, | |||
701 | * physical addresses of each respective segment. | 701 | * physical addresses of each respective segment. |
702 | */ | 702 | */ |
703 | addr_base = p_lsfm->lsb_header.ucode_off; | 703 | addr_base = p_lsfm->lsb_header.ucode_off; |
704 | g->ops.pmu.get_wpr(g, &wpr_inf); | 704 | g->acr.get_wpr_info(g, &wpr_inf); |
705 | addr_base += wpr_inf.wpr_base; | 705 | addr_base += wpr_inf.wpr_base; |
706 | 706 | ||
707 | gp106_dbg_pmu(g, "falcon ID %x", p_lsfm->wpr_header.falcon_id); | 707 | gp106_dbg_pmu(g, "falcon ID %x", p_lsfm->wpr_header.falcon_id); |
@@ -1017,7 +1017,7 @@ int lsfm_add_ucode_img(struct gk20a *g, struct ls_flcn_mgr_v1 *plsfm, | |||
1017 | 1017 | ||
1018 | /* Fill in static WPR header info*/ | 1018 | /* Fill in static WPR header info*/ |
1019 | pnode->wpr_header.falcon_id = falcon_id; | 1019 | pnode->wpr_header.falcon_id = falcon_id; |
1020 | pnode->wpr_header.bootstrap_owner = g->bootstrap_owner; | 1020 | pnode->wpr_header.bootstrap_owner = g->acr.bootstrap_owner; |
1021 | pnode->wpr_header.status = LSF_IMAGE_STATUS_COPY; | 1021 | pnode->wpr_header.status = LSF_IMAGE_STATUS_COPY; |
1022 | 1022 | ||
1023 | pnode->wpr_header.lazy_bootstrap = | 1023 | pnode->wpr_header.lazy_bootstrap = |
@@ -1030,6 +1030,7 @@ int lsfm_add_ucode_img(struct gk20a *g, struct ls_flcn_mgr_v1 *plsfm, | |||
1030 | pnode->wpr_header.bin_version = pnode->lsb_header.signature.version; | 1030 | pnode->wpr_header.bin_version = pnode->lsb_header.signature.version; |
1031 | pnode->next = plsfm->ucode_img_list; | 1031 | pnode->next = plsfm->ucode_img_list; |
1032 | plsfm->ucode_img_list = pnode; | 1032 | plsfm->ucode_img_list = pnode; |
1033 | |||
1033 | return 0; | 1034 | return 0; |
1034 | } | 1035 | } |
1035 | 1036 | ||
@@ -1191,3 +1192,121 @@ int lsf_gen_wpr_requirements(struct gk20a *g, | |||
1191 | plsfm->wpr_size = wpr_offset; | 1192 | plsfm->wpr_size = wpr_offset; |
1192 | return 0; | 1193 | return 0; |
1193 | } | 1194 | } |
1195 | |||
1196 | int gp106_acr_patch_wpr_info_to_ucode(struct gk20a *g, struct nvgpu_acr *acr, | ||
1197 | struct hs_acr *acr_desc, bool is_recovery) | ||
1198 | { | ||
1199 | struct nvgpu_firmware *acr_fw = acr_desc->acr_fw; | ||
1200 | struct acr_fw_header *acr_fw_hdr = NULL; | ||
1201 | struct bin_hdr *acr_fw_bin_hdr = NULL; | ||
1202 | struct flcn_acr_desc_v1 *acr_dmem_desc; | ||
1203 | struct wpr_carveout_info wpr_inf; | ||
1204 | u32 *acr_ucode_header = NULL; | ||
1205 | u32 *acr_ucode_data = NULL; | ||
1206 | |||
1207 | nvgpu_log_fn(g, " "); | ||
1208 | |||
1209 | acr_fw_bin_hdr = (struct bin_hdr *)acr_fw->data; | ||
1210 | acr_fw_hdr = (struct acr_fw_header *) | ||
1211 | (acr_fw->data + acr_fw_bin_hdr->header_offset); | ||
1212 | |||
1213 | acr_ucode_data = (u32 *)(acr_fw->data + acr_fw_bin_hdr->data_offset); | ||
1214 | acr_ucode_header = (u32 *)(acr_fw->data + acr_fw_hdr->hdr_offset); | ||
1215 | |||
1216 | acr->get_wpr_info(g, &wpr_inf); | ||
1217 | |||
1218 | acr_dmem_desc = (struct flcn_acr_desc_v1 *) | ||
1219 | &(((u8 *)acr_ucode_data)[acr_ucode_header[2U]]); | ||
1220 | |||
1221 | acr_dmem_desc->nonwpr_ucode_blob_start = wpr_inf.nonwpr_base; | ||
1222 | acr_dmem_desc->nonwpr_ucode_blob_size = wpr_inf.size; | ||
1223 | acr_dmem_desc->regions.no_regions = 1U; | ||
1224 | acr_dmem_desc->wpr_offset = 0U; | ||
1225 | |||
1226 | acr_dmem_desc->wpr_region_id = 1U; | ||
1227 | acr_dmem_desc->regions.region_props[0U].region_id = 1U; | ||
1228 | acr_dmem_desc->regions.region_props[0U].start_addr = | ||
1229 | (wpr_inf.wpr_base) >> 8U; | ||
1230 | acr_dmem_desc->regions.region_props[0U].end_addr = | ||
1231 | ((wpr_inf.wpr_base) + wpr_inf.size) >> 8U; | ||
1232 | acr_dmem_desc->regions.region_props[0U].shadowmMem_startaddress = | ||
1233 | wpr_inf.nonwpr_base >> 8U; | ||
1234 | |||
1235 | return 0; | ||
1236 | } | ||
1237 | |||
1238 | int gp106_acr_fill_bl_dmem_desc(struct gk20a *g, | ||
1239 | struct nvgpu_acr *acr, struct hs_acr *acr_desc, | ||
1240 | u32 *acr_ucode_header) | ||
1241 | { | ||
1242 | struct nvgpu_mem *acr_ucode_mem = &acr_desc->acr_ucode; | ||
1243 | struct flcn_bl_dmem_desc_v1 *bl_dmem_desc = | ||
1244 | &acr_desc->bl_dmem_desc_v1; | ||
1245 | |||
1246 | nvgpu_log_fn(g, " "); | ||
1247 | |||
1248 | memset(bl_dmem_desc, 0U, sizeof(struct flcn_bl_dmem_desc_v1)); | ||
1249 | |||
1250 | bl_dmem_desc->signature[0] = 0U; | ||
1251 | bl_dmem_desc->signature[1] = 0U; | ||
1252 | bl_dmem_desc->signature[2] = 0U; | ||
1253 | bl_dmem_desc->signature[3] = 0U; | ||
1254 | bl_dmem_desc->ctx_dma = GK20A_PMU_DMAIDX_VIRT; | ||
1255 | |||
1256 | flcn64_set_dma(&bl_dmem_desc->code_dma_base, | ||
1257 | acr_ucode_mem->gpu_va); | ||
1258 | |||
1259 | bl_dmem_desc->non_sec_code_off = acr_ucode_header[0U]; | ||
1260 | bl_dmem_desc->non_sec_code_size = acr_ucode_header[1U]; | ||
1261 | bl_dmem_desc->sec_code_off = acr_ucode_header[5U]; | ||
1262 | bl_dmem_desc->sec_code_size = acr_ucode_header[6U]; | ||
1263 | bl_dmem_desc->code_entry_point = 0U; | ||
1264 | |||
1265 | flcn64_set_dma(&bl_dmem_desc->data_dma_base, | ||
1266 | acr_ucode_mem->gpu_va + acr_ucode_header[2U]); | ||
1267 | |||
1268 | bl_dmem_desc->data_size = acr_ucode_header[3U]; | ||
1269 | |||
1270 | return 0; | ||
1271 | } | ||
1272 | |||
1273 | static void nvgpu_gp106_acr_default_sw_init(struct gk20a *g, struct hs_acr *hs_acr) | ||
1274 | { | ||
1275 | struct hs_flcn_bl *hs_bl = &hs_acr->acr_hs_bl; | ||
1276 | |||
1277 | nvgpu_log_fn(g, " "); | ||
1278 | |||
1279 | hs_bl->bl_fw_name = HSBIN_ACR_BL_UCODE_IMAGE; | ||
1280 | |||
1281 | hs_acr->acr_type = ACR_DEFAULT; | ||
1282 | hs_acr->acr_fw_name = HSBIN_ACR_UCODE_IMAGE; | ||
1283 | |||
1284 | hs_acr->ptr_bl_dmem_desc = &hs_acr->bl_dmem_desc_v1; | ||
1285 | hs_acr->bl_dmem_desc_size = sizeof(struct flcn_bl_dmem_desc_v1); | ||
1286 | |||
1287 | hs_acr->acr_flcn = &g->sec2_flcn; | ||
1288 | hs_acr->acr_flcn_setup_hw_and_bl_bootstrap = | ||
1289 | gp106_sec2_setup_hw_and_bl_bootstrap; | ||
1290 | } | ||
1291 | |||
1292 | void nvgpu_gp106_acr_sw_init(struct gk20a *g, struct nvgpu_acr *acr) | ||
1293 | { | ||
1294 | nvgpu_log_fn(g, " "); | ||
1295 | |||
1296 | acr->g = g; | ||
1297 | |||
1298 | acr->bootstrap_owner = LSF_FALCON_ID_SEC2; | ||
1299 | acr->max_supported_lsfm = MAX_SUPPORTED_LSFM; | ||
1300 | |||
1301 | nvgpu_gp106_acr_default_sw_init(g, &acr->acr); | ||
1302 | |||
1303 | acr->get_wpr_info = gp106_wpr_info; | ||
1304 | acr->alloc_blob_space = gp106_alloc_blob_space; | ||
1305 | acr->bootstrap_hs_acr = gm20b_bootstrap_hs_acr; | ||
1306 | acr->patch_wpr_info_to_ucode = | ||
1307 | gp106_acr_patch_wpr_info_to_ucode; | ||
1308 | acr->acr_fill_bl_dmem_desc = | ||
1309 | gp106_acr_fill_bl_dmem_desc; | ||
1310 | |||
1311 | acr->remove_support = gm20b_remove_acr_support; | ||
1312 | } | ||