@@ -34,6 +34,7 @@ struct icp_qat_fw_loader_chip_info {
u32 wakeup_event_val;
bool fw_auth;
bool css_3k;
+ bool tgroup_share_ustore;
u32 fcu_ctl_csr;
u32 fcu_sts_csr;
u32 fcu_dram_addr_hi;
@@ -707,6 +707,7 @@ static int qat_hal_chip_init(struct icp_qat_fw_loader_handle *handle,
handle->chip_info->wakeup_event_val = WAKEUP_EVENT;
handle->chip_info->fw_auth = true;
handle->chip_info->css_3k = false;
+ handle->chip_info->tgroup_share_ustore = false;
handle->chip_info->fcu_ctl_csr = FCU_CONTROL;
handle->chip_info->fcu_sts_csr = FCU_STATUS;
handle->chip_info->fcu_dram_addr_hi = FCU_DRAM_ADDR_HI;
@@ -725,6 +726,7 @@ static int qat_hal_chip_init(struct icp_qat_fw_loader_handle *handle,
handle->chip_info->wakeup_event_val = WAKEUP_EVENT;
handle->chip_info->fw_auth = false;
handle->chip_info->css_3k = false;
+ handle->chip_info->tgroup_share_ustore = false;
handle->chip_info->fcu_ctl_csr = 0;
handle->chip_info->fcu_sts_csr = 0;
handle->chip_info->fcu_dram_addr_hi = 0;
@@ -1180,21 +1180,24 @@ static int qat_uclo_map_suof(struct icp_qat_fw_loader_handle *handle,
if (!suof_img_hdr)
return -ENOMEM;
suof_handle->img_table.simg_hdr = suof_img_hdr;
- }
- for (i = 0; i < suof_handle->img_table.num_simgs; i++) {
- qat_uclo_map_simg(handle, &suof_img_hdr[i],
- &suof_chunk_hdr[1 + i]);
- ret = qat_uclo_check_simg_compat(handle,
- &suof_img_hdr[i]);
- if (ret)
- return ret;
- suof_img_hdr[i].ae_mask &= handle->cfg_ae_mask;
- if ((suof_img_hdr[i].ae_mask & 0x1) != 0)
- ae0_img = i;
+ for (i = 0; i < suof_handle->img_table.num_simgs; i++) {
+ qat_uclo_map_simg(handle, &suof_img_hdr[i],
+ &suof_chunk_hdr[1 + i]);
+ ret = qat_uclo_check_simg_compat(handle,
+ &suof_img_hdr[i]);
+ if (ret)
+ return ret;
+ suof_img_hdr[i].ae_mask &= handle->cfg_ae_mask;
+ if ((suof_img_hdr[i].ae_mask & 0x1) != 0)
+ ae0_img = i;
+ }
+
+ if (!handle->chip_info->tgroup_share_ustore) {
+ qat_uclo_tail_img(suof_img_hdr, ae0_img,
+ suof_handle->img_table.num_simgs);
+ }
}
- qat_uclo_tail_img(suof_img_hdr, ae0_img,
- suof_handle->img_table.num_simgs);
return 0;
}
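
For illustration only, a standalone userspace sketch of the behaviour the hunks above introduce. struct chip_info, tail_img(), order_images() and the image-order arrays are simplified stand-ins, not driver code; tail_img() mimics qat_uclo_tail_img(), which swaps the image whose ae_mask covers AE0 into the last load slot. Only the tgroup_share_ustore flag and the !tgroup_share_ustore gate are taken from the patch: both existing chip-init branches leave the flag false, so current devices keep tailing the AE0 image, while a part whose thread groups share ustore would set it true and skip that step.

#include <stdbool.h>
#include <stdio.h>

/* Simplified stand-in for the relevant bit of icp_qat_fw_loader_chip_info. */
struct chip_info {
	bool tgroup_share_ustore;
};

/*
 * Stand-in for qat_uclo_tail_img(): swap the image whose ae_mask covers
 * AE0 into the last slot so it is loaded after all the others.
 */
static void tail_img(int *order, int ae0_img, int num_imgs)
{
	int tmp;

	if (ae0_img >= num_imgs - 1)
		return;
	tmp = order[num_imgs - 1];
	order[num_imgs - 1] = order[ae0_img];
	order[ae0_img] = tmp;
}

/* Mirrors the tail end of qat_uclo_map_suof() after this patch. */
static void order_images(const struct chip_info *ci, int *order,
			 int ae0_img, int num_imgs)
{
	if (!ci->tgroup_share_ustore)
		tail_img(order, ae0_img, num_imgs);
}

int main(void)
{
	struct chip_info legacy = { .tgroup_share_ustore = false };
	struct chip_info shared = { .tgroup_share_ustore = true };
	int a[] = { 0, 1, 2 };	/* image 0 targets AE0 */
	int b[] = { 0, 1, 2 };

	order_images(&legacy, a, 0, 3);	/* AE0 image moved last */
	order_images(&shared, b, 0, 3);	/* order left untouched */

	printf("legacy: %d %d %d, shared ustore: %d %d %d\n",
	       a[0], a[1], a[2], b[0], b[1], b[2]);
	return 0;
}

Built with any C compiler, this prints "legacy: 2 1 0, shared ustore: 0 1 2", i.e. the legacy path reorders the load sequence so the AE0 image comes last, and the shared-ustore path leaves it alone.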