From 1da37801a8b0fffb024fea594c7f1d7867ed8aa0 Mon Sep 17 00:00:00 2001 From: Bhawanpreet Lakha Date: Wed, 6 Nov 2019 14:38:55 -0500 Subject: drm/amd/display: Drop CONFIG_DRM_AMD_DC_DCN2_0 and DSC_SUPPORTED [Why] DCN2 and DSC are stable enough to be build by default. So drop the flags. [How] Remove them using the unifdef tool. The following commands were executed in sequence: $ find -name '*.c' -exec unifdef -m -DCONFIG_DRM_AMD_DC_DSC_SUPPORT -DCONFIG_DRM_AMD_DC_DCN2_0 -UCONFIG_TRIM_DRM_AMD_DC_DCN2_0 '{}' ';' $ find -name '*.h' -exec unifdef -m -DCONFIG_DRM_AMD_DC_DSC_SUPPORT -DCONFIG_DRM_AMD_DC_DCN2_0 -UCONFIG_TRIM_DRM_AMD_DC_DCN2_0 '{}' ';' In addition: * Remove from kconfig, and replace any dependencies with DCN1_0. * Remove from any makefiles. * Fix and cleanup NV defninitions in dal_asic_id.h * Expand DCN1 ifdef to include DCN2 code in the following files: * clk_mgr/clk_mgr.c: dc_clk_mgr_create() * core/dc_resources.c: dc_create_resource_pool() * dce/dce_dmcu.c: dcn20_*lock_phy() * dce/dce_dmcu.c: dcn20_funcs * dce/dce_dmcu.c: dcn20_dmcu_create() * gpio/hw_factory.c: dal_hw_factory_init() * gpio/hw_translate.c: dal_hw_translate_init() Signed-off-by: Leo Li Signed-off-by: Bhawanpreet Lakha Reviewed-by: Alex Deucher Signed-off-by: Alex Deucher --- drivers/gpu/drm/amd/display/dc/inc/hw/hubp.h | 4 ---- 1 file changed, 4 deletions(-) (limited to 'drivers/gpu/drm/amd/display/dc/inc/hw/hubp.h') diff --git a/drivers/gpu/drm/amd/display/dc/inc/hw/hubp.h b/drivers/gpu/drm/amd/display/dc/inc/hw/hubp.h index 809b62b51a43..62b2d24cd1d3 100644 --- a/drivers/gpu/drm/amd/display/dc/inc/hw/hubp.h +++ b/drivers/gpu/drm/amd/display/dc/inc/hw/hubp.h @@ -38,9 +38,7 @@ enum cursor_pitch { }; enum cursor_lines_per_chunk { -#if defined(CONFIG_DRM_AMD_DC_DCN2_0) CURSOR_LINE_PER_CHUNK_1 = 0, /* new for DCN2 */ -#endif CURSOR_LINE_PER_CHUNK_2 = 1, CURSOR_LINE_PER_CHUNK_4, CURSOR_LINE_PER_CHUNK_8, @@ -139,7 +137,6 @@ struct hubp_funcs { unsigned int (*hubp_get_underflow_status)(struct hubp *hubp); void (*hubp_init)(struct hubp *hubp); -#if defined(CONFIG_DRM_AMD_DC_DCN2_0) void (*dmdata_set_attributes)( struct hubp *hubp, const struct dc_dmdata_attributes *attr); @@ -159,7 +156,6 @@ struct hubp_funcs { void (*hubp_set_flip_control_surface_gsl)( struct hubp *hubp, bool enable); -#endif }; -- cgit v1.2.3 From b9fe5151052f9d1123027e2de1e6372d884887de Mon Sep 17 00:00:00 2001 From: Jaehyun Chung Date: Thu, 31 Oct 2019 15:53:24 -0400 Subject: drm/amd/display: DML Validation Dump/Check with Logging [Why] Need validation that we are programming the expected values (rq, ttu, dlg) from DML. This debug feature will output logs if we are programming incorrect values and may help differentiate DAL issues from HW issues. [How] Dump relevant registers for each pipe with active stream. Compare current reg values with the converted DML output. Log mismatches when found. 
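Each check follows the same read-back, compare, and log pattern, gated by the new dc->debug.validate_dml_output flag; as a minimal illustrative sketch (not code from the patch, which open-codes the comparison for every register field):

/* Illustrative helper only: "expected" is the value DML computed,
 * "actual" is what REG_GET read back from the programmed register.
 */
static void dml_check_field(const char *reg_name, const char *field_name,
			    unsigned int expected, unsigned int actual)
{
	if (actual != expected)
		DC_LOG_DEBUG("DML Validation | %s:%s - Expected: %u Actual: %u\n",
			     reg_name, field_name, expected, actual);
}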
Signed-off-by: Jaehyun Chung Reviewed-by: Alvin Lee Acked-by: Rodrigo Siqueira Signed-off-by: Alex Deucher --- drivers/gpu/drm/amd/display/dc/core/dc.c | 18 +- drivers/gpu/drm/amd/display/dc/dc.h | 1 + drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hubp.c | 310 +++++++++++++++++++ drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubp.c | 345 ++++++++++++++++++++++ drivers/gpu/drm/amd/display/dc/inc/hw/hubp.h | 7 + 5 files changed, 680 insertions(+), 1 deletion(-) (limited to 'drivers/gpu/drm/amd/display/dc/inc/hw/hubp.h') diff --git a/drivers/gpu/drm/amd/display/dc/core/dc.c b/drivers/gpu/drm/amd/display/dc/core/dc.c index 1395aff39980..61dd373b4715 100644 --- a/drivers/gpu/drm/amd/display/dc/core/dc.c +++ b/drivers/gpu/drm/amd/display/dc/core/dc.c @@ -2162,8 +2162,24 @@ static void commit_planes_for_stream(struct dc *dc, dc, pipe_ctx->stream, stream_status->plane_count, context); } } - if (dc->hwss.program_front_end_for_ctx && update_type != UPDATE_TYPE_FAST) + if (dc->hwss.program_front_end_for_ctx && update_type != UPDATE_TYPE_FAST) { dc->hwss.program_front_end_for_ctx(dc, context); +#ifdef CONFIG_DRM_AMD_DC_DCN1_0 + if (dc->debug.validate_dml_output) { + for (i = 0; i < dc->res_pool->pipe_count; i++) { + struct pipe_ctx cur_pipe = context->res_ctx.pipe_ctx[i]; + if (cur_pipe.stream == NULL) + continue; + + cur_pipe.plane_res.hubp->funcs->validate_dml_output( + cur_pipe.plane_res.hubp, dc->ctx, + &context->res_ctx.pipe_ctx[i].rq_regs, + &context->res_ctx.pipe_ctx[i].dlg_regs, + &context->res_ctx.pipe_ctx[i].ttu_regs); + } + } +#endif + } // Update Type FAST, Surface updates if (update_type == UPDATE_TYPE_FAST) { diff --git a/drivers/gpu/drm/amd/display/dc/dc.h b/drivers/gpu/drm/amd/display/dc/dc.h index 3cb361917b4b..f30c77e44bb4 100644 --- a/drivers/gpu/drm/amd/display/dc/dc.h +++ b/drivers/gpu/drm/amd/display/dc/dc.h @@ -412,6 +412,7 @@ struct dc_debug_options { bool nv12_iflip_vm_wa; bool disable_dram_clock_change_vactive_support; + bool validate_dml_output; }; struct dc_debug_data { diff --git a/drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hubp.c b/drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hubp.c index 7d9ffb81584a..2823be75b071 100644 --- a/drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hubp.c +++ b/drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hubp.c @@ -30,6 +30,8 @@ #include "reg_helper.h" #include "basics/conversion.h" +#define DC_LOGGER_INIT(logger) + #define REG(reg)\ hubp2->hubp_regs->reg @@ -1244,6 +1246,313 @@ void hubp2_read_state(struct hubp *hubp) } +void hubp2_validate_dml_output(struct hubp *hubp, + struct dc_context *ctx, + struct _vcs_dpi_display_rq_regs_st *dml_rq_regs, + struct _vcs_dpi_display_dlg_regs_st *dml_dlg_attr, + struct _vcs_dpi_display_ttu_regs_st *dml_ttu_attr) +{ + struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp); + struct _vcs_dpi_display_rq_regs_st rq_regs = {0}; + struct _vcs_dpi_display_dlg_regs_st dlg_attr = {0}; + struct _vcs_dpi_display_ttu_regs_st ttu_attr = {0}; + DC_LOGGER_INIT(ctx->logger); + + /* Requestor Regs */ + REG_GET(HUBPRET_CONTROL, + DET_BUF_PLANE1_BASE_ADDRESS, &rq_regs.plane1_base_address); + REG_GET_4(DCN_EXPANSION_MODE, + DRQ_EXPANSION_MODE, &rq_regs.drq_expansion_mode, + PRQ_EXPANSION_MODE, &rq_regs.prq_expansion_mode, + MRQ_EXPANSION_MODE, &rq_regs.mrq_expansion_mode, + CRQ_EXPANSION_MODE, &rq_regs.crq_expansion_mode); + REG_GET_8(DCHUBP_REQ_SIZE_CONFIG, + CHUNK_SIZE, &rq_regs.rq_regs_l.chunk_size, + MIN_CHUNK_SIZE, &rq_regs.rq_regs_l.min_chunk_size, + META_CHUNK_SIZE, &rq_regs.rq_regs_l.meta_chunk_size, + MIN_META_CHUNK_SIZE, 
&rq_regs.rq_regs_l.min_meta_chunk_size, + DPTE_GROUP_SIZE, &rq_regs.rq_regs_l.dpte_group_size, + MPTE_GROUP_SIZE, &rq_regs.rq_regs_l.mpte_group_size, + SWATH_HEIGHT, &rq_regs.rq_regs_l.swath_height, + PTE_ROW_HEIGHT_LINEAR, &rq_regs.rq_regs_l.pte_row_height_linear); + REG_GET_8(DCHUBP_REQ_SIZE_CONFIG_C, + CHUNK_SIZE_C, &rq_regs.rq_regs_c.chunk_size, + MIN_CHUNK_SIZE_C, &rq_regs.rq_regs_c.min_chunk_size, + META_CHUNK_SIZE_C, &rq_regs.rq_regs_c.meta_chunk_size, + MIN_META_CHUNK_SIZE_C, &rq_regs.rq_regs_c.min_meta_chunk_size, + DPTE_GROUP_SIZE_C, &rq_regs.rq_regs_c.dpte_group_size, + MPTE_GROUP_SIZE_C, &rq_regs.rq_regs_c.mpte_group_size, + SWATH_HEIGHT_C, &rq_regs.rq_regs_c.swath_height, + PTE_ROW_HEIGHT_LINEAR_C, &rq_regs.rq_regs_c.pte_row_height_linear); + + if (rq_regs.plane1_base_address != dml_rq_regs->plane1_base_address) + DC_LOG_DEBUG("DML Validation | HUBPRET_CONTROL:DET_BUF_PLANE1_BASE_ADDRESS - Expected: %u Actual: %u\n", + dml_rq_regs->plane1_base_address, rq_regs.plane1_base_address); + if (rq_regs.drq_expansion_mode != dml_rq_regs->drq_expansion_mode) + DC_LOG_DEBUG("DML Validation | DCN_EXPANSION_MODE:DRQ_EXPANSION_MODE - Expected: %u Actual: %u\n", + dml_rq_regs->drq_expansion_mode, rq_regs.drq_expansion_mode); + if (rq_regs.prq_expansion_mode != dml_rq_regs->prq_expansion_mode) + DC_LOG_DEBUG("DML Validation | DCN_EXPANSION_MODE:MRQ_EXPANSION_MODE - Expected: %u Actual: %u\n", + dml_rq_regs->prq_expansion_mode, rq_regs.prq_expansion_mode); + if (rq_regs.mrq_expansion_mode != dml_rq_regs->mrq_expansion_mode) + DC_LOG_DEBUG("DML Validation | DCN_EXPANSION_MODE:DET_BUF_PLANE1_BASE_ADDRESS - Expected: %u Actual: %u\n", + dml_rq_regs->mrq_expansion_mode, rq_regs.mrq_expansion_mode); + if (rq_regs.crq_expansion_mode != dml_rq_regs->crq_expansion_mode) + DC_LOG_DEBUG("DML Validation | DCN_EXPANSION_MODE:CRQ_EXPANSION_MODE - Expected: %u Actual: %u\n", + dml_rq_regs->crq_expansion_mode, rq_regs.crq_expansion_mode); + + if (rq_regs.rq_regs_l.chunk_size != dml_rq_regs->rq_regs_l.chunk_size) + DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG_C:CHUNK_SIZE - Expected: %u Actual: %u\n", + dml_rq_regs->rq_regs_l.chunk_size, rq_regs.rq_regs_l.chunk_size); + if (rq_regs.rq_regs_l.min_chunk_size != dml_rq_regs->rq_regs_l.min_chunk_size) + DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG_C:MIN_CHUNK_SIZE - Expected: %u Actual: %u\n", + dml_rq_regs->rq_regs_l.min_chunk_size, rq_regs.rq_regs_l.min_chunk_size); + if (rq_regs.rq_regs_l.meta_chunk_size != dml_rq_regs->rq_regs_l.meta_chunk_size) + DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG_C:META_CHUNK_SIZE - Expected: %u Actual: %u\n", + dml_rq_regs->rq_regs_l.meta_chunk_size, rq_regs.rq_regs_l.meta_chunk_size); + if (rq_regs.rq_regs_l.min_meta_chunk_size != dml_rq_regs->rq_regs_l.min_meta_chunk_size) + DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG_C:MIN_META_CHUNK_SIZE - Expected: %u Actual: %u\n", + dml_rq_regs->rq_regs_l.min_meta_chunk_size, rq_regs.rq_regs_l.min_meta_chunk_size); + if (rq_regs.rq_regs_l.dpte_group_size != dml_rq_regs->rq_regs_l.dpte_group_size) + DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG_C:DPTE_GROUP_SIZE - Expected: %u Actual: %u\n", + dml_rq_regs->rq_regs_l.dpte_group_size, rq_regs.rq_regs_l.dpte_group_size); + if (rq_regs.rq_regs_l.mpte_group_size != dml_rq_regs->rq_regs_l.mpte_group_size) + DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG_C:MPTE_GROUP_SIZE - Expected: %u Actual: %u\n", + dml_rq_regs->rq_regs_l.mpte_group_size, rq_regs.rq_regs_l.mpte_group_size); + if 
(rq_regs.rq_regs_l.swath_height != dml_rq_regs->rq_regs_l.swath_height) + DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG_C:SWATH_HEIGHT - Expected: %u Actual: %u\n", + dml_rq_regs->rq_regs_l.swath_height, rq_regs.rq_regs_l.swath_height); + if (rq_regs.rq_regs_l.pte_row_height_linear != dml_rq_regs->rq_regs_l.pte_row_height_linear) + DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG_C:PTE_ROW_HEIGHT_LINEAR - Expected: %u Actual: %u\n", + dml_rq_regs->rq_regs_l.pte_row_height_linear, rq_regs.rq_regs_l.pte_row_height_linear); + + if (rq_regs.rq_regs_c.chunk_size != dml_rq_regs->rq_regs_c.chunk_size) + DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG:CHUNK_SIZE_C - Expected: %u Actual: %u\n", + dml_rq_regs->rq_regs_c.chunk_size, rq_regs.rq_regs_c.chunk_size); + if (rq_regs.rq_regs_c.min_chunk_size != dml_rq_regs->rq_regs_c.min_chunk_size) + DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG:MIN_CHUNK_SIZE_C - Expected: %u Actual: %u\n", + dml_rq_regs->rq_regs_c.min_chunk_size, rq_regs.rq_regs_c.min_chunk_size); + if (rq_regs.rq_regs_c.meta_chunk_size != dml_rq_regs->rq_regs_c.meta_chunk_size) + DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG:META_CHUNK_SIZE_C - Expected: %u Actual: %u\n", + dml_rq_regs->rq_regs_c.meta_chunk_size, rq_regs.rq_regs_c.meta_chunk_size); + if (rq_regs.rq_regs_c.min_meta_chunk_size != dml_rq_regs->rq_regs_c.min_meta_chunk_size) + DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG:MIN_META_CHUNK_SIZE_C - Expected: %u Actual: %u\n", + dml_rq_regs->rq_regs_c.min_meta_chunk_size, rq_regs.rq_regs_c.min_meta_chunk_size); + if (rq_regs.rq_regs_c.dpte_group_size != dml_rq_regs->rq_regs_c.dpte_group_size) + DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG:DPTE_GROUP_SIZE_C - Expected: %u Actual: %u\n", + dml_rq_regs->rq_regs_c.dpte_group_size, rq_regs.rq_regs_c.dpte_group_size); + if (rq_regs.rq_regs_c.mpte_group_size != dml_rq_regs->rq_regs_c.mpte_group_size) + DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG:MPTE_GROUP_SIZE_C - Expected: %u Actual: %u\n", + dml_rq_regs->rq_regs_c.mpte_group_size, rq_regs.rq_regs_c.mpte_group_size); + if (rq_regs.rq_regs_c.swath_height != dml_rq_regs->rq_regs_c.swath_height) + DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG:SWATH_HEIGHT_C - Expected: %u Actual: %u\n", + dml_rq_regs->rq_regs_c.swath_height, rq_regs.rq_regs_c.swath_height); + if (rq_regs.rq_regs_c.pte_row_height_linear != dml_rq_regs->rq_regs_c.pte_row_height_linear) + DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG:PTE_ROW_HEIGHT_LINEAR_C - Expected: %u Actual: %u\n", + dml_rq_regs->rq_regs_c.pte_row_height_linear, rq_regs.rq_regs_c.pte_row_height_linear); + + /* DLG - Per hubp */ + REG_GET_2(BLANK_OFFSET_0, + REFCYC_H_BLANK_END, &dlg_attr.refcyc_h_blank_end, + DLG_V_BLANK_END, &dlg_attr.dlg_vblank_end); + REG_GET(BLANK_OFFSET_1, + MIN_DST_Y_NEXT_START, &dlg_attr.min_dst_y_next_start); + REG_GET(DST_DIMENSIONS, + REFCYC_PER_HTOTAL, &dlg_attr.refcyc_per_htotal); + REG_GET_2(DST_AFTER_SCALER, + REFCYC_X_AFTER_SCALER, &dlg_attr.refcyc_x_after_scaler, + DST_Y_AFTER_SCALER, &dlg_attr.dst_y_after_scaler); + REG_GET(REF_FREQ_TO_PIX_FREQ, + REF_FREQ_TO_PIX_FREQ, &dlg_attr.ref_freq_to_pix_freq); + + if (dlg_attr.refcyc_h_blank_end != dml_dlg_attr->refcyc_h_blank_end) + DC_LOG_DEBUG("DML Validation | BLANK_OFFSET_0:REFCYC_H_BLANK_END - Expected: %u Actual: %u\n", + dml_dlg_attr->refcyc_h_blank_end, dlg_attr.refcyc_h_blank_end); + if (dlg_attr.dlg_vblank_end != dml_dlg_attr->dlg_vblank_end) + DC_LOG_DEBUG("DML Validation | 
BLANK_OFFSET_0:DLG_V_BLANK_END - Expected: %u Actual: %u\n", + dml_dlg_attr->dlg_vblank_end, dlg_attr.dlg_vblank_end); + if (dlg_attr.min_dst_y_next_start != dml_dlg_attr->min_dst_y_next_start) + DC_LOG_DEBUG("DML Validation | BLANK_OFFSET_1:MIN_DST_Y_NEXT_START - Expected: %u Actual: %u\n", + dml_dlg_attr->min_dst_y_next_start, dlg_attr.min_dst_y_next_start); + if (dlg_attr.refcyc_per_htotal != dml_dlg_attr->refcyc_per_htotal) + DC_LOG_DEBUG("DML Validation | DST_DIMENSIONS:REFCYC_PER_HTOTAL - Expected: %u Actual: %u\n", + dml_dlg_attr->refcyc_per_htotal, dlg_attr.refcyc_per_htotal); + if (dlg_attr.refcyc_x_after_scaler != dml_dlg_attr->refcyc_x_after_scaler) + DC_LOG_DEBUG("DML Validation | DST_AFTER_SCALER:REFCYC_X_AFTER_SCALER - Expected: %u Actual: %u\n", + dml_dlg_attr->refcyc_x_after_scaler, dlg_attr.refcyc_x_after_scaler); + if (dlg_attr.dst_y_after_scaler != dml_dlg_attr->dst_y_after_scaler) + DC_LOG_DEBUG("DML Validation | DST_AFTER_SCALER:DST_Y_AFTER_SCALER - Expected: %u Actual: %u\n", + dml_dlg_attr->dst_y_after_scaler, dlg_attr.dst_y_after_scaler); + if (dlg_attr.ref_freq_to_pix_freq != dml_dlg_attr->ref_freq_to_pix_freq) + DC_LOG_DEBUG("DML Validation | REF_FREQ_TO_PIX_FREQ:REF_FREQ_TO_PIX_FREQ - Expected: %u Actual: %u\n", + dml_dlg_attr->ref_freq_to_pix_freq, dlg_attr.ref_freq_to_pix_freq); + + /* DLG - Per luma/chroma */ + REG_GET(VBLANK_PARAMETERS_1, + REFCYC_PER_PTE_GROUP_VBLANK_L, &dlg_attr.refcyc_per_pte_group_vblank_l); + if (REG(NOM_PARAMETERS_0)) + REG_GET(NOM_PARAMETERS_0, + DST_Y_PER_PTE_ROW_NOM_L, &dlg_attr.dst_y_per_pte_row_nom_l); + if (REG(NOM_PARAMETERS_1)) + REG_GET(NOM_PARAMETERS_1, + REFCYC_PER_PTE_GROUP_NOM_L, &dlg_attr.refcyc_per_pte_group_nom_l); + REG_GET(NOM_PARAMETERS_4, + DST_Y_PER_META_ROW_NOM_L, &dlg_attr.dst_y_per_meta_row_nom_l); + REG_GET(NOM_PARAMETERS_5, + REFCYC_PER_META_CHUNK_NOM_L, &dlg_attr.refcyc_per_meta_chunk_nom_l); + REG_GET_2(PER_LINE_DELIVERY, + REFCYC_PER_LINE_DELIVERY_L, &dlg_attr.refcyc_per_line_delivery_l, + REFCYC_PER_LINE_DELIVERY_C, &dlg_attr.refcyc_per_line_delivery_c); + REG_GET_2(PER_LINE_DELIVERY_PRE, + REFCYC_PER_LINE_DELIVERY_PRE_L, &dlg_attr.refcyc_per_line_delivery_pre_l, + REFCYC_PER_LINE_DELIVERY_PRE_C, &dlg_attr.refcyc_per_line_delivery_pre_c); + REG_GET(VBLANK_PARAMETERS_2, + REFCYC_PER_PTE_GROUP_VBLANK_C, &dlg_attr.refcyc_per_pte_group_vblank_c); + if (REG(NOM_PARAMETERS_2)) + REG_GET(NOM_PARAMETERS_2, + DST_Y_PER_PTE_ROW_NOM_C, &dlg_attr.dst_y_per_pte_row_nom_c); + if (REG(NOM_PARAMETERS_3)) + REG_GET(NOM_PARAMETERS_3, + REFCYC_PER_PTE_GROUP_NOM_C, &dlg_attr.refcyc_per_pte_group_nom_c); + REG_GET(NOM_PARAMETERS_6, + DST_Y_PER_META_ROW_NOM_C, &dlg_attr.dst_y_per_meta_row_nom_c); + REG_GET(NOM_PARAMETERS_7, + REFCYC_PER_META_CHUNK_NOM_C, &dlg_attr.refcyc_per_meta_chunk_nom_c); + REG_GET(VBLANK_PARAMETERS_3, + REFCYC_PER_META_CHUNK_VBLANK_L, &dlg_attr.refcyc_per_meta_chunk_vblank_l); + REG_GET(VBLANK_PARAMETERS_4, + REFCYC_PER_META_CHUNK_VBLANK_C, &dlg_attr.refcyc_per_meta_chunk_vblank_c); + + if (dlg_attr.refcyc_per_pte_group_vblank_l != dml_dlg_attr->refcyc_per_pte_group_vblank_l) + DC_LOG_DEBUG("DML Validation | VBLANK_PARAMETERS_1:REFCYC_PER_PTE_GROUP_VBLANK_L - Expected: %u Actual: %u\n", + dml_dlg_attr->refcyc_per_pte_group_vblank_l, dlg_attr.refcyc_per_pte_group_vblank_l); + if (dlg_attr.dst_y_per_pte_row_nom_l != dml_dlg_attr->dst_y_per_pte_row_nom_l) + DC_LOG_DEBUG("DML Validation | NOM_PARAMETERS_0:DST_Y_PER_PTE_ROW_NOM_L - Expected: %u Actual: %u\n", + dml_dlg_attr->dst_y_per_pte_row_nom_l, 
dlg_attr.dst_y_per_pte_row_nom_l); + if (dlg_attr.refcyc_per_pte_group_nom_l != dml_dlg_attr->refcyc_per_pte_group_nom_l) + DC_LOG_DEBUG("DML Validation | NOM_PARAMETERS_1:REFCYC_PER_PTE_GROUP_NOM_L - Expected: %u Actual: %u\n", + dml_dlg_attr->refcyc_per_pte_group_nom_l, dlg_attr.refcyc_per_pte_group_nom_l); + if (dlg_attr.dst_y_per_meta_row_nom_l != dml_dlg_attr->dst_y_per_meta_row_nom_l) + DC_LOG_DEBUG("DML Validation | NOM_PARAMETERS_4:DST_Y_PER_META_ROW_NOM_L - Expected: %u Actual: %u\n", + dml_dlg_attr->dst_y_per_meta_row_nom_l, dlg_attr.dst_y_per_meta_row_nom_l); + if (dlg_attr.refcyc_per_meta_chunk_nom_l != dml_dlg_attr->refcyc_per_meta_chunk_nom_l) + DC_LOG_DEBUG("DML Validation | NOM_PARAMETERS_5:REFCYC_PER_META_CHUNK_NOM_L - Expected: %u Actual: %u\n", + dml_dlg_attr->refcyc_per_meta_chunk_nom_l, dlg_attr.refcyc_per_meta_chunk_nom_l); + if (dlg_attr.refcyc_per_line_delivery_l != dml_dlg_attr->refcyc_per_line_delivery_l) + DC_LOG_DEBUG("DML Validation | PER_LINE_DELIVERY:REFCYC_PER_LINE_DELIVERY_L - Expected: %u Actual: %u\n", + dml_dlg_attr->refcyc_per_line_delivery_l, dlg_attr.refcyc_per_line_delivery_l); + if (dlg_attr.refcyc_per_line_delivery_c != dml_dlg_attr->refcyc_per_line_delivery_c) + DC_LOG_DEBUG("DML Validation | PER_LINE_DELIVERY:REFCYC_PER_LINE_DELIVERY_C - Expected: %u Actual: %u\n", + dml_dlg_attr->refcyc_per_line_delivery_c, dlg_attr.refcyc_per_line_delivery_c); + if (dlg_attr.refcyc_per_pte_group_vblank_c != dml_dlg_attr->refcyc_per_pte_group_vblank_c) + DC_LOG_DEBUG("DML Validation | VBLANK_PARAMETERS_2:REFCYC_PER_PTE_GROUP_VBLANK_C - Expected: %u Actual: %u\n", + dml_dlg_attr->refcyc_per_pte_group_vblank_c, dlg_attr.refcyc_per_pte_group_vblank_c); + if (dlg_attr.dst_y_per_pte_row_nom_c != dml_dlg_attr->dst_y_per_pte_row_nom_c) + DC_LOG_DEBUG("DML Validation | NOM_PARAMETERS_2:DST_Y_PER_PTE_ROW_NOM_C - Expected: %u Actual: %u\n", + dml_dlg_attr->dst_y_per_pte_row_nom_c, dlg_attr.dst_y_per_pte_row_nom_c); + if (dlg_attr.refcyc_per_pte_group_nom_c != dml_dlg_attr->refcyc_per_pte_group_nom_c) + DC_LOG_DEBUG("DML Validation | NOM_PARAMETERS_3:REFCYC_PER_PTE_GROUP_NOM_C - Expected: %u Actual: %u\n", + dml_dlg_attr->refcyc_per_pte_group_nom_c, dlg_attr.refcyc_per_pte_group_nom_c); + if (dlg_attr.dst_y_per_meta_row_nom_c != dml_dlg_attr->dst_y_per_meta_row_nom_c) + DC_LOG_DEBUG("DML Validation | NOM_PARAMETERS_6:DST_Y_PER_META_ROW_NOM_C - Expected: %u Actual: %u\n", + dml_dlg_attr->dst_y_per_meta_row_nom_c, dlg_attr.dst_y_per_meta_row_nom_c); + if (dlg_attr.refcyc_per_meta_chunk_nom_c != dml_dlg_attr->refcyc_per_meta_chunk_nom_c) + DC_LOG_DEBUG("DML Validation | NOM_PARAMETERS_7:REFCYC_PER_META_CHUNK_NOM_C - Expected: %u Actual: %u\n", + dml_dlg_attr->refcyc_per_meta_chunk_nom_c, dlg_attr.refcyc_per_meta_chunk_nom_c); + if (dlg_attr.refcyc_per_line_delivery_pre_l != dml_dlg_attr->refcyc_per_line_delivery_pre_l) + DC_LOG_DEBUG("DML Validation | PER_LINE_DELIVERY_PRE:REFCYC_PER_LINE_DELIVERY_PRE_L - Expected: %u Actual: %u\n", + dml_dlg_attr->refcyc_per_line_delivery_pre_l, dlg_attr.refcyc_per_line_delivery_pre_l); + if (dlg_attr.refcyc_per_line_delivery_pre_c != dml_dlg_attr->refcyc_per_line_delivery_pre_c) + DC_LOG_DEBUG("DML Validation | PER_LINE_DELIVERY_PRE:REFCYC_PER_LINE_DELIVERY_PRE_C - Expected: %u Actual: %u\n", + dml_dlg_attr->refcyc_per_line_delivery_pre_c, dlg_attr.refcyc_per_line_delivery_pre_c); + if (dlg_attr.refcyc_per_meta_chunk_vblank_l != dml_dlg_attr->refcyc_per_meta_chunk_vblank_l) + DC_LOG_DEBUG("DML Validation | 
VBLANK_PARAMETERS_3:REFCYC_PER_META_CHUNK_VBLANK_L - Expected: %u Actual: %u\n", + dml_dlg_attr->refcyc_per_meta_chunk_vblank_l, dlg_attr.refcyc_per_meta_chunk_vblank_l); + if (dlg_attr.refcyc_per_meta_chunk_vblank_c != dml_dlg_attr->refcyc_per_meta_chunk_vblank_c) + DC_LOG_DEBUG("DML Validation | VBLANK_PARAMETERS_4:REFCYC_PER_META_CHUNK_VBLANK_C - Expected: %u Actual: %u\n", + dml_dlg_attr->refcyc_per_meta_chunk_vblank_c, dlg_attr.refcyc_per_meta_chunk_vblank_c); + + /* TTU - per hubp */ + REG_GET_2(DCN_TTU_QOS_WM, + QoS_LEVEL_LOW_WM, &ttu_attr.qos_level_low_wm, + QoS_LEVEL_HIGH_WM, &ttu_attr.qos_level_high_wm); + + if (ttu_attr.qos_level_low_wm != dml_ttu_attr->qos_level_low_wm) + DC_LOG_DEBUG("DML Validation | DCN_TTU_QOS_WM:QoS_LEVEL_LOW_WM - Expected: %u Actual: %u\n", + dml_ttu_attr->qos_level_low_wm, ttu_attr.qos_level_low_wm); + if (ttu_attr.qos_level_high_wm != dml_ttu_attr->qos_level_high_wm) + DC_LOG_DEBUG("DML Validation | DCN_TTU_QOS_WM:QoS_LEVEL_HIGH_WM - Expected: %u Actual: %u\n", + dml_ttu_attr->qos_level_high_wm, ttu_attr.qos_level_high_wm); + + /* TTU - per luma/chroma */ + /* Assumed surf0 is luma and 1 is chroma */ + REG_GET_3(DCN_SURF0_TTU_CNTL0, + REFCYC_PER_REQ_DELIVERY, &ttu_attr.refcyc_per_req_delivery_l, + QoS_LEVEL_FIXED, &ttu_attr.qos_level_fixed_l, + QoS_RAMP_DISABLE, &ttu_attr.qos_ramp_disable_l); + REG_GET_3(DCN_SURF1_TTU_CNTL0, + REFCYC_PER_REQ_DELIVERY, &ttu_attr.refcyc_per_req_delivery_c, + QoS_LEVEL_FIXED, &ttu_attr.qos_level_fixed_c, + QoS_RAMP_DISABLE, &ttu_attr.qos_ramp_disable_c); + REG_GET_3(DCN_CUR0_TTU_CNTL0, + REFCYC_PER_REQ_DELIVERY, &ttu_attr.refcyc_per_req_delivery_cur0, + QoS_LEVEL_FIXED, &ttu_attr.qos_level_fixed_cur0, + QoS_RAMP_DISABLE, &ttu_attr.qos_ramp_disable_cur0); + REG_GET(FLIP_PARAMETERS_1, + REFCYC_PER_PTE_GROUP_FLIP_L, &dlg_attr.refcyc_per_pte_group_flip_l); + REG_GET(DCN_CUR0_TTU_CNTL1, + REFCYC_PER_REQ_DELIVERY_PRE, &ttu_attr.refcyc_per_req_delivery_pre_cur0); + REG_GET(DCN_CUR1_TTU_CNTL1, + REFCYC_PER_REQ_DELIVERY_PRE, &ttu_attr.refcyc_per_req_delivery_pre_cur1); + REG_GET(DCN_SURF0_TTU_CNTL1, + REFCYC_PER_REQ_DELIVERY_PRE, &ttu_attr.refcyc_per_req_delivery_pre_l); + REG_GET(DCN_SURF1_TTU_CNTL1, + REFCYC_PER_REQ_DELIVERY_PRE, &ttu_attr.refcyc_per_req_delivery_pre_c); + + if (ttu_attr.refcyc_per_req_delivery_l != dml_ttu_attr->refcyc_per_req_delivery_l) + DC_LOG_DEBUG("DML Validation | DCN_SURF0_TTU_CNTL0:REFCYC_PER_REQ_DELIVERY - Expected: %u Actual: %u\n", + dml_ttu_attr->refcyc_per_req_delivery_l, ttu_attr.refcyc_per_req_delivery_l); + if (ttu_attr.qos_level_fixed_l != dml_ttu_attr->qos_level_fixed_l) + DC_LOG_DEBUG("DML Validation | DCN_SURF0_TTU_CNTL0:QoS_LEVEL_FIXED - Expected: %u Actual: %u\n", + dml_ttu_attr->qos_level_fixed_l, ttu_attr.qos_level_fixed_l); + if (ttu_attr.qos_ramp_disable_l != dml_ttu_attr->qos_ramp_disable_l) + DC_LOG_DEBUG("DML Validation | DCN_SURF0_TTU_CNTL0:QoS_RAMP_DISABLE - Expected: %u Actual: %u\n", + dml_ttu_attr->qos_ramp_disable_l, ttu_attr.qos_ramp_disable_l); + if (ttu_attr.refcyc_per_req_delivery_c != dml_ttu_attr->refcyc_per_req_delivery_c) + DC_LOG_DEBUG("DML Validation | DCN_SURF1_TTU_CNTL0:REFCYC_PER_REQ_DELIVERY - Expected: %u Actual: %u\n", + dml_ttu_attr->refcyc_per_req_delivery_c, ttu_attr.refcyc_per_req_delivery_c); + if (ttu_attr.qos_level_fixed_c != dml_ttu_attr->qos_level_fixed_c) + DC_LOG_DEBUG("DML Validation | DCN_SURF1_TTU_CNTL0:QoS_LEVEL_FIXED - Expected: %u Actual: %u\n", + dml_ttu_attr->qos_level_fixed_c, ttu_attr.qos_level_fixed_c); + if (ttu_attr.qos_ramp_disable_c != 
dml_ttu_attr->qos_ramp_disable_c) + DC_LOG_DEBUG("DML Validation | DCN_SURF1_TTU_CNTL0:QoS_RAMP_DISABLE - Expected: %u Actual: %u\n", + dml_ttu_attr->qos_ramp_disable_c, ttu_attr.qos_ramp_disable_c); + if (ttu_attr.refcyc_per_req_delivery_cur0 != dml_ttu_attr->refcyc_per_req_delivery_cur0) + DC_LOG_DEBUG("DML Validation | DCN_CUR0_TTU_CNTL0:REFCYC_PER_REQ_DELIVERY - Expected: %u Actual: %u\n", + dml_ttu_attr->refcyc_per_req_delivery_cur0, ttu_attr.refcyc_per_req_delivery_cur0); + if (ttu_attr.qos_level_fixed_cur0 != dml_ttu_attr->qos_level_fixed_cur0) + DC_LOG_DEBUG("DML Validation | DCN_CUR0_TTU_CNTL0:QoS_LEVEL_FIXED - Expected: %u Actual: %u\n", + dml_ttu_attr->qos_level_fixed_cur0, ttu_attr.qos_level_fixed_cur0); + if (ttu_attr.qos_ramp_disable_cur0 != dml_ttu_attr->qos_ramp_disable_cur0) + DC_LOG_DEBUG("DML Validation | DCN_CUR0_TTU_CNTL0:QoS_RAMP_DISABLE - Expected: %u Actual: %u\n", + dml_ttu_attr->qos_ramp_disable_cur0, ttu_attr.qos_ramp_disable_cur0); + if (dlg_attr.refcyc_per_pte_group_flip_l != dml_dlg_attr->refcyc_per_pte_group_flip_l) + DC_LOG_DEBUG("DML Validation | FLIP_PARAMETERS_1:REFCYC_PER_PTE_GROUP_FLIP_L - Expected: %u Actual: %u\n", + dml_dlg_attr->refcyc_per_pte_group_flip_l, dlg_attr.refcyc_per_pte_group_flip_l); + if (ttu_attr.refcyc_per_req_delivery_pre_cur0 != dml_ttu_attr->refcyc_per_req_delivery_pre_cur0) + DC_LOG_DEBUG("DML Validation | DCN_CUR0_TTU_CNTL1:REFCYC_PER_REQ_DELIVERY_PRE - Expected: %u Actual: %u\n", + dml_ttu_attr->refcyc_per_req_delivery_pre_cur0, ttu_attr.refcyc_per_req_delivery_pre_cur0); + if (ttu_attr.refcyc_per_req_delivery_pre_cur1 != dml_ttu_attr->refcyc_per_req_delivery_pre_cur1) + DC_LOG_DEBUG("DML Validation | DCN_CUR1_TTU_CNTL1:REFCYC_PER_REQ_DELIVERY_PRE - Expected: %u Actual: %u\n", + dml_ttu_attr->refcyc_per_req_delivery_pre_cur1, ttu_attr.refcyc_per_req_delivery_pre_cur1); + if (ttu_attr.refcyc_per_req_delivery_pre_l != dml_ttu_attr->refcyc_per_req_delivery_pre_l) + DC_LOG_DEBUG("DML Validation | DCN_SURF0_TTU_CNTL1:REFCYC_PER_REQ_DELIVERY_PRE - Expected: %u Actual: %u\n", + dml_ttu_attr->refcyc_per_req_delivery_pre_l, ttu_attr.refcyc_per_req_delivery_pre_l); + if (ttu_attr.refcyc_per_req_delivery_pre_c != dml_ttu_attr->refcyc_per_req_delivery_pre_c) + DC_LOG_DEBUG("DML Validation | DCN_SURF1_TTU_CNTL1:REFCYC_PER_REQ_DELIVERY_PRE - Expected: %u Actual: %u\n", + dml_ttu_attr->refcyc_per_req_delivery_pre_c, ttu_attr.refcyc_per_req_delivery_pre_c); +} + static struct hubp_funcs dcn20_hubp_funcs = { .hubp_enable_tripleBuffer = hubp2_enable_triplebuffer, .hubp_is_triplebuffer_enabled = hubp2_is_triplebuffer_enabled, @@ -1267,6 +1576,7 @@ static struct hubp_funcs dcn20_hubp_funcs = { .hubp_clear_underflow = hubp2_clear_underflow, .hubp_set_flip_control_surface_gsl = hubp2_set_flip_control_surface_gsl, .hubp_init = hubp1_init, + .validate_dml_output = hubp2_validate_dml_output, }; diff --git a/drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubp.c b/drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubp.c index 32e8b589aeb5..0be1c917b242 100644 --- a/drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubp.c +++ b/drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubp.c @@ -29,6 +29,8 @@ #include "dm_services.h" #include "reg_helper.h" +#define DC_LOGGER_INIT(logger) + #define REG(reg)\ hubp21->hubp_regs->reg @@ -254,6 +256,348 @@ void hubp21_set_vm_system_aperture_settings(struct hubp *hubp, SYSTEM_ACCESS_MODE, 0x3); } +void hubp21_validate_dml_output(struct hubp *hubp, + struct dc_context *ctx, + struct _vcs_dpi_display_rq_regs_st *dml_rq_regs, + struct 
_vcs_dpi_display_dlg_regs_st *dml_dlg_attr, + struct _vcs_dpi_display_ttu_regs_st *dml_ttu_attr) +{ + struct dcn21_hubp *hubp21 = TO_DCN21_HUBP(hubp); + struct _vcs_dpi_display_rq_regs_st rq_regs = {0}; + struct _vcs_dpi_display_dlg_regs_st dlg_attr = {0}; + struct _vcs_dpi_display_ttu_regs_st ttu_attr = {0}; + DC_LOGGER_INIT(ctx->logger); + + /* Requester - Per hubp */ + REG_GET(HUBPRET_CONTROL, + DET_BUF_PLANE1_BASE_ADDRESS, &rq_regs.plane1_base_address); + REG_GET_4(DCN_EXPANSION_MODE, + DRQ_EXPANSION_MODE, &rq_regs.drq_expansion_mode, + PRQ_EXPANSION_MODE, &rq_regs.prq_expansion_mode, + MRQ_EXPANSION_MODE, &rq_regs.mrq_expansion_mode, + CRQ_EXPANSION_MODE, &rq_regs.crq_expansion_mode); + REG_GET_8(DCHUBP_REQ_SIZE_CONFIG, + CHUNK_SIZE, &rq_regs.rq_regs_l.chunk_size, + MIN_CHUNK_SIZE, &rq_regs.rq_regs_l.min_chunk_size, + META_CHUNK_SIZE, &rq_regs.rq_regs_l.meta_chunk_size, + MIN_META_CHUNK_SIZE, &rq_regs.rq_regs_l.min_meta_chunk_size, + DPTE_GROUP_SIZE, &rq_regs.rq_regs_l.dpte_group_size, + VM_GROUP_SIZE, &rq_regs.rq_regs_l.mpte_group_size, + SWATH_HEIGHT, &rq_regs.rq_regs_l.swath_height, + PTE_ROW_HEIGHT_LINEAR, &rq_regs.rq_regs_l.pte_row_height_linear); + REG_GET_7(DCHUBP_REQ_SIZE_CONFIG_C, + CHUNK_SIZE_C, &rq_regs.rq_regs_c.chunk_size, + MIN_CHUNK_SIZE_C, &rq_regs.rq_regs_c.min_chunk_size, + META_CHUNK_SIZE_C, &rq_regs.rq_regs_c.meta_chunk_size, + MIN_META_CHUNK_SIZE_C, &rq_regs.rq_regs_c.min_meta_chunk_size, + DPTE_GROUP_SIZE_C, &rq_regs.rq_regs_c.dpte_group_size, + SWATH_HEIGHT_C, &rq_regs.rq_regs_c.swath_height, + PTE_ROW_HEIGHT_LINEAR_C, &rq_regs.rq_regs_c.pte_row_height_linear); + + if (rq_regs.plane1_base_address != dml_rq_regs->plane1_base_address) + DC_LOG_DEBUG("DML Validation | HUBPRET_CONTROL:DET_BUF_PLANE1_BASE_ADDRESS - Expected: %u Actual: %u\n", + dml_rq_regs->plane1_base_address, rq_regs.plane1_base_address); + if (rq_regs.drq_expansion_mode != dml_rq_regs->drq_expansion_mode) + DC_LOG_DEBUG("DML Validation | DCN_EXPANSION_MODE:DRQ_EXPANSION_MODE - Expected: %u Actual: %u\n", + dml_rq_regs->drq_expansion_mode, rq_regs.drq_expansion_mode); + if (rq_regs.prq_expansion_mode != dml_rq_regs->prq_expansion_mode) + DC_LOG_DEBUG("DML Validation | DCN_EXPANSION_MODE:MRQ_EXPANSION_MODE - Expected: %u Actual: %u\n", + dml_rq_regs->prq_expansion_mode, rq_regs.prq_expansion_mode); + if (rq_regs.mrq_expansion_mode != dml_rq_regs->mrq_expansion_mode) + DC_LOG_DEBUG("DML Validation | DCN_EXPANSION_MODE:DET_BUF_PLANE1_BASE_ADDRESS - Expected: %u Actual: %u\n", + dml_rq_regs->mrq_expansion_mode, rq_regs.mrq_expansion_mode); + if (rq_regs.crq_expansion_mode != dml_rq_regs->crq_expansion_mode) + DC_LOG_DEBUG("DML Validation | DCN_EXPANSION_MODE:CRQ_EXPANSION_MODE - Expected: %u Actual: %u\n", + dml_rq_regs->crq_expansion_mode, rq_regs.crq_expansion_mode); + + if (rq_regs.rq_regs_l.chunk_size != dml_rq_regs->rq_regs_l.chunk_size) + DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG_C:CHUNK_SIZE - Expected: %u Actual: %u\n", + dml_rq_regs->rq_regs_l.chunk_size, rq_regs.rq_regs_l.chunk_size); + if (rq_regs.rq_regs_l.min_chunk_size != dml_rq_regs->rq_regs_l.min_chunk_size) + DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG_C:MIN_CHUNK_SIZE - Expected: %u Actual: %u\n", + dml_rq_regs->rq_regs_l.min_chunk_size, rq_regs.rq_regs_l.min_chunk_size); + if (rq_regs.rq_regs_l.meta_chunk_size != dml_rq_regs->rq_regs_l.meta_chunk_size) + DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG_C:META_CHUNK_SIZE - Expected: %u Actual: %u\n", + dml_rq_regs->rq_regs_l.meta_chunk_size, 
rq_regs.rq_regs_l.meta_chunk_size); + if (rq_regs.rq_regs_l.min_meta_chunk_size != dml_rq_regs->rq_regs_l.min_meta_chunk_size) + DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG_C:MIN_META_CHUNK_SIZE - Expected: %u Actual: %u\n", + dml_rq_regs->rq_regs_l.min_meta_chunk_size, rq_regs.rq_regs_l.min_meta_chunk_size); + if (rq_regs.rq_regs_l.dpte_group_size != dml_rq_regs->rq_regs_l.dpte_group_size) + DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG_C:DPTE_GROUP_SIZE - Expected: %u Actual: %u\n", + dml_rq_regs->rq_regs_l.dpte_group_size, rq_regs.rq_regs_l.dpte_group_size); + if (rq_regs.rq_regs_l.mpte_group_size != dml_rq_regs->rq_regs_l.mpte_group_size) + DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG_C:VM_GROUP_SIZE - Expected: %u Actual: %u\n", + dml_rq_regs->rq_regs_l.mpte_group_size, rq_regs.rq_regs_l.mpte_group_size); + if (rq_regs.rq_regs_l.swath_height != dml_rq_regs->rq_regs_l.swath_height) + DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG_C:SWATH_HEIGHT - Expected: %u Actual: %u\n", + dml_rq_regs->rq_regs_l.swath_height, rq_regs.rq_regs_l.swath_height); + if (rq_regs.rq_regs_l.pte_row_height_linear != dml_rq_regs->rq_regs_l.pte_row_height_linear) + DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG_C:PTE_ROW_HEIGHT_LINEAR - Expected: %u Actual: %u\n", + dml_rq_regs->rq_regs_l.pte_row_height_linear, rq_regs.rq_regs_l.pte_row_height_linear); + + if (rq_regs.rq_regs_c.chunk_size != dml_rq_regs->rq_regs_c.chunk_size) + DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG:CHUNK_SIZE_C - Expected: %u Actual: %u\n", + dml_rq_regs->rq_regs_c.chunk_size, rq_regs.rq_regs_c.chunk_size); + if (rq_regs.rq_regs_c.min_chunk_size != dml_rq_regs->rq_regs_c.min_chunk_size) + DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG:MIN_CHUNK_SIZE_C - Expected: %u Actual: %u\n", + dml_rq_regs->rq_regs_c.min_chunk_size, rq_regs.rq_regs_c.min_chunk_size); + if (rq_regs.rq_regs_c.meta_chunk_size != dml_rq_regs->rq_regs_c.meta_chunk_size) + DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG:META_CHUNK_SIZE_C - Expected: %u Actual: %u\n", + dml_rq_regs->rq_regs_c.meta_chunk_size, rq_regs.rq_regs_c.meta_chunk_size); + if (rq_regs.rq_regs_c.min_meta_chunk_size != dml_rq_regs->rq_regs_c.min_meta_chunk_size) + DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG:MIN_META_CHUNK_SIZE_C - Expected: %u Actual: %u\n", + dml_rq_regs->rq_regs_c.min_meta_chunk_size, rq_regs.rq_regs_c.min_meta_chunk_size); + if (rq_regs.rq_regs_c.dpte_group_size != dml_rq_regs->rq_regs_c.dpte_group_size) + DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG:DPTE_GROUP_SIZE_C - Expected: %u Actual: %u\n", + dml_rq_regs->rq_regs_c.dpte_group_size, rq_regs.rq_regs_c.dpte_group_size); + if (rq_regs.rq_regs_c.swath_height != dml_rq_regs->rq_regs_c.swath_height) + DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG:SWATH_HEIGHT_C - Expected: %u Actual: %u\n", + dml_rq_regs->rq_regs_c.swath_height, rq_regs.rq_regs_c.swath_height); + if (rq_regs.rq_regs_c.pte_row_height_linear != dml_rq_regs->rq_regs_c.pte_row_height_linear) + DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG:PTE_ROW_HEIGHT_LINEAR_C - Expected: %u Actual: %u\n", + dml_rq_regs->rq_regs_c.pte_row_height_linear, rq_regs.rq_regs_c.pte_row_height_linear); + + + /* DLG - Per hubp */ + REG_GET_2(BLANK_OFFSET_0, + REFCYC_H_BLANK_END, &dlg_attr.refcyc_h_blank_end, + DLG_V_BLANK_END, &dlg_attr.dlg_vblank_end); + REG_GET(BLANK_OFFSET_1, + MIN_DST_Y_NEXT_START, &dlg_attr.min_dst_y_next_start); + REG_GET(DST_DIMENSIONS, + REFCYC_PER_HTOTAL, 
&dlg_attr.refcyc_per_htotal); + REG_GET_2(DST_AFTER_SCALER, + REFCYC_X_AFTER_SCALER, &dlg_attr.refcyc_x_after_scaler, + DST_Y_AFTER_SCALER, &dlg_attr.dst_y_after_scaler); + REG_GET(REF_FREQ_TO_PIX_FREQ, + REF_FREQ_TO_PIX_FREQ, &dlg_attr.ref_freq_to_pix_freq); + + if (dlg_attr.refcyc_h_blank_end != dml_dlg_attr->refcyc_h_blank_end) + DC_LOG_DEBUG("DML Validation | BLANK_OFFSET_0:REFCYC_H_BLANK_END - Expected: %u Actual: %u\n", + dml_dlg_attr->refcyc_h_blank_end, dlg_attr.refcyc_h_blank_end); + if (dlg_attr.dlg_vblank_end != dml_dlg_attr->dlg_vblank_end) + DC_LOG_DEBUG("DML Validation | BLANK_OFFSET_0:DLG_V_BLANK_END - Expected: %u Actual: %u\n", + dml_dlg_attr->dlg_vblank_end, dlg_attr.dlg_vblank_end); + if (dlg_attr.min_dst_y_next_start != dml_dlg_attr->min_dst_y_next_start) + DC_LOG_DEBUG("DML Validation | BLANK_OFFSET_1:MIN_DST_Y_NEXT_START - Expected: %u Actual: %u\n", + dml_dlg_attr->min_dst_y_next_start, dlg_attr.min_dst_y_next_start); + if (dlg_attr.refcyc_per_htotal != dml_dlg_attr->refcyc_per_htotal) + DC_LOG_DEBUG("DML Validation | DST_DIMENSIONS:REFCYC_PER_HTOTAL - Expected: %u Actual: %u\n", + dml_dlg_attr->refcyc_per_htotal, dlg_attr.refcyc_per_htotal); + if (dlg_attr.refcyc_x_after_scaler != dml_dlg_attr->refcyc_x_after_scaler) + DC_LOG_DEBUG("DML Validation | DST_AFTER_SCALER:REFCYC_X_AFTER_SCALER - Expected: %u Actual: %u\n", + dml_dlg_attr->refcyc_x_after_scaler, dlg_attr.refcyc_x_after_scaler); + if (dlg_attr.dst_y_after_scaler != dml_dlg_attr->dst_y_after_scaler) + DC_LOG_DEBUG("DML Validation | DST_AFTER_SCALER:DST_Y_AFTER_SCALER - Expected: %u Actual: %u\n", + dml_dlg_attr->dst_y_after_scaler, dlg_attr.dst_y_after_scaler); + if (dlg_attr.ref_freq_to_pix_freq != dml_dlg_attr->ref_freq_to_pix_freq) + DC_LOG_DEBUG("DML Validation | REF_FREQ_TO_PIX_FREQ:REF_FREQ_TO_PIX_FREQ - Expected: %u Actual: %u\n", + dml_dlg_attr->ref_freq_to_pix_freq, dlg_attr.ref_freq_to_pix_freq); + + /* DLG - Per luma/chroma */ + REG_GET(VBLANK_PARAMETERS_1, + REFCYC_PER_PTE_GROUP_VBLANK_L, &dlg_attr.refcyc_per_pte_group_vblank_l); + if (REG(NOM_PARAMETERS_0)) + REG_GET(NOM_PARAMETERS_0, + DST_Y_PER_PTE_ROW_NOM_L, &dlg_attr.dst_y_per_pte_row_nom_l); + if (REG(NOM_PARAMETERS_1)) + REG_GET(NOM_PARAMETERS_1, + REFCYC_PER_PTE_GROUP_NOM_L, &dlg_attr.refcyc_per_pte_group_nom_l); + REG_GET(NOM_PARAMETERS_4, + DST_Y_PER_META_ROW_NOM_L, &dlg_attr.dst_y_per_meta_row_nom_l); + REG_GET(NOM_PARAMETERS_5, + REFCYC_PER_META_CHUNK_NOM_L, &dlg_attr.refcyc_per_meta_chunk_nom_l); + REG_GET_2(PER_LINE_DELIVERY, + REFCYC_PER_LINE_DELIVERY_L, &dlg_attr.refcyc_per_line_delivery_l, + REFCYC_PER_LINE_DELIVERY_C, &dlg_attr.refcyc_per_line_delivery_c); + REG_GET_2(PER_LINE_DELIVERY_PRE, + REFCYC_PER_LINE_DELIVERY_PRE_L, &dlg_attr.refcyc_per_line_delivery_pre_l, + REFCYC_PER_LINE_DELIVERY_PRE_C, &dlg_attr.refcyc_per_line_delivery_pre_c); + REG_GET(VBLANK_PARAMETERS_2, + REFCYC_PER_PTE_GROUP_VBLANK_C, &dlg_attr.refcyc_per_pte_group_vblank_c); + if (REG(NOM_PARAMETERS_2)) + REG_GET(NOM_PARAMETERS_2, + DST_Y_PER_PTE_ROW_NOM_C, &dlg_attr.dst_y_per_pte_row_nom_c); + if (REG(NOM_PARAMETERS_3)) + REG_GET(NOM_PARAMETERS_3, + REFCYC_PER_PTE_GROUP_NOM_C, &dlg_attr.refcyc_per_pte_group_nom_c); + REG_GET(NOM_PARAMETERS_6, + DST_Y_PER_META_ROW_NOM_C, &dlg_attr.dst_y_per_meta_row_nom_c); + REG_GET(NOM_PARAMETERS_7, + REFCYC_PER_META_CHUNK_NOM_C, &dlg_attr.refcyc_per_meta_chunk_nom_c); + REG_GET(VBLANK_PARAMETERS_3, + REFCYC_PER_META_CHUNK_VBLANK_L, &dlg_attr.refcyc_per_meta_chunk_vblank_l); + REG_GET(VBLANK_PARAMETERS_4, + 
REFCYC_PER_META_CHUNK_VBLANK_C, &dlg_attr.refcyc_per_meta_chunk_vblank_c); + + if (dlg_attr.refcyc_per_pte_group_vblank_l != dml_dlg_attr->refcyc_per_pte_group_vblank_l) + DC_LOG_DEBUG("DML Validation | VBLANK_PARAMETERS_1:REFCYC_PER_PTE_GROUP_VBLANK_L - Expected: %u Actual: %u\n", + dml_dlg_attr->refcyc_per_pte_group_vblank_l, dlg_attr.refcyc_per_pte_group_vblank_l); + if (dlg_attr.dst_y_per_pte_row_nom_l != dml_dlg_attr->dst_y_per_pte_row_nom_l) + DC_LOG_DEBUG("DML Validation | NOM_PARAMETERS_0:DST_Y_PER_PTE_ROW_NOM_L - Expected: %u Actual: %u\n", + dml_dlg_attr->dst_y_per_pte_row_nom_l, dlg_attr.dst_y_per_pte_row_nom_l); + if (dlg_attr.refcyc_per_pte_group_nom_l != dml_dlg_attr->refcyc_per_pte_group_nom_l) + DC_LOG_DEBUG("DML Validation | NOM_PARAMETERS_1:REFCYC_PER_PTE_GROUP_NOM_L - Expected: %u Actual: %u\n", + dml_dlg_attr->refcyc_per_pte_group_nom_l, dlg_attr.refcyc_per_pte_group_nom_l); + if (dlg_attr.dst_y_per_meta_row_nom_l != dml_dlg_attr->dst_y_per_meta_row_nom_l) + DC_LOG_DEBUG("DML Validation | NOM_PARAMETERS_4:DST_Y_PER_META_ROW_NOM_L - Expected: %u Actual: %u\n", + dml_dlg_attr->dst_y_per_meta_row_nom_l, dlg_attr.dst_y_per_meta_row_nom_l); + if (dlg_attr.refcyc_per_meta_chunk_nom_l != dml_dlg_attr->refcyc_per_meta_chunk_nom_l) + DC_LOG_DEBUG("DML Validation | NOM_PARAMETERS_5:REFCYC_PER_META_CHUNK_NOM_L - Expected: %u Actual: %u\n", + dml_dlg_attr->refcyc_per_meta_chunk_nom_l, dlg_attr.refcyc_per_meta_chunk_nom_l); + if (dlg_attr.refcyc_per_line_delivery_l != dml_dlg_attr->refcyc_per_line_delivery_l) + DC_LOG_DEBUG("DML Validation | PER_LINE_DELIVERY:REFCYC_PER_LINE_DELIVERY_L - Expected: %u Actual: %u\n", + dml_dlg_attr->refcyc_per_line_delivery_l, dlg_attr.refcyc_per_line_delivery_l); + if (dlg_attr.refcyc_per_line_delivery_c != dml_dlg_attr->refcyc_per_line_delivery_c) + DC_LOG_DEBUG("DML Validation | PER_LINE_DELIVERY:REFCYC_PER_LINE_DELIVERY_C - Expected: %u Actual: %u\n", + dml_dlg_attr->refcyc_per_line_delivery_c, dlg_attr.refcyc_per_line_delivery_c); + if (dlg_attr.refcyc_per_pte_group_vblank_c != dml_dlg_attr->refcyc_per_pte_group_vblank_c) + DC_LOG_DEBUG("DML Validation | VBLANK_PARAMETERS_2:REFCYC_PER_PTE_GROUP_VBLANK_C - Expected: %u Actual: %u\n", + dml_dlg_attr->refcyc_per_pte_group_vblank_c, dlg_attr.refcyc_per_pte_group_vblank_c); + if (dlg_attr.dst_y_per_pte_row_nom_c != dml_dlg_attr->dst_y_per_pte_row_nom_c) + DC_LOG_DEBUG("DML Validation | NOM_PARAMETERS_2:DST_Y_PER_PTE_ROW_NOM_C - Expected: %u Actual: %u\n", + dml_dlg_attr->dst_y_per_pte_row_nom_c, dlg_attr.dst_y_per_pte_row_nom_c); + if (dlg_attr.refcyc_per_pte_group_nom_c != dml_dlg_attr->refcyc_per_pte_group_nom_c) + DC_LOG_DEBUG("DML Validation | NOM_PARAMETERS_3:REFCYC_PER_PTE_GROUP_NOM_C - Expected: %u Actual: %u\n", + dml_dlg_attr->refcyc_per_pte_group_nom_c, dlg_attr.refcyc_per_pte_group_nom_c); + if (dlg_attr.dst_y_per_meta_row_nom_c != dml_dlg_attr->dst_y_per_meta_row_nom_c) + DC_LOG_DEBUG("DML Validation | NOM_PARAMETERS_6:DST_Y_PER_META_ROW_NOM_C - Expected: %u Actual: %u\n", + dml_dlg_attr->dst_y_per_meta_row_nom_c, dlg_attr.dst_y_per_meta_row_nom_c); + if (dlg_attr.refcyc_per_meta_chunk_nom_c != dml_dlg_attr->refcyc_per_meta_chunk_nom_c) + DC_LOG_DEBUG("DML Validation | NOM_PARAMETERS_7:REFCYC_PER_META_CHUNK_NOM_C - Expected: %u Actual: %u\n", + dml_dlg_attr->refcyc_per_meta_chunk_nom_c, dlg_attr.refcyc_per_meta_chunk_nom_c); + if (dlg_attr.refcyc_per_line_delivery_pre_l != dml_dlg_attr->refcyc_per_line_delivery_pre_l) + DC_LOG_DEBUG("DML Validation | 
PER_LINE_DELIVERY_PRE:REFCYC_PER_LINE_DELIVERY_PRE_L - Expected: %u Actual: %u\n", + dml_dlg_attr->refcyc_per_line_delivery_pre_l, dlg_attr.refcyc_per_line_delivery_pre_l); + if (dlg_attr.refcyc_per_line_delivery_pre_c != dml_dlg_attr->refcyc_per_line_delivery_pre_c) + DC_LOG_DEBUG("DML Validation | PER_LINE_DELIVERY_PRE:REFCYC_PER_LINE_DELIVERY_PRE_C - Expected: %u Actual: %u\n", + dml_dlg_attr->refcyc_per_line_delivery_pre_c, dlg_attr.refcyc_per_line_delivery_pre_c); + if (dlg_attr.refcyc_per_meta_chunk_vblank_l != dml_dlg_attr->refcyc_per_meta_chunk_vblank_l) + DC_LOG_DEBUG("DML Validation | VBLANK_PARAMETERS_3:REFCYC_PER_META_CHUNK_VBLANK_L - Expected: %u Actual: %u\n", + dml_dlg_attr->refcyc_per_meta_chunk_vblank_l, dlg_attr.refcyc_per_meta_chunk_vblank_l); + if (dlg_attr.refcyc_per_meta_chunk_vblank_c != dml_dlg_attr->refcyc_per_meta_chunk_vblank_c) + DC_LOG_DEBUG("DML Validation | VBLANK_PARAMETERS_4:REFCYC_PER_META_CHUNK_VBLANK_C - Expected: %u Actual: %u\n", + dml_dlg_attr->refcyc_per_meta_chunk_vblank_c, dlg_attr.refcyc_per_meta_chunk_vblank_c); + + /* TTU - per hubp */ + REG_GET_2(DCN_TTU_QOS_WM, + QoS_LEVEL_LOW_WM, &ttu_attr.qos_level_low_wm, + QoS_LEVEL_HIGH_WM, &ttu_attr.qos_level_high_wm); + + if (ttu_attr.qos_level_low_wm != dml_ttu_attr->qos_level_low_wm) + DC_LOG_DEBUG("DML Validation | DCN_TTU_QOS_WM:QoS_LEVEL_LOW_WM - Expected: %u Actual: %u\n", + dml_ttu_attr->qos_level_low_wm, ttu_attr.qos_level_low_wm); + if (ttu_attr.qos_level_high_wm != dml_ttu_attr->qos_level_high_wm) + DC_LOG_DEBUG("DML Validation | DCN_TTU_QOS_WM:QoS_LEVEL_HIGH_WM - Expected: %u Actual: %u\n", + dml_ttu_attr->qos_level_high_wm, ttu_attr.qos_level_high_wm); + + /* TTU - per luma/chroma */ + /* Assumed surf0 is luma and 1 is chroma */ + REG_GET_3(DCN_SURF0_TTU_CNTL0, + REFCYC_PER_REQ_DELIVERY, &ttu_attr.refcyc_per_req_delivery_l, + QoS_LEVEL_FIXED, &ttu_attr.qos_level_fixed_l, + QoS_RAMP_DISABLE, &ttu_attr.qos_ramp_disable_l); + REG_GET_3(DCN_SURF1_TTU_CNTL0, + REFCYC_PER_REQ_DELIVERY, &ttu_attr.refcyc_per_req_delivery_c, + QoS_LEVEL_FIXED, &ttu_attr.qos_level_fixed_c, + QoS_RAMP_DISABLE, &ttu_attr.qos_ramp_disable_c); + REG_GET_3(DCN_CUR0_TTU_CNTL0, + REFCYC_PER_REQ_DELIVERY, &ttu_attr.refcyc_per_req_delivery_cur0, + QoS_LEVEL_FIXED, &ttu_attr.qos_level_fixed_cur0, + QoS_RAMP_DISABLE, &ttu_attr.qos_ramp_disable_cur0); + REG_GET(FLIP_PARAMETERS_1, + REFCYC_PER_PTE_GROUP_FLIP_L, &dlg_attr.refcyc_per_pte_group_flip_l); + REG_GET(DCN_CUR0_TTU_CNTL1, + REFCYC_PER_REQ_DELIVERY_PRE, &ttu_attr.refcyc_per_req_delivery_pre_cur0); + REG_GET(DCN_CUR1_TTU_CNTL1, + REFCYC_PER_REQ_DELIVERY_PRE, &ttu_attr.refcyc_per_req_delivery_pre_cur1); + REG_GET(DCN_SURF0_TTU_CNTL1, + REFCYC_PER_REQ_DELIVERY_PRE, &ttu_attr.refcyc_per_req_delivery_pre_l); + REG_GET(DCN_SURF1_TTU_CNTL1, + REFCYC_PER_REQ_DELIVERY_PRE, &ttu_attr.refcyc_per_req_delivery_pre_c); + + if (ttu_attr.refcyc_per_req_delivery_l != dml_ttu_attr->refcyc_per_req_delivery_l) + DC_LOG_DEBUG("DML Validation | DCN_SURF0_TTU_CNTL0:REFCYC_PER_REQ_DELIVERY - Expected: %u Actual: %u\n", + dml_ttu_attr->refcyc_per_req_delivery_l, ttu_attr.refcyc_per_req_delivery_l); + if (ttu_attr.qos_level_fixed_l != dml_ttu_attr->qos_level_fixed_l) + DC_LOG_DEBUG("DML Validation | DCN_SURF0_TTU_CNTL0:QoS_LEVEL_FIXED - Expected: %u Actual: %u\n", + dml_ttu_attr->qos_level_fixed_l, ttu_attr.qos_level_fixed_l); + if (ttu_attr.qos_ramp_disable_l != dml_ttu_attr->qos_ramp_disable_l) + DC_LOG_DEBUG("DML Validation | DCN_SURF0_TTU_CNTL0:QoS_RAMP_DISABLE - Expected: %u Actual: %u\n", + 
dml_ttu_attr->qos_ramp_disable_l, ttu_attr.qos_ramp_disable_l); + if (ttu_attr.refcyc_per_req_delivery_c != dml_ttu_attr->refcyc_per_req_delivery_c) + DC_LOG_DEBUG("DML Validation | DCN_SURF1_TTU_CNTL0:REFCYC_PER_REQ_DELIVERY - Expected: %u Actual: %u\n", + dml_ttu_attr->refcyc_per_req_delivery_c, ttu_attr.refcyc_per_req_delivery_c); + if (ttu_attr.qos_level_fixed_c != dml_ttu_attr->qos_level_fixed_c) + DC_LOG_DEBUG("DML Validation | DCN_SURF1_TTU_CNTL0:QoS_LEVEL_FIXED - Expected: %u Actual: %u\n", + dml_ttu_attr->qos_level_fixed_c, ttu_attr.qos_level_fixed_c); + if (ttu_attr.qos_ramp_disable_c != dml_ttu_attr->qos_ramp_disable_c) + DC_LOG_DEBUG("DML Validation | DCN_SURF1_TTU_CNTL0:QoS_RAMP_DISABLE - Expected: %u Actual: %u\n", + dml_ttu_attr->qos_ramp_disable_c, ttu_attr.qos_ramp_disable_c); + if (ttu_attr.refcyc_per_req_delivery_cur0 != dml_ttu_attr->refcyc_per_req_delivery_cur0) + DC_LOG_DEBUG("DML Validation | DCN_CUR0_TTU_CNTL0:REFCYC_PER_REQ_DELIVERY - Expected: %u Actual: %u\n", + dml_ttu_attr->refcyc_per_req_delivery_cur0, ttu_attr.refcyc_per_req_delivery_cur0); + if (ttu_attr.qos_level_fixed_cur0 != dml_ttu_attr->qos_level_fixed_cur0) + DC_LOG_DEBUG("DML Validation | DCN_CUR0_TTU_CNTL0:QoS_LEVEL_FIXED - Expected: %u Actual: %u\n", + dml_ttu_attr->qos_level_fixed_cur0, ttu_attr.qos_level_fixed_cur0); + if (ttu_attr.qos_ramp_disable_cur0 != dml_ttu_attr->qos_ramp_disable_cur0) + DC_LOG_DEBUG("DML Validation | DCN_CUR0_TTU_CNTL0:QoS_RAMP_DISABLE - Expected: %u Actual: %u\n", + dml_ttu_attr->qos_ramp_disable_cur0, ttu_attr.qos_ramp_disable_cur0); + if (dlg_attr.refcyc_per_pte_group_flip_l != dml_dlg_attr->refcyc_per_pte_group_flip_l) + DC_LOG_DEBUG("DML Validation | FLIP_PARAMETERS_1:REFCYC_PER_PTE_GROUP_FLIP_L - Expected: %u Actual: %u\n", + dml_dlg_attr->refcyc_per_pte_group_flip_l, dlg_attr.refcyc_per_pte_group_flip_l); + if (ttu_attr.refcyc_per_req_delivery_pre_cur0 != dml_ttu_attr->refcyc_per_req_delivery_pre_cur0) + DC_LOG_DEBUG("DML Validation | DCN_CUR0_TTU_CNTL1:REFCYC_PER_REQ_DELIVERY_PRE - Expected: %u Actual: %u\n", + dml_ttu_attr->refcyc_per_req_delivery_pre_cur0, ttu_attr.refcyc_per_req_delivery_pre_cur0); + if (ttu_attr.refcyc_per_req_delivery_pre_cur1 != dml_ttu_attr->refcyc_per_req_delivery_pre_cur1) + DC_LOG_DEBUG("DML Validation | DCN_CUR1_TTU_CNTL1:REFCYC_PER_REQ_DELIVERY_PRE - Expected: %u Actual: %u\n", + dml_ttu_attr->refcyc_per_req_delivery_pre_cur1, ttu_attr.refcyc_per_req_delivery_pre_cur1); + if (ttu_attr.refcyc_per_req_delivery_pre_l != dml_ttu_attr->refcyc_per_req_delivery_pre_l) + DC_LOG_DEBUG("DML Validation | DCN_SURF0_TTU_CNTL1:REFCYC_PER_REQ_DELIVERY_PRE - Expected: %u Actual: %u\n", + dml_ttu_attr->refcyc_per_req_delivery_pre_l, ttu_attr.refcyc_per_req_delivery_pre_l); + if (ttu_attr.refcyc_per_req_delivery_pre_c != dml_ttu_attr->refcyc_per_req_delivery_pre_c) + DC_LOG_DEBUG("DML Validation | DCN_SURF1_TTU_CNTL1:REFCYC_PER_REQ_DELIVERY_PRE - Expected: %u Actual: %u\n", + dml_ttu_attr->refcyc_per_req_delivery_pre_c, ttu_attr.refcyc_per_req_delivery_pre_c); + + /* Host VM deadline regs */ + REG_GET(VBLANK_PARAMETERS_5, + REFCYC_PER_VM_GROUP_VBLANK, &dlg_attr.refcyc_per_vm_group_vblank); + REG_GET(VBLANK_PARAMETERS_6, + REFCYC_PER_VM_REQ_VBLANK, &dlg_attr.refcyc_per_vm_req_vblank); + REG_GET(FLIP_PARAMETERS_3, + REFCYC_PER_VM_GROUP_FLIP, &dlg_attr.refcyc_per_vm_group_flip); + REG_GET(FLIP_PARAMETERS_4, + REFCYC_PER_VM_REQ_FLIP, &dlg_attr.refcyc_per_vm_req_flip); + REG_GET(FLIP_PARAMETERS_5, + REFCYC_PER_PTE_GROUP_FLIP_C, 
&dlg_attr.refcyc_per_pte_group_flip_c); + REG_GET(FLIP_PARAMETERS_6, + REFCYC_PER_META_CHUNK_FLIP_C, &dlg_attr.refcyc_per_meta_chunk_flip_c); + REG_GET(FLIP_PARAMETERS_2, + REFCYC_PER_META_CHUNK_FLIP_L, &dlg_attr.refcyc_per_meta_chunk_flip_l); + + if (dlg_attr.refcyc_per_vm_group_vblank != dml_dlg_attr->refcyc_per_vm_group_vblank) + DC_LOG_DEBUG("DML Validation | VBLANK_PARAMETERS_5:REFCYC_PER_VM_GROUP_VBLANK - Expected: %u Actual: %u\n", + dml_dlg_attr->refcyc_per_vm_group_vblank, dlg_attr.refcyc_per_vm_group_vblank); + if (dlg_attr.refcyc_per_vm_req_vblank != dml_dlg_attr->refcyc_per_vm_req_vblank) + DC_LOG_DEBUG("DML Validation | VBLANK_PARAMETERS_6:REFCYC_PER_VM_REQ_VBLANK - Expected: %u Actual: %u\n", + dml_dlg_attr->refcyc_per_vm_req_vblank, dlg_attr.refcyc_per_vm_req_vblank); + if (dlg_attr.refcyc_per_vm_group_flip != dml_dlg_attr->refcyc_per_vm_group_flip) + DC_LOG_DEBUG("DML Validation | FLIP_PARAMETERS_3:REFCYC_PER_VM_GROUP_FLIP - Expected: %u Actual: %u\n", + dml_dlg_attr->refcyc_per_vm_group_flip, dlg_attr.refcyc_per_vm_group_flip); + if (dlg_attr.refcyc_per_vm_req_flip != dml_dlg_attr->refcyc_per_vm_req_flip) + DC_LOG_DEBUG("DML Validation | FLIP_PARAMETERS_4:REFCYC_PER_VM_REQ_FLIP - Expected: %u Actual: %u\n", + dml_dlg_attr->refcyc_per_vm_req_flip, dlg_attr.refcyc_per_vm_req_flip); + if (dlg_attr.refcyc_per_pte_group_flip_c != dml_dlg_attr->refcyc_per_pte_group_flip_c) + DC_LOG_DEBUG("DML Validation | FLIP_PARAMETERS_5:REFCYC_PER_PTE_GROUP_FLIP_C - Expected: %u Actual: %u\n", + dml_dlg_attr->refcyc_per_pte_group_flip_c, dlg_attr.refcyc_per_pte_group_flip_c); + if (dlg_attr.refcyc_per_meta_chunk_flip_c != dml_dlg_attr->refcyc_per_meta_chunk_flip_c) + DC_LOG_DEBUG("DML Validation | FLIP_PARAMETERS_6:REFCYC_PER_META_CHUNK_FLIP_C - Expected: %u Actual: %u\n", + dml_dlg_attr->refcyc_per_meta_chunk_flip_c, dlg_attr.refcyc_per_meta_chunk_flip_c); + if (dlg_attr.refcyc_per_meta_chunk_flip_l != dml_dlg_attr->refcyc_per_meta_chunk_flip_l) + DC_LOG_DEBUG("DML Validation | FLIP_PARAMETERS_2:REFCYC_PER_META_CHUNK_FLIP_L - Expected: %u Actual: %u\n", + dml_dlg_attr->refcyc_per_meta_chunk_flip_l, dlg_attr.refcyc_per_meta_chunk_flip_l); +} + void hubp21_init(struct hubp *hubp) { // DEDCN21-133: Inconsistent row starting line for flip between DPTE and Meta @@ -286,6 +630,7 @@ static struct hubp_funcs dcn21_hubp_funcs = { .hubp_clear_underflow = hubp1_clear_underflow, .hubp_set_flip_control_surface_gsl = hubp2_set_flip_control_surface_gsl, .hubp_init = hubp21_init, + .validate_dml_output = hubp21_validate_dml_output, }; bool hubp21_construct( diff --git a/drivers/gpu/drm/amd/display/dc/inc/hw/hubp.h b/drivers/gpu/drm/amd/display/dc/inc/hw/hubp.h index 62b2d24cd1d3..9793da0f3c7e 100644 --- a/drivers/gpu/drm/amd/display/dc/inc/hw/hubp.h +++ b/drivers/gpu/drm/amd/display/dc/inc/hw/hubp.h @@ -157,6 +157,13 @@ struct hubp_funcs { struct hubp *hubp, bool enable); + void (*validate_dml_output)( + struct hubp *hubp, + struct dc_context *ctx, + struct _vcs_dpi_display_rq_regs_st *dml_rq_regs, + struct _vcs_dpi_display_dlg_regs_st *dml_dlg_attr, + struct _vcs_dpi_display_ttu_regs_st *dml_ttu_attr); + }; #endif -- cgit v1.2.3 From 1ba2a48305715f5832fac023ddc0d4ceba7a8491 Mon Sep 17 00:00:00 2001 From: Michael Strauss Date: Sun, 10 Nov 2019 15:22:15 -0500 Subject: drm/amd/display: Disable chroma viewport w/a when rotated 180 degrees [WHY] Previous Renoir chroma viewport workaround fixed an MPO flicker by increasing the chroma viewport size. 
However, when the MPO plane is rotated 180 degrees, the viewport is read in reverse. Since the workaround increases viewport size, when reading in reverse it causes a vertical chroma offset. [HOW] Pass rotation value to viewport set functions Temporarily disable the chroma viewport w/a when hubp is rotated 180 degrees Signed-off-by: Michael Strauss Reviewed-by: Tony Cheng Acked-by: Leo Li Signed-off-by: Alex Deucher --- drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubp.c | 3 ++- drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubp.h | 4 +++- drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c | 3 ++- drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c | 3 ++- drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubp.c | 7 +++++-- drivers/gpu/drm/amd/display/dc/inc/hw/hubp.h | 4 +++- 6 files changed, 17 insertions(+), 7 deletions(-) (limited to 'drivers/gpu/drm/amd/display/dc/inc/hw/hubp.h') diff --git a/drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubp.c b/drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubp.c index 31b64733d693..4d1301e5eaf5 100644 --- a/drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubp.c +++ b/drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubp.c @@ -810,7 +810,8 @@ static void hubp1_set_vm_context0_settings(struct hubp *hubp, void min_set_viewport( struct hubp *hubp, const struct rect *viewport, - const struct rect *viewport_c) + const struct rect *viewport_c, + enum dc_rotation_angle rotation) { struct dcn10_hubp *hubp1 = TO_DCN10_HUBP(hubp); diff --git a/drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubp.h b/drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubp.h index 780af5b3c16f..e44eaae5033b 100644 --- a/drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubp.h +++ b/drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubp.h @@ -749,7 +749,9 @@ void hubp1_set_blank(struct hubp *hubp, bool blank); void min_set_viewport(struct hubp *hubp, const struct rect *viewport, - const struct rect *viewport_c); + const struct rect *viewport_c, + enum dc_rotation_angle rotation); +/* rotation angle added for use by hubp21_set_viewport */ void hubp1_clk_cntl(struct hubp *hubp, bool enable); void hubp1_vtg_sel(struct hubp *hubp, uint32_t otg_inst); diff --git a/drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c b/drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c index 2b3081ee0e07..2440e28493e7 100644 --- a/drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c +++ b/drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c @@ -2286,7 +2286,8 @@ static void dcn10_update_dchubp_dpp( hubp->funcs->mem_program_viewport( hubp, &pipe_ctx->plane_res.scl_data.viewport, - &pipe_ctx->plane_res.scl_data.viewport_c); + &pipe_ctx->plane_res.scl_data.viewport_c, + plane_state->rotation); } if (pipe_ctx->stream->cursor_attributes.address.quad_part != 0) { diff --git a/drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c b/drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c index 619af3b3029d..392542f84ca2 100644 --- a/drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c +++ b/drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c @@ -1382,7 +1382,8 @@ static void dcn20_update_dchubp_dpp( hubp->funcs->mem_program_viewport( hubp, &pipe_ctx->plane_res.scl_data.viewport, - &pipe_ctx->plane_res.scl_data.viewport_c); + &pipe_ctx->plane_res.scl_data.viewport_c, + plane_state->rotation); /* Any updates are handled in dc interface, just need to apply existing for plane enable */ if ((pipe_ctx->update_flags.bits.enable || pipe_ctx->update_flags.bits.opp_changed) diff --git a/drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubp.c 
b/drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubp.c index 4408aed5087b..38661b9c61f8 100644 --- a/drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubp.c +++ b/drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubp.c @@ -169,7 +169,8 @@ static void hubp21_setup( void hubp21_set_viewport( struct hubp *hubp, const struct rect *viewport, - const struct rect *viewport_c) + const struct rect *viewport_c, + enum dc_rotation_angle rotation) { struct dcn21_hubp *hubp21 = TO_DCN21_HUBP(hubp); int patched_viewport_height = 0; @@ -196,9 +197,11 @@ void hubp21_set_viewport( * Work around for underflow issue with NV12 + rIOMMU translation * + immediate flip. This will cause hubp underflow, but will not * be user visible since underflow is in blank region + * Disable w/a when rotated 180 degrees, causes vertical chroma offset */ patched_viewport_height = viewport_c->height; - if (viewport_c->height != 0 && debug->nv12_iflip_vm_wa) { + if (viewport_c->height != 0 && debug->nv12_iflip_vm_wa && + rotation != ROTATION_ANGLE_180) { int pte_row_height = 0; int pte_rows = 0; diff --git a/drivers/gpu/drm/amd/display/dc/inc/hw/hubp.h b/drivers/gpu/drm/amd/display/dc/inc/hw/hubp.h index 9793da0f3c7e..85a34dde8526 100644 --- a/drivers/gpu/drm/amd/display/dc/inc/hw/hubp.h +++ b/drivers/gpu/drm/amd/display/dc/inc/hw/hubp.h @@ -82,7 +82,9 @@ struct hubp_funcs { void (*mem_program_viewport)( struct hubp *hubp, const struct rect *viewport, - const struct rect *viewport_c); + const struct rect *viewport_c, + enum dc_rotation_angle rotation); + /* rotation needed for Renoir workaround */ bool (*hubp_program_surface_flip_and_addr)( struct hubp *hubp, -- cgit v1.2.3 From cf27a6d15d950ed1beb3926469c9eaa6907bbf88 Mon Sep 17 00:00:00 2001 From: Eric Yang Date: Mon, 18 Nov 2019 15:41:19 -0500 Subject: drm/amd/display: update chroma viewport wa [Why] Need previously implemented chroma vp wa to work for rotation cases. [How] Implement rotation specific wa. 
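For context, the original workaround padded the chroma viewport height up to a whole number of PTE rows plus one line, as in the code removed from hubp21_set_viewport() below; a simplified sketch of that rounding, assuming pte_row_height_linear_c has already been read from DCHUBP_REQ_SIZE_CONFIG_C:PTE_ROW_HEIGHT_LINEAR_C:

/* Simplified sketch of the chroma viewport padding being reworked here;
 * the register field encodes log2(PTE row height) - 3. This is not the
 * body of the new hubp21_apply_PLAT_54186_wa(), which is only partially
 * shown in this excerpt.
 */
int pte_row_height = 1 << (pte_row_height_linear_c + 3);
int pte_rows = (viewport_c->height / pte_row_height) + 1;
int patched_viewport_height = pte_rows * pte_row_height + 1;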
Signed-off-by: Eric Yang Reviewed-by: Tony Cheng Acked-by: Rodrigo Siqueira Signed-off-by: Alex Deucher --- drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubp.c | 3 +- drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubp.h | 4 +- .../drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c | 3 +- drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c | 14 +- drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubp.c | 314 +++++++++++++++++++-- drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubp.h | 1 + drivers/gpu/drm/amd/display/dc/inc/hw/hubp.h | 7 +- 7 files changed, 305 insertions(+), 41 deletions(-) (limited to 'drivers/gpu/drm/amd/display/dc/inc/hw/hubp.h') diff --git a/drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubp.c b/drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubp.c index 4d1301e5eaf5..31b64733d693 100644 --- a/drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubp.c +++ b/drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubp.c @@ -810,8 +810,7 @@ static void hubp1_set_vm_context0_settings(struct hubp *hubp, void min_set_viewport( struct hubp *hubp, const struct rect *viewport, - const struct rect *viewport_c, - enum dc_rotation_angle rotation) + const struct rect *viewport_c) { struct dcn10_hubp *hubp1 = TO_DCN10_HUBP(hubp); diff --git a/drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubp.h b/drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubp.h index e44eaae5033b..780af5b3c16f 100644 --- a/drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubp.h +++ b/drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubp.h @@ -749,9 +749,7 @@ void hubp1_set_blank(struct hubp *hubp, bool blank); void min_set_viewport(struct hubp *hubp, const struct rect *viewport, - const struct rect *viewport_c, - enum dc_rotation_angle rotation); -/* rotation angle added for use by hubp21_set_viewport */ + const struct rect *viewport_c); void hubp1_clk_cntl(struct hubp *hubp, bool enable); void hubp1_vtg_sel(struct hubp *hubp, uint32_t otg_inst); diff --git a/drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c b/drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c index 3996fef56948..c9f7c0af58e3 100644 --- a/drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c +++ b/drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c @@ -2291,8 +2291,7 @@ static void dcn10_update_dchubp_dpp( hubp->funcs->mem_program_viewport( hubp, &pipe_ctx->plane_res.scl_data.viewport, - &pipe_ctx->plane_res.scl_data.viewport_c, - plane_state->rotation); + &pipe_ctx->plane_res.scl_data.viewport_c); } if (pipe_ctx->stream->cursor_attributes.address.quad_part != 0) { diff --git a/drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c b/drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c index cafbd08f1cf2..8d779062a4e8 100644 --- a/drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c +++ b/drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c @@ -1305,6 +1305,7 @@ static void dcn20_update_dchubp_dpp( struct hubp *hubp = pipe_ctx->plane_res.hubp; struct dpp *dpp = pipe_ctx->plane_res.dpp; struct dc_plane_state *plane_state = pipe_ctx->plane_state; + bool viewport_changed = false; if (pipe_ctx->update_flags.bits.dppclk) dpp->funcs->dpp_dppclk_control(dpp, false, true); @@ -1383,12 +1384,14 @@ static void dcn20_update_dchubp_dpp( if (pipe_ctx->update_flags.bits.viewport || (context == dc->current_state && plane_state->update_flags.bits.scaling_change) || - (context == dc->current_state && pipe_ctx->stream->update_flags.bits.scaling)) + (context == dc->current_state && pipe_ctx->stream->update_flags.bits.scaling)) { + hubp->funcs->mem_program_viewport( hubp, &pipe_ctx->plane_res.scl_data.viewport, - 
&pipe_ctx->plane_res.scl_data.viewport_c, - plane_state->rotation); + &pipe_ctx->plane_res.scl_data.viewport_c); + viewport_changed = true; + } /* Any updates are handled in dc interface, just need to apply existing for plane enable */ if ((pipe_ctx->update_flags.bits.enable || pipe_ctx->update_flags.bits.opp_changed) @@ -1441,9 +1444,14 @@ static void dcn20_update_dchubp_dpp( hubp->power_gated = false; } + if (hubp->funcs->apply_PLAT_54186_wa && viewport_changed) + hubp->funcs->apply_PLAT_54186_wa(hubp, &plane_state->address); + if (pipe_ctx->update_flags.bits.enable || plane_state->update_flags.bits.addr_update) hws->funcs.update_plane_addr(dc, pipe_ctx); + + if (pipe_ctx->update_flags.bits.enable) hubp->funcs->set_blank(hubp, false); } diff --git a/drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubp.c b/drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubp.c index 332bf3d3a664..216ae170bc50 100644 --- a/drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubp.c +++ b/drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubp.c @@ -169,12 +169,9 @@ static void hubp21_setup( void hubp21_set_viewport( struct hubp *hubp, const struct rect *viewport, - const struct rect *viewport_c, - enum dc_rotation_angle rotation) + const struct rect *viewport_c) { struct dcn21_hubp *hubp21 = TO_DCN21_HUBP(hubp); - int patched_viewport_height = 0; - struct dc_debug_options *debug = &hubp->ctx->dc->debug; REG_SET_2(DCSURF_PRI_VIEWPORT_DIMENSION, 0, PRI_VIEWPORT_WIDTH, viewport->width, @@ -193,31 +190,10 @@ void hubp21_set_viewport( SEC_VIEWPORT_X_START, viewport->x, SEC_VIEWPORT_Y_START, viewport->y); - /* - * Work around for underflow issue with NV12 + rIOMMU translation - * + immediate flip. This will cause hubp underflow, but will not - * be user visible since underflow is in blank region - * Disable w/a when rotated 180 degrees, causes vertical chroma offset - */ - patched_viewport_height = viewport_c->height; - if (debug->nv12_iflip_vm_wa && viewport_c->height > 512 && - rotation != ROTATION_ANGLE_180) { - int pte_row_height = 0; - int pte_rows = 0; - - REG_GET(DCHUBP_REQ_SIZE_CONFIG_C, - PTE_ROW_HEIGHT_LINEAR_C, &pte_row_height); - - pte_row_height = 1 << (pte_row_height + 3); - pte_rows = (viewport_c->height / pte_row_height) + 1; - patched_viewport_height = pte_rows * pte_row_height + 1; - } - - /* DC supports NV12 only at the moment */ REG_SET_2(DCSURF_PRI_VIEWPORT_DIMENSION_C, 0, PRI_VIEWPORT_WIDTH_C, viewport_c->width, - PRI_VIEWPORT_HEIGHT_C, patched_viewport_height); + PRI_VIEWPORT_HEIGHT_C, viewport_c->height); REG_SET_2(DCSURF_PRI_VIEWPORT_START_C, 0, PRI_VIEWPORT_X_START_C, viewport_c->x, @@ -225,13 +201,113 @@ void hubp21_set_viewport( REG_SET_2(DCSURF_SEC_VIEWPORT_DIMENSION_C, 0, SEC_VIEWPORT_WIDTH_C, viewport_c->width, - SEC_VIEWPORT_HEIGHT_C, patched_viewport_height); + SEC_VIEWPORT_HEIGHT_C, viewport_c->height); REG_SET_2(DCSURF_SEC_VIEWPORT_START_C, 0, SEC_VIEWPORT_X_START_C, viewport_c->x, SEC_VIEWPORT_Y_START_C, viewport_c->y); } +static void hubp21_apply_PLAT_54186_wa( + struct hubp *hubp, + const struct dc_plane_address *address) +{ + struct dcn21_hubp *hubp21 = TO_DCN21_HUBP(hubp); + struct dc_debug_options *debug = &hubp->ctx->dc->debug; + unsigned int chroma_bpe = 2; + unsigned int luma_addr_high_part = 0; + unsigned int row_height = 0; + unsigned int chroma_pitch = 0; + unsigned int viewport_c_height = 0; + unsigned int viewport_c_width = 0; + unsigned int patched_viewport_height = 0; + unsigned int patched_viewport_width = 0; + unsigned int rotation_angle = 0; + unsigned int pix_format = 0; + unsigned int 
h_mirror_en = 0; + unsigned int tile_blk_size = 64 * 1024; /* 64KB for 64KB SW, 4KB for 4KB SW */ + + + if (!debug->nv12_iflip_vm_wa) + return; + + REG_GET(DCHUBP_REQ_SIZE_CONFIG_C, + PTE_ROW_HEIGHT_LINEAR_C, &row_height); + + REG_GET_2(DCSURF_PRI_VIEWPORT_DIMENSION_C, + PRI_VIEWPORT_WIDTH_C, &viewport_c_width, + PRI_VIEWPORT_HEIGHT_C, &viewport_c_height); + + REG_GET(DCSURF_PRIMARY_SURFACE_ADDRESS_HIGH_C, + PRIMARY_SURFACE_ADDRESS_HIGH_C, &luma_addr_high_part); + + REG_GET(DCSURF_SURFACE_PITCH_C, + PITCH_C, &chroma_pitch); + + chroma_pitch += 1; + + REG_GET_3(DCSURF_SURFACE_CONFIG, + SURFACE_PIXEL_FORMAT, &pix_format, + ROTATION_ANGLE, &rotation_angle, + H_MIRROR_EN, &h_mirror_en); + + /* apply wa only for NV12 surface with scatter gather enabled with view port > 512 */ + if (address->type != PLN_ADDR_TYPE_VIDEO_PROGRESSIVE || + address->video_progressive.luma_addr.high_part == 0xf4 + || viewport_c_height <= 512) + return; + + switch (rotation_angle) { + case 0: /* 0 degree rotation */ + row_height = 128; + patched_viewport_height = (viewport_c_height / row_height + 1) * row_height + 1; + patched_viewport_width = viewport_c_width; + hubp21->PLAT_54186_wa_chroma_addr_offset = 0; + break; + case 2: /* 180 degree rotation */ + row_height = 128; + patched_viewport_height = viewport_c_height + row_height; + patched_viewport_width = viewport_c_width; + hubp21->PLAT_54186_wa_chroma_addr_offset = 0 - chroma_pitch * row_height * chroma_bpe; + break; + case 1: /* 90 degree rotation */ + row_height = 256; + if (h_mirror_en) { + patched_viewport_height = viewport_c_height; + patched_viewport_width = viewport_c_width + row_height; + hubp21->PLAT_54186_wa_chroma_addr_offset = 0; + } else { + patched_viewport_height = viewport_c_height; + patched_viewport_width = viewport_c_width + row_height; + hubp21->PLAT_54186_wa_chroma_addr_offset = 0 - tile_blk_size; + } + break; + case 3: /* 270 degree rotation */ + row_height = 256; + if (h_mirror_en) { + patched_viewport_height = viewport_c_height; + patched_viewport_width = viewport_c_width + row_height; + hubp21->PLAT_54186_wa_chroma_addr_offset = 0 - tile_blk_size; + } else { + patched_viewport_height = viewport_c_height; + patched_viewport_width = viewport_c_width + row_height; + hubp21->PLAT_54186_wa_chroma_addr_offset = 0; + } + break; + default: + ASSERT(0); + break; + } + + /* catch cases where viewport keep growing */ + ASSERT(patched_viewport_height && patched_viewport_height < 5000); + ASSERT(patched_viewport_width && patched_viewport_width < 5000); + + REG_UPDATE_2(DCSURF_PRI_VIEWPORT_DIMENSION_C, + PRI_VIEWPORT_WIDTH_C, patched_viewport_width, + PRI_VIEWPORT_HEIGHT_C, patched_viewport_height); +} + void hubp21_set_vm_system_aperture_settings(struct hubp *hubp, struct vm_system_aperture_param *apt) { @@ -602,6 +678,187 @@ void hubp21_validate_dml_output(struct hubp *hubp, dml_dlg_attr->refcyc_per_meta_chunk_flip_l, dlg_attr.refcyc_per_meta_chunk_flip_l); } +bool hubp21_program_surface_flip_and_addr( + struct hubp *hubp, + const struct dc_plane_address *address, + bool flip_immediate) +{ + struct dcn21_hubp *hubp21 = TO_DCN21_HUBP(hubp); + struct dc_debug_options *debug = &hubp->ctx->dc->debug; + + //program flip type + REG_UPDATE(DCSURF_FLIP_CONTROL, + SURFACE_FLIP_TYPE, flip_immediate); + + // Program VMID reg + REG_UPDATE(VMID_SETTINGS_0, + VMID, address->vmid); + + if (address->type == PLN_ADDR_TYPE_GRPH_STEREO) { + REG_UPDATE(DCSURF_FLIP_CONTROL, SURFACE_FLIP_MODE_FOR_STEREOSYNC, 0x1); + REG_UPDATE(DCSURF_FLIP_CONTROL, 
SURFACE_FLIP_IN_STEREOSYNC, 0x1); + + } else { + // turn off stereo if not in stereo + REG_UPDATE(DCSURF_FLIP_CONTROL, SURFACE_FLIP_MODE_FOR_STEREOSYNC, 0x0); + REG_UPDATE(DCSURF_FLIP_CONTROL, SURFACE_FLIP_IN_STEREOSYNC, 0x0); + } + + + + /* HW automatically latch rest of address register on write to + * DCSURF_PRIMARY_SURFACE_ADDRESS if SURFACE_UPDATE_LOCK is not used + * + * program high first and then the low addr, order matters! + */ + switch (address->type) { + case PLN_ADDR_TYPE_GRAPHICS: + /* DCN1.0 does not support const color + * TODO: program DCHUBBUB_RET_PATH_DCC_CFGx_0/1 + * base on address->grph.dcc_const_color + * x = 0, 2, 4, 6 for pipe 0, 1, 2, 3 for rgb and luma + * x = 1, 3, 5, 7 for pipe 0, 1, 2, 3 for chroma + */ + + if (address->grph.addr.quad_part == 0) + break; + + REG_UPDATE_2(DCSURF_SURFACE_CONTROL, + PRIMARY_SURFACE_TMZ, address->tmz_surface, + PRIMARY_META_SURFACE_TMZ, address->tmz_surface); + + if (address->grph.meta_addr.quad_part != 0) { + REG_SET(DCSURF_PRIMARY_META_SURFACE_ADDRESS_HIGH, 0, + PRIMARY_META_SURFACE_ADDRESS_HIGH, + address->grph.meta_addr.high_part); + + REG_SET(DCSURF_PRIMARY_META_SURFACE_ADDRESS, 0, + PRIMARY_META_SURFACE_ADDRESS, + address->grph.meta_addr.low_part); + } + + REG_SET(DCSURF_PRIMARY_SURFACE_ADDRESS_HIGH, 0, + PRIMARY_SURFACE_ADDRESS_HIGH, + address->grph.addr.high_part); + + REG_SET(DCSURF_PRIMARY_SURFACE_ADDRESS, 0, + PRIMARY_SURFACE_ADDRESS, + address->grph.addr.low_part); + break; + case PLN_ADDR_TYPE_VIDEO_PROGRESSIVE: + if (address->video_progressive.luma_addr.quad_part == 0 + || address->video_progressive.chroma_addr.quad_part == 0) + break; + + REG_UPDATE_4(DCSURF_SURFACE_CONTROL, + PRIMARY_SURFACE_TMZ, address->tmz_surface, + PRIMARY_SURFACE_TMZ_C, address->tmz_surface, + PRIMARY_META_SURFACE_TMZ, address->tmz_surface, + PRIMARY_META_SURFACE_TMZ_C, address->tmz_surface); + + if (address->video_progressive.luma_meta_addr.quad_part != 0) { + REG_SET(DCSURF_PRIMARY_META_SURFACE_ADDRESS_HIGH_C, 0, + PRIMARY_META_SURFACE_ADDRESS_HIGH_C, + address->video_progressive.chroma_meta_addr.high_part); + + REG_SET(DCSURF_PRIMARY_META_SURFACE_ADDRESS_C, 0, + PRIMARY_META_SURFACE_ADDRESS_C, + address->video_progressive.chroma_meta_addr.low_part); + + REG_SET(DCSURF_PRIMARY_META_SURFACE_ADDRESS_HIGH, 0, + PRIMARY_META_SURFACE_ADDRESS_HIGH, + address->video_progressive.luma_meta_addr.high_part); + + REG_SET(DCSURF_PRIMARY_META_SURFACE_ADDRESS, 0, + PRIMARY_META_SURFACE_ADDRESS, + address->video_progressive.luma_meta_addr.low_part); + } + + REG_SET(DCSURF_PRIMARY_SURFACE_ADDRESS_HIGH_C, 0, + PRIMARY_SURFACE_ADDRESS_HIGH_C, + address->video_progressive.chroma_addr.high_part); + + if (debug->nv12_iflip_vm_wa) { + REG_SET(DCSURF_PRIMARY_SURFACE_ADDRESS_C, 0, + PRIMARY_SURFACE_ADDRESS_C, + address->video_progressive.chroma_addr.low_part + hubp21->PLAT_54186_wa_chroma_addr_offset); + } else { + REG_SET(DCSURF_PRIMARY_SURFACE_ADDRESS_C, 0, + PRIMARY_SURFACE_ADDRESS_C, + address->video_progressive.chroma_addr.low_part); + } + + REG_SET(DCSURF_PRIMARY_SURFACE_ADDRESS_HIGH, 0, + PRIMARY_SURFACE_ADDRESS_HIGH, + address->video_progressive.luma_addr.high_part); + + REG_SET(DCSURF_PRIMARY_SURFACE_ADDRESS, 0, + PRIMARY_SURFACE_ADDRESS, + address->video_progressive.luma_addr.low_part); + break; + case PLN_ADDR_TYPE_GRPH_STEREO: + if (address->grph_stereo.left_addr.quad_part == 0) + break; + if (address->grph_stereo.right_addr.quad_part == 0) + break; + + REG_UPDATE_8(DCSURF_SURFACE_CONTROL, + PRIMARY_SURFACE_TMZ, address->tmz_surface, + 
PRIMARY_SURFACE_TMZ_C, address->tmz_surface, + PRIMARY_META_SURFACE_TMZ, address->tmz_surface, + PRIMARY_META_SURFACE_TMZ_C, address->tmz_surface, + SECONDARY_SURFACE_TMZ, address->tmz_surface, + SECONDARY_SURFACE_TMZ_C, address->tmz_surface, + SECONDARY_META_SURFACE_TMZ, address->tmz_surface, + SECONDARY_META_SURFACE_TMZ_C, address->tmz_surface); + + if (address->grph_stereo.right_meta_addr.quad_part != 0) { + + REG_SET(DCSURF_SECONDARY_META_SURFACE_ADDRESS_HIGH, 0, + SECONDARY_META_SURFACE_ADDRESS_HIGH, + address->grph_stereo.right_meta_addr.high_part); + + REG_SET(DCSURF_SECONDARY_META_SURFACE_ADDRESS, 0, + SECONDARY_META_SURFACE_ADDRESS, + address->grph_stereo.right_meta_addr.low_part); + } + if (address->grph_stereo.left_meta_addr.quad_part != 0) { + + REG_SET(DCSURF_PRIMARY_META_SURFACE_ADDRESS_HIGH, 0, + PRIMARY_META_SURFACE_ADDRESS_HIGH, + address->grph_stereo.left_meta_addr.high_part); + + REG_SET(DCSURF_PRIMARY_META_SURFACE_ADDRESS, 0, + PRIMARY_META_SURFACE_ADDRESS, + address->grph_stereo.left_meta_addr.low_part); + } + + REG_SET(DCSURF_SECONDARY_SURFACE_ADDRESS_HIGH, 0, + SECONDARY_SURFACE_ADDRESS_HIGH, + address->grph_stereo.right_addr.high_part); + + REG_SET(DCSURF_SECONDARY_SURFACE_ADDRESS, 0, + SECONDARY_SURFACE_ADDRESS, + address->grph_stereo.right_addr.low_part); + + REG_SET(DCSURF_PRIMARY_SURFACE_ADDRESS_HIGH, 0, + PRIMARY_SURFACE_ADDRESS_HIGH, + address->grph_stereo.left_addr.high_part); + + REG_SET(DCSURF_PRIMARY_SURFACE_ADDRESS, 0, + PRIMARY_SURFACE_ADDRESS, + address->grph_stereo.left_addr.low_part); + break; + default: + BREAK_TO_DEBUGGER(); + break; + } + + hubp->request_address = *address; + + return true; +} + void hubp21_init(struct hubp *hubp) { // DEDCN21-133: Inconsistent row starting line for flip between DPTE and Meta @@ -614,7 +871,7 @@ void hubp21_init(struct hubp *hubp) static struct hubp_funcs dcn21_hubp_funcs = { .hubp_enable_tripleBuffer = hubp2_enable_triplebuffer, .hubp_is_triplebuffer_enabled = hubp2_is_triplebuffer_enabled, - .hubp_program_surface_flip_and_addr = hubp2_program_surface_flip_and_addr, + .hubp_program_surface_flip_and_addr = hubp21_program_surface_flip_and_addr, .hubp_program_surface_config = hubp1_program_surface_config, .hubp_is_flip_pending = hubp1_is_flip_pending, .hubp_setup = hubp21_setup, @@ -623,6 +880,7 @@ static struct hubp_funcs dcn21_hubp_funcs = { .set_blank = hubp1_set_blank, .dcc_control = hubp1_dcc_control, .mem_program_viewport = hubp21_set_viewport, + .apply_PLAT_54186_wa = hubp21_apply_PLAT_54186_wa, .set_cursor_attributes = hubp2_cursor_set_attributes, .set_cursor_position = hubp1_cursor_set_position, .hubp_clk_cntl = hubp1_clk_cntl, diff --git a/drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubp.h b/drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubp.h index aeda719a2a13..9873b6cbc5ba 100644 --- a/drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubp.h +++ b/drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubp.h @@ -108,6 +108,7 @@ struct dcn21_hubp { const struct dcn_hubp2_registers *hubp_regs; const struct dcn_hubp2_shift *hubp_shift; const struct dcn_hubp2_mask *hubp_mask; + int PLAT_54186_wa_chroma_addr_offset; }; bool hubp21_construct( diff --git a/drivers/gpu/drm/amd/display/dc/inc/hw/hubp.h b/drivers/gpu/drm/amd/display/dc/inc/hw/hubp.h index 85a34dde8526..686145933335 100644 --- a/drivers/gpu/drm/amd/display/dc/inc/hw/hubp.h +++ b/drivers/gpu/drm/amd/display/dc/inc/hw/hubp.h @@ -82,9 +82,10 @@ struct hubp_funcs { void (*mem_program_viewport)( struct hubp *hubp, const struct rect *viewport, - const struct rect *viewport_c, 
- enum dc_rotation_angle rotation); - /* rotation needed for Renoir workaround */ + const struct rect *viewport_c); + + void (*apply_PLAT_54186_wa)(struct hubp *hubp, + const struct dc_plane_address *address); bool (*hubp_program_surface_flip_and_addr)( struct hubp *hubp, -- cgit v1.2.3 From bae9c49bf0703ecb214e84f889e84fe447e4eda9 Mon Sep 17 00:00:00 2001 From: Yongqiang Sun Date: Wed, 18 Dec 2019 15:01:17 -0500 Subject: drm/amd/display: Only program surface flip for video plane via dmcub Only need to do surface flip for video plane via dmcub. Signed-off-by: Yongqiang Sun Reviewed-by: Tony Cheng Acked-by: Harry Wentland Acked-by: Rodrigo Siqueira Signed-off-by: Alex Deucher --- drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubp.c | 130 +++++++++++----------- drivers/gpu/drm/amd/display/dc/inc/hw/hubp.h | 20 ++++ drivers/gpu/drm/amd/display/dmub/inc/dmub_cmd.h | 19 +--- 3 files changed, 94 insertions(+), 75 deletions(-) (limited to 'drivers/gpu/drm/amd/display/dc/inc/hw/hubp.h') diff --git a/drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubp.c b/drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubp.c index 12396c371569..da63fc53cc4a 100644 --- a/drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubp.c +++ b/drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubp.c @@ -690,86 +690,98 @@ void hubp21_validate_dml_output(struct hubp *hubp, dml_dlg_attr->refcyc_per_meta_chunk_flip_l, dlg_attr.refcyc_per_meta_chunk_flip_l); } -static void program_surface_flip_and_addr(struct hubp *hubp, struct dmub_rb_cmd_flip *surface_flip) +static void program_surface_flip_and_addr(struct hubp *hubp, struct surface_flip_registers *flip_regs) { struct dcn21_hubp *hubp21 = TO_DCN21_HUBP(hubp); REG_UPDATE_3(DCSURF_FLIP_CONTROL, - SURFACE_FLIP_TYPE, surface_flip->flip.flip_params.immediate, - SURFACE_FLIP_MODE_FOR_STEREOSYNC, surface_flip->flip.flip_params.grph_stereo, - SURFACE_FLIP_IN_STEREOSYNC, surface_flip->flip.flip_params.grph_stereo); + SURFACE_FLIP_TYPE, flip_regs->immediate, + SURFACE_FLIP_MODE_FOR_STEREOSYNC, flip_regs->grph_stereo, + SURFACE_FLIP_IN_STEREOSYNC, flip_regs->grph_stereo); REG_UPDATE(VMID_SETTINGS_0, - VMID, surface_flip->flip.flip_params.vmid); + VMID, flip_regs->vmid); REG_UPDATE_8(DCSURF_SURFACE_CONTROL, - PRIMARY_SURFACE_TMZ, surface_flip->flip.flip_params.tmz_surface, - PRIMARY_SURFACE_TMZ_C, surface_flip->flip.flip_params.tmz_surface, - PRIMARY_META_SURFACE_TMZ, surface_flip->flip.flip_params.tmz_surface, - PRIMARY_META_SURFACE_TMZ_C, surface_flip->flip.flip_params.tmz_surface, - SECONDARY_SURFACE_TMZ, surface_flip->flip.flip_params.tmz_surface, - SECONDARY_SURFACE_TMZ_C, surface_flip->flip.flip_params.tmz_surface, - SECONDARY_META_SURFACE_TMZ, surface_flip->flip.flip_params.tmz_surface, - SECONDARY_META_SURFACE_TMZ_C, surface_flip->flip.flip_params.tmz_surface); + PRIMARY_SURFACE_TMZ, flip_regs->tmz_surface, + PRIMARY_SURFACE_TMZ_C, flip_regs->tmz_surface, + PRIMARY_META_SURFACE_TMZ, flip_regs->tmz_surface, + PRIMARY_META_SURFACE_TMZ_C, flip_regs->tmz_surface, + SECONDARY_SURFACE_TMZ, flip_regs->tmz_surface, + SECONDARY_SURFACE_TMZ_C, flip_regs->tmz_surface, + SECONDARY_META_SURFACE_TMZ, flip_regs->tmz_surface, + SECONDARY_META_SURFACE_TMZ_C, flip_regs->tmz_surface); REG_SET(DCSURF_PRIMARY_META_SURFACE_ADDRESS_HIGH_C, 0, PRIMARY_META_SURFACE_ADDRESS_HIGH_C, - surface_flip->flip.DCSURF_PRIMARY_META_SURFACE_ADDRESS_HIGH_C); + flip_regs->DCSURF_PRIMARY_META_SURFACE_ADDRESS_HIGH_C); REG_SET(DCSURF_PRIMARY_META_SURFACE_ADDRESS_C, 0, PRIMARY_META_SURFACE_ADDRESS_C, - 
surface_flip->flip.DCSURF_PRIMARY_META_SURFACE_ADDRESS_C); + flip_regs->DCSURF_PRIMARY_META_SURFACE_ADDRESS_C); REG_SET(DCSURF_PRIMARY_META_SURFACE_ADDRESS_HIGH, 0, PRIMARY_META_SURFACE_ADDRESS_HIGH, - surface_flip->flip.DCSURF_PRIMARY_META_SURFACE_ADDRESS_HIGH); + flip_regs->DCSURF_PRIMARY_META_SURFACE_ADDRESS_HIGH); REG_SET(DCSURF_PRIMARY_META_SURFACE_ADDRESS, 0, PRIMARY_META_SURFACE_ADDRESS, - surface_flip->flip.DCSURF_PRIMARY_META_SURFACE_ADDRESS); + flip_regs->DCSURF_PRIMARY_META_SURFACE_ADDRESS); REG_SET(DCSURF_SECONDARY_META_SURFACE_ADDRESS_HIGH, 0, SECONDARY_META_SURFACE_ADDRESS_HIGH, - surface_flip->flip.DCSURF_SECONDARY_META_SURFACE_ADDRESS_HIGH); + flip_regs->DCSURF_SECONDARY_META_SURFACE_ADDRESS_HIGH); REG_SET(DCSURF_SECONDARY_META_SURFACE_ADDRESS, 0, SECONDARY_META_SURFACE_ADDRESS, - surface_flip->flip.DCSURF_SECONDARY_META_SURFACE_ADDRESS); + flip_regs->DCSURF_SECONDARY_META_SURFACE_ADDRESS); REG_SET(DCSURF_SECONDARY_SURFACE_ADDRESS_HIGH, 0, SECONDARY_SURFACE_ADDRESS_HIGH, - surface_flip->flip.DCSURF_SECONDARY_SURFACE_ADDRESS_HIGH); + flip_regs->DCSURF_SECONDARY_SURFACE_ADDRESS_HIGH); REG_SET(DCSURF_SECONDARY_SURFACE_ADDRESS, 0, SECONDARY_SURFACE_ADDRESS, - surface_flip->flip.DCSURF_SECONDARY_SURFACE_ADDRESS); + flip_regs->DCSURF_SECONDARY_SURFACE_ADDRESS); REG_SET(DCSURF_PRIMARY_SURFACE_ADDRESS_HIGH_C, 0, PRIMARY_SURFACE_ADDRESS_HIGH_C, - surface_flip->flip.DCSURF_PRIMARY_SURFACE_ADDRESS_HIGH_C); + flip_regs->DCSURF_PRIMARY_SURFACE_ADDRESS_HIGH_C); REG_SET(DCSURF_PRIMARY_SURFACE_ADDRESS_C, 0, PRIMARY_SURFACE_ADDRESS_C, - surface_flip->flip.DCSURF_PRIMARY_SURFACE_ADDRESS_C); + flip_regs->DCSURF_PRIMARY_SURFACE_ADDRESS_C); REG_SET(DCSURF_PRIMARY_SURFACE_ADDRESS_HIGH, 0, PRIMARY_SURFACE_ADDRESS_HIGH, - surface_flip->flip.DCSURF_PRIMARY_SURFACE_ADDRESS_HIGH); + flip_regs->DCSURF_PRIMARY_SURFACE_ADDRESS_HIGH); REG_SET(DCSURF_PRIMARY_SURFACE_ADDRESS, 0, PRIMARY_SURFACE_ADDRESS, - surface_flip->flip.DCSURF_PRIMARY_SURFACE_ADDRESS); + flip_regs->DCSURF_PRIMARY_SURFACE_ADDRESS); } -void program_surface_flip_and_addr_dmcub(struct hubp *hubp, struct dmub_rb_cmd_flip *surface_flip) +void dmcub_PLAT_54186_wa(struct hubp *hubp, struct surface_flip_registers *flip_regs) { struct dc_dmub_srv *dmcub = hubp->ctx->dmub_srv; struct dcn21_hubp *hubp21 = TO_DCN21_HUBP(hubp); + struct dmub_rb_cmd_PLAT_54186_wa PLAT_54186_wa = { 0 }; + + PLAT_54186_wa.header.type = DMUB_CMD__PLAT_54186_WA; + PLAT_54186_wa.flip.DCSURF_PRIMARY_SURFACE_ADDRESS = flip_regs->DCSURF_PRIMARY_SURFACE_ADDRESS; + PLAT_54186_wa.flip.DCSURF_PRIMARY_SURFACE_ADDRESS_C = flip_regs->DCSURF_PRIMARY_SURFACE_ADDRESS_C; + PLAT_54186_wa.flip.DCSURF_PRIMARY_SURFACE_ADDRESS_HIGH = flip_regs->DCSURF_PRIMARY_SURFACE_ADDRESS_HIGH; + PLAT_54186_wa.flip.DCSURF_PRIMARY_SURFACE_ADDRESS_HIGH_C = flip_regs->DCSURF_PRIMARY_SURFACE_ADDRESS_HIGH_C; + PLAT_54186_wa.flip.flip_params.grph_stereo = flip_regs->grph_stereo; + PLAT_54186_wa.flip.flip_params.hubp_inst = hubp->inst; + PLAT_54186_wa.flip.flip_params.immediate = flip_regs->immediate; + PLAT_54186_wa.flip.flip_params.tmz_surface = flip_regs->tmz_surface; + PLAT_54186_wa.flip.flip_params.vmid = flip_regs->vmid; PERF_TRACE(); // TODO: remove after performance is stable. - dc_dmub_srv_cmd_queue(dmcub, &surface_flip->header); + dc_dmub_srv_cmd_queue(dmcub, &PLAT_54186_wa.header); PERF_TRACE(); // TODO: remove after performance is stable. dc_dmub_srv_cmd_execute(dmcub); PERF_TRACE(); // TODO: remove after performance is stable. 
@@ -782,15 +794,11 @@ bool hubp21_program_surface_flip_and_addr( const struct dc_plane_address *address, bool flip_immediate) { - struct dmub_rb_cmd_flip surface_flip = { 0 }; - bool grph_stereo = false; struct dc_debug_options *debug = &hubp->ctx->dc->debug; struct dcn21_hubp *hubp21 = TO_DCN21_HUBP(hubp); + struct surface_flip_registers flip_regs = { 0 }; - surface_flip.header.type = DMUB_CMD__SURFACE_FLIP; - - surface_flip.flip.flip_params.vmid = address->vmid; - surface_flip.flip.flip_params.hubp_inst = hubp->inst; + flip_regs.vmid = address->vmid; switch (address->type) { case PLN_ADDR_TYPE_GRAPHICS: @@ -800,15 +808,15 @@ bool hubp21_program_surface_flip_and_addr( } if (address->grph.meta_addr.quad_part != 0) { - surface_flip.flip.DCSURF_PRIMARY_META_SURFACE_ADDRESS = + flip_regs.DCSURF_PRIMARY_META_SURFACE_ADDRESS = address->grph.meta_addr.low_part; - surface_flip.flip.DCSURF_PRIMARY_META_SURFACE_ADDRESS_HIGH = + flip_regs.DCSURF_PRIMARY_META_SURFACE_ADDRESS_HIGH = address->grph.meta_addr.high_part; } - surface_flip.flip.DCSURF_PRIMARY_SURFACE_ADDRESS = + flip_regs.DCSURF_PRIMARY_SURFACE_ADDRESS = address->grph.addr.low_part; - surface_flip.flip.DCSURF_PRIMARY_SURFACE_ADDRESS_HIGH = + flip_regs.DCSURF_PRIMARY_SURFACE_ADDRESS_HIGH = address->grph.addr.high_part; break; case PLN_ADDR_TYPE_VIDEO_PROGRESSIVE: @@ -817,30 +825,30 @@ bool hubp21_program_surface_flip_and_addr( break; if (address->video_progressive.luma_meta_addr.quad_part != 0) { - surface_flip.flip.DCSURF_PRIMARY_META_SURFACE_ADDRESS = + flip_regs.DCSURF_PRIMARY_META_SURFACE_ADDRESS = address->video_progressive.luma_meta_addr.low_part; - surface_flip.flip.DCSURF_PRIMARY_META_SURFACE_ADDRESS_HIGH = + flip_regs.DCSURF_PRIMARY_META_SURFACE_ADDRESS_HIGH = address->video_progressive.luma_meta_addr.high_part; - surface_flip.flip.DCSURF_PRIMARY_META_SURFACE_ADDRESS_C = + flip_regs.DCSURF_PRIMARY_META_SURFACE_ADDRESS_C = address->video_progressive.chroma_meta_addr.low_part; - surface_flip.flip.DCSURF_PRIMARY_META_SURFACE_ADDRESS_HIGH_C = + flip_regs.DCSURF_PRIMARY_META_SURFACE_ADDRESS_HIGH_C = address->video_progressive.chroma_meta_addr.high_part; } - surface_flip.flip.DCSURF_PRIMARY_SURFACE_ADDRESS = + flip_regs.DCSURF_PRIMARY_SURFACE_ADDRESS = address->video_progressive.luma_addr.low_part; - surface_flip.flip.DCSURF_PRIMARY_SURFACE_ADDRESS_HIGH = + flip_regs.DCSURF_PRIMARY_SURFACE_ADDRESS_HIGH = address->video_progressive.luma_addr.high_part; if (debug->nv12_iflip_vm_wa) { - surface_flip.flip.DCSURF_PRIMARY_SURFACE_ADDRESS_C = + flip_regs.DCSURF_PRIMARY_SURFACE_ADDRESS_C = address->video_progressive.chroma_addr.low_part + hubp21->PLAT_54186_wa_chroma_addr_offset; } else - surface_flip.flip.DCSURF_PRIMARY_SURFACE_ADDRESS_C = + flip_regs.DCSURF_PRIMARY_SURFACE_ADDRESS_C = address->video_progressive.chroma_addr.low_part; - surface_flip.flip.DCSURF_PRIMARY_SURFACE_ADDRESS_HIGH_C = + flip_regs.DCSURF_PRIMARY_SURFACE_ADDRESS_HIGH_C = address->video_progressive.chroma_addr.high_part; break; @@ -850,30 +858,30 @@ bool hubp21_program_surface_flip_and_addr( if (address->grph_stereo.right_addr.quad_part == 0) break; - grph_stereo = true; + flip_regs.grph_stereo = true; if (address->grph_stereo.right_meta_addr.quad_part != 0) { - surface_flip.flip.DCSURF_SECONDARY_META_SURFACE_ADDRESS = + flip_regs.DCSURF_SECONDARY_META_SURFACE_ADDRESS = address->grph_stereo.right_meta_addr.low_part; - surface_flip.flip.DCSURF_SECONDARY_META_SURFACE_ADDRESS_HIGH = + flip_regs.DCSURF_SECONDARY_META_SURFACE_ADDRESS_HIGH = 
address->grph_stereo.right_meta_addr.high_part; } if (address->grph_stereo.left_meta_addr.quad_part != 0) { - surface_flip.flip.DCSURF_PRIMARY_META_SURFACE_ADDRESS = + flip_regs.DCSURF_PRIMARY_META_SURFACE_ADDRESS = address->grph_stereo.left_meta_addr.low_part; - surface_flip.flip.DCSURF_PRIMARY_META_SURFACE_ADDRESS_HIGH = + flip_regs.DCSURF_PRIMARY_META_SURFACE_ADDRESS_HIGH = address->grph_stereo.left_meta_addr.high_part; } - surface_flip.flip.DCSURF_PRIMARY_SURFACE_ADDRESS = + flip_regs.DCSURF_PRIMARY_SURFACE_ADDRESS = address->grph_stereo.left_addr.low_part; - surface_flip.flip.DCSURF_PRIMARY_SURFACE_ADDRESS_HIGH = + flip_regs.DCSURF_PRIMARY_SURFACE_ADDRESS_HIGH = address->grph_stereo.left_addr.high_part; - surface_flip.flip.DCSURF_SECONDARY_SURFACE_ADDRESS = + flip_regs.DCSURF_SECONDARY_SURFACE_ADDRESS = address->grph_stereo.right_addr.low_part; - surface_flip.flip.DCSURF_SECONDARY_SURFACE_ADDRESS_HIGH = + flip_regs.DCSURF_SECONDARY_SURFACE_ADDRESS_HIGH = address->grph_stereo.right_addr.high_part; break; @@ -882,15 +890,13 @@ bool hubp21_program_surface_flip_and_addr( break; } - surface_flip.flip.flip_params.vmid = address->vmid; - surface_flip.flip.flip_params.grph_stereo = grph_stereo; - surface_flip.flip.flip_params.tmz_surface = address->tmz_surface; - surface_flip.flip.flip_params.immediate = flip_immediate; + flip_regs.tmz_surface = address->tmz_surface; + flip_regs.immediate = flip_immediate; - if (hubp->ctx->dc->debug.enable_dmcub_surface_flip) - program_surface_flip_and_addr_dmcub(hubp, &surface_flip); + if (hubp->ctx->dc->debug.enable_dmcub_surface_flip && address->type == PLN_ADDR_TYPE_VIDEO_PROGRESSIVE) + dmcub_PLAT_54186_wa(hubp, &flip_regs); else - program_surface_flip_and_addr(hubp, &surface_flip); + program_surface_flip_and_addr(hubp, &flip_regs); hubp->request_address = *address; diff --git a/drivers/gpu/drm/amd/display/dc/inc/hw/hubp.h b/drivers/gpu/drm/amd/display/dc/inc/hw/hubp.h index 686145933335..2cb8466e657b 100644 --- a/drivers/gpu/drm/amd/display/dc/inc/hw/hubp.h +++ b/drivers/gpu/drm/amd/display/dc/inc/hw/hubp.h @@ -63,6 +63,26 @@ struct hubp { bool power_gated; }; +struct surface_flip_registers { + uint32_t DCSURF_SURFACE_CONTROL; + uint32_t DCSURF_PRIMARY_META_SURFACE_ADDRESS_HIGH; + uint32_t DCSURF_PRIMARY_META_SURFACE_ADDRESS; + uint32_t DCSURF_PRIMARY_SURFACE_ADDRESS_HIGH; + uint32_t DCSURF_PRIMARY_SURFACE_ADDRESS; + uint32_t DCSURF_PRIMARY_META_SURFACE_ADDRESS_HIGH_C; + uint32_t DCSURF_PRIMARY_META_SURFACE_ADDRESS_C; + uint32_t DCSURF_PRIMARY_SURFACE_ADDRESS_HIGH_C; + uint32_t DCSURF_PRIMARY_SURFACE_ADDRESS_C; + uint32_t DCSURF_SECONDARY_META_SURFACE_ADDRESS_HIGH; + uint32_t DCSURF_SECONDARY_META_SURFACE_ADDRESS; + uint32_t DCSURF_SECONDARY_SURFACE_ADDRESS_HIGH; + uint32_t DCSURF_SECONDARY_SURFACE_ADDRESS; + bool tmz_surface; + bool immediate; + uint8_t vmid; + bool grph_stereo; +}; + struct hubp_funcs { void (*hubp_setup)( struct hubp *hubp, diff --git a/drivers/gpu/drm/amd/display/dmub/inc/dmub_cmd.h b/drivers/gpu/drm/amd/display/dmub/inc/dmub_cmd.h index 0c3ae02148a6..cd9532b4f14d 100644 --- a/drivers/gpu/drm/amd/display/dmub/inc/dmub_cmd.h +++ b/drivers/gpu/drm/amd/display/dmub/inc/dmub_cmd.h @@ -48,7 +48,7 @@ enum dmub_cmd_type { DMUB_CMD__REG_SEQ_FIELD_UPDATE_SEQ = 2, DMUB_CMD__REG_SEQ_BURST_WRITE = 3, DMUB_CMD__REG_REG_WAIT = 4, - DMUB_CMD__SURFACE_FLIP = 5, + DMUB_CMD__PLAT_54186_WA = 5, DMUB_CMD__PSR = 64, DMUB_CMD__VBIOS = 128, }; @@ -151,20 +151,12 @@ struct dmub_rb_cmd_reg_wait { #define PHYSICAL_ADDRESS_LOC union large_integer #endif 
-struct dmub_cmd_surface_flip { +struct dmub_cmd_PLAT_54186_wa { uint32_t DCSURF_SURFACE_CONTROL; - uint32_t DCSURF_PRIMARY_META_SURFACE_ADDRESS_HIGH; - uint32_t DCSURF_PRIMARY_META_SURFACE_ADDRESS; uint32_t DCSURF_PRIMARY_SURFACE_ADDRESS_HIGH; uint32_t DCSURF_PRIMARY_SURFACE_ADDRESS; - uint32_t DCSURF_PRIMARY_META_SURFACE_ADDRESS_HIGH_C; - uint32_t DCSURF_PRIMARY_META_SURFACE_ADDRESS_C; uint32_t DCSURF_PRIMARY_SURFACE_ADDRESS_HIGH_C; uint32_t DCSURF_PRIMARY_SURFACE_ADDRESS_C; - uint32_t DCSURF_SECONDARY_META_SURFACE_ADDRESS_HIGH; - uint32_t DCSURF_SECONDARY_META_SURFACE_ADDRESS; - uint32_t DCSURF_SECONDARY_SURFACE_ADDRESS_HIGH; - uint32_t DCSURF_SECONDARY_SURFACE_ADDRESS; struct { uint8_t hubp_inst : 4; uint8_t tmz_surface : 1; @@ -173,11 +165,12 @@ struct dmub_cmd_surface_flip { uint8_t grph_stereo : 1; uint32_t reserved : 21; } flip_params; + uint32_t reserved[9]; }; -struct dmub_rb_cmd_flip { +struct dmub_rb_cmd_PLAT_54186_wa { struct dmub_cmd_header header; - struct dmub_cmd_surface_flip flip; + struct dmub_cmd_PLAT_54186_wa flip; }; struct dmub_cmd_digx_encoder_control_data { @@ -287,7 +280,7 @@ union dmub_rb_cmd { struct dmub_rb_cmd_psr_enable psr_enable; struct dmub_rb_cmd_psr_copy_settings psr_copy_settings; struct dmub_rb_cmd_psr_set_level psr_set_level; - struct dmub_rb_cmd_flip surface_flip; + struct dmub_rb_cmd_PLAT_54186_wa PLAT_54186_wa; struct dmub_rb_cmd_psr_setup psr_setup; }; -- cgit v1.2.3
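For orientation, the net effect of this last patch is that hubp21_program_surface_flip_and_addr() now fills a plain struct surface_flip_registers and, only when enable_dmcub_surface_flip is set and the plane is NV12 (PLN_ADDR_TYPE_VIDEO_PROGRESSIVE), packs it into the renamed DMUB_CMD__PLAT_54186_WA command; every other flip keeps the direct register path. The standalone sketch below restates that packing with trimmed copies of the two structs; dmub_queue_cmd() is a hypothetical stand-in for dc_dmub_srv_cmd_queue()/dc_dmub_srv_cmd_execute(), not the driver's real plumbing.

    /* Standalone sketch of the DMCUB flip packing; struct layouts are trimmed
     * copies of what the patch introduces, not the real headers. */
    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    struct surface_flip_registers {               /* trimmed to the fields DMCUB needs */
            uint32_t DCSURF_PRIMARY_SURFACE_ADDRESS_HIGH;
            uint32_t DCSURF_PRIMARY_SURFACE_ADDRESS;
            uint32_t DCSURF_PRIMARY_SURFACE_ADDRESS_HIGH_C;
            uint32_t DCSURF_PRIMARY_SURFACE_ADDRESS_C;
            bool tmz_surface;
            bool immediate;
            uint8_t vmid;
            bool grph_stereo;
    };

    struct dmub_cmd_PLAT_54186_wa {               /* mirrors the renamed DMCUB payload */
            uint32_t DCSURF_PRIMARY_SURFACE_ADDRESS_HIGH;
            uint32_t DCSURF_PRIMARY_SURFACE_ADDRESS;
            uint32_t DCSURF_PRIMARY_SURFACE_ADDRESS_HIGH_C;
            uint32_t DCSURF_PRIMARY_SURFACE_ADDRESS_C;
            struct {
                    uint8_t hubp_inst : 4;
                    uint8_t tmz_surface : 1;
                    uint8_t immediate : 1;
                    uint8_t vmid : 4;
                    uint8_t grph_stereo : 1;
            } flip_params;
    };

    /* hypothetical stand-in for dc_dmub_srv_cmd_queue() + dc_dmub_srv_cmd_execute() */
    void dmub_queue_cmd(const struct dmub_cmd_PLAT_54186_wa *cmd)
    {
            printf("DMUB_CMD__PLAT_54186_WA: hubp %u, luma 0x%08x, chroma 0x%08x\n",
                   (unsigned)cmd->flip_params.hubp_inst,
                   (unsigned)cmd->DCSURF_PRIMARY_SURFACE_ADDRESS,
                   (unsigned)cmd->DCSURF_PRIMARY_SURFACE_ADDRESS_C);
    }

    /* only NV12 (video progressive) flips take this DMCUB path in the driver */
    void flip_via_dmcub(uint8_t hubp_inst, const struct surface_flip_registers *regs)
    {
            struct dmub_cmd_PLAT_54186_wa cmd = { 0 };

            cmd.DCSURF_PRIMARY_SURFACE_ADDRESS        = regs->DCSURF_PRIMARY_SURFACE_ADDRESS;
            cmd.DCSURF_PRIMARY_SURFACE_ADDRESS_HIGH   = regs->DCSURF_PRIMARY_SURFACE_ADDRESS_HIGH;
            cmd.DCSURF_PRIMARY_SURFACE_ADDRESS_C      = regs->DCSURF_PRIMARY_SURFACE_ADDRESS_C;
            cmd.DCSURF_PRIMARY_SURFACE_ADDRESS_HIGH_C = regs->DCSURF_PRIMARY_SURFACE_ADDRESS_HIGH_C;
            cmd.flip_params.hubp_inst   = hubp_inst;
            cmd.flip_params.tmz_surface = regs->tmz_surface;
            cmd.flip_params.immediate   = regs->immediate;
            cmd.flip_params.vmid        = regs->vmid;
            cmd.flip_params.grph_stereo = regs->grph_stereo;

            dmub_queue_cmd(&cmd);
    }

    int main(void)
    {
            struct surface_flip_registers regs = {
                    .DCSURF_PRIMARY_SURFACE_ADDRESS   = 0x00100000,  /* example values only */
                    .DCSURF_PRIMARY_SURFACE_ADDRESS_C = 0x00200000,
                    .immediate = true,
                    .vmid = 1,
            };

            flip_via_dmcub(0, &regs);
            return 0;
    }

Keeping the register snapshot in surface_flip_registers decouples HUBP programming from the DMCUB command layout, so the direct register path and the firmware path consume the same data.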