.underflow_assert_delay_us = 0xFFFFFFFF,
.dwb_fi_phase = -1, // -1 = disable,
.dmub_command_table = true,
- .pstate_enabled = false,
+ .pstate_enabled = true,
.use_max_lb = true,
.enable_mem_low_power = {
.bits = {
/* Use pipe context based otg sync logic */
dc->config.use_pipe_ctx_sync_logic = true;
- dc->config.use_default_clock_table = true;
+ dc->config.use_default_clock_table = false;
/* read VBIOS LTTPR caps */
{
if (ctx->dc_bios->funcs->get_lttpr_caps) {
//TODO
}
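For context, a minimal sketch of what this VBIOS LTTPR query usually looks like in the DCN resource constructors; the bp_query_result and is_vbios_lttpr_enable locals are illustrative, only the get_lttpr_caps hook itself comes from the shared dc_bios funcs table:

		if (ctx->dc_bios->funcs->get_lttpr_caps) {
			enum bp_result bp_query_result;
			uint8_t is_vbios_lttpr_enable = 0;

			/* Ask the BIOS parser whether LTTPR is enabled in VBIOS
			 * and cache the answer in the dc caps. */
			bp_query_result = ctx->dc_bios->funcs->get_lttpr_caps(
					ctx->dc_bios, &is_vbios_lttpr_enable);
			dc->caps.vbios_lttpr_enable =
					(bp_query_result == BP_RESULT_OK) &&
					!!is_vbios_lttpr_enable;
		}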
-void dcn35_patch_dpm_table(struct clk_bw_params *bw_params)
-{
- int i;
- unsigned int max_dcfclk_mhz = 0, max_dispclk_mhz = 0, max_dppclk_mhz = 0,
- max_phyclk_mhz = 0, max_dtbclk_mhz = 0, max_fclk_mhz = 0, max_uclk_mhz = 0;
-
- for (i = 0; i < MAX_NUM_DPM_LVL; i++) {
- if (bw_params->clk_table.entries[i].dcfclk_mhz > max_dcfclk_mhz)
- max_dcfclk_mhz = bw_params->clk_table.entries[i].dcfclk_mhz;
- if (bw_params->clk_table.entries[i].fclk_mhz > max_fclk_mhz)
- max_fclk_mhz = bw_params->clk_table.entries[i].fclk_mhz;
- if (bw_params->clk_table.entries[i].memclk_mhz > max_uclk_mhz)
- max_uclk_mhz = bw_params->clk_table.entries[i].memclk_mhz;
- if (bw_params->clk_table.entries[i].dispclk_mhz > max_dispclk_mhz)
- max_dispclk_mhz = bw_params->clk_table.entries[i].dispclk_mhz;
- if (bw_params->clk_table.entries[i].dppclk_mhz > max_dppclk_mhz)
- max_dppclk_mhz = bw_params->clk_table.entries[i].dppclk_mhz;
- if (bw_params->clk_table.entries[i].phyclk_mhz > max_phyclk_mhz)
- max_phyclk_mhz = bw_params->clk_table.entries[i].phyclk_mhz;
- if (bw_params->clk_table.entries[i].dtbclk_mhz > max_dtbclk_mhz)
- max_dtbclk_mhz = bw_params->clk_table.entries[i].dtbclk_mhz;
- }
-}
+
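The removed dcn35_patch_dpm_table only accumulated per-clock maxima into local variables and never wrote them back into bw_params, so callers can go straight to the FPU bounding-box update. A sketch of how that helper is typically wrapped, assuming the DC_FP_START()/DC_FP_END() guard pattern used by the other DCN resource files; the wrapper name is illustrative:

static void dcn35_update_bw_bounding_box(struct dc *dc,
					 struct clk_bw_params *bw_params)
{
	/* FPU code must run inside the kernel FP begin/end guards. */
	DC_FP_START();
	dcn35_update_bw_bounding_box_fpu(dc, bw_params);
	DC_FP_END();
}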
/*
 * dcn35_update_bw_bounding_box
 */
void dcn35_update_bw_bounding_box_fpu(struct dc *dc,
struct clk_bw_params *bw_params);
-void dcn35_patch_dpm_table(struct clk_bw_params *bw_params);
-
int dcn35_populate_dml_pipes_from_context_fpu(struct dc *dc,
struct dc_state *context,
display_e2e_pipe_params_st *pipes,