Commit a0867053 authored by Eric Yang, committed by Alex Deucher

drm/amd/display: remove deprecated pplib interface

[Why]
The new interface now replaces the old interface for all known
configurations.

Signed-off-by: Eric Yang <Eric.Yang2@amd.com>
Reviewed-by: Tony Cheng <Tony.Cheng@amd.com>
Acked-by: Leo Li <sunpeng.li@amd.com>
Signed-off-by: Alex Deucher <alexander.deucher@amd.com>
parent 27eaa492
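
For context, the change replaces the cached struct pp_smu_display_requirement_rv (kept in dc->res_pool->pp_smu_req) with direct calls into the pp_smu_funcs_rv interface, converting the requested clocks from kHz to MHz at each call site. Below is a minimal sketch of the new call shape, assuming the surrounding driver types; the helper name request_clocks_sketch and its reduced argument list are illustrative, not part of the patch.

/* Sketch only: mirrors the call sites in dcn1_update_clocks() in the
 * diff below. The pp_smu callbacks and the kHz-to-MHz conversions are
 * taken from the diff; everything else is simplified for illustration. */
static void request_clocks_sketch(struct pp_smu_funcs_rv *pp_smu,
                                  struct dc_clocks *new_clocks)
{
    if (pp_smu && pp_smu->set_hard_min_fclk_by_freq &&
            pp_smu->set_hard_min_dcfclk_by_freq &&
            pp_smu->set_min_deep_sleep_dcfclk) {
        /* hard minimums: plain kHz -> MHz truncation */
        pp_smu->set_hard_min_fclk_by_freq(&pp_smu->pp_smu,
                new_clocks->fclk_khz / 1000);
        pp_smu->set_hard_min_dcfclk_by_freq(&pp_smu->pp_smu,
                new_clocks->dcfclk_khz / 1000);
        /* deep-sleep DCEFCLK: round up so the MHz request is never
         * below the requested kHz value */
        pp_smu->set_min_deep_sleep_dcfclk(&pp_smu->pp_smu,
                (new_clocks->dcfclk_deep_sleep_khz + 999) / 1000);
    }
}

The old path filled the same values into the pp_smu_req cache and copied it back at the end of dcn1_update_clocks(); with no remaining reader of that cache, the struct and its resource_pool member can be dropped, as the diff shows.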
@@ -151,9 +151,6 @@ static void dcn1_update_clocks(struct clk_mgr *clk_mgr,
     struct dc *dc = clk_mgr->ctx->dc;
     struct dc_debug_options *debug = &dc->debug;
     struct dc_clocks *new_clocks = &context->bw_ctx.bw.dcn.clk;
-    struct pp_smu_display_requirement_rv *smu_req_cur =
-            &dc->res_pool->pp_smu_req;
-    struct pp_smu_display_requirement_rv smu_req = *smu_req_cur;
     struct pp_smu_funcs_rv *pp_smu = NULL;
     bool send_request_to_increase = false;
     bool send_request_to_lower = false;
@@ -175,8 +172,6 @@ static void dcn1_update_clocks(struct clk_mgr *clk_mgr,
         */
        if (pp_smu && pp_smu->set_display_count)
            pp_smu->set_display_count(&pp_smu->pp_smu, display_count);
-
-       smu_req.display_count = display_count;
    }
 
    if (new_clocks->dispclk_khz > clk_mgr->clks.dispclk_khz
@@ -187,7 +182,6 @@ static void dcn1_update_clocks(struct clk_mgr *clk_mgr,
    if (should_set_clock(safe_to_lower, new_clocks->phyclk_khz, clk_mgr->clks.phyclk_khz)) {
        clk_mgr->clks.phyclk_khz = new_clocks->phyclk_khz;
-
        send_request_to_lower = true;
    }
@@ -197,24 +191,18 @@ static void dcn1_update_clocks(struct clk_mgr *clk_mgr,
    if (should_set_clock(safe_to_lower, new_clocks->fclk_khz, clk_mgr->clks.fclk_khz)) {
        clk_mgr->clks.fclk_khz = new_clocks->fclk_khz;
-       smu_req.hard_min_fclk_mhz = new_clocks->fclk_khz / 1000;
-
        send_request_to_lower = true;
    }
 
    //DCF Clock
    if (should_set_clock(safe_to_lower, new_clocks->dcfclk_khz, clk_mgr->clks.dcfclk_khz)) {
        clk_mgr->clks.dcfclk_khz = new_clocks->dcfclk_khz;
-       smu_req.hard_min_dcefclk_mhz = new_clocks->dcfclk_khz / 1000;
-
        send_request_to_lower = true;
    }
 
    if (should_set_clock(safe_to_lower,
            new_clocks->dcfclk_deep_sleep_khz, clk_mgr->clks.dcfclk_deep_sleep_khz)) {
        clk_mgr->clks.dcfclk_deep_sleep_khz = new_clocks->dcfclk_deep_sleep_khz;
-       smu_req.min_deep_sleep_dcefclk_mhz = (new_clocks->dcfclk_deep_sleep_khz + 999) / 1000;
-
        send_request_to_lower = true;
    }
@@ -227,9 +215,9 @@ static void dcn1_update_clocks(struct clk_mgr *clk_mgr,
            pp_smu->set_hard_min_dcfclk_by_freq &&
            pp_smu->set_min_deep_sleep_dcfclk) {
-           pp_smu->set_hard_min_fclk_by_freq(&pp_smu->pp_smu, smu_req.hard_min_fclk_mhz);
-           pp_smu->set_hard_min_dcfclk_by_freq(&pp_smu->pp_smu, smu_req.hard_min_dcefclk_mhz);
-           pp_smu->set_min_deep_sleep_dcfclk(&pp_smu->pp_smu, smu_req.min_deep_sleep_dcefclk_mhz);
+           pp_smu->set_hard_min_fclk_by_freq(&pp_smu->pp_smu, new_clocks->fclk_khz / 1000);
+           pp_smu->set_hard_min_dcfclk_by_freq(&pp_smu->pp_smu, new_clocks->dcfclk_khz / 1000);
+           pp_smu->set_min_deep_sleep_dcfclk(&pp_smu->pp_smu, (new_clocks->dcfclk_deep_sleep_khz + 999) / 1000);
        }
    }
@@ -239,7 +227,6 @@ static void dcn1_update_clocks(struct clk_mgr *clk_mgr,
            || new_clocks->dispclk_khz == clk_mgr->clks.dispclk_khz) {
        dcn1_ramp_up_dispclk_with_dpp(clk_mgr, new_clocks);
        clk_mgr->clks.dispclk_khz = new_clocks->dispclk_khz;
-
        send_request_to_lower = true;
    }
@@ -249,13 +236,11 @@ static void dcn1_update_clocks(struct clk_mgr *clk_mgr,
            pp_smu->set_hard_min_dcfclk_by_freq &&
            pp_smu->set_min_deep_sleep_dcfclk) {
-           pp_smu->set_hard_min_fclk_by_freq(&pp_smu->pp_smu, smu_req.hard_min_fclk_mhz);
-           pp_smu->set_hard_min_dcfclk_by_freq(&pp_smu->pp_smu, smu_req.hard_min_dcefclk_mhz);
-           pp_smu->set_min_deep_sleep_dcfclk(&pp_smu->pp_smu, smu_req.min_deep_sleep_dcefclk_mhz);
+           pp_smu->set_hard_min_fclk_by_freq(&pp_smu->pp_smu, new_clocks->fclk_khz / 1000);
+           pp_smu->set_hard_min_dcfclk_by_freq(&pp_smu->pp_smu, new_clocks->dcfclk_khz / 1000);
+           pp_smu->set_min_deep_sleep_dcfclk(&pp_smu->pp_smu, (new_clocks->dcfclk_deep_sleep_khz + 999) / 1000);
        }
    }
-
-   *smu_req_cur = smu_req;
 }
 
 static const struct clk_mgr_funcs dcn1_funcs = {
    .get_dp_ref_clk_frequency = dce12_get_dp_ref_freq_khz,
...
@@ -74,29 +74,6 @@ struct pp_smu_wm_range_sets {
    struct pp_smu_wm_set_range writer_wm_sets[MAX_WATERMARK_SETS];
 };
 
-struct pp_smu_display_requirement_rv {
-   /* PPSMC_MSG_SetDisplayCount: count
-    * 0 triggers S0i2 optimization
-    */
-   unsigned int display_count;
-
-   /* PPSMC_MSG_SetHardMinFclkByFreq: mhz
-    * FCLK will vary with DPM, but never below requested hard min
-    */
-   unsigned int hard_min_fclk_mhz;
-
-   /* PPSMC_MSG_SetHardMinDcefclkByFreq: mhz
-    * fixed clock at requested freq, either from FCH bypass or DFS
-    */
-   unsigned int hard_min_dcefclk_mhz;
-
-   /* PPSMC_MSG_SetMinDeepSleepDcefclk: mhz
-    * when DF is in cstate, dcf clock is further divided down
-    * to just above given frequency
-    */
-   unsigned int min_deep_sleep_dcefclk_mhz;
-};
-
 struct pp_smu_funcs_rv {
    struct pp_smu pp_smu;
...
@@ -145,7 +145,6 @@ struct resource_pool {
    struct hubbub *hubbub;
    struct mpc *mpc;
    struct pp_smu_funcs *pp_smu;
-   struct pp_smu_display_requirement_rv pp_smu_req;
    struct dce_aux *engines[MAX_PIPES];
    struct dce_i2c_hw *hw_i2cs[MAX_PIPES];
    struct dce_i2c_sw *sw_i2cs[MAX_PIPES];
...
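
The comments on the removed struct were the only place the units were spelled out: the PPSMC messages take MHz while dc_clocks carries kHz, and the deep-sleep DCEFCLK is the one value that must round up rather than truncate. A small standalone check of the two conversions now written inline at the call sites follows; the helper names are made up for illustration and do not exist in the driver.

#include <assert.h>

/* Hard minimums (FCLK, DCFCLK): plain integer division truncates,
 * matching new_clocks->fclk_khz / 1000 in the patch. */
static unsigned int khz_to_mhz_floor(unsigned int khz)
{
    return khz / 1000;
}

/* Deep-sleep DCEFCLK: (khz + 999) / 1000 rounds up, so the MHz value
 * handed to PPSMC_MSG_SetMinDeepSleepDcefclk is never below the
 * requested kHz figure. */
static unsigned int khz_to_mhz_ceil(unsigned int khz)
{
    return (khz + 999) / 1000;
}

int main(void)
{
    assert(khz_to_mhz_floor(600300) == 600); /* hard min truncates */
    assert(khz_to_mhz_ceil(600300) == 601);  /* deep sleep rounds up */
    assert(khz_to_mhz_ceil(600000) == 600);  /* exact MHz unchanged */
    return 0;
}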