Commit f7f38ffe authored by Jun Lei, committed by Alex Deucher

drm/amd/display: fixup DPP programming sequence

[why]
DC does not correctly account for the fact that the DPP DTO is double buffered while the DPP ref clock is not.
This means that when the DPP ref clock is lowered at "safe to lower" time, the DPP blocks that need
an increased divider will temporarily have their actual DPP clock drop below the minimum while the DTO
double buffering takes effect.  This results in temporary underflow.
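For example (illustrative numbers, not from the patch): a pipe that requires 300 MHz against a
600 MHz DPP ref is programmed with a 300/600 DTO; if the ref is dropped to 300 MHz before the
double-buffered DTO switches over to 300/300, the pipe briefly runs at 300 MHz * 300/600 = 150 MHz,
half of what it needs, and underflows.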

[how]
To fix this, the DPP clock cannot be programmed atomically; it is broken up into the DTO and the
ref, each with its own "safe to lower" logic.  When doing "prepare", the ref and the dividers may only increase.
When doing "optimize", both may decrease.  It is guaranteed that we will not exceed the max DPP clock because
we do not use dividers larger than 1.
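As a rough illustration of the resulting split (the wrapper program_dpp_clocks() below is
hypothetical and exists only for this sketch; the real sequencing lives in dcn2_update_clocks()
in the diff):

/*
 * Hypothetical helper, for illustration only -- not part of this patch.
 * It reuses the helpers the patch introduces.
 */
static void program_dpp_clocks(struct clk_mgr *clk_mgr_base,
		struct dc_state *context,
		struct dc_clocks *new_clocks,
		bool safe_to_lower)
{
	if (!safe_to_lower) {
		/* prepare: raise the global DPP ref first if it must go up... */
		if (new_clocks->dppclk_khz > clk_mgr_base->clks.dppclk_khz)
			request_voltage_and_program_global_dpp_clk(clk_mgr_base,
					new_clocks->dppclk_khz);
		/* ...then raise only the per-pipe DTO dividers that need it,
		 * i.e. update_dpp_dto(..., true) for each pipe in context. */
	} else {
		/* optimize: lower the global DPP ref if it may go down... */
		if (new_clocks->dppclk_khz < clk_mgr_base->clks.dppclk_khz)
			request_voltage_and_program_global_dpp_clk(clk_mgr_base,
					new_clocks->dppclk_khz);
		/* ...then program every per-pipe DTO to its final value,
		 * i.e. update_dpp_dto(..., false) for each pipe in context. */
	}
}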
Signed-off-by: Jun Lei <Jun.Lei@amd.com>
Reviewed-by: Eric Yang <eric.yang2@amd.com>
Acked-by: Leo Li <sunpeng.li@amd.com>
Signed-off-by: Alex Deucher <alexander.deucher@amd.com>
parent 9adc8050
@@ -104,7 +104,6 @@ void dcn20_update_clocks_update_dpp_dto(struct clk_mgr_internal *clk_mgr,
 {
 	int i;
-	clk_mgr->dccg->ref_dppclk = clk_mgr->base.clks.dppclk_khz;
 	for (i = 0; i < clk_mgr->base.ctx->dc->res_pool->pipe_count; i++) {
 		int dpp_inst, dppclk_khz;
@@ -114,28 +113,75 @@ void dcn20_update_clocks_update_dpp_dto(struct clk_mgr_internal *clk_mgr,
 		dpp_inst = context->res_ctx.pipe_ctx[i].plane_res.dpp->inst;
 		dppclk_khz = context->res_ctx.pipe_ctx[i].plane_res.bw.dppclk_khz;
 		clk_mgr->dccg->funcs->update_dpp_dto(
-				clk_mgr->dccg, dpp_inst, dppclk_khz);
+				clk_mgr->dccg, dpp_inst, dppclk_khz, false);
 	}
 }

-void dcn20_update_clocks_update_dentist(struct clk_mgr_internal *clk_mgr)
+static void update_global_dpp_clk(struct clk_mgr_internal *clk_mgr, unsigned int khz)
 {
 	int dpp_divider = DENTIST_DIVIDER_RANGE_SCALE_FACTOR
-			* clk_mgr->dentist_vco_freq_khz / clk_mgr->base.clks.dppclk_khz;
-	int disp_divider = DENTIST_DIVIDER_RANGE_SCALE_FACTOR
-			* clk_mgr->dentist_vco_freq_khz / clk_mgr->base.clks.dispclk_khz;
+			* clk_mgr->dentist_vco_freq_khz / khz;
 	uint32_t dppclk_wdivider = dentist_get_did_from_divider(dpp_divider);
-	uint32_t dispclk_wdivider = dentist_get_did_from_divider(disp_divider);

-	REG_UPDATE(DENTIST_DISPCLK_CNTL,
-			DENTIST_DISPCLK_WDIVIDER, dispclk_wdivider);
-//	REG_WAIT(DENTIST_DISPCLK_CNTL, DENTIST_DISPCLK_CHG_DONE, 1, 5, 100);
 	REG_UPDATE(DENTIST_DISPCLK_CNTL,
 			DENTIST_DPPCLK_WDIVIDER, dppclk_wdivider);
 	REG_WAIT(DENTIST_DISPCLK_CNTL, DENTIST_DPPCLK_CHG_DONE, 1, 5, 100);
 }

+static void update_display_clk(struct clk_mgr_internal *clk_mgr, unsigned int khz)
+{
+	int disp_divider = DENTIST_DIVIDER_RANGE_SCALE_FACTOR
+			* clk_mgr->dentist_vco_freq_khz / khz;
+	uint32_t dispclk_wdivider = dentist_get_did_from_divider(disp_divider);
+
+	REG_UPDATE(DENTIST_DISPCLK_CNTL,
+			DENTIST_DISPCLK_WDIVIDER, dispclk_wdivider);
+}
+
+static void request_voltage_and_program_disp_clk(struct clk_mgr *clk_mgr_base, unsigned int khz)
+{
+	struct clk_mgr_internal *clk_mgr = TO_CLK_MGR_INTERNAL(clk_mgr_base);
+	struct dc *dc = clk_mgr_base->ctx->dc;
+	struct pp_smu_funcs_nv *pp_smu = NULL;
+	bool going_up = clk_mgr->base.clks.dispclk_khz < khz;
+
+	if (dc->res_pool->pp_smu)
+		pp_smu = &dc->res_pool->pp_smu->nv_funcs;
+
+	clk_mgr->base.clks.dispclk_khz = khz;
+
+	if (going_up && pp_smu && pp_smu->set_voltage_by_freq)
+		pp_smu->set_voltage_by_freq(&pp_smu->pp_smu, PP_SMU_NV_DISPCLK, clk_mgr_base->clks.dispclk_khz / 1000);
+
+	update_display_clk(clk_mgr, khz);
+
+	if (!going_up && pp_smu && pp_smu->set_voltage_by_freq)
+		pp_smu->set_voltage_by_freq(&pp_smu->pp_smu, PP_SMU_NV_DISPCLK, clk_mgr_base->clks.dispclk_khz / 1000);
+}
+
+static void request_voltage_and_program_global_dpp_clk(struct clk_mgr *clk_mgr_base, unsigned int khz)
+{
+	struct clk_mgr_internal *clk_mgr = TO_CLK_MGR_INTERNAL(clk_mgr_base);
+	struct dc *dc = clk_mgr_base->ctx->dc;
+	struct pp_smu_funcs_nv *pp_smu = NULL;
+	bool going_up = clk_mgr->base.clks.dppclk_khz < khz;
+
+	if (dc->res_pool->pp_smu)
+		pp_smu = &dc->res_pool->pp_smu->nv_funcs;
+
+	clk_mgr->base.clks.dppclk_khz = khz;
+	clk_mgr->dccg->ref_dppclk = khz;
+
+	if (going_up && pp_smu && pp_smu->set_voltage_by_freq)
+		pp_smu->set_voltage_by_freq(&pp_smu->pp_smu, PP_SMU_NV_PIXELCLK, clk_mgr_base->clks.dppclk_khz / 1000);
+
+	update_global_dpp_clk(clk_mgr, khz);
+
+	if (!going_up && pp_smu && pp_smu->set_voltage_by_freq)
+		pp_smu->set_voltage_by_freq(&pp_smu->pp_smu, PP_SMU_NV_PIXELCLK, clk_mgr_base->clks.dppclk_khz / 1000);
+}
+
 void dcn2_update_clocks(struct clk_mgr *clk_mgr_base,
 		struct dc_state *context,
@@ -146,12 +192,11 @@ void dcn2_update_clocks(struct clk_mgr *clk_mgr_base,
 	struct dc *dc = clk_mgr_base->ctx->dc;
 	struct pp_smu_funcs_nv *pp_smu = NULL;
 	int display_count;
-	bool update_dppclk = false;
 	bool update_dispclk = false;
 	bool enter_display_off = false;
-	bool dpp_clock_lowered = false;
 	struct dmcu *dmcu = clk_mgr_base->ctx->dc->res_pool->dmcu;
 	bool force_reset = false;
+	int i;

 	if (clk_mgr_base->clks.dispclk_khz == 0 ||
 			dc->debug.force_clock_mode & 0x1) {
@@ -177,6 +222,7 @@ void dcn2_update_clocks(struct clk_mgr *clk_mgr_base,
 			pp_smu->set_voltage_by_freq(&pp_smu->pp_smu, PP_SMU_NV_PHYCLK, clk_mgr_base->clks.phyclk_khz / 1000);
 	}

+	if (dc->debug.force_min_dcfclk_mhz > 0)
 		new_clocks->dcfclk_khz = (new_clocks->dcfclk_khz > (dc->debug.force_min_dcfclk_mhz * 1000)) ?
 				new_clocks->dcfclk_khz : (dc->debug.force_min_dcfclk_mhz * 1000);
@@ -202,10 +248,12 @@ void dcn2_update_clocks(struct clk_mgr *clk_mgr_base,
 	if (should_update_pstate_support(safe_to_lower, new_clocks->p_state_change_support, clk_mgr_base->clks.p_state_change_support)) {
 		clk_mgr_base->clks.prev_p_state_change_support = clk_mgr_base->clks.p_state_change_support;
 		clk_mgr_base->clks.p_state_change_support = new_clocks->p_state_change_support;
 		if (pp_smu && pp_smu->set_pstate_handshake_support)
 			pp_smu->set_pstate_handshake_support(&pp_smu->pp_smu, clk_mgr_base->clks.p_state_change_support);
 	}
+	clk_mgr_base->clks.prev_p_state_change_support = clk_mgr_base->clks.p_state_change_support;

 	if (should_set_clock(safe_to_lower, new_clocks->dramclk_khz, clk_mgr_base->clks.dramclk_khz)) {
 		clk_mgr_base->clks.dramclk_khz = new_clocks->dramclk_khz;
@@ -213,35 +261,48 @@ void dcn2_update_clocks(struct clk_mgr *clk_mgr_base,
 			pp_smu->set_hard_min_uclk_by_freq(&pp_smu->pp_smu, clk_mgr_base->clks.dramclk_khz / 1000);
 	}

-	if (should_set_clock(safe_to_lower, new_clocks->dppclk_khz, clk_mgr->base.clks.dppclk_khz)) {
-		if (clk_mgr->base.clks.dppclk_khz > new_clocks->dppclk_khz)
-			dpp_clock_lowered = true;
-		clk_mgr->base.clks.dppclk_khz = new_clocks->dppclk_khz;
-
-		if (pp_smu && pp_smu->set_voltage_by_freq)
-			pp_smu->set_voltage_by_freq(&pp_smu->pp_smu, PP_SMU_NV_PIXELCLK, clk_mgr_base->clks.dppclk_khz / 1000);
-
-		update_dppclk = true;
-	}
-
-	if (should_set_clock(safe_to_lower, new_clocks->dispclk_khz, clk_mgr_base->clks.dispclk_khz)) {
-		clk_mgr_base->clks.dispclk_khz = new_clocks->dispclk_khz;
-		if (pp_smu && pp_smu->set_voltage_by_freq)
-			pp_smu->set_voltage_by_freq(&pp_smu->pp_smu, PP_SMU_NV_DISPCLK, clk_mgr_base->clks.dispclk_khz / 1000);
-
-		update_dispclk = true;
-	}
-	if (dc->config.forced_clocks == false || (force_reset && safe_to_lower)) {
-		if (dpp_clock_lowered) {
-			// if clock is being lowered, increase DTO before lowering refclk
-			dcn20_update_clocks_update_dpp_dto(clk_mgr, context);
-			dcn20_update_clocks_update_dentist(clk_mgr);
-		} else {
-			// if clock is being raised, increase refclk before lowering DTO
-			if (update_dppclk || update_dispclk)
-				dcn20_update_clocks_update_dentist(clk_mgr);
-			if (update_dppclk)
-				dcn20_update_clocks_update_dpp_dto(clk_mgr, context);
-		}
+	if (dc->config.forced_clocks == false) {
+		// First update display clock
+		if (should_set_clock(safe_to_lower, new_clocks->dispclk_khz, clk_mgr_base->clks.dispclk_khz))
+			request_voltage_and_program_disp_clk(clk_mgr_base, new_clocks->dispclk_khz);
+
+		// Updating DPP clock requires some more logic
+		if (!safe_to_lower) {
+			// For pre-programming, we need to make sure any DPP clock that will go up has to go up
+
+			// First raise the global reference if needed
+			if (new_clocks->dppclk_khz > clk_mgr_base->clks.dppclk_khz)
+				request_voltage_and_program_global_dpp_clk(clk_mgr_base, new_clocks->dppclk_khz);
+
+			// Then raise any dividers that need raising
+			for (i = 0; i < clk_mgr->base.ctx->dc->res_pool->pipe_count; i++) {
+				int dpp_inst, dppclk_khz;
+
+				if (!context->res_ctx.pipe_ctx[i].plane_state)
+					continue;
+
+				dpp_inst = context->res_ctx.pipe_ctx[i].plane_res.dpp->inst;
+				dppclk_khz = context->res_ctx.pipe_ctx[i].plane_res.bw.dppclk_khz;
+
+				clk_mgr->dccg->funcs->update_dpp_dto(clk_mgr->dccg, dpp_inst, dppclk_khz, true);
+			}
+		} else {
+			// For post-programming, we can lower ref clk if needed, and unconditionally set all the DTOs
+
+			if (new_clocks->dppclk_khz < clk_mgr_base->clks.dppclk_khz)
+				request_voltage_and_program_global_dpp_clk(clk_mgr_base, new_clocks->dppclk_khz);
+
+			for (i = 0; i < clk_mgr->base.ctx->dc->res_pool->pipe_count; i++) {
+				int dpp_inst, dppclk_khz;
+
+				if (!context->res_ctx.pipe_ctx[i].plane_state)
+					continue;
+
+				dpp_inst = context->res_ctx.pipe_ctx[i].plane_res.dpp->inst;
+				dppclk_khz = context->res_ctx.pipe_ctx[i].plane_res.bw.dppclk_khz;
+
+				clk_mgr->dccg->funcs->update_dpp_dto(clk_mgr->dccg, dpp_inst, dppclk_khz, false);
+			}
+		}
 	}

 	if (update_dispclk &&
...
@@ -1603,6 +1603,9 @@ enum surface_update_type dc_check_update_surfaces_for_stream(
 		for (i = 0; i < surface_count; i++)
 			updates[i].surface->update_flags.raw = 0xFFFFFFFF;

+	if (type == UPDATE_TYPE_FAST && memcmp(&dc->current_state->bw_ctx.bw.dcn.clk, &dc->clk_mgr->clks, offsetof(struct dc_clocks, prev_p_state_change_support)) != 0)
+		dc->optimized_required = true;
+
 	return type;
 }
...
@@ -2155,7 +2155,8 @@ void update_dchubp_dpp(
 		dc->res_pool->dccg->funcs->update_dpp_dto(
 				dc->res_pool->dccg,
 				dpp->inst,
-				pipe_ctx->plane_res.bw.dppclk_khz);
+				pipe_ctx->plane_res.bw.dppclk_khz,
+				false);
 	else
 		dc->clk_mgr->clks.dppclk_khz = should_divided_by_2 ?
 			dc->clk_mgr->clks.dispclk_khz / 2 :
...
@@ -44,12 +44,16 @@
 #define DC_LOGGER \
 	dccg->ctx->logger

-void dccg2_update_dpp_dto(struct dccg *dccg, int dpp_inst, int req_dppclk)
+void dccg2_update_dpp_dto(struct dccg *dccg,
+		int dpp_inst,
+		int req_dppclk,
+		bool reduce_divider_only)
 {
 	struct dcn_dccg *dccg_dcn = TO_DCN_DCCG(dccg);

 	if (dccg->ref_dppclk && req_dppclk) {
 		int ref_dppclk = dccg->ref_dppclk;
+		int current_phase, current_modulo;

 		ASSERT(req_dppclk <= ref_dppclk);
 		/* need to clamp to 8 bits */
@@ -61,9 +65,28 @@ void dccg2_update_dpp_dto(struct dccg *dccg, int dpp_inst, int req_dppclk)
 			if (req_dppclk > ref_dppclk)
 				req_dppclk = ref_dppclk;
 		}
-		REG_SET_2(DPPCLK_DTO_PARAM[dpp_inst], 0,
-				DPPCLK0_DTO_PHASE, req_dppclk,
-				DPPCLK0_DTO_MODULO, ref_dppclk);
+
+		REG_GET_2(DPPCLK_DTO_PARAM[dpp_inst],
+				DPPCLK0_DTO_PHASE, &current_phase,
+				DPPCLK0_DTO_MODULO, &current_modulo);
+
+		if (reduce_divider_only) {
+			// requested phase/modulo greater than current
+			if (req_dppclk * current_modulo >= current_phase * ref_dppclk) {
+				REG_SET_2(DPPCLK_DTO_PARAM[dpp_inst], 0,
+						DPPCLK0_DTO_PHASE, req_dppclk,
+						DPPCLK0_DTO_MODULO, ref_dppclk);
+			} else {
+				REG_SET_2(DPPCLK_DTO_PARAM[dpp_inst], 0,
+						DPPCLK0_DTO_PHASE, current_phase,
+						DPPCLK0_DTO_MODULO, current_modulo);
+			}
+		} else {
+			REG_SET_2(DPPCLK_DTO_PARAM[dpp_inst], 0,
+					DPPCLK0_DTO_PHASE, req_dppclk,
+					DPPCLK0_DTO_MODULO, ref_dppclk);
+		}
+
 		REG_UPDATE(DPPCLK_DTO_CTRL,
 				DPPCLK_DTO_ENABLE[dpp_inst], 1);
 	} else {
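
Note on the reduce_divider_only path above: the check req_dppclk * current_modulo >= current_phase * ref_dppclk is the cross-multiplied, integer-safe form of req_dppclk / ref_dppclk >= current_phase / current_modulo, i.e. the requested DTO ratio is at least the one currently latched. When the check fails in this mode, the currently programmed phase/modulo pair is written back unchanged, so a pipe's DTO ratio can only go up during the "prepare" pass; reductions are deferred to the "optimize" pass, where update_dpp_dto() is called with the flag cleared.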
...
@@ -97,7 +97,7 @@ struct dcn_dccg {
 	const struct dccg_mask *dccg_mask;
 };

-void dccg2_update_dpp_dto(struct dccg *dccg, int dpp_inst, int req_dppclk);
+void dccg2_update_dpp_dto(struct dccg *dccg, int dpp_inst, int req_dppclk, bool raise_divider_only);

 void dccg2_get_dccg_ref_freq(struct dccg *dccg,
 		unsigned int xtalin_freq_inKhz,
...
@@ -2482,7 +2482,7 @@ void dcn20_calculate_dlg_params(
 	context->bw_ctx.bw.dcn.clk.socclk_khz = context->bw_ctx.dml.vba.SOCCLK * 1000;
 	context->bw_ctx.bw.dcn.clk.dramclk_khz = context->bw_ctx.dml.vba.DRAMSpeed * 1000 / 16;
 	context->bw_ctx.bw.dcn.clk.dcfclk_deep_sleep_khz = context->bw_ctx.dml.vba.DCFCLKDeepSleep * 1000;
-	context->bw_ctx.bw.dcn.clk.fclk_khz = context->bw_ctx.dml.vba.FabricClock * 1000;
+	context->bw_ctx.bw.dcn.clk.fclk_khz = 0;
 	context->bw_ctx.bw.dcn.clk.p_state_change_support =
 		context->bw_ctx.dml.vba.DRAMClockChangeSupport[vlevel][context->bw_ctx.dml.vba.maxMpcComb]
 			!= dm_dram_clock_change_unsupported;
...
@@ -228,7 +228,6 @@ struct resource_pool {
 struct dcn_fe_bandwidth {
 	int dppclk_khz;
-
 };

 struct stream_resource {
...
@@ -281,8 +281,14 @@ static inline bool should_set_clock(bool safe_to_lower, int calc_clk, int cur_clk)
 static inline bool should_update_pstate_support(bool safe_to_lower, bool calc_support, bool cur_support)
 {
-	// Whenever we are transitioning pstate support, we always want to notify prior to committing state
-	return (calc_support != cur_support) ? !safe_to_lower : false;
+	if (cur_support != calc_support) {
+		if (calc_support == true && safe_to_lower)
+			return true;
+		else if (calc_support == false && !safe_to_lower)
+			return true;
+	}
+
+	return false;
 }

 int clk_mgr_helper_get_active_display_cnt(
...
@@ -38,7 +38,8 @@ struct dccg {
 struct dccg_funcs {
 	void (*update_dpp_dto)(struct dccg *dccg,
 			int dpp_inst,
-			int req_dppclk);
+			int req_dppclk,
+			bool reduce_divider_only);
 	void (*get_dccg_ref_freq)(struct dccg *dccg,
 			unsigned int xtalin_freq_inKhz,
 			unsigned int *dccg_ref_freq_inKhz);
...