2 * Copyright 2016 Advanced Micro Devices, Inc.
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
26 #include <linux/delay.h>
27 #include "dm_services.h"
28 #include "core_types.h"
30 #include "custom_float.h"
31 #include "dcn10_hw_sequencer.h"
32 #include "dce110/dce110_hw_sequencer.h"
33 #include "dce/dce_hwseq.h"
36 #include "dcn10_optc.h"
37 #include "dcn10/dcn10_dpp.h"
38 #include "dcn10/dcn10_mpc.h"
39 #include "timing_generator.h"
43 #include "reg_helper.h"
44 #include "dcn10_hubp.h"
45 #include "dcn10_hubbub.h"
46 #include "dcn10_cm_common.h"
47 #include "dc_link_dp.h"
52 #ifdef CONFIG_DRM_AMD_DC_DSC_SUPPORT
56 #define DC_LOGGER_INIT(logger)
64 #define FN(reg_name, field_name) \
65 hws->shifts->field_name, hws->masks->field_name
67 /* each printed value is 17 characters wide; the first two characters are spaces */
68 #define DTN_INFO_MICRO_SEC(ref_cycle) \
69 print_microsec(dc_ctx, log_ctx, ref_cycle)
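/* Convert a DCHUB reference-clock cycle count to microseconds and print it
 * to the DTN log with three fractional digits, as used by DTN_INFO_MICRO_SEC().
 */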
71 void print_microsec(struct dc_context *dc_ctx,
72 struct dc_log_buffer_ctx *log_ctx,
75 const uint32_t ref_clk_mhz = dc_ctx->dc->res_pool->ref_clocks.dchub_ref_clock_inKhz / 1000;
76 static const unsigned int frac = 1000;
77 uint32_t us_x10 = (ref_cycle * frac) / ref_clk_mhz;
79 DTN_INFO(" %11d.%03d",
84 static void log_mpc_crc(struct dc *dc,
85 struct dc_log_buffer_ctx *log_ctx)
87 struct dc_context *dc_ctx = dc->ctx;
88 struct dce_hwseq *hws = dc->hwseq;
90 if (REG(MPC_CRC_RESULT_GB))
91 DTN_INFO("MPC_CRC_RESULT_GB:%d MPC_CRC_RESULT_C:%d MPC_CRC_RESULT_AR:%d\n",
92 REG_READ(MPC_CRC_RESULT_GB), REG_READ(MPC_CRC_RESULT_C), REG_READ(MPC_CRC_RESULT_AR));
93 if (REG(DPP_TOP0_DPP_CRC_VAL_B_A))
94 DTN_INFO("DPP_TOP0_DPP_CRC_VAL_B_A:%d DPP_TOP0_DPP_CRC_VAL_R_G:%d\n",
95 REG_READ(DPP_TOP0_DPP_CRC_VAL_B_A), REG_READ(DPP_TOP0_DPP_CRC_VAL_R_G));
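/* Dump the HUBBUB watermark sets (urgent, self-refresh enter/exit and DRAM
 * clock change) to the DTN log, with each value converted to microseconds.
 */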
98 void dcn10_log_hubbub_state(struct dc *dc, struct dc_log_buffer_ctx *log_ctx)
100 struct dc_context *dc_ctx = dc->ctx;
101 struct dcn_hubbub_wm wm;
104 memset(&wm, 0, sizeof(struct dcn_hubbub_wm));
105 dc->res_pool->hubbub->funcs->wm_read_state(dc->res_pool->hubbub, &wm);
107 DTN_INFO("HUBBUB WM: data_urgent pte_meta_urgent"
108 " sr_enter sr_exit dram_clk_change\n");
110 for (i = 0; i < 4; i++) {
111 struct dcn_hubbub_wm_set *s;
114 DTN_INFO("WM_Set[%d]:", s->wm_set);
115 DTN_INFO_MICRO_SEC(s->data_urgent);
116 DTN_INFO_MICRO_SEC(s->pte_meta_urgent);
117 DTN_INFO_MICRO_SEC(s->sr_enter);
118 DTN_INFO_MICRO_SEC(s->sr_exit);
119 DTN_INFO_MICRO_SEC(s->dram_clk_chanage);
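/* Dump per-pipe HUBP state to the DTN log: the general HUBP registers
 * followed by the RQ, DLG and TTU register sets read back from hardware.
 */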
126 static void dcn10_log_hubp_states(struct dc *dc, void *log_ctx)
128 struct dc_context *dc_ctx = dc->ctx;
129 struct resource_pool *pool = dc->res_pool;
132 DTN_INFO("HUBP: format addr_hi width height"
133 " rot mir sw_mode dcc_en blank_en ttu_dis underflow"
134 " min_ttu_vblank qos_low_wm qos_high_wm\n");
135 for (i = 0; i < pool->pipe_count; i++) {
136 struct hubp *hubp = pool->hubps[i];
137 struct dcn_hubp_state *s = &(TO_DCN10_HUBP(hubp)->state);
139 hubp->funcs->hubp_read_state(hubp);
142 DTN_INFO("[%2d]: %5xh %6xh %5d %6d %2xh %2xh %6xh"
155 s->underflow_status);
156 DTN_INFO_MICRO_SEC(s->min_ttu_vblank);
157 DTN_INFO_MICRO_SEC(s->qos_level_low_wm);
158 DTN_INFO_MICRO_SEC(s->qos_level_high_wm);
163 DTN_INFO("\n=========RQ========\n");
164 DTN_INFO("HUBP: drq_exp_m prq_exp_m mrq_exp_m crq_exp_m plane1_ba L:chunk_s min_chu_s meta_ch_s"
165 " min_m_c_s dpte_gr_s mpte_gr_s swath_hei pte_row_h C:chunk_s min_chu_s meta_ch_s"
166 " min_m_c_s dpte_gr_s mpte_gr_s swath_hei pte_row_h\n");
167 for (i = 0; i < pool->pipe_count; i++) {
168 struct dcn_hubp_state *s = &(TO_DCN10_HUBP(pool->hubps[i])->state);
169 struct _vcs_dpi_display_rq_regs_st *rq_regs = &s->rq_regs;
172 DTN_INFO("[%2d]: %8xh %8xh %8xh %8xh %8xh %8xh %8xh %8xh %8xh %8xh %8xh %8xh %8xh %8xh %8xh %8xh %8xh %8xh %8xh %8xh %8xh\n",
173 pool->hubps[i]->inst, rq_regs->drq_expansion_mode, rq_regs->prq_expansion_mode, rq_regs->mrq_expansion_mode,
174 rq_regs->crq_expansion_mode, rq_regs->plane1_base_address, rq_regs->rq_regs_l.chunk_size,
175 rq_regs->rq_regs_l.min_chunk_size, rq_regs->rq_regs_l.meta_chunk_size,
176 rq_regs->rq_regs_l.min_meta_chunk_size, rq_regs->rq_regs_l.dpte_group_size,
177 rq_regs->rq_regs_l.mpte_group_size, rq_regs->rq_regs_l.swath_height,
178 rq_regs->rq_regs_l.pte_row_height_linear, rq_regs->rq_regs_c.chunk_size, rq_regs->rq_regs_c.min_chunk_size,
179 rq_regs->rq_regs_c.meta_chunk_size, rq_regs->rq_regs_c.min_meta_chunk_size,
180 rq_regs->rq_regs_c.dpte_group_size, rq_regs->rq_regs_c.mpte_group_size,
181 rq_regs->rq_regs_c.swath_height, rq_regs->rq_regs_c.pte_row_height_linear);
184 DTN_INFO("========DLG========\n");
185 DTN_INFO("HUBP: rc_hbe dlg_vbe min_d_y_n rc_per_ht rc_x_a_s "
186 " dst_y_a_s dst_y_pf dst_y_vvb dst_y_rvb dst_y_vfl dst_y_rfl rf_pix_fq"
187 " vratio_pf vrat_pf_c rc_pg_vbl rc_pg_vbc rc_mc_vbl rc_mc_vbc rc_pg_fll"
188 " rc_pg_flc rc_mc_fll rc_mc_flc pr_nom_l pr_nom_c rc_pg_nl rc_pg_nc "
189 " mr_nom_l mr_nom_c rc_mc_nl rc_mc_nc rc_ld_pl rc_ld_pc rc_ld_l "
190 " rc_ld_c cha_cur0 ofst_cur1 cha_cur1 vr_af_vc0 ddrq_limt x_rt_dlay"
191 " x_rp_dlay x_rr_sfl\n");
192 for (i = 0; i < pool->pipe_count; i++) {
193 struct dcn_hubp_state *s = &(TO_DCN10_HUBP(pool->hubps[i])->state);
194 struct _vcs_dpi_display_dlg_regs_st *dlg_regs = &s->dlg_attr;
197 DTN_INFO("[%2d]: %8xh %8xh %8xh %8xh %8xh %8xh %8xh %8xh %8xh %8xh %8xh %8xh %8xh"
198 "% 8xh %8xh %8xh %8xh %8xh %8xh %8xh %8xh %8xh %8xh %8xh %8xh %8xh %8xh"
199 " %8xh %8xh %8xh %8xh %8xh %8xh %8xh %8xh %8xh %8xh %8xh %8xh %8xh %8xh %8xh\n",
200 pool->hubps[i]->inst, dlg_regs->refcyc_h_blank_end, dlg_regs->dlg_vblank_end, dlg_regs->min_dst_y_next_start,
201 dlg_regs->refcyc_per_htotal, dlg_regs->refcyc_x_after_scaler, dlg_regs->dst_y_after_scaler,
202 dlg_regs->dst_y_prefetch, dlg_regs->dst_y_per_vm_vblank, dlg_regs->dst_y_per_row_vblank,
203 dlg_regs->dst_y_per_vm_flip, dlg_regs->dst_y_per_row_flip, dlg_regs->ref_freq_to_pix_freq,
204 dlg_regs->vratio_prefetch, dlg_regs->vratio_prefetch_c, dlg_regs->refcyc_per_pte_group_vblank_l,
205 dlg_regs->refcyc_per_pte_group_vblank_c, dlg_regs->refcyc_per_meta_chunk_vblank_l,
206 dlg_regs->refcyc_per_meta_chunk_vblank_c, dlg_regs->refcyc_per_pte_group_flip_l,
207 dlg_regs->refcyc_per_pte_group_flip_c, dlg_regs->refcyc_per_meta_chunk_flip_l,
208 dlg_regs->refcyc_per_meta_chunk_flip_c, dlg_regs->dst_y_per_pte_row_nom_l,
209 dlg_regs->dst_y_per_pte_row_nom_c, dlg_regs->refcyc_per_pte_group_nom_l,
210 dlg_regs->refcyc_per_pte_group_nom_c, dlg_regs->dst_y_per_meta_row_nom_l,
211 dlg_regs->dst_y_per_meta_row_nom_c, dlg_regs->refcyc_per_meta_chunk_nom_l,
212 dlg_regs->refcyc_per_meta_chunk_nom_c, dlg_regs->refcyc_per_line_delivery_pre_l,
213 dlg_regs->refcyc_per_line_delivery_pre_c, dlg_regs->refcyc_per_line_delivery_l,
214 dlg_regs->refcyc_per_line_delivery_c, dlg_regs->chunk_hdl_adjust_cur0, dlg_regs->dst_y_offset_cur1,
215 dlg_regs->chunk_hdl_adjust_cur1, dlg_regs->vready_after_vcount0, dlg_regs->dst_y_delta_drq_limit,
216 dlg_regs->xfc_reg_transfer_delay, dlg_regs->xfc_reg_precharge_delay,
217 dlg_regs->xfc_reg_remote_surface_flip_latency);
220 DTN_INFO("========TTU========\n");
221 DTN_INFO("HUBP: qos_ll_wm qos_lh_wm mn_ttu_vb qos_l_flp rc_rd_p_l rc_rd_l rc_rd_p_c"
222 " rc_rd_c rc_rd_c0 rc_rd_pc0 rc_rd_c1 rc_rd_pc1 qos_lf_l qos_rds_l"
223 " qos_lf_c qos_rds_c qos_lf_c0 qos_rds_c0 qos_lf_c1 qos_rds_c1\n");
224 for (i = 0; i < pool->pipe_count; i++) {
225 struct dcn_hubp_state *s = &(TO_DCN10_HUBP(pool->hubps[i])->state);
226 struct _vcs_dpi_display_ttu_regs_st *ttu_regs = &s->ttu_attr;
229 DTN_INFO("[%2d]: %8xh %8xh %8xh %8xh %8xh %8xh %8xh %8xh %8xh %8xh %8xh %8xh %8xh %8xh %8xh %8xh %8xh %8xh %8xh %8xh\n",
230 pool->hubps[i]->inst, ttu_regs->qos_level_low_wm, ttu_regs->qos_level_high_wm, ttu_regs->min_ttu_vblank,
231 ttu_regs->qos_level_flip, ttu_regs->refcyc_per_req_delivery_pre_l, ttu_regs->refcyc_per_req_delivery_l,
232 ttu_regs->refcyc_per_req_delivery_pre_c, ttu_regs->refcyc_per_req_delivery_c, ttu_regs->refcyc_per_req_delivery_cur0,
233 ttu_regs->refcyc_per_req_delivery_pre_cur0, ttu_regs->refcyc_per_req_delivery_cur1,
234 ttu_regs->refcyc_per_req_delivery_pre_cur1, ttu_regs->qos_level_fixed_l, ttu_regs->qos_ramp_disable_l,
235 ttu_regs->qos_level_fixed_c, ttu_regs->qos_ramp_disable_c, ttu_regs->qos_level_fixed_cur0,
236 ttu_regs->qos_ramp_disable_cur0, ttu_regs->qos_level_fixed_cur1, ttu_regs->qos_ramp_disable_cur1);
241 void dcn10_log_hw_state(struct dc *dc,
242 struct dc_log_buffer_ctx *log_ctx)
244 struct dc_context *dc_ctx = dc->ctx;
245 struct resource_pool *pool = dc->res_pool;
250 dcn10_log_hubbub_state(dc, log_ctx);
252 dcn10_log_hubp_states(dc, log_ctx);
254 DTN_INFO("DPP: IGAM format IGAM mode DGAM mode RGAM mode"
255 " GAMUT mode C11 C12 C13 C14 C21 C22 C23 C24 "
256 "C31 C32 C33 C34\n");
257 for (i = 0; i < pool->pipe_count; i++) {
258 struct dpp *dpp = pool->dpps[i];
259 struct dcn_dpp_state s = {0};
261 dpp->funcs->dpp_read_state(dpp, &s);
266 DTN_INFO("[%2d]: %11xh %-11s %-11s %-11s"
267 "%8x %08xh %08xh %08xh %08xh %08xh %08xh",
270 (s.igam_lut_mode == 0) ? "BypassFixed" :
271 ((s.igam_lut_mode == 1) ? "BypassFloat" :
272 ((s.igam_lut_mode == 2) ? "RAM" :
273 ((s.igam_lut_mode == 3) ? "RAM" :
275 (s.dgam_lut_mode == 0) ? "Bypass" :
276 ((s.dgam_lut_mode == 1) ? "sRGB" :
277 ((s.dgam_lut_mode == 2) ? "Ycc" :
278 ((s.dgam_lut_mode == 3) ? "RAM" :
279 ((s.dgam_lut_mode == 4) ? "RAM" :
281 (s.rgam_lut_mode == 0) ? "Bypass" :
282 ((s.rgam_lut_mode == 1) ? "sRGB" :
283 ((s.rgam_lut_mode == 2) ? "Ycc" :
284 ((s.rgam_lut_mode == 3) ? "RAM" :
285 ((s.rgam_lut_mode == 4) ? "RAM" :
288 s.gamut_remap_c11_c12,
289 s.gamut_remap_c13_c14,
290 s.gamut_remap_c21_c22,
291 s.gamut_remap_c23_c24,
292 s.gamut_remap_c31_c32,
293 s.gamut_remap_c33_c34);
298 DTN_INFO("MPCC: OPP DPP MPCCBOT MODE ALPHA_MODE PREMULT OVERLAP_ONLY IDLE\n");
299 for (i = 0; i < pool->pipe_count; i++) {
300 struct mpcc_state s = {0};
302 pool->mpc->funcs->read_mpcc_state(pool->mpc, i, &s);
304 DTN_INFO("[%2d]: %2xh %2xh %6xh %4d %10d %7d %12d %4d\n",
305 i, s.opp_id, s.dpp_id, s.bot_mpcc_id,
306 s.mode, s.alpha_mode, s.pre_multiplied_alpha, s.overlap_only,
311 DTN_INFO("OTG: v_bs v_be v_ss v_se vpol vmax vmin vmax_sel vmin_sel"
312 " h_bs h_be h_ss h_se hpol htot vtot underflow\n");
314 for (i = 0; i < pool->timing_generator_count; i++) {
315 struct timing_generator *tg = pool->timing_generators[i];
316 struct dcn_otg_state s = {0};
318 optc1_read_otg_state(DCN10TG_FROM_TG(tg), &s);
320 // only print if the OTG master is enabled
321 if ((s.otg_enabled & 1) == 0)
	continue;
324 DTN_INFO("[%d]: %5d %5d %5d %5d %5d %5d %5d %9d %9d %5d %5d %5d"
325 " %5d %5d %5d %5d %9d\n",
343 s.underflow_occurred_status);
345 // Clear underflow for debug purposes.
346 // We want to keep the underflow sticky bit on for longevity tests outside of the test environment.
347 // This function is called only from the Windows or Diags test environments, so it is safe to clear
348 // it here without affecting the original intent.
349 tg->funcs->clear_optc_underflow(tg);
353 #ifdef CONFIG_DRM_AMD_DC_DSC_SUPPORT
354 DTN_INFO("DSC: CLOCK_EN SLICE_WIDTH Bytes_pp\n");
355 for (i = 0; i < pool->res_cap->num_dsc; i++) {
356 struct display_stream_compressor *dsc = pool->dscs[i];
357 struct dcn_dsc_state s = {0};
359 dsc->funcs->dsc_read_state(dsc, &s);
360 DTN_INFO("[%d]: %-9d %-12d %-10d\n",
364 s.dsc_bytes_per_pixel);
369 DTN_INFO("S_ENC: DSC_MODE SEC_GSP7_LINE_NUM"
370 " VBID6_LINE_REFERENCE VBID6_LINE_NUM SEC_GSP7_ENABLE SEC_STREAM_ENABLE\n");
371 for (i = 0; i < pool->stream_enc_count; i++) {
372 struct stream_encoder *enc = pool->stream_enc[i];
373 struct enc_state s = {0};
375 if (enc->funcs->enc_read_state) {
376 enc->funcs->enc_read_state(enc, &s);
377 DTN_INFO("[%-3d]: %-9d %-18d %-21d %-15d %-16d %-17d\n",
380 s.sec_gsp_pps_line_num,
381 s.vbid6_line_reference,
383 s.sec_gsp_pps_enable,
384 s.sec_stream_enable);
390 DTN_INFO("L_ENC: DPHY_FEC_EN DPHY_FEC_READY_SHADOW DPHY_FEC_ACTIVE_STATUS\n");
391 for (i = 0; i < dc->link_count; i++) {
392 struct link_encoder *lenc = dc->links[i]->link_enc;
394 struct link_enc_state s = {0};
396 if (lenc->funcs->read_state) {
397 lenc->funcs->read_state(lenc, &s);
398 DTN_INFO("[%-3d]: %-12d %-22d %-22d\n",
401 s.dphy_fec_ready_shadow,
402 s.dphy_fec_active_status);
409 DTN_INFO("\nCALCULATED Clocks: dcfclk_khz:%d dcfclk_deep_sleep_khz:%d dispclk_khz:%d\n"
410 "dppclk_khz:%d max_supported_dppclk_khz:%d fclk_khz:%d socclk_khz:%d\n\n",
411 dc->current_state->bw_ctx.bw.dcn.clk.dcfclk_khz,
412 dc->current_state->bw_ctx.bw.dcn.clk.dcfclk_deep_sleep_khz,
413 dc->current_state->bw_ctx.bw.dcn.clk.dispclk_khz,
414 dc->current_state->bw_ctx.bw.dcn.clk.dppclk_khz,
415 dc->current_state->bw_ctx.bw.dcn.clk.max_supported_dppclk_khz,
416 dc->current_state->bw_ctx.bw.dcn.clk.fclk_khz,
417 dc->current_state->bw_ctx.bw.dcn.clk.socclk_khz);
419 log_mpc_crc(dc, log_ctx);
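/* Check whether an OTG or HUBP underflow occurred for this pipe, clearing the
 * sticky status bits as a side effect.
 */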
424 bool dcn10_did_underflow_occur(struct dc *dc, struct pipe_ctx *pipe_ctx)
426 struct hubp *hubp = pipe_ctx->plane_res.hubp;
427 struct timing_generator *tg = pipe_ctx->stream_res.tg;
429 if (tg->funcs->is_optc_underflow_occurred(tg)) {
430 tg->funcs->clear_optc_underflow(tg);
434 if (hubp->funcs->hubp_get_underflow_status(hubp)) {
435 hubp->funcs->hubp_clear_underflow(hubp);
441 static void dcn10_enable_power_gating_plane(
442 struct dce_hwseq *hws,
	bool enable)
445 bool force_on = true; /* disable power gating */
	if (enable)
		force_on = false;
451 REG_UPDATE(DOMAIN0_PG_CONFIG, DOMAIN0_POWER_FORCEON, force_on);
452 REG_UPDATE(DOMAIN2_PG_CONFIG, DOMAIN2_POWER_FORCEON, force_on);
453 REG_UPDATE(DOMAIN4_PG_CONFIG, DOMAIN4_POWER_FORCEON, force_on);
454 REG_UPDATE(DOMAIN6_PG_CONFIG, DOMAIN6_POWER_FORCEON, force_on);
457 REG_UPDATE(DOMAIN1_PG_CONFIG, DOMAIN1_POWER_FORCEON, force_on);
458 REG_UPDATE(DOMAIN3_PG_CONFIG, DOMAIN3_POWER_FORCEON, force_on);
459 REG_UPDATE(DOMAIN5_PG_CONFIG, DOMAIN5_POWER_FORCEON, force_on);
460 REG_UPDATE(DOMAIN7_PG_CONFIG, DOMAIN7_POWER_FORCEON, force_on);
463 static void dcn10_disable_vga(
464 struct dce_hwseq *hws)
466 unsigned int in_vga1_mode = 0;
467 unsigned int in_vga2_mode = 0;
468 unsigned int in_vga3_mode = 0;
469 unsigned int in_vga4_mode = 0;
471 REG_GET(D1VGA_CONTROL, D1VGA_MODE_ENABLE, &in_vga1_mode);
472 REG_GET(D2VGA_CONTROL, D2VGA_MODE_ENABLE, &in_vga2_mode);
473 REG_GET(D3VGA_CONTROL, D3VGA_MODE_ENABLE, &in_vga3_mode);
474 REG_GET(D4VGA_CONTROL, D4VGA_MODE_ENABLE, &in_vga4_mode);
476 if (in_vga1_mode == 0 && in_vga2_mode == 0 &&
477 in_vga3_mode == 0 && in_vga4_mode == 0)
480 REG_WRITE(D1VGA_CONTROL, 0);
481 REG_WRITE(D2VGA_CONTROL, 0);
482 REG_WRITE(D3VGA_CONTROL, 0);
483 REG_WRITE(D4VGA_CONTROL, 0);
485 /* HW Engineer's Notes:
486 * During switch from vga->extended, if we set the VGA_TEST_ENABLE and
487 * then hit the VGA_TEST_RENDER_START, then the DCHUBP timing gets updated correctly.
489 * Then vBIOS will have it poll for the VGA_TEST_RENDER_DONE and unset
490 * VGA_TEST_ENABLE, to leave it in the same state as before.
492 REG_UPDATE(VGA_TEST_CONTROL, VGA_TEST_ENABLE, 1);
493 REG_UPDATE(VGA_TEST_CONTROL, VGA_TEST_RENDER_START, 1);
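/* Power gate or ungate a single DPP. The DPP instances are controlled through
 * the odd-numbered power domains (DOMAIN1/3/5/7); the PGFSM status is polled
 * until the domain reaches the requested power state.
 */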
496 static void dcn10_dpp_pg_control(
497 struct dce_hwseq *hws,
498 unsigned int dpp_inst,
501 uint32_t power_gate = power_on ? 0 : 1;
502 uint32_t pwr_status = power_on ? 0 : 2;
504 if (hws->ctx->dc->debug.disable_dpp_power_gate)
506 if (REG(DOMAIN1_PG_CONFIG) == 0)
511 REG_UPDATE(DOMAIN1_PG_CONFIG,
512 DOMAIN1_POWER_GATE, power_gate);
514 REG_WAIT(DOMAIN1_PG_STATUS,
515 DOMAIN1_PGFSM_PWR_STATUS, pwr_status,
519 REG_UPDATE(DOMAIN3_PG_CONFIG,
520 DOMAIN3_POWER_GATE, power_gate);
522 REG_WAIT(DOMAIN3_PG_STATUS,
523 DOMAIN3_PGFSM_PWR_STATUS, pwr_status,
527 REG_UPDATE(DOMAIN5_PG_CONFIG,
528 DOMAIN5_POWER_GATE, power_gate);
530 REG_WAIT(DOMAIN5_PG_STATUS,
531 DOMAIN5_PGFSM_PWR_STATUS, pwr_status,
535 REG_UPDATE(DOMAIN7_PG_CONFIG,
536 DOMAIN7_POWER_GATE, power_gate);
538 REG_WAIT(DOMAIN7_PG_STATUS,
539 DOMAIN7_PGFSM_PWR_STATUS, pwr_status,
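/* Power gate or ungate a single HUBP. DCHUBP instances 0-3 are controlled
 * through the even-numbered power domains (DOMAIN0/2/4/6); the PGFSM status
 * is polled until the domain reaches the requested power state.
 */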
548 static void dcn10_hubp_pg_control(
549 struct dce_hwseq *hws,
550 unsigned int hubp_inst,
553 uint32_t power_gate = power_on ? 0 : 1;
554 uint32_t pwr_status = power_on ? 0 : 2;
556 if (hws->ctx->dc->debug.disable_hubp_power_gate)
558 if (REG(DOMAIN0_PG_CONFIG) == 0)
562 case 0: /* DCHUBP0 */
563 REG_UPDATE(DOMAIN0_PG_CONFIG,
564 DOMAIN0_POWER_GATE, power_gate);
566 REG_WAIT(DOMAIN0_PG_STATUS,
567 DOMAIN0_PGFSM_PWR_STATUS, pwr_status,
570 case 1: /* DCHUBP1 */
571 REG_UPDATE(DOMAIN2_PG_CONFIG,
572 DOMAIN2_POWER_GATE, power_gate);
574 REG_WAIT(DOMAIN2_PG_STATUS,
575 DOMAIN2_PGFSM_PWR_STATUS, pwr_status,
578 case 2: /* DCHUBP2 */
579 REG_UPDATE(DOMAIN4_PG_CONFIG,
580 DOMAIN4_POWER_GATE, power_gate);
582 REG_WAIT(DOMAIN4_PG_STATUS,
583 DOMAIN4_PGFSM_PWR_STATUS, pwr_status,
586 case 3: /* DCHUBP3 */
587 REG_UPDATE(DOMAIN6_PG_CONFIG,
588 DOMAIN6_POWER_GATE, power_gate);
590 REG_WAIT(DOMAIN6_PG_STATUS,
591 DOMAIN6_PGFSM_PWR_STATUS, pwr_status,
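/* Ungate the DPP and HUBP power domains for the given front-end pipe.
 * DC_IP_REQUEST_CNTL is asserted around the power-gating programming and
 * released afterwards.
 */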
600 static void power_on_plane(
601 struct dce_hwseq *hws,
604 DC_LOGGER_INIT(hws->ctx->logger);
605 if (REG(DC_IP_REQUEST_CNTL)) {
606 REG_SET(DC_IP_REQUEST_CNTL, 0,
608 hws->ctx->dc->hwss.dpp_pg_control(hws, plane_id, true);
609 hws->ctx->dc->hwss.hubp_pg_control(hws, plane_id, true);
610 REG_SET(DC_IP_REQUEST_CNTL, 0,
613 "Un-gated front end for pipe %d\n", plane_id);
617 static void undo_DEGVIDCN10_253_wa(struct dc *dc)
619 struct dce_hwseq *hws = dc->hwseq;
620 struct hubp *hubp = dc->res_pool->hubps[0];
622 if (!hws->wa_state.DEGVIDCN10_253_applied)
625 hubp->funcs->set_blank(hubp, true);
627 REG_SET(DC_IP_REQUEST_CNTL, 0,
630 dc->hwss.hubp_pg_control(hws, 0, false);
631 REG_SET(DC_IP_REQUEST_CNTL, 0,
634 hws->wa_state.DEGVIDCN10_253_applied = false;
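/* DEGVIDCN10_253 workaround: once every HUBP in the pool has been power
 * gated, power HUBP0 back on so that stutter can still be enabled.
 * undo_DEGVIDCN10_253_wa() above reverses this before planes are re-enabled.
 */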
637 static void apply_DEGVIDCN10_253_wa(struct dc *dc)
639 struct dce_hwseq *hws = dc->hwseq;
640 struct hubp *hubp = dc->res_pool->hubps[0];
643 if (dc->debug.disable_stutter)
646 if (!hws->wa.DEGVIDCN10_253)
649 for (i = 0; i < dc->res_pool->pipe_count; i++) {
650 if (!dc->res_pool->hubps[i]->power_gated)
654 /* all pipes are power gated; apply the workaround to enable stutter. */
656 REG_SET(DC_IP_REQUEST_CNTL, 0,
659 dc->hwss.hubp_pg_control(hws, 0, true);
660 REG_SET(DC_IP_REQUEST_CNTL, 0,
663 hubp->funcs->set_hubp_blank_en(hubp, false);
664 hws->wa_state.DEGVIDCN10_253_applied = true;
667 static void dcn10_bios_golden_init(struct dc *dc)
669 struct dc_bios *bp = dc->ctx->dc_bios;
671 bool allow_self_refresh_force_enable = true;
673 if (dc->res_pool->hubbub->funcs->is_allow_self_refresh_enabled)
674 allow_self_refresh_force_enable =
675 dc->res_pool->hubbub->funcs->is_allow_self_refresh_enabled(dc->res_pool->hubbub);
678 /* WA to let the DF sleep when idle after resume from S0i3.
679 * The command table sets DCHUBBUB_ARB_ALLOW_SELF_REFRESH_FORCE_ENABLE to 1;
680 * if DCHUBBUB_ARB_ALLOW_SELF_REFRESH_FORCE_ENABLE was 0 before the
681 * command table was called and changed to 1 afterwards,
682 * it should be set back to 0.
685 /* initialize dcn global */
686 bp->funcs->enable_disp_power_gating(bp,
687 CONTROLLER_ID_D0, ASIC_PIPE_INIT);
689 for (i = 0; i < dc->res_pool->pipe_count; i++) {
690 /* initialize dcn per pipe */
691 bp->funcs->enable_disp_power_gating(bp,
692 CONTROLLER_ID_D0 + i, ASIC_PIPE_DISABLE);
695 if (dc->res_pool->hubbub->funcs->allow_self_refresh_control)
696 if (allow_self_refresh_force_enable == false &&
697 dc->res_pool->hubbub->funcs->is_allow_self_refresh_enabled(dc->res_pool->hubbub))
698 dc->res_pool->hubbub->funcs->allow_self_refresh_control(dc->res_pool->hubbub, true);
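/* Workaround for a false OPTC underflow indication: wait for pending MPCC
 * disconnects on pipes driving this stream, re-enable the blank data double
 * buffer, and clear the OPTC underflow status if it was raised only while
 * the workaround ran.
 */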
702 static void false_optc_underflow_wa(
704 const struct dc_stream_state *stream,
705 struct timing_generator *tg)
710 if (!dc->hwseq->wa.false_optc_underflow)
713 underflow = tg->funcs->is_optc_underflow_occurred(tg);
715 for (i = 0; i < dc->res_pool->pipe_count; i++) {
716 struct pipe_ctx *old_pipe_ctx = &dc->current_state->res_ctx.pipe_ctx[i];
718 if (old_pipe_ctx->stream != stream)
721 dc->hwss.wait_for_mpcc_disconnect(dc, dc->res_pool, old_pipe_ctx);
724 tg->funcs->set_blank_data_double_buffer(tg, true);
726 if (tg->funcs->is_optc_underflow_occurred(tg) && !underflow)
727 tg->funcs->clear_optc_underflow(tg);
730 static enum dc_status dcn10_enable_stream_timing(
731 struct pipe_ctx *pipe_ctx,
732 struct dc_state *context,
735 struct dc_stream_state *stream = pipe_ctx->stream;
736 enum dc_color_space color_space;
737 struct tg_color black_color = {0};
739 /* In the caller's loop, pipe0 is the parent pipe and is handled first.
740 * The back end is set up for pipe0; the other child pipes share the back
741 * end with pipe0, so no programming is needed for them.
743 if (pipe_ctx->top_pipe != NULL)
746 /* TODO check if timing_changed, disable stream if timing changed */
748 /* The HW programming guide assumes the display was already disabled
749 * by the unplug sequence and that the OTG is stopped.
751 pipe_ctx->stream_res.tg->funcs->enable_optc_clock(pipe_ctx->stream_res.tg, true);
753 if (false == pipe_ctx->clock_source->funcs->program_pix_clk(
754 pipe_ctx->clock_source,
755 &pipe_ctx->stream_res.pix_clk_params,
756 &pipe_ctx->pll_settings)) {
758 return DC_ERROR_UNEXPECTED;
761 pipe_ctx->stream_res.tg->funcs->program_timing(
762 pipe_ctx->stream_res.tg,
764 pipe_ctx->pipe_dlg_param.vready_offset,
765 pipe_ctx->pipe_dlg_param.vstartup_start,
766 pipe_ctx->pipe_dlg_param.vupdate_offset,
767 pipe_ctx->pipe_dlg_param.vupdate_width,
768 pipe_ctx->stream->signal,
771 #if 0 /* move to after enable_crtc */
772 /* TODO: OPP FMT, ABM. etc. should be done here. */
773 /* or FPGA now. instance 0 only. TODO: move to opp.c */
775 inst_offset = reg_offsets[pipe_ctx->stream_res.tg->inst].fmt;
777 pipe_ctx->stream_res.opp->funcs->opp_program_fmt(
778 pipe_ctx->stream_res.opp,
779 &stream->bit_depth_params,
782 /* program otg blank color */
783 color_space = stream->output_color_space;
784 color_space_to_black_color(dc, color_space, &black_color);
786 if (pipe_ctx->stream_res.tg->funcs->set_blank_color)
787 pipe_ctx->stream_res.tg->funcs->set_blank_color(
788 pipe_ctx->stream_res.tg,
791 if (pipe_ctx->stream_res.tg->funcs->is_blanked &&
792 !pipe_ctx->stream_res.tg->funcs->is_blanked(pipe_ctx->stream_res.tg)) {
793 pipe_ctx->stream_res.tg->funcs->set_blank(pipe_ctx->stream_res.tg, true);
794 hwss_wait_for_blank_complete(pipe_ctx->stream_res.tg);
795 false_optc_underflow_wa(dc, pipe_ctx->stream, pipe_ctx->stream_res.tg);
798 /* VTG is within DCHUB command block. DCFCLK is always on */
799 if (false == pipe_ctx->stream_res.tg->funcs->enable_crtc(pipe_ctx->stream_res.tg)) {
801 return DC_ERROR_UNEXPECTED;
804 /* TODO program crtc source select for non-virtual signal*/
805 /* TODO program FMT */
806 /* TODO setup link_enc */
807 /* TODO set stream attributes */
808 /* TODO program audio */
809 /* TODO enable stream if timing changed */
810 /* TODO unblank stream if DP */
815 static void dcn10_reset_back_end_for_pipe(
817 struct pipe_ctx *pipe_ctx,
818 struct dc_state *context)
821 DC_LOGGER_INIT(dc->ctx->logger);
822 if (pipe_ctx->stream_res.stream_enc == NULL) {
823 pipe_ctx->stream = NULL;
827 if (!IS_FPGA_MAXIMUS_DC(dc->ctx->dce_environment)) {
828 /* DPMS may have already disabled the stream */
829 if (!pipe_ctx->stream->dpms_off)
830 core_link_disable_stream(pipe_ctx, FREE_ACQUIRED_RESOURCE);
831 else if (pipe_ctx->stream_res.audio) {
832 dc->hwss.disable_audio_stream(pipe_ctx, FREE_ACQUIRED_RESOURCE);
837 /* In the caller's loop, the parent pipe (pipe0) is reset last.
838 * The back end is shared by all pipes and is disabled only when the
 * parent pipe itself is reset.
841 if (pipe_ctx->top_pipe == NULL) {
842 pipe_ctx->stream_res.tg->funcs->disable_crtc(pipe_ctx->stream_res.tg);
844 pipe_ctx->stream_res.tg->funcs->enable_optc_clock(pipe_ctx->stream_res.tg, false);
845 if (pipe_ctx->stream_res.tg->funcs->set_drr)
846 pipe_ctx->stream_res.tg->funcs->set_drr(
847 pipe_ctx->stream_res.tg, NULL);
850 for (i = 0; i < dc->res_pool->pipe_count; i++)
851 if (&dc->current_state->res_ctx.pipe_ctx[i] == pipe_ctx)
854 if (i == dc->res_pool->pipe_count)
857 pipe_ctx->stream = NULL;
858 DC_LOG_DEBUG("Reset back end for pipe %d, tg:%d\n",
859 pipe_ctx->pipe_idx, pipe_ctx->stream_res.tg->inst);
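/* Forced-recovery workaround for HUBP underflow: blank every HUBP, soft-reset
 * DCHUBBUB, toggle HUBP_DISABLE and unblank again, following the register
 * sequence documented in the comment below. Only used when
 * dc->debug.recovery_enabled is set.
 */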
862 static bool dcn10_hw_wa_force_recovery(struct dc *dc)
866 bool need_recover = true;
868 if (!dc->debug.recovery_enabled)
871 for (i = 0; i < dc->res_pool->pipe_count; i++) {
872 struct pipe_ctx *pipe_ctx =
873 &dc->current_state->res_ctx.pipe_ctx[i];
874 if (pipe_ctx != NULL) {
875 hubp = pipe_ctx->plane_res.hubp;
876 if (hubp != NULL && hubp->funcs->hubp_get_underflow_status) {
877 if (hubp->funcs->hubp_get_underflow_status(hubp) != 0) {
878 /* one pipe underflowed; we will reset all the pipes */
887 DCHUBP_CNTL:HUBP_BLANK_EN=1
888 DCHUBBUB_SOFT_RESET:DCHUBBUB_GLOBAL_SOFT_RESET=1
889 DCHUBP_CNTL:HUBP_DISABLE=1
890 DCHUBP_CNTL:HUBP_DISABLE=0
891 DCHUBBUB_SOFT_RESET:DCHUBBUB_GLOBAL_SOFT_RESET=0
892 DCSURF_PRIMARY_SURFACE_ADDRESS
893 DCHUBP_CNTL:HUBP_BLANK_EN=0
896 for (i = 0; i < dc->res_pool->pipe_count; i++) {
897 struct pipe_ctx *pipe_ctx =
898 &dc->current_state->res_ctx.pipe_ctx[i];
899 if (pipe_ctx != NULL) {
900 hubp = pipe_ctx->plane_res.hubp;
901 /*DCHUBP_CNTL:HUBP_BLANK_EN=1*/
902 if (hubp != NULL && hubp->funcs->set_hubp_blank_en)
903 hubp->funcs->set_hubp_blank_en(hubp, true);
906 /*DCHUBBUB_SOFT_RESET:DCHUBBUB_GLOBAL_SOFT_RESET=1*/
907 hubbub1_soft_reset(dc->res_pool->hubbub, true);
909 for (i = 0; i < dc->res_pool->pipe_count; i++) {
910 struct pipe_ctx *pipe_ctx =
911 &dc->current_state->res_ctx.pipe_ctx[i];
912 if (pipe_ctx != NULL) {
913 hubp = pipe_ctx->plane_res.hubp;
914 /*DCHUBP_CNTL:HUBP_DISABLE=1*/
915 if (hubp != NULL && hubp->funcs->hubp_disable_control)
916 hubp->funcs->hubp_disable_control(hubp, true);
919 for (i = 0; i < dc->res_pool->pipe_count; i++) {
920 struct pipe_ctx *pipe_ctx =
921 &dc->current_state->res_ctx.pipe_ctx[i];
922 if (pipe_ctx != NULL) {
923 hubp = pipe_ctx->plane_res.hubp;
924 /*DCHUBP_CNTL:HUBP_DISABLE=0*/
925 if (hubp != NULL && hubp->funcs->hubp_disable_control)
926 hubp->funcs->hubp_disable_control(hubp, false);
929 /*DCHUBBUB_SOFT_RESET:DCHUBBUB_GLOBAL_SOFT_RESET=0*/
930 hubbub1_soft_reset(dc->res_pool->hubbub, false);
931 for (i = 0; i < dc->res_pool->pipe_count; i++) {
932 struct pipe_ctx *pipe_ctx =
933 &dc->current_state->res_ctx.pipe_ctx[i];
934 if (pipe_ctx != NULL) {
935 hubp = pipe_ctx->plane_res.hubp;
936 /*DCHUBP_CNTL:HUBP_BLANK_EN=0*/
937 if (hubp != NULL && hubp->funcs->set_hubp_blank_en)
938 hubp->funcs->set_hubp_blank_en(hubp, false);
946 void dcn10_verify_allow_pstate_change_high(struct dc *dc)
948 static bool should_log_hw_state; /* prevent hw state log by default */
950 if (!hubbub1_verify_allow_pstate_change_high(dc->res_pool->hubbub)) {
951 if (should_log_hw_state) {
952 dcn10_log_hw_state(dc, NULL);
955 if (dcn10_hw_wa_force_recovery(dc)) {
957 if (!hubbub1_verify_allow_pstate_change_high(dc->res_pool->hubbub))
963 /* trigger HW to start disconnecting the plane from the stream on the next vsync */
964 void hwss1_plane_atomic_disconnect(struct dc *dc, struct pipe_ctx *pipe_ctx)
966 struct hubp *hubp = pipe_ctx->plane_res.hubp;
967 int dpp_id = pipe_ctx->plane_res.dpp->inst;
968 struct mpc *mpc = dc->res_pool->mpc;
969 struct mpc_tree *mpc_tree_params;
970 struct mpcc *mpcc_to_remove = NULL;
971 struct output_pixel_processor *opp = pipe_ctx->stream_res.opp;
973 mpc_tree_params = &(opp->mpc_tree_params);
974 mpcc_to_remove = mpc->funcs->get_mpcc_for_dpp(mpc_tree_params, dpp_id);
977 if (mpcc_to_remove == NULL)
980 mpc->funcs->remove_mpcc(mpc, mpc_tree_params, mpcc_to_remove);
982 opp->mpcc_disconnect_pending[pipe_ctx->plane_res.mpcc_inst] = true;
984 dc->optimized_required = true;
986 if (hubp->funcs->hubp_disconnect)
987 hubp->funcs->hubp_disconnect(hubp);
989 if (dc->debug.sanity_checks)
990 dcn10_verify_allow_pstate_change_high(dc);
993 static void dcn10_plane_atomic_power_down(struct dc *dc,
997 struct dce_hwseq *hws = dc->hwseq;
998 DC_LOGGER_INIT(dc->ctx->logger);
1000 if (REG(DC_IP_REQUEST_CNTL)) {
1001 REG_SET(DC_IP_REQUEST_CNTL, 0,
1003 dc->hwss.dpp_pg_control(hws, dpp->inst, false);
1004 dc->hwss.hubp_pg_control(hws, hubp->inst, false);
1005 dpp->funcs->dpp_reset(dpp);
1006 REG_SET(DC_IP_REQUEST_CNTL, 0,
1009 "Power gated front end %d\n", hubp->inst);
1013 /* disable HW used by plane.
1014 * note: cannot disable until disconnect is complete
1016 static void dcn10_plane_atomic_disable(struct dc *dc, struct pipe_ctx *pipe_ctx)
1018 struct hubp *hubp = pipe_ctx->plane_res.hubp;
1019 struct dpp *dpp = pipe_ctx->plane_res.dpp;
1020 int opp_id = hubp->opp_id;
1022 dc->hwss.wait_for_mpcc_disconnect(dc, dc->res_pool, pipe_ctx);
1024 hubp->funcs->hubp_clk_cntl(hubp, false);
1026 dpp->funcs->dpp_dppclk_control(dpp, false, false);
1028 if (opp_id != 0xf && pipe_ctx->stream_res.opp->mpc_tree_params.opp_list == NULL)
1029 pipe_ctx->stream_res.opp->funcs->opp_pipe_clock_control(
1030 pipe_ctx->stream_res.opp,
1033 hubp->power_gated = true;
1034 dc->optimized_required = false; /* We're powering off, no need to optimize */
1036 dc->hwss.plane_atomic_power_down(dc,
1037 pipe_ctx->plane_res.dpp,
1038 pipe_ctx->plane_res.hubp);
1040 pipe_ctx->stream = NULL;
1041 memset(&pipe_ctx->stream_res, 0, sizeof(pipe_ctx->stream_res));
1042 memset(&pipe_ctx->plane_res, 0, sizeof(pipe_ctx->plane_res));
1043 pipe_ctx->top_pipe = NULL;
1044 pipe_ctx->bottom_pipe = NULL;
1045 pipe_ctx->plane_state = NULL;
1048 static void dcn10_disable_plane(struct dc *dc, struct pipe_ctx *pipe_ctx)
1050 DC_LOGGER_INIT(dc->ctx->logger);
1052 if (!pipe_ctx->plane_res.hubp || pipe_ctx->plane_res.hubp->power_gated)
1055 dc->hwss.plane_atomic_disable(dc, pipe_ctx);
1057 apply_DEGVIDCN10_253_wa(dc);
1059 DC_LOG_DC("Power down front end %d\n",
1060 pipe_ctx->pipe_idx);
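/* Bring all pipes into a known disabled state at init: blank and lock any
 * enabled timing generators, reset the MPC muxes, disconnect and disable
 * every front-end pipe, then re-initialize the TGs. Pipes carrying a
 * seamless-boot stream are left untouched.
 */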
1063 static void dcn10_init_pipes(struct dc *dc, struct dc_state *context)
1066 bool can_apply_seamless_boot = false;
1068 for (i = 0; i < context->stream_count; i++) {
1069 if (context->streams[i]->apply_seamless_boot_optimization) {
1070 can_apply_seamless_boot = true;
1075 for (i = 0; i < dc->res_pool->pipe_count; i++) {
1076 struct timing_generator *tg = dc->res_pool->timing_generators[i];
1077 struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
1079 /* We assume pipe_ctx does not map irregularly to a non-preferred front
1080 * end. If pipe_ctx->stream is not NULL, the pipe will be used,
1081 * so don't disable it.
1083 if (pipe_ctx->stream != NULL && can_apply_seamless_boot)
1086 /* Blank controller using driver code instead of
1089 if (tg->funcs->is_tg_enabled(tg)) {
1090 if (dc->hwss.init_blank != NULL) {
1091 dc->hwss.init_blank(dc, tg);
1092 tg->funcs->lock(tg);
1094 tg->funcs->lock(tg);
1095 tg->funcs->set_blank(tg, true);
1096 hwss_wait_for_blank_complete(tg);
1101 for (i = 0; i < dc->res_pool->pipe_count; i++) {
1102 struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
1104 /* Cannot reset the MPC mux if seamless boot */
1105 if (pipe_ctx->stream != NULL && can_apply_seamless_boot)
1108 dc->res_pool->mpc->funcs->mpc_init_single_inst(
1109 dc->res_pool->mpc, i);
1112 for (i = 0; i < dc->res_pool->pipe_count; i++) {
1113 struct timing_generator *tg = dc->res_pool->timing_generators[i];
1114 struct hubp *hubp = dc->res_pool->hubps[i];
1115 struct dpp *dpp = dc->res_pool->dpps[i];
1116 struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
1118 /* We assume pipe_ctx does not map irregularly to a non-preferred front
1119 * end. If pipe_ctx->stream is not NULL, the pipe will be used,
1120 * so don't disable it.
1122 if (can_apply_seamless_boot &&
1123 pipe_ctx->stream != NULL &&
1124 pipe_ctx->stream_res.tg->funcs->is_tg_enabled(
1125 pipe_ctx->stream_res.tg))
1128 /* Disable on the current state so the new one isn't cleared. */
1129 pipe_ctx = &dc->current_state->res_ctx.pipe_ctx[i];
1131 dpp->funcs->dpp_reset(dpp);
1133 pipe_ctx->stream_res.tg = tg;
1134 pipe_ctx->pipe_idx = i;
1136 pipe_ctx->plane_res.hubp = hubp;
1137 pipe_ctx->plane_res.dpp = dpp;
1138 pipe_ctx->plane_res.mpcc_inst = dpp->inst;
1139 hubp->mpcc_id = dpp->inst;
1140 hubp->opp_id = OPP_ID_INVALID;
1141 hubp->power_gated = false;
1143 dc->res_pool->opps[i]->mpc_tree_params.opp_id = dc->res_pool->opps[i]->inst;
1144 dc->res_pool->opps[i]->mpc_tree_params.opp_list = NULL;
1145 dc->res_pool->opps[i]->mpcc_disconnect_pending[pipe_ctx->plane_res.mpcc_inst] = true;
1146 pipe_ctx->stream_res.opp = dc->res_pool->opps[i];
1148 dc->hwss.plane_atomic_disconnect(dc, pipe_ctx);
1150 if (tg->funcs->is_tg_enabled(tg))
1151 tg->funcs->unlock(tg);
1153 dc->hwss.disable_plane(dc, pipe_ctx);
1155 pipe_ctx->stream_res.tg = NULL;
1156 pipe_ctx->plane_res.hubp = NULL;
1158 tg->funcs->tg_init(tg);
1162 static void dcn10_init_hw(struct dc *dc)
1165 struct abm *abm = dc->res_pool->abm;
1166 struct dmcu *dmcu = dc->res_pool->dmcu;
1167 struct dce_hwseq *hws = dc->hwseq;
1168 struct dc_bios *dcb = dc->ctx->dc_bios;
1169 struct resource_pool *res_pool = dc->res_pool;
1171 if (dc->clk_mgr && dc->clk_mgr->funcs->init_clocks)
1172 dc->clk_mgr->funcs->init_clocks(dc->clk_mgr);
1174 // Initialize the dccg
1175 if (dc->res_pool->dccg && dc->res_pool->dccg->funcs->dccg_init)
1176 dc->res_pool->dccg->funcs->dccg_init(res_pool->dccg);
1178 if (IS_FPGA_MAXIMUS_DC(dc->ctx->dce_environment)) {
1180 REG_WRITE(REFCLK_CNTL, 0);
1181 REG_UPDATE(DCHUBBUB_GLOBAL_TIMER_CNTL, DCHUBBUB_GLOBAL_TIMER_ENABLE, 1);
1182 REG_WRITE(DIO_MEM_PWR_CTRL, 0);
1184 if (!dc->debug.disable_clock_gate) {
1185 /* enable all DCN clock gating */
1186 REG_WRITE(DCCG_GATE_DISABLE_CNTL, 0);
1188 REG_WRITE(DCCG_GATE_DISABLE_CNTL2, 0);
1190 REG_UPDATE(DCFCLK_CNTL, DCFCLK_GATE_DIS, 0);
1193 //Enable ability to power gate / don't force power on permanently
1194 dc->hwss.enable_power_gating_plane(hws, true);
1199 if (!dcb->funcs->is_accelerated_mode(dcb)) {
1200 dc->hwss.bios_golden_init(dc);
1201 if (dc->ctx->dc_bios->fw_info_valid) {
1202 res_pool->ref_clocks.xtalin_clock_inKhz =
1203 dc->ctx->dc_bios->fw_info.pll_info.crystal_frequency;
1205 if (!IS_FPGA_MAXIMUS_DC(dc->ctx->dce_environment)) {
1206 if (res_pool->dccg && res_pool->hubbub) {
1208 (res_pool->dccg->funcs->get_dccg_ref_freq)(res_pool->dccg,
1209 dc->ctx->dc_bios->fw_info.pll_info.crystal_frequency,
1210 &res_pool->ref_clocks.dccg_ref_clock_inKhz);
1212 (res_pool->hubbub->funcs->get_dchub_ref_freq)(res_pool->hubbub,
1213 res_pool->ref_clocks.dccg_ref_clock_inKhz,
1214 &res_pool->ref_clocks.dchub_ref_clock_inKhz);
1216 // Not all ASICs have DCCG sw component
1217 res_pool->ref_clocks.dccg_ref_clock_inKhz =
1218 res_pool->ref_clocks.xtalin_clock_inKhz;
1219 res_pool->ref_clocks.dchub_ref_clock_inKhz =
1220 res_pool->ref_clocks.xtalin_clock_inKhz;
1224 ASSERT_CRITICAL(false);
1225 dc->hwss.disable_vga(dc->hwseq);
1228 for (i = 0; i < dc->link_count; i++) {
1229 /* Power up AND update implementation according to the
1230 * required signal (which may be different from the
1231 * default signal on connector).
1233 struct dc_link *link = dc->links[i];
1235 link->link_enc->funcs->hw_init(link->link_enc);
1237 /* Check for enabled DIG to identify enabled display */
1238 if (link->link_enc->funcs->is_dig_enabled &&
1239 link->link_enc->funcs->is_dig_enabled(link->link_enc))
1240 link->link_status.link_active = true;
1243 /* Power gate DSCs */
1244 #ifdef CONFIG_DRM_AMD_DC_DSC_SUPPORT
1245 for (i = 0; i < res_pool->res_cap->num_dsc; i++)
1246 if (dc->hwss.dsc_pg_control != NULL)
1247 dc->hwss.dsc_pg_control(hws, res_pool->dscs[i]->inst, false);
1250 /* If taking control over from VBIOS, we may want to optimize our first
1251 * mode set, so we need to skip powering down pipes until we know which
1252 * pipes we want to use.
1253 * Otherwise, if taking control is not possible, we need to power
1256 if (dcb->funcs->is_accelerated_mode(dcb) || dc->config.power_down_display_on_boot) {
1257 dc->hwss.init_pipes(dc, dc->current_state);
1258 for (i = 0; i < res_pool->pipe_count; i++) {
1259 struct hubp *hubp = res_pool->hubps[i];
1260 struct dpp *dpp = res_pool->dpps[i];
1262 hubp->funcs->hubp_init(hubp);
1263 res_pool->opps[i]->mpc_tree_params.opp_id = res_pool->opps[i]->inst;
1264 dc->hwss.plane_atomic_power_down(dc, dpp, hubp);
1267 apply_DEGVIDCN10_253_wa(dc);
1271 for (i = 0; i < res_pool->audio_count; i++) {
1272 struct audio *audio = res_pool->audios[i];
1274 audio->funcs->hw_init(audio);
1278 abm->funcs->init_backlight(abm);
1279 abm->funcs->abm_init(abm);
1283 dmcu->funcs->dmcu_init(dmcu);
1285 if (abm != NULL && dmcu != NULL)
1286 abm->dmcu_is_running = dmcu->funcs->is_dmcu_initialized(dmcu);
1288 /* power up the AFMT HDMI memory; TODO: may move to output enable/disable to save power */
1289 REG_WRITE(DIO_MEM_PWR_CTRL, 0);
1291 if (!dc->debug.disable_clock_gate) {
1292 /* enable all DCN clock gating */
1293 REG_WRITE(DCCG_GATE_DISABLE_CNTL, 0);
1295 REG_WRITE(DCCG_GATE_DISABLE_CNTL2, 0);
1297 REG_UPDATE(DCFCLK_CNTL, DCFCLK_GATE_DIS, 0);
1300 dc->hwss.enable_power_gating_plane(dc->hwseq, true);
1303 static void dcn10_reset_hw_ctx_wrap(
1305 struct dc_state *context)
1310 for (i = dc->res_pool->pipe_count - 1; i >= 0 ; i--) {
1311 struct pipe_ctx *pipe_ctx_old =
1312 &dc->current_state->res_ctx.pipe_ctx[i];
1313 struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
1315 if (!pipe_ctx_old->stream)
1318 if (pipe_ctx_old->top_pipe)
1321 if (!pipe_ctx->stream ||
1322 pipe_need_reprogram(pipe_ctx_old, pipe_ctx)) {
1323 struct clock_source *old_clk = pipe_ctx_old->clock_source;
1325 dcn10_reset_back_end_for_pipe(dc, pipe_ctx_old, dc->current_state);
1326 if (dc->hwss.enable_stream_gating)
1327 dc->hwss.enable_stream_gating(dc, pipe_ctx);
1329 old_clk->funcs->cs_power_down(old_clk);
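/* For side-by-side / top-and-bottom stereo on the secondary split pipe,
 * temporarily replace the left-eye surface address with the right-eye one;
 * returns true so the caller can restore the original address after
 * programming the flip.
 */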
1334 static bool patch_address_for_sbs_tb_stereo(
1335 struct pipe_ctx *pipe_ctx, PHYSICAL_ADDRESS_LOC *addr)
1337 struct dc_plane_state *plane_state = pipe_ctx->plane_state;
1338 bool sec_split = pipe_ctx->top_pipe &&
1339 pipe_ctx->top_pipe->plane_state == pipe_ctx->plane_state;
1340 if (sec_split && plane_state->address.type == PLN_ADDR_TYPE_GRPH_STEREO &&
1341 (pipe_ctx->stream->timing.timing_3d_format ==
1342 TIMING_3D_FORMAT_SIDE_BY_SIDE ||
1343 pipe_ctx->stream->timing.timing_3d_format ==
1344 TIMING_3D_FORMAT_TOP_AND_BOTTOM)) {
1345 *addr = plane_state->address.grph_stereo.left_addr;
1346 plane_state->address.grph_stereo.left_addr =
1347 plane_state->address.grph_stereo.right_addr;
1350 if (pipe_ctx->stream->view_format != VIEW_3D_FORMAT_NONE &&
1351 plane_state->address.type != PLN_ADDR_TYPE_GRPH_STEREO) {
1352 plane_state->address.type = PLN_ADDR_TYPE_GRPH_STEREO;
1353 plane_state->address.grph_stereo.right_addr =
1354 plane_state->address.grph_stereo.left_addr;
1362 static void dcn10_update_plane_addr(const struct dc *dc, struct pipe_ctx *pipe_ctx)
1364 bool addr_patched = false;
1365 PHYSICAL_ADDRESS_LOC addr;
1366 struct dc_plane_state *plane_state = pipe_ctx->plane_state;
1368 if (plane_state == NULL)
1371 addr_patched = patch_address_for_sbs_tb_stereo(pipe_ctx, &addr);
1373 pipe_ctx->plane_res.hubp->funcs->hubp_program_surface_flip_and_addr(
1374 pipe_ctx->plane_res.hubp,
1375 &plane_state->address,
1376 plane_state->flip_immediate);
1378 plane_state->status.requested_address = plane_state->address;
1380 if (plane_state->flip_immediate)
1381 plane_state->status.current_address = plane_state->address;
1384 pipe_ctx->plane_state->address.grph_stereo.left_addr = addr;
1387 static bool dcn10_set_input_transfer_func(struct pipe_ctx *pipe_ctx,
1388 const struct dc_plane_state *plane_state)
1390 struct dpp *dpp_base = pipe_ctx->plane_res.dpp;
1391 const struct dc_transfer_func *tf = NULL;
1394 if (dpp_base == NULL)
1397 if (plane_state->in_transfer_func)
1398 tf = plane_state->in_transfer_func;
1400 if (plane_state->gamma_correction &&
1401 !dpp_base->ctx->dc->debug.always_use_regamma
1402 && !plane_state->gamma_correction->is_identity
1403 && dce_use_lut(plane_state->format))
1404 dpp_base->funcs->dpp_program_input_lut(dpp_base, plane_state->gamma_correction);
1407 dpp_base->funcs->dpp_set_degamma(dpp_base, IPP_DEGAMMA_MODE_BYPASS);
1408 else if (tf->type == TF_TYPE_PREDEFINED) {
1410 case TRANSFER_FUNCTION_SRGB:
1411 dpp_base->funcs->dpp_set_degamma(dpp_base, IPP_DEGAMMA_MODE_HW_sRGB);
1413 case TRANSFER_FUNCTION_BT709:
1414 dpp_base->funcs->dpp_set_degamma(dpp_base, IPP_DEGAMMA_MODE_HW_xvYCC);
1416 case TRANSFER_FUNCTION_LINEAR:
1417 dpp_base->funcs->dpp_set_degamma(dpp_base, IPP_DEGAMMA_MODE_BYPASS);
1419 case TRANSFER_FUNCTION_PQ:
1424 } else if (tf->type == TF_TYPE_BYPASS) {
1425 dpp_base->funcs->dpp_set_degamma(dpp_base, IPP_DEGAMMA_MODE_BYPASS);
1427 cm_helper_translate_curve_to_degamma_hw_format(tf,
1428 &dpp_base->degamma_params);
1429 dpp_base->funcs->dpp_program_degamma_pwl(dpp_base,
1430 &dpp_base->degamma_params);
1438 dcn10_set_output_transfer_func(struct pipe_ctx *pipe_ctx,
1439 const struct dc_stream_state *stream)
1441 struct dpp *dpp = pipe_ctx->plane_res.dpp;
1446 dpp->regamma_params.hw_points_num = GAMMA_HW_POINTS_NUM;
1448 if (stream->out_transfer_func &&
1449 stream->out_transfer_func->type == TF_TYPE_PREDEFINED &&
1450 stream->out_transfer_func->tf == TRANSFER_FUNCTION_SRGB)
1451 dpp->funcs->dpp_program_regamma_pwl(dpp, NULL, OPP_REGAMMA_SRGB);
1453 /* dcn10_translate_regamma_to_hw_format takes 750us, only do it when full
1456 else if (cm_helper_translate_curve_to_hw_format(
1457 stream->out_transfer_func,
1458 &dpp->regamma_params, false)) {
1459 dpp->funcs->dpp_program_regamma_pwl(
1461 &dpp->regamma_params, OPP_REGAMMA_USER);
1463 dpp->funcs->dpp_program_regamma_pwl(dpp, NULL, OPP_REGAMMA_BYPASS);
1468 static void dcn10_pipe_control_lock(
1470 struct pipe_ctx *pipe,
1473 /* Use the TG master update lock to lock everything on the TG;
1474 * therefore only the top pipe needs to take the lock.
1479 if (dc->debug.sanity_checks)
1480 dcn10_verify_allow_pstate_change_high(dc);
1483 pipe->stream_res.tg->funcs->lock(pipe->stream_res.tg);
1485 pipe->stream_res.tg->funcs->unlock(pipe->stream_res.tg);
1487 if (dc->debug.sanity_checks)
1488 dcn10_verify_allow_pstate_change_high(dc);
1491 static bool wait_for_reset_trigger_to_occur(
1492 struct dc_context *dc_ctx,
1493 struct timing_generator *tg)
1497 /* To avoid an endless loop, we wait at most
1498 * frames_to_wait_on_triggered_reset frames for the reset to occur. */
1499 const uint32_t frames_to_wait_on_triggered_reset = 10;
1502 for (i = 0; i < frames_to_wait_on_triggered_reset; i++) {
1504 if (!tg->funcs->is_counter_moving(tg)) {
1505 DC_ERROR("TG counter is not moving!\n");
1509 if (tg->funcs->did_triggered_reset_occur(tg)) {
1511 /* usually occurs at i=1 */
1512 DC_SYNC_INFO("GSL: reset occurred at wait count: %d\n",
1517 /* Wait for one frame. */
1518 tg->funcs->wait_for_state(tg, CRTC_STATE_VACTIVE);
1519 tg->funcs->wait_for_state(tg, CRTC_STATE_VBLANK);
1523 DC_ERROR("GSL: Timeout on reset trigger!\n");
1528 static void dcn10_enable_timing_synchronization(
1532 struct pipe_ctx *grouped_pipes[])
1534 struct dc_context *dc_ctx = dc->ctx;
1537 DC_SYNC_INFO("Setting up OTG reset trigger\n");
1539 for (i = 1; i < group_size; i++)
1540 grouped_pipes[i]->stream_res.tg->funcs->enable_reset_trigger(
1541 grouped_pipes[i]->stream_res.tg,
1542 grouped_pipes[0]->stream_res.tg->inst);
1544 DC_SYNC_INFO("Waiting for trigger\n");
1546 /* Only one pipe needs to be checked for the reset, since all the others
1547 * are synchronized. Look at the last pipe programmed to reset.
1550 wait_for_reset_trigger_to_occur(dc_ctx, grouped_pipes[1]->stream_res.tg);
1551 for (i = 1; i < group_size; i++)
1552 grouped_pipes[i]->stream_res.tg->funcs->disable_reset_trigger(
1553 grouped_pipes[i]->stream_res.tg);
1555 DC_SYNC_INFO("Sync complete\n");
1558 static void dcn10_enable_per_frame_crtc_position_reset(
1561 struct pipe_ctx *grouped_pipes[])
1563 struct dc_context *dc_ctx = dc->ctx;
1566 DC_SYNC_INFO("Setting up\n");
1567 for (i = 0; i < group_size; i++)
1568 if (grouped_pipes[i]->stream_res.tg->funcs->enable_crtc_reset)
1569 grouped_pipes[i]->stream_res.tg->funcs->enable_crtc_reset(
1570 grouped_pipes[i]->stream_res.tg,
1572 &grouped_pipes[i]->stream->triggered_crtc_reset);
1574 DC_SYNC_INFO("Waiting for trigger\n");
1576 for (i = 0; i < group_size; i++)
1577 wait_for_reset_trigger_to_occur(dc_ctx, grouped_pipes[i]->stream_res.tg);
1579 DC_SYNC_INFO("Multi-display sync is complete\n");
1582 /*static void print_rq_dlg_ttu(
1584 struct pipe_ctx *pipe_ctx)
1586 DC_LOG_BANDWIDTH_CALCS(core_dc->ctx->logger,
1587 "\n============== DML TTU Output parameters [%d] ==============\n"
1588 "qos_level_low_wm: %d, \n"
1589 "qos_level_high_wm: %d, \n"
1590 "min_ttu_vblank: %d, \n"
1591 "qos_level_flip: %d, \n"
1592 "refcyc_per_req_delivery_l: %d, \n"
1593 "qos_level_fixed_l: %d, \n"
1594 "qos_ramp_disable_l: %d, \n"
1595 "refcyc_per_req_delivery_pre_l: %d, \n"
1596 "refcyc_per_req_delivery_c: %d, \n"
1597 "qos_level_fixed_c: %d, \n"
1598 "qos_ramp_disable_c: %d, \n"
1599 "refcyc_per_req_delivery_pre_c: %d\n"
1600 "=============================================================\n",
1602 pipe_ctx->ttu_regs.qos_level_low_wm,
1603 pipe_ctx->ttu_regs.qos_level_high_wm,
1604 pipe_ctx->ttu_regs.min_ttu_vblank,
1605 pipe_ctx->ttu_regs.qos_level_flip,
1606 pipe_ctx->ttu_regs.refcyc_per_req_delivery_l,
1607 pipe_ctx->ttu_regs.qos_level_fixed_l,
1608 pipe_ctx->ttu_regs.qos_ramp_disable_l,
1609 pipe_ctx->ttu_regs.refcyc_per_req_delivery_pre_l,
1610 pipe_ctx->ttu_regs.refcyc_per_req_delivery_c,
1611 pipe_ctx->ttu_regs.qos_level_fixed_c,
1612 pipe_ctx->ttu_regs.qos_ramp_disable_c,
1613 pipe_ctx->ttu_regs.refcyc_per_req_delivery_pre_c
1616 DC_LOG_BANDWIDTH_CALCS(core_dc->ctx->logger,
1617 "\n============== DML DLG Output parameters [%d] ==============\n"
1618 "refcyc_h_blank_end: %d, \n"
1619 "dlg_vblank_end: %d, \n"
1620 "min_dst_y_next_start: %d, \n"
1621 "refcyc_per_htotal: %d, \n"
1622 "refcyc_x_after_scaler: %d, \n"
1623 "dst_y_after_scaler: %d, \n"
1624 "dst_y_prefetch: %d, \n"
1625 "dst_y_per_vm_vblank: %d, \n"
1626 "dst_y_per_row_vblank: %d, \n"
1627 "ref_freq_to_pix_freq: %d, \n"
1628 "vratio_prefetch: %d, \n"
1629 "refcyc_per_pte_group_vblank_l: %d, \n"
1630 "refcyc_per_meta_chunk_vblank_l: %d, \n"
1631 "dst_y_per_pte_row_nom_l: %d, \n"
1632 "refcyc_per_pte_group_nom_l: %d, \n",
1634 pipe_ctx->dlg_regs.refcyc_h_blank_end,
1635 pipe_ctx->dlg_regs.dlg_vblank_end,
1636 pipe_ctx->dlg_regs.min_dst_y_next_start,
1637 pipe_ctx->dlg_regs.refcyc_per_htotal,
1638 pipe_ctx->dlg_regs.refcyc_x_after_scaler,
1639 pipe_ctx->dlg_regs.dst_y_after_scaler,
1640 pipe_ctx->dlg_regs.dst_y_prefetch,
1641 pipe_ctx->dlg_regs.dst_y_per_vm_vblank,
1642 pipe_ctx->dlg_regs.dst_y_per_row_vblank,
1643 pipe_ctx->dlg_regs.ref_freq_to_pix_freq,
1644 pipe_ctx->dlg_regs.vratio_prefetch,
1645 pipe_ctx->dlg_regs.refcyc_per_pte_group_vblank_l,
1646 pipe_ctx->dlg_regs.refcyc_per_meta_chunk_vblank_l,
1647 pipe_ctx->dlg_regs.dst_y_per_pte_row_nom_l,
1648 pipe_ctx->dlg_regs.refcyc_per_pte_group_nom_l
1651 DC_LOG_BANDWIDTH_CALCS(core_dc->ctx->logger,
1652 "\ndst_y_per_meta_row_nom_l: %d, \n"
1653 "refcyc_per_meta_chunk_nom_l: %d, \n"
1654 "refcyc_per_line_delivery_pre_l: %d, \n"
1655 "refcyc_per_line_delivery_l: %d, \n"
1656 "vratio_prefetch_c: %d, \n"
1657 "refcyc_per_pte_group_vblank_c: %d, \n"
1658 "refcyc_per_meta_chunk_vblank_c: %d, \n"
1659 "dst_y_per_pte_row_nom_c: %d, \n"
1660 "refcyc_per_pte_group_nom_c: %d, \n"
1661 "dst_y_per_meta_row_nom_c: %d, \n"
1662 "refcyc_per_meta_chunk_nom_c: %d, \n"
1663 "refcyc_per_line_delivery_pre_c: %d, \n"
1664 "refcyc_per_line_delivery_c: %d \n"
1665 "========================================================\n",
1666 pipe_ctx->dlg_regs.dst_y_per_meta_row_nom_l,
1667 pipe_ctx->dlg_regs.refcyc_per_meta_chunk_nom_l,
1668 pipe_ctx->dlg_regs.refcyc_per_line_delivery_pre_l,
1669 pipe_ctx->dlg_regs.refcyc_per_line_delivery_l,
1670 pipe_ctx->dlg_regs.vratio_prefetch_c,
1671 pipe_ctx->dlg_regs.refcyc_per_pte_group_vblank_c,
1672 pipe_ctx->dlg_regs.refcyc_per_meta_chunk_vblank_c,
1673 pipe_ctx->dlg_regs.dst_y_per_pte_row_nom_c,
1674 pipe_ctx->dlg_regs.refcyc_per_pte_group_nom_c,
1675 pipe_ctx->dlg_regs.dst_y_per_meta_row_nom_c,
1676 pipe_ctx->dlg_regs.refcyc_per_meta_chunk_nom_c,
1677 pipe_ctx->dlg_regs.refcyc_per_line_delivery_pre_c,
1678 pipe_ctx->dlg_regs.refcyc_per_line_delivery_c
1681 DC_LOG_BANDWIDTH_CALCS(core_dc->ctx->logger,
1682 "\n============== DML RQ Output parameters [%d] ==============\n"
1684 "min_chunk_size: %d \n"
1685 "meta_chunk_size: %d \n"
1686 "min_meta_chunk_size: %d \n"
1687 "dpte_group_size: %d \n"
1688 "mpte_group_size: %d \n"
1689 "swath_height: %d \n"
1690 "pte_row_height_linear: %d \n"
1691 "========================================================\n",
1693 pipe_ctx->rq_regs.rq_regs_l.chunk_size,
1694 pipe_ctx->rq_regs.rq_regs_l.min_chunk_size,
1695 pipe_ctx->rq_regs.rq_regs_l.meta_chunk_size,
1696 pipe_ctx->rq_regs.rq_regs_l.min_meta_chunk_size,
1697 pipe_ctx->rq_regs.rq_regs_l.dpte_group_size,
1698 pipe_ctx->rq_regs.rq_regs_l.mpte_group_size,
1699 pipe_ctx->rq_regs.rq_regs_l.swath_height,
1700 pipe_ctx->rq_regs.rq_regs_l.pte_row_height_linear
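/* Read the MC system aperture registers and convert them to byte addresses:
 * the default physical page number is in 4 KB pages (hence << 12) and the
 * aperture low/high logical addresses are in 256 KB units (hence << 18).
 */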
1705 static void mmhub_read_vm_system_aperture_settings(struct dcn10_hubp *hubp1,
1706 struct vm_system_aperture_param *apt,
1707 struct dce_hwseq *hws)
1709 PHYSICAL_ADDRESS_LOC physical_page_number;
1710 uint32_t logical_addr_low;
1711 uint32_t logical_addr_high;
1713 REG_GET(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR_MSB,
1714 PHYSICAL_PAGE_NUMBER_MSB, &physical_page_number.high_part);
1715 REG_GET(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR_LSB,
1716 PHYSICAL_PAGE_NUMBER_LSB, &physical_page_number.low_part);
1718 REG_GET(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
1719 LOGICAL_ADDR, &logical_addr_low);
1721 REG_GET(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
1722 LOGICAL_ADDR, &logical_addr_high);
1724 apt->sys_default.quad_part = physical_page_number.quad_part << 12;
1725 apt->sys_low.quad_part = (int64_t)logical_addr_low << 18;
1726 apt->sys_high.quad_part = (int64_t)logical_addr_high << 18;
1729 /* Temporarily read the settings from registers; in the future the values will come from the KMD directly */
1730 static void mmhub_read_vm_context0_settings(struct dcn10_hubp *hubp1,
1731 struct vm_context0_param *vm0,
1732 struct dce_hwseq *hws)
1734 PHYSICAL_ADDRESS_LOC fb_base;
1735 PHYSICAL_ADDRESS_LOC fb_offset;
1736 uint32_t fb_base_value;
1737 uint32_t fb_offset_value;
1739 REG_GET(DCHUBBUB_SDPIF_FB_BASE, SDPIF_FB_BASE, &fb_base_value);
1740 REG_GET(DCHUBBUB_SDPIF_FB_OFFSET, SDPIF_FB_OFFSET, &fb_offset_value);
1742 REG_GET(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR_HI32,
1743 PAGE_DIRECTORY_ENTRY_HI32, &vm0->pte_base.high_part);
1744 REG_GET(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR_LO32,
1745 PAGE_DIRECTORY_ENTRY_LO32, &vm0->pte_base.low_part);
1747 REG_GET(VM_CONTEXT0_PAGE_TABLE_START_ADDR_HI32,
1748 LOGICAL_PAGE_NUMBER_HI4, &vm0->pte_start.high_part);
1749 REG_GET(VM_CONTEXT0_PAGE_TABLE_START_ADDR_LO32,
1750 LOGICAL_PAGE_NUMBER_LO32, &vm0->pte_start.low_part);
1752 REG_GET(VM_CONTEXT0_PAGE_TABLE_END_ADDR_HI32,
1753 LOGICAL_PAGE_NUMBER_HI4, &vm0->pte_end.high_part);
1754 REG_GET(VM_CONTEXT0_PAGE_TABLE_END_ADDR_LO32,
1755 LOGICAL_PAGE_NUMBER_LO32, &vm0->pte_end.low_part);
1757 REG_GET(VM_L2_PROTECTION_FAULT_DEFAULT_ADDR_HI32,
1758 PHYSICAL_PAGE_ADDR_HI4, &vm0->fault_default.high_part);
1759 REG_GET(VM_L2_PROTECTION_FAULT_DEFAULT_ADDR_LO32,
1760 PHYSICAL_PAGE_ADDR_LO32, &vm0->fault_default.low_part);
1763 * The value in VM_CONTEXT0_PAGE_TABLE_BASE_ADDR is in UMA space.
1764 * Therefore we need to compute
1765 * DCN_VM_CONTEXT0_PAGE_TABLE_BASE_ADDR = VM_CONTEXT0_PAGE_TABLE_BASE_ADDR
1766 * - DCHUBBUB_SDPIF_FB_OFFSET + DCHUBBUB_SDPIF_FB_BASE
1768 fb_base.quad_part = (uint64_t)fb_base_value << 24;
1769 fb_offset.quad_part = (uint64_t)fb_offset_value << 24;
1770 vm0->pte_base.quad_part += fb_base.quad_part;
1771 vm0->pte_base.quad_part -= fb_offset.quad_part;
1775 void dcn10_program_pte_vm(struct dce_hwseq *hws, struct hubp *hubp)
1777 struct dcn10_hubp *hubp1 = TO_DCN10_HUBP(hubp);
1778 struct vm_system_aperture_param apt = { {{ 0 } } };
1779 struct vm_context0_param vm0 = { { { 0 } } };
1781 mmhub_read_vm_system_aperture_settings(hubp1, &apt, hws);
1782 mmhub_read_vm_context0_settings(hubp1, &vm0, hws);
1784 hubp->funcs->hubp_set_vm_system_aperture_settings(hubp, &apt);
1785 hubp->funcs->hubp_set_vm_context0_settings(hubp, &vm0);
1788 static void dcn10_enable_plane(
1790 struct pipe_ctx *pipe_ctx,
1791 struct dc_state *context)
1793 struct dce_hwseq *hws = dc->hwseq;
1795 if (dc->debug.sanity_checks) {
1796 dcn10_verify_allow_pstate_change_high(dc);
1799 undo_DEGVIDCN10_253_wa(dc);
1801 power_on_plane(dc->hwseq,
1802 pipe_ctx->plane_res.hubp->inst);
1804 /* enable DCFCLK to the current DCHUB */
1805 pipe_ctx->plane_res.hubp->funcs->hubp_clk_cntl(pipe_ctx->plane_res.hubp, true);
1807 /* make sure OPP_PIPE_CLOCK_EN = 1 */
1808 pipe_ctx->stream_res.opp->funcs->opp_pipe_clock_control(
1809 pipe_ctx->stream_res.opp,
1812 /* TODO: enable/disable in dm as per update type.
1814 DC_LOG_DC(dc->ctx->logger,
1815 "Pipe:%d 0x%x: addr hi:0x%x, "
1818 " %d; dst: %d, %d, %d, %d;\n",
1821 plane_state->address.grph.addr.high_part,
1822 plane_state->address.grph.addr.low_part,
1823 plane_state->src_rect.x,
1824 plane_state->src_rect.y,
1825 plane_state->src_rect.width,
1826 plane_state->src_rect.height,
1827 plane_state->dst_rect.x,
1828 plane_state->dst_rect.y,
1829 plane_state->dst_rect.width,
1830 plane_state->dst_rect.height);
1832 DC_LOG_DC(dc->ctx->logger,
1833 "Pipe %d: width, height, x, y format:%d\n"
1834 "viewport:%d, %d, %d, %d\n"
1835 "recout: %d, %d, %d, %d\n",
1837 plane_state->format,
1838 pipe_ctx->plane_res.scl_data.viewport.width,
1839 pipe_ctx->plane_res.scl_data.viewport.height,
1840 pipe_ctx->plane_res.scl_data.viewport.x,
1841 pipe_ctx->plane_res.scl_data.viewport.y,
1842 pipe_ctx->plane_res.scl_data.recout.width,
1843 pipe_ctx->plane_res.scl_data.recout.height,
1844 pipe_ctx->plane_res.scl_data.recout.x,
1845 pipe_ctx->plane_res.scl_data.recout.y);
1846 print_rq_dlg_ttu(dc, pipe_ctx);
1849 if (dc->config.gpu_vm_support)
1850 dcn10_program_pte_vm(hws, pipe_ctx->plane_res.hubp);
1852 if (dc->debug.sanity_checks) {
1853 dcn10_verify_allow_pstate_change_high(dc);
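/* Program the DPP gamut remap: bypass unless the stream provides a remap
 * matrix, in which case the temperature matrix coefficients are applied.
 */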
1857 static void dcn10_program_gamut_remap(struct pipe_ctx *pipe_ctx)
1860 struct dpp_grph_csc_adjustment adjust;
1861 memset(&adjust, 0, sizeof(adjust));
1862 adjust.gamut_adjust_type = GRAPHICS_GAMUT_ADJUST_TYPE_BYPASS;
1865 if (pipe_ctx->stream->gamut_remap_matrix.enable_remap == true) {
1866 adjust.gamut_adjust_type = GRAPHICS_GAMUT_ADJUST_TYPE_SW;
1867 for (i = 0; i < CSC_TEMPERATURE_MATRIX_SIZE; i++)
1868 adjust.temperature_matrix[i] =
1869 pipe_ctx->stream->gamut_remap_matrix.matrix[i];
1872 pipe_ctx->plane_res.dpp->funcs->dpp_set_gamut_remap(pipe_ctx->plane_res.dpp, &adjust);
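/* Program the output CSC: use the stream's CSC adjustment matrix when it is
 * enabled, otherwise fall back to the default matrix for the target
 * colorspace.
 */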
1875 static void dcn10_program_output_csc(struct dc *dc,
1876 struct pipe_ctx *pipe_ctx,
1877 enum dc_color_space colorspace,
1881 if (pipe_ctx->stream->csc_color_matrix.enable_adjustment == true) {
1882 if (pipe_ctx->plane_res.dpp->funcs->dpp_set_csc_adjustment != NULL)
1883 pipe_ctx->plane_res.dpp->funcs->dpp_set_csc_adjustment(pipe_ctx->plane_res.dpp, matrix);
1885 if (pipe_ctx->plane_res.dpp->funcs->dpp_set_csc_default != NULL)
1886 pipe_ctx->plane_res.dpp->funcs->dpp_set_csc_default(pipe_ctx->plane_res.dpp, colorspace);
1890 bool is_lower_pipe_tree_visible(struct pipe_ctx *pipe_ctx)
1892 if (pipe_ctx->plane_state && pipe_ctx->plane_state->visible)
1894 if (pipe_ctx->bottom_pipe && is_lower_pipe_tree_visible(pipe_ctx->bottom_pipe))
1899 bool is_upper_pipe_tree_visible(struct pipe_ctx *pipe_ctx)
1901 if (pipe_ctx->plane_state && pipe_ctx->plane_state->visible)
1903 if (pipe_ctx->top_pipe && is_upper_pipe_tree_visible(pipe_ctx->top_pipe))
1908 bool is_pipe_tree_visible(struct pipe_ctx *pipe_ctx)
1910 if (pipe_ctx->plane_state && pipe_ctx->plane_state->visible)
1912 if (pipe_ctx->top_pipe && is_upper_pipe_tree_visible(pipe_ctx->top_pipe))
1914 if (pipe_ctx->bottom_pipe && is_lower_pipe_tree_visible(pipe_ctx->bottom_pipe))
1919 bool is_rgb_cspace(enum dc_color_space output_color_space)
1921 switch (output_color_space) {
1922 case COLOR_SPACE_SRGB:
1923 case COLOR_SPACE_SRGB_LIMITED:
1924 case COLOR_SPACE_2020_RGB_FULLRANGE:
1925 case COLOR_SPACE_2020_RGB_LIMITEDRANGE:
1926 case COLOR_SPACE_ADOBERGB:
1928 case COLOR_SPACE_YCBCR601:
1929 case COLOR_SPACE_YCBCR709:
1930 case COLOR_SPACE_YCBCR601_LIMITED:
1931 case COLOR_SPACE_YCBCR709_LIMITED:
1932 case COLOR_SPACE_2020_YCBCR:
1935 /* Add a case to the switch */
1936 BREAK_TO_DEBUGGER();
1941 void dcn10_get_surface_visual_confirm_color(
1942 const struct pipe_ctx *pipe_ctx,
1943 struct tg_color *color)
1945 uint32_t color_value = MAX_TG_COLOR_VALUE;
1947 switch (pipe_ctx->plane_res.scl_data.format) {
1948 case PIXEL_FORMAT_ARGB8888:
1949 /* set border color to red */
1950 color->color_r_cr = color_value;
1953 case PIXEL_FORMAT_ARGB2101010:
1954 /* set border color to blue */
1955 color->color_b_cb = color_value;
1957 case PIXEL_FORMAT_420BPP8:
1958 /* set border color to green */
1959 color->color_g_y = color_value;
1961 case PIXEL_FORMAT_420BPP10:
1962 /* set border color to yellow */
1963 color->color_g_y = color_value;
1964 color->color_r_cr = color_value;
1966 case PIXEL_FORMAT_FP16:
1967 /* set border color to white */
1968 color->color_r_cr = color_value;
1969 color->color_b_cb = color_value;
1970 color->color_g_y = color_value;
1977 void dcn10_get_hdr_visual_confirm_color(
1978 struct pipe_ctx *pipe_ctx,
1979 struct tg_color *color)
1981 uint32_t color_value = MAX_TG_COLOR_VALUE;
1983 // Determine the overscan color based on the top-most (desktop) plane's context
1984 struct pipe_ctx *top_pipe_ctx = pipe_ctx;
1986 while (top_pipe_ctx->top_pipe != NULL)
1987 top_pipe_ctx = top_pipe_ctx->top_pipe;
1989 switch (top_pipe_ctx->plane_res.scl_data.format) {
1990 case PIXEL_FORMAT_ARGB2101010:
1991 if (top_pipe_ctx->stream->out_transfer_func->tf == TRANSFER_FUNCTION_PQ) {
1992 /* HDR10, ARGB2101010 - set border color to red */
1993 color->color_r_cr = color_value;
1996 case PIXEL_FORMAT_FP16:
1997 if (top_pipe_ctx->stream->out_transfer_func->tf == TRANSFER_FUNCTION_PQ) {
1998 /* HDR10, FP16 - set border color to blue */
1999 color->color_b_cb = color_value;
2000 } else if (top_pipe_ctx->stream->out_transfer_func->tf == TRANSFER_FUNCTION_GAMMA22) {
2001 /* FreeSync 2 HDR - set border color to green */
2002 color->color_g_y = color_value;
2006 /* SDR - set border color to gray */
2007 color->color_r_cr = color_value/2;
2008 color->color_b_cb = color_value/2;
2009 color->color_g_y = color_value/2;
2014 static uint16_t fixed_point_to_int_frac(
2015 struct fixed31_32 arg,
2016 uint8_t integer_bits,
2017 uint8_t fractional_bits)
2020 int32_t divisor = 1 << fractional_bits;
2024 uint16_t d = (uint16_t)dc_fixpt_floor(
2028 if (d <= (uint16_t)(1 << integer_bits) - (1 / (uint16_t)divisor))
2029 numerator = (uint16_t)dc_fixpt_floor(
2034 numerator = dc_fixpt_floor(
2037 1LL << integer_bits),
2044 result = (uint16_t)numerator;
2046 result = (uint16_t)(
2047 (1 << (integer_bits + fractional_bits + 1)) + numerator);
2049 if ((result != 0) && dc_fixpt_lt(
2050 arg, dc_fixpt_zero))
2051 result |= 1 << (integer_bits + fractional_bits);
2056 void dcn10_build_prescale_params(struct dc_bias_and_scale *bias_and_scale,
2057 const struct dc_plane_state *plane_state)
2059 if (plane_state->format >= SURFACE_PIXEL_FORMAT_VIDEO_BEGIN
2060 && plane_state->format != SURFACE_PIXEL_FORMAT_INVALID
2061 && plane_state->input_csc_color_matrix.enable_adjustment
2062 && plane_state->coeff_reduction_factor.value != 0) {
2063 bias_and_scale->scale_blue = fixed_point_to_int_frac(
2064 dc_fixpt_mul(plane_state->coeff_reduction_factor,
2065 dc_fixpt_from_fraction(256, 255)),
2068 bias_and_scale->scale_red = bias_and_scale->scale_blue;
2069 bias_and_scale->scale_green = bias_and_scale->scale_blue;
2071 bias_and_scale->scale_blue = 0x2000;
2072 bias_and_scale->scale_red = 0x2000;
2073 bias_and_scale->scale_green = 0x2000;
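/* Note: the 0x2000 default above is 1 << 13, i.e. 1.0 assuming the bias/scale
 * registers take a fixed-point value with 13 fractional bits, consistent with
 * the int.frac encoding produced by fixed_point_to_int_frac() above.
 */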
2077 static void update_dpp(struct dpp *dpp, struct dc_plane_state *plane_state)
2079 struct dc_bias_and_scale bns_params = {0};
2081 // program the input csc
2082 dpp->funcs->dpp_setup(dpp,
2083 plane_state->format,
2084 EXPANSION_MODE_ZERO,
2085 plane_state->input_csc_color_matrix,
2086 #ifdef CONFIG_DRM_AMD_DC_DCN2_0
2087 plane_state->color_space,
2090 plane_state->color_space);
2093 //set scale and bias registers
2094 dcn10_build_prescale_params(&bns_params, plane_state);
2095 if (dpp->funcs->dpp_program_bias_and_scale)
2096 dpp->funcs->dpp_program_bias_and_scale(dpp, &bns_params);
2099 static void dcn10_update_mpcc(struct dc *dc, struct pipe_ctx *pipe_ctx)
2101 struct hubp *hubp = pipe_ctx->plane_res.hubp;
2102 struct mpcc_blnd_cfg blnd_cfg = {{0}};
2103 bool per_pixel_alpha = pipe_ctx->plane_state->per_pixel_alpha && pipe_ctx->bottom_pipe;
2105 struct mpcc *new_mpcc;
2106 struct mpc *mpc = dc->res_pool->mpc;
2107 struct mpc_tree *mpc_tree_params = &(pipe_ctx->stream_res.opp->mpc_tree_params);
2109 if (dc->debug.visual_confirm == VISUAL_CONFIRM_HDR) {
2110 dcn10_get_hdr_visual_confirm_color(
2111 pipe_ctx, &blnd_cfg.black_color);
2112 } else if (dc->debug.visual_confirm == VISUAL_CONFIRM_SURFACE) {
2113 dcn10_get_surface_visual_confirm_color(
2114 pipe_ctx, &blnd_cfg.black_color);
2116 color_space_to_black_color(
2117 dc, pipe_ctx->stream->output_color_space,
2118 &blnd_cfg.black_color);
2121 if (per_pixel_alpha)
2122 blnd_cfg.alpha_mode = MPCC_ALPHA_BLEND_MODE_PER_PIXEL_ALPHA;
2124 blnd_cfg.alpha_mode = MPCC_ALPHA_BLEND_MODE_GLOBAL_ALPHA;
2126 blnd_cfg.overlap_only = false;
2127 blnd_cfg.global_gain = 0xff;
2129 if (pipe_ctx->plane_state->global_alpha)
2130 blnd_cfg.global_alpha = pipe_ctx->plane_state->global_alpha_value;
2132 blnd_cfg.global_alpha = 0xff;
2134 /* DCN1.0 has output CM before MPC which seems to screw with
2135 * pre-multiplied alpha.
2137 blnd_cfg.pre_multiplied_alpha = is_rgb_cspace(
2138 pipe_ctx->stream->output_color_space)
2144 * Note: currently there is a bug in init_hw such that
2145 * on resume from hibernate, BIOS sets up MPCC0, and
2146 * we do mpcc_remove but the mpcc cannot go to idle
2147 * after remove. This causes us to pick mpcc1 here,
2148 * which causes a pstate hang for a yet-unknown reason.
2150 mpcc_id = hubp->inst;
2152 /* If there is no full update, we don't need to touch the MPC tree */
2153 if (!pipe_ctx->plane_state->update_flags.bits.full_update) {
2154 mpc->funcs->update_blending(mpc, &blnd_cfg, mpcc_id);
2158 /* check if this MPCC is already being used */
2159 new_mpcc = mpc->funcs->get_mpcc_for_dpp(mpc_tree_params, mpcc_id);
2160 /* remove MPCC if being used */
2161 if (new_mpcc != NULL)
2162 mpc->funcs->remove_mpcc(mpc, mpc_tree_params, new_mpcc);
2164 if (dc->debug.sanity_checks)
2165 mpc->funcs->assert_mpcc_idle_before_connect(
2166 dc->res_pool->mpc, mpcc_id);
2168 /* Call MPC to insert new plane */
2169 new_mpcc = mpc->funcs->insert_plane(dc->res_pool->mpc,
2177 ASSERT(new_mpcc != NULL);
2179 hubp->opp_id = pipe_ctx->stream_res.opp->inst;
2180 hubp->mpcc_id = mpcc_id;
2183 static void update_scaler(struct pipe_ctx *pipe_ctx)
2185 bool per_pixel_alpha =
2186 pipe_ctx->plane_state->per_pixel_alpha && pipe_ctx->bottom_pipe;
2188 pipe_ctx->plane_res.scl_data.lb_params.alpha_en = per_pixel_alpha;
2189 pipe_ctx->plane_res.scl_data.lb_params.depth = LB_PIXEL_DEPTH_30BPP;
2190 /* scaler configuration */
2191 pipe_ctx->plane_res.dpp->funcs->dpp_set_scaler(
2192 pipe_ctx->plane_res.dpp, &pipe_ctx->plane_res.scl_data);
2195 void update_dchubp_dpp(
2197 struct pipe_ctx *pipe_ctx,
2198 struct dc_state *context)
2200 struct hubp *hubp = pipe_ctx->plane_res.hubp;
2201 struct dpp *dpp = pipe_ctx->plane_res.dpp;
2202 struct dc_plane_state *plane_state = pipe_ctx->plane_state;
2203 struct plane_size size = plane_state->plane_size;
2204 unsigned int compat_level = 0;
2206 /* Depending on the DML calculation, the DPP clock value may change dynamically */
2207 /* If the requested max DPP clock is lower than the current dispclk, there is no need to
2210 if (plane_state->update_flags.bits.full_update) {
2211 bool should_divided_by_2 = context->bw_ctx.bw.dcn.clk.dppclk_khz <=
2212 dc->clk_mgr->clks.dispclk_khz / 2;
2214 dpp->funcs->dpp_dppclk_control(
2216 should_divided_by_2,
2219 if (dc->res_pool->dccg)
2220 dc->res_pool->dccg->funcs->update_dpp_dto(
2223 pipe_ctx->plane_res.bw.dppclk_khz,
2226 dc->clk_mgr->clks.dppclk_khz = should_divided_by_2 ?
2227 dc->clk_mgr->clks.dispclk_khz / 2 :
2228 dc->clk_mgr->clks.dispclk_khz;
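/* In other words, the DPP clock is derived from DISPCLK: when the DPP clock
 * required by the DML calculation is at most DISPCLK / 2, the divide-by-2 output
 * is selected and the DPP DTO is reprogrammed to match; otherwise the DPP runs at
 * full DISPCLK.
 */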
2231 /* TODO: Need an input parameter to tell which OTG the current DCHUB pipe ties to.
2232 * VTG is within DCHUBBUB, which is a common block shared by each pipe HUBP.
2233 * VTG has a 1:1 mapping with OTG. Each pipe HUBP will select which VTG
2235 if (plane_state->update_flags.bits.full_update) {
2236 hubp->funcs->hubp_vtg_sel(hubp, pipe_ctx->stream_res.tg->inst);
2238 hubp->funcs->hubp_setup(
2240 &pipe_ctx->dlg_regs,
2241 &pipe_ctx->ttu_regs,
2243 &pipe_ctx->pipe_dlg_param);
2244 hubp->funcs->hubp_setup_interdependent(
2246 &pipe_ctx->dlg_regs,
2247 &pipe_ctx->ttu_regs);
2250 size.surface_size = pipe_ctx->plane_res.scl_data.viewport;
2252 if (plane_state->update_flags.bits.full_update ||
2253 plane_state->update_flags.bits.bpp_change)
2254 update_dpp(dpp, plane_state);
2256 if (plane_state->update_flags.bits.full_update ||
2257 plane_state->update_flags.bits.per_pixel_alpha_change ||
2258 plane_state->update_flags.bits.global_alpha_change)
2259 dc->hwss.update_mpcc(dc, pipe_ctx);
2261 if (plane_state->update_flags.bits.full_update ||
2262 plane_state->update_flags.bits.per_pixel_alpha_change ||
2263 plane_state->update_flags.bits.global_alpha_change ||
2264 plane_state->update_flags.bits.scaling_change ||
2265 plane_state->update_flags.bits.position_change) {
2266 update_scaler(pipe_ctx);
2269 if (plane_state->update_flags.bits.full_update ||
2270 plane_state->update_flags.bits.scaling_change ||
2271 plane_state->update_flags.bits.position_change) {
2272 hubp->funcs->mem_program_viewport(
2274 &pipe_ctx->plane_res.scl_data.viewport,
2275 &pipe_ctx->plane_res.scl_data.viewport_c);
2278 if (pipe_ctx->stream->cursor_attributes.address.quad_part != 0) {
2279 dc->hwss.set_cursor_position(pipe_ctx);
2280 dc->hwss.set_cursor_attribute(pipe_ctx);
2282 if (dc->hwss.set_cursor_sdr_white_level)
2283 dc->hwss.set_cursor_sdr_white_level(pipe_ctx);
2286 if (plane_state->update_flags.bits.full_update) {
2288 dc->hwss.program_gamut_remap(pipe_ctx);
2290 dc->hwss.program_output_csc(dc,
2292 pipe_ctx->stream->output_color_space,
2293 pipe_ctx->stream->csc_color_matrix.matrix,
2294 pipe_ctx->stream_res.opp->inst);
2297 if (plane_state->update_flags.bits.full_update ||
2298 plane_state->update_flags.bits.pixel_format_change ||
2299 plane_state->update_flags.bits.horizontal_mirror_change ||
2300 plane_state->update_flags.bits.rotation_change ||
2301 plane_state->update_flags.bits.swizzle_change ||
2302 plane_state->update_flags.bits.dcc_change ||
2303 plane_state->update_flags.bits.bpp_change ||
2304 plane_state->update_flags.bits.scaling_change ||
2305 plane_state->update_flags.bits.plane_size_change) {
2306 hubp->funcs->hubp_program_surface_config(
2308 plane_state->format,
2309 &plane_state->tiling_info,
2311 plane_state->rotation,
2313 plane_state->horizontal_mirror,
2317 hubp->power_gated = false;
2319 dc->hwss.update_plane_addr(dc, pipe_ctx);
2321 if (is_pipe_tree_visible(pipe_ctx))
2322 hubp->funcs->set_blank(hubp, false);
2325 static void dcn10_blank_pixel_data(
2327 struct pipe_ctx *pipe_ctx,
2330 enum dc_color_space color_space;
2331 struct tg_color black_color = {0};
2332 struct stream_resource *stream_res = &pipe_ctx->stream_res;
2333 struct dc_stream_state *stream = pipe_ctx->stream;
2335 /* program otg blank color */
2336 color_space = stream->output_color_space;
2337 color_space_to_black_color(dc, color_space, &black_color);
2340 * The way 420 is packed, 2 channels carry the Y component and 1 channel
2341 * alternates between Cb and Cr, so both channels need the pixel value for Y
2344 if (stream->timing.pixel_encoding == PIXEL_ENCODING_YCBCR420)
2345 black_color.color_r_cr = black_color.color_g_y;
2348 if (stream_res->tg->funcs->set_blank_color)
2349 stream_res->tg->funcs->set_blank_color(
2354 if (stream_res->tg->funcs->set_blank)
2355 stream_res->tg->funcs->set_blank(stream_res->tg, blank);
2356 if (stream_res->abm) {
2357 stream_res->abm->funcs->set_pipe(stream_res->abm, stream_res->tg->inst + 1);
2358 stream_res->abm->funcs->set_abm_level(stream_res->abm, stream->abm_level);
2361 if (stream_res->abm)
2362 stream_res->abm->funcs->set_abm_immediate_disable(stream_res->abm);
2363 if (stream_res->tg->funcs->set_blank)
2364 stream_res->tg->funcs->set_blank(stream_res->tg, blank);
2368 void set_hdr_multiplier(struct pipe_ctx *pipe_ctx)
2370 struct fixed31_32 multiplier = dc_fixpt_from_fraction(
2371 pipe_ctx->plane_state->sdr_white_level, 80);
2372 uint32_t hw_mult = 0x1f000; // 1.0 default multiplier
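/* 0x1f000 encodes 1.0 in the custom float format programmed below, assuming the
 * usual sign / biased-exponent / mantissa layout: with 6 exponent bits (bias 31)
 * and 12 mantissa bits, 1.0 is the exponent field 0x1f shifted up by 12 bits,
 * i.e. 0x1f000. The multiplier itself is sdr_white_level / 80, 80 presumably
 * being the reference SDR white level in nits, and is only applied when the
 * level exceeds 80.
 */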
2373 struct custom_float_format fmt;
2375 fmt.exponenta_bits = 6;
2376 fmt.mantissa_bits = 12;
2379 if (pipe_ctx->plane_state->sdr_white_level > 80)
2380 convert_to_custom_float_format(multiplier, &fmt, &hw_mult);
2382 pipe_ctx->plane_res.dpp->funcs->dpp_set_hdr_multiplier(
2383 pipe_ctx->plane_res.dpp, hw_mult);
2386 void dcn10_program_pipe(
2388 struct pipe_ctx *pipe_ctx,
2389 struct dc_state *context)
2391 if (pipe_ctx->plane_state->update_flags.bits.full_update)
2392 dcn10_enable_plane(dc, pipe_ctx, context);
2394 update_dchubp_dpp(dc, pipe_ctx, context);
2396 set_hdr_multiplier(pipe_ctx);
2398 if (pipe_ctx->plane_state->update_flags.bits.full_update ||
2399 pipe_ctx->plane_state->update_flags.bits.in_transfer_func_change ||
2400 pipe_ctx->plane_state->update_flags.bits.gamma_change)
2401 dc->hwss.set_input_transfer_func(pipe_ctx, pipe_ctx->plane_state);
2403 /* dcn10_translate_regamma_to_hw_format takes 750us to finish,
2404 * so only do gamma programming for a full update.
2405 * TODO: This can be further optimized/cleaned up.
2406 * Always call this for now since it does a memcmp inside before
2407 * doing the heavy calculation and programming
2409 if (pipe_ctx->plane_state->update_flags.bits.full_update)
2410 dc->hwss.set_output_transfer_func(pipe_ctx, pipe_ctx->stream);
2413 static void program_all_pipe_in_tree(
2415 struct pipe_ctx *pipe_ctx,
2416 struct dc_state *context)
2418 if (pipe_ctx->top_pipe == NULL) {
2419 bool blank = !is_pipe_tree_visible(pipe_ctx);
2421 pipe_ctx->stream_res.tg->funcs->program_global_sync(
2422 pipe_ctx->stream_res.tg,
2423 pipe_ctx->pipe_dlg_param.vready_offset,
2424 pipe_ctx->pipe_dlg_param.vstartup_start,
2425 pipe_ctx->pipe_dlg_param.vupdate_offset,
2426 pipe_ctx->pipe_dlg_param.vupdate_width);
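/* The global sync points (VREADY, VSTARTUP, VUPDATE) come from the per-pipe DLG
 * parameters, which are produced by the DML/bandwidth calculation during validation.
 */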
2428 pipe_ctx->stream_res.tg->funcs->set_vtg_params(
2429 pipe_ctx->stream_res.tg, &pipe_ctx->stream->timing);
2431 dc->hwss.blank_pixel_data(dc, pipe_ctx, blank);
2435 if (pipe_ctx->plane_state != NULL)
2436 dcn10_program_pipe(dc, pipe_ctx, context);
2438 if (pipe_ctx->bottom_pipe != NULL && pipe_ctx->bottom_pipe != pipe_ctx)
2439 program_all_pipe_in_tree(dc, pipe_ctx->bottom_pipe, context);
2442 struct pipe_ctx *find_top_pipe_for_stream(
2444 struct dc_state *context,
2445 const struct dc_stream_state *stream)
2449 for (i = 0; i < dc->res_pool->pipe_count; i++) {
2450 struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
2451 struct pipe_ctx *old_pipe_ctx =
2452 &dc->current_state->res_ctx.pipe_ctx[i];
2454 if (!pipe_ctx->plane_state && !old_pipe_ctx->plane_state)
2457 if (pipe_ctx->stream != stream)
2460 if (!pipe_ctx->top_pipe)
2466 static void dcn10_apply_ctx_for_surface(
2468 const struct dc_stream_state *stream,
2470 struct dc_state *context)
2473 struct timing_generator *tg;
2474 uint32_t underflow_check_delay_us;
2475 bool removed_pipe[4] = { false };
2476 bool interdependent_update = false;
2477 struct pipe_ctx *top_pipe_to_program =
2478 find_top_pipe_for_stream(dc, context, stream);
2479 DC_LOGGER_INIT(dc->ctx->logger);
2481 if (!top_pipe_to_program)
2484 tg = top_pipe_to_program->stream_res.tg;
2486 interdependent_update = top_pipe_to_program->plane_state &&
2487 top_pipe_to_program->plane_state->update_flags.bits.full_update;
2489 underflow_check_delay_us = dc->debug.underflow_assert_delay_us;
2491 if (underflow_check_delay_us != 0xFFFFFFFF && dc->hwss.did_underflow_occur)
2492 ASSERT(dc->hwss.did_underflow_occur(dc, top_pipe_to_program));
2494 if (interdependent_update)
2495 lock_all_pipes(dc, context, true);
2497 dcn10_pipe_control_lock(dc, top_pipe_to_program, true);
2499 if (underflow_check_delay_us != 0xFFFFFFFF)
2500 udelay(underflow_check_delay_us);
2502 if (underflow_check_delay_us != 0xFFFFFFFF && dc->hwss.did_underflow_occur)
2503 ASSERT(dc->hwss.did_underflow_occur(dc, top_pipe_to_program));
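/* The underflow status is asserted once before the pipes are locked and again after
 * an optional debug delay (underflow_assert_delay_us) with the locks held, which
 * helps narrow down when an underflow occurs during the programming sequence.
 */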
2505 if (num_planes == 0) {
2506 /* OTG blank before removing all front ends */
2507 dc->hwss.blank_pixel_data(dc, top_pipe_to_program, true);
2510 /* Disconnect unused mpcc */
2511 for (i = 0; i < dc->res_pool->pipe_count; i++) {
2512 struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
2513 struct pipe_ctx *old_pipe_ctx =
2514 &dc->current_state->res_ctx.pipe_ctx[i];
2516 * Powergate reused pipes that are not powergated.
2517 * Fairly hacky right now, using opp_id as the indicator.
2518 * TODO: After moving dc_post to dc_update, this will
2521 if (pipe_ctx->plane_state && !old_pipe_ctx->plane_state) {
2522 if (old_pipe_ctx->stream_res.tg == tg &&
2523 old_pipe_ctx->plane_res.hubp &&
2524 old_pipe_ctx->plane_res.hubp->opp_id != OPP_ID_INVALID)
2525 dc->hwss.disable_plane(dc, old_pipe_ctx);
2528 if ((!pipe_ctx->plane_state ||
2529 pipe_ctx->stream_res.tg != old_pipe_ctx->stream_res.tg) &&
2530 old_pipe_ctx->plane_state &&
2531 old_pipe_ctx->stream_res.tg == tg) {
2533 dc->hwss.plane_atomic_disconnect(dc, old_pipe_ctx);
2534 removed_pipe[i] = true;
2536 DC_LOG_DC("Reset mpcc for pipe %d\n",
2537 old_pipe_ctx->pipe_idx);
2542 program_all_pipe_in_tree(dc, top_pipe_to_program, context);
2544 #if defined(CONFIG_DRM_AMD_DC_DCN2_0)
2545 /* Program secondary blending tree and writeback pipes */
2546 if ((stream->num_wb_info > 0) && (dc->hwss.program_all_writeback_pipes_in_tree))
2547 dc->hwss.program_all_writeback_pipes_in_tree(dc, stream, context);
2549 if (interdependent_update)
2550 for (i = 0; i < dc->res_pool->pipe_count; i++) {
2551 struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
2552 /* Skip inactive pipes and ones already updated */
2553 if (!pipe_ctx->stream || pipe_ctx->stream == stream ||
2554 !pipe_ctx->plane_state || !tg->funcs->is_tg_enabled(tg))
2557 pipe_ctx->plane_res.hubp->funcs->hubp_setup_interdependent(
2558 pipe_ctx->plane_res.hubp,
2559 &pipe_ctx->dlg_regs,
2560 &pipe_ctx->ttu_regs);
2563 if (interdependent_update)
2564 lock_all_pipes(dc, context, false);
2566 dcn10_pipe_control_lock(dc, top_pipe_to_program, false);
2568 if (num_planes == 0)
2569 false_optc_underflow_wa(dc, stream, tg);
2571 for (i = 0; i < dc->res_pool->pipe_count; i++)
2572 if (removed_pipe[i])
2573 dc->hwss.disable_plane(dc, &dc->current_state->res_ctx.pipe_ctx[i]);
2575 for (i = 0; i < dc->res_pool->pipe_count; i++)
2576 if (removed_pipe[i]) {
2577 dc->hwss.optimize_bandwidth(dc, context);
2581 if (dc->hwseq->wa.DEGVIDCN10_254)
2582 hubbub1_wm_change_req_wa(dc->res_pool->hubbub);
2585 static void dcn10_stereo_hw_frame_pack_wa(struct dc *dc, struct dc_state *context)
2589 for (i = 0; i < context->stream_count; i++) {
2590 if (context->streams[i]->timing.timing_3d_format
2591 == TIMING_3D_FORMAT_HW_FRAME_PACKING) {
2595 hubbub1_allow_self_refresh_control(dc->res_pool->hubbub, false);
2601 static void dcn10_prepare_bandwidth(
2603 struct dc_state *context)
2605 struct hubbub *hubbub = dc->res_pool->hubbub;
2607 if (dc->debug.sanity_checks)
2608 dcn10_verify_allow_pstate_change_high(dc);
2610 if (!IS_FPGA_MAXIMUS_DC(dc->ctx->dce_environment)) {
2611 if (context->stream_count == 0)
2612 context->bw_ctx.bw.dcn.clk.phyclk_khz = 0;
2614 dc->clk_mgr->funcs->update_clocks(
2620 hubbub->funcs->program_watermarks(hubbub,
2621 &context->bw_ctx.bw.dcn.watermarks,
2622 dc->res_pool->ref_clocks.dchub_ref_clock_inKhz / 1000,
2624 dcn10_stereo_hw_frame_pack_wa(dc, context);
2626 if (dc->debug.pplib_wm_report_mode == WM_REPORT_OVERRIDE)
2627 dcn_bw_notify_pplib_of_wm_ranges(dc);
2629 if (dc->debug.sanity_checks)
2630 dcn10_verify_allow_pstate_change_high(dc);
2633 static void dcn10_optimize_bandwidth(
2635 struct dc_state *context)
2637 struct hubbub *hubbub = dc->res_pool->hubbub;
2639 if (dc->debug.sanity_checks)
2640 dcn10_verify_allow_pstate_change_high(dc);
2642 if (!IS_FPGA_MAXIMUS_DC(dc->ctx->dce_environment)) {
2643 if (context->stream_count == 0)
2644 context->bw_ctx.bw.dcn.clk.phyclk_khz = 0;
2646 dc->clk_mgr->funcs->update_clocks(
2652 hubbub->funcs->program_watermarks(hubbub,
2653 &context->bw_ctx.bw.dcn.watermarks,
2654 dc->res_pool->ref_clocks.dchub_ref_clock_inKhz / 1000,
2656 dcn10_stereo_hw_frame_pack_wa(dc, context);
2658 if (dc->debug.pplib_wm_report_mode == WM_REPORT_OVERRIDE)
2659 dcn_bw_notify_pplib_of_wm_ranges(dc);
2661 if (dc->debug.sanity_checks)
2662 dcn10_verify_allow_pstate_change_high(dc);
2665 static void dcn10_set_drr(struct pipe_ctx **pipe_ctx,
2666 int num_pipes, int vmin, int vmax)
2669 struct drr_params params = {0};
2670 // DRR set trigger event mapped to OTG_TRIG_A (bit 11) for manual control flow
2671 unsigned int event_triggers = 0x800;
2673 params.vertical_total_max = vmax;
2674 params.vertical_total_min = vmin;
2676 /* TODO: If multiple pipes are to be supported, you need
2677 * some GSL stuff. Static screen triggers may be programmed differently
2680 for (i = 0; i < num_pipes; i++) {
2681 pipe_ctx[i]->stream_res.tg->funcs->set_drr(
2682 pipe_ctx[i]->stream_res.tg, &params);
2683 if (vmax != 0 && vmin != 0)
2684 pipe_ctx[i]->stream_res.tg->funcs->set_static_screen_control(
2685 pipe_ctx[i]->stream_res.tg,
2690 static void dcn10_get_position(struct pipe_ctx **pipe_ctx,
2692 struct crtc_position *position)
2696 /* TODO: handle pipes > 1
2698 for (i = 0; i < num_pipes; i++)
2699 pipe_ctx[i]->stream_res.tg->funcs->get_position(pipe_ctx[i]->stream_res.tg, position);
2702 static void dcn10_set_static_screen_control(struct pipe_ctx **pipe_ctx,
2703 int num_pipes, const struct dc_static_screen_events *events)
2706 unsigned int value = 0;
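/* Each enabled event type contributes a bit to the static screen event mask that
 * is handed to the timing generator below; the exact bit assignments belong to the
 * TG implementation.
 */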
2708 if (events->surface_update)
2710 if (events->cursor_update)
2712 if (events->force_trigger)
2715 for (i = 0; i < num_pipes; i++)
2716 pipe_ctx[i]->stream_res.tg->funcs->
2717 set_static_screen_control(pipe_ctx[i]->stream_res.tg, value);
2720 static void dcn10_config_stereo_parameters(
2721 struct dc_stream_state *stream, struct crtc_stereo_flags *flags)
2723 enum view_3d_format view_format = stream->view_format;
2724 enum dc_timing_3d_format timing_3d_format =\
2725 stream->timing.timing_3d_format;
2726 bool non_stereo_timing = false;
2728 if (timing_3d_format == TIMING_3D_FORMAT_NONE ||
2729 timing_3d_format == TIMING_3D_FORMAT_SIDE_BY_SIDE ||
2730 timing_3d_format == TIMING_3D_FORMAT_TOP_AND_BOTTOM)
2731 non_stereo_timing = true;
2733 if (non_stereo_timing == false &&
2734 view_format == VIEW_3D_FORMAT_FRAME_SEQUENTIAL) {
2736 flags->PROGRAM_STEREO = 1;
2737 flags->PROGRAM_POLARITY = 1;
2738 if (timing_3d_format == TIMING_3D_FORMAT_INBAND_FA ||
2739 timing_3d_format == TIMING_3D_FORMAT_DP_HDMI_INBAND_FA ||
2740 timing_3d_format == TIMING_3D_FORMAT_SIDEBAND_FA) {
2741 enum display_dongle_type dongle = \
2742 stream->link->ddc->dongle_type;
2743 if (dongle == DISPLAY_DONGLE_DP_VGA_CONVERTER ||
2744 dongle == DISPLAY_DONGLE_DP_DVI_CONVERTER ||
2745 dongle == DISPLAY_DONGLE_DP_HDMI_CONVERTER)
2746 flags->DISABLE_STEREO_DP_SYNC = 1;
2748 flags->RIGHT_EYE_POLARITY =\
2749 stream->timing.flags.RIGHT_EYE_3D_POLARITY;
2750 if (timing_3d_format == TIMING_3D_FORMAT_HW_FRAME_PACKING)
2751 flags->FRAME_PACKED = 1;
2757 static void dcn10_setup_stereo(struct pipe_ctx *pipe_ctx, struct dc *dc)
2759 struct crtc_stereo_flags flags = { 0 };
2760 struct dc_stream_state *stream = pipe_ctx->stream;
2762 dcn10_config_stereo_parameters(stream, &flags);
2764 if (stream->timing.timing_3d_format == TIMING_3D_FORMAT_SIDEBAND_FA) {
2765 if (!dc_set_generic_gpio_for_stereo(true, dc->ctx->gpio_service))
2766 dc_set_generic_gpio_for_stereo(false, dc->ctx->gpio_service);
2768 dc_set_generic_gpio_for_stereo(false, dc->ctx->gpio_service);
2771 pipe_ctx->stream_res.opp->funcs->opp_program_stereo(
2772 pipe_ctx->stream_res.opp,
2773 flags.PROGRAM_STEREO == 1 ? true:false,
2776 pipe_ctx->stream_res.tg->funcs->program_stereo(
2777 pipe_ctx->stream_res.tg,
2784 static struct hubp *get_hubp_by_inst(struct resource_pool *res_pool, int mpcc_inst)
2788 for (i = 0; i < res_pool->pipe_count; i++) {
2789 if (res_pool->hubps[i]->inst == mpcc_inst)
2790 return res_pool->hubps[i];
2796 static void dcn10_wait_for_mpcc_disconnect(
2798 struct resource_pool *res_pool,
2799 struct pipe_ctx *pipe_ctx)
2803 if (dc->debug.sanity_checks) {
2804 dcn10_verify_allow_pstate_change_high(dc);
2807 if (!pipe_ctx->stream_res.opp)
2810 for (mpcc_inst = 0; mpcc_inst < MAX_PIPES; mpcc_inst++) {
2811 if (pipe_ctx->stream_res.opp->mpcc_disconnect_pending[mpcc_inst]) {
2812 struct hubp *hubp = get_hubp_by_inst(res_pool, mpcc_inst);
2814 res_pool->mpc->funcs->wait_for_idle(res_pool->mpc, mpcc_inst);
2815 pipe_ctx->stream_res.opp->mpcc_disconnect_pending[mpcc_inst] = false;
2816 hubp->funcs->set_blank(hubp, true);
2820 if (dc->debug.sanity_checks) {
2821 dcn10_verify_allow_pstate_change_high(dc);
2826 static bool dcn10_dummy_display_power_gating(
2828 uint8_t controller_id,
2829 struct dc_bios *dcb,
2830 enum pipe_gating_control power_gating)
2835 static void dcn10_update_pending_status(struct pipe_ctx *pipe_ctx)
2837 struct dc_plane_state *plane_state = pipe_ctx->plane_state;
2838 struct timing_generator *tg = pipe_ctx->stream_res.tg;
2841 if (plane_state == NULL)
2844 flip_pending = pipe_ctx->plane_res.hubp->funcs->hubp_is_flip_pending(
2845 pipe_ctx->plane_res.hubp);
2847 plane_state->status.is_flip_pending = plane_state->status.is_flip_pending || flip_pending;
2850 plane_state->status.current_address = plane_state->status.requested_address;
2852 if (plane_state->status.current_address.type == PLN_ADDR_TYPE_GRPH_STEREO &&
2853 tg->funcs->is_stereo_left_eye) {
2854 plane_state->status.is_right_eye =
2855 !tg->funcs->is_stereo_left_eye(pipe_ctx->stream_res.tg);
2859 static void dcn10_update_dchub(struct dce_hwseq *hws, struct dchub_init_data *dh_data)
2861 if (hws->ctx->dc->res_pool->hubbub != NULL) {
2862 struct hubp *hubp = hws->ctx->dc->res_pool->hubps[0];
2864 if (hubp->funcs->hubp_update_dchub)
2865 hubp->funcs->hubp_update_dchub(hubp, dh_data);
2867 hubbub1_update_dchub(hws->ctx->dc->res_pool->hubbub, dh_data);
2871 static void dcn10_set_cursor_position(struct pipe_ctx *pipe_ctx)
2873 struct dc_cursor_position pos_cpy = pipe_ctx->stream->cursor_position;
2874 struct hubp *hubp = pipe_ctx->plane_res.hubp;
2875 struct dpp *dpp = pipe_ctx->plane_res.dpp;
2876 struct dc_cursor_mi_param param = {
2877 .pixel_clk_khz = pipe_ctx->stream->timing.pix_clk_100hz / 10,
2878 .ref_clk_khz = pipe_ctx->stream->ctx->dc->res_pool->ref_clocks.dchub_ref_clock_inKhz,
2879 .viewport = pipe_ctx->plane_res.scl_data.viewport,
2880 .h_scale_ratio = pipe_ctx->plane_res.scl_data.ratios.horz,
2881 .v_scale_ratio = pipe_ctx->plane_res.scl_data.ratios.vert,
2882 .rotation = pipe_ctx->plane_state->rotation,
2883 .mirror = pipe_ctx->plane_state->horizontal_mirror
2885 uint32_t x_plane = pipe_ctx->plane_state->dst_rect.x;
2886 uint32_t y_plane = pipe_ctx->plane_state->dst_rect.y;
2887 uint32_t x_offset = min(x_plane, pos_cpy.x);
2888 uint32_t y_offset = min(y_plane, pos_cpy.y);
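/* The cursor position from the stream is given in stream space, while HUBP/DPP take
 * it relative to the plane's destination rectangle. Subtract the plane offset
 * (clamped so the position never goes negative) and fold the remainder into the
 * hotspot so the on-screen cursor location is preserved.
 */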
2890 pos_cpy.x -= x_offset;
2891 pos_cpy.y -= y_offset;
2892 pos_cpy.x_hotspot += (x_plane - x_offset);
2893 pos_cpy.y_hotspot += (y_plane - y_offset);
2895 if (pipe_ctx->plane_state->address.type
2896 == PLN_ADDR_TYPE_VIDEO_PROGRESSIVE)
2897 pos_cpy.enable = false;
2899 hubp->funcs->set_cursor_position(hubp, &pos_cpy, &param);
2900 dpp->funcs->set_cursor_position(dpp, &pos_cpy, &param, hubp->curs_attr.width, hubp->curs_attr.height);
2903 static void dcn10_set_cursor_attribute(struct pipe_ctx *pipe_ctx)
2905 struct dc_cursor_attributes *attributes = &pipe_ctx->stream->cursor_attributes;
2907 pipe_ctx->plane_res.hubp->funcs->set_cursor_attributes(
2908 pipe_ctx->plane_res.hubp, attributes);
2909 pipe_ctx->plane_res.dpp->funcs->set_cursor_attributes(
2910 pipe_ctx->plane_res.dpp, attributes->color_format);
2913 static void dcn10_set_cursor_sdr_white_level(struct pipe_ctx *pipe_ctx)
2915 uint32_t sdr_white_level = pipe_ctx->stream->cursor_attributes.sdr_white_level;
2916 struct fixed31_32 multiplier;
2917 struct dpp_cursor_attributes opt_attr = { 0 };
2918 uint32_t hw_scale = 0x3c00; // 1.0 default multiplier
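/* 0x3c00 is 1.0 in standard half-precision float (5 exponent bits with bias 15 and
 * 10 mantissa bits: 0x0f << 10), matching the format set up below. The scale is only
 * overridden when sdr_white_level exceeds 80, the same reference value used by
 * set_hdr_multiplier() above.
 */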
2919 struct custom_float_format fmt;
2921 if (!pipe_ctx->plane_res.dpp->funcs->set_optional_cursor_attributes)
2924 fmt.exponenta_bits = 5;
2925 fmt.mantissa_bits = 10;
2928 if (sdr_white_level > 80) {
2929 multiplier = dc_fixpt_from_fraction(sdr_white_level, 80);
2930 convert_to_custom_float_format(multiplier, &fmt, &hw_scale);
2933 opt_attr.scale = hw_scale;
2936 pipe_ctx->plane_res.dpp->funcs->set_optional_cursor_attributes(
2937 pipe_ctx->plane_res.dpp, &opt_attr);
2941 * apply_front_porch_workaround  TODO: is this still needed on FPGA?
2943 * This is a workaround for a bug that has existed since R5xx and has not been
2944 * fixed: keep the front porch at a minimum of 2 for interlaced mode or 1 for progressive.
2946 static void apply_front_porch_workaround(
2947 struct dc_crtc_timing *timing)
2949 if (timing->flags.INTERLACE == 1) {
2950 if (timing->v_front_porch < 2)
2951 timing->v_front_porch = 2;
2953 if (timing->v_front_porch < 1)
2954 timing->v_front_porch = 1;
2958 int get_vupdate_offset_from_vsync(struct pipe_ctx *pipe_ctx)
2960 const struct dc_crtc_timing *dc_crtc_timing = &pipe_ctx->stream->timing;
2961 struct dc_crtc_timing patched_crtc_timing;
2962 int vesa_sync_start;
2964 int interlace_factor;
2965 int vertical_line_start;
2967 patched_crtc_timing = *dc_crtc_timing;
2968 apply_front_porch_workaround(&patched_crtc_timing);
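/* asic_blank_end below is the line, counted from the start of VSYNC, at which
 * vertical blank ends; stepping back by vstartup_start gives the VUPDATE offset
 * from VSYNC. The result can be negative when VSTARTUP begins before VSYNC (see
 * dcn10_setup_vupdate_interrupt()).
 */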
2970 interlace_factor = patched_crtc_timing.flags.INTERLACE ? 2 : 1;
2972 vesa_sync_start = patched_crtc_timing.v_addressable +
2973 patched_crtc_timing.v_border_bottom +
2974 patched_crtc_timing.v_front_porch;
2976 asic_blank_end = (patched_crtc_timing.v_total -
2978 patched_crtc_timing.v_border_top)
2981 vertical_line_start = asic_blank_end -
2982 pipe_ctx->pipe_dlg_param.vstartup_start + 1;
2984 return vertical_line_start;
2987 void lock_all_pipes(struct dc *dc,
2988 struct dc_state *context,
2991 struct pipe_ctx *pipe_ctx;
2992 struct timing_generator *tg;
2995 for (i = 0; i < dc->res_pool->pipe_count; i++) {
2996 pipe_ctx = &context->res_ctx.pipe_ctx[i];
2997 tg = pipe_ctx->stream_res.tg;
2999 * Only lock the top pipe's tg to prevent redundant
3000 * (un)locking. Also skip if pipe is disabled.
3002 if (pipe_ctx->top_pipe ||
3003 !pipe_ctx->stream || !pipe_ctx->plane_state ||
3004 !tg->funcs->is_tg_enabled(tg))
3008 tg->funcs->lock(tg);
3010 tg->funcs->unlock(tg);
3014 static void calc_vupdate_position(
3015 struct pipe_ctx *pipe_ctx,
3016 uint32_t *start_line,
3019 const struct dc_crtc_timing *dc_crtc_timing = &pipe_ctx->stream->timing;
3020 int vline_int_offset_from_vupdate =
3021 pipe_ctx->stream->periodic_interrupt0.lines_offset;
3022 int vupdate_offset_from_vsync = get_vupdate_offset_from_vsync(pipe_ctx);
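/* The requested line offset is applied relative to the VUPDATE position computed
 * above, the result is wrapped back into the frame (0 .. v_total), and a small
 * two-line window [start_line, start_line + 2] is programmed for the interrupt.
 */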
3025 if (vline_int_offset_from_vupdate > 0)
3026 vline_int_offset_from_vupdate--;
3027 else if (vline_int_offset_from_vupdate < 0)
3028 vline_int_offset_from_vupdate++;
3030 start_position = vline_int_offset_from_vupdate + vupdate_offset_from_vsync;
3032 if (start_position >= 0)
3033 *start_line = start_position;
3035 *start_line = dc_crtc_timing->v_total + start_position - 1;
3037 *end_line = *start_line + 2;
3039 if (*end_line >= dc_crtc_timing->v_total)
3043 static void cal_vline_position(
3044 struct pipe_ctx *pipe_ctx,
3045 enum vline_select vline,
3046 uint32_t *start_line,
3049 enum vertical_interrupt_ref_point ref_point = INVALID_POINT;
3051 if (vline == VLINE0)
3052 ref_point = pipe_ctx->stream->periodic_interrupt0.ref_point;
3053 else if (vline == VLINE1)
3054 ref_point = pipe_ctx->stream->periodic_interrupt1.ref_point;
3056 switch (ref_point) {
3057 case START_V_UPDATE:
3058 calc_vupdate_position(
3064 // Supposed to do nothing because vsync is 0
3072 static void dcn10_setup_periodic_interrupt(
3073 struct pipe_ctx *pipe_ctx,
3074 enum vline_select vline)
3076 struct timing_generator *tg = pipe_ctx->stream_res.tg;
3078 if (vline == VLINE0) {
3079 uint32_t start_line = 0;
3080 uint32_t end_line = 0;
3082 cal_vline_position(pipe_ctx, vline, &start_line, &end_line);
3084 tg->funcs->setup_vertical_interrupt0(tg, start_line, end_line);
3086 } else if (vline == VLINE1) {
3087 pipe_ctx->stream_res.tg->funcs->setup_vertical_interrupt1(
3089 pipe_ctx->stream->periodic_interrupt1.lines_offset);
3093 static void dcn10_setup_vupdate_interrupt(struct pipe_ctx *pipe_ctx)
3095 struct timing_generator *tg = pipe_ctx->stream_res.tg;
3096 int start_line = get_vupdate_offset_from_vsync(pipe_ctx);
3098 if (start_line < 0) {
3103 if (tg->funcs->setup_vertical_interrupt2)
3104 tg->funcs->setup_vertical_interrupt2(tg, start_line);
3107 static void dcn10_unblank_stream(struct pipe_ctx *pipe_ctx,
3108 struct dc_link_settings *link_settings)
3110 struct encoder_unblank_param params = { { 0 } };
3111 struct dc_stream_state *stream = pipe_ctx->stream;
3112 struct dc_link *link = stream->link;
3114 /* only 3 items below are used by unblank */
3115 params.timing = pipe_ctx->stream->timing;
3117 params.link_settings.link_rate = link_settings->link_rate;
3119 if (dc_is_dp_signal(pipe_ctx->stream->signal)) {
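/* For YCbCr 4:2:0 the pixel clock handed to the DP unblank sequence is halved,
 * presumably because 4:2:0 packing carries two pixels per container clock.
 */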
3120 if (params.timing.pixel_encoding == PIXEL_ENCODING_YCBCR420)
3121 params.timing.pix_clk_100hz /= 2;
3122 pipe_ctx->stream_res.stream_enc->funcs->dp_unblank(pipe_ctx->stream_res.stream_enc, &params);
3125 if (link->local_sink && link->local_sink->sink_signal == SIGNAL_TYPE_EDP) {
3126 link->dc->hwss.edp_backlight_control(link, true);
3130 static void dcn10_send_immediate_sdp_message(struct pipe_ctx *pipe_ctx,
3131 const uint8_t *custom_sdp_message,
3132 unsigned int sdp_message_size)
3134 if (dc_is_dp_signal(pipe_ctx->stream->signal)) {
3135 pipe_ctx->stream_res.stream_enc->funcs->send_immediate_sdp_message(
3136 pipe_ctx->stream_res.stream_enc,
3141 static enum dc_status dcn10_set_clock(struct dc *dc,
3142 enum dc_clock_type clock_type,
3146 struct dc_state *context = dc->current_state;
3147 struct dc_clock_config clock_cfg = {0};
3148 struct dc_clocks *current_clocks = &context->bw_ctx.bw.dcn.clk;
3150 if (dc->clk_mgr && dc->clk_mgr->funcs->get_clock)
3151 dc->clk_mgr->funcs->get_clock(dc->clk_mgr,
3152 context, clock_type, &clock_cfg);
3154 if (!dc->clk_mgr->funcs->get_clock)
3155 return DC_FAIL_UNSUPPORTED_1;
3157 if (clk_khz > clock_cfg.max_clock_khz)
3158 return DC_FAIL_CLK_EXCEED_MAX;
3160 if (clk_khz < clock_cfg.min_clock_khz)
3161 return DC_FAIL_CLK_BELOW_MIN;
3163 if (clk_khz < clock_cfg.bw_requirequired_clock_khz)
3164 return DC_FAIL_CLK_BELOW_CFG_REQUIRED;
3166 /* record the requested clock in the current context so the update_clocks call below uses it */
3167 if (clock_type == DC_CLOCK_TYPE_DISPCLK)
3168 current_clocks->dispclk_khz = clk_khz;
3169 else if (clock_type == DC_CLOCK_TYPE_DPPCLK)
3170 current_clocks->dppclk_khz = clk_khz;
3172 return DC_ERROR_UNEXPECTED;
3174 if (dc->clk_mgr && dc->clk_mgr->funcs->update_clocks)
3175 dc->clk_mgr->funcs->update_clocks(dc->clk_mgr,
3181 static void dcn10_get_clock(struct dc *dc,
3182 enum dc_clock_type clock_type,
3183 struct dc_clock_config *clock_cfg)
3185 struct dc_state *context = dc->current_state;
3187 if (dc->clk_mgr && dc->clk_mgr->funcs->get_clock)
3188 dc->clk_mgr->funcs->get_clock(dc->clk_mgr, context, clock_type, clock_cfg);
3192 static const struct hw_sequencer_funcs dcn10_funcs = {
3193 .program_gamut_remap = dcn10_program_gamut_remap,
3194 .init_hw = dcn10_init_hw,
3195 .init_pipes = dcn10_init_pipes,
3196 .apply_ctx_to_hw = dce110_apply_ctx_to_hw,
3197 .apply_ctx_for_surface = dcn10_apply_ctx_for_surface,
3198 .update_plane_addr = dcn10_update_plane_addr,
3199 .plane_atomic_disconnect = hwss1_plane_atomic_disconnect,
3200 .update_dchub = dcn10_update_dchub,
3201 .update_mpcc = dcn10_update_mpcc,
3202 .update_pending_status = dcn10_update_pending_status,
3203 .set_input_transfer_func = dcn10_set_input_transfer_func,
3204 .set_output_transfer_func = dcn10_set_output_transfer_func,
3205 .program_output_csc = dcn10_program_output_csc,
3206 .power_down = dce110_power_down,
3207 .enable_accelerated_mode = dce110_enable_accelerated_mode,
3208 .enable_timing_synchronization = dcn10_enable_timing_synchronization,
3209 .enable_per_frame_crtc_position_reset = dcn10_enable_per_frame_crtc_position_reset,
3210 .update_info_frame = dce110_update_info_frame,
3211 .send_immediate_sdp_message = dcn10_send_immediate_sdp_message,
3212 .enable_stream = dce110_enable_stream,
3213 .disable_stream = dce110_disable_stream,
3214 .unblank_stream = dcn10_unblank_stream,
3215 .blank_stream = dce110_blank_stream,
3216 .enable_audio_stream = dce110_enable_audio_stream,
3217 .disable_audio_stream = dce110_disable_audio_stream,
3218 .enable_display_power_gating = dcn10_dummy_display_power_gating,
3219 .disable_plane = dcn10_disable_plane,
3220 .blank_pixel_data = dcn10_blank_pixel_data,
3221 .pipe_control_lock = dcn10_pipe_control_lock,
3222 .prepare_bandwidth = dcn10_prepare_bandwidth,
3223 .optimize_bandwidth = dcn10_optimize_bandwidth,
3224 .reset_hw_ctx_wrap = dcn10_reset_hw_ctx_wrap,
3225 .enable_stream_timing = dcn10_enable_stream_timing,
3226 .set_drr = dcn10_set_drr,
3227 .get_position = dcn10_get_position,
3228 .set_static_screen_control = dcn10_set_static_screen_control,
3229 .setup_stereo = dcn10_setup_stereo,
3230 .set_avmute = dce110_set_avmute,
3231 .log_hw_state = dcn10_log_hw_state,
3232 .get_hw_state = dcn10_get_hw_state,
3233 .clear_status_bits = dcn10_clear_status_bits,
3234 .wait_for_mpcc_disconnect = dcn10_wait_for_mpcc_disconnect,
3235 .edp_backlight_control = dce110_edp_backlight_control,
3236 .edp_power_control = dce110_edp_power_control,
3237 .edp_wait_for_hpd_ready = dce110_edp_wait_for_hpd_ready,
3238 .set_cursor_position = dcn10_set_cursor_position,
3239 .set_cursor_attribute = dcn10_set_cursor_attribute,
3240 .set_cursor_sdr_white_level = dcn10_set_cursor_sdr_white_level,
3241 .disable_stream_gating = NULL,
3242 .enable_stream_gating = NULL,
3243 .setup_periodic_interrupt = dcn10_setup_periodic_interrupt,
3244 .setup_vupdate_interrupt = dcn10_setup_vupdate_interrupt,
3245 .set_clock = dcn10_set_clock,
3246 .get_clock = dcn10_get_clock,
3247 .did_underflow_occur = dcn10_did_underflow_occur,
3249 .disable_vga = dcn10_disable_vga,
3250 .bios_golden_init = dcn10_bios_golden_init,
3251 .plane_atomic_disable = dcn10_plane_atomic_disable,
3252 .plane_atomic_power_down = dcn10_plane_atomic_power_down,
3253 .enable_power_gating_plane = dcn10_enable_power_gating_plane,
3254 .dpp_pg_control = dcn10_dpp_pg_control,
3255 .hubp_pg_control = dcn10_hubp_pg_control,
3256 .dsc_pg_control = NULL,
3260 void dcn10_hw_sequencer_construct(struct dc *dc)
3262 dc->hwss = dcn10_funcs;