1 /*
2 * Copyright 2016 Advanced Micro Devices, Inc.
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
13 *
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
21 *
22 * Authors: AMD
23 *
24 */
25
26 #include "dm_services.h"
27 #include "core_types.h"
28 #include "resource.h"
29 #include "custom_float.h"
30 #include "dcn10_hw_sequencer.h"
31 #include "dce110/dce110_hw_sequencer.h"
32 #include "dce/dce_hwseq.h"
33 #include "abm.h"
34 #include "dcn10/dcn10_timing_generator.h"
35 #include "dcn10/dcn10_dpp.h"
36 #include "dcn10/dcn10_mpc.h"
37 #include "timing_generator.h"
38 #include "opp.h"
39 #include "ipp.h"
40 #include "mpc.h"
41 #include "reg_helper.h"
42 #include "custom_float.h"
43 #include "dcn10_hubp.h"
44
45 #define CTX \
46 hws->ctx
47 #define REG(reg)\
48 hws->regs->reg
49
50 #undef FN
51 #define FN(reg_name, field_name) \
52 hws->shifts->field_name, hws->masks->field_name
53
54 static void log_mpc_crc(struct dc *dc)
55 {
56 struct dc_context *dc_ctx = dc->ctx;
57 struct dce_hwseq *hws = dc->hwseq;
58
59 if (REG(MPC_CRC_RESULT_GB))
60 DTN_INFO("MPC_CRC_RESULT_GB:%d MPC_CRC_RESULT_C:%d MPC_CRC_RESULT_AR:%d\n",
61 REG_READ(MPC_CRC_RESULT_GB), REG_READ(MPC_CRC_RESULT_C), REG_READ(MPC_CRC_RESULT_AR));
62 if (REG(DPP_TOP0_DPP_CRC_VAL_B_A))
63 DTN_INFO("DPP_TOP0_DPP_CRC_VAL_B_A:%d DPP_TOP0_DPP_CRC_VAL_R_G:%d\n",
64 REG_READ(DPP_TOP0_DPP_CRC_VAL_B_A), REG_READ(DPP_TOP0_DPP_CRC_VAL_R_G));
65 }
66
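/* Print a DCHUB reference clock cycle count as microseconds with one
 * decimal place, assuming a 48 MHz debug reference clock.
 * Example: ref_cycle = 480 -> (480 * 10) / 48 = 100 -> "10.0".
 */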
67 void print_microsec(struct dc_context *dc_ctx, uint32_t ref_cycle)
68 {
69 static const uint32_t ref_clk_mhz = 48;
70 static const unsigned int frac = 10;
71 uint32_t us_x10 = (ref_cycle * frac) / ref_clk_mhz;
72
73 DTN_INFO("%d.%d \t ",
74 us_x10 / frac,
75 us_x10 % frac);
76 }
77
78 #define DTN_INFO_MICRO_SEC(ref_cycle) \
79 print_microsec(dc_ctx, ref_cycle)
80
81 struct dcn_hubbub_wm_set {
82 uint32_t wm_set;
83 uint32_t data_urgent;
84 uint32_t pte_meta_urgent;
85 uint32_t sr_enter;
86 uint32_t sr_exit;
87         uint32_t dram_clk_change;
88 };
89
90 struct dcn_hubbub_wm {
91 struct dcn_hubbub_wm_set sets[4];
92 };
93
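/* Snapshot the raw HUBBUB arbiter watermark registers for sets A-D so
 * they can be dumped by dcn10_log_hubbub_state() below.
 */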
94 static void dcn10_hubbub_wm_read_state(struct dce_hwseq *hws,
95 struct dcn_hubbub_wm *wm)
96 {
97 struct dcn_hubbub_wm_set *s;
98
99 s = &wm->sets[0];
100 s->wm_set = 0;
101 s->data_urgent = REG_READ(DCHUBBUB_ARB_DATA_URGENCY_WATERMARK_A);
102 s->pte_meta_urgent = REG_READ(DCHUBBUB_ARB_PTE_META_URGENCY_WATERMARK_A);
103 s->sr_enter = REG_READ(DCHUBBUB_ARB_ALLOW_SR_ENTER_WATERMARK_A);
104 s->sr_exit = REG_READ(DCHUBBUB_ARB_ALLOW_SR_EXIT_WATERMARK_A);
105         s->dram_clk_change = REG_READ(DCHUBBUB_ARB_ALLOW_DRAM_CLK_CHANGE_WATERMARK_A);
106
107 s = &wm->sets[1];
108 s->wm_set = 1;
109 s->data_urgent = REG_READ(DCHUBBUB_ARB_DATA_URGENCY_WATERMARK_B);
110 s->pte_meta_urgent = REG_READ(DCHUBBUB_ARB_PTE_META_URGENCY_WATERMARK_B);
111 s->sr_enter = REG_READ(DCHUBBUB_ARB_ALLOW_SR_ENTER_WATERMARK_B);
112 s->sr_exit = REG_READ(DCHUBBUB_ARB_ALLOW_SR_EXIT_WATERMARK_B);
113         s->dram_clk_change = REG_READ(DCHUBBUB_ARB_ALLOW_DRAM_CLK_CHANGE_WATERMARK_B);
114
115 s = &wm->sets[2];
116 s->wm_set = 2;
117 s->data_urgent = REG_READ(DCHUBBUB_ARB_DATA_URGENCY_WATERMARK_C);
118 s->pte_meta_urgent = REG_READ(DCHUBBUB_ARB_PTE_META_URGENCY_WATERMARK_C);
119 s->sr_enter = REG_READ(DCHUBBUB_ARB_ALLOW_SR_ENTER_WATERMARK_C);
120 s->sr_exit = REG_READ(DCHUBBUB_ARB_ALLOW_SR_EXIT_WATERMARK_C);
121         s->dram_clk_change = REG_READ(DCHUBBUB_ARB_ALLOW_DRAM_CLK_CHANGE_WATERMARK_C);
122
123 s = &wm->sets[3];
124 s->wm_set = 3;
125 s->data_urgent = REG_READ(DCHUBBUB_ARB_DATA_URGENCY_WATERMARK_D);
126 s->pte_meta_urgent = REG_READ(DCHUBBUB_ARB_PTE_META_URGENCY_WATERMARK_D);
127 s->sr_enter = REG_READ(DCHUBBUB_ARB_ALLOW_SR_ENTER_WATERMARK_D);
128 s->sr_exit = REG_READ(DCHUBBUB_ARB_ALLOW_SR_EXIT_WATERMARK_D);
129         s->dram_clk_change = REG_READ(DCHUBBUB_ARB_ALLOW_DRAM_CLK_CHANGE_WATERMARK_D);
130 }
131
132 static void dcn10_log_hubbub_state(struct dc *dc)
133 {
134 struct dc_context *dc_ctx = dc->ctx;
135 struct dcn_hubbub_wm wm;
136 int i;
137
138 dcn10_hubbub_wm_read_state(dc->hwseq, &wm);
139
140 DTN_INFO("HUBBUB WM: \t data_urgent \t pte_meta_urgent \t "
141 "sr_enter \t sr_exit \t dram_clk_change \n");
142
143 for (i = 0; i < 4; i++) {
144 struct dcn_hubbub_wm_set *s;
145
146 s = &wm.sets[i];
147 DTN_INFO("WM_Set[%d]:\t ", s->wm_set);
148 DTN_INFO_MICRO_SEC(s->data_urgent);
149 DTN_INFO_MICRO_SEC(s->pte_meta_urgent);
150 DTN_INFO_MICRO_SEC(s->sr_enter);
151 DTN_INFO_MICRO_SEC(s->sr_exit);
152                 DTN_INFO_MICRO_SEC(s->dram_clk_change);
153 DTN_INFO("\n");
154 }
155
156 DTN_INFO("\n");
157 }
158
159 static void dcn10_log_hw_state(struct dc *dc)
160 {
161 struct dc_context *dc_ctx = dc->ctx;
162 struct resource_pool *pool = dc->res_pool;
163 int i;
164
165 DTN_INFO_BEGIN();
166
167 dcn10_log_hubbub_state(dc);
168
169 DTN_INFO("HUBP:\t format \t addr_hi \t width \t height \t "
170 "rotation \t mirror \t sw_mode \t "
171 "dcc_en \t blank_en \t ttu_dis \t underflow \t "
172 "min_ttu_vblank \t qos_low_wm \t qos_high_wm \n");
173
174 for (i = 0; i < pool->pipe_count; i++) {
175 struct hubp *hubp = pool->hubps[i];
176 struct dcn_hubp_state s;
177
178 hubp1_read_state(TO_DCN10_HUBP(hubp), &s);
179
180 DTN_INFO("[%d]:\t %xh \t %xh \t %d \t %d \t "
181 "%xh \t %xh \t %xh \t "
182 "%d \t %d \t %d \t %xh \t",
183 i,
184 s.pixel_format,
185 s.inuse_addr_hi,
186 s.viewport_width,
187 s.viewport_height,
188 s.rotation_angle,
189 s.h_mirror_en,
190 s.sw_mode,
191 s.dcc_en,
192 s.blank_en,
193 s.ttu_disable,
194 s.underflow_status);
195 DTN_INFO_MICRO_SEC(s.min_ttu_vblank);
196 DTN_INFO_MICRO_SEC(s.qos_level_low_wm);
197 DTN_INFO_MICRO_SEC(s.qos_level_high_wm);
198 DTN_INFO("\n");
199 }
200 DTN_INFO("\n");
201
202 DTN_INFO("OTG:\t v_bs \t v_be \t v_ss \t v_se \t vpol \t vmax \t vmin \t "
203 "h_bs \t h_be \t h_ss \t h_se \t hpol \t htot \t vtot \t underflow\n");
204
205 for (i = 0; i < pool->res_cap->num_timing_generator; i++) {
206 struct timing_generator *tg = pool->timing_generators[i];
207 struct dcn_otg_state s = {0};
208
209 tgn10_read_otg_state(DCN10TG_FROM_TG(tg), &s);
210
211 //only print if OTG master is enabled
212 if ((s.otg_enabled & 1) == 0)
213 continue;
214
215 DTN_INFO("[%d]:\t %d \t %d \t %d \t %d \t "
216 "%d \t %d \t %d \t %d \t %d \t %d \t "
217 "%d \t %d \t %d \t %d \t %d \t ",
218 i,
219 s.v_blank_start,
220 s.v_blank_end,
221 s.v_sync_a_start,
222 s.v_sync_a_end,
223 s.v_sync_a_pol,
224 s.v_total_max,
225 s.v_total_min,
226 s.h_blank_start,
227 s.h_blank_end,
228 s.h_sync_a_start,
229 s.h_sync_a_end,
230 s.h_sync_a_pol,
231 s.h_total,
232 s.v_total,
233 s.underflow_occurred_status);
234 DTN_INFO("\n");
235 }
236 DTN_INFO("\n");
237
238 log_mpc_crc(dc);
239
240 DTN_INFO_END();
241 }
242
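/* Sanity check used when dc->debug.sanity_checks is set: poll the HUBBUB
 * debug bus until the arbiter reports allow_pstate_change (bit 30 of
 * DCHUBBUB_TEST_DEBUG_DATA, see the bit layout further down). If it does
 * not assert within pstate_wait_timeout_us, force pstate allow as a
 * workaround, optionally dump HW state and break to the debugger.
 */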
243 static void verify_allow_pstate_change_high(
244 struct dce_hwseq *hws)
245 {
246         /* pstate latency is ~20us, so if we wait over 40us and pstate allow
247          * is still not asserted, we are probably stuck and going to hang.
248          *
249          * TODO: Figure out why it takes ~100us on Linux.
250          * pstate currently takes around ~100us on Linux; it is unknown
251          * why it takes that long there.
252          */
253 static unsigned int pstate_wait_timeout_us = 200;
254 static unsigned int pstate_wait_expected_timeout_us = 40;
255 static unsigned int max_sampled_pstate_wait_us; /* data collection */
256 static bool forced_pstate_allow; /* help with revert wa */
257 static bool should_log_hw_state; /* prevent hw state log by default */
258
259 unsigned int debug_index = 0x7;
260 unsigned int debug_data;
261 unsigned int i;
262
263 if (forced_pstate_allow) {
264                 /* The last call to verify_allow_pstate_change_high forced
265                  * pstate allow as a workaround to prevent a hang, so disable
266                  * the force here so we can check the real status.
267                  */
268 REG_UPDATE_2(DCHUBBUB_ARB_DRAM_STATE_CNTL,
269 DCHUBBUB_ARB_ALLOW_PSTATE_CHANGE_FORCE_VALUE, 0,
270 DCHUBBUB_ARB_ALLOW_PSTATE_CHANGE_FORCE_ENABLE, 0);
271 forced_pstate_allow = false;
272 }
273
274 /* description "3-0: Pipe0 cursor0 QOS
275 * 7-4: Pipe1 cursor0 QOS
276 * 11-8: Pipe2 cursor0 QOS
277 * 15-12: Pipe3 cursor0 QOS
278 * 16: Pipe0 Plane0 Allow Pstate Change
279 * 17: Pipe1 Plane0 Allow Pstate Change
280 * 18: Pipe2 Plane0 Allow Pstate Change
281 * 19: Pipe3 Plane0 Allow Pstate Change
282 * 20: Pipe0 Plane1 Allow Pstate Change
283 * 21: Pipe1 Plane1 Allow Pstate Change
284 * 22: Pipe2 Plane1 Allow Pstate Change
285 * 23: Pipe3 Plane1 Allow Pstate Change
286 * 24: Pipe0 cursor0 Allow Pstate Change
287 * 25: Pipe1 cursor0 Allow Pstate Change
288 * 26: Pipe2 cursor0 Allow Pstate Change
289 * 27: Pipe3 cursor0 Allow Pstate Change
290 * 28: WB0 Allow Pstate Change
291 * 29: WB1 Allow Pstate Change
292 * 30: Arbiter's allow_pstate_change
293 * 31: SOC pstate change request
294 */
295
296 REG_WRITE(DCHUBBUB_TEST_DEBUG_INDEX, debug_index);
297
298 for (i = 0; i < pstate_wait_timeout_us; i++) {
299 debug_data = REG_READ(DCHUBBUB_TEST_DEBUG_DATA);
300
301 if (debug_data & (1 << 30)) {
302
303 if (i > pstate_wait_expected_timeout_us)
304 dm_logger_write(hws->ctx->logger, LOG_WARNING,
305 "pstate took longer than expected ~%dus\n",
306 i);
307
308 return;
309 }
310 if (max_sampled_pstate_wait_us < i)
311 max_sampled_pstate_wait_us = i;
312
313 udelay(1);
314 }
315
316 /* force pstate allow to prevent system hang
317 * and break to debugger to investigate
318 */
319 REG_UPDATE_2(DCHUBBUB_ARB_DRAM_STATE_CNTL,
320 DCHUBBUB_ARB_ALLOW_PSTATE_CHANGE_FORCE_VALUE, 1,
321 DCHUBBUB_ARB_ALLOW_PSTATE_CHANGE_FORCE_ENABLE, 1);
322 forced_pstate_allow = true;
323
324 if (should_log_hw_state) {
325 dcn10_log_hw_state(hws->ctx->dc);
326 }
327
328 dm_logger_write(hws->ctx->logger, LOG_WARNING,
329 "pstate TEST_DEBUG_DATA: 0x%X\n",
330 debug_data);
331 BREAK_TO_DEBUGGER();
332 }
333
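/* Enable the DPP clock for the given pipe and, on HW that exposes
 * DPPCLK_RATE_CONTROL, program the requested rate-control (divider)
 * setting as well.
 */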
334 static void enable_dppclk(
335 struct dce_hwseq *hws,
336 uint8_t plane_id,
337 uint32_t requested_pix_clk,
338 bool dppclk_div)
339 {
340 dm_logger_write(hws->ctx->logger, LOG_SURFACE,
341 "dppclk_rate_control for pipe %d programed to %d\n",
342 plane_id,
343 dppclk_div);
344
345 if (hws->shifts->DPPCLK_RATE_CONTROL)
346 REG_UPDATE_2(DPP_CONTROL[plane_id],
347 DPPCLK_RATE_CONTROL, dppclk_div,
348 DPP_CLOCK_ENABLE, 1);
349 else
350 REG_UPDATE(DPP_CONTROL[plane_id],
351 DPP_CLOCK_ENABLE, 1);
352 }
353
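/* enable == true lets the PG FSM gate the HUBP/DPP domains; enable ==
 * false forces every domain on, i.e. power gating is disabled.
 */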
354 static void enable_power_gating_plane(
355 struct dce_hwseq *hws,
356 bool enable)
357 {
358         bool force_on = true; /* disable power gating */
359 
360         if (enable)
361                 force_on = false;
362
363 /* DCHUBP0/1/2/3 */
364 REG_UPDATE(DOMAIN0_PG_CONFIG, DOMAIN0_POWER_FORCEON, force_on);
365 REG_UPDATE(DOMAIN2_PG_CONFIG, DOMAIN2_POWER_FORCEON, force_on);
366 REG_UPDATE(DOMAIN4_PG_CONFIG, DOMAIN4_POWER_FORCEON, force_on);
367 REG_UPDATE(DOMAIN6_PG_CONFIG, DOMAIN6_POWER_FORCEON, force_on);
368
369 /* DPP0/1/2/3 */
370 REG_UPDATE(DOMAIN1_PG_CONFIG, DOMAIN1_POWER_FORCEON, force_on);
371 REG_UPDATE(DOMAIN3_PG_CONFIG, DOMAIN3_POWER_FORCEON, force_on);
372 REG_UPDATE(DOMAIN5_PG_CONFIG, DOMAIN5_POWER_FORCEON, force_on);
373 REG_UPDATE(DOMAIN7_PG_CONFIG, DOMAIN7_POWER_FORCEON, force_on);
374 }
375
376 static void disable_vga(
377 struct dce_hwseq *hws)
378 {
379 REG_WRITE(D1VGA_CONTROL, 0);
380 REG_WRITE(D2VGA_CONTROL, 0);
381 REG_WRITE(D3VGA_CONTROL, 0);
382 REG_WRITE(D4VGA_CONTROL, 0);
383 }
384
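/* Power gate (power_on == false) or un-gate (power_on == true) a single
 * DPP. DPP0-3 live in power domains 1/3/5/7; a PGFSM status of 0 means
 * powered on, 2 means gated.
 */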
385 static void dpp_pg_control(
386 struct dce_hwseq *hws,
387 unsigned int dpp_inst,
388 bool power_on)
389 {
390 uint32_t power_gate = power_on ? 0 : 1;
391 uint32_t pwr_status = power_on ? 0 : 2;
392
393 if (hws->ctx->dc->debug.disable_dpp_power_gate)
394 return;
395
396 switch (dpp_inst) {
397 case 0: /* DPP0 */
398 REG_UPDATE(DOMAIN1_PG_CONFIG,
399 DOMAIN1_POWER_GATE, power_gate);
400
401 REG_WAIT(DOMAIN1_PG_STATUS,
402 DOMAIN1_PGFSM_PWR_STATUS, pwr_status,
403 1, 1000);
404 break;
405 case 1: /* DPP1 */
406 REG_UPDATE(DOMAIN3_PG_CONFIG,
407 DOMAIN3_POWER_GATE, power_gate);
408
409 REG_WAIT(DOMAIN3_PG_STATUS,
410 DOMAIN3_PGFSM_PWR_STATUS, pwr_status,
411 1, 1000);
412 break;
413 case 2: /* DPP2 */
414 REG_UPDATE(DOMAIN5_PG_CONFIG,
415 DOMAIN5_POWER_GATE, power_gate);
416
417 REG_WAIT(DOMAIN5_PG_STATUS,
418 DOMAIN5_PGFSM_PWR_STATUS, pwr_status,
419 1, 1000);
420 break;
421 case 3: /* DPP3 */
422 REG_UPDATE(DOMAIN7_PG_CONFIG,
423 DOMAIN7_POWER_GATE, power_gate);
424
425 REG_WAIT(DOMAIN7_PG_STATUS,
426 DOMAIN7_PGFSM_PWR_STATUS, pwr_status,
427 1, 1000);
428 break;
429 default:
430 BREAK_TO_DEBUGGER();
431 break;
432 }
433 }
434
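/* Convert a watermark from nanoseconds to DCHUB reference clock cycles,
 * clamped so the result fits the register field:
 *	cycles = wm_ns * refclk_mhz / 1000
 * e.g. 800 ns at a 400 MHz refclk -> 320 cycles, well under the 21-bit
 * limit (0x1fffff) used by the callers below.
 */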
435 static uint32_t convert_and_clamp(
436 uint32_t wm_ns,
437 uint32_t refclk_mhz,
438 uint32_t clamp_value)
439 {
440         uint32_t ret_val;
441 
442         ret_val = wm_ns * refclk_mhz / 1000;
443
444 if (ret_val > clamp_value)
445 ret_val = clamp_value;
446
447 return ret_val;
448 }
449
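/* Program the HUBBUB arbiter watermark sets A-D. Values are supplied in
 * nanoseconds and converted to refclk cycles with convert_and_clamp()
 * (all DCN1 watermark registers are 21 bits wide); a watermark change
 * request is then asserted and the arbiter saturation level, minimum
 * outstanding requests and self-refresh force controls are set.
 */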
450 static void program_watermarks(
451 struct dce_hwseq *hws,
452 struct dcn_watermark_set *watermarks,
453 unsigned int refclk_mhz)
454 {
455 uint32_t force_en = hws->ctx->dc->debug.disable_stutter ? 1 : 0;
456 /*
457 * Need to clamp to max of the register values (i.e. no wrap)
458 * for dcn1, all wm registers are 21-bit wide
459 */
460 uint32_t prog_wm_value;
461
462 REG_UPDATE(DCHUBBUB_ARB_WATERMARK_CHANGE_CNTL,
463 DCHUBBUB_ARB_WATERMARK_CHANGE_REQUEST, 0);
464
465         /* Repeat for watermark sets A, B, C and D. */
466 /* clock state A */
467 prog_wm_value = convert_and_clamp(watermarks->a.urgent_ns,
468 refclk_mhz, 0x1fffff);
469 REG_WRITE(DCHUBBUB_ARB_DATA_URGENCY_WATERMARK_A, prog_wm_value);
470
471 dm_logger_write(hws->ctx->logger, LOG_BANDWIDTH_CALCS,
472 "URGENCY_WATERMARK_A calculated =%d\n"
473 "HW register value = 0x%x\n",
474 watermarks->a.urgent_ns, prog_wm_value);
475
476 prog_wm_value = convert_and_clamp(watermarks->a.pte_meta_urgent_ns,
477 refclk_mhz, 0x1fffff);
478 REG_WRITE(DCHUBBUB_ARB_PTE_META_URGENCY_WATERMARK_A, prog_wm_value);
479 dm_logger_write(hws->ctx->logger, LOG_BANDWIDTH_CALCS,
480 "PTE_META_URGENCY_WATERMARK_A calculated =%d\n"
481 "HW register value = 0x%x\n",
482 watermarks->a.pte_meta_urgent_ns, prog_wm_value);
483
484 if (REG(DCHUBBUB_ARB_ALLOW_SR_ENTER_WATERMARK_A)) {
485 prog_wm_value = convert_and_clamp(
486 watermarks->a.cstate_pstate.cstate_enter_plus_exit_ns,
487 refclk_mhz, 0x1fffff);
488 REG_WRITE(DCHUBBUB_ARB_ALLOW_SR_ENTER_WATERMARK_A, prog_wm_value);
489 dm_logger_write(hws->ctx->logger, LOG_BANDWIDTH_CALCS,
490 "SR_ENTER_EXIT_WATERMARK_A calculated =%d\n"
491 "HW register value = 0x%x\n",
492 watermarks->a.cstate_pstate.cstate_enter_plus_exit_ns, prog_wm_value);
493
494
495 prog_wm_value = convert_and_clamp(
496 watermarks->a.cstate_pstate.cstate_exit_ns,
497 refclk_mhz, 0x1fffff);
498 REG_WRITE(DCHUBBUB_ARB_ALLOW_SR_EXIT_WATERMARK_A, prog_wm_value);
499 dm_logger_write(hws->ctx->logger, LOG_BANDWIDTH_CALCS,
500 "SR_EXIT_WATERMARK_A calculated =%d\n"
501 "HW register value = 0x%x\n",
502 watermarks->a.cstate_pstate.cstate_exit_ns, prog_wm_value);
503 }
504
505 prog_wm_value = convert_and_clamp(
506 watermarks->a.cstate_pstate.pstate_change_ns,
507 refclk_mhz, 0x1fffff);
508 REG_WRITE(DCHUBBUB_ARB_ALLOW_DRAM_CLK_CHANGE_WATERMARK_A, prog_wm_value);
509 dm_logger_write(hws->ctx->logger, LOG_BANDWIDTH_CALCS,
510 "DRAM_CLK_CHANGE_WATERMARK_A calculated =%d\n"
511 "HW register value = 0x%x\n\n",
512 watermarks->a.cstate_pstate.pstate_change_ns, prog_wm_value);
513
514
515 /* clock state B */
516 prog_wm_value = convert_and_clamp(
517 watermarks->b.urgent_ns, refclk_mhz, 0x1fffff);
518 REG_WRITE(DCHUBBUB_ARB_DATA_URGENCY_WATERMARK_B, prog_wm_value);
519 dm_logger_write(hws->ctx->logger, LOG_BANDWIDTH_CALCS,
520 "URGENCY_WATERMARK_B calculated =%d\n"
521 "HW register value = 0x%x\n",
522 watermarks->b.urgent_ns, prog_wm_value);
523
524
525 prog_wm_value = convert_and_clamp(
526 watermarks->b.pte_meta_urgent_ns,
527 refclk_mhz, 0x1fffff);
528 REG_WRITE(DCHUBBUB_ARB_PTE_META_URGENCY_WATERMARK_B, prog_wm_value);
529 dm_logger_write(hws->ctx->logger, LOG_BANDWIDTH_CALCS,
530 "PTE_META_URGENCY_WATERMARK_B calculated =%d\n"
531 "HW register value = 0x%x\n",
532 watermarks->b.pte_meta_urgent_ns, prog_wm_value);
533
534
535 if (REG(DCHUBBUB_ARB_ALLOW_SR_ENTER_WATERMARK_B)) {
536 prog_wm_value = convert_and_clamp(
537 watermarks->b.cstate_pstate.cstate_enter_plus_exit_ns,
538 refclk_mhz, 0x1fffff);
539 REG_WRITE(DCHUBBUB_ARB_ALLOW_SR_ENTER_WATERMARK_B, prog_wm_value);
540 dm_logger_write(hws->ctx->logger, LOG_BANDWIDTH_CALCS,
541 "SR_ENTER_WATERMARK_B calculated =%d\n"
542 "HW register value = 0x%x\n",
543 watermarks->b.cstate_pstate.cstate_enter_plus_exit_ns, prog_wm_value);
544
545
546 prog_wm_value = convert_and_clamp(
547 watermarks->b.cstate_pstate.cstate_exit_ns,
548 refclk_mhz, 0x1fffff);
549 REG_WRITE(DCHUBBUB_ARB_ALLOW_SR_EXIT_WATERMARK_B, prog_wm_value);
550 dm_logger_write(hws->ctx->logger, LOG_BANDWIDTH_CALCS,
551 "SR_EXIT_WATERMARK_B calculated =%d\n"
552 "HW register value = 0x%x\n",
553 watermarks->b.cstate_pstate.cstate_exit_ns, prog_wm_value);
554 }
555
556 prog_wm_value = convert_and_clamp(
557 watermarks->b.cstate_pstate.pstate_change_ns,
558 refclk_mhz, 0x1fffff);
559 REG_WRITE(DCHUBBUB_ARB_ALLOW_DRAM_CLK_CHANGE_WATERMARK_B, prog_wm_value);
560 dm_logger_write(hws->ctx->logger, LOG_BANDWIDTH_CALCS,
561 "DRAM_CLK_CHANGE_WATERMARK_B calculated =%d\n\n"
562 "HW register value = 0x%x\n",
563 watermarks->b.cstate_pstate.pstate_change_ns, prog_wm_value);
564
565 /* clock state C */
566 prog_wm_value = convert_and_clamp(
567 watermarks->c.urgent_ns, refclk_mhz, 0x1fffff);
568 REG_WRITE(DCHUBBUB_ARB_DATA_URGENCY_WATERMARK_C, prog_wm_value);
569 dm_logger_write(hws->ctx->logger, LOG_BANDWIDTH_CALCS,
570 "URGENCY_WATERMARK_C calculated =%d\n"
571 "HW register value = 0x%x\n",
572 watermarks->c.urgent_ns, prog_wm_value);
573
574
575 prog_wm_value = convert_and_clamp(
576 watermarks->c.pte_meta_urgent_ns,
577 refclk_mhz, 0x1fffff);
578 REG_WRITE(DCHUBBUB_ARB_PTE_META_URGENCY_WATERMARK_C, prog_wm_value);
579 dm_logger_write(hws->ctx->logger, LOG_BANDWIDTH_CALCS,
580 "PTE_META_URGENCY_WATERMARK_C calculated =%d\n"
581 "HW register value = 0x%x\n",
582 watermarks->c.pte_meta_urgent_ns, prog_wm_value);
583
584
585 if (REG(DCHUBBUB_ARB_ALLOW_SR_ENTER_WATERMARK_C)) {
586 prog_wm_value = convert_and_clamp(
587 watermarks->c.cstate_pstate.cstate_enter_plus_exit_ns,
588 refclk_mhz, 0x1fffff);
589 REG_WRITE(DCHUBBUB_ARB_ALLOW_SR_ENTER_WATERMARK_C, prog_wm_value);
590 dm_logger_write(hws->ctx->logger, LOG_BANDWIDTH_CALCS,
591 "SR_ENTER_WATERMARK_C calculated =%d\n"
592 "HW register value = 0x%x\n",
593 watermarks->c.cstate_pstate.cstate_enter_plus_exit_ns, prog_wm_value);
594
595
596 prog_wm_value = convert_and_clamp(
597 watermarks->c.cstate_pstate.cstate_exit_ns,
598 refclk_mhz, 0x1fffff);
599 REG_WRITE(DCHUBBUB_ARB_ALLOW_SR_EXIT_WATERMARK_C, prog_wm_value);
600 dm_logger_write(hws->ctx->logger, LOG_BANDWIDTH_CALCS,
601 "SR_EXIT_WATERMARK_C calculated =%d\n"
602 "HW register value = 0x%x\n",
603 watermarks->c.cstate_pstate.cstate_exit_ns, prog_wm_value);
604 }
605
606 prog_wm_value = convert_and_clamp(
607 watermarks->c.cstate_pstate.pstate_change_ns,
608 refclk_mhz, 0x1fffff);
609 REG_WRITE(DCHUBBUB_ARB_ALLOW_DRAM_CLK_CHANGE_WATERMARK_C, prog_wm_value);
610 dm_logger_write(hws->ctx->logger, LOG_BANDWIDTH_CALCS,
611 "DRAM_CLK_CHANGE_WATERMARK_C calculated =%d\n\n"
612 "HW register value = 0x%x\n",
613 watermarks->c.cstate_pstate.pstate_change_ns, prog_wm_value);
614
615 /* clock state D */
616 prog_wm_value = convert_and_clamp(
617 watermarks->d.urgent_ns, refclk_mhz, 0x1fffff);
618 REG_WRITE(DCHUBBUB_ARB_DATA_URGENCY_WATERMARK_D, prog_wm_value);
619 dm_logger_write(hws->ctx->logger, LOG_BANDWIDTH_CALCS,
620 "URGENCY_WATERMARK_D calculated =%d\n"
621 "HW register value = 0x%x\n",
622 watermarks->d.urgent_ns, prog_wm_value);
623
624 prog_wm_value = convert_and_clamp(
625 watermarks->d.pte_meta_urgent_ns,
626 refclk_mhz, 0x1fffff);
627 REG_WRITE(DCHUBBUB_ARB_PTE_META_URGENCY_WATERMARK_D, prog_wm_value);
628 dm_logger_write(hws->ctx->logger, LOG_BANDWIDTH_CALCS,
629 "PTE_META_URGENCY_WATERMARK_D calculated =%d\n"
630 "HW register value = 0x%x\n",
631 watermarks->d.pte_meta_urgent_ns, prog_wm_value);
632
633
634 if (REG(DCHUBBUB_ARB_ALLOW_SR_ENTER_WATERMARK_D)) {
635 prog_wm_value = convert_and_clamp(
636 watermarks->d.cstate_pstate.cstate_enter_plus_exit_ns,
637 refclk_mhz, 0x1fffff);
638 REG_WRITE(DCHUBBUB_ARB_ALLOW_SR_ENTER_WATERMARK_D, prog_wm_value);
639 dm_logger_write(hws->ctx->logger, LOG_BANDWIDTH_CALCS,
640 "SR_ENTER_WATERMARK_D calculated =%d\n"
641 "HW register value = 0x%x\n",
642 watermarks->d.cstate_pstate.cstate_enter_plus_exit_ns, prog_wm_value);
643
644
645 prog_wm_value = convert_and_clamp(
646 watermarks->d.cstate_pstate.cstate_exit_ns,
647 refclk_mhz, 0x1fffff);
648 REG_WRITE(DCHUBBUB_ARB_ALLOW_SR_EXIT_WATERMARK_D, prog_wm_value);
649 dm_logger_write(hws->ctx->logger, LOG_BANDWIDTH_CALCS,
650 "SR_EXIT_WATERMARK_D calculated =%d\n"
651 "HW register value = 0x%x\n",
652 watermarks->d.cstate_pstate.cstate_exit_ns, prog_wm_value);
653 }
654
655
656 prog_wm_value = convert_and_clamp(
657 watermarks->d.cstate_pstate.pstate_change_ns,
658 refclk_mhz, 0x1fffff);
659 REG_WRITE(DCHUBBUB_ARB_ALLOW_DRAM_CLK_CHANGE_WATERMARK_D, prog_wm_value);
660 dm_logger_write(hws->ctx->logger, LOG_BANDWIDTH_CALCS,
661 "DRAM_CLK_CHANGE_WATERMARK_D calculated =%d\n"
662 "HW register value = 0x%x\n\n",
663 watermarks->d.cstate_pstate.pstate_change_ns, prog_wm_value);
664
665 REG_UPDATE(DCHUBBUB_ARB_WATERMARK_CHANGE_CNTL,
666 DCHUBBUB_ARB_WATERMARK_CHANGE_REQUEST, 1);
667
668 REG_UPDATE(DCHUBBUB_ARB_SAT_LEVEL,
669 DCHUBBUB_ARB_SAT_LEVEL, 60 * refclk_mhz);
670 REG_UPDATE(DCHUBBUB_ARB_DF_REQ_OUTSTAND,
671 DCHUBBUB_ARB_MIN_REQ_OUTSTAND, 68);
672
673 REG_UPDATE_2(DCHUBBUB_ARB_DRAM_STATE_CNTL,
674 DCHUBBUB_ARB_ALLOW_SELF_REFRESH_FORCE_VALUE, 0,
675 DCHUBBUB_ARB_ALLOW_SELF_REFRESH_FORCE_ENABLE, force_en);
676
677 #if 0
678 REG_UPDATE_2(DCHUBBUB_ARB_WATERMARK_CHANGE_CNTL,
679 DCHUBBUB_ARB_WATERMARK_CHANGE_DONE_INTERRUPT_DISABLE, 1,
680 DCHUBBUB_ARB_WATERMARK_CHANGE_REQUEST, 1);
681 #endif
682 }
683
684
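/* Program the SDPIF frame buffer / AGP apertures according to the frame
 * buffer mode (ZFB only, mixed ZFB + local, or local only). Aperture
 * addresses are programmed in 4 MB units (shifted right by 22).
 */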
685 static void dcn10_update_dchub(
686 struct dce_hwseq *hws,
687 struct dchub_init_data *dh_data)
688 {
689 /* TODO: port code from dal2 */
690 switch (dh_data->fb_mode) {
691 case FRAME_BUFFER_MODE_ZFB_ONLY:
692                 /* For the ZFB case, DCHUB FB BASE and TOP need to be programmed upside down (TOP below BASE) to indicate ZFB mode */
693 REG_UPDATE(DCHUBBUB_SDPIF_FB_TOP,
694 SDPIF_FB_TOP, 0);
695
696 REG_UPDATE(DCHUBBUB_SDPIF_FB_BASE,
697 SDPIF_FB_BASE, 0x0FFFF);
698
699 REG_UPDATE(DCHUBBUB_SDPIF_AGP_BASE,
700 SDPIF_AGP_BASE, dh_data->zfb_phys_addr_base >> 22);
701
702 REG_UPDATE(DCHUBBUB_SDPIF_AGP_BOT,
703 SDPIF_AGP_BOT, dh_data->zfb_mc_base_addr >> 22);
704
705 REG_UPDATE(DCHUBBUB_SDPIF_AGP_TOP,
706 SDPIF_AGP_TOP, (dh_data->zfb_mc_base_addr +
707 dh_data->zfb_size_in_byte - 1) >> 22);
708 break;
709 case FRAME_BUFFER_MODE_MIXED_ZFB_AND_LOCAL:
710 /*Should not touch FB LOCATION (done by VBIOS on AsicInit table)*/
711
712 REG_UPDATE(DCHUBBUB_SDPIF_AGP_BASE,
713 SDPIF_AGP_BASE, dh_data->zfb_phys_addr_base >> 22);
714
715 REG_UPDATE(DCHUBBUB_SDPIF_AGP_BOT,
716 SDPIF_AGP_BOT, dh_data->zfb_mc_base_addr >> 22);
717
718 REG_UPDATE(DCHUBBUB_SDPIF_AGP_TOP,
719 SDPIF_AGP_TOP, (dh_data->zfb_mc_base_addr +
720 dh_data->zfb_size_in_byte - 1) >> 22);
721 break;
722 case FRAME_BUFFER_MODE_LOCAL_ONLY:
723 /*Should not touch FB LOCATION (done by VBIOS on AsicInit table)*/
724 REG_UPDATE(DCHUBBUB_SDPIF_AGP_BASE,
725 SDPIF_AGP_BASE, 0);
726
727 REG_UPDATE(DCHUBBUB_SDPIF_AGP_BOT,
728 SDPIF_AGP_BOT, 0X03FFFF);
729
730 REG_UPDATE(DCHUBBUB_SDPIF_AGP_TOP,
731 SDPIF_AGP_TOP, 0);
732 break;
733 default:
734 break;
735 }
736
737 dh_data->dchub_initialzied = true;
738 dh_data->dchub_info_valid = false;
739 }
740
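/* Same as dpp_pg_control() above, but for HUBP0-3, which live in power
 * domains 0/2/4/6.
 */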
741 static void hubp_pg_control(
742 struct dce_hwseq *hws,
743 unsigned int hubp_inst,
744 bool power_on)
745 {
746 uint32_t power_gate = power_on ? 0 : 1;
747 uint32_t pwr_status = power_on ? 0 : 2;
748
749 if (hws->ctx->dc->debug.disable_hubp_power_gate)
750 return;
751
752 switch (hubp_inst) {
753 case 0: /* DCHUBP0 */
754 REG_UPDATE(DOMAIN0_PG_CONFIG,
755 DOMAIN0_POWER_GATE, power_gate);
756
757 REG_WAIT(DOMAIN0_PG_STATUS,
758 DOMAIN0_PGFSM_PWR_STATUS, pwr_status,
759 1, 1000);
760 break;
761 case 1: /* DCHUBP1 */
762 REG_UPDATE(DOMAIN2_PG_CONFIG,
763 DOMAIN2_POWER_GATE, power_gate);
764
765 REG_WAIT(DOMAIN2_PG_STATUS,
766 DOMAIN2_PGFSM_PWR_STATUS, pwr_status,
767 1, 1000);
768 break;
769 case 2: /* DCHUBP2 */
770 REG_UPDATE(DOMAIN4_PG_CONFIG,
771 DOMAIN4_POWER_GATE, power_gate);
772
773 REG_WAIT(DOMAIN4_PG_STATUS,
774 DOMAIN4_PGFSM_PWR_STATUS, pwr_status,
775 1, 1000);
776 break;
777 case 3: /* DCHUBP3 */
778 REG_UPDATE(DOMAIN6_PG_CONFIG,
779 DOMAIN6_POWER_GATE, power_gate);
780
781 REG_WAIT(DOMAIN6_PG_STATUS,
782 DOMAIN6_PGFSM_PWR_STATUS, pwr_status,
783 1, 1000);
784 break;
785 default:
786 BREAK_TO_DEBUGGER();
787 break;
788 }
789 }
790
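/* Un-gate the DPP and HUBP power domains for a pipe. IP_REQUEST_EN is
 * raised around the PG register writes and dropped again afterwards.
 */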
791 static void power_on_plane(
792 struct dce_hwseq *hws,
793 int plane_id)
794 {
795 if (REG(DC_IP_REQUEST_CNTL)) {
796 REG_SET(DC_IP_REQUEST_CNTL, 0,
797 IP_REQUEST_EN, 1);
798 dpp_pg_control(hws, plane_id, true);
799 hubp_pg_control(hws, plane_id, true);
800 REG_SET(DC_IP_REQUEST_CNTL, 0,
801 IP_REQUEST_EN, 0);
802 dm_logger_write(hws->ctx->logger, LOG_DEBUG,
803 "Un-gated front end for pipe %d\n", plane_id);
804 }
805 }
806
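/* DEGVIDCN10-253 workaround helpers: apply_* (skipped when stutter is
 * disabled) un-gates HUBP0 and un-blanks it; undo_* blanks HUBP0 (unless
 * it is already power gated) and gates it again.
 */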
807 static void undo_DEGVIDCN10_253_wa(struct dc *dc)
808 {
809 struct dce_hwseq *hws = dc->hwseq;
810 struct hubp *hubp = dc->res_pool->hubps[0];
811 int pwr_status = 0;
812
813 REG_GET(DOMAIN0_PG_STATUS, DOMAIN0_PGFSM_PWR_STATUS, &pwr_status);
814 /* Don't need to blank if hubp is power gated*/
815 if (pwr_status == 2)
816 return;
817
818 hubp->funcs->set_blank(hubp, true);
819
820 REG_SET(DC_IP_REQUEST_CNTL, 0,
821 IP_REQUEST_EN, 1);
822
823 hubp_pg_control(hws, 0, false);
824 REG_SET(DC_IP_REQUEST_CNTL, 0,
825 IP_REQUEST_EN, 0);
826 }
827
828 static void apply_DEGVIDCN10_253_wa(struct dc *dc)
829 {
830 struct dce_hwseq *hws = dc->hwseq;
831 struct hubp *hubp = dc->res_pool->hubps[0];
832
833 if (dc->debug.disable_stutter)
834 return;
835
836 REG_SET(DC_IP_REQUEST_CNTL, 0,
837 IP_REQUEST_EN, 1);
838
839 hubp_pg_control(hws, 0, true);
840 REG_SET(DC_IP_REQUEST_CNTL, 0,
841 IP_REQUEST_EN, 0);
842
843 hubp->funcs->set_hubp_blank_en(hubp, false);
844 }
845
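/* Let the VBIOS command table put the display HW into its golden state:
 * one global ASIC_PIPE_INIT call followed by ASIC_PIPE_DISABLE per pipe.
 */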
846 static void bios_golden_init(struct dc *dc)
847 {
848 struct dc_bios *bp = dc->ctx->dc_bios;
849 int i;
850
851 /* initialize dcn global */
852 bp->funcs->enable_disp_power_gating(bp,
853 CONTROLLER_ID_D0, ASIC_PIPE_INIT);
854
855 for (i = 0; i < dc->res_pool->pipe_count; i++) {
856 /* initialize dcn per pipe */
857 bp->funcs->enable_disp_power_gating(bp,
858 CONTROLLER_ID_D0 + i, ASIC_PIPE_DISABLE);
859 }
860 }
861
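/* One-time HW init. On FPGA (Maximus) only the global timer, memory power
 * and clock gating are set up. On a real ASIC this additionally runs the
 * VBIOS golden init, disables VGA, initializes the link encoders, resets
 * every DPP/MPC connection and blanks every OTG, then initializes audio
 * and ABM.
 */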
862 static void dcn10_init_hw(struct dc *dc)
863 {
864 int i;
865 struct abm *abm = dc->res_pool->abm;
866 struct dce_hwseq *hws = dc->hwseq;
867
868 if (IS_FPGA_MAXIMUS_DC(dc->ctx->dce_environment)) {
869 REG_WRITE(REFCLK_CNTL, 0);
870 REG_UPDATE(DCHUBBUB_GLOBAL_TIMER_CNTL, DCHUBBUB_GLOBAL_TIMER_ENABLE, 1);
871 REG_WRITE(DIO_MEM_PWR_CTRL, 0);
872
873 if (!dc->debug.disable_clock_gate) {
874 /* enable all DCN clock gating */
875 REG_WRITE(DCCG_GATE_DISABLE_CNTL, 0);
876
877 REG_WRITE(DCCG_GATE_DISABLE_CNTL2, 0);
878
879 REG_UPDATE(DCFCLK_CNTL, DCFCLK_GATE_DIS, 0);
880 }
881
882 enable_power_gating_plane(dc->hwseq, true);
883 return;
884 }
885         /* End of FPGA path. Below is for a real ASIC. */
886
887 bios_golden_init(dc);
888
889 disable_vga(dc->hwseq);
890
891 for (i = 0; i < dc->link_count; i++) {
892 /* Power up AND update implementation according to the
893 * required signal (which may be different from the
894 * default signal on connector).
895 */
896 struct dc_link *link = dc->links[i];
897
898 link->link_enc->funcs->hw_init(link->link_enc);
899 }
900
901 for (i = 0; i < dc->res_pool->pipe_count; i++) {
902 struct dpp *dpp = dc->res_pool->dpps[i];
903 struct timing_generator *tg = dc->res_pool->timing_generators[i];
904
905 dpp->funcs->dpp_reset(dpp);
906 dc->res_pool->mpc->funcs->remove(
907 dc->res_pool->mpc, &(dc->res_pool->opps[i]->mpc_tree),
908 dc->res_pool->opps[i]->inst, i);
909
910 /* Blank controller using driver code instead of
911 * command table.
912 */
913 tg->funcs->set_blank(tg, true);
914 hwss_wait_for_blank_complete(tg);
915 }
916
917 for (i = 0; i < dc->res_pool->audio_count; i++) {
918 struct audio *audio = dc->res_pool->audios[i];
919
920 audio->funcs->hw_init(audio);
921 }
922
923 if (abm != NULL) {
924 abm->funcs->init_backlight(abm);
925 abm->funcs->abm_init(abm);
926 }
927
928         /* Power AFMT HDMI memory. TODO: may move to output enable/disable to save power */
929 REG_WRITE(DIO_MEM_PWR_CTRL, 0);
930
931 if (!dc->debug.disable_clock_gate) {
932 /* enable all DCN clock gating */
933 REG_WRITE(DCCG_GATE_DISABLE_CNTL, 0);
934
935 REG_WRITE(DCCG_GATE_DISABLE_CNTL2, 0);
936
937 REG_UPDATE(DCFCLK_CNTL, DCFCLK_GATE_DIS, 0);
938 }
939
940 enable_power_gating_plane(dc->hwseq, true);
941 }
942
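/* Bring up the back end for a stream: enable the OPTC clock, program the
 * pixel clock, OTG timing and stereo polarity, set the blank color, blank
 * the OTG and finally enable the CRTC. Only the parent (top) pipe programs
 * the back end; child pipes return early.
 */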
943 static enum dc_status dcn10_prog_pixclk_crtc_otg(
944 struct pipe_ctx *pipe_ctx,
945 struct dc_state *context,
946 struct dc *dc)
947 {
948 struct dc_stream_state *stream = pipe_ctx->stream;
949 enum dc_color_space color_space;
950 struct tg_color black_color = {0};
951         bool enableStereo    = stream->timing.timing_3d_format
952                         != TIMING_3D_FORMAT_NONE;
953 bool rightEyePolarity = stream->timing.flags.RIGHT_EYE_3D_POLARITY;
954
955
956         /* The caller's loop visits pipe0 (the parent pipe) first, which
957          * sets up the back end. Child pipes share the back end with
958          * pipe0, so no programming is needed for them.
959          */
960 if (pipe_ctx->top_pipe != NULL)
961 return DC_OK;
962
963 /* TODO check if timing_changed, disable stream if timing changed */
964
965         /* The HW programming guide assumes the display was already
966          * disabled by the unplug sequence and that the OTG is stopped.
967          */
968 pipe_ctx->stream_res.tg->funcs->enable_optc_clock(pipe_ctx->stream_res.tg, true);
969
970 if (false == pipe_ctx->clock_source->funcs->program_pix_clk(
971 pipe_ctx->clock_source,
972 &pipe_ctx->stream_res.pix_clk_params,
973 &pipe_ctx->pll_settings)) {
974 BREAK_TO_DEBUGGER();
975 return DC_ERROR_UNEXPECTED;
976 }
977 pipe_ctx->stream_res.tg->dlg_otg_param.vready_offset = pipe_ctx->pipe_dlg_param.vready_offset;
978 pipe_ctx->stream_res.tg->dlg_otg_param.vstartup_start = pipe_ctx->pipe_dlg_param.vstartup_start;
979 pipe_ctx->stream_res.tg->dlg_otg_param.vupdate_offset = pipe_ctx->pipe_dlg_param.vupdate_offset;
980 pipe_ctx->stream_res.tg->dlg_otg_param.vupdate_width = pipe_ctx->pipe_dlg_param.vupdate_width;
981
982 pipe_ctx->stream_res.tg->dlg_otg_param.signal = pipe_ctx->stream->signal;
983
984 pipe_ctx->stream_res.tg->funcs->program_timing(
985 pipe_ctx->stream_res.tg,
986 &stream->timing,
987 true);
988
989 pipe_ctx->stream_res.opp->funcs->opp_set_stereo_polarity(
990 pipe_ctx->stream_res.opp,
991 enableStereo,
992 rightEyePolarity);
993
994 #if 0 /* move to after enable_crtc */
995 /* TODO: OPP FMT, ABM. etc. should be done here. */
996 /* or FPGA now. instance 0 only. TODO: move to opp.c */
997
998 inst_offset = reg_offsets[pipe_ctx->stream_res.tg->inst].fmt;
999
1000 pipe_ctx->stream_res.opp->funcs->opp_program_fmt(
1001 pipe_ctx->stream_res.opp,
1002 &stream->bit_depth_params,
1003 &stream->clamping);
1004 #endif
1005 /* program otg blank color */
1006 color_space = stream->output_color_space;
1007 color_space_to_black_color(dc, color_space, &black_color);
1008 pipe_ctx->stream_res.tg->funcs->set_blank_color(
1009 pipe_ctx->stream_res.tg,
1010 &black_color);
1011
1012 pipe_ctx->stream_res.tg->funcs->set_blank(pipe_ctx->stream_res.tg, true);
1013 hwss_wait_for_blank_complete(pipe_ctx->stream_res.tg);
1014
1015 /* VTG is within DCHUB command block. DCFCLK is always on */
1016 if (false == pipe_ctx->stream_res.tg->funcs->enable_crtc(pipe_ctx->stream_res.tg)) {
1017 BREAK_TO_DEBUGGER();
1018 return DC_ERROR_UNEXPECTED;
1019 }
1020
1021 /* TODO program crtc source select for non-virtual signal*/
1022 /* TODO program FMT */
1023 /* TODO setup link_enc */
1024 /* TODO set stream attributes */
1025 /* TODO program audio */
1026 /* TODO enable stream if timing changed */
1027 /* TODO unblank stream if DP */
1028
1029 return DC_OK;
1030 }
1031
1032 static void reset_back_end_for_pipe(
1033 struct dc *dc,
1034 struct pipe_ctx *pipe_ctx,
1035 struct dc_state *context)
1036 {
1037 int i;
1038
1039 if (pipe_ctx->stream_res.stream_enc == NULL) {
1040 pipe_ctx->stream = NULL;
1041 return;
1042 }
1043
1044 if (!IS_FPGA_MAXIMUS_DC(dc->ctx->dce_environment)) {
1045 /* DPMS may already disable */
1046 if (!pipe_ctx->stream->dpms_off)
1047 core_link_disable_stream(pipe_ctx, FREE_ACQUIRED_RESOURCE);
1048 }
1049
1050         /* The caller's loop resets the parent pipe (pipe0) last. The back
1051          * end is shared by all pipes and is only disabled when the parent
1052          * pipe is disabled.
1053          */
1054 if (pipe_ctx->top_pipe == NULL) {
1055 pipe_ctx->stream_res.tg->funcs->disable_crtc(pipe_ctx->stream_res.tg);
1056
1057 pipe_ctx->stream_res.tg->funcs->enable_optc_clock(pipe_ctx->stream_res.tg, false);
1058 }
1059
1060 for (i = 0; i < dc->res_pool->pipe_count; i++)
1061 if (&dc->current_state->res_ctx.pipe_ctx[i] == pipe_ctx)
1062 break;
1063
1064 if (i == dc->res_pool->pipe_count)
1065 return;
1066
1067 pipe_ctx->stream = NULL;
1068 dm_logger_write(dc->ctx->logger, LOG_DEBUG,
1069 "Reset back end for pipe %d, tg:%d\n",
1070 pipe_ctx->pipe_idx, pipe_ctx->stream_res.tg->inst);
1071 }
1072
1073 /* trigger HW to start disconnect plane from stream on the next vsync */
1074 static void plane_atomic_disconnect(struct dc *dc,
1075 int fe_idx)
1076 {
1077 struct hubp *hubp = dc->res_pool->hubps[fe_idx];
1078 struct mpc *mpc = dc->res_pool->mpc;
1079 int opp_id, z_idx;
1080 int mpcc_id = -1;
1081
1082 /* look at tree rather than mi here to know if we already reset */
1083 for (opp_id = 0; opp_id < dc->res_pool->pipe_count; opp_id++) {
1084 struct output_pixel_processor *opp = dc->res_pool->opps[opp_id];
1085
1086 for (z_idx = 0; z_idx < opp->mpc_tree.num_pipes; z_idx++) {
1087 if (opp->mpc_tree.dpp[z_idx] == fe_idx) {
1088 mpcc_id = opp->mpc_tree.mpcc[z_idx];
1089 break;
1090 }
1091 }
1092 if (mpcc_id != -1)
1093 break;
1094 }
1095 /*Already reset*/
1096 if (opp_id == dc->res_pool->pipe_count)
1097 return;
1098
1099 if (dc->debug.sanity_checks)
1100 verify_allow_pstate_change_high(dc->hwseq);
1101 hubp->funcs->dcc_control(hubp, false, false);
1102 if (dc->debug.sanity_checks)
1103 verify_allow_pstate_change_high(dc->hwseq);
1104
1105 mpc->funcs->remove(mpc, &(dc->res_pool->opps[opp_id]->mpc_tree),
1106 dc->res_pool->opps[opp_id]->inst, fe_idx);
1107 }
1108
1109 /* Disable the HW used by the plane.
1110  * Note: cannot disable until the disconnect is complete. */
1111 static void plane_atomic_disable(struct dc *dc,
1112 int fe_idx)
1113 {
1114 struct dce_hwseq *hws = dc->hwseq;
1115 struct hubp *hubp = dc->res_pool->hubps[fe_idx];
1116 struct mpc *mpc = dc->res_pool->mpc;
1117 int opp_id = hubp->opp_id;
1118
1119 if (opp_id == 0xf)
1120 return;
1121
1122 mpc->funcs->wait_for_idle(mpc, hubp->mpcc_id);
1123 dc->res_pool->opps[hubp->opp_id]->mpcc_disconnect_pending[hubp->mpcc_id] = false;
1124 /*dm_logger_write(dc->ctx->logger, LOG_ERROR,
1125 "[debug_mpo: atomic disable finished on mpcc %d]\n",
1126 fe_idx);*/
1127
1128 hubp->funcs->set_blank(hubp, true);
1129
1130 if (dc->debug.sanity_checks)
1131 verify_allow_pstate_change_high(dc->hwseq);
1132
1133 REG_UPDATE(HUBP_CLK_CNTL[fe_idx],
1134 HUBP_CLOCK_ENABLE, 0);
1135 REG_UPDATE(DPP_CONTROL[fe_idx],
1136 DPP_CLOCK_ENABLE, 0);
1137
1138 if (dc->res_pool->opps[opp_id]->mpc_tree.num_pipes == 0)
1139 REG_UPDATE(OPP_PIPE_CONTROL[opp_id],
1140 OPP_PIPE_CLOCK_EN, 0);
1141
1142 if (dc->debug.sanity_checks)
1143 verify_allow_pstate_change_high(dc->hwseq);
1144 }
1145
1146 /*
1147  * Kill power to the plane HW.
1148  * Note: cannot power down until the plane is disabled.
1149  */
1150 static void plane_atomic_power_down(struct dc *dc, int fe_idx)
1151 {
1152 struct dce_hwseq *hws = dc->hwseq;
1153 struct dpp *dpp = dc->res_pool->dpps[fe_idx];
1154
1155 if (REG(DC_IP_REQUEST_CNTL)) {
1156 REG_SET(DC_IP_REQUEST_CNTL, 0,
1157 IP_REQUEST_EN, 1);
1158 dpp_pg_control(hws, fe_idx, false);
1159 hubp_pg_control(hws, fe_idx, false);
1160 dpp->funcs->dpp_reset(dpp);
1161 REG_SET(DC_IP_REQUEST_CNTL, 0,
1162 IP_REQUEST_EN, 0);
1163 dm_logger_write(dc->ctx->logger, LOG_DEBUG,
1164 "Power gated front end %d\n", fe_idx);
1165
1166 if (dc->debug.sanity_checks)
1167 verify_allow_pstate_change_high(dc->hwseq);
1168 }
1169 }
1170
1171
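/* Tear down a front end: disconnect the plane from its MPC tree under TG
 * lock, wait for the VUPDATE no-lock event, then disable the plane HW.
 */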
1172 static void reset_front_end(
1173 struct dc *dc,
1174 int fe_idx)
1175 {
1176 struct dce_hwseq *hws = dc->hwseq;
1177 struct timing_generator *tg;
1178 int opp_id = dc->res_pool->hubps[fe_idx]->opp_id;
1179
1180 /*Already reset*/
1181 if (opp_id == 0xf)
1182 return;
1183
1184 tg = dc->res_pool->timing_generators[opp_id];
1185 tg->funcs->lock(tg);
1186
1187 plane_atomic_disconnect(dc, fe_idx);
1188
1189 REG_UPDATE(OTG_GLOBAL_SYNC_STATUS[tg->inst], VUPDATE_NO_LOCK_EVENT_CLEAR, 1);
1190 tg->funcs->unlock(tg);
1191
1192 if (dc->debug.sanity_checks)
1193 verify_allow_pstate_change_high(hws);
1194
1195 if (tg->ctx->dce_environment != DCE_ENV_FPGA_MAXIMUS)
1196 REG_WAIT(OTG_GLOBAL_SYNC_STATUS[tg->inst],
1197 VUPDATE_NO_LOCK_EVENT_OCCURRED, 1,
1198 1, 100000);
1199
1200 plane_atomic_disable(dc, fe_idx);
1201
1202 dm_logger_write(dc->ctx->logger, LOG_DC,
1203 "Reset front end %d\n",
1204 fe_idx);
1205 }
1206
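/* Reset the front end, then power gate its DPP and HUBP domains and reset
 * the DPP.
 */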
1207 static void dcn10_power_down_fe(struct dc *dc, int fe_idx)
1208 {
1209 struct dce_hwseq *hws = dc->hwseq;
1210 struct dpp *dpp = dc->res_pool->dpps[fe_idx];
1211
1212 reset_front_end(dc, fe_idx);
1213
1214 REG_SET(DC_IP_REQUEST_CNTL, 0,
1215 IP_REQUEST_EN, 1);
1216 dpp_pg_control(hws, fe_idx, false);
1217 hubp_pg_control(hws, fe_idx, false);
1218 dpp->funcs->dpp_reset(dpp);
1219 REG_SET(DC_IP_REQUEST_CNTL, 0,
1220 IP_REQUEST_EN, 0);
1221 dm_logger_write(dc->ctx->logger, LOG_DEBUG,
1222 "Power gated front end %d\n", fe_idx);
1223
1224 if (dc->debug.sanity_checks)
1225 verify_allow_pstate_change_high(dc->hwseq);
1226 }
1227
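/* Reset HW state that the new context no longer needs: lock every active
 * TG, disconnect planes that are gone or need reprogramming, unlock, then
 * disable/power down those planes and finally reset the unused back ends,
 * powering down their clock sources.
 */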
1228 static void reset_hw_ctx_wrap(
1229 struct dc *dc,
1230 struct dc_state *context)
1231 {
1232 int i;
1233
1234 /* Reset Front End*/
1235 /* Lock*/
1236 for (i = 0; i < dc->res_pool->pipe_count; i++) {
1237 struct pipe_ctx *cur_pipe_ctx = &dc->current_state->res_ctx.pipe_ctx[i];
1238 struct timing_generator *tg = cur_pipe_ctx->stream_res.tg;
1239
1240 if (cur_pipe_ctx->stream)
1241 tg->funcs->lock(tg);
1242 }
1243 /* Disconnect*/
1244 for (i = dc->res_pool->pipe_count - 1; i >= 0 ; i--) {
1245 struct pipe_ctx *pipe_ctx_old =
1246 &dc->current_state->res_ctx.pipe_ctx[i];
1247 struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
1248
1249 if (!pipe_ctx->stream ||
1250 !pipe_ctx->plane_state ||
1251 pipe_need_reprogram(pipe_ctx_old, pipe_ctx)) {
1252
1253 plane_atomic_disconnect(dc, i);
1254 }
1255 }
1256 /* Unlock*/
1257 for (i = dc->res_pool->pipe_count - 1; i >= 0; i--) {
1258 struct pipe_ctx *cur_pipe_ctx = &dc->current_state->res_ctx.pipe_ctx[i];
1259 struct timing_generator *tg = cur_pipe_ctx->stream_res.tg;
1260
1261 if (cur_pipe_ctx->stream)
1262 tg->funcs->unlock(tg);
1263 }
1264
1265 /* Disable and Powerdown*/
1266 for (i = dc->res_pool->pipe_count - 1; i >= 0 ; i--) {
1267 struct pipe_ctx *pipe_ctx_old =
1268 &dc->current_state->res_ctx.pipe_ctx[i];
1269 struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
1270
1271 /*if (!pipe_ctx_old->stream)
1272 continue;*/
1273
1274 if (pipe_ctx->stream && pipe_ctx->plane_state
1275 && !pipe_need_reprogram(pipe_ctx_old, pipe_ctx))
1276 continue;
1277
1278 plane_atomic_disable(dc, i);
1279
1280 if (!pipe_ctx->stream || !pipe_ctx->plane_state)
1281 plane_atomic_power_down(dc, i);
1282 }
1283
1284 /* Reset Back End*/
1285 for (i = dc->res_pool->pipe_count - 1; i >= 0 ; i--) {
1286 struct pipe_ctx *pipe_ctx_old =
1287 &dc->current_state->res_ctx.pipe_ctx[i];
1288 struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
1289
1290 if (!pipe_ctx_old->stream)
1291 continue;
1292
1293 if (pipe_ctx_old->top_pipe)
1294 continue;
1295
1296 if (!pipe_ctx->stream ||
1297 pipe_need_reprogram(pipe_ctx_old, pipe_ctx)) {
1298 struct clock_source *old_clk = pipe_ctx_old->clock_source;
1299
1300 reset_back_end_for_pipe(dc, pipe_ctx_old, dc->current_state);
1301
1302 if (old_clk)
1303 old_clk->funcs->cs_power_down(old_clk);
1304 }
1305 }
1306
1307 }
1308
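/* For side-by-side / top-and-bottom stereo the secondary (split) pipe
 * scans out the right-eye surface: temporarily substitute the right-eye
 * address and return true so the caller can restore the original left-eye
 * address once the flip has been programmed.
 */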
1309 static bool patch_address_for_sbs_tb_stereo(
1310 struct pipe_ctx *pipe_ctx, PHYSICAL_ADDRESS_LOC *addr)
1311 {
1312 struct dc_plane_state *plane_state = pipe_ctx->plane_state;
1313 bool sec_split = pipe_ctx->top_pipe &&
1314 pipe_ctx->top_pipe->plane_state == pipe_ctx->plane_state;
1315 if (sec_split && plane_state->address.type == PLN_ADDR_TYPE_GRPH_STEREO &&
1316 (pipe_ctx->stream->timing.timing_3d_format ==
1317 TIMING_3D_FORMAT_SIDE_BY_SIDE ||
1318 pipe_ctx->stream->timing.timing_3d_format ==
1319 TIMING_3D_FORMAT_TOP_AND_BOTTOM)) {
1320 *addr = plane_state->address.grph_stereo.left_addr;
1321 plane_state->address.grph_stereo.left_addr =
1322 plane_state->address.grph_stereo.right_addr;
1323 return true;
1324 } else {
1325 if (pipe_ctx->stream->view_format != VIEW_3D_FORMAT_NONE &&
1326 plane_state->address.type != PLN_ADDR_TYPE_GRPH_STEREO) {
1327 plane_state->address.type = PLN_ADDR_TYPE_GRPH_STEREO;
1328 plane_state->address.grph_stereo.right_addr =
1329 plane_state->address.grph_stereo.left_addr;
1330 }
1331 }
1332 return false;
1333 }
1334
1335 static void toggle_watermark_change_req(struct dce_hwseq *hws)
1336 {
1337 uint32_t watermark_change_req;
1338
1339 REG_GET(DCHUBBUB_ARB_WATERMARK_CHANGE_CNTL,
1340 DCHUBBUB_ARB_WATERMARK_CHANGE_REQUEST, &watermark_change_req);
1341
1342 if (watermark_change_req)
1343 watermark_change_req = 0;
1344 else
1345 watermark_change_req = 1;
1346
1347 REG_UPDATE(DCHUBBUB_ARB_WATERMARK_CHANGE_CNTL,
1348 DCHUBBUB_ARB_WATERMARK_CHANGE_REQUEST, watermark_change_req);
1349 }
1350
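/* Program a surface flip to the plane's current address, temporarily
 * patching in the right-eye address for SBS/TAB stereo on the secondary
 * pipe (see patch_address_for_sbs_tb_stereo() above).
 */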
1351 static void dcn10_update_plane_addr(const struct dc *dc, struct pipe_ctx *pipe_ctx)
1352 {
1353 bool addr_patched = false;
1354 PHYSICAL_ADDRESS_LOC addr;
1355 struct dc_plane_state *plane_state = pipe_ctx->plane_state;
1356
1357 if (plane_state == NULL)
1358 return;
1359 addr_patched = patch_address_for_sbs_tb_stereo(pipe_ctx, &addr);
1360 pipe_ctx->plane_res.hubp->funcs->hubp_program_surface_flip_and_addr(
1361 pipe_ctx->plane_res.hubp,
1362 &plane_state->address,
1363 plane_state->flip_immediate);
1364 plane_state->status.requested_address = plane_state->address;
1365 if (addr_patched)
1366 pipe_ctx->plane_state->address.grph_stereo.left_addr = addr;
1367 }
1368
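/* Program the DPP input (degamma) stage from the plane's input transfer
 * function: predefined sRGB and BT.709 use the fixed HW curves, linear and
 * bypass disable degamma, and anything else (PQ, distributed points) is
 * reported as unsupported by returning false.
 */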
1369 static bool dcn10_set_input_transfer_func(
1370 struct pipe_ctx *pipe_ctx, const struct dc_plane_state *plane_state)
1371 {
1372 struct dpp *dpp_base = pipe_ctx->plane_res.dpp;
1373 const struct dc_transfer_func *tf = NULL;
1374 bool result = true;
1375
1376 if (dpp_base == NULL)
1377 return false;
1378
1379 if (plane_state->in_transfer_func)
1380 tf = plane_state->in_transfer_func;
1381
1382 if (plane_state->gamma_correction && dce_use_lut(plane_state))
1383 dpp_base->funcs->ipp_program_input_lut(dpp_base,
1384 plane_state->gamma_correction);
1385
1386 if (tf == NULL)
1387 dpp_base->funcs->ipp_set_degamma(dpp_base, IPP_DEGAMMA_MODE_BYPASS);
1388 else if (tf->type == TF_TYPE_PREDEFINED) {
1389 switch (tf->tf) {
1390 case TRANSFER_FUNCTION_SRGB:
1391 dpp_base->funcs->ipp_set_degamma(dpp_base,
1392 IPP_DEGAMMA_MODE_HW_sRGB);
1393 break;
1394 case TRANSFER_FUNCTION_BT709:
1395 dpp_base->funcs->ipp_set_degamma(dpp_base,
1396 IPP_DEGAMMA_MODE_HW_xvYCC);
1397 break;
1398 case TRANSFER_FUNCTION_LINEAR:
1399 dpp_base->funcs->ipp_set_degamma(dpp_base,
1400 IPP_DEGAMMA_MODE_BYPASS);
1401 break;
1402 case TRANSFER_FUNCTION_PQ:
1403 result = false;
1404 break;
1405 default:
1406 result = false;
1407 break;
1408 }
1409 } else if (tf->type == TF_TYPE_BYPASS) {
1410 dpp_base->funcs->ipp_set_degamma(dpp_base, IPP_DEGAMMA_MODE_BYPASS);
1411 } else {
1412 /*TF_TYPE_DISTRIBUTED_POINTS*/
1413 result = false;
1414 }
1415
1416 return result;
1417 }
1418 /*modify the method to handle rgb for arr_points*/
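/* Convert the PWL end points and curve points into the DPP custom float
 * register format: a 6-bit exponent throughout, a 12-bit unsigned mantissa
 * for the start point, a 10-bit unsigned mantissa for the end point, and a
 * 12-bit signed mantissa for each curve point and delta.
 */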
1419 static bool convert_to_custom_float(
1420 struct pwl_result_data *rgb_resulted,
1421 struct curve_points *arr_points,
1422 uint32_t hw_points_num)
1423 {
1424 struct custom_float_format fmt;
1425
1426 struct pwl_result_data *rgb = rgb_resulted;
1427
1428 uint32_t i = 0;
1429
1430 fmt.exponenta_bits = 6;
1431 fmt.mantissa_bits = 12;
1432 fmt.sign = false;
1433
1434 if (!convert_to_custom_float_format(
1435 arr_points[0].x,
1436 &fmt,
1437 &arr_points[0].custom_float_x)) {
1438 BREAK_TO_DEBUGGER();
1439 return false;
1440 }
1441
1442 if (!convert_to_custom_float_format(
1443 arr_points[0].offset,
1444 &fmt,
1445 &arr_points[0].custom_float_offset)) {
1446 BREAK_TO_DEBUGGER();
1447 return false;
1448 }
1449
1450 if (!convert_to_custom_float_format(
1451 arr_points[0].slope,
1452 &fmt,
1453 &arr_points[0].custom_float_slope)) {
1454 BREAK_TO_DEBUGGER();
1455 return false;
1456 }
1457
1458 fmt.mantissa_bits = 10;
1459 fmt.sign = false;
1460
1461 if (!convert_to_custom_float_format(
1462 arr_points[1].x,
1463 &fmt,
1464 &arr_points[1].custom_float_x)) {
1465 BREAK_TO_DEBUGGER();
1466 return false;
1467 }
1468
1469 if (!convert_to_custom_float_format(
1470 arr_points[1].y,
1471 &fmt,
1472 &arr_points[1].custom_float_y)) {
1473 BREAK_TO_DEBUGGER();
1474 return false;
1475 }
1476
1477 if (!convert_to_custom_float_format(
1478 arr_points[1].slope,
1479 &fmt,
1480 &arr_points[1].custom_float_slope)) {
1481 BREAK_TO_DEBUGGER();
1482 return false;
1483 }
1484
1485 fmt.mantissa_bits = 12;
1486 fmt.sign = true;
1487
1488 while (i != hw_points_num) {
1489 if (!convert_to_custom_float_format(
1490 rgb->red,
1491 &fmt,
1492 &rgb->red_reg)) {
1493 BREAK_TO_DEBUGGER();
1494 return false;
1495 }
1496
1497 if (!convert_to_custom_float_format(
1498 rgb->green,
1499 &fmt,
1500 &rgb->green_reg)) {
1501 BREAK_TO_DEBUGGER();
1502 return false;
1503 }
1504
1505 if (!convert_to_custom_float_format(
1506 rgb->blue,
1507 &fmt,
1508 &rgb->blue_reg)) {
1509 BREAK_TO_DEBUGGER();
1510 return false;
1511 }
1512
1513 if (!convert_to_custom_float_format(
1514 rgb->delta_red,
1515 &fmt,
1516 &rgb->delta_red_reg)) {
1517 BREAK_TO_DEBUGGER();
1518 return false;
1519 }
1520
1521 if (!convert_to_custom_float_format(
1522 rgb->delta_green,
1523 &fmt,
1524 &rgb->delta_green_reg)) {
1525 BREAK_TO_DEBUGGER();
1526 return false;
1527 }
1528
1529 if (!convert_to_custom_float_format(
1530 rgb->delta_blue,
1531 &fmt,
1532 &rgb->delta_blue_reg)) {
1533 BREAK_TO_DEBUGGER();
1534 return false;
1535 }
1536
1537 ++rgb;
1538 ++i;
1539 }
1540
1541 return true;
1542 }
1543 #define MAX_REGIONS_NUMBER 34
1544 #define MAX_LOW_POINT 25
1545 #define NUMBER_SEGMENTS 32
1546
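/* Translate an output transfer function into the piecewise-linear form the
 * DPP regamma block expects: pick a segment distribution (32 regions from
 * 2^-25 to 2^7 for PQ, 10 regions from 2^-10 to 2^0 otherwise), sample the
 * curve into rgb_resulted, derive the end points and their slopes, compute
 * per-point deltas and convert everything to custom float.
 */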
1547 static bool dcn10_translate_regamma_to_hw_format(const struct dc_transfer_func
1548 *output_tf, struct pwl_params *regamma_params)
1549 {
1550 struct curve_points *arr_points;
1551 struct pwl_result_data *rgb_resulted;
1552 struct pwl_result_data *rgb;
1553 struct pwl_result_data *rgb_plus_1;
1554 struct fixed31_32 y_r;
1555 struct fixed31_32 y_g;
1556 struct fixed31_32 y_b;
1557 struct fixed31_32 y1_min;
1558 struct fixed31_32 y3_max;
1559
1560 int32_t segment_start, segment_end;
1561 int32_t i;
1562 uint32_t j, k, seg_distr[MAX_REGIONS_NUMBER], increment, start_index, hw_points;
1563
1564 if (output_tf == NULL || regamma_params == NULL ||
1565 output_tf->type == TF_TYPE_BYPASS)
1566 return false;
1567
1568 arr_points = regamma_params->arr_points;
1569 rgb_resulted = regamma_params->rgb_resulted;
1570 hw_points = 0;
1571
1572 memset(regamma_params, 0, sizeof(struct pwl_params));
1573 memset(seg_distr, 0, sizeof(seg_distr));
1574
1575 if (output_tf->tf == TRANSFER_FUNCTION_PQ) {
1576 /* 32 segments
1577 * segments are from 2^-25 to 2^7
1578 */
1579 for (i = 0; i < 32 ; i++)
1580 seg_distr[i] = 3;
1581
1582 segment_start = -25;
1583 segment_end = 7;
1584 } else {
1585 /* 10 segments
1586 * segment is from 2^-10 to 2^0
1587 * There are less than 256 points, for optimization
1588 */
1589 seg_distr[0] = 3;
1590 seg_distr[1] = 4;
1591 seg_distr[2] = 4;
1592 seg_distr[3] = 4;
1593 seg_distr[4] = 4;
1594 seg_distr[5] = 4;
1595 seg_distr[6] = 4;
1596 seg_distr[7] = 4;
1597 seg_distr[8] = 5;
1598 seg_distr[9] = 5;
1599
1600 segment_start = -10;
1601 segment_end = 0;
1602 }
1603
1604 for (i = segment_end - segment_start; i < MAX_REGIONS_NUMBER ; i++)
1605 seg_distr[i] = -1;
1606
1607 for (k = 0; k < MAX_REGIONS_NUMBER; k++) {
1608 if (seg_distr[k] != -1)
1609 hw_points += (1 << seg_distr[k]);
1610 }
1611
1612 j = 0;
1613 for (k = 0; k < (segment_end - segment_start); k++) {
1614 increment = NUMBER_SEGMENTS / (1 << seg_distr[k]);
1615 start_index = (segment_start + k + MAX_LOW_POINT) * NUMBER_SEGMENTS;
1616 for (i = start_index; i < start_index + NUMBER_SEGMENTS; i += increment) {
1617 if (j == hw_points - 1)
1618 break;
1619 rgb_resulted[j].red = output_tf->tf_pts.red[i];
1620 rgb_resulted[j].green = output_tf->tf_pts.green[i];
1621 rgb_resulted[j].blue = output_tf->tf_pts.blue[i];
1622 j++;
1623 }
1624 }
1625
1626 /* last point */
1627 start_index = (segment_end + MAX_LOW_POINT) * NUMBER_SEGMENTS;
1628 rgb_resulted[hw_points - 1].red =
1629 output_tf->tf_pts.red[start_index];
1630 rgb_resulted[hw_points - 1].green =
1631 output_tf->tf_pts.green[start_index];
1632 rgb_resulted[hw_points - 1].blue =
1633 output_tf->tf_pts.blue[start_index];
1634
1635 arr_points[0].x = dal_fixed31_32_pow(dal_fixed31_32_from_int(2),
1636 dal_fixed31_32_from_int(segment_start));
1637 arr_points[1].x = dal_fixed31_32_pow(dal_fixed31_32_from_int(2),
1638 dal_fixed31_32_from_int(segment_end));
1639 arr_points[2].x = dal_fixed31_32_pow(dal_fixed31_32_from_int(2),
1640 dal_fixed31_32_from_int(segment_end));
1641
1642 y_r = rgb_resulted[0].red;
1643 y_g = rgb_resulted[0].green;
1644 y_b = rgb_resulted[0].blue;
1645
1646 y1_min = dal_fixed31_32_min(y_r, dal_fixed31_32_min(y_g, y_b));
1647
1648 arr_points[0].y = y1_min;
1649 arr_points[0].slope = dal_fixed31_32_div(
1650 arr_points[0].y,
1651 arr_points[0].x);
1652 y_r = rgb_resulted[hw_points - 1].red;
1653 y_g = rgb_resulted[hw_points - 1].green;
1654 y_b = rgb_resulted[hw_points - 1].blue;
1655
1656 /* see comment above, m_arrPoints[1].y should be the Y value for the
1657 * region end (m_numOfHwPoints), not last HW point(m_numOfHwPoints - 1)
1658 */
1659 y3_max = dal_fixed31_32_max(y_r, dal_fixed31_32_max(y_g, y_b));
1660
1661 arr_points[1].y = y3_max;
1662 arr_points[2].y = y3_max;
1663
1664 arr_points[1].slope = dal_fixed31_32_zero;
1665 arr_points[2].slope = dal_fixed31_32_zero;
1666
1667 if (output_tf->tf == TRANSFER_FUNCTION_PQ) {
1668 /* for PQ, we want to have a straight line from last HW X point,
1669 * and the slope to be such that we hit 1.0 at 10000 nits.
1670 */
1671 const struct fixed31_32 end_value =
1672 dal_fixed31_32_from_int(125);
1673
1674 arr_points[1].slope = dal_fixed31_32_div(
1675 dal_fixed31_32_sub(dal_fixed31_32_one, arr_points[1].y),
1676 dal_fixed31_32_sub(end_value, arr_points[1].x));
1677 arr_points[2].slope = dal_fixed31_32_div(
1678 dal_fixed31_32_sub(dal_fixed31_32_one, arr_points[1].y),
1679 dal_fixed31_32_sub(end_value, arr_points[1].x));
1680 }
1681
1682 regamma_params->hw_points_num = hw_points;
1683
1684 i = 1;
1685 for (k = 0; k < MAX_REGIONS_NUMBER && i < MAX_REGIONS_NUMBER; k++) {
1686 if (seg_distr[k] != -1) {
1687 regamma_params->arr_curve_points[k].segments_num =
1688 seg_distr[k];
1689 regamma_params->arr_curve_points[i].offset =
1690 regamma_params->arr_curve_points[k].
1691 offset + (1 << seg_distr[k]);
1692 }
1693 i++;
1694 }
1695
1696 if (seg_distr[k] != -1)
1697 regamma_params->arr_curve_points[k].segments_num =
1698 seg_distr[k];
1699
1700 rgb = rgb_resulted;
1701 rgb_plus_1 = rgb_resulted + 1;
1702
1703 i = 1;
1704
1705 while (i != hw_points + 1) {
1706 if (dal_fixed31_32_lt(rgb_plus_1->red, rgb->red))
1707 rgb_plus_1->red = rgb->red;
1708 if (dal_fixed31_32_lt(rgb_plus_1->green, rgb->green))
1709 rgb_plus_1->green = rgb->green;
1710 if (dal_fixed31_32_lt(rgb_plus_1->blue, rgb->blue))
1711 rgb_plus_1->blue = rgb->blue;
1712
1713 rgb->delta_red = dal_fixed31_32_sub(
1714 rgb_plus_1->red,
1715 rgb->red);
1716 rgb->delta_green = dal_fixed31_32_sub(
1717 rgb_plus_1->green,
1718 rgb->green);
1719 rgb->delta_blue = dal_fixed31_32_sub(
1720 rgb_plus_1->blue,
1721 rgb->blue);
1722
1723 ++rgb_plus_1;
1724 ++rgb;
1725 ++i;
1726 }
1727
1728 convert_to_custom_float(rgb_resulted, arr_points, hw_points);
1729
1730 return true;
1731 }
1732
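/* Program the DPP output (regamma) stage: predefined sRGB uses the fixed
 * HW curve; any other transfer function is translated to a PWL and
 * programmed in user mode, falling back to bypass if translation fails.
 */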
1733 static bool dcn10_set_output_transfer_func(
1734 struct pipe_ctx *pipe_ctx,
1735 const struct dc_stream_state *stream)
1736 {
1737 struct dpp *dpp = pipe_ctx->plane_res.dpp;
1738
1739 if (dpp == NULL)
1740 return false;
1741
1742 dpp->regamma_params.hw_points_num = GAMMA_HW_POINTS_NUM;
1743
1744 if (stream->out_transfer_func &&
1745 stream->out_transfer_func->type ==
1746 TF_TYPE_PREDEFINED &&
1747 stream->out_transfer_func->tf ==
1748 TRANSFER_FUNCTION_SRGB) {
1749 dpp->funcs->opp_set_regamma_mode(dpp, OPP_REGAMMA_SRGB);
1750 } else if (dcn10_translate_regamma_to_hw_format(
1751 stream->out_transfer_func, &dpp->regamma_params)) {
1752 dpp->funcs->opp_program_regamma_pwl(dpp, &dpp->regamma_params);
1753 dpp->funcs->opp_set_regamma_mode(dpp, OPP_REGAMMA_USER);
1754 } else {
1755 dpp->funcs->opp_set_regamma_mode(dpp, OPP_REGAMMA_BYPASS);
1756 }
1757
1758 return true;
1759 }
1760
1761 static void dcn10_pipe_control_lock(
1762 struct dc *dc,
1763 struct pipe_ctx *pipe,
1764 bool lock)
1765 {
1766 struct hubp *hubp = NULL;
1767 hubp = dc->res_pool->hubps[pipe->pipe_idx];
1768         /* Use the TG master update lock to lock everything on the TG,
1769          * therefore only the top pipe needs to lock.
1770          */
1771 if (pipe->top_pipe)
1772 return;
1773
1774 if (dc->debug.sanity_checks)
1775 verify_allow_pstate_change_high(dc->hwseq);
1776
1777 if (lock)
1778 pipe->stream_res.tg->funcs->lock(pipe->stream_res.tg);
1779 else
1780 pipe->stream_res.tg->funcs->unlock(pipe->stream_res.tg);
1781
1782 if (dc->debug.sanity_checks)
1783 verify_allow_pstate_change_high(dc->hwseq);
1784 }
1785
1786 static bool wait_for_reset_trigger_to_occur(
1787 struct dc_context *dc_ctx,
1788 struct timing_generator *tg)
1789 {
1790 bool rc = false;
1791
1792 /* To avoid endless loop we wait at most
1793 * frames_to_wait_on_triggered_reset frames for the reset to occur. */
1794 const uint32_t frames_to_wait_on_triggered_reset = 10;
1795 int i;
1796
1797 for (i = 0; i < frames_to_wait_on_triggered_reset; i++) {
1798
1799 if (!tg->funcs->is_counter_moving(tg)) {
1800 DC_ERROR("TG counter is not moving!\n");
1801 break;
1802 }
1803
1804 if (tg->funcs->did_triggered_reset_occur(tg)) {
1805 rc = true;
1806 /* usually occurs at i=1 */
1807 DC_SYNC_INFO("GSL: reset occurred at wait count: %d\n",
1808 i);
1809 break;
1810 }
1811
1812 /* Wait for one frame. */
1813 tg->funcs->wait_for_state(tg, CRTC_STATE_VACTIVE);
1814 tg->funcs->wait_for_state(tg, CRTC_STATE_VBLANK);
1815 }
1816
1817 if (false == rc)
1818 DC_ERROR("GSL: Timeout on reset trigger!\n");
1819
1820 return rc;
1821 }
1822
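/* Synchronize a group of OTGs: arm a reset trigger on every other pipe in
 * the group against the first pipe's OTG, wait for the reset to occur,
 * then disarm the triggers.
 */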
1823 static void dcn10_enable_timing_synchronization(
1824 struct dc *dc,
1825 int group_index,
1826 int group_size,
1827 struct pipe_ctx *grouped_pipes[])
1828 {
1829 struct dc_context *dc_ctx = dc->ctx;
1830 int i;
1831
1832 DC_SYNC_INFO("Setting up OTG reset trigger\n");
1833
1834 for (i = 1; i < group_size; i++)
1835 grouped_pipes[i]->stream_res.tg->funcs->enable_reset_trigger(
1836 grouped_pipes[i]->stream_res.tg, grouped_pipes[0]->stream_res.tg->inst);
1837
1838
1839 DC_SYNC_INFO("Waiting for trigger\n");
1840
1841 /* Only need to check one pipe for the reset, as all the others are
1842 * synchronized. Look at the last pipe programmed to reset.
1843 */
1844 wait_for_reset_trigger_to_occur(dc_ctx, grouped_pipes[1]->stream_res.tg);
1845 for (i = 1; i < group_size; i++)
1846 grouped_pipes[i]->stream_res.tg->funcs->disable_reset_trigger(
1847 grouped_pipes[i]->stream_res.tg);
1848
1849 DC_SYNC_INFO("Sync complete\n");
1850 }
1851
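/* Dump the DML-derived TTU, DLG and RQ register values for a pipe to the
 * bandwidth-calculation log.
 */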
1852 static void print_rq_dlg_ttu(
1853 struct dc *core_dc,
1854 struct pipe_ctx *pipe_ctx)
1855 {
1856 dm_logger_write(core_dc->ctx->logger, LOG_BANDWIDTH_CALCS,
1857 "\n============== DML TTU Output parameters [%d] ==============\n"
1858 "qos_level_low_wm: %d, \n"
1859 "qos_level_high_wm: %d, \n"
1860 "min_ttu_vblank: %d, \n"
1861 "qos_level_flip: %d, \n"
1862 "refcyc_per_req_delivery_l: %d, \n"
1863 "qos_level_fixed_l: %d, \n"
1864 "qos_ramp_disable_l: %d, \n"
1865 "refcyc_per_req_delivery_pre_l: %d, \n"
1866 "refcyc_per_req_delivery_c: %d, \n"
1867 "qos_level_fixed_c: %d, \n"
1868 "qos_ramp_disable_c: %d, \n"
1869 "refcyc_per_req_delivery_pre_c: %d\n"
1870 "=============================================================\n",
1871 pipe_ctx->pipe_idx,
1872 pipe_ctx->ttu_regs.qos_level_low_wm,
1873 pipe_ctx->ttu_regs.qos_level_high_wm,
1874 pipe_ctx->ttu_regs.min_ttu_vblank,
1875 pipe_ctx->ttu_regs.qos_level_flip,
1876 pipe_ctx->ttu_regs.refcyc_per_req_delivery_l,
1877 pipe_ctx->ttu_regs.qos_level_fixed_l,
1878 pipe_ctx->ttu_regs.qos_ramp_disable_l,
1879 pipe_ctx->ttu_regs.refcyc_per_req_delivery_pre_l,
1880 pipe_ctx->ttu_regs.refcyc_per_req_delivery_c,
1881 pipe_ctx->ttu_regs.qos_level_fixed_c,
1882 pipe_ctx->ttu_regs.qos_ramp_disable_c,
1883 pipe_ctx->ttu_regs.refcyc_per_req_delivery_pre_c
1884 );
1885
1886 dm_logger_write(core_dc->ctx->logger, LOG_BANDWIDTH_CALCS,
1887 "\n============== DML DLG Output parameters [%d] ==============\n"
1888 "refcyc_h_blank_end: %d, \n"
1889 "dlg_vblank_end: %d, \n"
1890 "min_dst_y_next_start: %d, \n"
1891 "refcyc_per_htotal: %d, \n"
1892 "refcyc_x_after_scaler: %d, \n"
1893 "dst_y_after_scaler: %d, \n"
1894 "dst_y_prefetch: %d, \n"
1895 "dst_y_per_vm_vblank: %d, \n"
1896 "dst_y_per_row_vblank: %d, \n"
1897 "ref_freq_to_pix_freq: %d, \n"
1898 "vratio_prefetch: %d, \n"
1899 "refcyc_per_pte_group_vblank_l: %d, \n"
1900 "refcyc_per_meta_chunk_vblank_l: %d, \n"
1901 "dst_y_per_pte_row_nom_l: %d, \n"
1902 "refcyc_per_pte_group_nom_l: %d, \n",
1903 pipe_ctx->pipe_idx,
1904 pipe_ctx->dlg_regs.refcyc_h_blank_end,
1905 pipe_ctx->dlg_regs.dlg_vblank_end,
1906 pipe_ctx->dlg_regs.min_dst_y_next_start,
1907 pipe_ctx->dlg_regs.refcyc_per_htotal,
1908 pipe_ctx->dlg_regs.refcyc_x_after_scaler,
1909 pipe_ctx->dlg_regs.dst_y_after_scaler,
1910 pipe_ctx->dlg_regs.dst_y_prefetch,
1911 pipe_ctx->dlg_regs.dst_y_per_vm_vblank,
1912 pipe_ctx->dlg_regs.dst_y_per_row_vblank,
1913 pipe_ctx->dlg_regs.ref_freq_to_pix_freq,
1914 pipe_ctx->dlg_regs.vratio_prefetch,
1915 pipe_ctx->dlg_regs.refcyc_per_pte_group_vblank_l,
1916 pipe_ctx->dlg_regs.refcyc_per_meta_chunk_vblank_l,
1917 pipe_ctx->dlg_regs.dst_y_per_pte_row_nom_l,
1918 pipe_ctx->dlg_regs.refcyc_per_pte_group_nom_l
1919 );
1920
1921 dm_logger_write(core_dc->ctx->logger, LOG_BANDWIDTH_CALCS,
1922 "\ndst_y_per_meta_row_nom_l: %d, \n"
1923 "refcyc_per_meta_chunk_nom_l: %d, \n"
1924 "refcyc_per_line_delivery_pre_l: %d, \n"
1925 "refcyc_per_line_delivery_l: %d, \n"
1926 "vratio_prefetch_c: %d, \n"
1927 "refcyc_per_pte_group_vblank_c: %d, \n"
1928 "refcyc_per_meta_chunk_vblank_c: %d, \n"
1929 "dst_y_per_pte_row_nom_c: %d, \n"
1930 "refcyc_per_pte_group_nom_c: %d, \n"
1931 "dst_y_per_meta_row_nom_c: %d, \n"
1932 "refcyc_per_meta_chunk_nom_c: %d, \n"
1933 "refcyc_per_line_delivery_pre_c: %d, \n"
1934 "refcyc_per_line_delivery_c: %d \n"
1935 "========================================================\n",
1936 pipe_ctx->dlg_regs.dst_y_per_meta_row_nom_l,
1937 pipe_ctx->dlg_regs.refcyc_per_meta_chunk_nom_l,
1938 pipe_ctx->dlg_regs.refcyc_per_line_delivery_pre_l,
1939 pipe_ctx->dlg_regs.refcyc_per_line_delivery_l,
1940 pipe_ctx->dlg_regs.vratio_prefetch_c,
1941 pipe_ctx->dlg_regs.refcyc_per_pte_group_vblank_c,
1942 pipe_ctx->dlg_regs.refcyc_per_meta_chunk_vblank_c,
1943 pipe_ctx->dlg_regs.dst_y_per_pte_row_nom_c,
1944 pipe_ctx->dlg_regs.refcyc_per_pte_group_nom_c,
1945 pipe_ctx->dlg_regs.dst_y_per_meta_row_nom_c,
1946 pipe_ctx->dlg_regs.refcyc_per_meta_chunk_nom_c,
1947 pipe_ctx->dlg_regs.refcyc_per_line_delivery_pre_c,
1948 pipe_ctx->dlg_regs.refcyc_per_line_delivery_c
1949 );
1950
1951 dm_logger_write(core_dc->ctx->logger, LOG_BANDWIDTH_CALCS,
1952 "\n============== DML RQ Output parameters [%d] ==============\n"
1953 "chunk_size: %d \n"
1954 "min_chunk_size: %d \n"
1955 "meta_chunk_size: %d \n"
1956 "min_meta_chunk_size: %d \n"
1957 "dpte_group_size: %d \n"
1958 "mpte_group_size: %d \n"
1959 "swath_height: %d \n"
1960 "pte_row_height_linear: %d \n"
1961 "========================================================\n",
1962 pipe_ctx->pipe_idx,
1963 pipe_ctx->rq_regs.rq_regs_l.chunk_size,
1964 pipe_ctx->rq_regs.rq_regs_l.min_chunk_size,
1965 pipe_ctx->rq_regs.rq_regs_l.meta_chunk_size,
1966 pipe_ctx->rq_regs.rq_regs_l.min_meta_chunk_size,
1967 pipe_ctx->rq_regs.rq_regs_l.dpte_group_size,
1968 pipe_ctx->rq_regs.rq_regs_l.mpte_group_size,
1969 pipe_ctx->rq_regs.rq_regs_l.swath_height,
1970 pipe_ctx->rq_regs.rq_regs_l.pte_row_height_linear
1971 );
1972 }
1973
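/*
 * Power up the front-end of a pipe: ungate the plane power domain, enable the
 * HUBP and OPP pipe clocks, and log the surface, scaler and RQ/DLG/TTU
 * configuration when a plane is attached.
 */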
1974 static void dcn10_power_on_fe(
1975 struct dc *dc,
1976 struct pipe_ctx *pipe_ctx,
1977 struct dc_state *context)
1978 {
1979 struct dc_plane_state *plane_state = pipe_ctx->plane_state;
1980 struct dce_hwseq *hws = dc->hwseq;
1981
1982 if (dc->debug.sanity_checks) {
1983 verify_allow_pstate_change_high(dc->hwseq);
1984 }
1985
1986 power_on_plane(dc->hwseq,
1987 pipe_ctx->pipe_idx);
1988
1989 /* enable DCFCLK current DCHUB */
1990 REG_UPDATE(HUBP_CLK_CNTL[pipe_ctx->pipe_idx],
1991 HUBP_CLOCK_ENABLE, 1);
1992
1993 /* make sure OPP_PIPE_CLOCK_EN = 1 */
1994 REG_UPDATE(OPP_PIPE_CONTROL[pipe_ctx->stream_res.tg->inst],
1995 OPP_PIPE_CLOCK_EN, 1);
1996 /*TODO: REG_UPDATE(DENTIST_DISPCLK_CNTL, DENTIST_DPPCLK_WDIVIDER, 0x1f);*/
1997
1998 if (plane_state) {
1999 dm_logger_write(dc->ctx->logger, LOG_DC,
2000 "Pipe:%d 0x%x: addr hi:0x%x, "
2001 "addr low:0x%x, "
2002 "src: %d, %d, %d,"
2003 " %d; dst: %d, %d, %d, %d;\n",
2004 pipe_ctx->pipe_idx,
2005 plane_state,
2006 plane_state->address.grph.addr.high_part,
2007 plane_state->address.grph.addr.low_part,
2008 plane_state->src_rect.x,
2009 plane_state->src_rect.y,
2010 plane_state->src_rect.width,
2011 plane_state->src_rect.height,
2012 plane_state->dst_rect.x,
2013 plane_state->dst_rect.y,
2014 plane_state->dst_rect.width,
2015 plane_state->dst_rect.height);
2016
2017 dm_logger_write(dc->ctx->logger, LOG_DC,
2018 "Pipe %d: width, height, x, y format:%d\n"
2019 "viewport:%d, %d, %d, %d\n"
2020 "recout: %d, %d, %d, %d\n",
2021 pipe_ctx->pipe_idx,
2022 plane_state->format,
2023 pipe_ctx->plane_res.scl_data.viewport.width,
2024 pipe_ctx->plane_res.scl_data.viewport.height,
2025 pipe_ctx->plane_res.scl_data.viewport.x,
2026 pipe_ctx->plane_res.scl_data.viewport.y,
2027 pipe_ctx->plane_res.scl_data.recout.width,
2028 pipe_ctx->plane_res.scl_data.recout.height,
2029 pipe_ctx->plane_res.scl_data.recout.x,
2030 pipe_ctx->plane_res.scl_data.recout.y);
2031 print_rq_dlg_ttu(dc, pipe_ctx);
2032 }
2033
2034 if (dc->debug.sanity_checks) {
2035 verify_allow_pstate_change_high(dc->hwseq);
2036 }
2037 }
2038
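/*
 * Program gamut remap on the DPP. When the stream enables remap, the 3x4
 * matrix is copied into the 3x3 temperature matrix (entries 3, 7 and 11 are
 * not used); otherwise the gamut adjust is bypassed.
 */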
2039 static void program_gamut_remap(struct pipe_ctx *pipe_ctx)
2040 {
2041 struct dpp_grph_csc_adjustment adjust;
2042 memset(&adjust, 0, sizeof(adjust));
2043 adjust.gamut_adjust_type = GRAPHICS_GAMUT_ADJUST_TYPE_BYPASS;
2044
2045
2046 if (pipe_ctx->stream->gamut_remap_matrix.enable_remap == true) {
2047 adjust.gamut_adjust_type = GRAPHICS_GAMUT_ADJUST_TYPE_SW;
2048 adjust.temperature_matrix[0] =
2049 pipe_ctx->stream->
2050 gamut_remap_matrix.matrix[0];
2051 adjust.temperature_matrix[1] =
2052 pipe_ctx->stream->
2053 gamut_remap_matrix.matrix[1];
2054 adjust.temperature_matrix[2] =
2055 pipe_ctx->stream->
2056 gamut_remap_matrix.matrix[2];
2057 adjust.temperature_matrix[3] =
2058 pipe_ctx->stream->
2059 gamut_remap_matrix.matrix[4];
2060 adjust.temperature_matrix[4] =
2061 pipe_ctx->stream->
2062 gamut_remap_matrix.matrix[5];
2063 adjust.temperature_matrix[5] =
2064 pipe_ctx->stream->
2065 gamut_remap_matrix.matrix[6];
2066 adjust.temperature_matrix[6] =
2067 pipe_ctx->stream->
2068 gamut_remap_matrix.matrix[8];
2069 adjust.temperature_matrix[7] =
2070 pipe_ctx->stream->
2071 gamut_remap_matrix.matrix[9];
2072 adjust.temperature_matrix[8] =
2073 pipe_ctx->stream->
2074 gamut_remap_matrix.matrix[10];
2075 }
2076
2077 pipe_ctx->plane_res.dpp->funcs->dpp_set_gamut_remap(pipe_ctx->plane_res.dpp, &adjust);
2078 }
2079
2080
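/*
 * Program the output color space conversion. When the stream has CSC
 * adjustment enabled its csc_color_matrix is programmed (the matrix argument
 * is not used on that path); otherwise the default matrix for the given
 * color space is selected.
 */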
2081 static void program_csc_matrix(struct pipe_ctx *pipe_ctx,
2082 enum dc_color_space colorspace,
2083 uint16_t *matrix)
2084 {
2085 int i;
2086 struct out_csc_color_matrix tbl_entry;
2087
2088 if (pipe_ctx->stream->csc_color_matrix.enable_adjustment
2089 == true) {
2090 enum dc_color_space color_space =
2091 pipe_ctx->stream->output_color_space;
2092
2093 //uint16_t matrix[12];
2094 for (i = 0; i < 12; i++)
2095 tbl_entry.regval[i] = pipe_ctx->stream->csc_color_matrix.matrix[i];
2096
2097 tbl_entry.color_space = color_space;
2098 //tbl_entry.regval = matrix;
2099 pipe_ctx->plane_res.dpp->funcs->opp_set_csc_adjustment(pipe_ctx->plane_res.dpp, &tbl_entry);
2100 } else {
2101 pipe_ctx->plane_res.dpp->funcs->opp_set_csc_default(pipe_ctx->plane_res.dpp, colorspace);
2102 }
2103 }
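
/*
 * Visibility helpers: a pipe tree is considered visible if its own plane or
 * any plane above or below it in the blending tree is visible.
 */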
2104 static bool is_lower_pipe_tree_visible(struct pipe_ctx *pipe_ctx)
2105 {
2106 if (pipe_ctx->plane_state->visible)
2107 return true;
2108 if (pipe_ctx->bottom_pipe && is_lower_pipe_tree_visible(pipe_ctx->bottom_pipe))
2109 return true;
2110 return false;
2111 }
2112
2113 static bool is_upper_pipe_tree_visible(struct pipe_ctx *pipe_ctx)
2114 {
2115 if (pipe_ctx->plane_state->visible)
2116 return true;
2117 if (pipe_ctx->top_pipe && is_upper_pipe_tree_visible(pipe_ctx->top_pipe))
2118 return true;
2119 return false;
2120 }
2121
2122 static bool is_pipe_tree_visible(struct pipe_ctx *pipe_ctx)
2123 {
2124 if (pipe_ctx->plane_state->visible)
2125 return true;
2126 if (pipe_ctx->top_pipe && is_upper_pipe_tree_visible(pipe_ctx->top_pipe))
2127 return true;
2128 if (pipe_ctx->bottom_pipe && is_lower_pipe_tree_visible(pipe_ctx->bottom_pipe))
2129 return true;
2130 return false;
2131 }
2132
2133 static bool is_rgb_cspace(enum dc_color_space output_color_space)
2134 {
2135 switch (output_color_space) {
2136 case COLOR_SPACE_SRGB:
2137 case COLOR_SPACE_SRGB_LIMITED:
2138 case COLOR_SPACE_2020_RGB_FULLRANGE:
2139 case COLOR_SPACE_2020_RGB_LIMITEDRANGE:
2140 case COLOR_SPACE_ADOBERGB:
2141 return true;
2142 case COLOR_SPACE_YCBCR601:
2143 case COLOR_SPACE_YCBCR709:
2144 case COLOR_SPACE_YCBCR601_LIMITED:
2145 case COLOR_SPACE_YCBCR709_LIMITED:
2146 case COLOR_SPACE_2020_YCBCR:
2147 return false;
2148 default:
2149 /* Unknown color space: add a case to the switch */
2150 BREAK_TO_DEBUGGER();
2151 return false;
2152 }
2153 }
2154
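/*
 * Pick the visual-confirm border color from the surface pixel format: red for
 * ARGB8888, blue for ARGB2101010, green for 420 8bpp, yellow for 420 10bpp,
 * white for FP16.
 */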
2155 static void dcn10_get_surface_visual_confirm_color(
2156 const struct pipe_ctx *pipe_ctx,
2157 struct tg_color *color)
2158 {
2159 uint32_t color_value = MAX_TG_COLOR_VALUE;
2160
2161 switch (pipe_ctx->plane_res.scl_data.format) {
2162 case PIXEL_FORMAT_ARGB8888:
2163 /* set border color to red */
2164 color->color_r_cr = color_value;
2165 break;
2166
2167 case PIXEL_FORMAT_ARGB2101010:
2168 /* set border color to blue */
2169 color->color_b_cb = color_value;
2170 break;
2171 case PIXEL_FORMAT_420BPP8:
2172 /* set border color to green */
2173 color->color_g_y = color_value;
2174 break;
2175 case PIXEL_FORMAT_420BPP10:
2176 /* set border color to yellow */
2177 color->color_g_y = color_value;
2178 color->color_r_cr = color_value;
2179 break;
2180 case PIXEL_FORMAT_FP16:
2181 /* set border color to white */
2182 color->color_r_cr = color_value;
2183 color->color_b_cb = color_value;
2184 color->color_g_y = color_value;
2185 break;
2186 default:
2187 break;
2188 }
2189 }
2190
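/*
 * Read the VM system aperture settings back from MMHUB registers. The
 * physical page number is shifted left by 12 and the logical low/high
 * addresses by 18 to reconstruct byte addresses.
 */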
2191 static void mmhub_read_vm_system_aperture_settings(struct dcn10_hubp *hubp1,
2192 struct vm_system_aperture_param *apt,
2193 struct dce_hwseq *hws)
2194 {
2195 PHYSICAL_ADDRESS_LOC physical_page_number;
2196 uint32_t logical_addr_low;
2197 uint32_t logical_addr_high;
2198
2199 REG_GET(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR_MSB,
2200 PHYSICAL_PAGE_NUMBER_MSB, &physical_page_number.high_part);
2201 REG_GET(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR_LSB,
2202 PHYSICAL_PAGE_NUMBER_LSB, &physical_page_number.low_part);
2203
2204 REG_GET(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
2205 LOGICAL_ADDR, &logical_addr_low);
2206
2207 REG_GET(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
2208 LOGICAL_ADDR, &logical_addr_high);
2209
2210 apt->sys_default.quad_part = physical_page_number.quad_part << 12;
2211 apt->sys_low.quad_part = (int64_t)logical_addr_low << 18;
2212 apt->sys_high.quad_part = (int64_t)logical_addr_high << 18;
2213 }
2214
2215 /* Temporary read settings, future will get values from kmd directly */
2216 static void mmhub_read_vm_context0_settings(struct dcn10_hubp *hubp1,
2217 struct vm_context0_param *vm0,
2218 struct dce_hwseq *hws)
2219 {
2220 PHYSICAL_ADDRESS_LOC fb_base;
2221 PHYSICAL_ADDRESS_LOC fb_offset;
2222 uint32_t fb_base_value;
2223 uint32_t fb_offset_value;
2224
2225 REG_GET(DCHUBBUB_SDPIF_FB_BASE, SDPIF_FB_BASE, &fb_base_value);
2226 REG_GET(DCHUBBUB_SDPIF_FB_OFFSET, SDPIF_FB_OFFSET, &fb_offset_value);
2227
2228 REG_GET(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR_HI32,
2229 PAGE_DIRECTORY_ENTRY_HI32, &vm0->pte_base.high_part);
2230 REG_GET(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR_LO32,
2231 PAGE_DIRECTORY_ENTRY_LO32, &vm0->pte_base.low_part);
2232
2233 REG_GET(VM_CONTEXT0_PAGE_TABLE_START_ADDR_HI32,
2234 LOGICAL_PAGE_NUMBER_HI4, &vm0->pte_start.high_part);
2235 REG_GET(VM_CONTEXT0_PAGE_TABLE_START_ADDR_LO32,
2236 LOGICAL_PAGE_NUMBER_LO32, &vm0->pte_start.low_part);
2237
2238 REG_GET(VM_CONTEXT0_PAGE_TABLE_END_ADDR_HI32,
2239 LOGICAL_PAGE_NUMBER_HI4, &vm0->pte_end.high_part);
2240 REG_GET(VM_CONTEXT0_PAGE_TABLE_END_ADDR_LO32,
2241 LOGICAL_PAGE_NUMBER_LO32, &vm0->pte_end.low_part);
2242
2243 REG_GET(VM_L2_PROTECTION_FAULT_DEFAULT_ADDR_HI32,
2244 PHYSICAL_PAGE_ADDR_HI4, &vm0->fault_default.high_part);
2245 REG_GET(VM_L2_PROTECTION_FAULT_DEFAULT_ADDR_LO32,
2246 PHYSICAL_PAGE_ADDR_LO32, &vm0->fault_default.low_part);
2247
2248 /*
2249 * The values in VM_CONTEXT0_PAGE_TABLE_BASE_ADDR are in UMA space.
2250 * Therefore we need to do
2251 * DCN_VM_CONTEXT0_PAGE_TABLE_BASE_ADDR = VM_CONTEXT0_PAGE_TABLE_BASE_ADDR
2252 * - DCHUBBUB_SDPIF_FB_OFFSET + DCHUBBUB_SDPIF_FB_BASE
2253 */
2254 fb_base.quad_part = (uint64_t)fb_base_value << 24;
2255 fb_offset.quad_part = (uint64_t)fb_offset_value << 24;
2256 vm0->pte_base.quad_part += fb_base.quad_part;
2257 vm0->pte_base.quad_part -= fb_offset.quad_part;
2258 }
2259
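/*
 * Read the system aperture and VM context0 page table settings from MMHUB and
 * program them into the HUBP. The format, tiling and rotation arguments are
 * currently unused here.
 */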
2260 static void dcn10_program_pte_vm(struct hubp *hubp,
2261 enum surface_pixel_format format,
2262 union dc_tiling_info *tiling_info,
2263 enum dc_rotation_angle rotation,
2264 struct dce_hwseq *hws)
2265 {
2266 struct dcn10_hubp *hubp1 = TO_DCN10_HUBP(hubp);
2267 struct vm_system_aperture_param apt = { {{ 0 } } };
2268 struct vm_context0_param vm0 = { { { 0 } } };
2269
2270
2271 mmhub_read_vm_system_aperture_settings(hubp1, &apt, hws);
2272 mmhub_read_vm_context0_settings(hubp1, &vm0, hws);
2273
2274 hubp->funcs->hubp_set_vm_system_aperture_settings(hubp, &apt);
2275 hubp->funcs->hubp_set_vm_context0_settings(hubp, &vm0);
2276 }
2277
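/*
 * (Re)program the per-pipe HUBP and DPP state for a plane: DPP clock, VTG
 * select, DLG/TTU/RQ registers, optional PTE/VM setup, input pixel
 * processing, MPCC tree position and blending, scaler and viewport, gamut
 * remap and output CSC, surface config, and finally the surface address and
 * blank state.
 */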
2278 static void update_dchubp_dpp(
2279 struct dc *dc,
2280 struct pipe_ctx *pipe_ctx,
2281 struct dc_state *context)
2282 {
2283 struct dce_hwseq *hws = dc->hwseq;
2284 struct hubp *hubp = pipe_ctx->plane_res.hubp;
2285 struct dpp *dpp = pipe_ctx->plane_res.dpp;
2286 struct dc_plane_state *plane_state = pipe_ctx->plane_state;
2287 union plane_size size = plane_state->plane_size;
2288 struct mpcc_cfg mpcc_cfg = {0};
2289 struct pipe_ctx *top_pipe;
2290 bool per_pixel_alpha = plane_state->per_pixel_alpha && pipe_ctx->bottom_pipe;
2291
2292 /* TODO: proper fix once fpga works */
2293 /* depends on DML calculation, DPP clock value may change dynamically */
2294 enable_dppclk(
2295 dc->hwseq,
2296 pipe_ctx->pipe_idx,
2297 pipe_ctx->stream_res.pix_clk_params.requested_pix_clk,
2298 context->bw.dcn.calc_clk.dppclk_div);
2299 dc->current_state->bw.dcn.cur_clk.dppclk_div =
2300 context->bw.dcn.calc_clk.dppclk_div;
2301 context->bw.dcn.cur_clk.dppclk_div = context->bw.dcn.calc_clk.dppclk_div;
2302
2303 /* TODO: Need an input parameter to tell which OTG the current DCHUB pipe is
2304 * tied to. VTG is within DCHUBBUB, a common block shared by each pipe HUBP.
2305 * VTG has a 1:1 mapping with OTG; each pipe HUBP selects which VTG to use.
2306 */
2307 REG_UPDATE(DCHUBP_CNTL[pipe_ctx->pipe_idx], HUBP_VTG_SEL, pipe_ctx->stream_res.tg->inst);
2308
2309 hubp->funcs->hubp_setup(
2310 hubp,
2311 &pipe_ctx->dlg_regs,
2312 &pipe_ctx->ttu_regs,
2313 &pipe_ctx->rq_regs,
2314 &pipe_ctx->pipe_dlg_param);
2315
2316 size.grph.surface_size = pipe_ctx->plane_res.scl_data.viewport;
2317
2318 if (dc->config.gpu_vm_support)
2319 dcn10_program_pte_vm(
2320 pipe_ctx->plane_res.hubp,
2321 plane_state->format,
2322 &plane_state->tiling_info,
2323 plane_state->rotation,
2324 hws
2325 );
2326
2327 dpp->funcs->ipp_setup(dpp,
2328 plane_state->format,
2329 EXPANSION_MODE_ZERO);
2330
2331 mpcc_cfg.dpp_id = hubp->inst;
2332 mpcc_cfg.opp_id = pipe_ctx->stream_res.opp->inst;
2333 mpcc_cfg.tree_cfg = &(pipe_ctx->stream_res.opp->mpc_tree);
2334 for (top_pipe = pipe_ctx->top_pipe; top_pipe; top_pipe = top_pipe->top_pipe)
2335 mpcc_cfg.z_index++;
2336 if (dc->debug.surface_visual_confirm)
2337 dcn10_get_surface_visual_confirm_color(
2338 pipe_ctx, &mpcc_cfg.black_color);
2339 else
2340 color_space_to_black_color(
2341 dc, pipe_ctx->stream->output_color_space,
2342 &mpcc_cfg.black_color);
2343 mpcc_cfg.per_pixel_alpha = per_pixel_alpha;
2344 /* DCN1.0 has output CM before MPC which seems to screw with
2345 * pre-multiplied alpha.
2346 */
2347 mpcc_cfg.pre_multiplied_alpha = is_rgb_cspace(
2348 pipe_ctx->stream->output_color_space)
2349 && per_pixel_alpha;
2350 hubp->mpcc_id = dc->res_pool->mpc->funcs->add(dc->res_pool->mpc, &mpcc_cfg);
2351 hubp->opp_id = mpcc_cfg.opp_id;
2352
2353 pipe_ctx->plane_res.scl_data.lb_params.alpha_en = per_pixel_alpha;
2354 pipe_ctx->plane_res.scl_data.lb_params.depth = LB_PIXEL_DEPTH_30BPP;
2355 /* scaler configuration */
2356 pipe_ctx->plane_res.dpp->funcs->dpp_set_scaler(
2357 pipe_ctx->plane_res.dpp, &pipe_ctx->plane_res.scl_data);
2358
2359 hubp->funcs->mem_program_viewport(hubp,
2360 &pipe_ctx->plane_res.scl_data.viewport, &pipe_ctx->plane_res.scl_data.viewport_c);
2361
2362 /*gamut remap*/
2363 program_gamut_remap(pipe_ctx);
2364
2365 program_csc_matrix(pipe_ctx,
2366 pipe_ctx->stream->output_color_space,
2367 pipe_ctx->stream->csc_color_matrix.matrix);
2368
2369 hubp->funcs->hubp_program_surface_config(
2370 hubp,
2371 plane_state->format,
2372 &plane_state->tiling_info,
2373 &size,
2374 plane_state->rotation,
2375 &plane_state->dcc,
2376 plane_state->horizontal_mirror);
2377
2378 dc->hwss.update_plane_addr(dc, pipe_ctx);
2379
2380 if (is_pipe_tree_visible(pipe_ctx))
2381 hubp->funcs->set_blank(hubp, false);
2382 }
2383
2384
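/*
 * Program a pipe and, recursively, every pipe below it in the tree. The top
 * pipe additionally programs watermarks, takes the OTG lock and sets up
 * global sync and blanking for the whole tree.
 */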
2385 static void program_all_pipe_in_tree(
2386 struct dc *dc,
2387 struct pipe_ctx *pipe_ctx,
2388 struct dc_state *context)
2389 {
2390 unsigned int ref_clk_mhz = dc->res_pool->ref_clock_inKhz/1000;
2391
2392 if (pipe_ctx->top_pipe == NULL) {
2393
2394 /* lock otg_master_update to process all pipes associated with
2395 * this OTG. This is done only once.
2396 */
2397 /* watermark is for all pipes */
2398 program_watermarks(dc->hwseq, &context->bw.dcn.watermarks, ref_clk_mhz);
2399
2400 if (dc->debug.sanity_checks) {
2401 /* pstate stuck check after watermark update */
2402 verify_allow_pstate_change_high(dc->hwseq);
2403 }
2404
2405 pipe_ctx->stream_res.tg->funcs->lock(pipe_ctx->stream_res.tg);
2406
2407 pipe_ctx->stream_res.tg->dlg_otg_param.vready_offset = pipe_ctx->pipe_dlg_param.vready_offset;
2408 pipe_ctx->stream_res.tg->dlg_otg_param.vstartup_start = pipe_ctx->pipe_dlg_param.vstartup_start;
2409 pipe_ctx->stream_res.tg->dlg_otg_param.vupdate_offset = pipe_ctx->pipe_dlg_param.vupdate_offset;
2410 pipe_ctx->stream_res.tg->dlg_otg_param.vupdate_width = pipe_ctx->pipe_dlg_param.vupdate_width;
2411 pipe_ctx->stream_res.tg->dlg_otg_param.signal = pipe_ctx->stream->signal;
2412
2413 pipe_ctx->stream_res.tg->funcs->program_global_sync(
2414 pipe_ctx->stream_res.tg);
2415 pipe_ctx->stream_res.tg->funcs->set_blank(pipe_ctx->stream_res.tg, !is_pipe_tree_visible(pipe_ctx));
2416 }
2417
2418 if (pipe_ctx->plane_state != NULL) {
2419 struct dc_cursor_position position = { 0 };
2420 struct pipe_ctx *cur_pipe_ctx =
2421 &dc->current_state->res_ctx.pipe_ctx[pipe_ctx->pipe_idx];
2422
2423 dcn10_power_on_fe(dc, pipe_ctx, context);
2424
2425 /* temporary dcn1 w/a:
2426 * A watermark update requires a toggle after the a/b/c/d sets are programmed.
2427 * If the hubp is power gated, the wm value doesn't get propagated to the hubp,
2428 * so toggle after ungating to ensure the wm reaches the hubp.
2429 *
2430 * final solution: we need to get SMU to do the toggle, as
2431 * DCHUBBUB_ARB_WATERMARK_CHANGE_REQUEST is owned by SMU and we should not have
2432 * both driver and fw accessing the same register
2433 */
2434 toggle_watermark_change_req(dc->hwseq);
2435
2436 update_dchubp_dpp(dc, pipe_ctx, context);
2437
2438 /* TODO: this is a hack w/a for switching from mpo to pipe split */
2439 dc_stream_set_cursor_position(pipe_ctx->stream, &position);
2440
2441 dc_stream_set_cursor_attributes(pipe_ctx->stream,
2442 &pipe_ctx->stream->cursor_attributes);
2443
2444 if (cur_pipe_ctx->plane_state != pipe_ctx->plane_state) {
2445 dc->hwss.set_input_transfer_func(
2446 pipe_ctx, pipe_ctx->plane_state);
2447 dc->hwss.set_output_transfer_func(
2448 pipe_ctx, pipe_ctx->stream);
2449 }
2450 }
2451
2452 if (dc->debug.sanity_checks) {
2453 /* pstate stuck check after each pipe is programmed */
2454 verify_allow_pstate_change_high(dc->hwseq);
2455 }
2456
2457 if (pipe_ctx->bottom_pipe != NULL && pipe_ctx->bottom_pipe != pipe_ctx)
2458 program_all_pipe_in_tree(dc, pipe_ctx->bottom_pipe, context);
2459 }
2460
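/*
 * Translate the current DCN clock state into a dm_pp_display_configuration
 * and send it to pplib, but only when it differs from the configuration
 * applied last time.
 */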
2461 static void dcn10_pplib_apply_display_requirements(
2462 struct dc *dc,
2463 struct dc_state *context)
2464 {
2465 struct dm_pp_display_configuration *pp_display_cfg = &context->pp_display_cfg;
2466
2467 pp_display_cfg->all_displays_in_sync = false; /* todo */
2468 pp_display_cfg->nb_pstate_switch_disable = false;
2469 pp_display_cfg->min_engine_clock_khz = context->bw.dcn.cur_clk.dcfclk_khz;
2470 pp_display_cfg->min_memory_clock_khz = context->bw.dcn.cur_clk.fclk_khz;
2471 pp_display_cfg->min_engine_clock_deep_sleep_khz = context->bw.dcn.cur_clk.dcfclk_deep_sleep_khz;
2472 pp_display_cfg->min_dcfc_deep_sleep_clock_khz = context->bw.dcn.cur_clk.dcfclk_deep_sleep_khz;
2473 pp_display_cfg->avail_mclk_switch_time_us =
2474 context->bw.dcn.cur_clk.dram_ccm_us > 0 ? context->bw.dcn.cur_clk.dram_ccm_us : 0;
2475 pp_display_cfg->avail_mclk_switch_time_in_disp_active_us =
2476 context->bw.dcn.cur_clk.min_active_dram_ccm_us > 0 ? context->bw.dcn.cur_clk.min_active_dram_ccm_us : 0;
2477 pp_display_cfg->min_dcfclock_khz = context->bw.dcn.cur_clk.dcfclk_khz;
2478 pp_display_cfg->disp_clk_khz = context->bw.dcn.cur_clk.dispclk_khz;
2479 dce110_fill_display_configs(context, pp_display_cfg);
2480
2481 if (memcmp(&dc->prev_display_config, pp_display_cfg, sizeof(
2482 struct dm_pp_display_configuration)) != 0)
2483 dm_pp_apply_display_requirements(dc->ctx, pp_display_cfg);
2484
2485 dc->prev_display_config = *pp_display_cfg;
2486 }
2487
2488 static void optimize_shared_resources(struct dc *dc)
2489 {
2490 if (dc->current_state->stream_count == 0) {
2491 apply_DEGVIDCN10_253_wa(dc);
2492 /* S0i2 message */
2493 dcn10_pplib_apply_display_requirements(dc, dc->current_state);
2494 }
2495
2496 if (dc->debug.pplib_wm_report_mode == WM_REPORT_OVERRIDE)
2497 dcn_bw_notify_pplib_of_wm_ranges(dc);
2498 }
2499
2500 static void ready_shared_resources(struct dc *dc, struct dc_state *context)
2501 {
2502 if (dc->current_state->stream_count == 0 &&
2503 !dc->debug.disable_stutter)
2504 undo_DEGVIDCN10_253_wa(dc);
2505
2506 /* S0i2 message */
2507 if (dc->current_state->stream_count == 0 &&
2508 context->stream_count != 0)
2509 dcn10_pplib_apply_display_requirements(dc, context);
2510 }
2511
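/*
 * Apply the new state for the pipes driving the given stream: when no planes
 * remain, blank the OTG and power down its front-ends; otherwise power down
 * reused front-ends, remove MPCCs for pipes that lost their plane, program
 * every top-pipe tree and log the watermark parameters.
 */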
2512 static void dcn10_apply_ctx_for_surface(
2513 struct dc *dc,
2514 const struct dc_stream_state *stream,
2515 int num_planes,
2516 struct dc_state *context)
2517 {
2518 int i, be_idx;
2519
2520 if (dc->debug.sanity_checks)
2521 verify_allow_pstate_change_high(dc->hwseq);
2522
2523 be_idx = -1;
2524 for (i = 0; i < dc->res_pool->pipe_count; i++) {
2525 if (stream == context->res_ctx.pipe_ctx[i].stream) {
2526 be_idx = context->res_ctx.pipe_ctx[i].stream_res.tg->inst;
2527 break;
2528 }
2529 }
2530
2531 ASSERT(be_idx != -1);
2532
2533 if (num_planes == 0) {
2534 for (i = dc->res_pool->pipe_count - 1; i >= 0 ; i--) {
2535 struct pipe_ctx *old_pipe_ctx =
2536 &dc->current_state->res_ctx.pipe_ctx[i];
2537
2538 if (old_pipe_ctx->stream_res.tg && old_pipe_ctx->stream_res.tg->inst == be_idx) {
2539 old_pipe_ctx->stream_res.tg->funcs->set_blank(old_pipe_ctx->stream_res.tg, true);
2540 dcn10_power_down_fe(dc, old_pipe_ctx->pipe_idx);
2541 }
2542 }
2543 return;
2544 }
2545
2546 /* reset unused mpcc */
2547 for (i = 0; i < dc->res_pool->pipe_count; i++) {
2548 struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
2549 struct pipe_ctx *old_pipe_ctx =
2550 &dc->current_state->res_ctx.pipe_ctx[i];
2551
2552 if (!pipe_ctx->plane_state && !old_pipe_ctx->plane_state)
2553 continue;
2554
2555 /*
2556 * Power gate reused pipes that are not already power gated.
2557 * Fairly hacky right now: uses opp_id as the indicator.
2558 */
2559
2560 if (pipe_ctx->plane_state && !old_pipe_ctx->plane_state) {
2561 if (pipe_ctx->plane_res.hubp->opp_id != 0xf && pipe_ctx->stream_res.tg->inst == be_idx) {
2562 dcn10_power_down_fe(dc, pipe_ctx->pipe_idx);
2563 /*
2564 * power down fe will unlock when calling reset; need
2565 * to lock it back here. Messy, needs rework.
2566 */
2567 pipe_ctx->stream_res.tg->funcs->lock(pipe_ctx->stream_res.tg);
2568 }
2569 }
2570
2571
2572 if ((!pipe_ctx->plane_state && old_pipe_ctx->plane_state)
2573 || (!pipe_ctx->stream && old_pipe_ctx->stream)) {
2574 if (old_pipe_ctx->stream_res.tg->inst != be_idx)
2575 continue;
2576
2577 if (!old_pipe_ctx->top_pipe) {
2578 ASSERT(0);
2579 continue;
2580 }
2581
2582 /* reset mpc */
2583 dc->res_pool->mpc->funcs->remove(
2584 dc->res_pool->mpc,
2585 &(old_pipe_ctx->stream_res.opp->mpc_tree),
2586 old_pipe_ctx->stream_res.opp->inst,
2587 old_pipe_ctx->pipe_idx);
2588 old_pipe_ctx->stream_res.opp->mpcc_disconnect_pending[old_pipe_ctx->plane_res.hubp->mpcc_id] = true;
2589
2590 /*dm_logger_write(dc->ctx->logger, LOG_ERROR,
2591 "[debug_mpo: apply_ctx disconnect pending on mpcc %d]\n",
2592 old_pipe_ctx->mpcc->inst);*/
2593
2594 if (dc->debug.sanity_checks)
2595 verify_allow_pstate_change_high(dc->hwseq);
2596
2597 old_pipe_ctx->top_pipe = NULL;
2598 old_pipe_ctx->bottom_pipe = NULL;
2599 old_pipe_ctx->plane_state = NULL;
2600 old_pipe_ctx->stream = NULL;
2601
2602 dm_logger_write(dc->ctx->logger, LOG_DC,
2603 "Reset mpcc for pipe %d\n",
2604 old_pipe_ctx->pipe_idx);
2605 }
2606 }
2607
2608 for (i = 0; i < dc->res_pool->pipe_count; i++) {
2609 struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
2610
2611 if (pipe_ctx->stream != stream)
2612 continue;
2613
2614 /* looking for top pipe to program */
2615 if (!pipe_ctx->top_pipe)
2616 program_all_pipe_in_tree(dc, pipe_ctx, context);
2617 }
2618
2619 dm_logger_write(dc->ctx->logger, LOG_BANDWIDTH_CALCS,
2620 "\n============== Watermark parameters ==============\n"
2621 "a.urgent_ns: %d \n"
2622 "a.cstate_enter_plus_exit: %d \n"
2623 "a.cstate_exit: %d \n"
2624 "a.pstate_change: %d \n"
2625 "a.pte_meta_urgent: %d \n"
2626 "b.urgent_ns: %d \n"
2627 "b.cstate_enter_plus_exit: %d \n"
2628 "b.cstate_exit: %d \n"
2629 "b.pstate_change: %d \n"
2630 "b.pte_meta_urgent: %d \n",
2631 context->bw.dcn.watermarks.a.urgent_ns,
2632 context->bw.dcn.watermarks.a.cstate_pstate.cstate_enter_plus_exit_ns,
2633 context->bw.dcn.watermarks.a.cstate_pstate.cstate_exit_ns,
2634 context->bw.dcn.watermarks.a.cstate_pstate.pstate_change_ns,
2635 context->bw.dcn.watermarks.a.pte_meta_urgent_ns,
2636 context->bw.dcn.watermarks.b.urgent_ns,
2637 context->bw.dcn.watermarks.b.cstate_pstate.cstate_enter_plus_exit_ns,
2638 context->bw.dcn.watermarks.b.cstate_pstate.cstate_exit_ns,
2639 context->bw.dcn.watermarks.b.cstate_pstate.pstate_change_ns,
2640 context->bw.dcn.watermarks.b.pte_meta_urgent_ns
2641 );
2642 dm_logger_write(dc->ctx->logger, LOG_BANDWIDTH_CALCS,
2643 "\nc.urgent_ns: %d \n"
2644 "c.cstate_enter_plus_exit: %d \n"
2645 "c.cstate_exit: %d \n"
2646 "c.pstate_change: %d \n"
2647 "c.pte_meta_urgent: %d \n"
2648 "d.urgent_ns: %d \n"
2649 "d.cstate_enter_plus_exit: %d \n"
2650 "d.cstate_exit: %d \n"
2651 "d.pstate_change: %d \n"
2652 "d.pte_meta_urgent: %d \n"
2653 "========================================================\n",
2654 context->bw.dcn.watermarks.c.urgent_ns,
2655 context->bw.dcn.watermarks.c.cstate_pstate.cstate_enter_plus_exit_ns,
2656 context->bw.dcn.watermarks.c.cstate_pstate.cstate_exit_ns,
2657 context->bw.dcn.watermarks.c.cstate_pstate.pstate_change_ns,
2658 context->bw.dcn.watermarks.c.pte_meta_urgent_ns,
2659 context->bw.dcn.watermarks.d.urgent_ns,
2660 context->bw.dcn.watermarks.d.cstate_pstate.cstate_enter_plus_exit_ns,
2661 context->bw.dcn.watermarks.d.cstate_pstate.cstate_exit_ns,
2662 context->bw.dcn.watermarks.d.cstate_pstate.pstate_change_ns,
2663 context->bw.dcn.watermarks.d.pte_meta_urgent_ns
2664 );
2665
2666 if (dc->debug.sanity_checks)
2667 verify_allow_pstate_change_high(dc->hwseq);
2668 }
2669
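/*
 * Update DCN clocks for the new state (skipping decreases unless allowed):
 * dispclk through the display clock object, dcfclk/fclk hard minimums through
 * the SMU display requirement, plus deep-sleep dcfclk and the dram_ccm
 * limits, then forward the result to pplib.
 */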
2670 static void dcn10_set_bandwidth(
2671 struct dc *dc,
2672 struct dc_state *context,
2673 bool decrease_allowed)
2674 {
2675 struct pp_smu_display_requirement_rv *smu_req_cur =
2676 &dc->res_pool->pp_smu_req;
2677 struct pp_smu_display_requirement_rv smu_req = *smu_req_cur;
2678 struct pp_smu_funcs_rv *pp_smu = dc->res_pool->pp_smu;
2679
2680 if (dc->debug.sanity_checks) {
2681 verify_allow_pstate_change_high(dc->hwseq);
2682 }
2683
2684 if (IS_FPGA_MAXIMUS_DC(dc->ctx->dce_environment))
2685 return;
2686
2687 if (decrease_allowed || context->bw.dcn.calc_clk.dispclk_khz
2688 > dc->current_state->bw.dcn.cur_clk.dispclk_khz) {
2689 dc->res_pool->display_clock->funcs->set_clock(
2690 dc->res_pool->display_clock,
2691 context->bw.dcn.calc_clk.dispclk_khz);
2692 dc->current_state->bw.dcn.cur_clk.dispclk_khz =
2693 context->bw.dcn.calc_clk.dispclk_khz;
2694 }
2695 if (decrease_allowed || context->bw.dcn.calc_clk.dcfclk_khz
2696 > dc->current_state->bw.dcn.cur_clk.dcfclk_khz) {
2697 smu_req.hard_min_dcefclk_khz =
2698 context->bw.dcn.calc_clk.dcfclk_khz;
2699 }
2700 if (decrease_allowed || context->bw.dcn.calc_clk.fclk_khz
2701 > dc->current_state->bw.dcn.cur_clk.fclk_khz) {
2702 smu_req.hard_min_fclk_khz = context->bw.dcn.calc_clk.fclk_khz;
2703 }
2704 if (decrease_allowed || context->bw.dcn.calc_clk.dcfclk_deep_sleep_khz
2705 > dc->current_state->bw.dcn.cur_clk.dcfclk_deep_sleep_khz) {
2706 dc->current_state->bw.dcn.calc_clk.dcfclk_deep_sleep_khz =
2707 context->bw.dcn.calc_clk.dcfclk_deep_sleep_khz;
2708 context->bw.dcn.cur_clk.dcfclk_deep_sleep_khz =
2709 context->bw.dcn.calc_clk.dcfclk_deep_sleep_khz;
2710 }
2711
2712 smu_req.display_count = context->stream_count;
2713
2714 if (pp_smu->set_display_requirement)
2715 pp_smu->set_display_requirement(&pp_smu->pp_smu, &smu_req);
2716
2717 *smu_req_cur = smu_req;
2718
2719 /* Decrease in freq is increase in period so opposite comparison for dram_ccm */
2720 if (decrease_allowed || context->bw.dcn.calc_clk.dram_ccm_us
2721 < dc->current_state->bw.dcn.cur_clk.dram_ccm_us) {
2722 dc->current_state->bw.dcn.calc_clk.dram_ccm_us =
2723 context->bw.dcn.calc_clk.dram_ccm_us;
2724 context->bw.dcn.cur_clk.dram_ccm_us =
2725 context->bw.dcn.calc_clk.dram_ccm_us;
2726 }
2727 if (decrease_allowed || context->bw.dcn.calc_clk.min_active_dram_ccm_us
2728 < dc->current_state->bw.dcn.cur_clk.min_active_dram_ccm_us) {
2729 dc->current_state->bw.dcn.calc_clk.min_active_dram_ccm_us =
2730 context->bw.dcn.calc_clk.min_active_dram_ccm_us;
2731 context->bw.dcn.cur_clk.min_active_dram_ccm_us =
2732 context->bw.dcn.calc_clk.min_active_dram_ccm_us;
2733 }
2734 dcn10_pplib_apply_display_requirements(dc, context);
2735
2736 if (dc->debug.sanity_checks) {
2737 verify_allow_pstate_change_high(dc->hwseq);
2738 }
2739
2740 /* need to fix this function. not doing the right thing here */
2741 }
2742
2743 static void set_drr(struct pipe_ctx **pipe_ctx,
2744 int num_pipes, int vmin, int vmax)
2745 {
2746 int i = 0;
2747 struct drr_params params = {0};
2748
2749 params.vertical_total_max = vmax;
2750 params.vertical_total_min = vmin;
2751
2752 /* TODO: If multiple pipes are to be supported, you need
2753 * some GSL stuff
2754 */
2755 for (i = 0; i < num_pipes; i++) {
2756 pipe_ctx[i]->stream_res.tg->funcs->set_drr(pipe_ctx[i]->stream_res.tg, &params);
2757 }
2758 }
2759
2760 static void get_position(struct pipe_ctx **pipe_ctx,
2761 int num_pipes,
2762 struct crtc_position *position)
2763 {
2764 int i = 0;
2765
2766 /* TODO: handle pipes > 1
2767 */
2768 for (i = 0; i < num_pipes; i++)
2769 pipe_ctx[i]->stream_res.tg->funcs->get_position(pipe_ctx[i]->stream_res.tg, position);
2770 }
2771
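/*
 * Build the static screen event mask and program it on each pipe's TG. Bit 7
 * is set for surface updates and bit 1 for cursor updates; these positions
 * are assumed to match the OTG static screen event encoding.
 */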
2772 static void set_static_screen_control(struct pipe_ctx **pipe_ctx,
2773 int num_pipes, const struct dc_static_screen_events *events)
2774 {
2775 unsigned int i;
2776 unsigned int value = 0;
2777
2778 if (events->surface_update)
2779 value |= 0x80;
2780 if (events->cursor_update)
2781 value |= 0x2;
2782
2783 for (i = 0; i < num_pipes; i++)
2784 pipe_ctx[i]->stream_res.tg->funcs->
2785 set_static_screen_control(pipe_ctx[i]->stream_res.tg, value);
2786 }
2787
2788 static void set_plane_config(
2789 const struct dc *dc,
2790 struct pipe_ctx *pipe_ctx,
2791 struct resource_context *res_ctx)
2792 {
2793 /* TODO */
2794 program_gamut_remap(pipe_ctx);
2795 }
2796
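/*
 * Derive the CRTC stereo flags from the stream: stereo is only programmed for
 * frame-sequential view formats on stereo timings, stereo DP sync is disabled
 * when an inband/sideband FA format drives a DP-to-VGA/DVI/HDMI converter
 * dongle, and frame packing is flagged for HW frame packing timings.
 */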
2797 static void dcn10_config_stereo_parameters(
2798 struct dc_stream_state *stream, struct crtc_stereo_flags *flags)
2799 {
2800 enum view_3d_format view_format = stream->view_format;
2801 enum dc_timing_3d_format timing_3d_format =
2802 stream->timing.timing_3d_format;
2803 bool non_stereo_timing = false;
2804
2805 if (timing_3d_format == TIMING_3D_FORMAT_NONE ||
2806 timing_3d_format == TIMING_3D_FORMAT_SIDE_BY_SIDE ||
2807 timing_3d_format == TIMING_3D_FORMAT_TOP_AND_BOTTOM)
2808 non_stereo_timing = true;
2809
2810 if (non_stereo_timing == false &&
2811 view_format == VIEW_3D_FORMAT_FRAME_SEQUENTIAL) {
2812
2813 flags->PROGRAM_STEREO = 1;
2814 flags->PROGRAM_POLARITY = 1;
2815 if (timing_3d_format == TIMING_3D_FORMAT_INBAND_FA ||
2816 timing_3d_format == TIMING_3D_FORMAT_DP_HDMI_INBAND_FA ||
2817 timing_3d_format == TIMING_3D_FORMAT_SIDEBAND_FA) {
2818 enum display_dongle_type dongle =
2819 stream->sink->link->ddc->dongle_type;
2820 if (dongle == DISPLAY_DONGLE_DP_VGA_CONVERTER ||
2821 dongle == DISPLAY_DONGLE_DP_DVI_CONVERTER ||
2822 dongle == DISPLAY_DONGLE_DP_HDMI_CONVERTER)
2823 flags->DISABLE_STEREO_DP_SYNC = 1;
2824 }
2825 flags->RIGHT_EYE_POLARITY =
2826 stream->timing.flags.RIGHT_EYE_3D_POLARITY;
2827 if (timing_3d_format == TIMING_3D_FORMAT_HW_FRAME_PACKING)
2828 flags->FRAME_PACKED = 1;
2829 }
2830
2831 return;
2832 }
2833
2834 static void dcn10_setup_stereo(struct pipe_ctx *pipe_ctx, struct dc *dc)
2835 {
2836 struct crtc_stereo_flags flags = { 0 };
2837 struct dc_stream_state *stream = pipe_ctx->stream;
2838
2839 dcn10_config_stereo_parameters(stream, &flags);
2840
2841 pipe_ctx->stream_res.opp->funcs->opp_set_stereo_polarity(
2842 pipe_ctx->stream_res.opp,
2843 flags.PROGRAM_STEREO == 1,
2844 stream->timing.flags.RIGHT_EYE_3D_POLARITY == 1);
2845
2846 pipe_ctx->stream_res.tg->funcs->program_stereo(
2847 pipe_ctx->stream_res.tg,
2848 &stream->timing,
2849 &flags);
2850
2851 return;
2852 }
2853
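/*
 * Wait for every MPCC flagged as pending disconnect on this pipe's OPP to go
 * idle, then blank the corresponding HUBP and clear the pending flag.
 */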
2854 static void dcn10_wait_for_mpcc_disconnect(
2855 struct dc *dc,
2856 struct resource_pool *res_pool,
2857 struct pipe_ctx *pipe_ctx)
2858 {
2859 int i;
2860
2861 if (dc->debug.sanity_checks) {
2862 verify_allow_pstate_change_high(dc->hwseq);
2863 }
2864
2865 if (!pipe_ctx->stream_res.opp)
2866 return;
2867
2868 for (i = 0; i < MAX_PIPES; i++) {
2869 if (pipe_ctx->stream_res.opp->mpcc_disconnect_pending[i]) {
2870 res_pool->mpc->funcs->wait_for_idle(res_pool->mpc, i);
2871 pipe_ctx->stream_res.opp->mpcc_disconnect_pending[i] = false;
2872 res_pool->hubps[i]->funcs->set_blank(res_pool->hubps[i], true);
2873 /*dm_logger_write(dc->ctx->logger, LOG_ERROR,
2874 "[debug_mpo: wait_for_mpcc finished waiting on mpcc %d]\n",
2875 i);*/
2876 }
2877 }
2878
2879 if (dc->debug.sanity_checks) {
2880 verify_allow_pstate_change_high(dc->hwseq);
2881 }
2882
2883 }
2884
2885 static bool dcn10_dummy_display_power_gating(
2886 struct dc *dc,
2887 uint8_t controller_id,
2888 struct dc_bios *dcb,
2889 enum pipe_gating_control power_gating)
2890 {
2891 return true;
2892 }
2893
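/*
 * Refresh the plane's flip-pending status and current address, and for stereo
 * surfaces record which eye is currently being scanned out.
 */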
2894 void dcn10_update_pending_status(struct pipe_ctx *pipe_ctx)
2895 {
2896 struct dc_plane_state *plane_state = pipe_ctx->plane_state;
2897 struct timing_generator *tg = pipe_ctx->stream_res.tg;
2898
2899 if (plane_state == NULL)
2900 return;
2901
2902 plane_state->status.is_flip_pending =
2903 pipe_ctx->plane_res.hubp->funcs->hubp_is_flip_pending(
2904 pipe_ctx->plane_res.hubp);
2905
2906 plane_state->status.current_address = pipe_ctx->plane_res.hubp->current_address;
2907 if (pipe_ctx->plane_res.hubp->current_address.type == PLN_ADDR_TYPE_GRPH_STEREO &&
2908 tg->funcs->is_stereo_left_eye) {
2909 plane_state->status.is_right_eye =
2910 !tg->funcs->is_stereo_left_eye(pipe_ctx->stream_res.tg);
2911 }
2912 }
2913
2914
2915
2916 static const struct hw_sequencer_funcs dcn10_funcs = {
2917 .program_gamut_remap = program_gamut_remap,
2918 .program_csc_matrix = program_csc_matrix,
2919 .init_hw = dcn10_init_hw,
2920 .apply_ctx_to_hw = dce110_apply_ctx_to_hw,
2921 .apply_ctx_for_surface = dcn10_apply_ctx_for_surface,
2922 .set_plane_config = set_plane_config,
2923 .update_plane_addr = dcn10_update_plane_addr,
2924 .update_dchub = dcn10_update_dchub,
2925 .update_pending_status = dcn10_update_pending_status,
2926 .set_input_transfer_func = dcn10_set_input_transfer_func,
2927 .set_output_transfer_func = dcn10_set_output_transfer_func,
2928 .power_down = dce110_power_down,
2929 .enable_accelerated_mode = dce110_enable_accelerated_mode,
2930 .enable_timing_synchronization = dcn10_enable_timing_synchronization,
2931 .update_info_frame = dce110_update_info_frame,
2932 .enable_stream = dce110_enable_stream,
2933 .disable_stream = dce110_disable_stream,
2934 .unblank_stream = dce110_unblank_stream,
2935 .enable_display_power_gating = dcn10_dummy_display_power_gating,
2936 .power_down_front_end = dcn10_power_down_fe,
2937 .power_on_front_end = dcn10_power_on_fe,
2938 .pipe_control_lock = dcn10_pipe_control_lock,
2939 .set_bandwidth = dcn10_set_bandwidth,
2940 .reset_hw_ctx_wrap = reset_hw_ctx_wrap,
2941 .prog_pixclk_crtc_otg = dcn10_prog_pixclk_crtc_otg,
2942 .set_drr = set_drr,
2943 .get_position = get_position,
2944 .set_static_screen_control = set_static_screen_control,
2945 .setup_stereo = dcn10_setup_stereo,
2946 .set_avmute = dce110_set_avmute,
2947 .log_hw_state = dcn10_log_hw_state,
2948 .wait_for_mpcc_disconnect = dcn10_wait_for_mpcc_disconnect,
2949 .ready_shared_resources = ready_shared_resources,
2950 .optimize_shared_resources = optimize_shared_resources,
2951 .edp_backlight_control = hwss_edp_backlight_control,
2952 .edp_power_control = hwss_edp_power_control
2953 };
2954
2955
2956 void dcn10_hw_sequencer_construct(struct dc *dc)
2957 {
2958 dc->hwss = dcn10_funcs;
2959 }
2960