1 /*
2 * Copyright 2016 Advanced Micro Devices, Inc.
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
13 *
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
21 *
22 * Authors: AMD
23 *
24 */
25
26 #include "dm_services.h"
27 #include "core_types.h"
28 #include "resource.h"
29 #include "custom_float.h"
30 #include "dcn10_hw_sequencer.h"
31 #include "dce110/dce110_hw_sequencer.h"
32 #include "dce/dce_hwseq.h"
33 #include "abm.h"
34 #include "dcn10/dcn10_timing_generator.h"
35 #include "dcn10/dcn10_dpp.h"
36 #include "dcn10/dcn10_mpc.h"
37 #include "timing_generator.h"
38 #include "opp.h"
39 #include "ipp.h"
40 #include "mpc.h"
41 #include "reg_helper.h"
42 #include "custom_float.h"
43 #include "dcn10_hubp.h"
44
45 #define CTX \
46 hws->ctx
47 #define REG(reg)\
48 hws->regs->reg
49
50 #undef FN
51 #define FN(reg_name, field_name) \
52 hws->shifts->field_name, hws->masks->field_name
53
54 static void log_mpc_crc(struct dc *dc)
55 {
56 struct dc_context *dc_ctx = dc->ctx;
57 struct dce_hwseq *hws = dc->hwseq;
58
59 if (REG(MPC_CRC_RESULT_GB))
60 DTN_INFO("MPC_CRC_RESULT_GB:%d MPC_CRC_RESULT_C:%d MPC_CRC_RESULT_AR:%d\n",
61 REG_READ(MPC_CRC_RESULT_GB), REG_READ(MPC_CRC_RESULT_C), REG_READ(MPC_CRC_RESULT_AR));
62 if (REG(DPP_TOP0_DPP_CRC_VAL_B_A))
63 DTN_INFO("DPP_TOP0_DPP_CRC_VAL_B_A:%d DPP_TOP0_DPP_CRC_VAL_R_G:%d\n",
64 REG_READ(DPP_TOP0_DPP_CRC_VAL_B_A), REG_READ(DPP_TOP0_DPP_CRC_VAL_R_G));
65 }
66
67 void print_microsec(struct dc_context *dc_ctx, uint32_t ref_cycle)
68 {
69 static const uint32_t ref_clk_mhz = 48;
70 static const unsigned int frac = 10;
71 uint32_t us_x10 = (ref_cycle * frac) / ref_clk_mhz;
72
73 DTN_INFO("%d.%d \t ",
74 us_x10 / frac,
75 us_x10 % frac);
76 }
77
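/* For example, with the 48 MHz debug reference clock used by
 * print_microsec() above, a raw count of ref_cycle = 4800 gives
 * us_x10 = (4800 * 10) / 48 = 1000, which prints as "100.0" us.
 */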
78 #define DTN_INFO_MICRO_SEC(ref_cycle) \
79 print_microsec(dc_ctx, ref_cycle)
80
81 struct dcn_hubbub_wm_set {
82 uint32_t wm_set;
83 uint32_t data_urgent;
84 uint32_t pte_meta_urgent;
85 uint32_t sr_enter;
86 uint32_t sr_exit;
87 uint32_t dram_clk_change;
88 };
89
90 struct dcn_hubbub_wm {
91 struct dcn_hubbub_wm_set sets[4];
92 };
93
94 static void dcn10_hubbub_wm_read_state(struct dce_hwseq *hws,
95 struct dcn_hubbub_wm *wm)
96 {
97 struct dcn_hubbub_wm_set *s;
98
99 s = &wm->sets[0];
100 s->wm_set = 0;
101 s->data_urgent = REG_READ(DCHUBBUB_ARB_DATA_URGENCY_WATERMARK_A);
102 s->pte_meta_urgent = REG_READ(DCHUBBUB_ARB_PTE_META_URGENCY_WATERMARK_A);
103 s->sr_enter = REG_READ(DCHUBBUB_ARB_ALLOW_SR_ENTER_WATERMARK_A);
104 s->sr_exit = REG_READ(DCHUBBUB_ARB_ALLOW_SR_EXIT_WATERMARK_A);
105 s->dram_clk_change = REG_READ(DCHUBBUB_ARB_ALLOW_DRAM_CLK_CHANGE_WATERMARK_A);
106
107 s = &wm->sets[1];
108 s->wm_set = 1;
109 s->data_urgent = REG_READ(DCHUBBUB_ARB_DATA_URGENCY_WATERMARK_B);
110 s->pte_meta_urgent = REG_READ(DCHUBBUB_ARB_PTE_META_URGENCY_WATERMARK_B);
111 s->sr_enter = REG_READ(DCHUBBUB_ARB_ALLOW_SR_ENTER_WATERMARK_B);
112 s->sr_exit = REG_READ(DCHUBBUB_ARB_ALLOW_SR_EXIT_WATERMARK_B);
113 s->dram_clk_change = REG_READ(DCHUBBUB_ARB_ALLOW_DRAM_CLK_CHANGE_WATERMARK_B);
114
115 s = &wm->sets[2];
116 s->wm_set = 2;
117 s->data_urgent = REG_READ(DCHUBBUB_ARB_DATA_URGENCY_WATERMARK_C);
118 s->pte_meta_urgent = REG_READ(DCHUBBUB_ARB_PTE_META_URGENCY_WATERMARK_C);
119 s->sr_enter = REG_READ(DCHUBBUB_ARB_ALLOW_SR_ENTER_WATERMARK_C);
120 s->sr_exit = REG_READ(DCHUBBUB_ARB_ALLOW_SR_EXIT_WATERMARK_C);
121 s->dram_clk_change = REG_READ(DCHUBBUB_ARB_ALLOW_DRAM_CLK_CHANGE_WATERMARK_C);
122
123 s = &wm->sets[3];
124 s->wm_set = 3;
125 s->data_urgent = REG_READ(DCHUBBUB_ARB_DATA_URGENCY_WATERMARK_D);
126 s->pte_meta_urgent = REG_READ(DCHUBBUB_ARB_PTE_META_URGENCY_WATERMARK_D);
127 s->sr_enter = REG_READ(DCHUBBUB_ARB_ALLOW_SR_ENTER_WATERMARK_D);
128 s->sr_exit = REG_READ(DCHUBBUB_ARB_ALLOW_SR_EXIT_WATERMARK_D);
129 s->dram_clk_change = REG_READ(DCHUBBUB_ARB_ALLOW_DRAM_CLK_CHANGE_WATERMARK_D);
130 }
131
132 static void dcn10_log_hubbub_state(struct dc *dc)
133 {
134 struct dc_context *dc_ctx = dc->ctx;
135 struct dcn_hubbub_wm wm;
136 int i;
137
138 dcn10_hubbub_wm_read_state(dc->hwseq, &wm);
139
140 DTN_INFO("HUBBUB WM: \t data_urgent \t pte_meta_urgent \t "
141 "sr_enter \t sr_exit \t dram_clk_change \n");
142
143 for (i = 0; i < 4; i++) {
144 struct dcn_hubbub_wm_set *s;
145
146 s = &wm.sets[i];
147 DTN_INFO("WM_Set[%d]:\t ", s->wm_set);
148 DTN_INFO_MICRO_SEC(s->data_urgent);
149 DTN_INFO_MICRO_SEC(s->pte_meta_urgent);
150 DTN_INFO_MICRO_SEC(s->sr_enter);
151 DTN_INFO_MICRO_SEC(s->sr_exit);
152 DTN_INFO_MICRO_SEC(s->dram_clk_change);
153 DTN_INFO("\n");
154 }
155
156 DTN_INFO("\n");
157 }
158
159 static void dcn10_log_hw_state(struct dc *dc)
160 {
161 struct dc_context *dc_ctx = dc->ctx;
162 struct resource_pool *pool = dc->res_pool;
163 int i;
164
165 DTN_INFO_BEGIN();
166
167 dcn10_log_hubbub_state(dc);
168
169 DTN_INFO("HUBP:\t format \t addr_hi \t width \t height \t "
170 "rotation \t mirror \t sw_mode \t "
171 "dcc_en \t blank_en \t ttu_dis \t underflow \t "
172 "min_ttu_vblank \t qos_low_wm \t qos_high_wm \n");
173
174 for (i = 0; i < pool->pipe_count; i++) {
175 struct hubp *hubp = pool->hubps[i];
176 struct dcn_hubp_state s;
177
178 hubp1_read_state(TO_DCN10_HUBP(hubp), &s);
179
180 DTN_INFO("[%d]:\t %xh \t %xh \t %d \t %d \t "
181 "%xh \t %xh \t %xh \t "
182 "%d \t %d \t %d \t %xh \t",
183 i,
184 s.pixel_format,
185 s.inuse_addr_hi,
186 s.viewport_width,
187 s.viewport_height,
188 s.rotation_angle,
189 s.h_mirror_en,
190 s.sw_mode,
191 s.dcc_en,
192 s.blank_en,
193 s.ttu_disable,
194 s.underflow_status);
195 DTN_INFO_MICRO_SEC(s.min_ttu_vblank);
196 DTN_INFO_MICRO_SEC(s.qos_level_low_wm);
197 DTN_INFO_MICRO_SEC(s.qos_level_high_wm);
198 DTN_INFO("\n");
199 }
200 DTN_INFO("\n");
201
202 DTN_INFO("OTG:\t v_bs \t v_be \t v_ss \t v_se \t vpol \t vmax \t vmin \t "
203 "h_bs \t h_be \t h_ss \t h_se \t hpol \t htot \t vtot \t underflow\n");
204
205 for (i = 0; i < pool->res_cap->num_timing_generator; i++) {
206 struct timing_generator *tg = pool->timing_generators[i];
207 struct dcn_otg_state s = {0};
208
209 tgn10_read_otg_state(DCN10TG_FROM_TG(tg), &s);
210
211 //only print if OTG master is enabled
212 if ((s.otg_enabled & 1) == 0)
213 continue;
214
215 DTN_INFO("[%d]:\t %d \t %d \t %d \t %d \t "
216 "%d \t %d \t %d \t %d \t %d \t %d \t "
217 "%d \t %d \t %d \t %d \t %d \t ",
218 i,
219 s.v_blank_start,
220 s.v_blank_end,
221 s.v_sync_a_start,
222 s.v_sync_a_end,
223 s.v_sync_a_pol,
224 s.v_total_max,
225 s.v_total_min,
226 s.h_blank_start,
227 s.h_blank_end,
228 s.h_sync_a_start,
229 s.h_sync_a_end,
230 s.h_sync_a_pol,
231 s.h_total,
232 s.v_total,
233 s.underflow_occurred_status);
234 DTN_INFO("\n");
235 }
236 DTN_INFO("\n");
237
238 log_mpc_crc(dc);
239
240 DTN_INFO_END();
241 }
242
243 static void verify_allow_pstate_change_high(
244 struct dce_hwseq *hws)
245 {
246 /* pstate latency is ~20us, so if we wait over 40us and pstate allow is
247 * still not asserted, we are probably stuck and going to hang
248 *
249 * TODO: Figure out why pstate takes around ~100us on linux.
250 * It is currently unknown why the transition takes that long
251 * on linux.
252 */
253 static unsigned int pstate_wait_timeout_us = 200;
254 static unsigned int pstate_wait_expected_timeout_us = 40;
255 static unsigned int max_sampled_pstate_wait_us; /* data collection */
256 static bool forced_pstate_allow; /* help with revert wa */
257 static bool should_log_hw_state; /* prevent hw state log by default */
258
259 unsigned int debug_index = 0x7;
260 unsigned int debug_data;
261 unsigned int i;
262
263 if (forced_pstate_allow) {
264 /* pstate allow was forced on to prevent a hang the last time
265 * verify_allow_pstate_change_high ran, so disable the force
266 * here so we can check the real status
267 */
268 REG_UPDATE_2(DCHUBBUB_ARB_DRAM_STATE_CNTL,
269 DCHUBBUB_ARB_ALLOW_PSTATE_CHANGE_FORCE_VALUE, 0,
270 DCHUBBUB_ARB_ALLOW_PSTATE_CHANGE_FORCE_ENABLE, 0);
271 forced_pstate_allow = false;
272 }
273
274 /* debug_data bit description: 3-0: Pipe0 cursor0 QOS
275 * 7-4: Pipe1 cursor0 QOS
276 * 11-8: Pipe2 cursor0 QOS
277 * 15-12: Pipe3 cursor0 QOS
278 * 16: Pipe0 Plane0 Allow Pstate Change
279 * 17: Pipe1 Plane0 Allow Pstate Change
280 * 18: Pipe2 Plane0 Allow Pstate Change
281 * 19: Pipe3 Plane0 Allow Pstate Change
282 * 20: Pipe0 Plane1 Allow Pstate Change
283 * 21: Pipe1 Plane1 Allow Pstate Change
284 * 22: Pipe2 Plane1 Allow Pstate Change
285 * 23: Pipe3 Plane1 Allow Pstate Change
286 * 24: Pipe0 cursor0 Allow Pstate Change
287 * 25: Pipe1 cursor0 Allow Pstate Change
288 * 26: Pipe2 cursor0 Allow Pstate Change
289 * 27: Pipe3 cursor0 Allow Pstate Change
290 * 28: WB0 Allow Pstate Change
291 * 29: WB1 Allow Pstate Change
292 * 30: Arbiter's allow_pstate_change
293 * 31: SOC pstate change request
294 */
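/* Illustrative decode (values below are only an example): a readback of
 * debug_data = 0x40010002 would mean bits 3-0 = 0x2 (Pipe0 cursor0 QOS
 * level 2), bit 16 set (Pipe0 Plane0 allows pstate change) and bit 30
 * set (arbiter's allow_pstate_change asserted), which is the condition
 * the polling loop below waits for.
 */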
295
296 REG_WRITE(DCHUBBUB_TEST_DEBUG_INDEX, debug_index);
297
298 for (i = 0; i < pstate_wait_timeout_us; i++) {
299 debug_data = REG_READ(DCHUBBUB_TEST_DEBUG_DATA);
300
301 if (debug_data & (1 << 30)) {
302
303 if (i > pstate_wait_expected_timeout_us)
304 dm_logger_write(hws->ctx->logger, LOG_WARNING,
305 "pstate took longer than expected ~%dus\n",
306 i);
307
308 return;
309 }
310 if (max_sampled_pstate_wait_us < i)
311 max_sampled_pstate_wait_us = i;
312
313 udelay(1);
314 }
315
316 /* force pstate allow to prevent system hang
317 * and break to debugger to investigate
318 */
319 REG_UPDATE_2(DCHUBBUB_ARB_DRAM_STATE_CNTL,
320 DCHUBBUB_ARB_ALLOW_PSTATE_CHANGE_FORCE_VALUE, 1,
321 DCHUBBUB_ARB_ALLOW_PSTATE_CHANGE_FORCE_ENABLE, 1);
322 forced_pstate_allow = true;
323
324 if (should_log_hw_state) {
325 dcn10_log_hw_state(hws->ctx->dc);
326 }
327
328 dm_logger_write(hws->ctx->logger, LOG_WARNING,
329 "pstate TEST_DEBUG_DATA: 0x%X\n",
330 debug_data);
331 BREAK_TO_DEBUGGER();
332 }
333
334 static void enable_dppclk(
335 struct dce_hwseq *hws,
336 uint8_t plane_id,
337 uint32_t requested_pix_clk,
338 bool dppclk_div)
339 {
340 dm_logger_write(hws->ctx->logger, LOG_SURFACE,
341 "dppclk_rate_control for pipe %d programmed to %d\n",
342 plane_id,
343 dppclk_div);
344
345 if (hws->shifts->DPPCLK_RATE_CONTROL)
346 REG_UPDATE_2(DPP_CONTROL[plane_id],
347 DPPCLK_RATE_CONTROL, dppclk_div,
348 DPP_CLOCK_ENABLE, 1);
349 else
350 REG_UPDATE(DPP_CONTROL[plane_id],
351 DPP_CLOCK_ENABLE, 1);
352 }
353
354 static void enable_power_gating_plane(
355 struct dce_hwseq *hws,
356 bool enable)
357 {
358 bool force_on = 1; /* disable power gating */
359
360 if (enable)
361 force_on = 0;
362
363 /* DCHUBP0/1/2/3 */
364 REG_UPDATE(DOMAIN0_PG_CONFIG, DOMAIN0_POWER_FORCEON, force_on);
365 REG_UPDATE(DOMAIN2_PG_CONFIG, DOMAIN2_POWER_FORCEON, force_on);
366 REG_UPDATE(DOMAIN4_PG_CONFIG, DOMAIN4_POWER_FORCEON, force_on);
367 REG_UPDATE(DOMAIN6_PG_CONFIG, DOMAIN6_POWER_FORCEON, force_on);
368
369 /* DPP0/1/2/3 */
370 REG_UPDATE(DOMAIN1_PG_CONFIG, DOMAIN1_POWER_FORCEON, force_on);
371 REG_UPDATE(DOMAIN3_PG_CONFIG, DOMAIN3_POWER_FORCEON, force_on);
372 REG_UPDATE(DOMAIN5_PG_CONFIG, DOMAIN5_POWER_FORCEON, force_on);
373 REG_UPDATE(DOMAIN7_PG_CONFIG, DOMAIN7_POWER_FORCEON, force_on);
374 }
375
376 static void disable_vga(
377 struct dce_hwseq *hws)
378 {
379 REG_WRITE(D1VGA_CONTROL, 0);
380 REG_WRITE(D2VGA_CONTROL, 0);
381 REG_WRITE(D3VGA_CONTROL, 0);
382 REG_WRITE(D4VGA_CONTROL, 0);
383
384 /* HW Engineer's Notes:
385 * During switch from vga->extended, if we set the VGA_TEST_ENABLE and
386 * then hit the VGA_TEST_RENDER_START, then the DCHUBP timing gets updated correctly.
387 *
388 * Then vBIOS will have it poll for the VGA_TEST_RENDER_DONE and unset
389 * VGA_TEST_ENABLE, to leave it in the same state as before.
390 */
391 REG_UPDATE(VGA_TEST_CONTROL, VGA_TEST_ENABLE, 1);
392 REG_UPDATE(VGA_TEST_CONTROL, VGA_TEST_RENDER_START, 1);
393 }
394
395 static void dpp_pg_control(
396 struct dce_hwseq *hws,
397 unsigned int dpp_inst,
398 bool power_on)
399 {
400 uint32_t power_gate = power_on ? 0 : 1;
401 uint32_t pwr_status = power_on ? 0 : 2;
402
403 if (hws->ctx->dc->debug.disable_dpp_power_gate)
404 return;
405
406 switch (dpp_inst) {
407 case 0: /* DPP0 */
408 REG_UPDATE(DOMAIN1_PG_CONFIG,
409 DOMAIN1_POWER_GATE, power_gate);
410
411 REG_WAIT(DOMAIN1_PG_STATUS,
412 DOMAIN1_PGFSM_PWR_STATUS, pwr_status,
413 1, 1000);
414 break;
415 case 1: /* DPP1 */
416 REG_UPDATE(DOMAIN3_PG_CONFIG,
417 DOMAIN3_POWER_GATE, power_gate);
418
419 REG_WAIT(DOMAIN3_PG_STATUS,
420 DOMAIN3_PGFSM_PWR_STATUS, pwr_status,
421 1, 1000);
422 break;
423 case 2: /* DPP2 */
424 REG_UPDATE(DOMAIN5_PG_CONFIG,
425 DOMAIN5_POWER_GATE, power_gate);
426
427 REG_WAIT(DOMAIN5_PG_STATUS,
428 DOMAIN5_PGFSM_PWR_STATUS, pwr_status,
429 1, 1000);
430 break;
431 case 3: /* DPP3 */
432 REG_UPDATE(DOMAIN7_PG_CONFIG,
433 DOMAIN7_POWER_GATE, power_gate);
434
435 REG_WAIT(DOMAIN7_PG_STATUS,
436 DOMAIN7_PGFSM_PWR_STATUS, pwr_status,
437 1, 1000);
438 break;
439 default:
440 BREAK_TO_DEBUGGER();
441 break;
442 }
443 }
444
445 static uint32_t convert_and_clamp(
446 uint32_t wm_ns,
447 uint32_t refclk_mhz,
448 uint32_t clamp_value)
449 {
450 uint32_t ret_val = 0;
451 ret_val = wm_ns * refclk_mhz;
452 ret_val /= 1000;
453
454 if (ret_val > clamp_value)
455 ret_val = clamp_value;
456
457 return ret_val;
458 }
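/* Example: wm_ns = 3000 at refclk_mhz = 48 programs as
 * 3000 * 48 / 1000 = 144 refclk cycles; anything above clamp_value
 * (0x1fffff for the 21-bit DCN1 watermark registers, see
 * program_watermarks below) is clamped to that maximum.
 */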
459
460 static void program_watermarks(
461 struct dce_hwseq *hws,
462 struct dcn_watermark_set *watermarks,
463 unsigned int refclk_mhz)
464 {
465 uint32_t force_en = hws->ctx->dc->debug.disable_stutter ? 1 : 0;
466 /*
467 * Need to clamp to max of the register values (i.e. no wrap)
468 * for dcn1, all wm registers are 21-bit wide
469 */
470 uint32_t prog_wm_value;
471
472 REG_UPDATE(DCHUBBUB_ARB_WATERMARK_CHANGE_CNTL,
473 DCHUBBUB_ARB_WATERMARK_CHANGE_REQUEST, 0);
474
475 /* Repeat for watermark sets A, B, C and D. */
476 /* clock state A */
477 prog_wm_value = convert_and_clamp(watermarks->a.urgent_ns,
478 refclk_mhz, 0x1fffff);
479 REG_WRITE(DCHUBBUB_ARB_DATA_URGENCY_WATERMARK_A, prog_wm_value);
480
481 dm_logger_write(hws->ctx->logger, LOG_BANDWIDTH_CALCS,
482 "URGENCY_WATERMARK_A calculated =%d\n"
483 "HW register value = 0x%x\n",
484 watermarks->a.urgent_ns, prog_wm_value);
485
486 prog_wm_value = convert_and_clamp(watermarks->a.pte_meta_urgent_ns,
487 refclk_mhz, 0x1fffff);
488 REG_WRITE(DCHUBBUB_ARB_PTE_META_URGENCY_WATERMARK_A, prog_wm_value);
489 dm_logger_write(hws->ctx->logger, LOG_BANDWIDTH_CALCS,
490 "PTE_META_URGENCY_WATERMARK_A calculated =%d\n"
491 "HW register value = 0x%x\n",
492 watermarks->a.pte_meta_urgent_ns, prog_wm_value);
493
494 if (REG(DCHUBBUB_ARB_ALLOW_SR_ENTER_WATERMARK_A)) {
495 prog_wm_value = convert_and_clamp(
496 watermarks->a.cstate_pstate.cstate_enter_plus_exit_ns,
497 refclk_mhz, 0x1fffff);
498 REG_WRITE(DCHUBBUB_ARB_ALLOW_SR_ENTER_WATERMARK_A, prog_wm_value);
499 dm_logger_write(hws->ctx->logger, LOG_BANDWIDTH_CALCS,
500 "SR_ENTER_EXIT_WATERMARK_A calculated =%d\n"
501 "HW register value = 0x%x\n",
502 watermarks->a.cstate_pstate.cstate_enter_plus_exit_ns, prog_wm_value);
503
504
505 prog_wm_value = convert_and_clamp(
506 watermarks->a.cstate_pstate.cstate_exit_ns,
507 refclk_mhz, 0x1fffff);
508 REG_WRITE(DCHUBBUB_ARB_ALLOW_SR_EXIT_WATERMARK_A, prog_wm_value);
509 dm_logger_write(hws->ctx->logger, LOG_BANDWIDTH_CALCS,
510 "SR_EXIT_WATERMARK_A calculated =%d\n"
511 "HW register value = 0x%x\n",
512 watermarks->a.cstate_pstate.cstate_exit_ns, prog_wm_value);
513 }
514
515 prog_wm_value = convert_and_clamp(
516 watermarks->a.cstate_pstate.pstate_change_ns,
517 refclk_mhz, 0x1fffff);
518 REG_WRITE(DCHUBBUB_ARB_ALLOW_DRAM_CLK_CHANGE_WATERMARK_A, prog_wm_value);
519 dm_logger_write(hws->ctx->logger, LOG_BANDWIDTH_CALCS,
520 "DRAM_CLK_CHANGE_WATERMARK_A calculated =%d\n"
521 "HW register value = 0x%x\n\n",
522 watermarks->a.cstate_pstate.pstate_change_ns, prog_wm_value);
523
524
525 /* clock state B */
526 prog_wm_value = convert_and_clamp(
527 watermarks->b.urgent_ns, refclk_mhz, 0x1fffff);
528 REG_WRITE(DCHUBBUB_ARB_DATA_URGENCY_WATERMARK_B, prog_wm_value);
529 dm_logger_write(hws->ctx->logger, LOG_BANDWIDTH_CALCS,
530 "URGENCY_WATERMARK_B calculated =%d\n"
531 "HW register value = 0x%x\n",
532 watermarks->b.urgent_ns, prog_wm_value);
533
534
535 prog_wm_value = convert_and_clamp(
536 watermarks->b.pte_meta_urgent_ns,
537 refclk_mhz, 0x1fffff);
538 REG_WRITE(DCHUBBUB_ARB_PTE_META_URGENCY_WATERMARK_B, prog_wm_value);
539 dm_logger_write(hws->ctx->logger, LOG_BANDWIDTH_CALCS,
540 "PTE_META_URGENCY_WATERMARK_B calculated =%d\n"
541 "HW register value = 0x%x\n",
542 watermarks->b.pte_meta_urgent_ns, prog_wm_value);
543
544
545 if (REG(DCHUBBUB_ARB_ALLOW_SR_ENTER_WATERMARK_B)) {
546 prog_wm_value = convert_and_clamp(
547 watermarks->b.cstate_pstate.cstate_enter_plus_exit_ns,
548 refclk_mhz, 0x1fffff);
549 REG_WRITE(DCHUBBUB_ARB_ALLOW_SR_ENTER_WATERMARK_B, prog_wm_value);
550 dm_logger_write(hws->ctx->logger, LOG_BANDWIDTH_CALCS,
551 "SR_ENTER_WATERMARK_B calculated =%d\n"
552 "HW register value = 0x%x\n",
553 watermarks->b.cstate_pstate.cstate_enter_plus_exit_ns, prog_wm_value);
554
555
556 prog_wm_value = convert_and_clamp(
557 watermarks->b.cstate_pstate.cstate_exit_ns,
558 refclk_mhz, 0x1fffff);
559 REG_WRITE(DCHUBBUB_ARB_ALLOW_SR_EXIT_WATERMARK_B, prog_wm_value);
560 dm_logger_write(hws->ctx->logger, LOG_BANDWIDTH_CALCS,
561 "SR_EXIT_WATERMARK_B calculated =%d\n"
562 "HW register value = 0x%x\n",
563 watermarks->b.cstate_pstate.cstate_exit_ns, prog_wm_value);
564 }
565
566 prog_wm_value = convert_and_clamp(
567 watermarks->b.cstate_pstate.pstate_change_ns,
568 refclk_mhz, 0x1fffff);
569 REG_WRITE(DCHUBBUB_ARB_ALLOW_DRAM_CLK_CHANGE_WATERMARK_B, prog_wm_value);
570 dm_logger_write(hws->ctx->logger, LOG_BANDWIDTH_CALCS,
571 "DRAM_CLK_CHANGE_WATERMARK_B calculated =%d\n\n"
572 "HW register value = 0x%x\n",
573 watermarks->b.cstate_pstate.pstate_change_ns, prog_wm_value);
574
575 /* clock state C */
576 prog_wm_value = convert_and_clamp(
577 watermarks->c.urgent_ns, refclk_mhz, 0x1fffff);
578 REG_WRITE(DCHUBBUB_ARB_DATA_URGENCY_WATERMARK_C, prog_wm_value);
579 dm_logger_write(hws->ctx->logger, LOG_BANDWIDTH_CALCS,
580 "URGENCY_WATERMARK_C calculated =%d\n"
581 "HW register value = 0x%x\n",
582 watermarks->c.urgent_ns, prog_wm_value);
583
584
585 prog_wm_value = convert_and_clamp(
586 watermarks->c.pte_meta_urgent_ns,
587 refclk_mhz, 0x1fffff);
588 REG_WRITE(DCHUBBUB_ARB_PTE_META_URGENCY_WATERMARK_C, prog_wm_value);
589 dm_logger_write(hws->ctx->logger, LOG_BANDWIDTH_CALCS,
590 "PTE_META_URGENCY_WATERMARK_C calculated =%d\n"
591 "HW register value = 0x%x\n",
592 watermarks->c.pte_meta_urgent_ns, prog_wm_value);
593
594
595 if (REG(DCHUBBUB_ARB_ALLOW_SR_ENTER_WATERMARK_C)) {
596 prog_wm_value = convert_and_clamp(
597 watermarks->c.cstate_pstate.cstate_enter_plus_exit_ns,
598 refclk_mhz, 0x1fffff);
599 REG_WRITE(DCHUBBUB_ARB_ALLOW_SR_ENTER_WATERMARK_C, prog_wm_value);
600 dm_logger_write(hws->ctx->logger, LOG_BANDWIDTH_CALCS,
601 "SR_ENTER_WATERMARK_C calculated =%d\n"
602 "HW register value = 0x%x\n",
603 watermarks->c.cstate_pstate.cstate_enter_plus_exit_ns, prog_wm_value);
604
605
606 prog_wm_value = convert_and_clamp(
607 watermarks->c.cstate_pstate.cstate_exit_ns,
608 refclk_mhz, 0x1fffff);
609 REG_WRITE(DCHUBBUB_ARB_ALLOW_SR_EXIT_WATERMARK_C, prog_wm_value);
610 dm_logger_write(hws->ctx->logger, LOG_BANDWIDTH_CALCS,
611 "SR_EXIT_WATERMARK_C calculated =%d\n"
612 "HW register value = 0x%x\n",
613 watermarks->c.cstate_pstate.cstate_exit_ns, prog_wm_value);
614 }
615
616 prog_wm_value = convert_and_clamp(
617 watermarks->c.cstate_pstate.pstate_change_ns,
618 refclk_mhz, 0x1fffff);
619 REG_WRITE(DCHUBBUB_ARB_ALLOW_DRAM_CLK_CHANGE_WATERMARK_C, prog_wm_value);
620 dm_logger_write(hws->ctx->logger, LOG_BANDWIDTH_CALCS,
621 "DRAM_CLK_CHANGE_WATERMARK_C calculated =%d\n\n"
622 "HW register value = 0x%x\n",
623 watermarks->c.cstate_pstate.pstate_change_ns, prog_wm_value);
624
625 /* clock state D */
626 prog_wm_value = convert_and_clamp(
627 watermarks->d.urgent_ns, refclk_mhz, 0x1fffff);
628 REG_WRITE(DCHUBBUB_ARB_DATA_URGENCY_WATERMARK_D, prog_wm_value);
629 dm_logger_write(hws->ctx->logger, LOG_BANDWIDTH_CALCS,
630 "URGENCY_WATERMARK_D calculated =%d\n"
631 "HW register value = 0x%x\n",
632 watermarks->d.urgent_ns, prog_wm_value);
633
634 prog_wm_value = convert_and_clamp(
635 watermarks->d.pte_meta_urgent_ns,
636 refclk_mhz, 0x1fffff);
637 REG_WRITE(DCHUBBUB_ARB_PTE_META_URGENCY_WATERMARK_D, prog_wm_value);
638 dm_logger_write(hws->ctx->logger, LOG_BANDWIDTH_CALCS,
639 "PTE_META_URGENCY_WATERMARK_D calculated =%d\n"
640 "HW register value = 0x%x\n",
641 watermarks->d.pte_meta_urgent_ns, prog_wm_value);
642
643
644 if (REG(DCHUBBUB_ARB_ALLOW_SR_ENTER_WATERMARK_D)) {
645 prog_wm_value = convert_and_clamp(
646 watermarks->d.cstate_pstate.cstate_enter_plus_exit_ns,
647 refclk_mhz, 0x1fffff);
648 REG_WRITE(DCHUBBUB_ARB_ALLOW_SR_ENTER_WATERMARK_D, prog_wm_value);
649 dm_logger_write(hws->ctx->logger, LOG_BANDWIDTH_CALCS,
650 "SR_ENTER_WATERMARK_D calculated =%d\n"
651 "HW register value = 0x%x\n",
652 watermarks->d.cstate_pstate.cstate_enter_plus_exit_ns, prog_wm_value);
653
654
655 prog_wm_value = convert_and_clamp(
656 watermarks->d.cstate_pstate.cstate_exit_ns,
657 refclk_mhz, 0x1fffff);
658 REG_WRITE(DCHUBBUB_ARB_ALLOW_SR_EXIT_WATERMARK_D, prog_wm_value);
659 dm_logger_write(hws->ctx->logger, LOG_BANDWIDTH_CALCS,
660 "SR_EXIT_WATERMARK_D calculated =%d\n"
661 "HW register value = 0x%x\n",
662 watermarks->d.cstate_pstate.cstate_exit_ns, prog_wm_value);
663 }
664
665
666 prog_wm_value = convert_and_clamp(
667 watermarks->d.cstate_pstate.pstate_change_ns,
668 refclk_mhz, 0x1fffff);
669 REG_WRITE(DCHUBBUB_ARB_ALLOW_DRAM_CLK_CHANGE_WATERMARK_D, prog_wm_value);
670 dm_logger_write(hws->ctx->logger, LOG_BANDWIDTH_CALCS,
671 "DRAM_CLK_CHANGE_WATERMARK_D calculated =%d\n"
672 "HW register value = 0x%x\n\n",
673 watermarks->d.cstate_pstate.pstate_change_ns, prog_wm_value);
674
675 REG_UPDATE(DCHUBBUB_ARB_WATERMARK_CHANGE_CNTL,
676 DCHUBBUB_ARB_WATERMARK_CHANGE_REQUEST, 1);
677
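/* 60 * refclk_mhz is presumably the saturation level expressed in refclk
 * cycles, i.e. a ~60 us window, since refclk_mhz cycles elapse per
 * microsecond.
 */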
678 REG_UPDATE(DCHUBBUB_ARB_SAT_LEVEL,
679 DCHUBBUB_ARB_SAT_LEVEL, 60 * refclk_mhz);
680 REG_UPDATE(DCHUBBUB_ARB_DF_REQ_OUTSTAND,
681 DCHUBBUB_ARB_MIN_REQ_OUTSTAND, 68);
682
683 REG_UPDATE_2(DCHUBBUB_ARB_DRAM_STATE_CNTL,
684 DCHUBBUB_ARB_ALLOW_SELF_REFRESH_FORCE_VALUE, 0,
685 DCHUBBUB_ARB_ALLOW_SELF_REFRESH_FORCE_ENABLE, force_en);
686
687 #if 0
688 REG_UPDATE_2(DCHUBBUB_ARB_WATERMARK_CHANGE_CNTL,
689 DCHUBBUB_ARB_WATERMARK_CHANGE_DONE_INTERRUPT_DISABLE, 1,
690 DCHUBBUB_ARB_WATERMARK_CHANGE_REQUEST, 1);
691 #endif
692 }
693
694
695 static void dcn10_update_dchub(
696 struct dce_hwseq *hws,
697 struct dchub_init_data *dh_data)
698 {
699 /* TODO: port code from dal2 */
700 switch (dh_data->fb_mode) {
701 case FRAME_BUFFER_MODE_ZFB_ONLY:
702 /* For the ZFB case, put DCHUB FB BASE and TOP upside down to indicate ZFB mode */
703 REG_UPDATE(DCHUBBUB_SDPIF_FB_TOP,
704 SDPIF_FB_TOP, 0);
705
706 REG_UPDATE(DCHUBBUB_SDPIF_FB_BASE,
707 SDPIF_FB_BASE, 0x0FFFF);
708
709 REG_UPDATE(DCHUBBUB_SDPIF_AGP_BASE,
710 SDPIF_AGP_BASE, dh_data->zfb_phys_addr_base >> 22);
711
712 REG_UPDATE(DCHUBBUB_SDPIF_AGP_BOT,
713 SDPIF_AGP_BOT, dh_data->zfb_mc_base_addr >> 22);
714
715 REG_UPDATE(DCHUBBUB_SDPIF_AGP_TOP,
716 SDPIF_AGP_TOP, (dh_data->zfb_mc_base_addr +
717 dh_data->zfb_size_in_byte - 1) >> 22);
718 break;
719 case FRAME_BUFFER_MODE_MIXED_ZFB_AND_LOCAL:
720 /*Should not touch FB LOCATION (done by VBIOS on AsicInit table)*/
721
722 REG_UPDATE(DCHUBBUB_SDPIF_AGP_BASE,
723 SDPIF_AGP_BASE, dh_data->zfb_phys_addr_base >> 22);
724
725 REG_UPDATE(DCHUBBUB_SDPIF_AGP_BOT,
726 SDPIF_AGP_BOT, dh_data->zfb_mc_base_addr >> 22);
727
728 REG_UPDATE(DCHUBBUB_SDPIF_AGP_TOP,
729 SDPIF_AGP_TOP, (dh_data->zfb_mc_base_addr +
730 dh_data->zfb_size_in_byte - 1) >> 22);
731 break;
732 case FRAME_BUFFER_MODE_LOCAL_ONLY:
733 /*Should not touch FB LOCATION (done by VBIOS on AsicInit table)*/
734 REG_UPDATE(DCHUBBUB_SDPIF_AGP_BASE,
735 SDPIF_AGP_BASE, 0);
736
737 REG_UPDATE(DCHUBBUB_SDPIF_AGP_BOT,
738 SDPIF_AGP_BOT, 0x03FFFF);
739
740 REG_UPDATE(DCHUBBUB_SDPIF_AGP_TOP,
741 SDPIF_AGP_TOP, 0);
742 break;
743 default:
744 break;
745 }
746
747 dh_data->dchub_initialzied = true;
748 dh_data->dchub_info_valid = false;
749 }
750
751 static void hubp_pg_control(
752 struct dce_hwseq *hws,
753 unsigned int hubp_inst,
754 bool power_on)
755 {
756 uint32_t power_gate = power_on ? 0 : 1;
757 uint32_t pwr_status = power_on ? 0 : 2;
758
759 if (hws->ctx->dc->debug.disable_hubp_power_gate)
760 return;
761
762 switch (hubp_inst) {
763 case 0: /* DCHUBP0 */
764 REG_UPDATE(DOMAIN0_PG_CONFIG,
765 DOMAIN0_POWER_GATE, power_gate);
766
767 REG_WAIT(DOMAIN0_PG_STATUS,
768 DOMAIN0_PGFSM_PWR_STATUS, pwr_status,
769 1, 1000);
770 break;
771 case 1: /* DCHUBP1 */
772 REG_UPDATE(DOMAIN2_PG_CONFIG,
773 DOMAIN2_POWER_GATE, power_gate);
774
775 REG_WAIT(DOMAIN2_PG_STATUS,
776 DOMAIN2_PGFSM_PWR_STATUS, pwr_status,
777 1, 1000);
778 break;
779 case 2: /* DCHUBP2 */
780 REG_UPDATE(DOMAIN4_PG_CONFIG,
781 DOMAIN4_POWER_GATE, power_gate);
782
783 REG_WAIT(DOMAIN4_PG_STATUS,
784 DOMAIN4_PGFSM_PWR_STATUS, pwr_status,
785 1, 1000);
786 break;
787 case 3: /* DCHUBP3 */
788 REG_UPDATE(DOMAIN6_PG_CONFIG,
789 DOMAIN6_POWER_GATE, power_gate);
790
791 REG_WAIT(DOMAIN6_PG_STATUS,
792 DOMAIN6_PGFSM_PWR_STATUS, pwr_status,
793 1, 1000);
794 break;
795 default:
796 BREAK_TO_DEBUGGER();
797 break;
798 }
799 }
800
801 static void power_on_plane(
802 struct dce_hwseq *hws,
803 int plane_id)
804 {
805 if (REG(DC_IP_REQUEST_CNTL)) {
806 REG_SET(DC_IP_REQUEST_CNTL, 0,
807 IP_REQUEST_EN, 1);
808 dpp_pg_control(hws, plane_id, true);
809 hubp_pg_control(hws, plane_id, true);
810 REG_SET(DC_IP_REQUEST_CNTL, 0,
811 IP_REQUEST_EN, 0);
812 dm_logger_write(hws->ctx->logger, LOG_DEBUG,
813 "Un-gated front end for pipe %d\n", plane_id);
814 }
815 }
816
817 static void undo_DEGVIDCN10_253_wa(struct dc *dc)
818 {
819 struct dce_hwseq *hws = dc->hwseq;
820 struct hubp *hubp = dc->res_pool->hubps[0];
821 int pwr_status = 0;
822
823 REG_GET(DOMAIN0_PG_STATUS, DOMAIN0_PGFSM_PWR_STATUS, &pwr_status);
824 /* Don't need to blank if hubp is power gated*/
825 if (pwr_status == 2)
826 return;
827
828 hubp->funcs->set_blank(hubp, true);
829
830 REG_SET(DC_IP_REQUEST_CNTL, 0,
831 IP_REQUEST_EN, 1);
832
833 hubp_pg_control(hws, 0, false);
834 REG_SET(DC_IP_REQUEST_CNTL, 0,
835 IP_REQUEST_EN, 0);
836 }
837
838 static void apply_DEGVIDCN10_253_wa(struct dc *dc)
839 {
840 struct dce_hwseq *hws = dc->hwseq;
841 struct hubp *hubp = dc->res_pool->hubps[0];
842
843 if (dc->debug.disable_stutter)
844 return;
845
846 REG_SET(DC_IP_REQUEST_CNTL, 0,
847 IP_REQUEST_EN, 1);
848
849 hubp_pg_control(hws, 0, true);
850 REG_SET(DC_IP_REQUEST_CNTL, 0,
851 IP_REQUEST_EN, 0);
852
853 hubp->funcs->set_hubp_blank_en(hubp, false);
854 }
855
856 static void bios_golden_init(struct dc *dc)
857 {
858 struct dc_bios *bp = dc->ctx->dc_bios;
859 int i;
860
861 /* initialize dcn global */
862 bp->funcs->enable_disp_power_gating(bp,
863 CONTROLLER_ID_D0, ASIC_PIPE_INIT);
864
865 for (i = 0; i < dc->res_pool->pipe_count; i++) {
866 /* initialize dcn per pipe */
867 bp->funcs->enable_disp_power_gating(bp,
868 CONTROLLER_ID_D0 + i, ASIC_PIPE_DISABLE);
869 }
870 }
871
872 static void dcn10_init_hw(struct dc *dc)
873 {
874 int i;
875 struct abm *abm = dc->res_pool->abm;
876 struct dce_hwseq *hws = dc->hwseq;
877
878 if (IS_FPGA_MAXIMUS_DC(dc->ctx->dce_environment)) {
879 REG_WRITE(REFCLK_CNTL, 0);
880 REG_UPDATE(DCHUBBUB_GLOBAL_TIMER_CNTL, DCHUBBUB_GLOBAL_TIMER_ENABLE, 1);
881 REG_WRITE(DIO_MEM_PWR_CTRL, 0);
882
883 if (!dc->debug.disable_clock_gate) {
884 /* enable all DCN clock gating */
885 REG_WRITE(DCCG_GATE_DISABLE_CNTL, 0);
886
887 REG_WRITE(DCCG_GATE_DISABLE_CNTL2, 0);
888
889 REG_UPDATE(DCFCLK_CNTL, DCFCLK_GATE_DIS, 0);
890 }
891
892 enable_power_gating_plane(dc->hwseq, true);
893 return;
894 }
895 /* end of FPGA. Below if real ASIC */
896
897 bios_golden_init(dc);
898
899 disable_vga(dc->hwseq);
900
901 for (i = 0; i < dc->link_count; i++) {
902 /* Power up AND update implementation according to the
903 * required signal (which may be different from the
904 * default signal on connector).
905 */
906 struct dc_link *link = dc->links[i];
907
908 link->link_enc->funcs->hw_init(link->link_enc);
909 }
910
911 for (i = 0; i < dc->res_pool->pipe_count; i++) {
912 struct dpp *dpp = dc->res_pool->dpps[i];
913 struct timing_generator *tg = dc->res_pool->timing_generators[i];
914
915 dpp->funcs->dpp_reset(dpp);
916 dc->res_pool->mpc->funcs->remove(
917 dc->res_pool->mpc, &(dc->res_pool->opps[i]->mpc_tree),
918 dc->res_pool->opps[i]->inst, i);
919
920 /* Blank controller using driver code instead of
921 * command table.
922 */
923 tg->funcs->set_blank(tg, true);
924 hwss_wait_for_blank_complete(tg);
925 }
926
927 for (i = 0; i < dc->res_pool->audio_count; i++) {
928 struct audio *audio = dc->res_pool->audios[i];
929
930 audio->funcs->hw_init(audio);
931 }
932
933 if (abm != NULL) {
934 abm->funcs->init_backlight(abm);
935 abm->funcs->abm_init(abm);
936 }
937
938 /* power AFMT HDMI memory; TODO: may move to output disable/enable to save power */
939 REG_WRITE(DIO_MEM_PWR_CTRL, 0);
940
941 if (!dc->debug.disable_clock_gate) {
942 /* enable all DCN clock gating */
943 REG_WRITE(DCCG_GATE_DISABLE_CNTL, 0);
944
945 REG_WRITE(DCCG_GATE_DISABLE_CNTL2, 0);
946
947 REG_UPDATE(DCFCLK_CNTL, DCFCLK_GATE_DIS, 0);
948 }
949
950 enable_power_gating_plane(dc->hwseq, true);
951 }
952
953 static enum dc_status dcn10_prog_pixclk_crtc_otg(
954 struct pipe_ctx *pipe_ctx,
955 struct dc_state *context,
956 struct dc *dc)
957 {
958 struct dc_stream_state *stream = pipe_ctx->stream;
959 enum dc_color_space color_space;
960 struct tg_color black_color = {0};
961 bool enableStereo = stream->timing.timing_3d_format == TIMING_3D_FORMAT_NONE ?
962 false:true;
963 bool rightEyePolarity = stream->timing.flags.RIGHT_EYE_3D_POLARITY;
964
965
966 /* In the caller's loop, pipe0 is the parent pipe and is handled first.
967 * The back end is set up for pipe0; other child pipes share the back end
968 * with pipe 0, so no programming is needed for them.
969 */
970 if (pipe_ctx->top_pipe != NULL)
971 return DC_OK;
972
973 /* TODO check if timing_changed, disable stream if timing changed */
974
975 /* The HW programming guide assumes the display was already disabled
976 * by the unplug sequence and that the OTG is stopped.
977 */
978 pipe_ctx->stream_res.tg->funcs->enable_optc_clock(pipe_ctx->stream_res.tg, true);
979
980 if (false == pipe_ctx->clock_source->funcs->program_pix_clk(
981 pipe_ctx->clock_source,
982 &pipe_ctx->stream_res.pix_clk_params,
983 &pipe_ctx->pll_settings)) {
984 BREAK_TO_DEBUGGER();
985 return DC_ERROR_UNEXPECTED;
986 }
987 pipe_ctx->stream_res.tg->dlg_otg_param.vready_offset = pipe_ctx->pipe_dlg_param.vready_offset;
988 pipe_ctx->stream_res.tg->dlg_otg_param.vstartup_start = pipe_ctx->pipe_dlg_param.vstartup_start;
989 pipe_ctx->stream_res.tg->dlg_otg_param.vupdate_offset = pipe_ctx->pipe_dlg_param.vupdate_offset;
990 pipe_ctx->stream_res.tg->dlg_otg_param.vupdate_width = pipe_ctx->pipe_dlg_param.vupdate_width;
991
992 pipe_ctx->stream_res.tg->dlg_otg_param.signal = pipe_ctx->stream->signal;
993
994 pipe_ctx->stream_res.tg->funcs->program_timing(
995 pipe_ctx->stream_res.tg,
996 &stream->timing,
997 true);
998
999 pipe_ctx->stream_res.opp->funcs->opp_set_stereo_polarity(
1000 pipe_ctx->stream_res.opp,
1001 enableStereo,
1002 rightEyePolarity);
1003
1004 #if 0 /* move to after enable_crtc */
1005 /* TODO: OPP FMT, ABM. etc. should be done here. */
1006 /* or FPGA now. instance 0 only. TODO: move to opp.c */
1007
1008 inst_offset = reg_offsets[pipe_ctx->stream_res.tg->inst].fmt;
1009
1010 pipe_ctx->stream_res.opp->funcs->opp_program_fmt(
1011 pipe_ctx->stream_res.opp,
1012 &stream->bit_depth_params,
1013 &stream->clamping);
1014 #endif
1015 /* program otg blank color */
1016 color_space = stream->output_color_space;
1017 color_space_to_black_color(dc, color_space, &black_color);
1018 pipe_ctx->stream_res.tg->funcs->set_blank_color(
1019 pipe_ctx->stream_res.tg,
1020 &black_color);
1021
1022 pipe_ctx->stream_res.tg->funcs->set_blank(pipe_ctx->stream_res.tg, true);
1023 hwss_wait_for_blank_complete(pipe_ctx->stream_res.tg);
1024
1025 /* VTG is within DCHUB command block. DCFCLK is always on */
1026 if (false == pipe_ctx->stream_res.tg->funcs->enable_crtc(pipe_ctx->stream_res.tg)) {
1027 BREAK_TO_DEBUGGER();
1028 return DC_ERROR_UNEXPECTED;
1029 }
1030
1031 /* TODO program crtc source select for non-virtual signal*/
1032 /* TODO program FMT */
1033 /* TODO setup link_enc */
1034 /* TODO set stream attributes */
1035 /* TODO program audio */
1036 /* TODO enable stream if timing changed */
1037 /* TODO unblank stream if DP */
1038
1039 return DC_OK;
1040 }
1041
1042 static void reset_back_end_for_pipe(
1043 struct dc *dc,
1044 struct pipe_ctx *pipe_ctx,
1045 struct dc_state *context)
1046 {
1047 int i;
1048
1049 if (pipe_ctx->stream_res.stream_enc == NULL) {
1050 pipe_ctx->stream = NULL;
1051 return;
1052 }
1053
1054 if (!IS_FPGA_MAXIMUS_DC(dc->ctx->dce_environment)) {
1055 /* DPMS may already disable */
1056 if (!pipe_ctx->stream->dpms_off)
1057 core_link_disable_stream(pipe_ctx, FREE_ACQUIRED_RESOURCE);
1058 }
1059
1060 /* In the caller's loop the parent pipe (pipe0) is reset last.
1061 * The back end is shared by all pipes and is disabled only when the
1062 * parent pipe is disabled.
1063 */
1064 if (pipe_ctx->top_pipe == NULL) {
1065 pipe_ctx->stream_res.tg->funcs->disable_crtc(pipe_ctx->stream_res.tg);
1066
1067 pipe_ctx->stream_res.tg->funcs->enable_optc_clock(pipe_ctx->stream_res.tg, false);
1068 }
1069
1070 for (i = 0; i < dc->res_pool->pipe_count; i++)
1071 if (&dc->current_state->res_ctx.pipe_ctx[i] == pipe_ctx)
1072 break;
1073
1074 if (i == dc->res_pool->pipe_count)
1075 return;
1076
1077 pipe_ctx->stream = NULL;
1078 dm_logger_write(dc->ctx->logger, LOG_DEBUG,
1079 "Reset back end for pipe %d, tg:%d\n",
1080 pipe_ctx->pipe_idx, pipe_ctx->stream_res.tg->inst);
1081 }
1082
1083 /* trigger HW to start disconnecting the plane from the stream on the next vsync */
1084 static void plane_atomic_disconnect(struct dc *dc,
1085 int fe_idx)
1086 {
1087 struct hubp *hubp = dc->res_pool->hubps[fe_idx];
1088 struct mpc *mpc = dc->res_pool->mpc;
1089 int opp_id, z_idx;
1090 int mpcc_id = -1;
1091
1092 /* look at the MPC tree rather than the hubp (mem input) here to know if we already reset */
1093 for (opp_id = 0; opp_id < dc->res_pool->pipe_count; opp_id++) {
1094 struct output_pixel_processor *opp = dc->res_pool->opps[opp_id];
1095
1096 for (z_idx = 0; z_idx < opp->mpc_tree.num_pipes; z_idx++) {
1097 if (opp->mpc_tree.dpp[z_idx] == fe_idx) {
1098 mpcc_id = opp->mpc_tree.mpcc[z_idx];
1099 break;
1100 }
1101 }
1102 if (mpcc_id != -1)
1103 break;
1104 }
1105 /*Already reset*/
1106 if (opp_id == dc->res_pool->pipe_count)
1107 return;
1108
1109 if (dc->debug.sanity_checks)
1110 verify_allow_pstate_change_high(dc->hwseq);
1111 hubp->funcs->dcc_control(hubp, false, false);
1112 if (dc->debug.sanity_checks)
1113 verify_allow_pstate_change_high(dc->hwseq);
1114
1115 mpc->funcs->remove(mpc, &(dc->res_pool->opps[opp_id]->mpc_tree),
1116 dc->res_pool->opps[opp_id]->inst, fe_idx);
1117 }
1118
1119 /* disable HW used by the plane.
1120 * note: cannot disable until the disconnect is complete */
1121 static void plane_atomic_disable(struct dc *dc,
1122 int fe_idx)
1123 {
1124 struct dce_hwseq *hws = dc->hwseq;
1125 struct hubp *hubp = dc->res_pool->hubps[fe_idx];
1126 struct mpc *mpc = dc->res_pool->mpc;
1127 int opp_id = hubp->opp_id;
1128
1129 if (opp_id == 0xf)
1130 return;
1131
1132 mpc->funcs->wait_for_idle(mpc, hubp->mpcc_id);
1133 dc->res_pool->opps[hubp->opp_id]->mpcc_disconnect_pending[hubp->mpcc_id] = false;
1134 /*dm_logger_write(dc->ctx->logger, LOG_ERROR,
1135 "[debug_mpo: atomic disable finished on mpcc %d]\n",
1136 fe_idx);*/
1137
1138 hubp->funcs->set_blank(hubp, true);
1139
1140 if (dc->debug.sanity_checks)
1141 verify_allow_pstate_change_high(dc->hwseq);
1142
1143 REG_UPDATE(HUBP_CLK_CNTL[fe_idx],
1144 HUBP_CLOCK_ENABLE, 0);
1145 REG_UPDATE(DPP_CONTROL[fe_idx],
1146 DPP_CLOCK_ENABLE, 0);
1147
1148 if (dc->res_pool->opps[opp_id]->mpc_tree.num_pipes == 0)
1149 REG_UPDATE(OPP_PIPE_CONTROL[opp_id],
1150 OPP_PIPE_CLOCK_EN, 0);
1151
1152 if (dc->debug.sanity_checks)
1153 verify_allow_pstate_change_high(dc->hwseq);
1154 }
1155
1156 /*
1157 * kill power to plane hw
1158 * note: cannot power down until the plane is disabled
1159 */
1160 static void plane_atomic_power_down(struct dc *dc, int fe_idx)
1161 {
1162 struct dce_hwseq *hws = dc->hwseq;
1163 struct dpp *dpp = dc->res_pool->dpps[fe_idx];
1164
1165 if (REG(DC_IP_REQUEST_CNTL)) {
1166 REG_SET(DC_IP_REQUEST_CNTL, 0,
1167 IP_REQUEST_EN, 1);
1168 dpp_pg_control(hws, fe_idx, false);
1169 hubp_pg_control(hws, fe_idx, false);
1170 dpp->funcs->dpp_reset(dpp);
1171 REG_SET(DC_IP_REQUEST_CNTL, 0,
1172 IP_REQUEST_EN, 0);
1173 dm_logger_write(dc->ctx->logger, LOG_DEBUG,
1174 "Power gated front end %d\n", fe_idx);
1175
1176 if (dc->debug.sanity_checks)
1177 verify_allow_pstate_change_high(dc->hwseq);
1178 }
1179 }
1180
1181
1182 static void reset_front_end(
1183 struct dc *dc,
1184 int fe_idx)
1185 {
1186 struct dce_hwseq *hws = dc->hwseq;
1187 struct timing_generator *tg;
1188 int opp_id = dc->res_pool->hubps[fe_idx]->opp_id;
1189
1190 /*Already reset*/
1191 if (opp_id == 0xf)
1192 return;
1193
1194 tg = dc->res_pool->timing_generators[opp_id];
1195 tg->funcs->lock(tg);
1196
1197 plane_atomic_disconnect(dc, fe_idx);
1198
1199 REG_UPDATE(OTG_GLOBAL_SYNC_STATUS[tg->inst], VUPDATE_NO_LOCK_EVENT_CLEAR, 1);
1200 tg->funcs->unlock(tg);
1201
1202 if (dc->debug.sanity_checks)
1203 verify_allow_pstate_change_high(hws);
1204
1205 if (tg->ctx->dce_environment != DCE_ENV_FPGA_MAXIMUS)
1206 REG_WAIT(OTG_GLOBAL_SYNC_STATUS[tg->inst],
1207 VUPDATE_NO_LOCK_EVENT_OCCURRED, 1,
1208 1, 100000);
1209
1210 plane_atomic_disable(dc, fe_idx);
1211
1212 dm_logger_write(dc->ctx->logger, LOG_DC,
1213 "Reset front end %d\n",
1214 fe_idx);
1215 }
1216
1217 static void dcn10_power_down_fe(struct dc *dc, int fe_idx)
1218 {
1219 struct dce_hwseq *hws = dc->hwseq;
1220 struct dpp *dpp = dc->res_pool->dpps[fe_idx];
1221
1222 reset_front_end(dc, fe_idx);
1223
1224 REG_SET(DC_IP_REQUEST_CNTL, 0,
1225 IP_REQUEST_EN, 1);
1226 dpp_pg_control(hws, fe_idx, false);
1227 hubp_pg_control(hws, fe_idx, false);
1228 dpp->funcs->dpp_reset(dpp);
1229 REG_SET(DC_IP_REQUEST_CNTL, 0,
1230 IP_REQUEST_EN, 0);
1231 dm_logger_write(dc->ctx->logger, LOG_DEBUG,
1232 "Power gated front end %d\n", fe_idx);
1233
1234 if (dc->debug.sanity_checks)
1235 verify_allow_pstate_change_high(dc->hwseq);
1236 }
1237
1238 static void reset_hw_ctx_wrap(
1239 struct dc *dc,
1240 struct dc_state *context)
1241 {
1242 int i;
1243
1244 /* Reset Front End*/
1245 /* Lock*/
1246 for (i = 0; i < dc->res_pool->pipe_count; i++) {
1247 struct pipe_ctx *cur_pipe_ctx = &dc->current_state->res_ctx.pipe_ctx[i];
1248 struct timing_generator *tg = cur_pipe_ctx->stream_res.tg;
1249
1250 if (cur_pipe_ctx->stream)
1251 tg->funcs->lock(tg);
1252 }
1253 /* Disconnect*/
1254 for (i = dc->res_pool->pipe_count - 1; i >= 0 ; i--) {
1255 struct pipe_ctx *pipe_ctx_old =
1256 &dc->current_state->res_ctx.pipe_ctx[i];
1257 struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
1258
1259 if (!pipe_ctx->stream ||
1260 !pipe_ctx->plane_state ||
1261 pipe_need_reprogram(pipe_ctx_old, pipe_ctx)) {
1262
1263 plane_atomic_disconnect(dc, i);
1264 }
1265 }
1266 /* Unlock*/
1267 for (i = dc->res_pool->pipe_count - 1; i >= 0; i--) {
1268 struct pipe_ctx *cur_pipe_ctx = &dc->current_state->res_ctx.pipe_ctx[i];
1269 struct timing_generator *tg = cur_pipe_ctx->stream_res.tg;
1270
1271 if (cur_pipe_ctx->stream)
1272 tg->funcs->unlock(tg);
1273 }
1274
1275 /* Disable and Powerdown*/
1276 for (i = dc->res_pool->pipe_count - 1; i >= 0 ; i--) {
1277 struct pipe_ctx *pipe_ctx_old =
1278 &dc->current_state->res_ctx.pipe_ctx[i];
1279 struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
1280
1281 /*if (!pipe_ctx_old->stream)
1282 continue;*/
1283
1284 if (pipe_ctx->stream && pipe_ctx->plane_state
1285 && !pipe_need_reprogram(pipe_ctx_old, pipe_ctx))
1286 continue;
1287
1288 plane_atomic_disable(dc, i);
1289
1290 if (!pipe_ctx->stream || !pipe_ctx->plane_state)
1291 plane_atomic_power_down(dc, i);
1292 }
1293
1294 /* Reset Back End*/
1295 for (i = dc->res_pool->pipe_count - 1; i >= 0 ; i--) {
1296 struct pipe_ctx *pipe_ctx_old =
1297 &dc->current_state->res_ctx.pipe_ctx[i];
1298 struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
1299
1300 if (!pipe_ctx_old->stream)
1301 continue;
1302
1303 if (pipe_ctx_old->top_pipe)
1304 continue;
1305
1306 if (!pipe_ctx->stream ||
1307 pipe_need_reprogram(pipe_ctx_old, pipe_ctx)) {
1308 struct clock_source *old_clk = pipe_ctx_old->clock_source;
1309
1310 reset_back_end_for_pipe(dc, pipe_ctx_old, dc->current_state);
1311
1312 if (old_clk)
1313 old_clk->funcs->cs_power_down(old_clk);
1314 }
1315 }
1316
1317 }
1318
1319 static bool patch_address_for_sbs_tb_stereo(
1320 struct pipe_ctx *pipe_ctx, PHYSICAL_ADDRESS_LOC *addr)
1321 {
1322 struct dc_plane_state *plane_state = pipe_ctx->plane_state;
1323 bool sec_split = pipe_ctx->top_pipe &&
1324 pipe_ctx->top_pipe->plane_state == pipe_ctx->plane_state;
1325 if (sec_split && plane_state->address.type == PLN_ADDR_TYPE_GRPH_STEREO &&
1326 (pipe_ctx->stream->timing.timing_3d_format ==
1327 TIMING_3D_FORMAT_SIDE_BY_SIDE ||
1328 pipe_ctx->stream->timing.timing_3d_format ==
1329 TIMING_3D_FORMAT_TOP_AND_BOTTOM)) {
1330 *addr = plane_state->address.grph_stereo.left_addr;
1331 plane_state->address.grph_stereo.left_addr =
1332 plane_state->address.grph_stereo.right_addr;
1333 return true;
1334 } else {
1335 if (pipe_ctx->stream->view_format != VIEW_3D_FORMAT_NONE &&
1336 plane_state->address.type != PLN_ADDR_TYPE_GRPH_STEREO) {
1337 plane_state->address.type = PLN_ADDR_TYPE_GRPH_STEREO;
1338 plane_state->address.grph_stereo.right_addr =
1339 plane_state->address.grph_stereo.left_addr;
1340 }
1341 }
1342 return false;
1343 }
1344
1345 static void toggle_watermark_change_req(struct dce_hwseq *hws)
1346 {
1347 uint32_t watermark_change_req;
1348
1349 REG_GET(DCHUBBUB_ARB_WATERMARK_CHANGE_CNTL,
1350 DCHUBBUB_ARB_WATERMARK_CHANGE_REQUEST, &watermark_change_req);
1351
1352 if (watermark_change_req)
1353 watermark_change_req = 0;
1354 else
1355 watermark_change_req = 1;
1356
1357 REG_UPDATE(DCHUBBUB_ARB_WATERMARK_CHANGE_CNTL,
1358 DCHUBBUB_ARB_WATERMARK_CHANGE_REQUEST, watermark_change_req);
1359 }
1360
1361 static void dcn10_update_plane_addr(const struct dc *dc, struct pipe_ctx *pipe_ctx)
1362 {
1363 bool addr_patched = false;
1364 PHYSICAL_ADDRESS_LOC addr;
1365 struct dc_plane_state *plane_state = pipe_ctx->plane_state;
1366
1367 if (plane_state == NULL)
1368 return;
1369 addr_patched = patch_address_for_sbs_tb_stereo(pipe_ctx, &addr);
1370 pipe_ctx->plane_res.hubp->funcs->hubp_program_surface_flip_and_addr(
1371 pipe_ctx->plane_res.hubp,
1372 &plane_state->address,
1373 plane_state->flip_immediate);
1374 plane_state->status.requested_address = plane_state->address;
1375 if (addr_patched)
1376 pipe_ctx->plane_state->address.grph_stereo.left_addr = addr;
1377 }
1378
1379 static bool dcn10_set_input_transfer_func(
1380 struct pipe_ctx *pipe_ctx, const struct dc_plane_state *plane_state)
1381 {
1382 struct dpp *dpp_base = pipe_ctx->plane_res.dpp;
1383 const struct dc_transfer_func *tf = NULL;
1384 bool result = true;
1385
1386 if (dpp_base == NULL)
1387 return false;
1388
1389 if (plane_state->in_transfer_func)
1390 tf = plane_state->in_transfer_func;
1391
1392 if (plane_state->gamma_correction && dce_use_lut(plane_state))
1393 dpp_base->funcs->ipp_program_input_lut(dpp_base,
1394 plane_state->gamma_correction);
1395
1396 if (tf == NULL)
1397 dpp_base->funcs->ipp_set_degamma(dpp_base, IPP_DEGAMMA_MODE_BYPASS);
1398 else if (tf->type == TF_TYPE_PREDEFINED) {
1399 switch (tf->tf) {
1400 case TRANSFER_FUNCTION_SRGB:
1401 dpp_base->funcs->ipp_set_degamma(dpp_base,
1402 IPP_DEGAMMA_MODE_HW_sRGB);
1403 break;
1404 case TRANSFER_FUNCTION_BT709:
1405 dpp_base->funcs->ipp_set_degamma(dpp_base,
1406 IPP_DEGAMMA_MODE_HW_xvYCC);
1407 break;
1408 case TRANSFER_FUNCTION_LINEAR:
1409 dpp_base->funcs->ipp_set_degamma(dpp_base,
1410 IPP_DEGAMMA_MODE_BYPASS);
1411 break;
1412 case TRANSFER_FUNCTION_PQ:
1413 result = false;
1414 break;
1415 default:
1416 result = false;
1417 break;
1418 }
1419 } else if (tf->type == TF_TYPE_BYPASS) {
1420 dpp_base->funcs->ipp_set_degamma(dpp_base, IPP_DEGAMMA_MODE_BYPASS);
1421 } else {
1422 /*TF_TYPE_DISTRIBUTED_POINTS*/
1423 result = false;
1424 }
1425
1426 return result;
1427 }
1428 /*modify the method to handle rgb for arr_points*/
1429 static bool convert_to_custom_float(
1430 struct pwl_result_data *rgb_resulted,
1431 struct curve_points *arr_points,
1432 uint32_t hw_points_num)
1433 {
1434 struct custom_float_format fmt;
1435
1436 struct pwl_result_data *rgb = rgb_resulted;
1437
1438 uint32_t i = 0;
1439
1440 fmt.exponenta_bits = 6;
1441 fmt.mantissa_bits = 12;
1442 fmt.sign = false;
1443
1444 if (!convert_to_custom_float_format(
1445 arr_points[0].x,
1446 &fmt,
1447 &arr_points[0].custom_float_x)) {
1448 BREAK_TO_DEBUGGER();
1449 return false;
1450 }
1451
1452 if (!convert_to_custom_float_format(
1453 arr_points[0].offset,
1454 &fmt,
1455 &arr_points[0].custom_float_offset)) {
1456 BREAK_TO_DEBUGGER();
1457 return false;
1458 }
1459
1460 if (!convert_to_custom_float_format(
1461 arr_points[0].slope,
1462 &fmt,
1463 &arr_points[0].custom_float_slope)) {
1464 BREAK_TO_DEBUGGER();
1465 return false;
1466 }
1467
1468 fmt.mantissa_bits = 10;
1469 fmt.sign = false;
1470
1471 if (!convert_to_custom_float_format(
1472 arr_points[1].x,
1473 &fmt,
1474 &arr_points[1].custom_float_x)) {
1475 BREAK_TO_DEBUGGER();
1476 return false;
1477 }
1478
1479 if (!convert_to_custom_float_format(
1480 arr_points[1].y,
1481 &fmt,
1482 &arr_points[1].custom_float_y)) {
1483 BREAK_TO_DEBUGGER();
1484 return false;
1485 }
1486
1487 if (!convert_to_custom_float_format(
1488 arr_points[1].slope,
1489 &fmt,
1490 &arr_points[1].custom_float_slope)) {
1491 BREAK_TO_DEBUGGER();
1492 return false;
1493 }
1494
1495 fmt.mantissa_bits = 12;
1496 fmt.sign = true;
1497
1498 while (i != hw_points_num) {
1499 if (!convert_to_custom_float_format(
1500 rgb->red,
1501 &fmt,
1502 &rgb->red_reg)) {
1503 BREAK_TO_DEBUGGER();
1504 return false;
1505 }
1506
1507 if (!convert_to_custom_float_format(
1508 rgb->green,
1509 &fmt,
1510 &rgb->green_reg)) {
1511 BREAK_TO_DEBUGGER();
1512 return false;
1513 }
1514
1515 if (!convert_to_custom_float_format(
1516 rgb->blue,
1517 &fmt,
1518 &rgb->blue_reg)) {
1519 BREAK_TO_DEBUGGER();
1520 return false;
1521 }
1522
1523 if (!convert_to_custom_float_format(
1524 rgb->delta_red,
1525 &fmt,
1526 &rgb->delta_red_reg)) {
1527 BREAK_TO_DEBUGGER();
1528 return false;
1529 }
1530
1531 if (!convert_to_custom_float_format(
1532 rgb->delta_green,
1533 &fmt,
1534 &rgb->delta_green_reg)) {
1535 BREAK_TO_DEBUGGER();
1536 return false;
1537 }
1538
1539 if (!convert_to_custom_float_format(
1540 rgb->delta_blue,
1541 &fmt,
1542 &rgb->delta_blue_reg)) {
1543 BREAK_TO_DEBUGGER();
1544 return false;
1545 }
1546
1547 ++rgb;
1548 ++i;
1549 }
1550
1551 return true;
1552 }
1553 #define MAX_REGIONS_NUMBER 34
1554 #define MAX_LOW_POINT 25
1555 #define NUMBER_SEGMENTS 32
1556
1557 static bool dcn10_translate_regamma_to_hw_format(const struct dc_transfer_func
1558 *output_tf, struct pwl_params *regamma_params)
1559 {
1560 struct curve_points *arr_points;
1561 struct pwl_result_data *rgb_resulted;
1562 struct pwl_result_data *rgb;
1563 struct pwl_result_data *rgb_plus_1;
1564 struct fixed31_32 y_r;
1565 struct fixed31_32 y_g;
1566 struct fixed31_32 y_b;
1567 struct fixed31_32 y1_min;
1568 struct fixed31_32 y3_max;
1569
1570 int32_t segment_start, segment_end;
1571 int32_t i;
1572 uint32_t j, k, seg_distr[MAX_REGIONS_NUMBER], increment, start_index, hw_points;
1573
1574 if (output_tf == NULL || regamma_params == NULL ||
1575 output_tf->type == TF_TYPE_BYPASS)
1576 return false;
1577
1578 arr_points = regamma_params->arr_points;
1579 rgb_resulted = regamma_params->rgb_resulted;
1580 hw_points = 0;
1581
1582 memset(regamma_params, 0, sizeof(struct pwl_params));
1583 memset(seg_distr, 0, sizeof(seg_distr));
1584
1585 if (output_tf->tf == TRANSFER_FUNCTION_PQ) {
1586 /* 32 segments
1587 * segments are from 2^-25 to 2^7
1588 */
1589 for (i = 0; i < 32 ; i++)
1590 seg_distr[i] = 3;
1591
1592 segment_start = -25;
1593 segment_end = 7;
1594 } else {
1595 /* 10 segments
1596 * segment is from 2^-10 to 2^0
1597 * There are less than 256 points, for optimization
1598 */
1599 seg_distr[0] = 3;
1600 seg_distr[1] = 4;
1601 seg_distr[2] = 4;
1602 seg_distr[3] = 4;
1603 seg_distr[4] = 4;
1604 seg_distr[5] = 4;
1605 seg_distr[6] = 4;
1606 seg_distr[7] = 4;
1607 seg_distr[8] = 5;
1608 seg_distr[9] = 5;
1609
1610 segment_start = -10;
1611 segment_end = 0;
1612 }
1613
1614 for (i = segment_end - segment_start; i < MAX_REGIONS_NUMBER ; i++)
1615 seg_distr[i] = -1;
1616
1617 for (k = 0; k < MAX_REGIONS_NUMBER; k++) {
1618 if (seg_distr[k] != -1)
1619 hw_points += (1 << seg_distr[k]);
1620 }
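/* With the distributions above this works out to 32 * 2^3 = 256 hw points
 * for the PQ case and 2^3 + 7 * 2^4 + 2 * 2^5 = 184 hw points for the
 * 10-segment case.
 */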
1621
1622 j = 0;
1623 for (k = 0; k < (segment_end - segment_start); k++) {
1624 increment = NUMBER_SEGMENTS / (1 << seg_distr[k]);
1625 start_index = (segment_start + k + MAX_LOW_POINT) * NUMBER_SEGMENTS;
1626 for (i = start_index; i < start_index + NUMBER_SEGMENTS; i += increment) {
1627 if (j == hw_points - 1)
1628 break;
1629 rgb_resulted[j].red = output_tf->tf_pts.red[i];
1630 rgb_resulted[j].green = output_tf->tf_pts.green[i];
1631 rgb_resulted[j].blue = output_tf->tf_pts.blue[i];
1632 j++;
1633 }
1634 }
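/* Example for the 10-segment case above: segment_start = -10 and
 * MAX_LOW_POINT = 25 give start_index = (-10 + 0 + 25) * 32 = 480 for
 * k = 0, and seg_distr[0] = 3 samples that region with
 * increment = 32 / 2^3 = 4, i.e. every 4th of its 32 transfer function
 * points.
 */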
1635
1636 /* last point */
1637 start_index = (segment_end + MAX_LOW_POINT) * NUMBER_SEGMENTS;
1638 rgb_resulted[hw_points - 1].red =
1639 output_tf->tf_pts.red[start_index];
1640 rgb_resulted[hw_points - 1].green =
1641 output_tf->tf_pts.green[start_index];
1642 rgb_resulted[hw_points - 1].blue =
1643 output_tf->tf_pts.blue[start_index];
1644
1645 arr_points[0].x = dal_fixed31_32_pow(dal_fixed31_32_from_int(2),
1646 dal_fixed31_32_from_int(segment_start));
1647 arr_points[1].x = dal_fixed31_32_pow(dal_fixed31_32_from_int(2),
1648 dal_fixed31_32_from_int(segment_end));
1649 arr_points[2].x = dal_fixed31_32_pow(dal_fixed31_32_from_int(2),
1650 dal_fixed31_32_from_int(segment_end));
1651
1652 y_r = rgb_resulted[0].red;
1653 y_g = rgb_resulted[0].green;
1654 y_b = rgb_resulted[0].blue;
1655
1656 y1_min = dal_fixed31_32_min(y_r, dal_fixed31_32_min(y_g, y_b));
1657
1658 arr_points[0].y = y1_min;
1659 arr_points[0].slope = dal_fixed31_32_div(
1660 arr_points[0].y,
1661 arr_points[0].x);
1662 y_r = rgb_resulted[hw_points - 1].red;
1663 y_g = rgb_resulted[hw_points - 1].green;
1664 y_b = rgb_resulted[hw_points - 1].blue;
1665
1666 /* see comment above, m_arrPoints[1].y should be the Y value for the
1667 * region end (m_numOfHwPoints), not last HW point(m_numOfHwPoints - 1)
1668 */
1669 y3_max = dal_fixed31_32_max(y_r, dal_fixed31_32_max(y_g, y_b));
1670
1671 arr_points[1].y = y3_max;
1672 arr_points[2].y = y3_max;
1673
1674 arr_points[1].slope = dal_fixed31_32_zero;
1675 arr_points[2].slope = dal_fixed31_32_zero;
1676
1677 if (output_tf->tf == TRANSFER_FUNCTION_PQ) {
1678 /* for PQ, we want to have a straight line from last HW X point,
1679 * and the slope to be such that we hit 1.0 at 10000 nits.
1680 */
1681 const struct fixed31_32 end_value =
1682 dal_fixed31_32_from_int(125);
1683
1684 arr_points[1].slope = dal_fixed31_32_div(
1685 dal_fixed31_32_sub(dal_fixed31_32_one, arr_points[1].y),
1686 dal_fixed31_32_sub(end_value, arr_points[1].x));
1687 arr_points[2].slope = dal_fixed31_32_div(
1688 dal_fixed31_32_sub(dal_fixed31_32_one, arr_points[1].y),
1689 dal_fixed31_32_sub(end_value, arr_points[1].x));
1690 }
1691
1692 regamma_params->hw_points_num = hw_points;
1693
1694 i = 1;
1695 for (k = 0; k < MAX_REGIONS_NUMBER && i < MAX_REGIONS_NUMBER; k++) {
1696 if (seg_distr[k] != -1) {
1697 regamma_params->arr_curve_points[k].segments_num =
1698 seg_distr[k];
1699 regamma_params->arr_curve_points[i].offset =
1700 regamma_params->arr_curve_points[k].
1701 offset + (1 << seg_distr[k]);
1702 }
1703 i++;
1704 }
1705
1706 if (seg_distr[k] != -1)
1707 regamma_params->arr_curve_points[k].segments_num =
1708 seg_distr[k];
1709
1710 rgb = rgb_resulted;
1711 rgb_plus_1 = rgb_resulted + 1;
1712
1713 i = 1;
1714
1715 while (i != hw_points + 1) {
1716 if (dal_fixed31_32_lt(rgb_plus_1->red, rgb->red))
1717 rgb_plus_1->red = rgb->red;
1718 if (dal_fixed31_32_lt(rgb_plus_1->green, rgb->green))
1719 rgb_plus_1->green = rgb->green;
1720 if (dal_fixed31_32_lt(rgb_plus_1->blue, rgb->blue))
1721 rgb_plus_1->blue = rgb->blue;
1722
1723 rgb->delta_red = dal_fixed31_32_sub(
1724 rgb_plus_1->red,
1725 rgb->red);
1726 rgb->delta_green = dal_fixed31_32_sub(
1727 rgb_plus_1->green,
1728 rgb->green);
1729 rgb->delta_blue = dal_fixed31_32_sub(
1730 rgb_plus_1->blue,
1731 rgb->blue);
1732
1733 ++rgb_plus_1;
1734 ++rgb;
1735 ++i;
1736 }
1737
1738 convert_to_custom_float(rgb_resulted, arr_points, hw_points);
1739
1740 return true;
1741 }
1742
1743 static bool dcn10_set_output_transfer_func(
1744 struct pipe_ctx *pipe_ctx,
1745 const struct dc_stream_state *stream)
1746 {
1747 struct dpp *dpp = pipe_ctx->plane_res.dpp;
1748
1749 if (dpp == NULL)
1750 return false;
1751
1752 dpp->regamma_params.hw_points_num = GAMMA_HW_POINTS_NUM;
1753
1754 if (stream->out_transfer_func &&
1755 stream->out_transfer_func->type ==
1756 TF_TYPE_PREDEFINED &&
1757 stream->out_transfer_func->tf ==
1758 TRANSFER_FUNCTION_SRGB) {
1759 dpp->funcs->opp_set_regamma_mode(dpp, OPP_REGAMMA_SRGB);
1760 } else if (dcn10_translate_regamma_to_hw_format(
1761 stream->out_transfer_func, &dpp->regamma_params)) {
1762 dpp->funcs->opp_program_regamma_pwl(dpp, &dpp->regamma_params);
1763 dpp->funcs->opp_set_regamma_mode(dpp, OPP_REGAMMA_USER);
1764 } else {
1765 dpp->funcs->opp_set_regamma_mode(dpp, OPP_REGAMMA_BYPASS);
1766 }
1767
1768 return true;
1769 }
1770
1771 static void dcn10_pipe_control_lock(
1772 struct dc *dc,
1773 struct pipe_ctx *pipe,
1774 bool lock)
1775 {
1776 struct hubp *hubp = NULL;
1777 hubp = dc->res_pool->hubps[pipe->pipe_idx];
1778 /* use the TG master update lock to lock everything on the TG,
1779 * therefore only the top pipe needs to take the lock
1780 */
1781 if (pipe->top_pipe)
1782 return;
1783
1784 if (dc->debug.sanity_checks)
1785 verify_allow_pstate_change_high(dc->hwseq);
1786
1787 if (lock)
1788 pipe->stream_res.tg->funcs->lock(pipe->stream_res.tg);
1789 else
1790 pipe->stream_res.tg->funcs->unlock(pipe->stream_res.tg);
1791
1792 if (dc->debug.sanity_checks)
1793 verify_allow_pstate_change_high(dc->hwseq);
1794 }
1795
1796 static bool wait_for_reset_trigger_to_occur(
1797 struct dc_context *dc_ctx,
1798 struct timing_generator *tg)
1799 {
1800 bool rc = false;
1801
1802 /* To avoid an endless loop we wait at most
1803 * frames_to_wait_on_triggered_reset frames for the reset to occur. */
1804 const uint32_t frames_to_wait_on_triggered_reset = 10;
1805 int i;
1806
1807 for (i = 0; i < frames_to_wait_on_triggered_reset; i++) {
1808
1809 if (!tg->funcs->is_counter_moving(tg)) {
1810 DC_ERROR("TG counter is not moving!\n");
1811 break;
1812 }
1813
1814 if (tg->funcs->did_triggered_reset_occur(tg)) {
1815 rc = true;
1816 /* usually occurs at i=1 */
1817 DC_SYNC_INFO("GSL: reset occurred at wait count: %d\n",
1818 i);
1819 break;
1820 }
1821
1822 /* Wait for one frame. */
1823 tg->funcs->wait_for_state(tg, CRTC_STATE_VACTIVE);
1824 tg->funcs->wait_for_state(tg, CRTC_STATE_VBLANK);
1825 }
1826
1827 if (false == rc)
1828 DC_ERROR("GSL: Timeout on reset trigger!\n");
1829
1830 return rc;
1831 }
1832
1833 static void dcn10_enable_timing_synchronization(
1834 struct dc *dc,
1835 int group_index,
1836 int group_size,
1837 struct pipe_ctx *grouped_pipes[])
1838 {
1839 struct dc_context *dc_ctx = dc->ctx;
1840 int i;
1841
1842 DC_SYNC_INFO("Setting up OTG reset trigger\n");
1843
1844 for (i = 1; i < group_size; i++)
1845 grouped_pipes[i]->stream_res.tg->funcs->enable_reset_trigger(
1846 grouped_pipes[i]->stream_res.tg, grouped_pipes[0]->stream_res.tg->inst);
1847
1848
1849 DC_SYNC_INFO("Waiting for trigger\n");
1850
1851 /* Need to check only 1 pipe for the reset having occurred, as all the
1852 * others are synchronized. Look at the last pipe programmed to reset.
1853 */
1854 wait_for_reset_trigger_to_occur(dc_ctx, grouped_pipes[1]->stream_res.tg);
1855 for (i = 1; i < group_size; i++)
1856 grouped_pipes[i]->stream_res.tg->funcs->disable_reset_trigger(
1857 grouped_pipes[i]->stream_res.tg);
1858
1859 DC_SYNC_INFO("Sync complete\n");
1860 }
1861
1862 static void print_rq_dlg_ttu(
1863 struct dc *core_dc,
1864 struct pipe_ctx *pipe_ctx)
1865 {
1866 dm_logger_write(core_dc->ctx->logger, LOG_BANDWIDTH_CALCS,
1867 "\n============== DML TTU Output parameters [%d] ==============\n"
1868 "qos_level_low_wm: %d, \n"
1869 "qos_level_high_wm: %d, \n"
1870 "min_ttu_vblank: %d, \n"
1871 "qos_level_flip: %d, \n"
1872 "refcyc_per_req_delivery_l: %d, \n"
1873 "qos_level_fixed_l: %d, \n"
1874 "qos_ramp_disable_l: %d, \n"
1875 "refcyc_per_req_delivery_pre_l: %d, \n"
1876 "refcyc_per_req_delivery_c: %d, \n"
1877 "qos_level_fixed_c: %d, \n"
1878 "qos_ramp_disable_c: %d, \n"
1879 "refcyc_per_req_delivery_pre_c: %d\n"
1880 "=============================================================\n",
1881 pipe_ctx->pipe_idx,
1882 pipe_ctx->ttu_regs.qos_level_low_wm,
1883 pipe_ctx->ttu_regs.qos_level_high_wm,
1884 pipe_ctx->ttu_regs.min_ttu_vblank,
1885 pipe_ctx->ttu_regs.qos_level_flip,
1886 pipe_ctx->ttu_regs.refcyc_per_req_delivery_l,
1887 pipe_ctx->ttu_regs.qos_level_fixed_l,
1888 pipe_ctx->ttu_regs.qos_ramp_disable_l,
1889 pipe_ctx->ttu_regs.refcyc_per_req_delivery_pre_l,
1890 pipe_ctx->ttu_regs.refcyc_per_req_delivery_c,
1891 pipe_ctx->ttu_regs.qos_level_fixed_c,
1892 pipe_ctx->ttu_regs.qos_ramp_disable_c,
1893 pipe_ctx->ttu_regs.refcyc_per_req_delivery_pre_c
1894 );
1895
1896 dm_logger_write(core_dc->ctx->logger, LOG_BANDWIDTH_CALCS,
1897 "\n============== DML DLG Output parameters [%d] ==============\n"
1898 "refcyc_h_blank_end: %d, \n"
1899 "dlg_vblank_end: %d, \n"
1900 "min_dst_y_next_start: %d, \n"
1901 "refcyc_per_htotal: %d, \n"
1902 "refcyc_x_after_scaler: %d, \n"
1903 "dst_y_after_scaler: %d, \n"
1904 "dst_y_prefetch: %d, \n"
1905 "dst_y_per_vm_vblank: %d, \n"
1906 "dst_y_per_row_vblank: %d, \n"
1907 "ref_freq_to_pix_freq: %d, \n"
1908 "vratio_prefetch: %d, \n"
1909 "refcyc_per_pte_group_vblank_l: %d, \n"
1910 "refcyc_per_meta_chunk_vblank_l: %d, \n"
1911 "dst_y_per_pte_row_nom_l: %d, \n"
1912 "refcyc_per_pte_group_nom_l: %d, \n",
1913 pipe_ctx->pipe_idx,
1914 pipe_ctx->dlg_regs.refcyc_h_blank_end,
1915 pipe_ctx->dlg_regs.dlg_vblank_end,
1916 pipe_ctx->dlg_regs.min_dst_y_next_start,
1917 pipe_ctx->dlg_regs.refcyc_per_htotal,
1918 pipe_ctx->dlg_regs.refcyc_x_after_scaler,
1919 pipe_ctx->dlg_regs.dst_y_after_scaler,
1920 pipe_ctx->dlg_regs.dst_y_prefetch,
1921 pipe_ctx->dlg_regs.dst_y_per_vm_vblank,
1922 pipe_ctx->dlg_regs.dst_y_per_row_vblank,
1923 pipe_ctx->dlg_regs.ref_freq_to_pix_freq,
1924 pipe_ctx->dlg_regs.vratio_prefetch,
1925 pipe_ctx->dlg_regs.refcyc_per_pte_group_vblank_l,
1926 pipe_ctx->dlg_regs.refcyc_per_meta_chunk_vblank_l,
1927 pipe_ctx->dlg_regs.dst_y_per_pte_row_nom_l,
1928 pipe_ctx->dlg_regs.refcyc_per_pte_group_nom_l
1929 );
1930
1931 dm_logger_write(core_dc->ctx->logger, LOG_BANDWIDTH_CALCS,
1932 "\ndst_y_per_meta_row_nom_l: %d, \n"
1933 "refcyc_per_meta_chunk_nom_l: %d, \n"
1934 "refcyc_per_line_delivery_pre_l: %d, \n"
1935 "refcyc_per_line_delivery_l: %d, \n"
1936 "vratio_prefetch_c: %d, \n"
1937 "refcyc_per_pte_group_vblank_c: %d, \n"
1938 "refcyc_per_meta_chunk_vblank_c: %d, \n"
1939 "dst_y_per_pte_row_nom_c: %d, \n"
1940 "refcyc_per_pte_group_nom_c: %d, \n"
1941 "dst_y_per_meta_row_nom_c: %d, \n"
1942 "refcyc_per_meta_chunk_nom_c: %d, \n"
1943 "refcyc_per_line_delivery_pre_c: %d, \n"
1944 "refcyc_per_line_delivery_c: %d \n"
1945 "========================================================\n",
1946 pipe_ctx->dlg_regs.dst_y_per_meta_row_nom_l,
1947 pipe_ctx->dlg_regs.refcyc_per_meta_chunk_nom_l,
1948 pipe_ctx->dlg_regs.refcyc_per_line_delivery_pre_l,
1949 pipe_ctx->dlg_regs.refcyc_per_line_delivery_l,
1950 pipe_ctx->dlg_regs.vratio_prefetch_c,
1951 pipe_ctx->dlg_regs.refcyc_per_pte_group_vblank_c,
1952 pipe_ctx->dlg_regs.refcyc_per_meta_chunk_vblank_c,
1953 pipe_ctx->dlg_regs.dst_y_per_pte_row_nom_c,
1954 pipe_ctx->dlg_regs.refcyc_per_pte_group_nom_c,
1955 pipe_ctx->dlg_regs.dst_y_per_meta_row_nom_c,
1956 pipe_ctx->dlg_regs.refcyc_per_meta_chunk_nom_c,
1957 pipe_ctx->dlg_regs.refcyc_per_line_delivery_pre_c,
1958 pipe_ctx->dlg_regs.refcyc_per_line_delivery_c
1959 );
1960
1961 dm_logger_write(core_dc->ctx->logger, LOG_BANDWIDTH_CALCS,
1962 "\n============== DML RQ Output parameters [%d] ==============\n"
1963 "chunk_size: %d \n"
1964 "min_chunk_size: %d \n"
1965 "meta_chunk_size: %d \n"
1966 "min_meta_chunk_size: %d \n"
1967 "dpte_group_size: %d \n"
1968 "mpte_group_size: %d \n"
1969 "swath_height: %d \n"
1970 "pte_row_height_linear: %d \n"
1971 "========================================================\n",
1972 pipe_ctx->pipe_idx,
1973 pipe_ctx->rq_regs.rq_regs_l.chunk_size,
1974 pipe_ctx->rq_regs.rq_regs_l.min_chunk_size,
1975 pipe_ctx->rq_regs.rq_regs_l.meta_chunk_size,
1976 pipe_ctx->rq_regs.rq_regs_l.min_meta_chunk_size,
1977 pipe_ctx->rq_regs.rq_regs_l.dpte_group_size,
1978 pipe_ctx->rq_regs.rq_regs_l.mpte_group_size,
1979 pipe_ctx->rq_regs.rq_regs_l.swath_height,
1980 pipe_ctx->rq_regs.rq_regs_l.pte_row_height_linear
1981 );
1982 }
1983
1984 static void dcn10_power_on_fe(
1985 struct dc *dc,
1986 struct pipe_ctx *pipe_ctx,
1987 struct dc_state *context)
1988 {
1989 struct dc_plane_state *plane_state = pipe_ctx->plane_state;
1990 struct dce_hwseq *hws = dc->hwseq;
1991
1992 if (dc->debug.sanity_checks) {
1993 verify_allow_pstate_change_high(dc->hwseq);
1994 }
1995
1996 power_on_plane(dc->hwseq,
1997 pipe_ctx->pipe_idx);
1998
1999 /* enable DCFCLK for the current DCHUB pipe */
2000 REG_UPDATE(HUBP_CLK_CNTL[pipe_ctx->pipe_idx],
2001 HUBP_CLOCK_ENABLE, 1);
2002
2003 /* make sure OPP_PIPE_CLOCK_EN = 1 */
2004 REG_UPDATE(OPP_PIPE_CONTROL[pipe_ctx->stream_res.tg->inst],
2005 OPP_PIPE_CLOCK_EN, 1);
2006 /*TODO: REG_UPDATE(DENTIST_DISPCLK_CNTL, DENTIST_DPPCLK_WDIVIDER, 0x1f);*/
2007
2008 if (plane_state) {
2009 dm_logger_write(dc->ctx->logger, LOG_DC,
2010 "Pipe:%d 0x%x: addr hi:0x%x, "
2011 "addr low:0x%x, "
2012 "src: %d, %d, %d,"
2013 " %d; dst: %d, %d, %d, %d;\n",
2014 pipe_ctx->pipe_idx,
2015 plane_state,
2016 plane_state->address.grph.addr.high_part,
2017 plane_state->address.grph.addr.low_part,
2018 plane_state->src_rect.x,
2019 plane_state->src_rect.y,
2020 plane_state->src_rect.width,
2021 plane_state->src_rect.height,
2022 plane_state->dst_rect.x,
2023 plane_state->dst_rect.y,
2024 plane_state->dst_rect.width,
2025 plane_state->dst_rect.height);
2026
2027 dm_logger_write(dc->ctx->logger, LOG_DC,
2028 "Pipe %d: width, height, x, y format:%d\n"
2029 "viewport:%d, %d, %d, %d\n"
2030 "recout: %d, %d, %d, %d\n",
2031 pipe_ctx->pipe_idx,
2032 plane_state->format,
2033 pipe_ctx->plane_res.scl_data.viewport.width,
2034 pipe_ctx->plane_res.scl_data.viewport.height,
2035 pipe_ctx->plane_res.scl_data.viewport.x,
2036 pipe_ctx->plane_res.scl_data.viewport.y,
2037 pipe_ctx->plane_res.scl_data.recout.width,
2038 pipe_ctx->plane_res.scl_data.recout.height,
2039 pipe_ctx->plane_res.scl_data.recout.x,
2040 pipe_ctx->plane_res.scl_data.recout.y);
2041 print_rq_dlg_ttu(dc, pipe_ctx);
2042 }
2043
2044 if (dc->debug.sanity_checks) {
2045 verify_allow_pstate_change_high(dc->hwseq);
2046 }
2047 }
2048
2049 static void program_gamut_remap(struct pipe_ctx *pipe_ctx)
2050 {
2051 struct dpp_grph_csc_adjustment adjust;
2052 memset(&adjust, 0, sizeof(adjust));
2053 adjust.gamut_adjust_type = GRAPHICS_GAMUT_ADJUST_TYPE_BYPASS;
2054
2055
2056 if (pipe_ctx->stream->gamut_remap_matrix.enable_remap == true) {
2057 adjust.gamut_adjust_type = GRAPHICS_GAMUT_ADJUST_TYPE_SW;
2058 adjust.temperature_matrix[0] =
2059 pipe_ctx->stream->
2060 gamut_remap_matrix.matrix[0];
2061 adjust.temperature_matrix[1] =
2062 pipe_ctx->stream->
2063 gamut_remap_matrix.matrix[1];
2064 adjust.temperature_matrix[2] =
2065 pipe_ctx->stream->
2066 gamut_remap_matrix.matrix[2];
2067 adjust.temperature_matrix[3] =
2068 pipe_ctx->stream->
2069 gamut_remap_matrix.matrix[4];
2070 adjust.temperature_matrix[4] =
2071 pipe_ctx->stream->
2072 gamut_remap_matrix.matrix[5];
2073 adjust.temperature_matrix[5] =
2074 pipe_ctx->stream->
2075 gamut_remap_matrix.matrix[6];
2076 adjust.temperature_matrix[6] =
2077 pipe_ctx->stream->
2078 gamut_remap_matrix.matrix[8];
2079 adjust.temperature_matrix[7] =
2080 pipe_ctx->stream->
2081 gamut_remap_matrix.matrix[9];
2082 adjust.temperature_matrix[8] =
2083 pipe_ctx->stream->
2084 gamut_remap_matrix.matrix[10];
2085 }
2086
2087 pipe_ctx->plane_res.dpp->funcs->dpp_set_gamut_remap(pipe_ctx->plane_res.dpp, &adjust);
2088 }
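/*
 * The copy above treats gamut_remap_matrix.matrix[] as a row-major 3x4
 * matrix and skips the 4th entry of each row (indices 3, 7 and 11,
 * presumably the per-row offset terms), leaving the 3x3 temperature matrix:
 *
 *	temperature_matrix[0..2] = matrix[0], matrix[1], matrix[2]
 *	temperature_matrix[3..5] = matrix[4], matrix[5], matrix[6]
 *	temperature_matrix[6..8] = matrix[8], matrix[9], matrix[10]
 */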
2089
2090
2091 static void program_csc_matrix(struct pipe_ctx *pipe_ctx,
2092 enum dc_color_space colorspace,
2093 uint16_t *matrix)
2094 {
2095 int i;
2096 struct out_csc_color_matrix tbl_entry;
2097
2098 if (pipe_ctx->stream->csc_color_matrix.enable_adjustment
2099 == true) {
2100 enum dc_color_space color_space =
2101 pipe_ctx->stream->output_color_space;
2102
2103 //uint16_t matrix[12];
2104 for (i = 0; i < 12; i++)
2105 tbl_entry.regval[i] = pipe_ctx->stream->csc_color_matrix.matrix[i];
2106
2107 tbl_entry.color_space = color_space;
2108 //tbl_entry.regval = matrix;
2109 pipe_ctx->plane_res.dpp->funcs->opp_set_csc_adjustment(pipe_ctx->plane_res.dpp, &tbl_entry);
2110 } else {
2111 pipe_ctx->plane_res.dpp->funcs->opp_set_csc_default(pipe_ctx->plane_res.dpp, colorspace);
2112 }
2113 }
2114 static bool is_lower_pipe_tree_visible(struct pipe_ctx *pipe_ctx)
2115 {
2116 if (pipe_ctx->plane_state->visible)
2117 return true;
2118 if (pipe_ctx->bottom_pipe && is_lower_pipe_tree_visible(pipe_ctx->bottom_pipe))
2119 return true;
2120 return false;
2121 }
2122
2123 static bool is_upper_pipe_tree_visible(struct pipe_ctx *pipe_ctx)
2124 {
2125 if (pipe_ctx->plane_state->visible)
2126 return true;
2127 if (pipe_ctx->top_pipe && is_upper_pipe_tree_visible(pipe_ctx->top_pipe))
2128 return true;
2129 return false;
2130 }
2131
2132 static bool is_pipe_tree_visible(struct pipe_ctx *pipe_ctx)
2133 {
2134 if (pipe_ctx->plane_state->visible)
2135 return true;
2136 if (pipe_ctx->top_pipe && is_upper_pipe_tree_visible(pipe_ctx->top_pipe))
2137 return true;
2138 if (pipe_ctx->bottom_pipe && is_lower_pipe_tree_visible(pipe_ctx->bottom_pipe))
2139 return true;
2140 return false;
2141 }
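/*
 * Visibility is evaluated over the whole blending chain: a pipe tree counts
 * as visible if the pipe itself, any pipe above it (top_pipe chain) or any
 * pipe below it (bottom_pipe chain) has a visible plane_state. For a
 * hypothetical two-plane MPO case where only the bottom plane is visible:
 *
 *	top->plane_state->visible    == false
 *	bottom->plane_state->visible == true
 *	is_pipe_tree_visible(top)    returns true via the bottom_pipe chain
 */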
2142
2143 static bool is_rgb_cspace(enum dc_color_space output_color_space)
2144 {
2145 switch (output_color_space) {
2146 case COLOR_SPACE_SRGB:
2147 case COLOR_SPACE_SRGB_LIMITED:
2148 case COLOR_SPACE_2020_RGB_FULLRANGE:
2149 case COLOR_SPACE_2020_RGB_LIMITEDRANGE:
2150 case COLOR_SPACE_ADOBERGB:
2151 return true;
2152 case COLOR_SPACE_YCBCR601:
2153 case COLOR_SPACE_YCBCR709:
2154 case COLOR_SPACE_YCBCR601_LIMITED:
2155 case COLOR_SPACE_YCBCR709_LIMITED:
2156 case COLOR_SPACE_2020_YCBCR:
2157 return false;
2158 default:
2159 /* Unknown color space: add a case to this switch */
2160 BREAK_TO_DEBUGGER();
2161 return false;
2162 }
2163 }
2164
2165 static void dcn10_get_surface_visual_confirm_color(
2166 const struct pipe_ctx *pipe_ctx,
2167 struct tg_color *color)
2168 {
2169 uint32_t color_value = MAX_TG_COLOR_VALUE;
2170
2171 switch (pipe_ctx->plane_res.scl_data.format) {
2172 case PIXEL_FORMAT_ARGB8888:
2173 /* set border color to red */
2174 color->color_r_cr = color_value;
2175 break;
2176
2177 case PIXEL_FORMAT_ARGB2101010:
2178 /* set border color to blue */
2179 color->color_b_cb = color_value;
2180 break;
2181 case PIXEL_FORMAT_420BPP8:
2182 /* set border color to green */
2183 color->color_g_y = color_value;
2184 break;
2185 case PIXEL_FORMAT_420BPP10:
2186 /* set border color to yellow */
2187 color->color_g_y = color_value;
2188 color->color_r_cr = color_value;
2189 break;
2190 case PIXEL_FORMAT_FP16:
2191 /* set border color to white */
2192 color->color_r_cr = color_value;
2193 color->color_b_cb = color_value;
2194 color->color_g_y = color_value;
2195 break;
2196 default:
2197 break;
2198 }
2199 }
2200
2201 static void mmhub_read_vm_system_aperture_settings(struct dcn10_hubp *hubp1,
2202 struct vm_system_aperture_param *apt,
2203 struct dce_hwseq *hws)
2204 {
2205 PHYSICAL_ADDRESS_LOC physical_page_number;
2206 uint32_t logical_addr_low;
2207 uint32_t logical_addr_high;
2208
2209 REG_GET(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR_MSB,
2210 PHYSICAL_PAGE_NUMBER_MSB, &physical_page_number.high_part);
2211 REG_GET(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR_LSB,
2212 PHYSICAL_PAGE_NUMBER_LSB, &physical_page_number.low_part);
2213
2214 REG_GET(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
2215 LOGICAL_ADDR, &logical_addr_low);
2216
2217 REG_GET(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
2218 LOGICAL_ADDR, &logical_addr_high);
2219
2220 apt->sys_default.quad_part = physical_page_number.quad_part << 12;
2221 apt->sys_low.quad_part = (int64_t)logical_addr_low << 18;
2222 apt->sys_high.quad_part = (int64_t)logical_addr_high << 18;
2223 }
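/*
 * The shifts above convert the register fields to byte addresses: the
 * default physical page number is treated as a 4 KB page frame number
 * (<< 12) and the aperture LOW/HIGH logical addresses as 256 KB units
 * (<< 18). For a hypothetical logical_addr_low of 0x10:
 *
 *	apt->sys_low.quad_part = (int64_t)0x10 << 18;	// 0x400000, i.e. 4 MB
 */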
2224
2225 /* Temporary: read settings from registers; in the future these values will come from kmd directly */
2226 static void mmhub_read_vm_context0_settings(struct dcn10_hubp *hubp1,
2227 struct vm_context0_param *vm0,
2228 struct dce_hwseq *hws)
2229 {
2230 PHYSICAL_ADDRESS_LOC fb_base;
2231 PHYSICAL_ADDRESS_LOC fb_offset;
2232 uint32_t fb_base_value;
2233 uint32_t fb_offset_value;
2234
2235 REG_GET(DCHUBBUB_SDPIF_FB_BASE, SDPIF_FB_BASE, &fb_base_value);
2236 REG_GET(DCHUBBUB_SDPIF_FB_OFFSET, SDPIF_FB_OFFSET, &fb_offset_value);
2237
2238 REG_GET(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR_HI32,
2239 PAGE_DIRECTORY_ENTRY_HI32, &vm0->pte_base.high_part);
2240 REG_GET(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR_LO32,
2241 PAGE_DIRECTORY_ENTRY_LO32, &vm0->pte_base.low_part);
2242
2243 REG_GET(VM_CONTEXT0_PAGE_TABLE_START_ADDR_HI32,
2244 LOGICAL_PAGE_NUMBER_HI4, &vm0->pte_start.high_part);
2245 REG_GET(VM_CONTEXT0_PAGE_TABLE_START_ADDR_LO32,
2246 LOGICAL_PAGE_NUMBER_LO32, &vm0->pte_start.low_part);
2247
2248 REG_GET(VM_CONTEXT0_PAGE_TABLE_END_ADDR_HI32,
2249 LOGICAL_PAGE_NUMBER_HI4, &vm0->pte_end.high_part);
2250 REG_GET(VM_CONTEXT0_PAGE_TABLE_END_ADDR_LO32,
2251 LOGICAL_PAGE_NUMBER_LO32, &vm0->pte_end.low_part);
2252
2253 REG_GET(VM_L2_PROTECTION_FAULT_DEFAULT_ADDR_HI32,
2254 PHYSICAL_PAGE_ADDR_HI4, &vm0->fault_default.high_part);
2255 REG_GET(VM_L2_PROTECTION_FAULT_DEFAULT_ADDR_LO32,
2256 PHYSICAL_PAGE_ADDR_LO32, &vm0->fault_default.low_part);
2257
2258 /*
2259 * The values in VM_CONTEXT0_PAGE_TABLE_BASE_ADDR are in UMA space.
2260 * Therefore we need to do
2261 * DCN_VM_CONTEXT0_PAGE_TABLE_BASE_ADDR = VM_CONTEXT0_PAGE_TABLE_BASE_ADDR
2262 * - DCHUBBUB_SDPIF_FB_OFFSET + DCHUBBUB_SDPIF_FB_BASE
2263 */
2264 fb_base.quad_part = (uint64_t)fb_base_value << 24;
2265 fb_offset.quad_part = (uint64_t)fb_offset_value << 24;
2266 vm0->pte_base.quad_part += fb_base.quad_part;
2267 vm0->pte_base.quad_part -= fb_offset.quad_part;
2268 }
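/*
 * DCHUBBUB_SDPIF_FB_BASE/FB_OFFSET are treated as 16 MB units (<< 24). As a
 * hypothetical example, with fb_base_value = 0x80 and fb_offset_value = 0x10:
 *
 *	fb_base.quad_part   = (uint64_t)0x80 << 24;	// 0x80000000 (2 GB)
 *	fb_offset.quad_part = (uint64_t)0x10 << 24;	// 0x10000000 (256 MB)
 *	vm0->pte_base.quad_part += fb_base.quad_part - fb_offset.quad_part;
 *
 * i.e. the page table base is rebased by +1.75 GB in this example, which is
 * the UMA-to-frame-buffer adjustment described in the comment inside the
 * function above.
 */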
2269
2270 static void dcn10_program_pte_vm(struct hubp *hubp,
2271 enum surface_pixel_format format,
2272 union dc_tiling_info *tiling_info,
2273 enum dc_rotation_angle rotation,
2274 struct dce_hwseq *hws)
2275 {
2276 struct dcn10_hubp *hubp1 = TO_DCN10_HUBP(hubp);
2277 struct vm_system_aperture_param apt = { {{ 0 } } };
2278 struct vm_context0_param vm0 = { { { 0 } } };
2279
2280
2281 mmhub_read_vm_system_aperture_settings(hubp1, &apt, hws);
2282 mmhub_read_vm_context0_settings(hubp1, &vm0, hws);
2283
2284 hubp->funcs->hubp_set_vm_system_aperture_settings(hubp, &apt);
2285 hubp->funcs->hubp_set_vm_context0_settings(hubp, &vm0);
2286 }
2287
2288 static void update_dchubp_dpp(
2289 struct dc *dc,
2290 struct pipe_ctx *pipe_ctx,
2291 struct dc_state *context)
2292 {
2293 struct dce_hwseq *hws = dc->hwseq;
2294 struct hubp *hubp = pipe_ctx->plane_res.hubp;
2295 struct dpp *dpp = pipe_ctx->plane_res.dpp;
2296 struct dc_plane_state *plane_state = pipe_ctx->plane_state;
2297 union plane_size size = plane_state->plane_size;
2298 struct mpcc_cfg mpcc_cfg = {0};
2299 struct pipe_ctx *top_pipe;
2300 bool per_pixel_alpha = plane_state->per_pixel_alpha && pipe_ctx->bottom_pipe;
2301
2302 /* TODO: proper fix once fpga works */
2303 /* The DPP clock value depends on the DML calculation and may change dynamically */
2304 enable_dppclk(
2305 dc->hwseq,
2306 pipe_ctx->pipe_idx,
2307 pipe_ctx->stream_res.pix_clk_params.requested_pix_clk,
2308 context->bw.dcn.calc_clk.dppclk_div);
2309 dc->current_state->bw.dcn.cur_clk.dppclk_div =
2310 context->bw.dcn.calc_clk.dppclk_div;
2311 context->bw.dcn.cur_clk.dppclk_div = context->bw.dcn.calc_clk.dppclk_div;
2312
2313 /* TODO: Need an input parameter to tell which OTG the current DCHUB pipe is
2314 * tied to. VTG is within DCHUBBUB, which is a common block shared by each
2315 * pipe HUBP. VTG has a 1:1 mapping with OTG; each pipe HUBP selects which VTG.
2316 */
2317 REG_UPDATE(DCHUBP_CNTL[pipe_ctx->pipe_idx], HUBP_VTG_SEL, pipe_ctx->stream_res.tg->inst);
2318
2319 hubp->funcs->hubp_setup(
2320 hubp,
2321 &pipe_ctx->dlg_regs,
2322 &pipe_ctx->ttu_regs,
2323 &pipe_ctx->rq_regs,
2324 &pipe_ctx->pipe_dlg_param);
2325
2326 size.grph.surface_size = pipe_ctx->plane_res.scl_data.viewport;
2327
2328 if (dc->config.gpu_vm_support)
2329 dcn10_program_pte_vm(
2330 pipe_ctx->plane_res.hubp,
2331 plane_state->format,
2332 &plane_state->tiling_info,
2333 plane_state->rotation,
2334 hws
2335 );
2336
2337 dpp->funcs->ipp_setup(dpp,
2338 plane_state->format,
2339 EXPANSION_MODE_ZERO);
2340
2341 mpcc_cfg.dpp_id = hubp->inst;
2342 mpcc_cfg.opp_id = pipe_ctx->stream_res.opp->inst;
2343 mpcc_cfg.tree_cfg = &(pipe_ctx->stream_res.opp->mpc_tree);
2344 for (top_pipe = pipe_ctx->top_pipe; top_pipe; top_pipe = top_pipe->top_pipe)
2345 mpcc_cfg.z_index++;
2346 if (dc->debug.surface_visual_confirm)
2347 dcn10_get_surface_visual_confirm_color(
2348 pipe_ctx, &mpcc_cfg.black_color);
2349 else
2350 color_space_to_black_color(
2351 dc, pipe_ctx->stream->output_color_space,
2352 &mpcc_cfg.black_color);
2353 mpcc_cfg.per_pixel_alpha = per_pixel_alpha;
2354 /* DCN1.0 has output CM before MPC, which interferes with
2355 * pre-multiplied alpha.
2356 */
2357 mpcc_cfg.pre_multiplied_alpha = is_rgb_cspace(
2358 pipe_ctx->stream->output_color_space)
2359 && per_pixel_alpha;
2360 hubp->mpcc_id = dc->res_pool->mpc->funcs->add(dc->res_pool->mpc, &mpcc_cfg);
2361 hubp->opp_id = mpcc_cfg.opp_id;
2362
2363 pipe_ctx->plane_res.scl_data.lb_params.alpha_en = per_pixel_alpha;
2364 pipe_ctx->plane_res.scl_data.lb_params.depth = LB_PIXEL_DEPTH_30BPP;
2365 /* scaler configuration */
2366 pipe_ctx->plane_res.dpp->funcs->dpp_set_scaler(
2367 pipe_ctx->plane_res.dpp, &pipe_ctx->plane_res.scl_data);
2368
2369 hubp->funcs->mem_program_viewport(hubp,
2370 &pipe_ctx->plane_res.scl_data.viewport, &pipe_ctx->plane_res.scl_data.viewport_c);
2371
2372 /*gamut remap*/
2373 program_gamut_remap(pipe_ctx);
2374
2375 program_csc_matrix(pipe_ctx,
2376 pipe_ctx->stream->output_color_space,
2377 pipe_ctx->stream->csc_color_matrix.matrix);
2378
2379 hubp->funcs->hubp_program_surface_config(
2380 hubp,
2381 plane_state->format,
2382 &plane_state->tiling_info,
2383 &size,
2384 plane_state->rotation,
2385 &plane_state->dcc,
2386 plane_state->horizontal_mirror);
2387
2388 dc->hwss.update_plane_addr(dc, pipe_ctx);
2389
2390 if (is_pipe_tree_visible(pipe_ctx))
2391 hubp->funcs->set_blank(hubp, false);
2392 }
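/*
 * update_dchubp_dpp programs the front end roughly in this order: DPP clock
 * enable, HUBP VTG select, HUBP DLG/TTU/RQ setup, optional PTE VM setup,
 * IPP input format, MPCC blending-tree entry, scaler and viewport, gamut
 * remap and output CSC, surface config and surface address, and finally
 * un-blanking the HUBP if anything in its pipe tree is visible.
 */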
2393
2394
2395 static void program_all_pipe_in_tree(
2396 struct dc *dc,
2397 struct pipe_ctx *pipe_ctx,
2398 struct dc_state *context)
2399 {
2400 unsigned int ref_clk_mhz = dc->res_pool->ref_clock_inKhz/1000;
2401
2402 if (pipe_ctx->top_pipe == NULL) {
2403
2404 /* lock otg_master_update to process all pipes associated with
2405 * this OTG. This is done only once.
2406 */
2407 /* watermarks apply to all pipes */
2408 program_watermarks(dc->hwseq, &context->bw.dcn.watermarks, ref_clk_mhz);
2409
2410 if (dc->debug.sanity_checks) {
2411 /* pstate stuck check after watermark update */
2412 verify_allow_pstate_change_high(dc->hwseq);
2413 }
2414
2415 pipe_ctx->stream_res.tg->funcs->lock(pipe_ctx->stream_res.tg);
2416
2417 pipe_ctx->stream_res.tg->dlg_otg_param.vready_offset = pipe_ctx->pipe_dlg_param.vready_offset;
2418 pipe_ctx->stream_res.tg->dlg_otg_param.vstartup_start = pipe_ctx->pipe_dlg_param.vstartup_start;
2419 pipe_ctx->stream_res.tg->dlg_otg_param.vupdate_offset = pipe_ctx->pipe_dlg_param.vupdate_offset;
2420 pipe_ctx->stream_res.tg->dlg_otg_param.vupdate_width = pipe_ctx->pipe_dlg_param.vupdate_width;
2421 pipe_ctx->stream_res.tg->dlg_otg_param.signal = pipe_ctx->stream->signal;
2422
2423 pipe_ctx->stream_res.tg->funcs->program_global_sync(
2424 pipe_ctx->stream_res.tg);
2425 pipe_ctx->stream_res.tg->funcs->set_blank(pipe_ctx->stream_res.tg, !is_pipe_tree_visible(pipe_ctx));
2426 }
2427
2428 if (pipe_ctx->plane_state != NULL) {
2429 struct dc_cursor_position position = { 0 };
2430 struct pipe_ctx *cur_pipe_ctx =
2431 &dc->current_state->res_ctx.pipe_ctx[pipe_ctx->pipe_idx];
2432
2433 dcn10_power_on_fe(dc, pipe_ctx, context);
2434
2435 /* temporary dcn1 workaround:
2436 * a watermark update requires a toggle after the a/b/c/d sets are programmed;
2437 * if the hubp is power gated the wm value doesn't get propagated to the hubp,
2438 * so toggle after ungate to ensure the wm gets to the hubp.
2439 *
2440 * final solution: we need to get SMU to do the toggle, since
2441 * DCHUBBUB_ARB_WATERMARK_CHANGE_REQUEST is owned by SMU and we should not
2442 * have both driver and fw accessing the same register
2443 */
2444 toggle_watermark_change_req(dc->hwseq);
2445
2446 update_dchubp_dpp(dc, pipe_ctx, context);
2447
2448 /* TODO: this is a hack workaround for switching from MPO to pipe split */
2449 dc_stream_set_cursor_position(pipe_ctx->stream, &position);
2450
2451 dc_stream_set_cursor_attributes(pipe_ctx->stream,
2452 &pipe_ctx->stream->cursor_attributes);
2453
2454 if (cur_pipe_ctx->plane_state != pipe_ctx->plane_state) {
2455 dc->hwss.set_input_transfer_func(
2456 pipe_ctx, pipe_ctx->plane_state);
2457 dc->hwss.set_output_transfer_func(
2458 pipe_ctx, pipe_ctx->stream);
2459 }
2460 }
2461
2462 if (dc->debug.sanity_checks) {
2463 /* pstate stuck check after each pipe is programmed */
2464 verify_allow_pstate_change_high(dc->hwseq);
2465 }
2466
2467 if (pipe_ctx->bottom_pipe != NULL && pipe_ctx->bottom_pipe != pipe_ctx)
2468 program_all_pipe_in_tree(dc, pipe_ctx->bottom_pipe, context);
2469 }
2470
2471 static void dcn10_pplib_apply_display_requirements(
2472 struct dc *dc,
2473 struct dc_state *context)
2474 {
2475 struct dm_pp_display_configuration *pp_display_cfg = &context->pp_display_cfg;
2476
2477 pp_display_cfg->all_displays_in_sync = false;/*todo*/
2478 pp_display_cfg->nb_pstate_switch_disable = false;
2479 pp_display_cfg->min_engine_clock_khz = context->bw.dcn.cur_clk.dcfclk_khz;
2480 pp_display_cfg->min_memory_clock_khz = context->bw.dcn.cur_clk.fclk_khz;
2481 pp_display_cfg->min_engine_clock_deep_sleep_khz = context->bw.dcn.cur_clk.dcfclk_deep_sleep_khz;
2482 pp_display_cfg->min_dcfc_deep_sleep_clock_khz = context->bw.dcn.cur_clk.dcfclk_deep_sleep_khz;
2483 pp_display_cfg->avail_mclk_switch_time_us =
2484 context->bw.dcn.cur_clk.dram_ccm_us > 0 ? context->bw.dcn.cur_clk.dram_ccm_us : 0;
2485 pp_display_cfg->avail_mclk_switch_time_in_disp_active_us =
2486 context->bw.dcn.cur_clk.min_active_dram_ccm_us > 0 ? context->bw.dcn.cur_clk.min_active_dram_ccm_us : 0;
2487 pp_display_cfg->min_dcfclock_khz = context->bw.dcn.cur_clk.dcfclk_khz;
2488 pp_display_cfg->disp_clk_khz = context->bw.dcn.cur_clk.dispclk_khz;
2489 dce110_fill_display_configs(context, pp_display_cfg);
2490
2491 if (memcmp(&dc->prev_display_config, pp_display_cfg, sizeof(
2492 struct dm_pp_display_configuration)) != 0)
2493 dm_pp_apply_display_requirements(dc->ctx, pp_display_cfg);
2494
2495 dc->prev_display_config = *pp_display_cfg;
2496 }
2497
2498 static void optimize_shared_resources(struct dc *dc)
2499 {
2500 if (dc->current_state->stream_count == 0) {
2501 apply_DEGVIDCN10_253_wa(dc);
2502 /* S0i2 message */
2503 dcn10_pplib_apply_display_requirements(dc, dc->current_state);
2504 }
2505
2506 if (dc->debug.pplib_wm_report_mode == WM_REPORT_OVERRIDE)
2507 dcn_bw_notify_pplib_of_wm_ranges(dc);
2508 }
2509
2510 static void ready_shared_resources(struct dc *dc, struct dc_state *context)
2511 {
2512 if (dc->current_state->stream_count == 0 &&
2513 !dc->debug.disable_stutter)
2514 undo_DEGVIDCN10_253_wa(dc);
2515
2516 /* S0i2 message */
2517 if (dc->current_state->stream_count == 0 &&
2518 context->stream_count != 0)
2519 dcn10_pplib_apply_display_requirements(dc, context);
2520 }
2521
2522 static void dcn10_apply_ctx_for_surface(
2523 struct dc *dc,
2524 const struct dc_stream_state *stream,
2525 int num_planes,
2526 struct dc_state *context)
2527 {
2528 int i, be_idx;
2529
2530 if (dc->debug.sanity_checks)
2531 verify_allow_pstate_change_high(dc->hwseq);
2532
2533 be_idx = -1;
2534 for (i = 0; i < dc->res_pool->pipe_count; i++) {
2535 if (stream == context->res_ctx.pipe_ctx[i].stream) {
2536 be_idx = context->res_ctx.pipe_ctx[i].stream_res.tg->inst;
2537 break;
2538 }
2539 }
2540
2541 ASSERT(be_idx != -1);
2542
2543 if (num_planes == 0) {
2544 for (i = dc->res_pool->pipe_count - 1; i >= 0 ; i--) {
2545 struct pipe_ctx *old_pipe_ctx =
2546 &dc->current_state->res_ctx.pipe_ctx[i];
2547
2548 if (old_pipe_ctx->stream_res.tg && old_pipe_ctx->stream_res.tg->inst == be_idx) {
2549 old_pipe_ctx->stream_res.tg->funcs->set_blank(old_pipe_ctx->stream_res.tg, true);
2550 dcn10_power_down_fe(dc, old_pipe_ctx->pipe_idx);
2551 }
2552 }
2553 return;
2554 }
2555
2556 /* reset unused mpcc */
2557 for (i = 0; i < dc->res_pool->pipe_count; i++) {
2558 struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
2559 struct pipe_ctx *old_pipe_ctx =
2560 &dc->current_state->res_ctx.pipe_ctx[i];
2561
2562 if (!pipe_ctx->plane_state && !old_pipe_ctx->plane_state)
2563 continue;
2564
2565 /*
2566 * Power gate reused pipes that are not yet power gated.
2567 * Fairly hacky right now: opp_id is used as the indicator.
2568 */
2569
2570 if (pipe_ctx->plane_state && !old_pipe_ctx->plane_state) {
2571 if (pipe_ctx->plane_res.hubp->opp_id != 0xf && pipe_ctx->stream_res.tg->inst == be_idx) {
2572 dcn10_power_down_fe(dc, pipe_ctx->pipe_idx);
2573 /*
2574 * power_down_fe will unlock when calling reset, so we need
2575 * to take the lock back here. Messy, needs rework.
2576 */
2577 pipe_ctx->stream_res.tg->funcs->lock(pipe_ctx->stream_res.tg);
2578 }
2579 }
2580
2581
2582 if ((!pipe_ctx->plane_state && old_pipe_ctx->plane_state)
2583 || (!pipe_ctx->stream && old_pipe_ctx->stream)) {
2584 if (old_pipe_ctx->stream_res.tg->inst != be_idx)
2585 continue;
2586
2587 if (!old_pipe_ctx->top_pipe) {
2588 ASSERT(0);
2589 continue;
2590 }
2591
2592 /* reset mpc */
2593 dc->res_pool->mpc->funcs->remove(
2594 dc->res_pool->mpc,
2595 &(old_pipe_ctx->stream_res.opp->mpc_tree),
2596 old_pipe_ctx->stream_res.opp->inst,
2597 old_pipe_ctx->pipe_idx);
2598 old_pipe_ctx->stream_res.opp->mpcc_disconnect_pending[old_pipe_ctx->plane_res.hubp->mpcc_id] = true;
2599
2600 /*dm_logger_write(dc->ctx->logger, LOG_ERROR,
2601 "[debug_mpo: apply_ctx disconnect pending on mpcc %d]\n",
2602 old_pipe_ctx->mpcc->inst);*/
2603
2604 if (dc->debug.sanity_checks)
2605 verify_allow_pstate_change_high(dc->hwseq);
2606
2607 old_pipe_ctx->top_pipe = NULL;
2608 old_pipe_ctx->bottom_pipe = NULL;
2609 old_pipe_ctx->plane_state = NULL;
2610 old_pipe_ctx->stream = NULL;
2611
2612 dm_logger_write(dc->ctx->logger, LOG_DC,
2613 "Reset mpcc for pipe %d\n",
2614 old_pipe_ctx->pipe_idx);
2615 }
2616 }
2617
2618 for (i = 0; i < dc->res_pool->pipe_count; i++) {
2619 struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
2620
2621 if (pipe_ctx->stream != stream)
2622 continue;
2623
2624 /* looking for top pipe to program */
2625 if (!pipe_ctx->top_pipe)
2626 program_all_pipe_in_tree(dc, pipe_ctx, context);
2627 }
2628
2629 dm_logger_write(dc->ctx->logger, LOG_BANDWIDTH_CALCS,
2630 "\n============== Watermark parameters ==============\n"
2631 "a.urgent_ns: %d \n"
2632 "a.cstate_enter_plus_exit: %d \n"
2633 "a.cstate_exit: %d \n"
2634 "a.pstate_change: %d \n"
2635 "a.pte_meta_urgent: %d \n"
2636 "b.urgent_ns: %d \n"
2637 "b.cstate_enter_plus_exit: %d \n"
2638 "b.cstate_exit: %d \n"
2639 "b.pstate_change: %d \n"
2640 "b.pte_meta_urgent: %d \n",
2641 context->bw.dcn.watermarks.a.urgent_ns,
2642 context->bw.dcn.watermarks.a.cstate_pstate.cstate_enter_plus_exit_ns,
2643 context->bw.dcn.watermarks.a.cstate_pstate.cstate_exit_ns,
2644 context->bw.dcn.watermarks.a.cstate_pstate.pstate_change_ns,
2645 context->bw.dcn.watermarks.a.pte_meta_urgent_ns,
2646 context->bw.dcn.watermarks.b.urgent_ns,
2647 context->bw.dcn.watermarks.b.cstate_pstate.cstate_enter_plus_exit_ns,
2648 context->bw.dcn.watermarks.b.cstate_pstate.cstate_exit_ns,
2649 context->bw.dcn.watermarks.b.cstate_pstate.pstate_change_ns,
2650 context->bw.dcn.watermarks.b.pte_meta_urgent_ns
2651 );
2652 dm_logger_write(dc->ctx->logger, LOG_BANDWIDTH_CALCS,
2653 "\nc.urgent_ns: %d \n"
2654 "c.cstate_enter_plus_exit: %d \n"
2655 "c.cstate_exit: %d \n"
2656 "c.pstate_change: %d \n"
2657 "c.pte_meta_urgent: %d \n"
2658 "d.urgent_ns: %d \n"
2659 "d.cstate_enter_plus_exit: %d \n"
2660 "d.cstate_exit: %d \n"
2661 "d.pstate_change: %d \n"
2662 "d.pte_meta_urgent: %d \n"
2663 "========================================================\n",
2664 context->bw.dcn.watermarks.c.urgent_ns,
2665 context->bw.dcn.watermarks.c.cstate_pstate.cstate_enter_plus_exit_ns,
2666 context->bw.dcn.watermarks.c.cstate_pstate.cstate_exit_ns,
2667 context->bw.dcn.watermarks.c.cstate_pstate.pstate_change_ns,
2668 context->bw.dcn.watermarks.c.pte_meta_urgent_ns,
2669 context->bw.dcn.watermarks.d.urgent_ns,
2670 context->bw.dcn.watermarks.d.cstate_pstate.cstate_enter_plus_exit_ns,
2671 context->bw.dcn.watermarks.d.cstate_pstate.cstate_exit_ns,
2672 context->bw.dcn.watermarks.d.cstate_pstate.pstate_change_ns,
2673 context->bw.dcn.watermarks.d.pte_meta_urgent_ns
2674 );
2675
2676 if (dc->debug.sanity_checks)
2677 verify_allow_pstate_change_high(dc->hwseq);
2678 }
2679
2680 static void dcn10_set_bandwidth(
2681 struct dc *dc,
2682 struct dc_state *context,
2683 bool decrease_allowed)
2684 {
2685 struct pp_smu_display_requirement_rv *smu_req_cur =
2686 &dc->res_pool->pp_smu_req;
2687 struct pp_smu_display_requirement_rv smu_req = *smu_req_cur;
2688 struct pp_smu_funcs_rv *pp_smu = dc->res_pool->pp_smu;
2689
2690 if (dc->debug.sanity_checks) {
2691 verify_allow_pstate_change_high(dc->hwseq);
2692 }
2693
2694 if (IS_FPGA_MAXIMUS_DC(dc->ctx->dce_environment))
2695 return;
2696
2697 if (decrease_allowed || context->bw.dcn.calc_clk.dispclk_khz
2698 > dc->current_state->bw.dcn.cur_clk.dispclk_khz) {
2699 dc->res_pool->display_clock->funcs->set_clock(
2700 dc->res_pool->display_clock,
2701 context->bw.dcn.calc_clk.dispclk_khz);
2702 dc->current_state->bw.dcn.cur_clk.dispclk_khz =
2703 context->bw.dcn.calc_clk.dispclk_khz;
2704 }
2705 if (decrease_allowed || context->bw.dcn.calc_clk.dcfclk_khz
2706 > dc->current_state->bw.dcn.cur_clk.dcfclk_khz) {
2707 smu_req.hard_min_dcefclk_khz =
2708 context->bw.dcn.calc_clk.dcfclk_khz;
2709 }
2710 if (decrease_allowed || context->bw.dcn.calc_clk.fclk_khz
2711 > dc->current_state->bw.dcn.cur_clk.fclk_khz) {
2712 smu_req.hard_min_fclk_khz = context->bw.dcn.calc_clk.fclk_khz;
2713 }
2714 if (decrease_allowed || context->bw.dcn.calc_clk.dcfclk_deep_sleep_khz
2715 > dc->current_state->bw.dcn.cur_clk.dcfclk_deep_sleep_khz) {
2716 dc->current_state->bw.dcn.calc_clk.dcfclk_deep_sleep_khz =
2717 context->bw.dcn.calc_clk.dcfclk_deep_sleep_khz;
2718 context->bw.dcn.cur_clk.dcfclk_deep_sleep_khz =
2719 context->bw.dcn.calc_clk.dcfclk_deep_sleep_khz;
2720 }
2721
2722 smu_req.display_count = context->stream_count;
2723
2724 if (pp_smu->set_display_requirement)
2725 pp_smu->set_display_requirement(&pp_smu->pp_smu, &smu_req);
2726
2727 *smu_req_cur = smu_req;
2728
2729 /* A decrease in frequency is an increase in period, so the comparison is inverted for dram_ccm */
2730 if (decrease_allowed || context->bw.dcn.calc_clk.dram_ccm_us
2731 < dc->current_state->bw.dcn.cur_clk.dram_ccm_us) {
2732 dc->current_state->bw.dcn.calc_clk.dram_ccm_us =
2733 context->bw.dcn.calc_clk.dram_ccm_us;
2734 context->bw.dcn.cur_clk.dram_ccm_us =
2735 context->bw.dcn.calc_clk.dram_ccm_us;
2736 }
2737 if (decrease_allowed || context->bw.dcn.calc_clk.min_active_dram_ccm_us
2738 < dc->current_state->bw.dcn.cur_clk.min_active_dram_ccm_us) {
2739 dc->current_state->bw.dcn.calc_clk.min_active_dram_ccm_us =
2740 context->bw.dcn.calc_clk.min_active_dram_ccm_us;
2741 context->bw.dcn.cur_clk.min_active_dram_ccm_us =
2742 context->bw.dcn.calc_clk.min_active_dram_ccm_us;
2743 }
2744 dcn10_pplib_apply_display_requirements(dc, context);
2745
2746 if (dc->debug.sanity_checks) {
2747 verify_allow_pstate_change_high(dc->hwseq);
2748 }
2749
2750 /* TODO: need to fix this function; it is not doing the right thing here */
2751 }
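/*
 * dcn10_set_bandwidth raises a clock whenever the value calculated for the
 * new context exceeds the current one, but only lowers clocks when
 * decrease_allowed is set (typically once the new state has been committed
 * and nothing still depends on the higher clocks). The dram_ccm comparisons
 * run the other way because those values are periods rather than
 * frequencies, as noted above.
 */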
2752
2753 static void set_drr(struct pipe_ctx **pipe_ctx,
2754 int num_pipes, int vmin, int vmax)
2755 {
2756 int i = 0;
2757 struct drr_params params = {0};
2758
2759 params.vertical_total_max = vmax;
2760 params.vertical_total_min = vmin;
2761
2762 /* TODO: If multiple pipes are to be supported, GSL handling
2763 * is needed here to keep them synchronized.
2764 */
2765 for (i = 0; i < num_pipes; i++) {
2766 pipe_ctx[i]->stream_res.tg->funcs->set_drr(pipe_ctx[i]->stream_res.tg, &params);
2767 }
2768 }
2769
2770 static void get_position(struct pipe_ctx **pipe_ctx,
2771 int num_pipes,
2772 struct crtc_position *position)
2773 {
2774 int i = 0;
2775
2776 /* TODO: handle pipes > 1
2777 */
2778 for (i = 0; i < num_pipes; i++)
2779 pipe_ctx[i]->stream_res.tg->funcs->get_position(pipe_ctx[i]->stream_res.tg, position);
2780 }
2781
2782 static void set_static_screen_control(struct pipe_ctx **pipe_ctx,
2783 int num_pipes, const struct dc_static_screen_events *events)
2784 {
2785 unsigned int i;
2786 unsigned int value = 0;
2787
2788 if (events->surface_update)
2789 value |= 0x80;
2790 if (events->cursor_update)
2791 value |= 0x2;
2792
2793 for (i = 0; i < num_pipes; i++)
2794 pipe_ctx[i]->stream_res.tg->funcs->
2795 set_static_screen_control(pipe_ctx[i]->stream_res.tg, value);
2796 }
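/*
 * The event mask built above maps dc_static_screen_events onto the timing
 * generator's static screen trigger bits: surface_update -> bit 7 (0x80),
 * cursor_update -> bit 1 (0x2). Enabling both, for example, yields:
 *
 *	value = 0x80 | 0x2;	// 0x82
 */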
2797
2798 static void set_plane_config(
2799 const struct dc *dc,
2800 struct pipe_ctx *pipe_ctx,
2801 struct resource_context *res_ctx)
2802 {
2803 /* TODO */
2804 program_gamut_remap(pipe_ctx);
2805 }
2806
2807 static void dcn10_config_stereo_parameters(
2808 struct dc_stream_state *stream, struct crtc_stereo_flags *flags)
2809 {
2810 enum view_3d_format view_format = stream->view_format;
2811 enum dc_timing_3d_format timing_3d_format =
2812 stream->timing.timing_3d_format;
2813 bool non_stereo_timing = false;
2814
2815 if (timing_3d_format == TIMING_3D_FORMAT_NONE ||
2816 timing_3d_format == TIMING_3D_FORMAT_SIDE_BY_SIDE ||
2817 timing_3d_format == TIMING_3D_FORMAT_TOP_AND_BOTTOM)
2818 non_stereo_timing = true;
2819
2820 if (non_stereo_timing == false &&
2821 view_format == VIEW_3D_FORMAT_FRAME_SEQUENTIAL) {
2822
2823 flags->PROGRAM_STEREO = 1;
2824 flags->PROGRAM_POLARITY = 1;
2825 if (timing_3d_format == TIMING_3D_FORMAT_INBAND_FA ||
2826 timing_3d_format == TIMING_3D_FORMAT_DP_HDMI_INBAND_FA ||
2827 timing_3d_format == TIMING_3D_FORMAT_SIDEBAND_FA) {
2828 enum display_dongle_type dongle =
2829 stream->sink->link->ddc->dongle_type;
2830 if (dongle == DISPLAY_DONGLE_DP_VGA_CONVERTER ||
2831 dongle == DISPLAY_DONGLE_DP_DVI_CONVERTER ||
2832 dongle == DISPLAY_DONGLE_DP_HDMI_CONVERTER)
2833 flags->DISABLE_STEREO_DP_SYNC = 1;
2834 }
2835 flags->RIGHT_EYE_POLARITY =
2836 stream->timing.flags.RIGHT_EYE_3D_POLARITY;
2837 if (timing_3d_format == TIMING_3D_FORMAT_HW_FRAME_PACKING)
2838 flags->FRAME_PACKED = 1;
2839 }
2840
2841 return;
2842 }
2843
2844 static void dcn10_setup_stereo(struct pipe_ctx *pipe_ctx, struct dc *dc)
2845 {
2846 struct crtc_stereo_flags flags = { 0 };
2847 struct dc_stream_state *stream = pipe_ctx->stream;
2848
2849 dcn10_config_stereo_parameters(stream, &flags);
2850
2851 pipe_ctx->stream_res.opp->funcs->opp_set_stereo_polarity(
2852 pipe_ctx->stream_res.opp,
2853 flags.PROGRAM_STEREO == 1,
2854 stream->timing.flags.RIGHT_EYE_3D_POLARITY == 1);
2855
2856 pipe_ctx->stream_res.tg->funcs->program_stereo(
2857 pipe_ctx->stream_res.tg,
2858 &stream->timing,
2859 &flags);
2860
2861 return;
2862 }
2863
2864 static void dcn10_wait_for_mpcc_disconnect(
2865 struct dc *dc,
2866 struct resource_pool *res_pool,
2867 struct pipe_ctx *pipe_ctx)
2868 {
2869 int i;
2870
2871 if (dc->debug.sanity_checks) {
2872 verify_allow_pstate_change_high(dc->hwseq);
2873 }
2874
2875 if (!pipe_ctx->stream_res.opp)
2876 return;
2877
2878 for (i = 0; i < MAX_PIPES; i++) {
2879 if (pipe_ctx->stream_res.opp->mpcc_disconnect_pending[i]) {
2880 res_pool->mpc->funcs->wait_for_idle(res_pool->mpc, i);
2881 pipe_ctx->stream_res.opp->mpcc_disconnect_pending[i] = false;
2882 res_pool->hubps[i]->funcs->set_blank(res_pool->hubps[i], true);
2883 /*dm_logger_write(dc->ctx->logger, LOG_ERROR,
2884 "[debug_mpo: wait_for_mpcc finished waiting on mpcc %d]\n",
2885 i);*/
2886 }
2887 }
2888
2889 if (dc->debug.sanity_checks) {
2890 verify_allow_pstate_change_high(dc->hwseq);
2891 }
2892
2893 }
2894
2895 static bool dcn10_dummy_display_power_gating(
2896 struct dc *dc,
2897 uint8_t controller_id,
2898 struct dc_bios *dcb,
2899 enum pipe_gating_control power_gating)
2900 {
2901 return true;
2902 }
2903
2904 void dcn10_update_pending_status(struct pipe_ctx *pipe_ctx)
2905 {
2906 struct dc_plane_state *plane_state = pipe_ctx->plane_state;
2907 struct timing_generator *tg = pipe_ctx->stream_res.tg;
2908
2909 if (plane_state == NULL)
2910 return;
2911
2912 plane_state->status.is_flip_pending =
2913 pipe_ctx->plane_res.hubp->funcs->hubp_is_flip_pending(
2914 pipe_ctx->plane_res.hubp);
2915
2916 plane_state->status.current_address = pipe_ctx->plane_res.hubp->current_address;
2917 if (pipe_ctx->plane_res.hubp->current_address.type == PLN_ADDR_TYPE_GRPH_STEREO &&
2918 tg->funcs->is_stereo_left_eye) {
2919 plane_state->status.is_right_eye =
2920 !tg->funcs->is_stereo_left_eye(pipe_ctx->stream_res.tg);
2921 }
2922 }
2923
2924
2925
2926 static const struct hw_sequencer_funcs dcn10_funcs = {
2927 .program_gamut_remap = program_gamut_remap,
2928 .program_csc_matrix = program_csc_matrix,
2929 .init_hw = dcn10_init_hw,
2930 .apply_ctx_to_hw = dce110_apply_ctx_to_hw,
2931 .apply_ctx_for_surface = dcn10_apply_ctx_for_surface,
2932 .set_plane_config = set_plane_config,
2933 .update_plane_addr = dcn10_update_plane_addr,
2934 .update_dchub = dcn10_update_dchub,
2935 .update_pending_status = dcn10_update_pending_status,
2936 .set_input_transfer_func = dcn10_set_input_transfer_func,
2937 .set_output_transfer_func = dcn10_set_output_transfer_func,
2938 .power_down = dce110_power_down,
2939 .enable_accelerated_mode = dce110_enable_accelerated_mode,
2940 .enable_timing_synchronization = dcn10_enable_timing_synchronization,
2941 .update_info_frame = dce110_update_info_frame,
2942 .enable_stream = dce110_enable_stream,
2943 .disable_stream = dce110_disable_stream,
2944 .unblank_stream = dce110_unblank_stream,
2945 .enable_display_power_gating = dcn10_dummy_display_power_gating,
2946 .power_down_front_end = dcn10_power_down_fe,
2947 .power_on_front_end = dcn10_power_on_fe,
2948 .pipe_control_lock = dcn10_pipe_control_lock,
2949 .set_bandwidth = dcn10_set_bandwidth,
2950 .reset_hw_ctx_wrap = reset_hw_ctx_wrap,
2951 .prog_pixclk_crtc_otg = dcn10_prog_pixclk_crtc_otg,
2952 .set_drr = set_drr,
2953 .get_position = get_position,
2954 .set_static_screen_control = set_static_screen_control,
2955 .setup_stereo = dcn10_setup_stereo,
2956 .set_avmute = dce110_set_avmute,
2957 .log_hw_state = dcn10_log_hw_state,
2958 .wait_for_mpcc_disconnect = dcn10_wait_for_mpcc_disconnect,
2959 .ready_shared_resources = ready_shared_resources,
2960 .optimize_shared_resources = optimize_shared_resources,
2961 .edp_backlight_control = hwss_edp_backlight_control,
2962 .edp_power_control = hwss_edp_power_control
2963 };
2964
2965
2966 void dcn10_hw_sequencer_construct(struct dc *dc)
2967 {
2968 dc->hwss = dcn10_funcs;
2969 }
2970