1 /*
2 * Copyright 2012-15 Advanced Micro Devices, Inc.
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
13 *
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
21 *
22 * Authors: AMD
23 *
24 */
25 #include "dm_services.h"
26
27 #include "resource.h"
28 #include "include/irq_service_interface.h"
29 #include "link_encoder.h"
30 #include "stream_encoder.h"
31 #include "opp.h"
32 #include "timing_generator.h"
33 #include "transform.h"
34 #include "core_types.h"
35 #include "set_mode_types.h"
36 #include "virtual/virtual_stream_encoder.h"
37
38 #include "dce80/dce80_resource.h"
39 #include "dce100/dce100_resource.h"
40 #include "dce110/dce110_resource.h"
41 #include "dce112/dce112_resource.h"
42 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
43 #include "dcn10/dcn10_resource.h"
44 #endif
45 #include "dce120/dce120_resource.h"
46
47 enum dce_version resource_parse_asic_id(struct hw_asic_id asic_id)
48 {
49 enum dce_version dc_version = DCE_VERSION_UNKNOWN;
50 switch (asic_id.chip_family) {
51
52 case FAMILY_CI:
53 case FAMILY_KV:
54 dc_version = DCE_VERSION_8_0;
55 break;
56 case FAMILY_CZ:
57 dc_version = DCE_VERSION_11_0;
58 break;
59
60 case FAMILY_VI:
61 if (ASIC_REV_IS_TONGA_P(asic_id.hw_internal_rev) ||
62 ASIC_REV_IS_FIJI_P(asic_id.hw_internal_rev)) {
63 dc_version = DCE_VERSION_10_0;
64 break;
65 }
66 if (ASIC_REV_IS_POLARIS10_P(asic_id.hw_internal_rev) ||
67 ASIC_REV_IS_POLARIS11_M(asic_id.hw_internal_rev) ||
68 ASIC_REV_IS_POLARIS12_V(asic_id.hw_internal_rev)) {
69 dc_version = DCE_VERSION_11_2;
70 }
71 break;
72 case FAMILY_AI:
73 dc_version = DCE_VERSION_12_0;
74 break;
75 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
76 case FAMILY_RV:
77 dc_version = DCN_VERSION_1_0;
78 break;
79 #endif
80 default:
81 dc_version = DCE_VERSION_UNKNOWN;
82 break;
83 }
84 return dc_version;
85 }
86
87 struct resource_pool *dc_create_resource_pool(
88 struct core_dc *dc,
89 int num_virtual_links,
90 enum dce_version dc_version,
91 struct hw_asic_id asic_id)
92 {
93 struct resource_pool *res_pool = NULL;
94
95 switch (dc_version) {
96 case DCE_VERSION_8_0:
97 res_pool = dce80_create_resource_pool(
98 num_virtual_links, dc);
99 break;
100 case DCE_VERSION_10_0:
101 res_pool = dce100_create_resource_pool(
102 num_virtual_links, dc);
103 break;
104 case DCE_VERSION_11_0:
105 res_pool = dce110_create_resource_pool(
106 num_virtual_links, dc, asic_id);
107 break;
108 case DCE_VERSION_11_2:
109 res_pool = dce112_create_resource_pool(
110 num_virtual_links, dc);
111 break;
112 case DCE_VERSION_12_0:
113 res_pool = dce120_create_resource_pool(
114 num_virtual_links, dc);
115 break;
116
117 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
118 case DCN_VERSION_1_0:
119 res_pool = dcn10_create_resource_pool(
120 num_virtual_links, dc);
121 break;
122 #endif
123
124
125 default:
126 break;
127 }
128 if (res_pool != NULL) {
129 struct dc_firmware_info fw_info = { { 0 } };
130
131 if (dc->ctx->dc_bios->funcs->get_firmware_info(
132 dc->ctx->dc_bios, &fw_info) == BP_RESULT_OK) {
133 res_pool->ref_clock_inKhz = fw_info.pll_info.crystal_frequency;
134 } else
135 ASSERT_CRITICAL(false);
136 }
137
138 return res_pool;
139 }
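/*
 * Illustrative sketch only (not part of the original file): one plausible way
 * a caller could chain resource_parse_asic_id() and dc_create_resource_pool().
 * The helper name and the virtual link count are assumptions made for the
 * example, not the driver's actual construction path.
 */
#if 0
static struct resource_pool *example_build_pool(struct core_dc *dc,
		struct hw_asic_id asic_id)
{
	enum dce_version version = resource_parse_asic_id(asic_id);

	if (version == DCE_VERSION_UNKNOWN)
		return NULL;

	/* 2 virtual links chosen arbitrarily for the sketch */
	return dc_create_resource_pool(dc, 2, version, asic_id);
}
#endif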
140
141 void dc_destroy_resource_pool(struct core_dc *dc)
142 {
143 if (dc) {
144 if (dc->res_pool)
145 dc->res_pool->funcs->destroy(&dc->res_pool);
146
147 if (dc->hwseq)
148 dm_free(dc->hwseq);
149 }
150 }
151
152 static void update_num_audio(
153 const struct resource_straps *straps,
154 unsigned int *num_audio,
155 struct audio_support *aud_support)
156 {
157 if (straps->hdmi_disable == 0) {
158 aud_support->hdmi_audio_native = true;
159 aud_support->hdmi_audio_on_dongle = true;
160 aud_support->dp_audio = true;
161 } else {
162 if (straps->dc_pinstraps_audio & 0x2) {
163 aud_support->hdmi_audio_on_dongle = true;
164 aud_support->dp_audio = true;
165 } else {
166 aud_support->dp_audio = true;
167 }
168 }
169
170 switch (straps->audio_stream_number) {
171 case 0: /* multi streams supported */
172 break;
173 case 1: /* multi streams not supported */
174 *num_audio = 1;
175 break;
176 default:
177 DC_ERR("DC: unexpected audio fuse!\n");
178 }
179 }
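/*
 * Illustrative sketch only: a sample strap decode, using strap values that are
 * assumptions for the example. With hdmi_disable set and bit 1 of
 * dc_pinstraps_audio set, only dongle HDMI audio and DP audio remain, and an
 * audio_stream_number fuse of 1 caps the audio count at a single stream.
 */
#if 0
static void example_strap_decode(void)
{
	struct resource_straps straps = {
		.hdmi_disable = 1,
		.dc_pinstraps_audio = 0x2,
		.audio_stream_number = 1,
	};
	struct audio_support aud_support = { 0 };
	unsigned int num_audio = 3;

	update_num_audio(&straps, &num_audio, &aud_support);
	/* expected: hdmi_audio_native == false, hdmi_audio_on_dongle == true,
	 * dp_audio == true, num_audio == 1 */
}
#endif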
180
181 bool resource_construct(
182 unsigned int num_virtual_links,
183 struct core_dc *dc,
184 struct resource_pool *pool,
185 const struct resource_create_funcs *create_funcs)
186 {
187 struct dc_context *ctx = dc->ctx;
188 const struct resource_caps *caps = pool->res_cap;
189 int i;
190 unsigned int num_audio = caps->num_audio;
191 struct resource_straps straps = {0};
192
193 if (create_funcs->read_dce_straps)
194 create_funcs->read_dce_straps(dc->ctx, &straps);
195
196 pool->audio_count = 0;
197 if (create_funcs->create_audio) {
198 /* find the total number of streams available via the
199 * AZALIA_F0_CODEC_PIN_CONTROL_RESPONSE_CONFIGURATION_DEFAULT
200 * registers (one for each pin) starting from pin 1
201 * up to the max number of audio pins.
202 * We stop on the first pin where
203 * PORT_CONNECTIVITY == 1 (as instructed by HW team).
204 */
205 update_num_audio(&straps, &num_audio, &pool->audio_support);
206 for (i = 0; i < pool->pipe_count && i < num_audio; i++) {
207 struct audio *aud = create_funcs->create_audio(ctx, i);
208
209 if (aud == NULL) {
210 DC_ERR("DC: failed to create audio!\n");
211 return false;
212 }
213
214 if (!aud->funcs->endpoint_valid(aud)) {
215 aud->funcs->destroy(&aud);
216 break;
217 }
218
219 pool->audios[i] = aud;
220 pool->audio_count++;
221 }
222 }
223
224 pool->stream_enc_count = 0;
225 if (create_funcs->create_stream_encoder) {
226 for (i = 0; i < caps->num_stream_encoder; i++) {
227 pool->stream_enc[i] = create_funcs->create_stream_encoder(i, ctx);
228 if (pool->stream_enc[i] == NULL)
229 DC_ERR("DC: failed to create stream_encoder!\n");
230 pool->stream_enc_count++;
231 }
232 }
233
234 for (i = 0; i < num_virtual_links; i++) {
235 pool->stream_enc[pool->stream_enc_count] =
236 virtual_stream_encoder_create(
237 ctx, ctx->dc_bios);
238 if (pool->stream_enc[pool->stream_enc_count] == NULL) {
239 DC_ERR("DC: failed to create stream_encoder!\n");
240 return false;
241 }
242 pool->stream_enc_count++;
243 }
244
245 dc->hwseq = create_funcs->create_hwseq(ctx);
246
247 return true;
248 }
249
250
251 void resource_unreference_clock_source(
252 struct resource_context *res_ctx,
253 const struct resource_pool *pool,
254 struct clock_source **clock_source)
255 {
256 int i;
257 for (i = 0; i < pool->clk_src_count; i++) {
258 if (pool->clock_sources[i] != *clock_source)
259 continue;
260
261 res_ctx->clock_source_ref_count[i]--;
262
263 if (res_ctx->clock_source_ref_count[i] == 0)
264 (*clock_source)->funcs->cs_power_down(*clock_source);
265
266 break;
267 }
268
269 if (pool->dp_clock_source == *clock_source) {
270 res_ctx->dp_clock_source_ref_count--;
271
272 if (res_ctx->dp_clock_source_ref_count == 0)
273 (*clock_source)->funcs->cs_power_down(*clock_source);
274 }
275 *clock_source = NULL;
276 }
277
278 void resource_reference_clock_source(
279 struct resource_context *res_ctx,
280 const struct resource_pool *pool,
281 struct clock_source *clock_source)
282 {
283 int i;
284 for (i = 0; i < pool->clk_src_count; i++) {
285 if (pool->clock_sources[i] != clock_source)
286 continue;
287
288 res_ctx->clock_source_ref_count[i]++;
289 break;
290 }
291
292 if (pool->dp_clock_source == clock_source)
293 res_ctx->dp_clock_source_ref_count++;
294 }
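/*
 * Illustrative sketch only: the reference/unreference pair is meant to be used
 * symmetrically so that cs_power_down() runs exactly when the last user of a
 * clock source in the pool lets go of it. The pipe variable and call site here
 * are assumptions for the example.
 */
#if 0
static void example_clk_src_handoff(struct resource_context *res_ctx,
		const struct resource_pool *pool, struct pipe_ctx *pipe)
{
	resource_reference_clock_source(res_ctx, pool, pipe->clock_source);

	/* ... program the pipe, use the clock source ... */

	resource_unreference_clock_source(res_ctx, pool, &pipe->clock_source);
	/* pipe->clock_source is NULL here; the source was powered down if this
	 * was the last reference */
}
#endif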
295
296 bool resource_are_streams_timing_synchronizable(
297 struct dc_stream_state *stream1,
298 struct dc_stream_state *stream2)
299 {
300 if (stream1->timing.h_total != stream2->timing.h_total)
301 return false;
302
303 if (stream1->timing.v_total != stream2->timing.v_total)
304 return false;
305
306 if (stream1->timing.h_addressable
307 != stream2->timing.h_addressable)
308 return false;
309
310 if (stream1->timing.v_addressable
311 != stream2->timing.v_addressable)
312 return false;
313
314 if (stream1->timing.pix_clk_khz
315 != stream2->timing.pix_clk_khz)
316 return false;
317
318 if (stream1->phy_pix_clk != stream2->phy_pix_clk
319 && (!dc_is_dp_signal(stream1->signal)
320 || !dc_is_dp_signal(stream2->signal)))
321 return false;
322
323 return true;
324 }
325
326 static bool is_sharable_clk_src(
327 const struct pipe_ctx *pipe_with_clk_src,
328 const struct pipe_ctx *pipe)
329 {
330 if (pipe_with_clk_src->clock_source == NULL)
331 return false;
332
333 if (pipe_with_clk_src->stream->signal == SIGNAL_TYPE_VIRTUAL)
334 return false;
335
336 if (dc_is_dp_signal(pipe_with_clk_src->stream->signal))
337 return false;
338
339 if (dc_is_hdmi_signal(pipe_with_clk_src->stream->signal)
340 && dc_is_dvi_signal(pipe->stream->signal))
341 return false;
342
343 if (dc_is_hdmi_signal(pipe->stream->signal)
344 && dc_is_dvi_signal(pipe_with_clk_src->stream->signal))
345 return false;
346
347 if (!resource_are_streams_timing_synchronizable(
348 pipe_with_clk_src->stream, pipe->stream))
349 return false;
350
351 return true;
352 }
353
354 struct clock_source *resource_find_used_clk_src_for_sharing(
355 struct resource_context *res_ctx,
356 struct pipe_ctx *pipe_ctx)
357 {
358 int i;
359
360 for (i = 0; i < MAX_PIPES; i++) {
361 if (is_sharable_clk_src(&res_ctx->pipe_ctx[i], pipe_ctx))
362 return res_ctx->pipe_ctx[i].clock_source;
363 }
364
365 return NULL;
366 }
367
368 static enum pixel_format convert_pixel_format_to_dalsurface(
369 enum surface_pixel_format surface_pixel_format)
370 {
371 enum pixel_format dal_pixel_format = PIXEL_FORMAT_UNKNOWN;
372
373 switch (surface_pixel_format) {
374 case SURFACE_PIXEL_FORMAT_GRPH_PALETA_256_COLORS:
375 dal_pixel_format = PIXEL_FORMAT_INDEX8;
376 break;
377 case SURFACE_PIXEL_FORMAT_GRPH_ARGB1555:
378 dal_pixel_format = PIXEL_FORMAT_RGB565;
379 break;
380 case SURFACE_PIXEL_FORMAT_GRPH_RGB565:
381 dal_pixel_format = PIXEL_FORMAT_RGB565;
382 break;
383 case SURFACE_PIXEL_FORMAT_GRPH_ARGB8888:
384 dal_pixel_format = PIXEL_FORMAT_ARGB8888;
385 break;
386 case SURFACE_PIXEL_FORMAT_GRPH_ABGR8888:
387 dal_pixel_format = PIXEL_FORMAT_ARGB8888;
388 break;
389 case SURFACE_PIXEL_FORMAT_GRPH_ARGB2101010:
390 dal_pixel_format = PIXEL_FORMAT_ARGB2101010;
391 break;
392 case SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010:
393 dal_pixel_format = PIXEL_FORMAT_ARGB2101010;
394 break;
395 case SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010_XR_BIAS:
396 dal_pixel_format = PIXEL_FORMAT_ARGB2101010_XRBIAS;
397 break;
398 case SURFACE_PIXEL_FORMAT_GRPH_ABGR16161616F:
399 case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616F:
400 dal_pixel_format = PIXEL_FORMAT_FP16;
401 break;
402 case SURFACE_PIXEL_FORMAT_VIDEO_420_YCbCr:
403 case SURFACE_PIXEL_FORMAT_VIDEO_420_YCrCb:
404 dal_pixel_format = PIXEL_FORMAT_420BPP8;
405 break;
406 case SURFACE_PIXEL_FORMAT_VIDEO_420_10bpc_YCbCr:
407 case SURFACE_PIXEL_FORMAT_VIDEO_420_10bpc_YCrCb:
408 dal_pixel_format = PIXEL_FORMAT_420BPP10;
409 break;
410 case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616:
411 default:
412 dal_pixel_format = PIXEL_FORMAT_UNKNOWN;
413 break;
414 }
415 return dal_pixel_format;
416 }
417
418 static void rect_swap_helper(struct rect *rect)
419 {
420 uint32_t temp = 0;
421
422 temp = rect->height;
423 rect->height = rect->width;
424 rect->width = temp;
425
426 temp = rect->x;
427 rect->x = rect->y;
428 rect->y = temp;
429 }
430
431 static void calculate_viewport(struct pipe_ctx *pipe_ctx)
432 {
433 const struct dc_plane_state *plane_state = pipe_ctx->plane_state;
434 const struct dc_stream_state *stream = pipe_ctx->stream;
435 struct scaler_data *data = &pipe_ctx->scl_data;
436 struct rect surf_src = plane_state->src_rect;
437 struct rect clip = { 0 };
438 int vpc_div = (data->format == PIXEL_FORMAT_420BPP8
439 || data->format == PIXEL_FORMAT_420BPP10) ? 2 : 1;
440 bool pri_split = pipe_ctx->bottom_pipe &&
441 pipe_ctx->bottom_pipe->plane_state == pipe_ctx->plane_state;
442 bool sec_split = pipe_ctx->top_pipe &&
443 pipe_ctx->top_pipe->plane_state == pipe_ctx->plane_state;
444
445 if (stream->view_format == VIEW_3D_FORMAT_SIDE_BY_SIDE ||
446 stream->view_format == VIEW_3D_FORMAT_TOP_AND_BOTTOM) {
447 pri_split = false;
448 sec_split = false;
449 }
450
451 if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
452 pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270)
453 rect_swap_helper(&surf_src);
454
455 /* The actual clip is an intersection between stream
456 * source and surface clip
457 */
458 clip.x = stream->src.x > plane_state->clip_rect.x ?
459 stream->src.x : plane_state->clip_rect.x;
460
461 clip.width = stream->src.x + stream->src.width <
462 plane_state->clip_rect.x + plane_state->clip_rect.width ?
463 stream->src.x + stream->src.width - clip.x :
464 plane_state->clip_rect.x + plane_state->clip_rect.width - clip.x ;
465
466 clip.y = stream->src.y > plane_state->clip_rect.y ?
467 stream->src.y : plane_state->clip_rect.y;
468
469 clip.height = stream->src.y + stream->src.height <
470 plane_state->clip_rect.y + plane_state->clip_rect.height ?
471 stream->src.y + stream->src.height - clip.y :
472 plane_state->clip_rect.y + plane_state->clip_rect.height - clip.y ;
473
474 /* offset = surf_src.ofs + (clip.ofs - surface->dst_rect.ofs) * scl_ratio
475 * num_pixels = clip.num_pix * scl_ratio (a worked example follows this function)
476 */
477 data->viewport.x = surf_src.x + (clip.x - plane_state->dst_rect.x) *
478 surf_src.width / plane_state->dst_rect.width;
479 data->viewport.width = clip.width *
480 surf_src.width / plane_state->dst_rect.width;
481
482 data->viewport.y = surf_src.y + (clip.y - plane_state->dst_rect.y) *
483 surf_src.height / plane_state->dst_rect.height;
484 data->viewport.height = clip.height *
485 surf_src.height / plane_state->dst_rect.height;
486
487 /* Round down, compensate in init */
488 data->viewport_c.x = data->viewport.x / vpc_div;
489 data->viewport_c.y = data->viewport.y / vpc_div;
490 data->inits.h_c = (data->viewport.x % vpc_div) != 0 ?
491 dal_fixed31_32_half : dal_fixed31_32_zero;
492 data->inits.v_c = (data->viewport.y % vpc_div) != 0 ?
493 dal_fixed31_32_half : dal_fixed31_32_zero;
494 /* Round up, assuming the original video size always has even dimensions */
495 data->viewport_c.width = (data->viewport.width + vpc_div - 1) / vpc_div;
496 data->viewport_c.height = (data->viewport.height + vpc_div - 1) / vpc_div;
497
498 /* Handle hsplit */
499 if (pri_split || sec_split) {
500 /* HMirror XOR Secondary_pipe XOR Rotation_180 */
501 bool right_view = (sec_split != plane_state->horizontal_mirror) !=
502 (plane_state->rotation == ROTATION_ANGLE_180);
503
504 if (plane_state->rotation == ROTATION_ANGLE_90
505 || plane_state->rotation == ROTATION_ANGLE_270)
506 /* Secondary_pipe XOR Rotation_270 */
507 right_view = (plane_state->rotation == ROTATION_ANGLE_270) != sec_split;
508
509 if (right_view) {
510 data->viewport.width /= 2;
511 data->viewport_c.width /= 2;
512 data->viewport.x += data->viewport.width;
513 data->viewport_c.x += data->viewport_c.width;
514 /* Ceil offset pipe */
515 data->viewport.width += data->viewport.width % 2;
516 data->viewport_c.width += data->viewport_c.width % 2;
517 } else {
518 data->viewport.width /= 2;
519 data->viewport_c.width /= 2;
520 }
521 }
522
523 if (plane_state->rotation == ROTATION_ANGLE_90 ||
524 plane_state->rotation == ROTATION_ANGLE_270) {
525 rect_swap_helper(&data->viewport_c);
526 rect_swap_helper(&data->viewport);
527 }
528 }
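/*
 * Worked example for the viewport math above (all numbers assumed for
 * illustration): with surf_src = (0, 0, 3840x2160), dst_rect = (0, 0, 1920x1080)
 * and a clip of (480, 0, 960x1080), the viewport becomes
 *   viewport.x     = 0 + (480 - 0) * 3840 / 1920 = 960
 *   viewport.width = 960 * 3840 / 1920           = 1920
 * and for a 4:2:0 surface (vpc_div = 2) the chroma viewport is
 *   viewport_c.x = 960 / 2 = 480, viewport_c.width = (1920 + 1) / 2 = 960
 * with inits.h_c left at zero because 960 divides evenly by 2.
 */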
529
530 static void calculate_recout(struct pipe_ctx *pipe_ctx, struct view *recout_skip)
531 {
532 const struct dc_plane_state *plane_state = pipe_ctx->plane_state;
533 const struct dc_stream_state *stream = pipe_ctx->stream;
534 struct rect surf_src = plane_state->src_rect;
535 struct rect surf_clip = plane_state->clip_rect;
536 int recout_full_x, recout_full_y;
537
538 if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
539 pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270)
540 rect_swap_helper(&surf_src);
541
542 pipe_ctx->scl_data.recout.x = stream->dst.x;
543 if (stream->src.x < surf_clip.x)
544 pipe_ctx->scl_data.recout.x += (surf_clip.x
545 - stream->src.x) * stream->dst.width
546 / stream->src.width;
547
548 pipe_ctx->scl_data.recout.width = surf_clip.width *
549 stream->dst.width / stream->src.width;
550 if (pipe_ctx->scl_data.recout.width + pipe_ctx->scl_data.recout.x >
551 stream->dst.x + stream->dst.width)
552 pipe_ctx->scl_data.recout.width =
553 stream->dst.x + stream->dst.width
554 - pipe_ctx->scl_data.recout.x;
555
556 pipe_ctx->scl_data.recout.y = stream->dst.y;
557 if (stream->src.y < surf_clip.y)
558 pipe_ctx->scl_data.recout.y += (surf_clip.y
559 - stream->src.y) * stream->dst.height
560 / stream->src.height;
561
562 pipe_ctx->scl_data.recout.height = surf_clip.height *
563 stream->dst.height / stream->src.height;
564 if (pipe_ctx->scl_data.recout.height + pipe_ctx->scl_data.recout.y >
565 stream->dst.y + stream->dst.height)
566 pipe_ctx->scl_data.recout.height =
567 stream->dst.y + stream->dst.height
568 - pipe_ctx->scl_data.recout.y;
569
570 /* Handle h & vsplit */
571 if (pipe_ctx->top_pipe && pipe_ctx->top_pipe->plane_state ==
572 pipe_ctx->plane_state) {
573 if (stream->view_format == VIEW_3D_FORMAT_TOP_AND_BOTTOM) {
574 pipe_ctx->scl_data.recout.height /= 2;
575 pipe_ctx->scl_data.recout.y += pipe_ctx->scl_data.recout.height;
576 /* Floor primary pipe, ceil secondary pipe */
577 pipe_ctx->scl_data.recout.height += pipe_ctx->scl_data.recout.height % 2;
578 } else {
579 pipe_ctx->scl_data.recout.width /= 2;
580 pipe_ctx->scl_data.recout.x += pipe_ctx->scl_data.recout.width;
581 pipe_ctx->scl_data.recout.width += pipe_ctx->scl_data.recout.width % 2;
582 }
583 } else if (pipe_ctx->bottom_pipe &&
584 pipe_ctx->bottom_pipe->plane_state == pipe_ctx->plane_state) {
585 if (stream->view_format == VIEW_3D_FORMAT_TOP_AND_BOTTOM)
586 pipe_ctx->scl_data.recout.height /= 2;
587 else
588 pipe_ctx->scl_data.recout.width /= 2;
589 }
590
591 /* Unclipped recout offset = stream dst offset + ((surf dst offset - stream src offset)
592 * * 1 / stream scaling ratio) - (surf src offset * 1 / full scale ratio)
593 * (a worked example follows this function)
594 */
595 recout_full_x = stream->dst.x + (plane_state->dst_rect.x - stream->src.x)
596 * stream->dst.width / stream->src.width -
597 surf_src.x * plane_state->dst_rect.width / surf_src.width
598 * stream->dst.width / stream->src.width;
599 recout_full_y = stream->dst.y + (plane_state->dst_rect.y - stream->src.y)
600 * stream->dst.height / stream->src.height -
601 surf_src.y * plane_state->dst_rect.height / surf_src.height
602 * stream->dst.height / stream->src.height;
603
604 recout_skip->width = pipe_ctx->scl_data.recout.x - recout_full_x;
605 recout_skip->height = pipe_ctx->scl_data.recout.y - recout_full_y;
606 }
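/*
 * Worked example for recout_full above (numbers assumed for illustration):
 * with stream src == dst == 1920x1080 at (0, 0) (1:1 stream scaling), a plane
 * dst_rect.x of 100, surf_src.x of 50 and dst_rect.width of 800 against a
 * surf_src.width of 400:
 *   recout_full_x = 0 + (100 - 0) * 1920/1920
 *                     - 50 * 800/400 * 1920/1920 = 100 - 100 = 0
 * so a clip starting at surf_clip.x = 150 gives recout.x = 150 and therefore
 * recout_skip->width = 150 - 0 = 150.
 */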
607
608 static void calculate_scaling_ratios(struct pipe_ctx *pipe_ctx)
609 {
610 const struct dc_plane_state *plane_state = pipe_ctx->plane_state;
611 const struct dc_stream_state *stream = pipe_ctx->stream;
612 struct rect surf_src = plane_state->src_rect;
613 const int in_w = stream->src.width;
614 const int in_h = stream->src.height;
615 const int out_w = stream->dst.width;
616 const int out_h = stream->dst.height;
617
618 if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
619 pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270)
620 rect_swap_helper(&surf_src);
621
622 pipe_ctx->scl_data.ratios.horz = dal_fixed31_32_from_fraction(
623 surf_src.width,
624 plane_state->dst_rect.width);
625 pipe_ctx->scl_data.ratios.vert = dal_fixed31_32_from_fraction(
626 surf_src.height,
627 plane_state->dst_rect.height);
628
629 if (stream->view_format == VIEW_3D_FORMAT_SIDE_BY_SIDE)
630 pipe_ctx->scl_data.ratios.horz.value *= 2;
631 else if (stream->view_format == VIEW_3D_FORMAT_TOP_AND_BOTTOM)
632 pipe_ctx->scl_data.ratios.vert.value *= 2;
633
634 pipe_ctx->scl_data.ratios.vert.value = div64_s64(
635 pipe_ctx->scl_data.ratios.vert.value * in_h, out_h);
636 pipe_ctx->scl_data.ratios.horz.value = div64_s64(
637 pipe_ctx->scl_data.ratios.horz.value * in_w, out_w);
638
639 pipe_ctx->scl_data.ratios.horz_c = pipe_ctx->scl_data.ratios.horz;
640 pipe_ctx->scl_data.ratios.vert_c = pipe_ctx->scl_data.ratios.vert;
641
642 if (pipe_ctx->scl_data.format == PIXEL_FORMAT_420BPP8
643 || pipe_ctx->scl_data.format == PIXEL_FORMAT_420BPP10) {
644 pipe_ctx->scl_data.ratios.horz_c.value /= 2;
645 pipe_ctx->scl_data.ratios.vert_c.value /= 2;
646 }
647 }
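/*
 * Worked example for the ratios above (numbers assumed): a 3840-wide surf_src
 * scanned out into a 1920-wide dst_rect gives ratios.horz = 2.0; with stream
 * src and dst both 1920 wide the ratio is unchanged, and for a 4:2:0 surface
 * ratios.horz_c halves to 1.0. A side-by-side 3D stream would first double the
 * horizontal ratio before the stream scaling is applied.
 */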
648
649 static void calculate_inits_and_adj_vp(struct pipe_ctx *pipe_ctx, struct view *recout_skip)
650 {
651 struct scaler_data *data = &pipe_ctx->scl_data;
652 struct rect src = pipe_ctx->plane_state->src_rect;
653 int vpc_div = (data->format == PIXEL_FORMAT_420BPP8
654 || data->format == PIXEL_FORMAT_420BPP10) ? 2 : 1;
655
656
657 if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
658 pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270) {
659 rect_swap_helper(&src);
660 rect_swap_helper(&data->viewport_c);
661 rect_swap_helper(&data->viewport);
662 }
663
664 /*
665 * Init is calculated according to the formula (worked example after this function):
666 * init = (scaling_ratio + number_of_taps + 1) / 2
667 * init_bot = init + scaling_ratio
668 * init_c = init + truncated_vp_c_offset (from calculate_viewport)
669 */
670 data->inits.h = dal_fixed31_32_div_int(
671 dal_fixed31_32_add_int(data->ratios.horz, data->taps.h_taps + 1), 2);
672
673 data->inits.h_c = dal_fixed31_32_add(data->inits.h_c, dal_fixed31_32_div_int(
674 dal_fixed31_32_add_int(data->ratios.horz_c, data->taps.h_taps_c + 1), 2));
675
676 data->inits.v = dal_fixed31_32_div_int(
677 dal_fixed31_32_add_int(data->ratios.vert, data->taps.v_taps + 1), 2);
678
679 data->inits.v_c = dal_fixed31_32_add(data->inits.v_c, dal_fixed31_32_div_int(
680 dal_fixed31_32_add_int(data->ratios.vert_c, data->taps.v_taps_c + 1), 2));
681
682
683 /* Adjust for viewport end clip-off */
684 if ((data->viewport.x + data->viewport.width) < (src.x + src.width)) {
685 int vp_clip = src.x + src.width - data->viewport.width - data->viewport.x;
686 int int_part = dal_fixed31_32_floor(
687 dal_fixed31_32_sub(data->inits.h, data->ratios.horz));
688
689 int_part = int_part > 0 ? int_part : 0;
690 data->viewport.width += int_part < vp_clip ? int_part : vp_clip;
691 }
692 if ((data->viewport.y + data->viewport.height) < (src.y + src.height)) {
693 int vp_clip = src.y + src.height - data->viewport.height - data->viewport.y;
694 int int_part = dal_fixed31_32_floor(
695 dal_fixed31_32_sub(data->inits.v, data->ratios.vert));
696
697 int_part = int_part > 0 ? int_part : 0;
698 data->viewport.height += int_part < vp_clip ? int_part : vp_clip;
699 }
700 if ((data->viewport_c.x + data->viewport_c.width) < (src.x + src.width) / vpc_div) {
701 int vp_clip = (src.x + src.width) / vpc_div -
702 data->viewport_c.width - data->viewport_c.x;
703 int int_part = dal_fixed31_32_floor(
704 dal_fixed31_32_sub(data->inits.h_c, data->ratios.horz_c));
705
706 int_part = int_part > 0 ? int_part : 0;
707 data->viewport_c.width += int_part < vp_clip ? int_part : vp_clip;
708 }
709 if ((data->viewport_c.y + data->viewport_c.height) < (src.y + src.height) / vpc_div) {
710 int vp_clip = (src.y + src.height) / vpc_div -
711 data->viewport_c.height - data->viewport_c.y;
712 int int_part = dal_fixed31_32_floor(
713 dal_fixed31_32_sub(data->inits.v_c, data->ratios.vert_c));
714
715 int_part = int_part > 0 ? int_part : 0;
716 data->viewport_c.height += int_part < vp_clip ? int_part : vp_clip;
717 }
718
719 /* Adjust for non-0 viewport offset */
720 if (data->viewport.x) {
721 int int_part;
722
723 data->inits.h = dal_fixed31_32_add(data->inits.h, dal_fixed31_32_mul_int(
724 data->ratios.horz, recout_skip->width));
725 int_part = dal_fixed31_32_floor(data->inits.h) - data->viewport.x;
726 if (int_part < data->taps.h_taps) {
727 int int_adj = data->viewport.x >= (data->taps.h_taps - int_part) ?
728 (data->taps.h_taps - int_part) : data->viewport.x;
729 data->viewport.x -= int_adj;
730 data->viewport.width += int_adj;
731 int_part += int_adj;
732 } else if (int_part > data->taps.h_taps) {
733 data->viewport.x += int_part - data->taps.h_taps;
734 data->viewport.width -= int_part - data->taps.h_taps;
735 int_part = data->taps.h_taps;
736 }
737 data->inits.h.value &= 0xffffffff;
738 data->inits.h = dal_fixed31_32_add_int(data->inits.h, int_part);
739 }
740
741 if (data->viewport_c.x) {
742 int int_part;
743
744 data->inits.h_c = dal_fixed31_32_add(data->inits.h_c, dal_fixed31_32_mul_int(
745 data->ratios.horz_c, recout_skip->width));
746 int_part = dal_fixed31_32_floor(data->inits.h_c) - data->viewport_c.x;
747 if (int_part < data->taps.h_taps_c) {
748 int int_adj = data->viewport_c.x >= (data->taps.h_taps_c - int_part) ?
749 (data->taps.h_taps_c - int_part) : data->viewport_c.x;
750 data->viewport_c.x -= int_adj;
751 data->viewport_c.width += int_adj;
752 int_part += int_adj;
753 } else if (int_part > data->taps.h_taps_c) {
754 data->viewport_c.x += int_part - data->taps.h_taps_c;
755 data->viewport_c.width -= int_part - data->taps.h_taps_c;
756 int_part = data->taps.h_taps_c;
757 }
758 data->inits.h_c.value &= 0xffffffff;
759 data->inits.h_c = dal_fixed31_32_add_int(data->inits.h_c, int_part);
760 }
761
762 if (data->viewport.y) {
763 int int_part;
764
765 data->inits.v = dal_fixed31_32_add(data->inits.v, dal_fixed31_32_mul_int(
766 data->ratios.vert, recout_skip->height));
767 int_part = dal_fixed31_32_floor(data->inits.v) - data->viewport.y;
768 if (int_part < data->taps.v_taps) {
769 int int_adj = data->viewport.y >= (data->taps.v_taps - int_part) ?
770 (data->taps.v_taps - int_part) : data->viewport.y;
771 data->viewport.y -= int_adj;
772 data->viewport.height += int_adj;
773 int_part += int_adj;
774 } else if (int_part > data->taps.v_taps) {
775 data->viewport.y += int_part - data->taps.v_taps;
776 data->viewport.height -= int_part - data->taps.v_taps;
777 int_part = data->taps.v_taps;
778 }
779 data->inits.v.value &= 0xffffffff;
780 data->inits.v = dal_fixed31_32_add_int(data->inits.v, int_part);
781 }
782
783 if (data->viewport_c.y) {
784 int int_part;
785
786 data->inits.v_c = dal_fixed31_32_add(data->inits.v_c, dal_fixed31_32_mul_int(
787 data->ratios.vert_c, recout_skip->height));
788 int_part = dal_fixed31_32_floor(data->inits.v_c) - data->viewport_c.y;
789 if (int_part < data->taps.v_taps_c) {
790 int int_adj = data->viewport_c.y >= (data->taps.v_taps_c - int_part) ?
791 (data->taps.v_taps_c - int_part) : data->viewport_c.y;
792 data->viewport_c.y -= int_adj;
793 data->viewport_c.height += int_adj;
794 int_part += int_adj;
795 } else if (int_part > data->taps.v_taps_c) {
796 data->viewport_c.y += int_part - data->taps.v_taps_c;
797 data->viewport_c.height -= int_part - data->taps.v_taps_c;
798 int_part = data->taps.v_taps_c;
799 }
800 data->inits.v_c.value &= 0xffffffff;
801 data->inits.v_c = dal_fixed31_32_add_int(data->inits.v_c, int_part);
802 }
803
804 /* Interlaced inits based on final vert inits */
805 data->inits.v_bot = dal_fixed31_32_add(data->inits.v, data->ratios.vert);
806 data->inits.v_c_bot = dal_fixed31_32_add(data->inits.v_c, data->ratios.vert_c);
807
808 if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
809 pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270) {
810 rect_swap_helper(&data->viewport_c);
811 rect_swap_helper(&data->viewport);
812 }
813 }
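/*
 * Worked example for the init formula noted at the top of this function
 * (numbers assumed): with a vertical ratio of 2.0 and 4 vertical taps,
 *   init     = (2.0 + 4 + 1) / 2 = 3.5
 *   init_bot = 3.5 + 2.0         = 5.5
 * and for chroma the truncated viewport offset from calculate_viewport()
 * (0 or 0.5) is added on top of the same expression.
 */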
814
815 bool resource_build_scaling_params(struct pipe_ctx *pipe_ctx)
816 {
817 const struct dc_plane_state *plane_state = pipe_ctx->plane_state;
818 struct dc_crtc_timing *timing = &pipe_ctx->stream->timing;
819 struct view recout_skip = { 0 };
820 bool res = false;
821
822 /* Important ordering: scaling ratio calculation requires the pixel format,
823 * lb depth calculation requires the recout, and taps require the scaling ratios.
824 * Inits require the viewport, taps, ratios and the recout of the split pipe.
825 */
826 pipe_ctx->scl_data.format = convert_pixel_format_to_dalsurface(
827 pipe_ctx->plane_state->format);
828
829 calculate_scaling_ratios(pipe_ctx);
830
831 calculate_viewport(pipe_ctx);
832
833 if (pipe_ctx->scl_data.viewport.height < 16 || pipe_ctx->scl_data.viewport.width < 16)
834 return false;
835
836 calculate_recout(pipe_ctx, &recout_skip);
837
838 /**
839 * Setting line buffer pixel depth to 24bpp yields banding
840 * on certain displays, such as the Sharp 4k
841 */
842 pipe_ctx->scl_data.lb_params.depth = LB_PIXEL_DEPTH_30BPP;
843
844 pipe_ctx->scl_data.h_active = timing->h_addressable;
845 pipe_ctx->scl_data.v_active = timing->v_addressable;
846
847 /* Taps calculations */
848 res = pipe_ctx->xfm->funcs->transform_get_optimal_number_of_taps(
849 pipe_ctx->xfm, &pipe_ctx->scl_data, &plane_state->scaling_quality);
850
851 if (!res) {
852 /* Try 24 bpp linebuffer */
853 pipe_ctx->scl_data.lb_params.depth = LB_PIXEL_DEPTH_24BPP;
854
855 res = pipe_ctx->xfm->funcs->transform_get_optimal_number_of_taps(
856 pipe_ctx->xfm, &pipe_ctx->scl_data, &plane_state->scaling_quality);
857 }
858
859 if (res)
860 /* May need to re-check lb size after this in some obscure scenario */
861 calculate_inits_and_adj_vp(pipe_ctx, &recout_skip);
862
863 dm_logger_write(pipe_ctx->stream->ctx->logger, LOG_SCALER,
864 "%s: Viewport:\nheight:%d width:%d x:%d "
865 "y:%d\n dst_rect:\nheight:%d width:%d x:%d "
866 "y:%d\n",
867 __func__,
868 pipe_ctx->scl_data.viewport.height,
869 pipe_ctx->scl_data.viewport.width,
870 pipe_ctx->scl_data.viewport.x,
871 pipe_ctx->scl_data.viewport.y,
872 plane_state->dst_rect.height,
873 plane_state->dst_rect.width,
874 plane_state->dst_rect.x,
875 plane_state->dst_rect.y);
876
877 return res;
878 }
879
880
881 enum dc_status resource_build_scaling_params_for_context(
882 const struct core_dc *dc,
883 struct validate_context *context)
884 {
885 int i;
886
887 for (i = 0; i < MAX_PIPES; i++) {
888 if (context->res_ctx.pipe_ctx[i].plane_state != NULL &&
889 context->res_ctx.pipe_ctx[i].stream != NULL)
890 if (!resource_build_scaling_params(&context->res_ctx.pipe_ctx[i]))
891 return DC_FAIL_SCALING;
892 }
893
894 return DC_OK;
895 }
896
897 struct pipe_ctx *find_idle_secondary_pipe(
898 struct resource_context *res_ctx,
899 const struct resource_pool *pool)
900 {
901 int i;
902 struct pipe_ctx *secondary_pipe = NULL;
903
904 /*
905 * search backwards for the second pipe to keep pipe
906 * assignment more consistent
907 */
908
909 for (i = pool->pipe_count - 1; i >= 0; i--) {
910 if (res_ctx->pipe_ctx[i].stream == NULL) {
911 secondary_pipe = &res_ctx->pipe_ctx[i];
912 secondary_pipe->pipe_idx = i;
913 break;
914 }
915 }
916
917
918 return secondary_pipe;
919 }
920
921 struct pipe_ctx *resource_get_head_pipe_for_stream(
922 struct resource_context *res_ctx,
923 struct dc_stream_state *stream)
924 {
925 int i;
926 for (i = 0; i < MAX_PIPES; i++) {
927 if (res_ctx->pipe_ctx[i].stream == stream &&
928 res_ctx->pipe_ctx[i].stream_enc) {
929 return &res_ctx->pipe_ctx[i];
930 break;
931 }
932 }
933 return NULL;
934 }
935
936 /*
937 * A free_pipe for a stream is defined here as a pipe
938 * that has no surface attached yet
939 */
940 static struct pipe_ctx *acquire_free_pipe_for_stream(
941 struct validate_context *context,
942 const struct resource_pool *pool,
943 struct dc_stream_state *stream)
944 {
945 int i;
946 struct resource_context *res_ctx = &context->res_ctx;
947
948 struct pipe_ctx *head_pipe = NULL;
949
950 /* Find head pipe, which has the back end set up*/
951
952 head_pipe = resource_get_head_pipe_for_stream(res_ctx, stream);
953
954 if (!head_pipe)
955 ASSERT(0);
956
957 if (!head_pipe->plane_state)
958 return head_pipe;
959
960 /* Re-use a pipe already acquired for this stream, if available */
961 for (i = pool->pipe_count - 1; i >= 0; i--) {
962 if (res_ctx->pipe_ctx[i].stream == stream &&
963 !res_ctx->pipe_ctx[i].plane_state) {
964 return &res_ctx->pipe_ctx[i];
965 }
966 }
967
968 /*
969 * At this point we have no reusable pipe for this stream, so we need
970 * to acquire an idle one to satisfy the request.
971 */
972
973 if (!pool->funcs->acquire_idle_pipe_for_layer)
974 return NULL;
975
976 return pool->funcs->acquire_idle_pipe_for_layer(context, pool, stream);
977
978 }
979
980 static void release_free_pipes_for_stream(
981 struct resource_context *res_ctx,
982 struct dc_stream_state *stream)
983 {
984 int i;
985
986 for (i = MAX_PIPES - 1; i >= 0; i--) {
987 /* never release the topmost pipe*/
988 if (res_ctx->pipe_ctx[i].stream == stream &&
989 res_ctx->pipe_ctx[i].top_pipe &&
990 !res_ctx->pipe_ctx[i].plane_state) {
991 memset(&res_ctx->pipe_ctx[i], 0, sizeof(struct pipe_ctx));
992 }
993 }
994 }
995
996 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
997 static int acquire_first_split_pipe(
998 struct resource_context *res_ctx,
999 const struct resource_pool *pool,
1000 struct dc_stream_state *stream)
1001 {
1002 int i;
1003
1004 for (i = 0; i < pool->pipe_count; i++) {
1005 struct pipe_ctx *pipe_ctx = &res_ctx->pipe_ctx[i];
1006
1007 if (pipe_ctx->top_pipe &&
1008 pipe_ctx->top_pipe->plane_state == pipe_ctx->plane_state) {
1009 pipe_ctx->top_pipe->bottom_pipe = pipe_ctx->bottom_pipe;
1010 if (pipe_ctx->bottom_pipe)
1011 pipe_ctx->bottom_pipe->top_pipe = pipe_ctx->top_pipe;
1012
1013 memset(pipe_ctx, 0, sizeof(*pipe_ctx));
1014 pipe_ctx->tg = pool->timing_generators[i];
1015 pipe_ctx->mi = pool->mis[i];
1016 pipe_ctx->ipp = pool->ipps[i];
1017 pipe_ctx->xfm = pool->transforms[i];
1018 pipe_ctx->opp = pool->opps[i];
1019 pipe_ctx->dis_clk = pool->display_clock;
1020 pipe_ctx->pipe_idx = i;
1021
1022 pipe_ctx->stream = stream;
1023 return i;
1024 }
1025 }
1026 return -1;
1027 }
1028 #endif
1029
1030 bool resource_attach_surfaces_to_context(
1031 struct dc_plane_state * const *plane_states,
1032 int surface_count,
1033 struct dc_stream_state *stream,
1034 struct validate_context *context,
1035 const struct resource_pool *pool)
1036 {
1037 int i;
1038 struct pipe_ctx *tail_pipe;
1039 struct dc_stream_status *stream_status = NULL;
1040
1041
1042 if (surface_count > MAX_SURFACE_NUM) {
1043 dm_error("Surface: can not attach %d surfaces! Maximum is: %d\n",
1044 surface_count, MAX_SURFACE_NUM);
1045 return false;
1046 }
1047
1048 for (i = 0; i < context->stream_count; i++)
1049 if (context->streams[i] == stream) {
1050 stream_status = &context->stream_status[i];
1051 break;
1052 }
1053 if (stream_status == NULL) {
1054 dm_error("Existing stream not found; failed to attach surfaces\n");
1055 return false;
1056 }
1057
1058 /* retain new surfaces */
1059 for (i = 0; i < surface_count; i++)
1060 dc_plane_state_retain(plane_states[i]);
1061
1062 /* detach surfaces from pipes */
1063 for (i = 0; i < pool->pipe_count; i++)
1064 if (context->res_ctx.pipe_ctx[i].stream == stream) {
1065 context->res_ctx.pipe_ctx[i].plane_state = NULL;
1066 context->res_ctx.pipe_ctx[i].bottom_pipe = NULL;
1067 }
1068
1069 /* release existing surfaces*/
1070 for (i = 0; i < stream_status->plane_count; i++)
1071 dc_plane_state_release(stream_status->plane_states[i]);
1072
1073 for (i = surface_count; i < stream_status->plane_count; i++)
1074 stream_status->plane_states[i] = NULL;
1075
1076 tail_pipe = NULL;
1077 for (i = 0; i < surface_count; i++) {
1078 struct dc_plane_state *plane_state = plane_states[i];
1079 struct pipe_ctx *free_pipe = acquire_free_pipe_for_stream(
1080 context, pool, stream);
1081
1082 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
1083 if (!free_pipe) {
1084 int pipe_idx = acquire_first_split_pipe(&context->res_ctx, pool, stream);
1085 if (pipe_idx >= 0)
1086 free_pipe = &context->res_ctx.pipe_ctx[pipe_idx];
1087 }
1088 #endif
1089 if (!free_pipe) {
1090 stream_status->plane_states[i] = NULL;
1091 return false;
1092 }
1093
1094 free_pipe->plane_state = plane_state;
1095
1096 if (tail_pipe) {
1097 free_pipe->tg = tail_pipe->tg;
1098 free_pipe->opp = tail_pipe->opp;
1099 free_pipe->stream_enc = tail_pipe->stream_enc;
1100 free_pipe->audio = tail_pipe->audio;
1101 free_pipe->clock_source = tail_pipe->clock_source;
1102 free_pipe->top_pipe = tail_pipe;
1103 tail_pipe->bottom_pipe = free_pipe;
1104 }
1105
1106 tail_pipe = free_pipe;
1107 }
1108
1109 release_free_pipes_for_stream(&context->res_ctx, stream);
1110
1111 /* assign new surfaces*/
1112 for (i = 0; i < surface_count; i++)
1113 stream_status->plane_states[i] = plane_states[i];
1114
1115 stream_status->plane_count = surface_count;
1116
1117 return true;
1118 }
1119
1120
1121 static bool is_timing_changed(struct dc_stream_state *cur_stream,
1122 struct dc_stream_state *new_stream)
1123 {
1124 if (cur_stream == NULL)
1125 return true;
1126
1127 /* If the sink pointer changed, this is a hotplug; we should do a
1128 * full hw setup.
1129 */
1130 if (cur_stream->sink != new_stream->sink)
1131 return true;
1132
1133 /* If output color space is changed, need to reprogram info frames */
1134 if (cur_stream->output_color_space != new_stream->output_color_space)
1135 return true;
1136
1137 return memcmp(
1138 &cur_stream->timing,
1139 &new_stream->timing,
1140 sizeof(struct dc_crtc_timing)) != 0;
1141 }
1142
1143 static bool are_stream_backends_same(
1144 struct dc_stream_state *stream_a, struct dc_stream_state *stream_b)
1145 {
1146 if (stream_a == stream_b)
1147 return true;
1148
1149 if (stream_a == NULL || stream_b == NULL)
1150 return false;
1151
1152 if (is_timing_changed(stream_a, stream_b))
1153 return false;
1154
1155 return true;
1156 }
1157
1158 bool dc_is_stream_unchanged(
1159 struct dc_stream_state *old_stream, struct dc_stream_state *stream)
1160 {
1161
1162 if (!are_stream_backends_same(old_stream, stream))
1163 return false;
1164
1165 return true;
1166 }
1167
1168 bool resource_validate_attach_surfaces(
1169 const struct dc_validation_set set[],
1170 int set_count,
1171 const struct validate_context *old_context,
1172 struct validate_context *context,
1173 const struct resource_pool *pool)
1174 {
1175 int i, j;
1176
1177 for (i = 0; i < set_count; i++) {
1178 for (j = 0; old_context && j < old_context->stream_count; j++)
1179 if (dc_is_stream_unchanged(
1180 old_context->streams[j],
1181 context->streams[i])) {
1182 if (!resource_attach_surfaces_to_context(
1183 old_context->stream_status[j].plane_states,
1184 old_context->stream_status[j].plane_count,
1185 context->streams[i],
1186 context, pool))
1187 return false;
1188 context->stream_status[i] = old_context->stream_status[j];
1189 }
1190 if (set[i].plane_count != 0)
1191 if (!resource_attach_surfaces_to_context(
1192 set[i].plane_states,
1193 set[i].plane_count,
1194 context->streams[i],
1195 context, pool))
1196 return false;
1197
1198 }
1199
1200 return true;
1201 }
1202
1203 /* Maximum TMDS single link pixel clock 165MHz */
1204 #define TMDS_MAX_PIXEL_CLOCK_IN_KHZ 165000
1205 #define TMDS_MAX_PIXEL_CLOCK_IN_KHZ_UPMOST 297000
1206
1207 static void set_stream_engine_in_use(
1208 struct resource_context *res_ctx,
1209 const struct resource_pool *pool,
1210 struct stream_encoder *stream_enc)
1211 {
1212 int i;
1213
1214 for (i = 0; i < pool->stream_enc_count; i++) {
1215 if (pool->stream_enc[i] == stream_enc)
1216 res_ctx->is_stream_enc_acquired[i] = true;
1217 }
1218 }
1219
1220 /* TODO: release audio object */
1221 static void set_audio_in_use(
1222 struct resource_context *res_ctx,
1223 const struct resource_pool *pool,
1224 struct audio *audio)
1225 {
1226 int i;
1227 for (i = 0; i < pool->audio_count; i++) {
1228 if (pool->audios[i] == audio)
1229 res_ctx->is_audio_acquired[i] = true;
1230 }
1231 }
1232
1233 static int acquire_first_free_pipe(
1234 struct resource_context *res_ctx,
1235 const struct resource_pool *pool,
1236 struct dc_stream_state *stream)
1237 {
1238 int i;
1239
1240 for (i = 0; i < pool->pipe_count; i++) {
1241 if (!res_ctx->pipe_ctx[i].stream) {
1242 struct pipe_ctx *pipe_ctx = &res_ctx->pipe_ctx[i];
1243
1244 pipe_ctx->tg = pool->timing_generators[i];
1245 pipe_ctx->mi = pool->mis[i];
1246 pipe_ctx->ipp = pool->ipps[i];
1247 pipe_ctx->xfm = pool->transforms[i];
1248 pipe_ctx->opp = pool->opps[i];
1249 pipe_ctx->dis_clk = pool->display_clock;
1250 pipe_ctx->pipe_idx = i;
1251
1252
1253 pipe_ctx->stream = stream;
1254 return i;
1255 }
1256 }
1257 return -1;
1258 }
1259
1260 static struct stream_encoder *find_first_free_match_stream_enc_for_link(
1261 struct resource_context *res_ctx,
1262 const struct resource_pool *pool,
1263 struct dc_stream_state *stream)
1264 {
1265 int i;
1266 int j = -1;
1267 struct dc_link *link = stream->sink->link;
1268
1269 for (i = 0; i < pool->stream_enc_count; i++) {
1270 if (!res_ctx->is_stream_enc_acquired[i] &&
1271 pool->stream_enc[i]) {
1272 /* Store first available for MST second display
1273 * in daisy chain use case */
1274 j = i;
1275 if (pool->stream_enc[i]->id ==
1276 link->link_enc->preferred_engine)
1277 return pool->stream_enc[i];
1278 }
1279 }
1280
1281 /*
1282 * The fallback below can happen when the stream encoder is already acquired:
1283 * 1) for the second MST display in a daisy chain, whose preferred engine is
1284 * already acquired;
1285 * 2) for another link whose preferred engine was already acquired by an
1286 * MST configuration.
1287 *
1288 * If the signal is DP and the preferred engine is not found, return the last available one.
1289 *
1290 * TODO - This is just a patch-up; a generic solution is
1291 * required for non-DP connectors.
1292 */
1293
1294 if (j >= 0 && dc_is_dp_signal(stream->signal))
1295 return pool->stream_enc[j];
1296
1297 return NULL;
1298 }
1299
1300 static struct audio *find_first_free_audio(
1301 struct resource_context *res_ctx,
1302 const struct resource_pool *pool)
1303 {
1304 int i;
1305 for (i = 0; i < pool->audio_count; i++) {
1306 if (res_ctx->is_audio_acquired[i] == false) {
1307 return pool->audios[i];
1308 }
1309 }
1310
1311 return 0;
1312 }
1313
1314 static void update_stream_signal(struct dc_stream_state *stream)
1315 {
1316 if (stream->output_signal == SIGNAL_TYPE_NONE) {
1317 struct dc_sink *dc_sink = stream->sink;
1318
1319 if (dc_sink->sink_signal == SIGNAL_TYPE_NONE)
1320 stream->signal = stream->sink->link->connector_signal;
1321 else
1322 stream->signal = dc_sink->sink_signal;
1323 } else {
1324 stream->signal = stream->output_signal;
1325 }
1326
1327 if (dc_is_dvi_signal(stream->signal)) {
1328 if (stream->timing.pix_clk_khz > TMDS_MAX_PIXEL_CLOCK_IN_KHZ_UPMOST &&
1329 stream->sink->sink_signal != SIGNAL_TYPE_DVI_SINGLE_LINK)
1330 stream->signal = SIGNAL_TYPE_DVI_DUAL_LINK;
1331 else
1332 stream->signal = SIGNAL_TYPE_DVI_SINGLE_LINK;
1333 }
1334 }
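/*
 * Worked example (timings assumed): a DVI stream with pix_clk_khz = 348500
 * (2560x1600@60) exceeds TMDS_MAX_PIXEL_CLOCK_IN_KHZ_UPMOST (297000) and,
 * unless the sink reports itself as single-link only, is promoted to
 * SIGNAL_TYPE_DVI_DUAL_LINK; a 148500 kHz 1080p stream stays single link.
 */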
1335
1336 bool resource_is_stream_unchanged(
1337 struct validate_context *old_context, struct dc_stream_state *stream)
1338 {
1339 int i;
1340
1341 for (i = 0; i < old_context->stream_count; i++) {
1342 struct dc_stream_state *old_stream = old_context->streams[i];
1343
1344 if (are_stream_backends_same(old_stream, stream))
1345 return true;
1346 }
1347
1348 return false;
1349 }
1350
1351 static void copy_pipe_ctx(
1352 const struct pipe_ctx *from_pipe_ctx, struct pipe_ctx *to_pipe_ctx)
1353 {
1354 struct dc_plane_state *plane_state = to_pipe_ctx->plane_state;
1355 struct dc_stream_state *stream = to_pipe_ctx->stream;
1356
1357 *to_pipe_ctx = *from_pipe_ctx;
1358 to_pipe_ctx->stream = stream;
1359 if (plane_state != NULL)
1360 to_pipe_ctx->plane_state = plane_state;
1361 }
1362
1363 static struct dc_stream_state *find_pll_sharable_stream(
1364 struct dc_stream_state *stream_needs_pll,
1365 struct validate_context *context)
1366 {
1367 int i;
1368
1369 for (i = 0; i < context->stream_count; i++) {
1370 struct dc_stream_state *stream_has_pll = context->streams[i];
1371
1372 /* We are looking for non dp, non virtual stream */
1373 if (resource_are_streams_timing_synchronizable(
1374 stream_needs_pll, stream_has_pll)
1375 && !dc_is_dp_signal(stream_has_pll->signal)
1376 && stream_has_pll->sink->link->connector_signal
1377 != SIGNAL_TYPE_VIRTUAL)
1378 return stream_has_pll;
1379
1380 }
1381
1382 return NULL;
1383 }
1384
1385 static int get_norm_pix_clk(const struct dc_crtc_timing *timing)
1386 {
1387 uint32_t pix_clk = timing->pix_clk_khz;
1388 uint32_t normalized_pix_clk = pix_clk;
1389
1390 if (timing->pixel_encoding == PIXEL_ENCODING_YCBCR420)
1391 pix_clk /= 2;
1392 if (timing->pixel_encoding != PIXEL_ENCODING_YCBCR422) {
1393 switch (timing->display_color_depth) {
1394 case COLOR_DEPTH_888:
1395 normalized_pix_clk = pix_clk;
1396 break;
1397 case COLOR_DEPTH_101010:
1398 normalized_pix_clk = (pix_clk * 30) / 24;
1399 break;
1400 case COLOR_DEPTH_121212:
1401 normalized_pix_clk = (pix_clk * 36) / 24;
1402 break;
1403 case COLOR_DEPTH_161616:
1404 normalized_pix_clk = (pix_clk * 48) / 24;
1405 break;
1406 default:
1407 ASSERT(0);
1408 break;
1409 }
1410 }
1411 return normalized_pix_clk;
1412 }
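/*
 * Worked example (timings assumed): a 148500 kHz 1080p timing at 10 bpc
 * normalizes to 148500 * 30 / 24 = 185625 kHz, while a 594000 kHz 4:2:0
 * 4K@60 timing is first halved to 297000 kHz and, at 8 bpc, stays there.
 */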
1413
1414 static void calculate_phy_pix_clks(struct dc_stream_state *stream)
1415 {
1416 update_stream_signal(stream);
1417
1418 /* update actual pixel clock on all streams */
1419 if (dc_is_hdmi_signal(stream->signal))
1420 stream->phy_pix_clk = get_norm_pix_clk(
1421 &stream->timing);
1422 else
1423 stream->phy_pix_clk =
1424 stream->timing.pix_clk_khz;
1425 }
1426
1427 enum dc_status resource_map_pool_resources(
1428 const struct core_dc *dc,
1429 struct validate_context *context,
1430 struct validate_context *old_context)
1431 {
1432 const struct resource_pool *pool = dc->res_pool;
1433 int i, j;
1434
1435 for (i = 0; old_context && i < context->stream_count; i++) {
1436 struct dc_stream_state *stream = context->streams[i];
1437
1438 if (!resource_is_stream_unchanged(old_context, stream)) {
1439 if (stream != NULL && old_context->streams[i] != NULL) {
1440 stream->bit_depth_params =
1441 old_context->streams[i]->bit_depth_params;
1442 stream->clamping = old_context->streams[i]->clamping;
1443 continue;
1444 }
1445 }
1446
1447 /* mark resources used for stream that is already active */
1448 for (j = 0; j < pool->pipe_count; j++) {
1449 struct pipe_ctx *pipe_ctx =
1450 &context->res_ctx.pipe_ctx[j];
1451 const struct pipe_ctx *old_pipe_ctx =
1452 &old_context->res_ctx.pipe_ctx[j];
1453
1454 if (!are_stream_backends_same(old_pipe_ctx->stream, stream))
1455 continue;
1456
1457 if (old_pipe_ctx->top_pipe)
1458 continue;
1459
1460 pipe_ctx->stream = stream;
1461 copy_pipe_ctx(old_pipe_ctx, pipe_ctx);
1462
1463 /* Split pipe resource, do not acquire back end */
1464 if (!pipe_ctx->stream_enc)
1465 continue;
1466
1467 set_stream_engine_in_use(
1468 &context->res_ctx, pool,
1469 pipe_ctx->stream_enc);
1470
1471 /* Switch to dp clock source only if there is
1472 * no non dp stream that shares the same timing
1473 * with the dp stream.
1474 */
1475 if (dc_is_dp_signal(pipe_ctx->stream->signal) &&
1476 !find_pll_sharable_stream(stream, context))
1477 pipe_ctx->clock_source = pool->dp_clock_source;
1478
1479 resource_reference_clock_source(
1480 &context->res_ctx, pool,
1481 pipe_ctx->clock_source);
1482
1483 set_audio_in_use(&context->res_ctx, pool,
1484 pipe_ctx->audio);
1485 }
1486 }
1487
1488 for (i = 0; i < context->stream_count; i++) {
1489 struct dc_stream_state *stream = context->streams[i];
1490 struct pipe_ctx *pipe_ctx = NULL;
1491 int pipe_idx = -1;
1492
1493 if (old_context && resource_is_stream_unchanged(old_context, stream))
1494 continue;
1495 /* acquire new resources */
1496 pipe_idx = acquire_first_free_pipe(&context->res_ctx, pool, stream);
1497 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
1498 if (pipe_idx < 0)
1499 acquire_first_split_pipe(&context->res_ctx, pool, stream);
1500 #endif
1501 if (pipe_idx < 0)
1502 return DC_NO_CONTROLLER_RESOURCE;
1503
1504 pipe_ctx = &context->res_ctx.pipe_ctx[pipe_idx];
1505
1506 pipe_ctx->stream_enc =
1507 find_first_free_match_stream_enc_for_link(
1508 &context->res_ctx, pool, stream);
1509
1510 if (!pipe_ctx->stream_enc)
1511 return DC_NO_STREAM_ENG_RESOURCE;
1512
1513 set_stream_engine_in_use(
1514 &context->res_ctx, pool,
1515 pipe_ctx->stream_enc);
1516
1517 /* TODO: Add check if ASIC support and EDID audio */
1518 if (!stream->sink->converter_disable_audio &&
1519 dc_is_audio_capable_signal(pipe_ctx->stream->signal) &&
1520 stream->audio_info.mode_count) {
1521 pipe_ctx->audio = find_first_free_audio(
1522 &context->res_ctx, pool);
1523
1524 /*
1525 * Audio is assigned in order, first come first served.
1526 * There are ASICs which have fewer audio
1527 * resources than pipes.
1528 */
1529 if (pipe_ctx->audio)
1530 set_audio_in_use(
1531 &context->res_ctx, pool,
1532 pipe_ctx->audio);
1533 }
1534
1535 context->stream_status[i].primary_otg_inst = pipe_ctx->tg->inst;
1536 }
1537
1538 return DC_OK;
1539 }
1540
1541 /* first stream in the context is used to populate the rest */
1542 void validate_guaranteed_copy_streams(
1543 struct validate_context *context,
1544 int max_streams)
1545 {
1546 int i;
1547
1548 for (i = 1; i < max_streams; i++) {
1549 context->streams[i] = context->streams[0];
1550
1551 copy_pipe_ctx(&context->res_ctx.pipe_ctx[0],
1552 &context->res_ctx.pipe_ctx[i]);
1553 context->res_ctx.pipe_ctx[i].stream =
1554 context->res_ctx.pipe_ctx[0].stream;
1555
1556 dc_stream_retain(context->streams[i]);
1557 context->stream_count++;
1558 }
1559 }
1560
1561 static void patch_gamut_packet_checksum(
1562 struct encoder_info_packet *gamut_packet)
1563 {
1564 /* For gamut we recalc checksum */
1565 if (gamut_packet->valid) {
1566 uint8_t chk_sum = 0;
1567 uint8_t *ptr;
1568 uint8_t i;
1569
1570 /*start of the Gamut data. */
1571 ptr = &gamut_packet->sb[3];
1572
1573 for (i = 0; i <= gamut_packet->sb[1]; i++)
1574 chk_sum += ptr[i];
1575
1576 gamut_packet->sb[2] = (uint8_t) (0x100 - chk_sum);
1577 }
1578 }
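/*
 * Worked example (payload assumed): if the gamut data bytes starting at sb[3]
 * sum to 0x37, the function stores 0x100 - 0x37 = 0xC9 in sb[2], so that the
 * payload plus checksum sums to 0x00 modulo 256.
 */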
1579
1580 static void set_avi_info_frame(
1581 struct encoder_info_packet *info_packet,
1582 struct pipe_ctx *pipe_ctx)
1583 {
1584 struct dc_stream_state *stream = pipe_ctx->stream;
1585 enum dc_color_space color_space = COLOR_SPACE_UNKNOWN;
1586 struct info_frame info_frame = { {0} };
1587 uint32_t pixel_encoding = 0;
1588 enum scanning_type scan_type = SCANNING_TYPE_NODATA;
1589 enum dc_aspect_ratio aspect = ASPECT_RATIO_NO_DATA;
1590 bool itc = false;
1591 uint8_t itc_value = 0;
1592 uint8_t cn0_cn1 = 0;
1593 unsigned int cn0_cn1_value = 0;
1594 uint8_t *check_sum = NULL;
1595 uint8_t byte_index = 0;
1596 union hdmi_info_packet *hdmi_info = &info_frame.avi_info_packet.info_packet_hdmi;
1597 union display_content_support support = {0};
1598 unsigned int vic = pipe_ctx->stream->timing.vic;
1599 enum dc_timing_3d_format format;
1600
1601 color_space = pipe_ctx->stream->output_color_space;
1602 if (color_space == COLOR_SPACE_UNKNOWN)
1603 color_space = (stream->timing.pixel_encoding == PIXEL_ENCODING_RGB) ?
1604 COLOR_SPACE_SRGB:COLOR_SPACE_YCBCR709;
1605
1606 /* Initialize header */
1607 hdmi_info->bits.header.info_frame_type = HDMI_INFOFRAME_TYPE_AVI;
1608 /* InfoFrameVersion_3 is defined by CEA861F (Section 6.4), but shall
1609 * not be used in HDMI 2.0 (Section 10.1) */
1610 hdmi_info->bits.header.version = 2;
1611 hdmi_info->bits.header.length = HDMI_AVI_INFOFRAME_SIZE;
1612
1613 /*
1614 * IDO-defined (Y2,Y1,Y0 = 1,1,1) shall not be used by devices built
1615 * according to HDMI 2.0 spec (Section 10.1)
1616 */
1617
1618 switch (stream->timing.pixel_encoding) {
1619 case PIXEL_ENCODING_YCBCR422:
1620 pixel_encoding = 1;
1621 break;
1622
1623 case PIXEL_ENCODING_YCBCR444:
1624 pixel_encoding = 2;
1625 break;
1626 case PIXEL_ENCODING_YCBCR420:
1627 pixel_encoding = 3;
1628 break;
1629
1630 case PIXEL_ENCODING_RGB:
1631 default:
1632 pixel_encoding = 0;
1633 }
1634
1635 /* Y0_Y1_Y2 : The pixel encoding */
1636 /* H14b AVI InfoFrame has extension on Y-field from 2 bits to 3 bits */
1637 hdmi_info->bits.Y0_Y1_Y2 = pixel_encoding;
1638
1639 /* A0 = 1 Active Format Information valid */
1640 hdmi_info->bits.A0 = ACTIVE_FORMAT_VALID;
1641
1642 /* B0, B1 = 3; Bar info data is valid */
1643 hdmi_info->bits.B0_B1 = BAR_INFO_BOTH_VALID;
1644
1645 hdmi_info->bits.SC0_SC1 = PICTURE_SCALING_UNIFORM;
1646
1647 /* S0, S1 : Underscan / Overscan */
1648 /* TODO: un-hardcode scan type */
1649 scan_type = SCANNING_TYPE_UNDERSCAN;
1650 hdmi_info->bits.S0_S1 = scan_type;
1651
1652 /* C0, C1 : Colorimetry */
1653 if (color_space == COLOR_SPACE_YCBCR709 ||
1654 color_space == COLOR_SPACE_YCBCR709_LIMITED)
1655 hdmi_info->bits.C0_C1 = COLORIMETRY_ITU709;
1656 else if (color_space == COLOR_SPACE_YCBCR601 ||
1657 color_space == COLOR_SPACE_YCBCR601_LIMITED)
1658 hdmi_info->bits.C0_C1 = COLORIMETRY_ITU601;
1659 else {
1660 hdmi_info->bits.C0_C1 = COLORIMETRY_NO_DATA;
1661 }
1662 if (color_space == COLOR_SPACE_2020_RGB_FULLRANGE ||
1663 color_space == COLOR_SPACE_2020_RGB_LIMITEDRANGE ||
1664 color_space == COLOR_SPACE_2020_YCBCR) {
1665 hdmi_info->bits.EC0_EC2 = COLORIMETRYEX_BT2020RGBYCBCR;
1666 hdmi_info->bits.C0_C1 = COLORIMETRY_EXTENDED;
1667 } else if (color_space == COLOR_SPACE_ADOBERGB) {
1668 hdmi_info->bits.EC0_EC2 = COLORIMETRYEX_ADOBERGB;
1669 hdmi_info->bits.C0_C1 = COLORIMETRY_EXTENDED;
1670 }
1671
1672 /* TODO: un-hardcode aspect ratio */
1673 aspect = stream->timing.aspect_ratio;
1674
1675 switch (aspect) {
1676 case ASPECT_RATIO_4_3:
1677 case ASPECT_RATIO_16_9:
1678 hdmi_info->bits.M0_M1 = aspect;
1679 break;
1680
1681 case ASPECT_RATIO_NO_DATA:
1682 case ASPECT_RATIO_64_27:
1683 case ASPECT_RATIO_256_135:
1684 default:
1685 hdmi_info->bits.M0_M1 = 0;
1686 }
1687
1688 /* Active Format Aspect ratio - same as Picture Aspect Ratio. */
1689 hdmi_info->bits.R0_R3 = ACTIVE_FORMAT_ASPECT_RATIO_SAME_AS_PICTURE;
1690
1691 /* TODO: un-hardcode cn0_cn1 and itc */
1692
1693 cn0_cn1 = 0;
1694 cn0_cn1_value = 0;
1695
1696 itc = true;
1697 itc_value = 1;
1698
1699 support = stream->sink->edid_caps.content_support;
1700
1701 if (itc) {
1702 if (!support.bits.valid_content_type) {
1703 cn0_cn1_value = 0;
1704 } else {
1705 if (cn0_cn1 == DISPLAY_CONTENT_TYPE_GRAPHICS) {
1706 if (support.bits.graphics_content == 1) {
1707 cn0_cn1_value = 0;
1708 }
1709 } else if (cn0_cn1 == DISPLAY_CONTENT_TYPE_PHOTO) {
1710 if (support.bits.photo_content == 1) {
1711 cn0_cn1_value = 1;
1712 } else {
1713 cn0_cn1_value = 0;
1714 itc_value = 0;
1715 }
1716 } else if (cn0_cn1 == DISPLAY_CONTENT_TYPE_CINEMA) {
1717 if (support.bits.cinema_content == 1) {
1718 cn0_cn1_value = 2;
1719 } else {
1720 cn0_cn1_value = 0;
1721 itc_value = 0;
1722 }
1723 } else if (cn0_cn1 == DISPLAY_CONTENT_TYPE_GAME) {
1724 if (support.bits.game_content == 1) {
1725 cn0_cn1_value = 3;
1726 } else {
1727 cn0_cn1_value = 0;
1728 itc_value = 0;
1729 }
1730 }
1731 }
1732 hdmi_info->bits.CN0_CN1 = cn0_cn1_value;
1733 hdmi_info->bits.ITC = itc_value;
1734 }
1735
1736 /* TODO : We should handle YCC quantization */
1737 /* but we do not have matrix calculation */
1738 if (stream->sink->edid_caps.qs_bit == 1 &&
1739 stream->sink->edid_caps.qy_bit == 1) {
1740 if (color_space == COLOR_SPACE_SRGB ||
1741 color_space == COLOR_SPACE_2020_RGB_FULLRANGE) {
1742 hdmi_info->bits.Q0_Q1 = RGB_QUANTIZATION_FULL_RANGE;
1743 hdmi_info->bits.YQ0_YQ1 = YYC_QUANTIZATION_FULL_RANGE;
1744 } else if (color_space == COLOR_SPACE_SRGB_LIMITED ||
1745 color_space == COLOR_SPACE_2020_RGB_LIMITEDRANGE) {
1746 hdmi_info->bits.Q0_Q1 = RGB_QUANTIZATION_LIMITED_RANGE;
1747 hdmi_info->bits.YQ0_YQ1 = YYC_QUANTIZATION_LIMITED_RANGE;
1748 } else {
1749 hdmi_info->bits.Q0_Q1 = RGB_QUANTIZATION_DEFAULT_RANGE;
1750 hdmi_info->bits.YQ0_YQ1 = YYC_QUANTIZATION_LIMITED_RANGE;
1751 }
1752 } else {
1753 hdmi_info->bits.Q0_Q1 = RGB_QUANTIZATION_DEFAULT_RANGE;
1754 hdmi_info->bits.YQ0_YQ1 = YYC_QUANTIZATION_LIMITED_RANGE;
1755 }
1756
1757 ///VIC
1758 format = stream->timing.timing_3d_format;
1759 /*todo, add 3DStereo support*/
1760 if (format != TIMING_3D_FORMAT_NONE) {
1761 // Based on HDMI specs hdmi vic needs to be converted to cea vic when 3D is enabled
1762 switch (pipe_ctx->stream->timing.hdmi_vic) {
1763 case 1:
1764 vic = 95;
1765 break;
1766 case 2:
1767 vic = 94;
1768 break;
1769 case 3:
1770 vic = 93;
1771 break;
1772 case 4:
1773 vic = 98;
1774 break;
1775 default:
1776 break;
1777 }
1778 }
1779 hdmi_info->bits.VIC0_VIC7 = vic;
1780
1781 /* pixel repetition
1782  * PR0 - PR3 start from 0, whereas pHwPathMode->mode.timing.flags.pixel
1783  * repetition starts from 1 */
1784 hdmi_info->bits.PR0_PR3 = 0;
1785
1786 /* Bar Info
1787 * barTop: Line Number of End of Top Bar.
1788 * barBottom: Line Number of Start of Bottom Bar.
1789 * barLeft: Pixel Number of End of Left Bar.
1790 * barRight: Pixel Number of Start of Right Bar. */
1791 hdmi_info->bits.bar_top = stream->timing.v_border_top;
1792 hdmi_info->bits.bar_bottom = (stream->timing.v_total
1793 - stream->timing.v_border_bottom + 1);
1794 hdmi_info->bits.bar_left = stream->timing.h_border_left;
1795 hdmi_info->bits.bar_right = (stream->timing.h_total
1796 - stream->timing.h_border_right + 1);
1797
1798 /* check_sum - Calculate AFMT_AVI_INFO0 ~ AFMT_AVI_INFO3 */
1799 check_sum = &info_frame.avi_info_packet.info_packet_hdmi.packet_raw_data.sb[0];
1800
1801 *check_sum = HDMI_INFOFRAME_TYPE_AVI + HDMI_AVI_INFOFRAME_SIZE + 2;
1802
1803 for (byte_index = 1; byte_index <= HDMI_AVI_INFOFRAME_SIZE; byte_index++)
1804 *check_sum += hdmi_info->packet_raw_data.sb[byte_index];
1805
1806 /* one-byte complement: all header and payload bytes then sum to zero (mod 256) */
1807 *check_sum = (uint8_t) (0x100 - *check_sum);
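/* Example (assuming the CTA-861 values HDMI_INFOFRAME_TYPE_AVI = 0x82 and
 * HDMI_AVI_INFOFRAME_SIZE = 13): the header contributes 0x82 + 0x02 (version)
 * + 0x0D (length) = 0x91; adding the 13 payload bytes and taking
 * (0x100 - sum) & 0xFF yields a checksum such that all header and payload
 * bytes sum to zero modulo 256. */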
1808
1809 /* Store in hw_path_mode */
1810 info_packet->hb0 = hdmi_info->packet_raw_data.hb0;
1811 info_packet->hb1 = hdmi_info->packet_raw_data.hb1;
1812 info_packet->hb2 = hdmi_info->packet_raw_data.hb2;
1813
1814 for (byte_index = 0; byte_index < sizeof(info_frame.avi_info_packet.
1815 info_packet_hdmi.packet_raw_data.sb); byte_index++)
1816 info_packet->sb[byte_index] = info_frame.avi_info_packet.
1817 info_packet_hdmi.packet_raw_data.sb[byte_index];
1818
1819 info_packet->valid = true;
1820 }
1821
1822 static void set_vendor_info_packet(
1823 struct encoder_info_packet *info_packet,
1824 struct dc_stream_state *stream)
1825 {
1826 uint32_t length = 0;
1827 bool hdmi_vic_mode = false;
1828 uint8_t checksum = 0;
1829 uint32_t i = 0;
1830 enum dc_timing_3d_format format;
1831 /* TODO: can be different depending on packet content */
1832 /* unsigned int length = pPathMode->dolbyVision ? 24 : 5; */
1833
1834 info_packet->valid = false;
1835
1836 format = stream->timing.timing_3d_format;
1837 if (stream->view_format == VIEW_3D_FORMAT_NONE)
1838 format = TIMING_3D_FORMAT_NONE;
1839
1840 /* Can be different depending on packet content */
1841 length = 5;
1842
1843 if (stream->timing.hdmi_vic != 0
1844 && stream->timing.h_total >= 3840
1845 && stream->timing.v_total >= 2160)
1846 hdmi_vic_mode = true;
1847
1848 /* According to HDMI 1.4a CTS, VSIF should be sent
1849 * for both 3D stereo and HDMI VIC modes.
1850 * For all other modes, no VSIF is sent. */
1851
1852 if (format == TIMING_3D_FORMAT_NONE && !hdmi_vic_mode)
1853 return;
1854
1855 /* 24bit IEEE Registration identifier (0x000c03). LSB first. */
1856 info_packet->sb[1] = 0x03;
1857 info_packet->sb[2] = 0x0C;
1858 info_packet->sb[3] = 0x00;
1859
1860 /* PB4: 5 lower bits = 0 (reserved). 3 higher bits = HDMI_Video_Format.
1861 * The value for HDMI_Video_Format are:
1862 * 0x0 (0b000) - No additional HDMI video format is presented in this
1863 * packet
1864 * 0x1 (0b001) - Extended resolution format present. 1 byte of HDMI_VIC
1865 * parameter follows
1866 * 0x2 (0b010) - 3D format indication present. 3D_Structure and
1867 * potentially 3D_Ext_Data follows
1868 * 0x3..0x7 (0b011..0b111) - reserved for future use */
1869 if (format != TIMING_3D_FORMAT_NONE)
1870 info_packet->sb[4] = (2 << 5);
1871 else if (hdmi_vic_mode)
1872 info_packet->sb[4] = (1 << 5);
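/* e.g. PB4 = 0x40 (0b010 << 5) for a 3D format, or 0x20 (0b001 << 5) for an
 * HDMI VIC extended resolution mode. */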
1873
1874 /* PB5: If PB4 claims 3D timing (HDMI_Video_Format = 0x2):
1875 * 4 lower bits = 0 (reserved). 4 higher bits = 3D_Structure.
1876 * The value for 3D_Structure are:
1877 * 0x0 - Frame Packing
1878 * 0x1 - Field Alternative
1879 * 0x2 - Line Alternative
1880 * 0x3 - Side-by-Side (full)
1881 * 0x4 - L + depth
1882 * 0x5 - L + depth + graphics + graphics-depth
1883 * 0x6 - Top-and-Bottom
1884 * 0x7 - Reserved for future use
1885 * 0x8 - Side-by-Side (Half)
1886 * 0x9..0xE - Reserved for future use
1887 * 0xF - Not used */
1888 switch (format) {
1889 case TIMING_3D_FORMAT_HW_FRAME_PACKING:
1890 case TIMING_3D_FORMAT_SW_FRAME_PACKING:
1891 info_packet->sb[5] = (0x0 << 4);
1892 break;
1893
1894 case TIMING_3D_FORMAT_SIDE_BY_SIDE:
1895 case TIMING_3D_FORMAT_SBS_SW_PACKED:
1896 info_packet->sb[5] = (0x8 << 4);
1897 length = 6;
1898 break;
1899
1900 case TIMING_3D_FORMAT_TOP_AND_BOTTOM:
1901 case TIMING_3D_FORMAT_TB_SW_PACKED:
1902 info_packet->sb[5] = (0x6 << 4);
1903 break;
1904
1905 default:
1906 break;
1907 }
1908
1909 /*PB5: If PB4 is set to 0x1 (extended resolution format)
1910 * fill PB5 with the correct HDMI VIC code */
1911 if (hdmi_vic_mode)
1912 info_packet->sb[5] = stream->timing.hdmi_vic;
1913
1914 /* Header */
1915 info_packet->hb0 = HDMI_INFOFRAME_TYPE_VENDOR; /* VSIF packet type. */
1916 info_packet->hb1 = 0x01; /* Version */
1917
1918 /* 4 lower bits = Length, 4 higher bits = 0 (reserved) */
1919 info_packet->hb2 = (uint8_t) (length);
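/* length stays 5 for the basic VSIF (3-byte OUI plus PB4 and PB5); the
 * side-by-side case above bumps it to 6, presumably so a 3D_Ext_Data byte
 * (PB6) is accounted for. */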
1920
1921 /* Calculate checksum */
1922 checksum = 0;
1923 checksum += info_packet->hb0;
1924 checksum += info_packet->hb1;
1925 checksum += info_packet->hb2;
1926
1927 for (i = 1; i <= length; i++)
1928 checksum += info_packet->sb[i];
1929
1930 info_packet->sb[0] = (uint8_t) (0x100 - checksum);
1931
1932 info_packet->valid = true;
1933 }
1934
1935 static void set_spd_info_packet(
1936 struct encoder_info_packet *info_packet,
1937 struct dc_stream_state *stream)
1938 {
1939 /* SPD info packet for FreeSync */
1940
1941 unsigned char checksum = 0;
1942 unsigned int idx, payload_size = 0;
1943
1944 /* Check if Freesync is supported. Return if false. If true,
1945 * set the corresponding bit in the info packet
1946 */
1947 if (stream->freesync_ctx.supported == false)
1948 return;
1949
1950 if (dc_is_hdmi_signal(stream->signal)) {
1951
1952 /* HEADER */
1953
1954 /* HB0 = Packet Type = 0x83 (Source Product
1955 * Descriptor InfoFrame)
1956 */
1957 info_packet->hb0 = HDMI_INFOFRAME_TYPE_SPD;
1958
1959 /* HB1 = Version = 0x01 */
1960 info_packet->hb1 = 0x01;
1961
1962 /* HB2 = [Bits 7:5 = 0] [Bits 4:0 = Length = 0x08] */
1963 info_packet->hb2 = 0x08;
1964
1965 payload_size = 0x08;
1966
1967 } else if (dc_is_dp_signal(stream->signal)) {
1968
1969 /* HEADER */
1970
1971 /* HB0 = Secondary-data Packet ID = 0 - Only non-zero
1972 * when used to associate audio related info packets
1973 */
1974 info_packet->hb0 = 0x00;
1975
1976 /* HB1 = Packet Type = 0x83 (Source Product
1977 * Descriptor InfoFrame)
1978 */
1979 info_packet->hb1 = HDMI_INFOFRAME_TYPE_SPD;
1980
1981 /* HB2 = [Bits 7:0 = Least significant eight bits -
1982 * For INFOFRAME, the value must be 1Bh]
1983 */
1984 info_packet->hb2 = 0x1B;
1985
1986 /* HB3 = [Bits 7:2 = INFOFRAME SDP Version Number = 0x1]
1987 * [Bits 1:0 = Most significant two bits = 0x00]
1988 */
1989 info_packet->hb3 = 0x04;
1990
1991 payload_size = 0x1B;
1992 }
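/* The payload below is the AMD vendor-specific FreeSync body; the checksum
 * loop at the end runs over exactly payload_size bytes (8 for the HDMI SPD
 * case, 0x1B for the DP SDP case set above). */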
1993
1994 /* PB1 = 0x1A (24bit AMD IEEE OUI (0x00001A) - Byte 0) */
1995 info_packet->sb[1] = 0x1A;
1996
1997 /* PB2 = 0x00 (24bit AMD IEEE OUI (0x00001A) - Byte 1) */
1998 info_packet->sb[2] = 0x00;
1999
2000 /* PB3 = 0x00 (24bit AMD IEEE OUI (0x00001A) - Byte 2) */
2001 info_packet->sb[3] = 0x00;
2002
2003 /* PB4 = Reserved */
2004 info_packet->sb[4] = 0x00;
2005
2006 /* PB5 = Reserved */
2007 info_packet->sb[5] = 0x00;
2008
2009 /* PB6 = [Bits 7:3 = Reserved] */
2010 info_packet->sb[6] = 0x00;
2011
2012 if (stream->freesync_ctx.supported == true)
2013 /* PB6 = [Bit 0 = FreeSync Supported] */
2014 info_packet->sb[6] |= 0x01;
2015
2016 if (stream->freesync_ctx.enabled == true)
2017 /* PB6 = [Bit 1 = FreeSync Enabled] */
2018 info_packet->sb[6] |= 0x02;
2019
2020 if (stream->freesync_ctx.active == true)
2021 /* PB6 = [Bit 2 = FreeSync Active] */
2022 info_packet->sb[6] |= 0x04;
2023
2024 /* PB7 = FreeSync Minimum refresh rate (Hz) */
2025 info_packet->sb[7] = (unsigned char) (stream->freesync_ctx.
2026 min_refresh_in_micro_hz / 1000000);
2027
2028 /* PB8 = FreeSync Maximum refresh rate (Hz)
2029 *
2030 * Note: We do not use the maximum capable refresh rate
2031 * of the panel, because we should never go above the field
2032 * rate of the mode timing set.
2033 */
2034 info_packet->sb[8] = (unsigned char) (stream->freesync_ctx.
2035 nominal_refresh_in_micro_hz / 1000000);
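/* Rates are reported in whole Hz, e.g. a nominal_refresh_in_micro_hz of
 * 144000000 becomes PB8 = 144. */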
2036
2037 /* PB9 - PB27 = Reserved */
2038 for (idx = 9; idx <= 27; idx++)
2039 info_packet->sb[idx] = 0x00;
2040
2041 /* Calculate checksum */
2042 checksum += info_packet->hb0;
2043 checksum += info_packet->hb1;
2044 checksum += info_packet->hb2;
2045 checksum += info_packet->hb3;
2046
2047 for (idx = 1; idx <= payload_size; idx++)
2048 checksum += info_packet->sb[idx];
2049
2050 /* PB0 = Checksum (one byte complement) */
2051 info_packet->sb[0] = (unsigned char) (0x100 - checksum);
2052
2053 info_packet->valid = true;
2054 }
2055
2056 static void set_hdr_static_info_packet(
2057 struct encoder_info_packet *info_packet,
2058 struct dc_plane_state *plane_state,
2059 struct dc_stream_state *stream)
2060 {
2061 uint16_t i = 0;
2062 enum signal_type signal = stream->signal;
2063 struct dc_hdr_static_metadata hdr_metadata;
2064 uint32_t data;
2065
2066 if (!plane_state)
2067 return;
2068
2069 hdr_metadata = plane_state->hdr_static_ctx;
2070
2071 if (!hdr_metadata.hdr_supported)
2072 return;
2073
2074 if (dc_is_hdmi_signal(signal)) {
2075 info_packet->valid = true;
2076
2077 info_packet->hb0 = 0x87;
2078 info_packet->hb1 = 0x01;
2079 info_packet->hb2 = 0x1A;
2080 i = 1;
2081 } else if (dc_is_dp_signal(signal)) {
2082 info_packet->valid = true;
2083
2084 info_packet->hb0 = 0x00;
2085 info_packet->hb1 = 0x87;
2086 info_packet->hb2 = 0x1D;
2087 info_packet->hb3 = (0x13 << 2);
2088 i = 2;
2089 }
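/* The payload index starts at 1 for HDMI (sb[0] is reserved for the checksum
 * computed below) and at 2 for DP (sb[0] and sb[1] are filled in at the end
 * of this function). */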
2090
2091 data = hdr_metadata.is_hdr;
2092 info_packet->sb[i++] = data ? 0x02 : 0x00;
2093 info_packet->sb[i++] = 0x00;
2094
2095 data = hdr_metadata.chromaticity_green_x / 2;
2096 info_packet->sb[i++] = data & 0xFF;
2097 info_packet->sb[i++] = (data & 0xFF00) >> 8;
2098
2099 data = hdr_metadata.chromaticity_green_y / 2;
2100 info_packet->sb[i++] = data & 0xFF;
2101 info_packet->sb[i++] = (data & 0xFF00) >> 8;
2102
2103 data = hdr_metadata.chromaticity_blue_x / 2;
2104 info_packet->sb[i++] = data & 0xFF;
2105 info_packet->sb[i++] = (data & 0xFF00) >> 8;
2106
2107 data = hdr_metadata.chromaticity_blue_y / 2;
2108 info_packet->sb[i++] = data & 0xFF;
2109 info_packet->sb[i++] = (data & 0xFF00) >> 8;
2110
2111 data = hdr_metadata.chromaticity_red_x / 2;
2112 info_packet->sb[i++] = data & 0xFF;
2113 info_packet->sb[i++] = (data & 0xFF00) >> 8;
2114
2115 data = hdr_metadata.chromaticity_red_y / 2;
2116 info_packet->sb[i++] = data & 0xFF;
2117 info_packet->sb[i++] = (data & 0xFF00) >> 8;
2118
2119 data = hdr_metadata.chromaticity_white_point_x / 2;
2120 info_packet->sb[i++] = data & 0xFF;
2121 info_packet->sb[i++] = (data & 0xFF00) >> 8;
2122
2123 data = hdr_metadata.chromaticity_white_point_y / 2;
2124 info_packet->sb[i++] = data & 0xFF;
2125 info_packet->sb[i++] = (data & 0xFF00) >> 8;
2126
2127 data = hdr_metadata.max_luminance;
2128 info_packet->sb[i++] = data & 0xFF;
2129 info_packet->sb[i++] = (data & 0xFF00) >> 8;
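/* Each metadata value is packed as a 16-bit little-endian field, e.g. a
 * max_luminance of 1000 (0x03E8) is sent as 0xE8 followed by 0x03. */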
2130
2131 data = hdr_metadata.min_luminance;
2132 info_packet->sb[i++] = data & 0xFF;
2133 info_packet->sb[i++] = (data & 0xFF00) >> 8;
2134
2135 data = hdr_metadata.maximum_content_light_level;
2136 info_packet->sb[i++] = data & 0xFF;
2137 info_packet->sb[i++] = (data & 0xFF00) >> 8;
2138
2139 data = hdr_metadata.maximum_frame_average_light_level;
2140 info_packet->sb[i++] = data & 0xFF;
2141 info_packet->sb[i++] = (data & 0xFF00) >> 8;
2142
2143 if (dc_is_hdmi_signal(signal)) {
2144 uint32_t checksum = 0;
2145
2146 checksum += info_packet->hb0;
2147 checksum += info_packet->hb1;
2148 checksum += info_packet->hb2;
2149
2150 for (i = 1; i <= info_packet->hb2; i++)
2151 checksum += info_packet->sb[i];
2152
2153 info_packet->sb[0] = 0x100 - checksum;
2154 } else if (dc_is_dp_signal(signal)) {
2155 info_packet->sb[0] = 0x01;
2156 info_packet->sb[1] = 0x1A;
2157 }
2158 }
2159
2160 static void set_vsc_info_packet(
2161 struct encoder_info_packet *info_packet,
2162 struct dc_stream_state *stream)
2163 {
2164 unsigned int vscPacketRevision = 0;
2165 unsigned int i;
2166
2167 if (stream->sink->link->psr_enabled) {
2168 vscPacketRevision = 2;
2169 }
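/* Revision 2 corresponds to the VSC SDP used for PSR on eDP 1.3+ sinks;
 * other uses (3D stereo, extended pixel encoding/colorimetry) are not
 * handled yet (see the TODO at the end of this function). */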
2170
2171 /* VSC packet not needed based on the features
2172 * supported by this DP display
2173 */
2174 if (vscPacketRevision == 0)
2175 return;
2176
2177 if (vscPacketRevision == 0x2) {
2178 /* Secondary-data Packet ID = 0*/
2179 info_packet->hb0 = 0x00;
2180 /* 07h - Packet Type Value indicating Video
2181 * Stream Configuration packet
2182 */
2183 info_packet->hb1 = 0x07;
2184 /* 02h = VSC SDP supporting 3D stereo and PSR
2185 * (applies to eDP v1.3 or higher).
2186 */
2187 info_packet->hb2 = 0x02;
2188 /* 08h = VSC packet supporting 3D stereo + PSR
2189 * (HB2 = 02h).
2190 */
2191 info_packet->hb3 = 0x08;
2192
2193 for (i = 0; i < 28; i++)
2194 info_packet->sb[i] = 0;
2195
2196 info_packet->valid = true;
2197 }
2198
2199 /*TODO: stereo 3D support and extend pixel encoding colorimetry*/
2200 }
2201
2202 void dc_resource_validate_ctx_destruct(struct validate_context *context)
2203 {
2204 int i, j;
2205
2206 for (i = 0; i < context->stream_count; i++) {
2207 for (j = 0; j < context->stream_status[i].plane_count; j++)
2208 dc_plane_state_release(
2209 context->stream_status[i].plane_states[j]);
2210
2211 context->stream_status[i].plane_count = 0;
2212 dc_stream_release(context->streams[i]);
2213 context->streams[i] = NULL;
2214 }
2215 }
2216
2217 /*
2218 * Copy src_ctx into dst_ctx and retain all plane states and streams referenced
2219 * by the src_ctx
2220 */
2221 void dc_resource_validate_ctx_copy_construct(
2222 const struct validate_context *src_ctx,
2223 struct validate_context *dst_ctx)
2224 {
2225 int i, j;
2226 int ref_count = dst_ctx->ref_count;
2227
2228 *dst_ctx = *src_ctx;
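/* The struct assignment above is a shallow copy, so top_pipe/bottom_pipe
 * still point into src_ctx; re-base them onto dst_ctx's pipe_ctx array
 * below using pipe_idx. */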
2229
2230 for (i = 0; i < MAX_PIPES; i++) {
2231 struct pipe_ctx *cur_pipe = &dst_ctx->res_ctx.pipe_ctx[i];
2232
2233 if (cur_pipe->top_pipe)
2234 cur_pipe->top_pipe = &dst_ctx->res_ctx.pipe_ctx[cur_pipe->top_pipe->pipe_idx];
2235
2236 if (cur_pipe->bottom_pipe)
2237 cur_pipe->bottom_pipe = &dst_ctx->res_ctx.pipe_ctx[cur_pipe->bottom_pipe->pipe_idx];
2238
2239 }
2240
2241 for (i = 0; i < dst_ctx->stream_count; i++) {
2242 dc_stream_retain(dst_ctx->streams[i]);
2243 for (j = 0; j < dst_ctx->stream_status[i].plane_count; j++)
2244 dc_plane_state_retain(
2245 dst_ctx->stream_status[i].plane_states[j]);
2246 }
2247
2248 /* context refcount should not be overridden */
2249 dst_ctx->ref_count = ref_count;
2250
2251 }
2252
2253 struct clock_source *dc_resource_find_first_free_pll(
2254 struct resource_context *res_ctx,
2255 const struct resource_pool *pool)
2256 {
2257 int i;
2258
2259 for (i = 0; i < pool->clk_src_count; ++i) {
2260 if (res_ctx->clock_source_ref_count[i] == 0)
2261 return pool->clock_sources[i];
2262 }
2263
2264 return NULL;
2265 }
2266
2267 void resource_build_info_frame(struct pipe_ctx *pipe_ctx)
2268 {
2269 enum signal_type signal = SIGNAL_TYPE_NONE;
2270 struct encoder_info_frame *info = &pipe_ctx->encoder_info_frame;
2271
2272 /* default all packets to invalid */
2273 info->avi.valid = false;
2274 info->gamut.valid = false;
2275 info->vendor.valid = false;
2276 info->spd.valid = false;
2277 info->hdrsmd.valid = false;
2278 info->vsc.valid = false;
2279
2280 signal = pipe_ctx->stream->signal;
2281
2282 /* HDMI and DP have different info packets */
2283 if (dc_is_hdmi_signal(signal)) {
2284 set_avi_info_frame(&info->avi, pipe_ctx);
2285
2286 set_vendor_info_packet(&info->vendor, pipe_ctx->stream);
2287
2288 set_spd_info_packet(&info->spd, pipe_ctx->stream);
2289
2290 set_hdr_static_info_packet(&info->hdrsmd,
2291 pipe_ctx->plane_state, pipe_ctx->stream);
2292
2293 } else if (dc_is_dp_signal(signal)) {
2294 set_vsc_info_packet(&info->vsc, pipe_ctx->stream);
2295
2296 set_spd_info_packet(&info->spd, pipe_ctx->stream);
2297
2298 set_hdr_static_info_packet(&info->hdrsmd,
2299 pipe_ctx->plane_state, pipe_ctx->stream);
2300 }
2301
2302 patch_gamut_packet_checksum(&info->gamut);
2303 }
2304
2305 enum dc_status resource_map_clock_resources(
2306 const struct core_dc *dc,
2307 struct validate_context *context,
2308 struct validate_context *old_context)
2309 {
2310 int i, j;
2311 const struct resource_pool *pool = dc->res_pool;
2312
2313 /* acquire new resources */
2314 for (i = 0; i < context->stream_count; i++) {
2315 struct dc_stream_state *stream = context->streams[i];
2316
2317 if (old_context && resource_is_stream_unchanged(old_context, stream))
2318 continue;
2319
2320 for (j = 0; j < MAX_PIPES; j++) {
2321 struct pipe_ctx *pipe_ctx =
2322 &context->res_ctx.pipe_ctx[j];
2323
2324 if (context->res_ctx.pipe_ctx[j].stream != stream)
2325 continue;
2326
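/* Clock source policy: DP and virtual streams share the dedicated DP clock
 * source; other signals try to share an already-used PLL (unless
 * disable_disp_pll_sharing is set) and otherwise fall back to the first
 * free PLL. */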
2327 if (dc_is_dp_signal(pipe_ctx->stream->signal)
2328 || pipe_ctx->stream->signal == SIGNAL_TYPE_VIRTUAL)
2329 pipe_ctx->clock_source = pool->dp_clock_source;
2330 else {
2331 pipe_ctx->clock_source = NULL;
2332
2333 if (!dc->public.config.disable_disp_pll_sharing)
2334 resource_find_used_clk_src_for_sharing(
2335 &context->res_ctx,
2336 pipe_ctx);
2337
2338 if (pipe_ctx->clock_source == NULL)
2339 pipe_ctx->clock_source =
2340 dc_resource_find_first_free_pll(
2341 &context->res_ctx,
2342 pool);
2343 }
2344
2345 if (pipe_ctx->clock_source == NULL)
2346 return DC_NO_CLOCK_SOURCE_RESOURCE;
2347
2348 resource_reference_clock_source(
2349 &context->res_ctx, pool,
2350 pipe_ctx->clock_source);
2351
2352 /* only one clock source per stream regardless of MPO */
2353 break;
2354 }
2355 }
2356
2357 return DC_OK;
2358 }
2359
2360 /*
2361 * Note: We need to disable the output if clock sources change,
2362 * since the VBIOS optimizes PHY programming and does not apply the
2363 * change when the PHY is reprogrammed without first being disabled.
2364 */
2365 bool pipe_need_reprogram(
2366 struct pipe_ctx *pipe_ctx_old,
2367 struct pipe_ctx *pipe_ctx)
2368 {
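/* A reprogram is required whenever the sink, signal type, audio, clock
 * source, stream encoder or timing bound to this pipe differs between the
 * old and new context. */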
2369 if (!pipe_ctx_old->stream)
2370 return false;
2371
2372 if (pipe_ctx_old->stream->sink != pipe_ctx->stream->sink)
2373 return true;
2374
2375 if (pipe_ctx_old->stream->signal != pipe_ctx->stream->signal)
2376 return true;
2377
2378 if (pipe_ctx_old->audio != pipe_ctx->audio)
2379 return true;
2380
2381 if (pipe_ctx_old->clock_source != pipe_ctx->clock_source
2382 && pipe_ctx_old->stream != pipe_ctx->stream)
2383 return true;
2384
2385 if (pipe_ctx_old->stream_enc != pipe_ctx->stream_enc)
2386 return true;
2387
2388 if (is_timing_changed(pipe_ctx_old->stream, pipe_ctx->stream))
2389 return true;
2390
2391
2392 return false;
2393 }
2394
2395 void resource_build_bit_depth_reduction_params(struct dc_stream_state *stream,
2396 struct bit_depth_reduction_params *fmt_bit_depth)
2397 {
2398 enum dc_dither_option option = stream->dither_option;
2399 enum dc_pixel_encoding pixel_encoding =
2400 stream->timing.pixel_encoding;
2401
2402 memset(fmt_bit_depth, 0, sizeof(*fmt_bit_depth));
2403
2404 if (option == DITHER_OPTION_DISABLE)
2405 return;
2406
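/* The depth fields below appear to encode the target bit depth as
 * 0 = 6 bpc, 1 = 8 bpc, 2 = 10 bpc for truncation, spatial dither and
 * frame modulation alike. */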
2407 if (option == DITHER_OPTION_TRUN6) {
2408 fmt_bit_depth->flags.TRUNCATE_ENABLED = 1;
2409 fmt_bit_depth->flags.TRUNCATE_DEPTH = 0;
2410 } else if (option == DITHER_OPTION_TRUN8 ||
2411 option == DITHER_OPTION_TRUN8_SPATIAL6 ||
2412 option == DITHER_OPTION_TRUN8_FM6) {
2413 fmt_bit_depth->flags.TRUNCATE_ENABLED = 1;
2414 fmt_bit_depth->flags.TRUNCATE_DEPTH = 1;
2415 } else if (option == DITHER_OPTION_TRUN10 ||
2416 option == DITHER_OPTION_TRUN10_SPATIAL6 ||
2417 option == DITHER_OPTION_TRUN10_SPATIAL8 ||
2418 option == DITHER_OPTION_TRUN10_FM8 ||
2419 option == DITHER_OPTION_TRUN10_FM6 ||
2420 option == DITHER_OPTION_TRUN10_SPATIAL8_FM6) {
2421 fmt_bit_depth->flags.TRUNCATE_ENABLED = 1;
2422 fmt_bit_depth->flags.TRUNCATE_DEPTH = 2;
2423 }
2424
2425 /* Special case: the formatter can only reduce by 4 bits at most.
2426 * When reducing from 12 to 6 bits,
2427 * HW recommends we use truncation with round mode
2428 * (if we did nothing, truncation to 10 bits would be used).
2429 * Note that any 12->10 bit reduction is ignored prior to DCE8,
2430 * as the input was already 10 bits.
2431 */
2432 if (option == DITHER_OPTION_SPATIAL6_FRAME_RANDOM ||
2433 option == DITHER_OPTION_SPATIAL6 ||
2434 option == DITHER_OPTION_FM6) {
2435 fmt_bit_depth->flags.TRUNCATE_ENABLED = 1;
2436 fmt_bit_depth->flags.TRUNCATE_DEPTH = 2;
2437 fmt_bit_depth->flags.TRUNCATE_MODE = 1;
2438 }
2439
2440 /* spatial dither
2441 * note that spatial modes 1-3 are never used
2442 */
2443 if (option == DITHER_OPTION_SPATIAL6_FRAME_RANDOM ||
2444 option == DITHER_OPTION_SPATIAL6 ||
2445 option == DITHER_OPTION_TRUN10_SPATIAL6 ||
2446 option == DITHER_OPTION_TRUN8_SPATIAL6) {
2447 fmt_bit_depth->flags.SPATIAL_DITHER_ENABLED = 1;
2448 fmt_bit_depth->flags.SPATIAL_DITHER_DEPTH = 0;
2449 fmt_bit_depth->flags.HIGHPASS_RANDOM = 1;
2450 fmt_bit_depth->flags.RGB_RANDOM =
2451 (pixel_encoding == PIXEL_ENCODING_RGB) ? 1 : 0;
2452 } else if (option == DITHER_OPTION_SPATIAL8_FRAME_RANDOM ||
2453 option == DITHER_OPTION_SPATIAL8 ||
2454 option == DITHER_OPTION_SPATIAL8_FM6 ||
2455 option == DITHER_OPTION_TRUN10_SPATIAL8 ||
2456 option == DITHER_OPTION_TRUN10_SPATIAL8_FM6) {
2457 fmt_bit_depth->flags.SPATIAL_DITHER_ENABLED = 1;
2458 fmt_bit_depth->flags.SPATIAL_DITHER_DEPTH = 1;
2459 fmt_bit_depth->flags.HIGHPASS_RANDOM = 1;
2460 fmt_bit_depth->flags.RGB_RANDOM =
2461 (pixel_encoding == PIXEL_ENCODING_RGB) ? 1 : 0;
2462 } else if (option == DITHER_OPTION_SPATIAL10_FRAME_RANDOM ||
2463 option == DITHER_OPTION_SPATIAL10 ||
2464 option == DITHER_OPTION_SPATIAL10_FM8 ||
2465 option == DITHER_OPTION_SPATIAL10_FM6) {
2466 fmt_bit_depth->flags.SPATIAL_DITHER_ENABLED = 1;
2467 fmt_bit_depth->flags.SPATIAL_DITHER_DEPTH = 2;
2468 fmt_bit_depth->flags.HIGHPASS_RANDOM = 1;
2469 fmt_bit_depth->flags.RGB_RANDOM =
2470 (pixel_encoding == PIXEL_ENCODING_RGB) ? 1 : 0;
2471 }
2472
2473 if (option == DITHER_OPTION_SPATIAL6 ||
2474 option == DITHER_OPTION_SPATIAL8 ||
2475 option == DITHER_OPTION_SPATIAL10) {
2476 fmt_bit_depth->flags.FRAME_RANDOM = 0;
2477 } else {
2478 fmt_bit_depth->flags.FRAME_RANDOM = 1;
2479 }
2480
2481 /*
2482  * temporal dither
2483  */
2484 if (option == DITHER_OPTION_FM6 ||
2485 option == DITHER_OPTION_SPATIAL8_FM6 ||
2486 option == DITHER_OPTION_SPATIAL10_FM6 ||
2487 option == DITHER_OPTION_TRUN10_FM6 ||
2488 option == DITHER_OPTION_TRUN8_FM6 ||
2489 option == DITHER_OPTION_TRUN10_SPATIAL8_FM6) {
2490 fmt_bit_depth->flags.FRAME_MODULATION_ENABLED = 1;
2491 fmt_bit_depth->flags.FRAME_MODULATION_DEPTH = 0;
2492 } else if (option == DITHER_OPTION_FM8 ||
2493 option == DITHER_OPTION_SPATIAL10_FM8 ||
2494 option == DITHER_OPTION_TRUN10_FM8) {
2495 fmt_bit_depth->flags.FRAME_MODULATION_ENABLED = 1;
2496 fmt_bit_depth->flags.FRAME_MODULATION_DEPTH = 1;
2497 } else if (option == DITHER_OPTION_FM10) {
2498 fmt_bit_depth->flags.FRAME_MODULATION_ENABLED = 1;
2499 fmt_bit_depth->flags.FRAME_MODULATION_DEPTH = 2;
2500 }
2501
2502 fmt_bit_depth->pixel_encoding = pixel_encoding;
2503 }
2504
2505 bool dc_validate_stream(const struct dc *dc, struct dc_stream_state *stream)
2506 {
2507 struct core_dc *core_dc = DC_TO_CORE(dc);
2508 struct dc_context *dc_ctx = core_dc->ctx;
2509 struct dc_link *link = stream->sink->link;
2510 struct timing_generator *tg = core_dc->res_pool->timing_generators[0];
2511 enum dc_status res = DC_OK;
2512
2513 calculate_phy_pix_clks(stream);
2514
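/* Validation is staged below: timing generator capabilities first, then the
 * link encoder's ability to drive this output, then the link/mode timing
 * validation. */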
2515 if (!tg->funcs->validate_timing(tg, &stream->timing))
2516 res = DC_FAIL_CONTROLLER_VALIDATE;
2517
2518 if (res == DC_OK)
2519 if (!link->link_enc->funcs->validate_output_with_stream(
2520 link->link_enc, stream))
2521 res = DC_FAIL_ENC_VALIDATE;
2522
2523 /* TODO: validate audio ASIC caps, encoder */
2524
2525 if (res == DC_OK)
2526 res = dc_link_validate_mode_timing(stream,
2527 link,
2528 &stream->timing);
2529
2530 if (res != DC_OK)
2531 DC_ERROR("Failed validation for stream %p, err:%d\n",
2532 stream, res);
2533
2534 return res == DC_OK;
2535 }
2536
2537 bool dc_validate_plane(const struct dc *dc, const struct dc_plane_state *plane_state)
2538 {
2539 struct core_dc *core_dc = DC_TO_CORE(dc);
2540
2541 /* TODO For now validates pixel format only */
2542 if (core_dc->res_pool->funcs->validate_plane)
2543 return core_dc->res_pool->funcs->validate_plane(plane_state) == DC_OK;
2544
2545 return true;
2546 }