1/*
2* Copyright 2012-15 Advanced Micro Devices, Inc.
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
13 *
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
21 *
22 * Authors: AMD
23 *
24 */
25#include "dm_services.h"
26
27#include "resource.h"
28#include "include/irq_service_interface.h"
29#include "link_encoder.h"
30#include "stream_encoder.h"
31#include "opp.h"
32#include "timing_generator.h"
33#include "transform.h"
34#include "set_mode_types.h"
35#include "virtual/virtual_stream_encoder.h"
36
37#include "dce80/dce80_resource.h"
38#include "dce100/dce100_resource.h"
39#include "dce110/dce110_resource.h"
40#include "dce112/dce112_resource.h"
41
42enum dce_version resource_parse_asic_id(struct hw_asic_id asic_id)
43{
44 enum dce_version dc_version = DCE_VERSION_UNKNOWN;
45 switch (asic_id.chip_family) {
46
47 case FAMILY_CI:
48 case FAMILY_KV:
49 dc_version = DCE_VERSION_8_0;
50 break;
51 case FAMILY_CZ:
52 dc_version = DCE_VERSION_11_0;
53 break;
54
55 case FAMILY_VI:
56 if (ASIC_REV_IS_TONGA_P(asic_id.hw_internal_rev) ||
57 ASIC_REV_IS_FIJI_P(asic_id.hw_internal_rev)) {
58 dc_version = DCE_VERSION_10_0;
59 break;
60 }
61 if (ASIC_REV_IS_POLARIS10_P(asic_id.hw_internal_rev) ||
62 ASIC_REV_IS_POLARIS11_M(asic_id.hw_internal_rev) ||
63 ASIC_REV_IS_POLARIS12_V(asic_id.hw_internal_rev)) {
64 dc_version = DCE_VERSION_11_2;
65 }
66 break;
67 default:
68 dc_version = DCE_VERSION_UNKNOWN;
69 break;
70 }
71 return dc_version;
72}
73
74struct resource_pool *dc_create_resource_pool(
75 struct core_dc *dc,
76 int num_virtual_links,
77 enum dce_version dc_version,
78 struct hw_asic_id asic_id)
79{
80
81 switch (dc_version) {
82 case DCE_VERSION_8_0:
83 return dce80_create_resource_pool(
84 num_virtual_links, dc);
85 case DCE_VERSION_10_0:
86 return dce100_create_resource_pool(
87 num_virtual_links, dc);
88 case DCE_VERSION_11_0:
89 return dce110_create_resource_pool(
90 num_virtual_links, dc, asic_id);
91 case DCE_VERSION_11_2:
92 return dce112_create_resource_pool(
93 num_virtual_links, dc);
94 default:
95 break;
96 }
97
 98 return NULL;
99}
100
101void dc_destroy_resource_pool(struct core_dc *dc)
102{
103 if (dc) {
104 if (dc->res_pool)
105 dc->res_pool->funcs->destroy(&dc->res_pool);
106
107 if (dc->hwseq)
108 dm_free(dc->hwseq);
109 }
110}
111
112static void update_num_audio(
113 const struct resource_straps *straps,
114 unsigned int *num_audio,
115 struct audio_support *aud_support)
116{
117 if (straps->hdmi_disable == 0) {
118 aud_support->hdmi_audio_native = true;
119 aud_support->hdmi_audio_on_dongle = true;
120 aud_support->dp_audio = true;
121 } else {
122 if (straps->dc_pinstraps_audio & 0x2) {
123 aud_support->hdmi_audio_on_dongle = true;
124 aud_support->dp_audio = true;
125 } else {
126 aud_support->dp_audio = true;
127 }
128 }
129
130 switch (straps->audio_stream_number) {
131 case 0: /* multi streams supported */
132 break;
133 case 1: /* multi streams not supported */
134 *num_audio = 1;
135 break;
136 default:
137 DC_ERR("DC: unexpected audio fuse!\n");
 138 }
139}
140
141bool resource_construct(
142 unsigned int num_virtual_links,
143 struct core_dc *dc,
144 struct resource_pool *pool,
145 const struct resource_create_funcs *create_funcs)
146{
147 struct dc_context *ctx = dc->ctx;
148 const struct resource_caps *caps = pool->res_cap;
149 int i;
150 unsigned int num_audio = caps->num_audio;
151 struct resource_straps straps = {0};
152
153 if (create_funcs->read_dce_straps)
154 create_funcs->read_dce_straps(dc->ctx, &straps);
155
156 pool->audio_count = 0;
157 if (create_funcs->create_audio) {
158 /* find the total number of streams available via the
159 * AZALIA_F0_CODEC_PIN_CONTROL_RESPONSE_CONFIGURATION_DEFAULT
160 * registers (one for each pin) starting from pin 1
161 * up to the max number of audio pins.
162 * We stop on the first pin where
163 * PORT_CONNECTIVITY == 1 (as instructed by HW team).
164 */
165 update_num_audio(&straps, &num_audio, &pool->audio_support);
166 for (i = 0; i < pool->pipe_count && i < num_audio; i++) {
167 struct audio *aud = create_funcs->create_audio(ctx, i);
168
169 if (aud == NULL) {
170 DC_ERR("DC: failed to create audio!\n");
171 return false;
172 }
173
174 if (!aud->funcs->endpoint_valid(aud)) {
175 aud->funcs->destroy(&aud);
176 break;
177 }
178
179 pool->audios[i] = aud;
180 pool->audio_count++;
181 }
182 }
183
184 pool->stream_enc_count = 0;
185 if (create_funcs->create_stream_encoder) {
186 for (i = 0; i < caps->num_stream_encoder; i++) {
187 pool->stream_enc[i] = create_funcs->create_stream_encoder(i, ctx);
188 if (pool->stream_enc[i] == NULL)
189 DC_ERR("DC: failed to create stream_encoder!\n");
190 pool->stream_enc_count++;
191 }
192 }
193
194 for (i = 0; i < num_virtual_links; i++) {
195 pool->stream_enc[pool->stream_enc_count] =
196 virtual_stream_encoder_create(
197 ctx, ctx->dc_bios);
198 if (pool->stream_enc[pool->stream_enc_count] == NULL) {
199 DC_ERR("DC: failed to create stream_encoder!\n");
200 return false;
201 }
202 pool->stream_enc_count++;
203 }
204
205 dc->hwseq = create_funcs->create_hwseq(ctx);
206
207 return true;
208}
209
210
211void resource_unreference_clock_source(
212 struct resource_context *res_ctx,
 213 struct clock_source **clock_source)
 214{
 215 int i;
 216 for (i = 0; i < res_ctx->pool->clk_src_count; i++) {
 217 if (res_ctx->pool->clock_sources[i] != *clock_source)
 218 continue;
 219
 220 res_ctx->clock_source_ref_count[i]--;
 221
 222 if (res_ctx->clock_source_ref_count[i] == 0)
 223 (*clock_source)->funcs->cs_power_down(*clock_source);
 224
 225 break;
 226 }
 227
 228 if (res_ctx->pool->dp_clock_source == *clock_source) {
 229 res_ctx->dp_clock_source_ref_count--;
 230
 231 if (res_ctx->dp_clock_source_ref_count == 0)
 232 (*clock_source)->funcs->cs_power_down(*clock_source);
 233 }
 234 *clock_source = NULL;
235}
236
237void resource_reference_clock_source(
238 struct resource_context *res_ctx,
239 struct clock_source *clock_source)
240{
241 int i;
242 for (i = 0; i < res_ctx->pool->clk_src_count; i++) {
243 if (res_ctx->pool->clock_sources[i] != clock_source)
244 continue;
245
246 res_ctx->clock_source_ref_count[i]++;
247 break;
248 }
249
250 if (res_ctx->pool->dp_clock_source == clock_source)
251 res_ctx->dp_clock_source_ref_count++;
252}
253
254bool resource_are_streams_timing_synchronizable(
255 const struct core_stream *stream1,
256 const struct core_stream *stream2)
257{
258 if (stream1->public.timing.h_total != stream2->public.timing.h_total)
259 return false;
260
261 if (stream1->public.timing.v_total != stream2->public.timing.v_total)
262 return false;
263
264 if (stream1->public.timing.h_addressable
265 != stream2->public.timing.h_addressable)
266 return false;
267
268 if (stream1->public.timing.v_addressable
269 != stream2->public.timing.v_addressable)
270 return false;
271
272 if (stream1->public.timing.pix_clk_khz
273 != stream2->public.timing.pix_clk_khz)
274 return false;
275
276 if (stream1->phy_pix_clk != stream2->phy_pix_clk
277 && !dc_is_dp_signal(stream1->signal)
278 && !dc_is_dp_signal(stream2->signal))
279 return false;
280
281 return true;
282}
283
284static bool is_sharable_clk_src(
285 const struct pipe_ctx *pipe_with_clk_src,
286 const struct pipe_ctx *pipe)
287{
288 if (pipe_with_clk_src->clock_source == NULL)
289 return false;
290
291 if (pipe_with_clk_src->stream->signal == SIGNAL_TYPE_VIRTUAL)
292 return false;
293
294 if (dc_is_dp_signal(pipe_with_clk_src->stream->signal))
295 return false;
296
297 if (dc_is_hdmi_signal(pipe_with_clk_src->stream->signal)
298 && dc_is_dvi_signal(pipe->stream->signal))
299 return false;
300
301 if (dc_is_hdmi_signal(pipe->stream->signal)
302 && dc_is_dvi_signal(pipe_with_clk_src->stream->signal))
303 return false;
304
305 if (!resource_are_streams_timing_synchronizable(
306 pipe_with_clk_src->stream, pipe->stream))
307 return false;
308
309 return true;
310}
311
312struct clock_source *resource_find_used_clk_src_for_sharing(
313 struct resource_context *res_ctx,
314 struct pipe_ctx *pipe_ctx)
315{
316 int i;
317
318 for (i = 0; i < MAX_PIPES; i++) {
319 if (is_sharable_clk_src(&res_ctx->pipe_ctx[i], pipe_ctx))
320 return res_ctx->pipe_ctx[i].clock_source;
321 }
322
323 return NULL;
324}
325
326static enum pixel_format convert_pixel_format_to_dalsurface(
327 enum surface_pixel_format surface_pixel_format)
328{
329 enum pixel_format dal_pixel_format = PIXEL_FORMAT_UNKNOWN;
330
331 switch (surface_pixel_format) {
332 case SURFACE_PIXEL_FORMAT_GRPH_PALETA_256_COLORS:
333 dal_pixel_format = PIXEL_FORMAT_INDEX8;
334 break;
335 case SURFACE_PIXEL_FORMAT_GRPH_ARGB1555:
336 dal_pixel_format = PIXEL_FORMAT_RGB565;
337 break;
338 case SURFACE_PIXEL_FORMAT_GRPH_RGB565:
339 dal_pixel_format = PIXEL_FORMAT_RGB565;
340 break;
341 case SURFACE_PIXEL_FORMAT_GRPH_ARGB8888:
342 dal_pixel_format = PIXEL_FORMAT_ARGB8888;
343 break;
344 case SURFACE_PIXEL_FORMAT_GRPH_BGRA8888:
345 dal_pixel_format = PIXEL_FORMAT_ARGB8888;
346 break;
347 case SURFACE_PIXEL_FORMAT_GRPH_ARGB2101010:
348 dal_pixel_format = PIXEL_FORMAT_ARGB2101010;
349 break;
350 case SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010:
351 dal_pixel_format = PIXEL_FORMAT_ARGB2101010;
352 break;
353 case SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010_XR_BIAS:
354 dal_pixel_format = PIXEL_FORMAT_ARGB2101010_XRBIAS;
355 break;
356 case SURFACE_PIXEL_FORMAT_GRPH_ABGR16161616F:
357 case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616F:
358 dal_pixel_format = PIXEL_FORMAT_FP16;
359 break;
360 case SURFACE_PIXEL_FORMAT_VIDEO_420_YCbCr:
361 dal_pixel_format = PIXEL_FORMAT_420BPP12;
362 break;
363 case SURFACE_PIXEL_FORMAT_VIDEO_420_YCrCb:
364 dal_pixel_format = PIXEL_FORMAT_420BPP12;
365 break;
366 case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616:
367 default:
368 dal_pixel_format = PIXEL_FORMAT_UNKNOWN;
369 break;
370 }
371 return dal_pixel_format;
372}
373
374static void rect_swap_helper(struct rect *rect)
375{
376 uint32_t temp = 0;
377
378 temp = rect->height;
379 rect->height = rect->width;
380 rect->width = temp;
381
382 temp = rect->x;
383 rect->x = rect->y;
384 rect->y = temp;
385}
386
387static void calculate_viewport(
388 const struct dc_surface *surface,
389 struct pipe_ctx *pipe_ctx)
390{
391 struct rect stream_src = pipe_ctx->stream->public.src;
392 struct rect src = surface->src_rect;
393 struct rect dst = surface->dst_rect;
394 struct rect surface_clip = surface->clip_rect;
395 struct rect clip = {0};
396
397
398 if (surface->rotation == ROTATION_ANGLE_90 ||
399 surface->rotation == ROTATION_ANGLE_270) {
400 rect_swap_helper(&src);
401 rect_swap_helper(&dst);
402 rect_swap_helper(&surface_clip);
403 rect_swap_helper(&stream_src);
404 }
405
 406 /* The actual clip is the intersection of the stream
 407 * source and the surface clip rectangles
408 */
409 clip.x = stream_src.x > surface_clip.x ?
410 stream_src.x : surface_clip.x;
411
412 clip.width = stream_src.x + stream_src.width <
413 surface_clip.x + surface_clip.width ?
414 stream_src.x + stream_src.width - clip.x :
415 surface_clip.x + surface_clip.width - clip.x ;
416
417 clip.y = stream_src.y > surface_clip.y ?
418 stream_src.y : surface_clip.y;
419
420 clip.height = stream_src.y + stream_src.height <
421 surface_clip.y + surface_clip.height ?
422 stream_src.y + stream_src.height - clip.y :
423 surface_clip.y + surface_clip.height - clip.y ;
424
425 /* offset = src.ofs + (clip.ofs - dst.ofs) * scl_ratio
426 * num_pixels = clip.num_pix * scl_ratio
427 */
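 /* Worked example with illustrative numbers: a 1920 wide src_rect scaled
 * to a 3840 wide dst_rect (src.x = dst.x = 0), with clip.x = 400 and
 * clip.width = 1000, yields viewport.x = 0 + (400 - 0) * 1920 / 3840 = 200
 * and viewport.width = 1000 * 1920 / 3840 = 500.
 */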
428 pipe_ctx->scl_data.viewport.x = src.x + (clip.x - dst.x) *
429 src.width / dst.width;
430 pipe_ctx->scl_data.viewport.width = clip.width *
431 src.width / dst.width;
432
433 pipe_ctx->scl_data.viewport.y = src.y + (clip.y - dst.y) *
434 src.height / dst.height;
435 pipe_ctx->scl_data.viewport.height = clip.height *
436 src.height / dst.height;
437
438 /* Minimum viewport such that 420/422 chroma vp is non 0 */
439 if (pipe_ctx->scl_data.viewport.width < 2)
440 pipe_ctx->scl_data.viewport.width = 2;
441 if (pipe_ctx->scl_data.viewport.height < 2)
442 pipe_ctx->scl_data.viewport.height = 2;
443}
444
445static void calculate_recout(
446 const struct dc_surface *surface,
447 struct pipe_ctx *pipe_ctx)
448{
449 struct core_stream *stream = pipe_ctx->stream;
450 struct rect clip = surface->clip_rect;
451
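 /* The recout is the clip rectangle mapped from stream source space into
 * stream destination space: offsets are shifted by (clip - src) scaled by
 * dst/src, sizes are scaled by dst/src, and the result is clamped so it
 * never extends past the stream destination rectangle.
 */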
452 pipe_ctx->scl_data.recout.x = stream->public.dst.x;
453 if (stream->public.src.x < clip.x)
454 pipe_ctx->scl_data.recout.x += (clip.x
455 - stream->public.src.x) * stream->public.dst.width
456 / stream->public.src.width;
457
458 pipe_ctx->scl_data.recout.width = clip.width *
459 stream->public.dst.width / stream->public.src.width;
460 if (pipe_ctx->scl_data.recout.width + pipe_ctx->scl_data.recout.x >
461 stream->public.dst.x + stream->public.dst.width)
462 pipe_ctx->scl_data.recout.width =
463 stream->public.dst.x + stream->public.dst.width
464 - pipe_ctx->scl_data.recout.x;
465
466 pipe_ctx->scl_data.recout.y = stream->public.dst.y;
467 if (stream->public.src.y < clip.y)
468 pipe_ctx->scl_data.recout.y += (clip.y
469 - stream->public.src.y) * stream->public.dst.height
470 / stream->public.src.height;
471
472 pipe_ctx->scl_data.recout.height = clip.height *
473 stream->public.dst.height / stream->public.src.height;
474 if (pipe_ctx->scl_data.recout.height + pipe_ctx->scl_data.recout.y >
475 stream->public.dst.y + stream->public.dst.height)
476 pipe_ctx->scl_data.recout.height =
477 stream->public.dst.y + stream->public.dst.height
478 - pipe_ctx->scl_data.recout.y;
479}
480
481static void calculate_scaling_ratios(
482 const struct dc_surface *surface,
483 struct pipe_ctx *pipe_ctx)
484{
485 struct core_stream *stream = pipe_ctx->stream;
486 const uint32_t in_w = stream->public.src.width;
487 const uint32_t in_h = stream->public.src.height;
488 const uint32_t out_w = stream->public.dst.width;
489 const uint32_t out_h = stream->public.dst.height;
490
491 pipe_ctx->scl_data.ratios.horz = dal_fixed31_32_from_fraction(
492 surface->src_rect.width,
493 surface->dst_rect.width);
494 pipe_ctx->scl_data.ratios.vert = dal_fixed31_32_from_fraction(
495 surface->src_rect.height,
496 surface->dst_rect.height);
497
498 if (surface->stereo_format == PLANE_STEREO_FORMAT_SIDE_BY_SIDE)
499 pipe_ctx->scl_data.ratios.horz.value *= 2;
500 else if (surface->stereo_format == PLANE_STEREO_FORMAT_TOP_AND_BOTTOM)
501 pipe_ctx->scl_data.ratios.vert.value *= 2;
502
503 pipe_ctx->scl_data.ratios.vert.value = div64_s64(
504 pipe_ctx->scl_data.ratios.vert.value * in_h, out_h);
505 pipe_ctx->scl_data.ratios.horz.value = div64_s64(
506 pipe_ctx->scl_data.ratios.horz.value * in_w, out_w);
507
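 /* Each ratio is now (surface src / surface dst) scaled by
 * (stream src / stream dst). E.g. a 3840 wide surface src_rect mapped to
 * a 1920 wide dst_rect on a 1:1 stream gives ratios.horz = 2.0; for a
 * 4:2:0 surface the chroma ratio is then halved to 1.0 below.
 */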
508 pipe_ctx->scl_data.ratios.horz_c = pipe_ctx->scl_data.ratios.horz;
509 pipe_ctx->scl_data.ratios.vert_c = pipe_ctx->scl_data.ratios.vert;
510
511 if (pipe_ctx->scl_data.format == PIXEL_FORMAT_420BPP12) {
512 pipe_ctx->scl_data.ratios.horz_c.value /= 2;
513 pipe_ctx->scl_data.ratios.vert_c.value /= 2;
514 }
515}
516
517bool resource_build_scaling_params(
518 const struct dc_surface *surface,
519 struct pipe_ctx *pipe_ctx)
520{
521 bool res;
522 struct dc_crtc_timing *timing = &pipe_ctx->stream->public.timing;
523 /* Important: scaling ratio calculation requires pixel format,
524 * lb depth calculation requires recout and taps require scaling ratios.
525 */
526 pipe_ctx->scl_data.format = convert_pixel_format_to_dalsurface(surface->format);
527
528 calculate_viewport(surface, pipe_ctx);
529
530 if (pipe_ctx->scl_data.viewport.height < 16 || pipe_ctx->scl_data.viewport.width < 16)
531 return false;
532
533 calculate_scaling_ratios(surface, pipe_ctx);
534
535 calculate_recout(surface, pipe_ctx);
536
537 /**
538 * Setting line buffer pixel depth to 24bpp yields banding
539 * on certain displays, such as the Sharp 4k
540 */
541 pipe_ctx->scl_data.lb_params.depth = LB_PIXEL_DEPTH_30BPP;
542
543 pipe_ctx->scl_data.h_active = timing->h_addressable;
544 pipe_ctx->scl_data.v_active = timing->v_addressable;
545
546 /* Taps calculations */
547 res = pipe_ctx->xfm->funcs->transform_get_optimal_number_of_taps(
548 pipe_ctx->xfm, &pipe_ctx->scl_data, &surface->scaling_quality);
549
550 if (!res) {
551 /* Try 24 bpp linebuffer */
552 pipe_ctx->scl_data.lb_params.depth = LB_PIXEL_DEPTH_24BPP;
553
554 res = pipe_ctx->xfm->funcs->transform_get_optimal_number_of_taps(
555 pipe_ctx->xfm, &pipe_ctx->scl_data, &surface->scaling_quality);
556 }
557
558 dm_logger_write(pipe_ctx->stream->ctx->logger, LOG_SCALER,
559 "%s: Viewport:\nheight:%d width:%d x:%d "
560 "y:%d\n dst_rect:\nheight:%d width:%d x:%d "
561 "y:%d\n",
562 __func__,
563 pipe_ctx->scl_data.viewport.height,
564 pipe_ctx->scl_data.viewport.width,
565 pipe_ctx->scl_data.viewport.x,
566 pipe_ctx->scl_data.viewport.y,
567 surface->dst_rect.height,
568 surface->dst_rect.width,
569 surface->dst_rect.x,
570 surface->dst_rect.y);
571
572 return res;
573}
574
575
576enum dc_status resource_build_scaling_params_for_context(
577 const struct core_dc *dc,
578 struct validate_context *context)
579{
580 int i;
581
582 for (i = 0; i < MAX_PIPES; i++) {
583 if (context->res_ctx.pipe_ctx[i].surface != NULL &&
584 context->res_ctx.pipe_ctx[i].stream != NULL)
585 if (!resource_build_scaling_params(
586 &context->res_ctx.pipe_ctx[i].surface->public,
587 &context->res_ctx.pipe_ctx[i]))
588 return DC_FAIL_BANDWIDTH_VALIDATE;
589 }
590
591 return DC_OK;
592}
593
594static void detach_surfaces_for_target(
595 struct validate_context *context,
596 const struct dc_target *dc_target)
597{
598 int i;
599 struct core_stream *stream = DC_STREAM_TO_CORE(dc_target->streams[0]);
600
601 for (i = 0; i < context->res_ctx.pool->pipe_count; i++) {
602 struct pipe_ctx *cur_pipe = &context->res_ctx.pipe_ctx[i];
603 if (cur_pipe->stream == stream) {
604 cur_pipe->surface = NULL;
605 cur_pipe->top_pipe = NULL;
606 cur_pipe->bottom_pipe = NULL;
607 }
608 }
609}
610
611struct pipe_ctx *find_idle_secondary_pipe(struct resource_context *res_ctx)
612{
613 int i;
614 struct pipe_ctx *secondary_pipe = NULL;
615
616 /*
617 * search backwards for the second pipe to keep pipe
618 * assignment more consistent
619 */
620
621 for (i = res_ctx->pool->pipe_count - 1; i >= 0; i--) {
622 if (res_ctx->pipe_ctx[i].stream == NULL) {
623 secondary_pipe = &res_ctx->pipe_ctx[i];
624 secondary_pipe->pipe_idx = i;
625 break;
626 }
627 }
628
629
630 return secondary_pipe;
631}
632
633struct pipe_ctx *resource_get_head_pipe_for_stream(
634 struct resource_context *res_ctx,
635 const struct core_stream *stream)
636{
637 int i;
638 for (i = 0; i < res_ctx->pool->pipe_count; i++) {
639 if (res_ctx->pipe_ctx[i].stream == stream &&
640 !res_ctx->pipe_ctx[i].top_pipe) {
641 return &res_ctx->pipe_ctx[i];
642 break;
643 }
644 }
645 return NULL;
646}
647
648/*
649 * A free_pipe for a target is defined here as a pipe with a stream that belongs
650 * to the target but has no surface attached yet
651 */
652static struct pipe_ctx *acquire_free_pipe_for_target(
653 struct resource_context *res_ctx,
654 const struct dc_target *dc_target)
655{
656 int i;
657 struct core_stream *stream = DC_STREAM_TO_CORE(dc_target->streams[0]);
658
659 struct pipe_ctx *head_pipe = NULL;
660
661 /* Find head pipe, which has the back end set up*/
662
663 head_pipe = resource_get_head_pipe_for_stream(res_ctx, stream);
664
665 if (!head_pipe)
666 ASSERT(0);
667
668 if (!head_pipe->surface)
669 return head_pipe;
670
671 /* Re-use pipe already acquired for this stream if available*/
672 for (i = res_ctx->pool->pipe_count - 1; i >= 0; i--) {
673 if (res_ctx->pipe_ctx[i].stream == stream &&
674 !res_ctx->pipe_ctx[i].surface) {
675 return &res_ctx->pipe_ctx[i];
676 }
677 }
678
679 /*
 680 * At this point we have no reusable pipe for this stream and we need
681 * to acquire an idle one to satisfy the request
682 */
683
 684 if (!res_ctx->pool->funcs->acquire_idle_pipe_for_layer)
685 return NULL;
686
687 return res_ctx->pool->funcs->acquire_idle_pipe_for_layer(res_ctx, stream);
688
689}
690
691static void release_free_pipes_for_target(
692 struct resource_context *res_ctx,
693 const struct dc_target *dc_target)
694{
695 int i;
696 struct core_stream *stream = DC_STREAM_TO_CORE(dc_target->streams[0]);
697
698 for (i = res_ctx->pool->pipe_count - 1; i >= 0; i--) {
699 if (res_ctx->pipe_ctx[i].stream == stream &&
700 !res_ctx->pipe_ctx[i].surface) {
701 res_ctx->pipe_ctx[i].stream = NULL;
702 }
703 }
704}
705
706bool resource_attach_surfaces_to_context(
707 const struct dc_surface * const *surfaces,
708 int surface_count,
709 const struct dc_target *dc_target,
710 struct validate_context *context)
711{
712 int i;
713 struct pipe_ctx *tail_pipe;
714 struct dc_target_status *target_status = NULL;
715
716
717 if (surface_count > MAX_SURFACE_NUM) {
718 dm_error("Surface: can not attach %d surfaces! Maximum is: %d\n",
719 surface_count, MAX_SURFACE_NUM);
720 return false;
721 }
722
723 for (i = 0; i < context->target_count; i++)
724 if (&context->targets[i]->public == dc_target) {
725 target_status = &context->target_status[i];
726 break;
727 }
728 if (target_status == NULL) {
729 dm_error("Existing target not found; failed to attach surfaces\n");
730 return false;
731 }
732
733 /* retain new surfaces */
734 for (i = 0; i < surface_count; i++)
735 dc_surface_retain(surfaces[i]);
736
737 detach_surfaces_for_target(context, dc_target);
738
739 /* release existing surfaces*/
740 for (i = 0; i < target_status->surface_count; i++)
741 dc_surface_release(target_status->surfaces[i]);
742
743 for (i = surface_count; i < target_status->surface_count; i++)
744 target_status->surfaces[i] = NULL;
745
746 target_status->surface_count = 0;
747
748 if (surface_count == 0)
749 return true;
750
751 tail_pipe = NULL;
752 for (i = 0; i < surface_count; i++) {
753 struct core_surface *surface = DC_SURFACE_TO_CORE(surfaces[i]);
754 struct pipe_ctx *free_pipe = acquire_free_pipe_for_target(
755 &context->res_ctx, dc_target);
756
757 if (!free_pipe) {
758 target_status->surfaces[i] = NULL;
759 return false;
760 }
761
762 free_pipe->surface = surface;
763
764 if (tail_pipe) {
765 free_pipe->top_pipe = tail_pipe;
766 tail_pipe->bottom_pipe = free_pipe;
767 }
768
769 tail_pipe = free_pipe;
770 }
771
772 release_free_pipes_for_target(&context->res_ctx, dc_target);
773
774 /* assign new surfaces*/
775 for (i = 0; i < surface_count; i++)
776 target_status->surfaces[i] = surfaces[i];
777
778 target_status->surface_count = surface_count;
779
780 return true;
781}
782
783
784static bool is_timing_changed(const struct core_stream *cur_stream,
785 const struct core_stream *new_stream)
786{
787 if (cur_stream == NULL)
788 return true;
789
790 /* If sink pointer changed, it means this is a hotplug, we should do
791 * full hw setting.
792 */
793 if (cur_stream->sink != new_stream->sink)
794 return true;
795
796 /* If output color space is changed, need to reprogram info frames */
797 if (cur_stream->public.output_color_space !=
798 new_stream->public.output_color_space)
799 return true;
800
801 return memcmp(
802 &cur_stream->public.timing,
803 &new_stream->public.timing,
804 sizeof(struct dc_crtc_timing)) != 0;
805}
806
807static bool are_stream_backends_same(
808 const struct core_stream *stream_a, const struct core_stream *stream_b)
809{
810 if (stream_a == stream_b)
811 return true;
812
813 if (stream_a == NULL || stream_b == NULL)
814 return false;
815
816 if (is_timing_changed(stream_a, stream_b))
817 return false;
818
819 return true;
820}
821
822bool is_target_unchanged(
823 const struct core_target *old_target, const struct core_target *target)
824{
825 int i;
826
827 if (old_target == target)
828 return true;
829 if (old_target->public.stream_count != target->public.stream_count)
830 return false;
831
832 for (i = 0; i < old_target->public.stream_count; i++) {
833 const struct core_stream *old_stream = DC_STREAM_TO_CORE(
834 old_target->public.streams[i]);
835 const struct core_stream *stream = DC_STREAM_TO_CORE(
836 target->public.streams[i]);
837
838 if (!are_stream_backends_same(old_stream, stream))
839 return false;
840 }
841
842 return true;
843}
844
845bool resource_validate_attach_surfaces(
846 const struct dc_validation_set set[],
847 int set_count,
848 const struct validate_context *old_context,
849 struct validate_context *context)
850{
851 int i, j;
852
853 for (i = 0; i < set_count; i++) {
854 for (j = 0; j < old_context->target_count; j++)
855 if (is_target_unchanged(
856 old_context->targets[j],
857 context->targets[i])) {
858 if (!resource_attach_surfaces_to_context(
859 old_context->target_status[j].surfaces,
860 old_context->target_status[j].surface_count,
861 &context->targets[i]->public,
862 context))
863 return false;
864 context->target_status[i] = old_context->target_status[j];
865 }
866 if (set[i].surface_count != 0)
867 if (!resource_attach_surfaces_to_context(
868 set[i].surfaces,
869 set[i].surface_count,
870 &context->targets[i]->public,
871 context))
872 return false;
873
874 }
875
876 return true;
877}
878
879/* Maximum TMDS single link pixel clock 165MHz */
880#define TMDS_MAX_PIXEL_CLOCK_IN_KHZ 165000
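/* For example, a 148.5 MHz 1080p60 timing fits on a single TMDS link, while
 * a 297 MHz 4K30 timing exceeds this limit and is treated as dual link DVI
 * in update_stream_signal() below.
 */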
881
882static void set_stream_engine_in_use(
883 struct resource_context *res_ctx,
884 struct stream_encoder *stream_enc)
885{
886 int i;
887
888 for (i = 0; i < res_ctx->pool->stream_enc_count; i++) {
889 if (res_ctx->pool->stream_enc[i] == stream_enc)
890 res_ctx->is_stream_enc_acquired[i] = true;
891 }
892}
893
894/* TODO: release audio object */
895static void set_audio_in_use(
896 struct resource_context *res_ctx,
897 struct audio *audio)
898{
899 int i;
900 for (i = 0; i < res_ctx->pool->audio_count; i++) {
901 if (res_ctx->pool->audios[i] == audio) {
902 res_ctx->is_audio_acquired[i] = true;
903 }
904 }
905}
906
907static int acquire_first_free_pipe(
908 struct resource_context *res_ctx,
909 struct core_stream *stream)
910{
911 int i;
912
913 for (i = 0; i < res_ctx->pool->pipe_count; i++) {
914 if (!res_ctx->pipe_ctx[i].stream) {
915 struct pipe_ctx *pipe_ctx = &res_ctx->pipe_ctx[i];
916
917 pipe_ctx->tg = res_ctx->pool->timing_generators[i];
918 pipe_ctx->mi = res_ctx->pool->mis[i];
919 pipe_ctx->ipp = res_ctx->pool->ipps[i];
920 pipe_ctx->xfm = res_ctx->pool->transforms[i];
921 pipe_ctx->opp = res_ctx->pool->opps[i];
922 pipe_ctx->dis_clk = res_ctx->pool->display_clock;
923 pipe_ctx->pipe_idx = i;
924
925 pipe_ctx->stream = stream;
926 return i;
927 }
928 }
929 return -1;
930}
931
932static struct stream_encoder *find_first_free_match_stream_enc_for_link(
933 struct resource_context *res_ctx,
934 struct core_stream *stream)
935{
936 int i;
937 int j = -1;
938 struct core_link *link = stream->sink->link;
939
940 for (i = 0; i < res_ctx->pool->stream_enc_count; i++) {
941 if (!res_ctx->is_stream_enc_acquired[i] &&
942 res_ctx->pool->stream_enc[i]) {
943 /* Store first available for MST second display
944 * in daisy chain use case */
945 j = i;
946 if (res_ctx->pool->stream_enc[i]->id ==
947 link->link_enc->preferred_engine)
948 return res_ctx->pool->stream_enc[i];
949 }
950 }
951
952 /*
 953 * The below can happen in cases when the stream encoder is already acquired:
 954 * 1) for the second MST display in a chain, so the preferred engine is
 955 * already acquired;
 956 * 2) for another link whose preferred engine was already acquired by some
 957 * MST configuration.
958 *
959 * If signal is of DP type and preferred engine not found, return last available
960 *
961 * TODO - This is just a patch up and a generic solution is
962 * required for non DP connectors.
963 */
964
965 if (j >= 0 && dc_is_dp_signal(stream->signal))
966 return res_ctx->pool->stream_enc[j];
967
968 return NULL;
969}
970
971static struct audio *find_first_free_audio(struct resource_context *res_ctx)
972{
973 int i;
974 for (i = 0; i < res_ctx->pool->audio_count; i++) {
975 if (res_ctx->is_audio_acquired[i] == false) {
976 return res_ctx->pool->audios[i];
977 }
978 }
979
980 return 0;
981}
982
983static void update_stream_signal(struct core_stream *stream)
984{
985 const struct dc_sink *dc_sink = stream->public.sink;
986
987 if (dc_sink->sink_signal == SIGNAL_TYPE_NONE)
988 stream->signal = stream->sink->link->public.connector_signal;
989 else if (dc_sink->sink_signal == SIGNAL_TYPE_DVI_SINGLE_LINK ||
990 dc_sink->sink_signal == SIGNAL_TYPE_DVI_DUAL_LINK)
 991 /* For ASICs that support dual link DVI, adjust the signal type
 992 * based on the timing pixel clock: above 165 MHz the signal is
 993 * dual link, otherwise single link.
994 */
995 if (stream->public.timing.pix_clk_khz > TMDS_MAX_PIXEL_CLOCK_IN_KHZ)
996 stream->signal = SIGNAL_TYPE_DVI_DUAL_LINK;
997 else
998 stream->signal = SIGNAL_TYPE_DVI_SINGLE_LINK;
999 else
1000 stream->signal = dc_sink->sink_signal;
1001}
1002
1003bool resource_is_stream_unchanged(
1004 const struct validate_context *old_context, struct core_stream *stream)
1005{
1006 int i, j;
1007
1008 for (i = 0; i < old_context->target_count; i++) {
1009 struct core_target *old_target = old_context->targets[i];
1010
1011 for (j = 0; j < old_target->public.stream_count; j++) {
1012 struct core_stream *old_stream =
1013 DC_STREAM_TO_CORE(old_target->public.streams[j]);
1014
1015 if (are_stream_backends_same(old_stream, stream))
1016 return true;
1017 }
1018 }
1019
1020 return false;
1021}
1022
1023static void copy_pipe_ctx(
1024 const struct pipe_ctx *from_pipe_ctx, struct pipe_ctx *to_pipe_ctx)
1025{
1026 struct core_surface *surface = to_pipe_ctx->surface;
1027 struct core_stream *stream = to_pipe_ctx->stream;
1028
1029 *to_pipe_ctx = *from_pipe_ctx;
1030 to_pipe_ctx->stream = stream;
1031 if (surface != NULL)
1032 to_pipe_ctx->surface = surface;
1033}
1034
1035static struct core_stream *find_pll_sharable_stream(
1036 const struct core_stream *stream_needs_pll,
1037 struct validate_context *context)
1038{
1039 int i, j;
1040
1041 for (i = 0; i < context->target_count; i++) {
1042 struct core_target *target = context->targets[i];
1043
1044 for (j = 0; j < target->public.stream_count; j++) {
1045 struct core_stream *stream_has_pll =
1046 DC_STREAM_TO_CORE(target->public.streams[j]);
1047
1048 /* We are looking for non dp, non virtual stream */
1049 if (resource_are_streams_timing_synchronizable(
1050 stream_needs_pll, stream_has_pll)
1051 && !dc_is_dp_signal(stream_has_pll->signal)
1052 && stream_has_pll->sink->link->public.connector_signal
1053 != SIGNAL_TYPE_VIRTUAL)
1054 return stream_has_pll;
1055 }
1056 }
1057
1058 return NULL;
1059}
1060
1061static int get_norm_pix_clk(const struct dc_crtc_timing *timing)
1062{
1063 uint32_t pix_clk = timing->pix_clk_khz;
1064 uint32_t normalized_pix_clk = pix_clk;
1065
1066 if (timing->pixel_encoding == PIXEL_ENCODING_YCBCR420)
1067 pix_clk /= 2;
1068
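 /* Normalize by bits per pixel relative to 24 bpp: e.g. a 297000 kHz clock
 * at COLOR_DEPTH_101010 (30 bpp) becomes 297000 * 30 / 24 = 371250 kHz;
 * YCbCr 4:2:0 halves the clock first.
 */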
1069 switch (timing->display_color_depth) {
1070 case COLOR_DEPTH_888:
1071 normalized_pix_clk = pix_clk;
1072 break;
1073 case COLOR_DEPTH_101010:
1074 normalized_pix_clk = (pix_clk * 30) / 24;
1075 break;
1076 case COLOR_DEPTH_121212:
1077 normalized_pix_clk = (pix_clk * 36) / 24;
1078 break;
1079 case COLOR_DEPTH_161616:
1080 normalized_pix_clk = (pix_clk * 48) / 24;
1081 break;
1082 default:
1083 ASSERT(0);
1084 break;
1085 }
1086
1087 return normalized_pix_clk;
1088}
1089
1090static void calculate_phy_pix_clks(
1091 const struct core_dc *dc,
1092 struct validate_context *context)
1093{
1094 int i, j;
1095
1096 for (i = 0; i < context->target_count; i++) {
1097 struct core_target *target = context->targets[i];
1098
1099 for (j = 0; j < target->public.stream_count; j++) {
1100 struct core_stream *stream =
1101 DC_STREAM_TO_CORE(target->public.streams[j]);
1102
1103 update_stream_signal(stream);
1104
1105 /* update actual pixel clock on all streams */
1106 if (dc_is_hdmi_signal(stream->signal))
1107 stream->phy_pix_clk = get_norm_pix_clk(
1108 &stream->public.timing);
1109 else
1110 stream->phy_pix_clk =
1111 stream->public.timing.pix_clk_khz;
1112 }
1113 }
1114}
1115
1116enum dc_status resource_map_pool_resources(
1117 const struct core_dc *dc,
1118 struct validate_context *context)
1119{
1120 int i, j, k;
1121
1122 calculate_phy_pix_clks(dc, context);
1123
1124 for (i = 0; i < context->target_count; i++) {
1125 struct core_target *target = context->targets[i];
1126
1127 for (j = 0; j < target->public.stream_count; j++) {
1128 struct core_stream *stream =
1129 DC_STREAM_TO_CORE(target->public.streams[j]);
1130
1131 if (!resource_is_stream_unchanged(dc->current_context, stream))
1132 continue;
1133
1134 /* mark resources used for stream that is already active */
1135 for (k = 0; k < MAX_PIPES; k++) {
1136 struct pipe_ctx *pipe_ctx =
1137 &context->res_ctx.pipe_ctx[k];
1138 const struct pipe_ctx *old_pipe_ctx =
1139 &dc->current_context->res_ctx.pipe_ctx[k];
1140
1141 if (!are_stream_backends_same(old_pipe_ctx->stream, stream))
1142 continue;
1143
1144 pipe_ctx->stream = stream;
1145 copy_pipe_ctx(old_pipe_ctx, pipe_ctx);
1146
1147 /* Split pipe resource, do not acquire back end */
1148 if (!pipe_ctx->stream_enc)
1149 continue;
1150
1151 set_stream_engine_in_use(
1152 &context->res_ctx,
1153 pipe_ctx->stream_enc);
1154
1155 /* Switch to dp clock source only if there is
1156 * no non dp stream that shares the same timing
1157 * with the dp stream.
1158 */
1159 if (dc_is_dp_signal(pipe_ctx->stream->signal) &&
1160 !find_pll_sharable_stream(stream, context))
1161 pipe_ctx->clock_source =
1162 context->res_ctx.pool->dp_clock_source;
1163
1164 resource_reference_clock_source(
1165 &context->res_ctx,
1166 pipe_ctx->clock_source);
1167
1168 set_audio_in_use(&context->res_ctx,
1169 pipe_ctx->audio);
1170 }
1171 }
1172 }
1173
1174 for (i = 0; i < context->target_count; i++) {
1175 struct core_target *target = context->targets[i];
1176
1177 for (j = 0; j < target->public.stream_count; j++) {
1178 struct core_stream *stream =
1179 DC_STREAM_TO_CORE(target->public.streams[j]);
1180 struct pipe_ctx *pipe_ctx = NULL;
1181 int pipe_idx = -1;
1182
1183 if (resource_is_stream_unchanged(dc->current_context, stream))
1184 continue;
1185 /* acquire new resources */
1186 pipe_idx = acquire_first_free_pipe(
1187 &context->res_ctx, stream);
1188 if (pipe_idx < 0)
1189 return DC_NO_CONTROLLER_RESOURCE;
1190
1191
1192 pipe_ctx = &context->res_ctx.pipe_ctx[pipe_idx];
1193
1194 pipe_ctx->stream_enc =
1195 find_first_free_match_stream_enc_for_link(
1196 &context->res_ctx, stream);
1197
1198 if (!pipe_ctx->stream_enc)
1199 return DC_NO_STREAM_ENG_RESOURCE;
1200
1201 set_stream_engine_in_use(
1202 &context->res_ctx,
1203 pipe_ctx->stream_enc);
1204
1205 /* TODO: Add check if ASIC support and EDID audio */
1206 if (!stream->sink->converter_disable_audio &&
1207 dc_is_audio_capable_signal(pipe_ctx->stream->signal) &&
1208 stream->public.audio_info.mode_count) {
1209 pipe_ctx->audio = find_first_free_audio(
1210 &context->res_ctx);
1211
1212 /*
 1213 * Audio is assigned in order, first come first served.
 1214 * There are ASICs which have fewer audio
 1215 * resources than pipes.
1216 */
1217 if (pipe_ctx->audio)
1218 set_audio_in_use(
1219 &context->res_ctx,
1220 pipe_ctx->audio);
1221 }
1222
1223 if (j == 0) {
1224 context->target_status[i].primary_otg_inst =
1225 pipe_ctx->tg->inst;
1226 }
1227 }
1228 }
1229
1230 return DC_OK;
1231}
1232
1233/* first target in the context is used to populate the rest */
1234void validate_guaranteed_copy_target(
1235 struct validate_context *context,
1236 int max_targets)
1237{
1238 int i;
1239
1240 for (i = 1; i < max_targets; i++) {
1241 context->targets[i] = context->targets[0];
1242
1243 copy_pipe_ctx(&context->res_ctx.pipe_ctx[0],
1244 &context->res_ctx.pipe_ctx[i]);
1245 context->res_ctx.pipe_ctx[i].stream =
1246 context->res_ctx.pipe_ctx[0].stream;
1247
1248 dc_target_retain(&context->targets[i]->public);
1249 context->target_count++;
1250 }
1251}
1252
1253static void translate_info_frame(const struct hw_info_frame *hw_info_frame,
1254 struct encoder_info_frame *encoder_info_frame)
1255{
1256 memset(
1257 encoder_info_frame, 0, sizeof(struct encoder_info_frame));
1258
1259 /* For gamut we recalc checksum */
1260 if (hw_info_frame->gamut_packet.valid) {
1261 uint8_t chk_sum = 0;
1262 uint8_t *ptr;
1263 uint8_t i;
1264
1265 memmove(
1266 &encoder_info_frame->gamut,
1267 &hw_info_frame->gamut_packet,
1268 sizeof(struct hw_info_packet));
1269
 1270 /* Start of the Gamut data. */
1271 ptr = &encoder_info_frame->gamut.sb[3];
1272
1273 for (i = 0; i <= encoder_info_frame->gamut.sb[1]; i++)
1274 chk_sum += ptr[i];
1275
1276 encoder_info_frame->gamut.sb[2] = (uint8_t) (0x100 - chk_sum);
1277 }
1278
1279 if (hw_info_frame->avi_info_packet.valid) {
1280 memmove(
1281 &encoder_info_frame->avi,
1282 &hw_info_frame->avi_info_packet,
1283 sizeof(struct hw_info_packet));
1284 }
1285
1286 if (hw_info_frame->vendor_info_packet.valid) {
1287 memmove(
1288 &encoder_info_frame->vendor,
1289 &hw_info_frame->vendor_info_packet,
1290 sizeof(struct hw_info_packet));
1291 }
1292
1293 if (hw_info_frame->spd_packet.valid) {
1294 memmove(
1295 &encoder_info_frame->spd,
1296 &hw_info_frame->spd_packet,
1297 sizeof(struct hw_info_packet));
1298 }
1299
1300 if (hw_info_frame->vsc_packet.valid) {
1301 memmove(
1302 &encoder_info_frame->vsc,
1303 &hw_info_frame->vsc_packet,
1304 sizeof(struct hw_info_packet));
1305 }
1306
1307 if (hw_info_frame->hdrsmd_packet.valid) {
1308 memmove(
1309 &encoder_info_frame->hdrsmd,
1310 &hw_info_frame->hdrsmd_packet,
1311 sizeof(struct hw_info_packet));
1312 }
1313}
1314
1315static void set_avi_info_frame(
1316 struct hw_info_packet *info_packet,
1317 struct pipe_ctx *pipe_ctx)
1318{
1319 struct core_stream *stream = pipe_ctx->stream;
1320 enum dc_color_space color_space = COLOR_SPACE_UNKNOWN;
1321 struct info_frame info_frame = { {0} };
1322 uint32_t pixel_encoding = 0;
1323 enum scanning_type scan_type = SCANNING_TYPE_NODATA;
1324 enum dc_aspect_ratio aspect = ASPECT_RATIO_NO_DATA;
1325 bool itc = false;
1326 uint8_t cn0_cn1 = 0;
1327 uint8_t *check_sum = NULL;
1328 uint8_t byte_index = 0;
1329
1330 if (info_packet == NULL)
1331 return;
1332
1333 color_space = pipe_ctx->stream->public.output_color_space;
1334
1335 /* Initialize header */
1336 info_frame.avi_info_packet.info_packet_hdmi.bits.header.
1337 info_frame_type = INFO_FRAME_AVI;
1338 /* InfoFrameVersion_3 is defined by CEA861F (Section 6.4), but shall
1339 * not be used in HDMI 2.0 (Section 10.1) */
1340 info_frame.avi_info_packet.info_packet_hdmi.bits.header.version =
1341 INFO_FRAME_VERSION_2;
1342 info_frame.avi_info_packet.info_packet_hdmi.bits.header.length =
1343 INFO_FRAME_SIZE_AVI;
1344
1345 /*
1346 * IDO-defined (Y2,Y1,Y0 = 1,1,1) shall not be used by devices built
1347 * according to HDMI 2.0 spec (Section 10.1)
1348 */
1349
1350 switch (stream->public.timing.pixel_encoding) {
1351 case PIXEL_ENCODING_YCBCR422:
1352 pixel_encoding = 1;
1353 break;
1354
1355 case PIXEL_ENCODING_YCBCR444:
1356 pixel_encoding = 2;
1357 break;
1358 case PIXEL_ENCODING_YCBCR420:
1359 pixel_encoding = 3;
1360 break;
1361
1362 case PIXEL_ENCODING_RGB:
1363 default:
1364 pixel_encoding = 0;
1365 }
1366
1367 /* Y0_Y1_Y2 : The pixel encoding */
1368 /* H14b AVI InfoFrame has extension on Y-field from 2 bits to 3 bits */
1369 info_frame.avi_info_packet.info_packet_hdmi.bits.Y0_Y1_Y2 =
1370 pixel_encoding;
1371
1372 /* A0 = 1 Active Format Information valid */
1373 info_frame.avi_info_packet.info_packet_hdmi.bits.A0 =
1374 ACTIVE_FORMAT_VALID;
1375
1376 /* B0, B1 = 3; Bar info data is valid */
1377 info_frame.avi_info_packet.info_packet_hdmi.bits.B0_B1 =
1378 BAR_INFO_BOTH_VALID;
1379
1380 info_frame.avi_info_packet.info_packet_hdmi.bits.SC0_SC1 =
1381 PICTURE_SCALING_UNIFORM;
1382
1383 /* S0, S1 : Underscan / Overscan */
1384 /* TODO: un-hardcode scan type */
1385 scan_type = SCANNING_TYPE_UNDERSCAN;
1386 info_frame.avi_info_packet.info_packet_hdmi.bits.S0_S1 = scan_type;
1387
1388 /* C0, C1 : Colorimetry */
1389 if (color_space == COLOR_SPACE_YCBCR709)
1390 info_frame.avi_info_packet.info_packet_hdmi.bits.C0_C1 =
1391 COLORIMETRY_ITU709;
1392 else if (color_space == COLOR_SPACE_YCBCR601)
1393 info_frame.avi_info_packet.info_packet_hdmi.bits.C0_C1 =
1394 COLORIMETRY_ITU601;
1395 else
1396 info_frame.avi_info_packet.info_packet_hdmi.bits.C0_C1 =
1397 COLORIMETRY_NO_DATA;
1398
1399 /* TODO: un-hardcode aspect ratio */
1400 aspect = stream->public.timing.aspect_ratio;
1401
1402 switch (aspect) {
1403 case ASPECT_RATIO_4_3:
1404 case ASPECT_RATIO_16_9:
1405 info_frame.avi_info_packet.info_packet_hdmi.bits.M0_M1 = aspect;
1406 break;
1407
1408 case ASPECT_RATIO_NO_DATA:
1409 case ASPECT_RATIO_64_27:
1410 case ASPECT_RATIO_256_135:
1411 default:
1412 info_frame.avi_info_packet.info_packet_hdmi.bits.M0_M1 = 0;
1413 }
1414
1415 /* Active Format Aspect ratio - same as Picture Aspect Ratio. */
1416 info_frame.avi_info_packet.info_packet_hdmi.bits.R0_R3 =
1417 ACTIVE_FORMAT_ASPECT_RATIO_SAME_AS_PICTURE;
1418
1419 /* TODO: un-hardcode cn0_cn1 and itc */
1420 cn0_cn1 = 0;
1421 itc = false;
1422
1423 if (itc) {
1424 info_frame.avi_info_packet.info_packet_hdmi.bits.ITC = 1;
1425 info_frame.avi_info_packet.info_packet_hdmi.bits.CN0_CN1 =
1426 cn0_cn1;
1427 }
1428
1429 /* TODO : We should handle YCC quantization */
1430 /* but we do not have matrix calculation */
1431 if (color_space == COLOR_SPACE_SRGB) {
1432 info_frame.avi_info_packet.info_packet_hdmi.bits.Q0_Q1 =
1433 RGB_QUANTIZATION_FULL_RANGE;
1434 info_frame.avi_info_packet.info_packet_hdmi.bits.YQ0_YQ1 =
1435 YYC_QUANTIZATION_FULL_RANGE;
1436 } else if (color_space == COLOR_SPACE_SRGB_LIMITED) {
1437 info_frame.avi_info_packet.info_packet_hdmi.bits.Q0_Q1 =
1438 RGB_QUANTIZATION_LIMITED_RANGE;
1439 info_frame.avi_info_packet.info_packet_hdmi.bits.YQ0_YQ1 =
1440 YYC_QUANTIZATION_LIMITED_RANGE;
1441 } else {
1442 info_frame.avi_info_packet.info_packet_hdmi.bits.Q0_Q1 =
1443 RGB_QUANTIZATION_DEFAULT_RANGE;
1444 info_frame.avi_info_packet.info_packet_hdmi.bits.YQ0_YQ1 =
1445 YYC_QUANTIZATION_LIMITED_RANGE;
1446 }
1447
1448 info_frame.avi_info_packet.info_packet_hdmi.bits.VIC0_VIC7 =
1449 stream->public.timing.vic;
1450
1451 /* pixel repetition
1452 * PR0 - PR3 start from 0 whereas pHwPathMode->mode.timing.flags.pixel
1453 * repetition start from 1 */
1454 info_frame.avi_info_packet.info_packet_hdmi.bits.PR0_PR3 = 0;
1455
1456 /* Bar Info
1457 * barTop: Line Number of End of Top Bar.
1458 * barBottom: Line Number of Start of Bottom Bar.
1459 * barLeft: Pixel Number of End of Left Bar.
1460 * barRight: Pixel Number of Start of Right Bar. */
1461 info_frame.avi_info_packet.info_packet_hdmi.bits.bar_top =
1462 stream->public.timing.v_border_top;
1463 info_frame.avi_info_packet.info_packet_hdmi.bits.bar_bottom =
1464 (stream->public.timing.v_border_top
1465 - stream->public.timing.v_border_bottom + 1);
1466 info_frame.avi_info_packet.info_packet_hdmi.bits.bar_left =
1467 stream->public.timing.h_border_left;
1468 info_frame.avi_info_packet.info_packet_hdmi.bits.bar_right =
1469 (stream->public.timing.h_total
1470 - stream->public.timing.h_border_right + 1);
1471
1472 /* check_sum - Calculate AFMT_AVI_INFO0 ~ AFMT_AVI_INFO3 */
1473 check_sum =
1474 &info_frame.
1475 avi_info_packet.info_packet_hdmi.packet_raw_data.sb[0];
1476 *check_sum = INFO_FRAME_AVI + INFO_FRAME_SIZE_AVI
1477 + INFO_FRAME_VERSION_2;
1478
1479 for (byte_index = 1; byte_index <= INFO_FRAME_SIZE_AVI; byte_index++)
1480 *check_sum += info_frame.avi_info_packet.info_packet_hdmi.
1481 packet_raw_data.sb[byte_index];
1482
1483 /* one byte complement */
1484 *check_sum = (uint8_t) (0x100 - *check_sum);
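 /* The stored byte makes header + payload sum to zero modulo 256:
 * e.g. a running sum of 0xA7 yields a checksum byte of 0x59.
 */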
1485
1486 /* Store in hw_path_mode */
1487 info_packet->hb0 =
1488 info_frame.avi_info_packet.info_packet_hdmi.packet_raw_data.hb0;
1489 info_packet->hb1 =
1490 info_frame.avi_info_packet.info_packet_hdmi.packet_raw_data.hb1;
1491 info_packet->hb2 =
1492 info_frame.avi_info_packet.info_packet_hdmi.packet_raw_data.hb2;
1493
 1494 for (byte_index = 0; byte_index < sizeof(info_frame.avi_info_packet.
 1495 info_packet_hdmi.packet_raw_data.sb); byte_index++)
 1496 info_packet->sb[byte_index] = info_frame.avi_info_packet.
 1497 info_packet_hdmi.packet_raw_data.sb[byte_index];
1498
1499 info_packet->valid = true;
1500}
1501
1502static void set_vendor_info_packet(struct core_stream *stream,
1503 struct hw_info_packet *info_packet)
1504{
1505 uint32_t length = 0;
1506 bool hdmi_vic_mode = false;
1507 uint8_t checksum = 0;
1508 uint32_t i = 0;
1509 enum dc_timing_3d_format format;
1510
1511 ASSERT_CRITICAL(stream != NULL);
1512 ASSERT_CRITICAL(info_packet != NULL);
1513
1514 format = stream->public.timing.timing_3d_format;
1515
1516 /* Can be different depending on packet content */
1517 length = 5;
1518
1519 if (stream->public.timing.hdmi_vic != 0
1520 && stream->public.timing.h_total >= 3840
1521 && stream->public.timing.v_total >= 2160)
1522 hdmi_vic_mode = true;
1523
1524 /* According to HDMI 1.4a CTS, VSIF should be sent
1525 * for both 3D stereo and HDMI VIC modes.
1526 * For all other modes, there is no VSIF sent. */
1527
1528 if (format == TIMING_3D_FORMAT_NONE && !hdmi_vic_mode)
1529 return;
1530
1531 /* 24bit IEEE Registration identifier (0x000c03). LSB first. */
1532 info_packet->sb[1] = 0x03;
1533 info_packet->sb[2] = 0x0C;
1534 info_packet->sb[3] = 0x00;
1535
 1536 /* PB4: 5 lower bits = 0 (reserved). 3 higher bits = HDMI_Video_Format.
1537 * The value for HDMI_Video_Format are:
1538 * 0x0 (0b000) - No additional HDMI video format is presented in this
1539 * packet
1540 * 0x1 (0b001) - Extended resolution format present. 1 byte of HDMI_VIC
1541 * parameter follows
1542 * 0x2 (0b010) - 3D format indication present. 3D_Structure and
1543 * potentially 3D_Ext_Data follows
1544 * 0x3..0x7 (0b011..0b111) - reserved for future use */
1545 if (format != TIMING_3D_FORMAT_NONE)
1546 info_packet->sb[4] = (2 << 5);
1547 else if (hdmi_vic_mode)
1548 info_packet->sb[4] = (1 << 5);
1549
1550 /* PB5: If PB4 claims 3D timing (HDMI_Video_Format = 0x2):
 1551 * 4 lower bits = 0 (reserved). 4 higher bits = 3D_Structure.
1552 * The value for 3D_Structure are:
1553 * 0x0 - Frame Packing
1554 * 0x1 - Field Alternative
1555 * 0x2 - Line Alternative
1556 * 0x3 - Side-by-Side (full)
1557 * 0x4 - L + depth
1558 * 0x5 - L + depth + graphics + graphics-depth
1559 * 0x6 - Top-and-Bottom
1560 * 0x7 - Reserved for future use
1561 * 0x8 - Side-by-Side (Half)
1562 * 0x9..0xE - Reserved for future use
1563 * 0xF - Not used */
1564 switch (format) {
1565 case TIMING_3D_FORMAT_HW_FRAME_PACKING:
1566 case TIMING_3D_FORMAT_SW_FRAME_PACKING:
1567 info_packet->sb[5] = (0x0 << 4);
1568 break;
1569
1570 case TIMING_3D_FORMAT_SIDE_BY_SIDE:
1571 case TIMING_3D_FORMAT_SBS_SW_PACKED:
1572 info_packet->sb[5] = (0x8 << 4);
1573 length = 6;
1574 break;
1575
1576 case TIMING_3D_FORMAT_TOP_AND_BOTTOM:
1577 case TIMING_3D_FORMAT_TB_SW_PACKED:
1578 info_packet->sb[5] = (0x6 << 4);
1579 break;
1580
1581 default:
1582 break;
1583 }
1584
1585 /*PB5: If PB4 is set to 0x1 (extended resolution format)
1586 * fill PB5 with the correct HDMI VIC code */
1587 if (hdmi_vic_mode)
1588 info_packet->sb[5] = stream->public.timing.hdmi_vic;
1589
1590 /* Header */
1591 info_packet->hb0 = 0x81; /* VSIF packet type. */
1592 info_packet->hb1 = 0x01; /* Version */
1593
1594 /* 4 lower bits = Length, 4 higher bits = 0 (reserved) */
1595 info_packet->hb2 = (uint8_t) (length);
1596
1597 /* Calculate checksum */
1598 checksum = 0;
1599 checksum += info_packet->hb0;
1600 checksum += info_packet->hb1;
1601 checksum += info_packet->hb2;
1602
1603 for (i = 1; i <= length; i++)
1604 checksum += info_packet->sb[i];
1605
1606 info_packet->sb[0] = (uint8_t) (0x100 - checksum);
1607
1608 info_packet->valid = true;
1609}
1610
1611static void set_spd_info_packet(struct core_stream *stream,
1612 struct hw_info_packet *info_packet)
1613{
1614 /* SPD info packet for FreeSync */
1615
1616 unsigned char checksum = 0;
1617 unsigned int idx, payload_size = 0;
1618
1619 /* Check if Freesync is supported. Return if false. If true,
1620 * set the corresponding bit in the info packet
1621 */
1622 if (stream->public.freesync_ctx.supported == false)
1623 return;
1624
1625 if (dc_is_hdmi_signal(stream->signal)) {
1626
1627 /* HEADER */
1628
1629 /* HB0 = Packet Type = 0x83 (Source Product
1630 * Descriptor InfoFrame)
1631 */
1632 info_packet->hb0 = 0x83;
1633
1634 /* HB1 = Version = 0x01 */
1635 info_packet->hb1 = 0x01;
1636
1637 /* HB2 = [Bits 7:5 = 0] [Bits 4:0 = Length = 0x08] */
1638 info_packet->hb2 = 0x08;
1639
1640 payload_size = 0x08;
1641
1642 } else if (dc_is_dp_signal(stream->signal)) {
1643
1644 /* HEADER */
1645
1646 /* HB0 = Secondary-data Packet ID = 0 - Only non-zero
1647 * when used to associate audio related info packets
1648 */
1649 info_packet->hb0 = 0x00;
1650
1651 /* HB1 = Packet Type = 0x83 (Source Product
1652 * Descriptor InfoFrame)
1653 */
1654 info_packet->hb1 = 0x83;
1655
1656 /* HB2 = [Bits 7:0 = Least significant eight bits -
1657 * For INFOFRAME, the value must be 1Bh]
1658 */
1659 info_packet->hb2 = 0x1B;
1660
1661 /* HB3 = [Bits 7:2 = INFOFRAME SDP Version Number = 0x1]
1662 * [Bits 1:0 = Most significant two bits = 0x00]
1663 */
1664 info_packet->hb3 = 0x04;
1665
1666 payload_size = 0x1B;
1667 }
1668
1669 /* PB1 = 0x1A (24bit AMD IEEE OUI (0x00001A) - Byte 0) */
1670 info_packet->sb[1] = 0x1A;
1671
1672 /* PB2 = 0x00 (24bit AMD IEEE OUI (0x00001A) - Byte 1) */
1673 info_packet->sb[2] = 0x00;
1674
1675 /* PB3 = 0x00 (24bit AMD IEEE OUI (0x00001A) - Byte 2) */
1676 info_packet->sb[3] = 0x00;
1677
1678 /* PB4 = Reserved */
1679 info_packet->sb[4] = 0x00;
1680
1681 /* PB5 = Reserved */
1682 info_packet->sb[5] = 0x00;
1683
1684 /* PB6 = [Bits 7:3 = Reserved] */
1685 info_packet->sb[6] = 0x00;
1686
1687 if (stream->public.freesync_ctx.supported == true)
1688 /* PB6 = [Bit 0 = FreeSync Supported] */
1689 info_packet->sb[6] |= 0x01;
1690
1691 if (stream->public.freesync_ctx.enabled == true)
1692 /* PB6 = [Bit 1 = FreeSync Enabled] */
1693 info_packet->sb[6] |= 0x02;
1694
1695 if (stream->public.freesync_ctx.active == true)
1696 /* PB6 = [Bit 2 = FreeSync Active] */
1697 info_packet->sb[6] |= 0x04;
1698
1699 /* PB7 = FreeSync Minimum refresh rate (Hz) */
1700 info_packet->sb[7] = (unsigned char) (stream->public.freesync_ctx.
1701 min_refresh_in_micro_hz / 1000000);
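 /* e.g. a min_refresh_in_micro_hz of 48000000 encodes as 48 (Hz) here */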
1702
1703 /* PB8 = FreeSync Maximum refresh rate (Hz)
1704 *
1705 * Note: We do not use the maximum capable refresh rate
1706 * of the panel, because we should never go above the field
1707 * rate of the mode timing set.
1708 */
1709 info_packet->sb[8] = (unsigned char) (stream->public.freesync_ctx.
1710 nominal_refresh_in_micro_hz / 1000000);
1711
1712 /* PB9 - PB27 = Reserved */
1713 for (idx = 9; idx <= 27; idx++)
1714 info_packet->sb[idx] = 0x00;
1715
1716 /* Calculate checksum */
1717 checksum += info_packet->hb0;
1718 checksum += info_packet->hb1;
1719 checksum += info_packet->hb2;
1720 checksum += info_packet->hb3;
1721
1722 for (idx = 1; idx <= payload_size; idx++)
1723 checksum += info_packet->sb[idx];
1724
1725 /* PB0 = Checksum (one byte complement) */
1726 info_packet->sb[0] = (unsigned char) (0x100 - checksum);
1727
1728 info_packet->valid = true;
1729}
1730
1731static void set_hdr_static_info_packet(
1732 struct core_surface *surface,
1733 struct core_stream *stream,
1734 struct hw_info_packet *info_packet)
1735{
 1736 uint16_t i = 0;
 1737 enum signal_type signal = stream->signal;
 1738 struct dc_hdr_static_metadata hdr_metadata;
 1739 uint32_t data;
1740
1741 if (!surface)
1742 return;
1743
 1744 hdr_metadata = surface->public.hdr_static_ctx;
 1745
1746 if (!hdr_metadata.is_hdr)
1747 return;
1748
1749 if (dc_is_hdmi_signal(signal)) {
1750 info_packet->valid = true;
1751
1752 info_packet->hb0 = 0x87;
1753 info_packet->hb1 = 0x01;
1754 info_packet->hb2 = 0x1A;
1755 i = 1;
1756 } else if (dc_is_dp_signal(signal)) {
1757 info_packet->valid = true;
1758
1759 info_packet->hb0 = 0x00;
1760 info_packet->hb1 = 0x87;
1761 info_packet->hb2 = 0x1D;
1762 info_packet->hb3 = (0x13 << 2);
1763 i = 2;
1764 }
1765
1766 data = hdr_metadata.is_hdr;
1767 info_packet->sb[i++] = data ? 0x02 : 0x00;
1768 info_packet->sb[i++] = 0x00;
1769
1770 data = hdr_metadata.chromaticity_green_x / 2;
1771 info_packet->sb[i++] = data & 0xFF;
1772 info_packet->sb[i++] = (data & 0xFF00) >> 8;
1773
1774 data = hdr_metadata.chromaticity_green_y / 2;
1775 info_packet->sb[i++] = data & 0xFF;
1776 info_packet->sb[i++] = (data & 0xFF00) >> 8;
1777
1778 data = hdr_metadata.chromaticity_blue_x / 2;
1779 info_packet->sb[i++] = data & 0xFF;
1780 info_packet->sb[i++] = (data & 0xFF00) >> 8;
1781
1782 data = hdr_metadata.chromaticity_blue_y / 2;
1783 info_packet->sb[i++] = data & 0xFF;
1784 info_packet->sb[i++] = (data & 0xFF00) >> 8;
1785
1786 data = hdr_metadata.chromaticity_red_x / 2;
1787 info_packet->sb[i++] = data & 0xFF;
1788 info_packet->sb[i++] = (data & 0xFF00) >> 8;
1789
1790 data = hdr_metadata.chromaticity_red_y / 2;
1791 info_packet->sb[i++] = data & 0xFF;
1792 info_packet->sb[i++] = (data & 0xFF00) >> 8;
1793
1794 data = hdr_metadata.chromaticity_white_point_x / 2;
1795 info_packet->sb[i++] = data & 0xFF;
1796 info_packet->sb[i++] = (data & 0xFF00) >> 8;
1797
1798 data = hdr_metadata.chromaticity_white_point_y / 2;
1799 info_packet->sb[i++] = data & 0xFF;
1800 info_packet->sb[i++] = (data & 0xFF00) >> 8;
1801
1802 data = hdr_metadata.max_luminance;
1803 info_packet->sb[i++] = data & 0xFF;
1804 info_packet->sb[i++] = (data & 0xFF00) >> 8;
1805
1806 data = hdr_metadata.min_luminance;
1807 info_packet->sb[i++] = data & 0xFF;
1808 info_packet->sb[i++] = (data & 0xFF00) >> 8;
1809
1810 data = hdr_metadata.maximum_content_light_level;
1811 info_packet->sb[i++] = data & 0xFF;
1812 info_packet->sb[i++] = (data & 0xFF00) >> 8;
1813
1814 data = hdr_metadata.maximum_frame_average_light_level;
1815 info_packet->sb[i++] = data & 0xFF;
1816 info_packet->sb[i++] = (data & 0xFF00) >> 8;
1817
1818 if (dc_is_hdmi_signal(signal)) {
1819 uint32_t checksum = 0;
1820
1821 checksum += info_packet->hb0;
1822 checksum += info_packet->hb1;
1823 checksum += info_packet->hb2;
1824
1825 for (i = 1; i <= info_packet->hb2; i++)
1826 checksum += info_packet->sb[i];
1827
1828 info_packet->sb[0] = 0x100 - checksum;
1829 } else if (dc_is_dp_signal(signal)) {
1830 info_packet->sb[0] = 0x01;
1831 info_packet->sb[1] = 0x1A;
1832 }
1833}
1834
1835static void set_vsc_info_packet(struct core_stream *stream,
1836 struct hw_info_packet *info_packet)
1837{
1838 unsigned int vscPacketRevision = 0;
1839 unsigned int i;
1840
1841 if (stream->sink->link->public.psr_caps.psr_version != 0) {
1842 vscPacketRevision = 2;
1843 }
1844
1845 /* VSC packet not needed based on the features
1846 * supported by this DP display
1847 */
1848 if (vscPacketRevision == 0)
1849 return;
1850
1851 if (vscPacketRevision == 0x2) {
1852 /* Secondary-data Packet ID = 0*/
1853 info_packet->hb0 = 0x00;
1854 /* 07h - Packet Type Value indicating Video
1855 * Stream Configuration packet
1856 */
1857 info_packet->hb1 = 0x07;
1858 /* 02h = VSC SDP supporting 3D stereo and PSR
1859 * (applies to eDP v1.3 or higher).
1860 */
1861 info_packet->hb2 = 0x02;
1862 /* 08h = VSC packet supporting 3D stereo + PSR
1863 * (HB2 = 02h).
1864 */
1865 info_packet->hb3 = 0x08;
1866
1867 for (i = 0; i < 28; i++)
1868 info_packet->sb[i] = 0;
1869
1870 info_packet->valid = true;
1871 }
1872
1873 /*TODO: stereo 3D support and extend pixel encoding colorimetry*/
1874}
1875
1876void resource_validate_ctx_destruct(struct validate_context *context)
1877{
1878 int i, j;
1879
1880 for (i = 0; i < context->target_count; i++) {
1881 for (j = 0; j < context->target_status[i].surface_count; j++)
1882 dc_surface_release(
1883 context->target_status[i].surfaces[j]);
1884
1885 context->target_status[i].surface_count = 0;
1886 dc_target_release(&context->targets[i]->public);
1887 }
1888}
1889
1890/*
1891 * Copy src_ctx into dst_ctx and retain all surfaces and targets referenced
1892 * by the src_ctx
1893 */
1894void resource_validate_ctx_copy_construct(
1895 const struct validate_context *src_ctx,
1896 struct validate_context *dst_ctx)
1897{
1898 int i, j;
1899
1900 *dst_ctx = *src_ctx;
1901
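 /* The struct assignment above is a shallow copy, so top_pipe and
 * bottom_pipe still point into src_ctx's pipe_ctx array; re-point them
 * into dst_ctx using the stored pipe_idx.
 */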
1902 for (i = 0; i < dst_ctx->res_ctx.pool->pipe_count; i++) {
1903 struct pipe_ctx *cur_pipe = &dst_ctx->res_ctx.pipe_ctx[i];
1904
1905 if (cur_pipe->top_pipe)
1906 cur_pipe->top_pipe = &dst_ctx->res_ctx.pipe_ctx[cur_pipe->top_pipe->pipe_idx];
1907
1908 if (cur_pipe->bottom_pipe)
1909 cur_pipe->bottom_pipe = &dst_ctx->res_ctx.pipe_ctx[cur_pipe->bottom_pipe->pipe_idx];
1910
1911 }
1912
1913 for (i = 0; i < dst_ctx->target_count; i++) {
1914 dc_target_retain(&dst_ctx->targets[i]->public);
1915 for (j = 0; j < dst_ctx->target_status[i].surface_count; j++)
1916 dc_surface_retain(
1917 dst_ctx->target_status[i].surfaces[j]);
1918 }
1919}
1920
1921struct clock_source *dc_resource_find_first_free_pll(
1922 struct resource_context *res_ctx)
1923{
1924 int i;
1925
1926 for (i = 0; i < res_ctx->pool->clk_src_count; ++i) {
1927 if (res_ctx->clock_source_ref_count[i] == 0)
1928 return res_ctx->pool->clock_sources[i];
1929 }
1930
1931 return NULL;
1932}
1933
1934void resource_build_info_frame(struct pipe_ctx *pipe_ctx)
1935{
1936 enum signal_type signal = SIGNAL_TYPE_NONE;
1937 struct hw_info_frame info_frame = { { 0 } };
1938
1939 /* default all packets to invalid */
1940 info_frame.avi_info_packet.valid = false;
1941 info_frame.gamut_packet.valid = false;
1942 info_frame.vendor_info_packet.valid = false;
1943 info_frame.spd_packet.valid = false;
1944 info_frame.vsc_packet.valid = false;
 1945 info_frame.hdrsmd_packet.valid = false;
1946
1947 signal = pipe_ctx->stream->signal;
1948
 1949 /* HDMI and DP have different info packets */
1950 if (dc_is_hdmi_signal(signal)) {
1951 set_avi_info_frame(
1952 &info_frame.avi_info_packet, pipe_ctx);
1953 set_vendor_info_packet(
1954 pipe_ctx->stream, &info_frame.vendor_info_packet);
1955 set_spd_info_packet(pipe_ctx->stream, &info_frame.spd_packet);
1956 set_hdr_static_info_packet(pipe_ctx->surface,
1957 pipe_ctx->stream, &info_frame.hdrsmd_packet);
 1958 } else if (dc_is_dp_signal(signal)) {
1959 set_vsc_info_packet(pipe_ctx->stream, &info_frame.vsc_packet);
1960 set_spd_info_packet(pipe_ctx->stream, &info_frame.spd_packet);
1961 set_hdr_static_info_packet(pipe_ctx->surface,
1962 pipe_ctx->stream, &info_frame.hdrsmd_packet);
 1963 }
1964
1965 translate_info_frame(&info_frame,
1966 &pipe_ctx->encoder_info_frame);
1967}
1968
1969enum dc_status resource_map_clock_resources(
1970 const struct core_dc *dc,
1971 struct validate_context *context)
1972{
1973 int i, j, k;
1974
1975 /* acquire new resources */
1976 for (i = 0; i < context->target_count; i++) {
1977 struct core_target *target = context->targets[i];
1978
1979 for (j = 0; j < target->public.stream_count; j++) {
1980 struct core_stream *stream =
1981 DC_STREAM_TO_CORE(target->public.streams[j]);
1982
1983 if (resource_is_stream_unchanged(dc->current_context, stream))
1984 continue;
1985
1986 for (k = 0; k < MAX_PIPES; k++) {
1987 struct pipe_ctx *pipe_ctx =
1988 &context->res_ctx.pipe_ctx[k];
1989
1990 if (context->res_ctx.pipe_ctx[k].stream != stream)
1991 continue;
1992
1993 if (dc_is_dp_signal(pipe_ctx->stream->signal)
1994 || pipe_ctx->stream->signal == SIGNAL_TYPE_VIRTUAL)
1995 pipe_ctx->clock_source =
1996 context->res_ctx.pool->dp_clock_source;
1997 else {
1998 pipe_ctx->clock_source = NULL;
1999
2000 if (!dc->public.config.disable_disp_pll_sharing)
 2001 pipe_ctx->clock_source = resource_find_used_clk_src_for_sharing(
2002 &context->res_ctx,
2003 pipe_ctx);
2004
2005 if (pipe_ctx->clock_source == NULL)
2006 pipe_ctx->clock_source =
2007 dc_resource_find_first_free_pll(&context->res_ctx);
2008 }
2009
2010 if (pipe_ctx->clock_source == NULL)
2011 return DC_NO_CLOCK_SOURCE_RESOURCE;
2012
2013 resource_reference_clock_source(
2014 &context->res_ctx,
2015 pipe_ctx->clock_source);
2016
2017 /* only one cs per stream regardless of mpo */
2018 break;
2019 }
2020 }
2021 }
2022
2023 return DC_OK;
2024}
2025
2026/*
 2027 * Note: We need to disable the output if the clock source changes,
 2028 * since the BIOS performs an optimization and does not apply the new
 2029 * setting when reprogramming a PHY that is not already disabled.
2030 */
2031bool pipe_need_reprogram(
2032 struct pipe_ctx *pipe_ctx_old,
2033 struct pipe_ctx *pipe_ctx)
2034{
2035 if (pipe_ctx_old->stream->sink != pipe_ctx->stream->sink)
2036 return true;
2037
2038 if (pipe_ctx_old->stream->signal != pipe_ctx->stream->signal)
2039 return true;
2040
2041 if (pipe_ctx_old->audio != pipe_ctx->audio)
2042 return true;
2043
2044 if (pipe_ctx_old->clock_source != pipe_ctx->clock_source
2045 && pipe_ctx_old->stream != pipe_ctx->stream)
2046 return true;
2047
2048 if (pipe_ctx_old->stream_enc != pipe_ctx->stream_enc)
2049 return true;
2050
2051 if (is_timing_changed(pipe_ctx_old->stream, pipe_ctx->stream))
2052 return true;
2053
2054
2055 return false;
2056}