drm/amd/display: use encoder's engine id to find matched free audio device
1 /*
2 * Copyright 2012-15 Advanced Micro Devices, Inc.
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
13 *
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
21 *
22 * Authors: AMD
23 *
24 */
25 #include "dm_services.h"
26
27 #include "resource.h"
28 #include "include/irq_service_interface.h"
29 #include "link_encoder.h"
30 #include "stream_encoder.h"
31 #include "opp.h"
32 #include "timing_generator.h"
33 #include "transform.h"
34 #include "dpp.h"
35 #include "core_types.h"
36 #include "set_mode_types.h"
37 #include "virtual/virtual_stream_encoder.h"
38 #include "dpcd_defs.h"
39
40 #include "dce80/dce80_resource.h"
41 #include "dce100/dce100_resource.h"
42 #include "dce110/dce110_resource.h"
43 #include "dce112/dce112_resource.h"
44 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
45 #include "dcn10/dcn10_resource.h"
46 #endif
47 #include "dce120/dce120_resource.h"
48
49 enum dce_version resource_parse_asic_id(struct hw_asic_id asic_id)
50 {
51 enum dce_version dc_version = DCE_VERSION_UNKNOWN;
52 switch (asic_id.chip_family) {
53
54 case FAMILY_CI:
55 dc_version = DCE_VERSION_8_0;
56 break;
57 case FAMILY_KV:
58 if (ASIC_REV_IS_KALINDI(asic_id.hw_internal_rev) ||
59 ASIC_REV_IS_BHAVANI(asic_id.hw_internal_rev) ||
60 ASIC_REV_IS_GODAVARI(asic_id.hw_internal_rev))
61 dc_version = DCE_VERSION_8_3;
62 else
63 dc_version = DCE_VERSION_8_1;
64 break;
65 case FAMILY_CZ:
66 dc_version = DCE_VERSION_11_0;
67 break;
68
69 case FAMILY_VI:
70 if (ASIC_REV_IS_TONGA_P(asic_id.hw_internal_rev) ||
71 ASIC_REV_IS_FIJI_P(asic_id.hw_internal_rev)) {
72 dc_version = DCE_VERSION_10_0;
73 break;
74 }
75 if (ASIC_REV_IS_POLARIS10_P(asic_id.hw_internal_rev) ||
76 ASIC_REV_IS_POLARIS11_M(asic_id.hw_internal_rev) ||
77 ASIC_REV_IS_POLARIS12_V(asic_id.hw_internal_rev)) {
78 dc_version = DCE_VERSION_11_2;
79 }
80 break;
81 case FAMILY_AI:
82 dc_version = DCE_VERSION_12_0;
83 break;
84 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
85 case FAMILY_RV:
86 dc_version = DCN_VERSION_1_0;
87 break;
88 #endif
89 default:
90 dc_version = DCE_VERSION_UNKNOWN;
91 break;
92 }
93 return dc_version;
94 }
95
96 struct resource_pool *dc_create_resource_pool(
97 struct dc *dc,
98 int num_virtual_links,
99 enum dce_version dc_version,
100 struct hw_asic_id asic_id)
101 {
102 struct resource_pool *res_pool = NULL;
103
104 switch (dc_version) {
105 case DCE_VERSION_8_0:
106 res_pool = dce80_create_resource_pool(
107 num_virtual_links, dc);
108 break;
109 case DCE_VERSION_8_1:
110 res_pool = dce81_create_resource_pool(
111 num_virtual_links, dc);
112 break;
113 case DCE_VERSION_8_3:
114 res_pool = dce83_create_resource_pool(
115 num_virtual_links, dc);
116 break;
117 case DCE_VERSION_10_0:
118 res_pool = dce100_create_resource_pool(
119 num_virtual_links, dc);
120 break;
121 case DCE_VERSION_11_0:
122 res_pool = dce110_create_resource_pool(
123 num_virtual_links, dc, asic_id);
124 break;
125 case DCE_VERSION_11_2:
126 res_pool = dce112_create_resource_pool(
127 num_virtual_links, dc);
128 break;
129 case DCE_VERSION_12_0:
130 res_pool = dce120_create_resource_pool(
131 num_virtual_links, dc);
132 break;
133
134 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
135 case DCN_VERSION_1_0:
136 res_pool = dcn10_create_resource_pool(
137 num_virtual_links, dc);
138 break;
139 #endif
140
141
142 default:
143 break;
144 }
145 if (res_pool != NULL) {
146 struct dc_firmware_info fw_info = { { 0 } };
147
148 if (dc->ctx->dc_bios->funcs->get_firmware_info(
149 dc->ctx->dc_bios, &fw_info) == BP_RESULT_OK) {
150 res_pool->ref_clock_inKhz = fw_info.pll_info.crystal_frequency;
151 } else
152 ASSERT_CRITICAL(false);
153 }
154
155 return res_pool;
156 }
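/*
 * Illustrative call sequence (a sketch added for the reader, not code taken
 * from this file; the real caller stores the pool in dc->res_pool and does
 * full error handling):
 *
 *	enum dce_version ver = resource_parse_asic_id(asic_id);
 *	struct resource_pool *pool =
 *		dc_create_resource_pool(dc, num_virtual_links, ver, asic_id);
 *	if (pool == NULL)
 *		return false;
 *	dc->res_pool = pool;
 *	...
 *	dc_destroy_resource_pool(dc);
 */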
157
158 void dc_destroy_resource_pool(struct dc *dc)
159 {
160 if (dc) {
161 if (dc->res_pool)
162 dc->res_pool->funcs->destroy(&dc->res_pool);
163
164 kfree(dc->hwseq);
165 }
166 }
167
168 static void update_num_audio(
169 const struct resource_straps *straps,
170 unsigned int *num_audio,
171 struct audio_support *aud_support)
172 {
173 aud_support->dp_audio = true;
174 aud_support->hdmi_audio_native = false;
175 aud_support->hdmi_audio_on_dongle = false;
176
177 if (straps->hdmi_disable == 0) {
178 if (straps->dc_pinstraps_audio & 0x2) {
179 aud_support->hdmi_audio_on_dongle = true;
180 aud_support->hdmi_audio_native = true;
181 }
182 }
183
184 switch (straps->audio_stream_number) {
185 case 0: /* multi streams supported */
186 break;
187 case 1: /* multi streams not supported */
188 *num_audio = 1;
189 break;
190 default:
191 DC_ERR("DC: unexpected audio fuse!\n");
192 }
193 }
194
195 bool resource_construct(
196 unsigned int num_virtual_links,
197 struct dc *dc,
198 struct resource_pool *pool,
199 const struct resource_create_funcs *create_funcs)
200 {
201 struct dc_context *ctx = dc->ctx;
202 const struct resource_caps *caps = pool->res_cap;
203 int i;
204 unsigned int num_audio = caps->num_audio;
205 struct resource_straps straps = {0};
206
207 if (create_funcs->read_dce_straps)
208 create_funcs->read_dce_straps(dc->ctx, &straps);
209
210 pool->audio_count = 0;
211 if (create_funcs->create_audio) {
212 /* find the total number of streams available via the
213 * AZALIA_F0_CODEC_PIN_CONTROL_RESPONSE_CONFIGURATION_DEFAULT
214 * registers (one for each pin) starting from pin 1
215 * up to the max number of audio pins.
216 * We stop on the first pin where
217 * PORT_CONNECTIVITY == 1 (as instructed by HW team).
218 */
219 update_num_audio(&straps, &num_audio, &pool->audio_support);
220 for (i = 0; i < caps->num_audio; i++) {
221 struct audio *aud = create_funcs->create_audio(ctx, i);
222
223 if (aud == NULL) {
224 DC_ERR("DC: failed to create audio!\n");
225 return false;
226 }
227
228 if (!aud->funcs->endpoint_valid(aud)) {
229 aud->funcs->destroy(&aud);
230 break;
231 }
232
233 pool->audios[i] = aud;
234 pool->audio_count++;
235 }
236 }
237
238 pool->stream_enc_count = 0;
239 if (create_funcs->create_stream_encoder) {
240 for (i = 0; i < caps->num_stream_encoder; i++) {
241 pool->stream_enc[i] = create_funcs->create_stream_encoder(i, ctx);
242 if (pool->stream_enc[i] == NULL)
243 DC_ERR("DC: failed to create stream_encoder!\n");
244 pool->stream_enc_count++;
245 }
246 }
247 dc->caps.dynamic_audio = false;
248 if (pool->audio_count < pool->stream_enc_count) {
249 dc->caps.dynamic_audio = true;
250 }
251 for (i = 0; i < num_virtual_links; i++) {
252 pool->stream_enc[pool->stream_enc_count] =
253 virtual_stream_encoder_create(
254 ctx, ctx->dc_bios);
255 if (pool->stream_enc[pool->stream_enc_count] == NULL) {
256 DC_ERR("DC: failed to create stream_encoder!\n");
257 return false;
258 }
259 pool->stream_enc_count++;
260 }
261
262 dc->hwseq = create_funcs->create_hwseq(ctx);
263
264 return true;
265 }
266 static int find_matching_clock_source(
267 const struct resource_pool *pool,
268 struct clock_source *clock_source)
269 {
270
271 int i;
272
273 for (i = 0; i < pool->clk_src_count; i++) {
274 if (pool->clock_sources[i] == clock_source)
275 return i;
276 }
277 return -1;
278 }
279
280 void resource_unreference_clock_source(
281 struct resource_context *res_ctx,
282 const struct resource_pool *pool,
283 struct clock_source *clock_source)
284 {
285 int i = find_matching_clock_source(pool, clock_source);
286
287 if (i > -1)
288 res_ctx->clock_source_ref_count[i]--;
289
290 if (pool->dp_clock_source == clock_source)
291 res_ctx->dp_clock_source_ref_count--;
292 }
293
294 void resource_reference_clock_source(
295 struct resource_context *res_ctx,
296 const struct resource_pool *pool,
297 struct clock_source *clock_source)
298 {
299 int i = find_matching_clock_source(pool, clock_source);
300
301 if (i > -1)
302 res_ctx->clock_source_ref_count[i]++;
303
304 if (pool->dp_clock_source == clock_source)
305 res_ctx->dp_clock_source_ref_count++;
306 }
307
308 int resource_get_clock_source_reference(
309 struct resource_context *res_ctx,
310 const struct resource_pool *pool,
311 struct clock_source *clock_source)
312 {
313 int i = find_matching_clock_source(pool, clock_source);
314
315 if (i > -1)
316 return res_ctx->clock_source_ref_count[i];
317
318 if (pool->dp_clock_source == clock_source)
319 return res_ctx->dp_clock_source_ref_count;
320
321 return -1;
322 }
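/*
 * Note added for clarity: the three helpers above all go through
 * find_matching_clock_source(), so reference/unreference/get operate on the
 * same per-clock-source ref-count slot, while the dedicated DP clock source
 * is tracked separately in dp_clock_source_ref_count.
 */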
323
324 bool resource_are_streams_timing_synchronizable(
325 struct dc_stream_state *stream1,
326 struct dc_stream_state *stream2)
327 {
328 if (stream1->timing.h_total != stream2->timing.h_total)
329 return false;
330
331 if (stream1->timing.v_total != stream2->timing.v_total)
332 return false;
333
334 if (stream1->timing.h_addressable
335 != stream2->timing.h_addressable)
336 return false;
337
338 if (stream1->timing.v_addressable
339 != stream2->timing.v_addressable)
340 return false;
341
342 if (stream1->timing.pix_clk_khz
343 != stream2->timing.pix_clk_khz)
344 return false;
345
346 if (stream1->clamping.c_depth != stream2->clamping.c_depth)
347 return false;
348
349 if (stream1->phy_pix_clk != stream2->phy_pix_clk
350 && (!dc_is_dp_signal(stream1->signal)
351 || !dc_is_dp_signal(stream2->signal)))
352 return false;
353
354 return true;
355 }
356 static bool is_dp_and_hdmi_sharable(
357 struct dc_stream_state *stream1,
358 struct dc_stream_state *stream2)
359 {
360 if (stream1->ctx->dc->caps.disable_dp_clk_share)
361 return false;
362
363 if (stream1->clamping.c_depth != COLOR_DEPTH_888 ||
364 stream2->clamping.c_depth != COLOR_DEPTH_888)
365 return false;
366
367 return true;
368
369 }
370
371 static bool is_sharable_clk_src(
372 const struct pipe_ctx *pipe_with_clk_src,
373 const struct pipe_ctx *pipe)
374 {
375 if (pipe_with_clk_src->clock_source == NULL)
376 return false;
377
378 if (pipe_with_clk_src->stream->signal == SIGNAL_TYPE_VIRTUAL)
379 return false;
380
381 if (dc_is_dp_signal(pipe_with_clk_src->stream->signal) ||
382 (dc_is_dp_signal(pipe->stream->signal) &&
383 !is_dp_and_hdmi_sharable(pipe_with_clk_src->stream,
384 pipe->stream)))
385 return false;
386
387 if (dc_is_hdmi_signal(pipe_with_clk_src->stream->signal)
388 && dc_is_dvi_signal(pipe->stream->signal))
389 return false;
390
391 if (dc_is_hdmi_signal(pipe->stream->signal)
392 && dc_is_dvi_signal(pipe_with_clk_src->stream->signal))
393 return false;
394
395 if (!resource_are_streams_timing_synchronizable(
396 pipe_with_clk_src->stream, pipe->stream))
397 return false;
398
399 return true;
400 }
401
402 struct clock_source *resource_find_used_clk_src_for_sharing(
403 struct resource_context *res_ctx,
404 struct pipe_ctx *pipe_ctx)
405 {
406 int i;
407
408 for (i = 0; i < MAX_PIPES; i++) {
409 if (is_sharable_clk_src(&res_ctx->pipe_ctx[i], pipe_ctx))
410 return res_ctx->pipe_ctx[i].clock_source;
411 }
412
413 return NULL;
414 }
415
416 static enum pixel_format convert_pixel_format_to_dalsurface(
417 enum surface_pixel_format surface_pixel_format)
418 {
419 enum pixel_format dal_pixel_format = PIXEL_FORMAT_UNKNOWN;
420
421 switch (surface_pixel_format) {
422 case SURFACE_PIXEL_FORMAT_GRPH_PALETA_256_COLORS:
423 dal_pixel_format = PIXEL_FORMAT_INDEX8;
424 break;
425 case SURFACE_PIXEL_FORMAT_GRPH_ARGB1555:
426 dal_pixel_format = PIXEL_FORMAT_RGB565;
427 break;
428 case SURFACE_PIXEL_FORMAT_GRPH_RGB565:
429 dal_pixel_format = PIXEL_FORMAT_RGB565;
430 break;
431 case SURFACE_PIXEL_FORMAT_GRPH_ARGB8888:
432 dal_pixel_format = PIXEL_FORMAT_ARGB8888;
433 break;
434 case SURFACE_PIXEL_FORMAT_GRPH_ABGR8888:
435 dal_pixel_format = PIXEL_FORMAT_ARGB8888;
436 break;
437 case SURFACE_PIXEL_FORMAT_GRPH_ARGB2101010:
438 dal_pixel_format = PIXEL_FORMAT_ARGB2101010;
439 break;
440 case SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010:
441 dal_pixel_format = PIXEL_FORMAT_ARGB2101010;
442 break;
443 case SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010_XR_BIAS:
444 dal_pixel_format = PIXEL_FORMAT_ARGB2101010_XRBIAS;
445 break;
446 case SURFACE_PIXEL_FORMAT_GRPH_ABGR16161616F:
447 case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616F:
448 dal_pixel_format = PIXEL_FORMAT_FP16;
449 break;
450 case SURFACE_PIXEL_FORMAT_VIDEO_420_YCbCr:
451 case SURFACE_PIXEL_FORMAT_VIDEO_420_YCrCb:
452 dal_pixel_format = PIXEL_FORMAT_420BPP8;
453 break;
454 case SURFACE_PIXEL_FORMAT_VIDEO_420_10bpc_YCbCr:
455 case SURFACE_PIXEL_FORMAT_VIDEO_420_10bpc_YCrCb:
456 dal_pixel_format = PIXEL_FORMAT_420BPP10;
457 break;
458 case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616:
459 default:
460 dal_pixel_format = PIXEL_FORMAT_UNKNOWN;
461 break;
462 }
463 return dal_pixel_format;
464 }
465
466 static void rect_swap_helper(struct rect *rect)
467 {
468 uint32_t temp = 0;
469
470 temp = rect->height;
471 rect->height = rect->width;
472 rect->width = temp;
473
474 temp = rect->x;
475 rect->x = rect->y;
476 rect->y = temp;
477 }
478
479 static void calculate_viewport(struct pipe_ctx *pipe_ctx)
480 {
481 const struct dc_plane_state *plane_state = pipe_ctx->plane_state;
482 const struct dc_stream_state *stream = pipe_ctx->stream;
483 struct scaler_data *data = &pipe_ctx->plane_res.scl_data;
484 struct rect surf_src = plane_state->src_rect;
485 struct rect clip = { 0 };
486 int vpc_div = (data->format == PIXEL_FORMAT_420BPP8
487 || data->format == PIXEL_FORMAT_420BPP10) ? 2 : 1;
488 bool pri_split = pipe_ctx->bottom_pipe &&
489 pipe_ctx->bottom_pipe->plane_state == pipe_ctx->plane_state;
490 bool sec_split = pipe_ctx->top_pipe &&
491 pipe_ctx->top_pipe->plane_state == pipe_ctx->plane_state;
492
493 if (stream->view_format == VIEW_3D_FORMAT_SIDE_BY_SIDE ||
494 stream->view_format == VIEW_3D_FORMAT_TOP_AND_BOTTOM) {
495 pri_split = false;
496 sec_split = false;
497 }
498
499 if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
500 pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270)
501 rect_swap_helper(&surf_src);
502
503 /* The actual clip is an intersection between stream
504 * source and surface clip
505 */
506 clip.x = stream->src.x > plane_state->clip_rect.x ?
507 stream->src.x : plane_state->clip_rect.x;
508
509 clip.width = stream->src.x + stream->src.width <
510 plane_state->clip_rect.x + plane_state->clip_rect.width ?
511 stream->src.x + stream->src.width - clip.x :
512 plane_state->clip_rect.x + plane_state->clip_rect.width - clip.x ;
513
514 clip.y = stream->src.y > plane_state->clip_rect.y ?
515 stream->src.y : plane_state->clip_rect.y;
516
517 clip.height = stream->src.y + stream->src.height <
518 plane_state->clip_rect.y + plane_state->clip_rect.height ?
519 stream->src.y + stream->src.height - clip.y :
520 plane_state->clip_rect.y + plane_state->clip_rect.height - clip.y ;
521
522 /* offset = surf_src.ofs + (clip.ofs - surface->dst_rect.ofs) * scl_ratio
523 * num_pixels = clip.num_pix * scl_ratio
524 */
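/* Worked example with illustrative numbers (not from the source): with
 * surf_src.width = 1920, dst_rect.width = 960 (a 2:1 downscale) and
 * clip.x - dst_rect.x = 100, viewport.x = surf_src.x + 100 * 1920 / 960 =
 * surf_src.x + 200 and viewport.width = clip.width * 2.
 */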
525 data->viewport.x = surf_src.x + (clip.x - plane_state->dst_rect.x) *
526 surf_src.width / plane_state->dst_rect.width;
527 data->viewport.width = clip.width *
528 surf_src.width / plane_state->dst_rect.width;
529
530 data->viewport.y = surf_src.y + (clip.y - plane_state->dst_rect.y) *
531 surf_src.height / plane_state->dst_rect.height;
532 data->viewport.height = clip.height *
533 surf_src.height / plane_state->dst_rect.height;
534
535 /* Round down, compensate in init */
536 data->viewport_c.x = data->viewport.x / vpc_div;
537 data->viewport_c.y = data->viewport.y / vpc_div;
538 data->inits.h_c = (data->viewport.x % vpc_div) != 0 ?
539 dal_fixed31_32_half : dal_fixed31_32_zero;
540 data->inits.v_c = (data->viewport.y % vpc_div) != 0 ?
541 dal_fixed31_32_half : dal_fixed31_32_zero;
542 /* Round up, assume the original video size always has even dimensions */
543 data->viewport_c.width = (data->viewport.width + vpc_div - 1) / vpc_div;
544 data->viewport_c.height = (data->viewport.height + vpc_div - 1) / vpc_div;
545
546 /* Handle hsplit */
547 if (pri_split || sec_split) {
548 /* HMirror XOR Secondary_pipe XOR Rotation_180 */
549 bool right_view = (sec_split != plane_state->horizontal_mirror) !=
550 (plane_state->rotation == ROTATION_ANGLE_180);
551
552 if (plane_state->rotation == ROTATION_ANGLE_90
553 || plane_state->rotation == ROTATION_ANGLE_270)
554 /* Secondary_pipe XOR Rotation_270 */
555 right_view = (plane_state->rotation == ROTATION_ANGLE_270) != sec_split;
556
557 if (right_view) {
558 data->viewport.x += data->viewport.width / 2;
559 data->viewport_c.x += data->viewport_c.width / 2;
560 /* Ceil offset pipe */
561 data->viewport.width = (data->viewport.width + 1) / 2;
562 data->viewport_c.width = (data->viewport_c.width + 1) / 2;
563 } else {
564 data->viewport.width /= 2;
565 data->viewport_c.width /= 2;
566 }
567 }
568
569 if (plane_state->rotation == ROTATION_ANGLE_90 ||
570 plane_state->rotation == ROTATION_ANGLE_270) {
571 rect_swap_helper(&data->viewport_c);
572 rect_swap_helper(&data->viewport);
573 }
574 }
575
576 static void calculate_recout(struct pipe_ctx *pipe_ctx, struct view *recout_skip)
577 {
578 const struct dc_plane_state *plane_state = pipe_ctx->plane_state;
579 const struct dc_stream_state *stream = pipe_ctx->stream;
580 struct rect surf_src = plane_state->src_rect;
581 struct rect surf_clip = plane_state->clip_rect;
582 int recout_full_x, recout_full_y;
583
584 if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
585 pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270)
586 rect_swap_helper(&surf_src);
587
588 pipe_ctx->plane_res.scl_data.recout.x = stream->dst.x;
589 if (stream->src.x < surf_clip.x)
590 pipe_ctx->plane_res.scl_data.recout.x += (surf_clip.x
591 - stream->src.x) * stream->dst.width
592 / stream->src.width;
593
594 pipe_ctx->plane_res.scl_data.recout.width = surf_clip.width *
595 stream->dst.width / stream->src.width;
596 if (pipe_ctx->plane_res.scl_data.recout.width + pipe_ctx->plane_res.scl_data.recout.x >
597 stream->dst.x + stream->dst.width)
598 pipe_ctx->plane_res.scl_data.recout.width =
599 stream->dst.x + stream->dst.width
600 - pipe_ctx->plane_res.scl_data.recout.x;
601
602 pipe_ctx->plane_res.scl_data.recout.y = stream->dst.y;
603 if (stream->src.y < surf_clip.y)
604 pipe_ctx->plane_res.scl_data.recout.y += (surf_clip.y
605 - stream->src.y) * stream->dst.height
606 / stream->src.height;
607
608 pipe_ctx->plane_res.scl_data.recout.height = surf_clip.height *
609 stream->dst.height / stream->src.height;
610 if (pipe_ctx->plane_res.scl_data.recout.height + pipe_ctx->plane_res.scl_data.recout.y >
611 stream->dst.y + stream->dst.height)
612 pipe_ctx->plane_res.scl_data.recout.height =
613 stream->dst.y + stream->dst.height
614 - pipe_ctx->plane_res.scl_data.recout.y;
615
616 /* Handle h & vsplit */
617 if (pipe_ctx->top_pipe && pipe_ctx->top_pipe->plane_state ==
618 pipe_ctx->plane_state) {
619 if (stream->view_format == VIEW_3D_FORMAT_TOP_AND_BOTTOM) {
620 pipe_ctx->plane_res.scl_data.recout.y += pipe_ctx->plane_res.scl_data.recout.height / 2;
621 /* Floor primary pipe, ceil 2ndary pipe */
622 pipe_ctx->plane_res.scl_data.recout.height = (pipe_ctx->plane_res.scl_data.recout.height + 1) / 2;
623 } else {
624 pipe_ctx->plane_res.scl_data.recout.x += pipe_ctx->plane_res.scl_data.recout.width / 2;
625 pipe_ctx->plane_res.scl_data.recout.width = (pipe_ctx->plane_res.scl_data.recout.width + 1) / 2;
626 }
627 } else if (pipe_ctx->bottom_pipe &&
628 pipe_ctx->bottom_pipe->plane_state == pipe_ctx->plane_state) {
629 if (stream->view_format == VIEW_3D_FORMAT_TOP_AND_BOTTOM)
630 pipe_ctx->plane_res.scl_data.recout.height /= 2;
631 else
632 pipe_ctx->plane_res.scl_data.recout.width /= 2;
633 }
634
635 /* Unclipped recout offset = stream dst offset
636 * + (surface dst offset - stream src offset) / stream scaling ratio
637 * - surface src offset / full (surface * stream) scaling ratio
638 */
639 recout_full_x = stream->dst.x + (plane_state->dst_rect.x - stream->src.x)
640 * stream->dst.width / stream->src.width -
641 surf_src.x * plane_state->dst_rect.width / surf_src.width
642 * stream->dst.width / stream->src.width;
643 recout_full_y = stream->dst.y + (plane_state->dst_rect.y - stream->src.y)
644 * stream->dst.height / stream->src.height -
645 surf_src.y * plane_state->dst_rect.height / surf_src.height
646 * stream->dst.height / stream->src.height;
647
648 recout_skip->width = pipe_ctx->plane_res.scl_data.recout.x - recout_full_x;
649 recout_skip->height = pipe_ctx->plane_res.scl_data.recout.y - recout_full_y;
650 }
651
652 static void calculate_scaling_ratios(struct pipe_ctx *pipe_ctx)
653 {
654 const struct dc_plane_state *plane_state = pipe_ctx->plane_state;
655 const struct dc_stream_state *stream = pipe_ctx->stream;
656 struct rect surf_src = plane_state->src_rect;
657 const int in_w = stream->src.width;
658 const int in_h = stream->src.height;
659 const int out_w = stream->dst.width;
660 const int out_h = stream->dst.height;
661
662 if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
663 pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270)
664 rect_swap_helper(&surf_src);
665
666 pipe_ctx->plane_res.scl_data.ratios.horz = dal_fixed31_32_from_fraction(
667 surf_src.width,
668 plane_state->dst_rect.width);
669 pipe_ctx->plane_res.scl_data.ratios.vert = dal_fixed31_32_from_fraction(
670 surf_src.height,
671 plane_state->dst_rect.height);
672
673 if (stream->view_format == VIEW_3D_FORMAT_SIDE_BY_SIDE)
674 pipe_ctx->plane_res.scl_data.ratios.horz.value *= 2;
675 else if (stream->view_format == VIEW_3D_FORMAT_TOP_AND_BOTTOM)
676 pipe_ctx->plane_res.scl_data.ratios.vert.value *= 2;
677
678 pipe_ctx->plane_res.scl_data.ratios.vert.value = div64_s64(
679 pipe_ctx->plane_res.scl_data.ratios.vert.value * in_h, out_h);
680 pipe_ctx->plane_res.scl_data.ratios.horz.value = div64_s64(
681 pipe_ctx->plane_res.scl_data.ratios.horz.value * in_w, out_w);
682
683 pipe_ctx->plane_res.scl_data.ratios.horz_c = pipe_ctx->plane_res.scl_data.ratios.horz;
684 pipe_ctx->plane_res.scl_data.ratios.vert_c = pipe_ctx->plane_res.scl_data.ratios.vert;
685
686 if (pipe_ctx->plane_res.scl_data.format == PIXEL_FORMAT_420BPP8
687 || pipe_ctx->plane_res.scl_data.format == PIXEL_FORMAT_420BPP10) {
688 pipe_ctx->plane_res.scl_data.ratios.horz_c.value /= 2;
689 pipe_ctx->plane_res.scl_data.ratios.vert_c.value /= 2;
690 }
691 }
692
693 static void calculate_inits_and_adj_vp(struct pipe_ctx *pipe_ctx, struct view *recout_skip)
694 {
695 struct scaler_data *data = &pipe_ctx->plane_res.scl_data;
696 struct rect src = pipe_ctx->plane_state->src_rect;
697 int vpc_div = (data->format == PIXEL_FORMAT_420BPP8
698 || data->format == PIXEL_FORMAT_420BPP10) ? 2 : 1;
699
700
701 if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
702 pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270) {
703 rect_swap_helper(&src);
704 rect_swap_helper(&data->viewport_c);
705 rect_swap_helper(&data->viewport);
706 }
707
708 /*
709 * Init calculated according to formula:
710 * init = (scaling_ratio + number_of_taps + 1) / 2
711 * init_bot = init + scaling_ratio
712 * init_c = init + truncated_vp_c_offset(from calculate viewport)
713 */
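/* Illustrative numbers (not from the source): a 2:1 downscale
 * (scaling_ratio = 2.0) with 4 taps gives init = (2.0 + 4 + 1) / 2 = 3.5.
 */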
714 data->inits.h = dal_fixed31_32_div_int(
715 dal_fixed31_32_add_int(data->ratios.horz, data->taps.h_taps + 1), 2);
716
717 data->inits.h_c = dal_fixed31_32_add(data->inits.h_c, dal_fixed31_32_div_int(
718 dal_fixed31_32_add_int(data->ratios.horz_c, data->taps.h_taps_c + 1), 2));
719
720 data->inits.v = dal_fixed31_32_div_int(
721 dal_fixed31_32_add_int(data->ratios.vert, data->taps.v_taps + 1), 2);
722
723 data->inits.v_c = dal_fixed31_32_add(data->inits.v_c, dal_fixed31_32_div_int(
724 dal_fixed31_32_add_int(data->ratios.vert_c, data->taps.v_taps_c + 1), 2));
725
726
727 /* Adjust for viewport end clip-off */
728 if ((data->viewport.x + data->viewport.width) < (src.x + src.width)) {
729 int vp_clip = src.x + src.width - data->viewport.width - data->viewport.x;
730 int int_part = dal_fixed31_32_floor(
731 dal_fixed31_32_sub(data->inits.h, data->ratios.horz));
732
733 int_part = int_part > 0 ? int_part : 0;
734 data->viewport.width += int_part < vp_clip ? int_part : vp_clip;
735 }
736 if ((data->viewport.y + data->viewport.height) < (src.y + src.height)) {
737 int vp_clip = src.y + src.height - data->viewport.height - data->viewport.y;
738 int int_part = dal_fixed31_32_floor(
739 dal_fixed31_32_sub(data->inits.v, data->ratios.vert));
740
741 int_part = int_part > 0 ? int_part : 0;
742 data->viewport.height += int_part < vp_clip ? int_part : vp_clip;
743 }
744 if ((data->viewport_c.x + data->viewport_c.width) < (src.x + src.width) / vpc_div) {
745 int vp_clip = (src.x + src.width) / vpc_div -
746 data->viewport_c.width - data->viewport_c.x;
747 int int_part = dal_fixed31_32_floor(
748 dal_fixed31_32_sub(data->inits.h_c, data->ratios.horz_c));
749
750 int_part = int_part > 0 ? int_part : 0;
751 data->viewport_c.width += int_part < vp_clip ? int_part : vp_clip;
752 }
753 if ((data->viewport_c.y + data->viewport_c.height) < (src.y + src.height) / vpc_div) {
754 int vp_clip = (src.y + src.height) / vpc_div -
755 data->viewport_c.height - data->viewport_c.y;
756 int int_part = dal_fixed31_32_floor(
757 dal_fixed31_32_sub(data->inits.v_c, data->ratios.vert_c));
758
759 int_part = int_part > 0 ? int_part : 0;
760 data->viewport_c.height += int_part < vp_clip ? int_part : vp_clip;
761 }
762
763 /* Adjust for non-0 viewport offset */
764 if (data->viewport.x) {
765 int int_part;
766
767 data->inits.h = dal_fixed31_32_add(data->inits.h, dal_fixed31_32_mul_int(
768 data->ratios.horz, recout_skip->width));
769 int_part = dal_fixed31_32_floor(data->inits.h) - data->viewport.x;
770 if (int_part < data->taps.h_taps) {
771 int int_adj = data->viewport.x >= (data->taps.h_taps - int_part) ?
772 (data->taps.h_taps - int_part) : data->viewport.x;
773 data->viewport.x -= int_adj;
774 data->viewport.width += int_adj;
775 int_part += int_adj;
776 } else if (int_part > data->taps.h_taps) {
777 data->viewport.x += int_part - data->taps.h_taps;
778 data->viewport.width -= int_part - data->taps.h_taps;
779 int_part = data->taps.h_taps;
780 }
781 data->inits.h.value &= 0xffffffff;
782 data->inits.h = dal_fixed31_32_add_int(data->inits.h, int_part);
783 }
784
785 if (data->viewport_c.x) {
786 int int_part;
787
788 data->inits.h_c = dal_fixed31_32_add(data->inits.h_c, dal_fixed31_32_mul_int(
789 data->ratios.horz_c, recout_skip->width));
790 int_part = dal_fixed31_32_floor(data->inits.h_c) - data->viewport_c.x;
791 if (int_part < data->taps.h_taps_c) {
792 int int_adj = data->viewport_c.x >= (data->taps.h_taps_c - int_part) ?
793 (data->taps.h_taps_c - int_part) : data->viewport_c.x;
794 data->viewport_c.x -= int_adj;
795 data->viewport_c.width += int_adj;
796 int_part += int_adj;
797 } else if (int_part > data->taps.h_taps_c) {
798 data->viewport_c.x += int_part - data->taps.h_taps_c;
799 data->viewport_c.width -= int_part - data->taps.h_taps_c;
800 int_part = data->taps.h_taps_c;
801 }
802 data->inits.h_c.value &= 0xffffffff;
803 data->inits.h_c = dal_fixed31_32_add_int(data->inits.h_c, int_part);
804 }
805
806 if (data->viewport.y) {
807 int int_part;
808
809 data->inits.v = dal_fixed31_32_add(data->inits.v, dal_fixed31_32_mul_int(
810 data->ratios.vert, recout_skip->height));
811 int_part = dal_fixed31_32_floor(data->inits.v) - data->viewport.y;
812 if (int_part < data->taps.v_taps) {
813 int int_adj = data->viewport.y >= (data->taps.v_taps - int_part) ?
814 (data->taps.v_taps - int_part) : data->viewport.y;
815 data->viewport.y -= int_adj;
816 data->viewport.height += int_adj;
817 int_part += int_adj;
818 } else if (int_part > data->taps.v_taps) {
819 data->viewport.y += int_part - data->taps.v_taps;
820 data->viewport.height -= int_part - data->taps.v_taps;
821 int_part = data->taps.v_taps;
822 }
823 data->inits.v.value &= 0xffffffff;
824 data->inits.v = dal_fixed31_32_add_int(data->inits.v, int_part);
825 }
826
827 if (data->viewport_c.y) {
828 int int_part;
829
830 data->inits.v_c = dal_fixed31_32_add(data->inits.v_c, dal_fixed31_32_mul_int(
831 data->ratios.vert_c, recout_skip->height));
832 int_part = dal_fixed31_32_floor(data->inits.v_c) - data->viewport_c.y;
833 if (int_part < data->taps.v_taps_c) {
834 int int_adj = data->viewport_c.y >= (data->taps.v_taps_c - int_part) ?
835 (data->taps.v_taps_c - int_part) : data->viewport_c.y;
836 data->viewport_c.y -= int_adj;
837 data->viewport_c.height += int_adj;
838 int_part += int_adj;
839 } else if (int_part > data->taps.v_taps_c) {
840 data->viewport_c.y += int_part - data->taps.v_taps_c;
841 data->viewport_c.height -= int_part - data->taps.v_taps_c;
842 int_part = data->taps.v_taps_c;
843 }
844 data->inits.v_c.value &= 0xffffffff;
845 data->inits.v_c = dal_fixed31_32_add_int(data->inits.v_c, int_part);
846 }
847
848 /* Interlaced inits based on final vert inits */
849 data->inits.v_bot = dal_fixed31_32_add(data->inits.v, data->ratios.vert);
850 data->inits.v_c_bot = dal_fixed31_32_add(data->inits.v_c, data->ratios.vert_c);
851
852 if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
853 pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270) {
854 rect_swap_helper(&data->viewport_c);
855 rect_swap_helper(&data->viewport);
856 }
857 }
858
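/*
 * Rough overview of resource_build_scaling_params() (descriptive note added
 * for clarity): convert the surface pixel format, compute the scaling ratios
 * and the viewport, reject viewports narrower or shorter than 16 pixels,
 * compute the recout, default the line buffer to 30bpp, query the
 * transform/DPP for optimal tap counts (retrying at 24bpp on failure), and
 * finally derive the filter inits and adjust the viewport for split pipes.
 */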
859 bool resource_build_scaling_params(struct pipe_ctx *pipe_ctx)
860 {
861 const struct dc_plane_state *plane_state = pipe_ctx->plane_state;
862 struct dc_crtc_timing *timing = &pipe_ctx->stream->timing;
863 struct view recout_skip = { 0 };
864 bool res = false;
865
866 /* Important: scaling ratio calculation requires pixel format,
867 * lb depth calculation requires recout, and taps require scaling ratios.
868 * Inits require viewport, taps, ratios and recout of the split pipe.
869 */
870 pipe_ctx->plane_res.scl_data.format = convert_pixel_format_to_dalsurface(
871 pipe_ctx->plane_state->format);
872
873 calculate_scaling_ratios(pipe_ctx);
874
875 calculate_viewport(pipe_ctx);
876
877 if (pipe_ctx->plane_res.scl_data.viewport.height < 16 || pipe_ctx->plane_res.scl_data.viewport.width < 16)
878 return false;
879
880 calculate_recout(pipe_ctx, &recout_skip);
881
882 /**
883 * Setting line buffer pixel depth to 24bpp yields banding
884 * on certain displays, such as the Sharp 4k
885 */
886 pipe_ctx->plane_res.scl_data.lb_params.depth = LB_PIXEL_DEPTH_30BPP;
887
888 pipe_ctx->plane_res.scl_data.recout.x += timing->h_border_left;
889 pipe_ctx->plane_res.scl_data.recout.y += timing->v_border_top;
890
891 pipe_ctx->plane_res.scl_data.h_active = timing->h_addressable + timing->h_border_left + timing->h_border_right;
892 pipe_ctx->plane_res.scl_data.v_active = timing->v_addressable + timing->v_border_top + timing->v_border_bottom;
893
894
895 /* Taps calculations */
896 if (pipe_ctx->plane_res.xfm != NULL)
897 res = pipe_ctx->plane_res.xfm->funcs->transform_get_optimal_number_of_taps(
898 pipe_ctx->plane_res.xfm, &pipe_ctx->plane_res.scl_data, &plane_state->scaling_quality);
899
900 if (pipe_ctx->plane_res.dpp != NULL)
901 res = pipe_ctx->plane_res.dpp->funcs->dpp_get_optimal_number_of_taps(
902 pipe_ctx->plane_res.dpp, &pipe_ctx->plane_res.scl_data, &plane_state->scaling_quality);
903 if (!res) {
904 /* Try 24 bpp linebuffer */
905 pipe_ctx->plane_res.scl_data.lb_params.depth = LB_PIXEL_DEPTH_24BPP;
906
907 if (pipe_ctx->plane_res.xfm != NULL)
908 res = pipe_ctx->plane_res.xfm->funcs->transform_get_optimal_number_of_taps(
909 pipe_ctx->plane_res.xfm,
910 &pipe_ctx->plane_res.scl_data,
911 &plane_state->scaling_quality);
912
913 if (pipe_ctx->plane_res.dpp != NULL)
914 res = pipe_ctx->plane_res.dpp->funcs->dpp_get_optimal_number_of_taps(
915 pipe_ctx->plane_res.dpp,
916 &pipe_ctx->plane_res.scl_data,
917 &plane_state->scaling_quality);
918 }
919
920 if (res)
921 /* May need to re-check lb size after this in some obscure scenario */
922 calculate_inits_and_adj_vp(pipe_ctx, &recout_skip);
923
924 dm_logger_write(pipe_ctx->stream->ctx->logger, LOG_SCALER,
925 "%s: Viewport:\nheight:%d width:%d x:%d "
926 "y:%d\n dst_rect:\nheight:%d width:%d x:%d "
927 "y:%d\n",
928 __func__,
929 pipe_ctx->plane_res.scl_data.viewport.height,
930 pipe_ctx->plane_res.scl_data.viewport.width,
931 pipe_ctx->plane_res.scl_data.viewport.x,
932 pipe_ctx->plane_res.scl_data.viewport.y,
933 plane_state->dst_rect.height,
934 plane_state->dst_rect.width,
935 plane_state->dst_rect.x,
936 plane_state->dst_rect.y);
937
938 return res;
939 }
940
941
942 enum dc_status resource_build_scaling_params_for_context(
943 const struct dc *dc,
944 struct dc_state *context)
945 {
946 int i;
947
948 for (i = 0; i < MAX_PIPES; i++) {
949 if (context->res_ctx.pipe_ctx[i].plane_state != NULL &&
950 context->res_ctx.pipe_ctx[i].stream != NULL)
951 if (!resource_build_scaling_params(&context->res_ctx.pipe_ctx[i]))
952 return DC_FAIL_SCALING;
953 }
954
955 return DC_OK;
956 }
957
958 struct pipe_ctx *find_idle_secondary_pipe(
959 struct resource_context *res_ctx,
960 const struct resource_pool *pool)
961 {
962 int i;
963 struct pipe_ctx *secondary_pipe = NULL;
964
965 /*
966 * search backwards for the second pipe to keep pipe
967 * assignment more consistent
968 */
969
970 for (i = pool->pipe_count - 1; i >= 0; i--) {
971 if (res_ctx->pipe_ctx[i].stream == NULL) {
972 secondary_pipe = &res_ctx->pipe_ctx[i];
973 secondary_pipe->pipe_idx = i;
974 break;
975 }
976 }
977
978
979 return secondary_pipe;
980 }
981
982 struct pipe_ctx *resource_get_head_pipe_for_stream(
983 struct resource_context *res_ctx,
984 struct dc_stream_state *stream)
985 {
986 int i;
987 for (i = 0; i < MAX_PIPES; i++) {
988 if (res_ctx->pipe_ctx[i].stream == stream &&
989 !res_ctx->pipe_ctx[i].top_pipe) {
990 return &res_ctx->pipe_ctx[i];
992 }
993 }
994 return NULL;
995 }
996
997 static struct pipe_ctx *resource_get_tail_pipe_for_stream(
998 struct resource_context *res_ctx,
999 struct dc_stream_state *stream)
1000 {
1001 struct pipe_ctx *head_pipe, *tail_pipe;
1002 head_pipe = resource_get_head_pipe_for_stream(res_ctx, stream);
1003
1004 if (!head_pipe)
1005 return NULL;
1006
1007 tail_pipe = head_pipe->bottom_pipe;
1008
1009 while (tail_pipe) {
1010 head_pipe = tail_pipe;
1011 tail_pipe = tail_pipe->bottom_pipe;
1012 }
1013
1014 return head_pipe;
1015 }
1016
1017 /*
1018 * A free_pipe for a stream is defined here as a pipe
1019 * that has no surface attached yet
1020 */
1021 static struct pipe_ctx *acquire_free_pipe_for_stream(
1022 struct dc_state *context,
1023 const struct resource_pool *pool,
1024 struct dc_stream_state *stream)
1025 {
1026 int i;
1027 struct resource_context *res_ctx = &context->res_ctx;
1028
1029 struct pipe_ctx *head_pipe = NULL;
1030
1031 /* Find head pipe, which has the back end set up*/
1032
1033 head_pipe = resource_get_head_pipe_for_stream(res_ctx, stream);
1034
1035 if (!head_pipe) {
1036 ASSERT(0);
1037 return NULL;
1038 }
1039
1040 if (!head_pipe->plane_state)
1041 return head_pipe;
1042
1043 /* Re-use pipe already acquired for this stream if available*/
1044 for (i = pool->pipe_count - 1; i >= 0; i--) {
1045 if (res_ctx->pipe_ctx[i].stream == stream &&
1046 !res_ctx->pipe_ctx[i].plane_state) {
1047 return &res_ctx->pipe_ctx[i];
1048 }
1049 }
1050
1051 /*
1052 * At this point we have no reusable pipe for this stream and we need
1053 * to acquire an idle one to satisfy the request
1054 */
1055
1056 if (!pool->funcs->acquire_idle_pipe_for_layer)
1057 return NULL;
1058
1059 return pool->funcs->acquire_idle_pipe_for_layer(context, pool, stream);
1060
1061 }
1062
1063 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
1064 static int acquire_first_split_pipe(
1065 struct resource_context *res_ctx,
1066 const struct resource_pool *pool,
1067 struct dc_stream_state *stream)
1068 {
1069 int i;
1070
1071 for (i = 0; i < pool->pipe_count; i++) {
1072 struct pipe_ctx *pipe_ctx = &res_ctx->pipe_ctx[i];
1073
1074 if (pipe_ctx->top_pipe &&
1075 pipe_ctx->top_pipe->plane_state == pipe_ctx->plane_state) {
1076 pipe_ctx->top_pipe->bottom_pipe = pipe_ctx->bottom_pipe;
1077 if (pipe_ctx->bottom_pipe)
1078 pipe_ctx->bottom_pipe->top_pipe = pipe_ctx->top_pipe;
1079
1080 memset(pipe_ctx, 0, sizeof(*pipe_ctx));
1081 pipe_ctx->stream_res.tg = pool->timing_generators[i];
1082 pipe_ctx->plane_res.hubp = pool->hubps[i];
1083 pipe_ctx->plane_res.ipp = pool->ipps[i];
1084 pipe_ctx->plane_res.dpp = pool->dpps[i];
1085 pipe_ctx->stream_res.opp = pool->opps[i];
1086 pipe_ctx->pipe_idx = i;
1087
1088 pipe_ctx->stream = stream;
1089 return i;
1090 }
1091 }
1092 return -1;
1093 }
1094 #endif
1095
1096 bool dc_add_plane_to_context(
1097 const struct dc *dc,
1098 struct dc_stream_state *stream,
1099 struct dc_plane_state *plane_state,
1100 struct dc_state *context)
1101 {
1102 int i;
1103 struct resource_pool *pool = dc->res_pool;
1104 struct pipe_ctx *head_pipe, *tail_pipe, *free_pipe;
1105 struct dc_stream_status *stream_status = NULL;
1106
1107 for (i = 0; i < context->stream_count; i++)
1108 if (context->streams[i] == stream) {
1109 stream_status = &context->stream_status[i];
1110 break;
1111 }
1112 if (stream_status == NULL) {
1113 dm_error("Existing stream not found; failed to attach surface!\n");
1114 return false;
1115 }
1116
1117
1118 if (stream_status->plane_count == MAX_SURFACE_NUM) {
1119 dm_error("Surface: can not attach plane_state %p! Maximum is: %d\n",
1120 plane_state, MAX_SURFACE_NUM);
1121 return false;
1122 }
1123
1124 head_pipe = resource_get_head_pipe_for_stream(&context->res_ctx, stream);
1125
1126 if (!head_pipe) {
1127 dm_error("Head pipe not found for stream_state %p !\n", stream);
1128 return false;
1129 }
1130
1131 free_pipe = acquire_free_pipe_for_stream(context, pool, stream);
1132
1133 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
1134 if (!free_pipe) {
1135 int pipe_idx = acquire_first_split_pipe(&context->res_ctx, pool, stream);
1136 if (pipe_idx >= 0)
1137 free_pipe = &context->res_ctx.pipe_ctx[pipe_idx];
1138 }
1139 #endif
1140 if (!free_pipe)
1141 return false;
1142
1143 /* retain new surfaces */
1144 dc_plane_state_retain(plane_state);
1145 free_pipe->plane_state = plane_state;
1146
1147 if (head_pipe != free_pipe) {
1148
1149 tail_pipe = resource_get_tail_pipe_for_stream(&context->res_ctx, stream);
1150 ASSERT(tail_pipe);
1151
1152 free_pipe->stream_res.tg = tail_pipe->stream_res.tg;
1153 free_pipe->stream_res.opp = tail_pipe->stream_res.opp;
1154 free_pipe->stream_res.stream_enc = tail_pipe->stream_res.stream_enc;
1155 free_pipe->stream_res.audio = tail_pipe->stream_res.audio;
1156 free_pipe->clock_source = tail_pipe->clock_source;
1157 free_pipe->top_pipe = tail_pipe;
1158 tail_pipe->bottom_pipe = free_pipe;
1159 }
1160
1161 /* assign new surfaces*/
1162 stream_status->plane_states[stream_status->plane_count] = plane_state;
1163
1164 stream_status->plane_count++;
1165
1166 return true;
1167 }
1168
1169 bool dc_remove_plane_from_context(
1170 const struct dc *dc,
1171 struct dc_stream_state *stream,
1172 struct dc_plane_state *plane_state,
1173 struct dc_state *context)
1174 {
1175 int i;
1176 struct dc_stream_status *stream_status = NULL;
1177 struct resource_pool *pool = dc->res_pool;
1178
1179 for (i = 0; i < context->stream_count; i++)
1180 if (context->streams[i] == stream) {
1181 stream_status = &context->stream_status[i];
1182 break;
1183 }
1184
1185 if (stream_status == NULL) {
1186 dm_error("Existing stream not found; failed to remove plane.\n");
1187 return false;
1188 }
1189
1190 /* release pipe for plane*/
1191 for (i = pool->pipe_count - 1; i >= 0; i--) {
1192 struct pipe_ctx *pipe_ctx;
1193
1194 if (context->res_ctx.pipe_ctx[i].plane_state == plane_state) {
1195 pipe_ctx = &context->res_ctx.pipe_ctx[i];
1196
1197 if (pipe_ctx->top_pipe)
1198 pipe_ctx->top_pipe->bottom_pipe = pipe_ctx->bottom_pipe;
1199
1200 /* The second condition avoids setting the tail pipe's top_pipe
1201 * to NULL, which would make it look like a head pipe in
1202 * subsequent deletes
1203 */
1204 if (pipe_ctx->bottom_pipe && pipe_ctx->top_pipe)
1205 pipe_ctx->bottom_pipe->top_pipe = pipe_ctx->top_pipe;
1206
1207 /*
1208 * For a head pipe, detach the surface from the pipe; for a tail
1209 * pipe, just zero it out
1210 */
1211 if (!pipe_ctx->top_pipe ||
1212 (!pipe_ctx->top_pipe->top_pipe &&
1213 pipe_ctx->top_pipe->stream_res.opp != pipe_ctx->stream_res.opp)) {
1214 pipe_ctx->plane_state = NULL;
1215 pipe_ctx->bottom_pipe = NULL;
1216 } else {
1217 memset(pipe_ctx, 0, sizeof(*pipe_ctx));
1218 }
1219 }
1220 }
1221
1222
1223 for (i = 0; i < stream_status->plane_count; i++) {
1224 if (stream_status->plane_states[i] == plane_state) {
1225
1226 dc_plane_state_release(stream_status->plane_states[i]);
1227 break;
1228 }
1229 }
1230
1231 if (i == stream_status->plane_count) {
1232 dm_error("Existing plane_state not found; failed to detach it!\n");
1233 return false;
1234 }
1235
1236 stream_status->plane_count--;
1237
1238 /* Start at the plane we've just released, and move all the planes one index forward to "trim" the array */
1239 for (; i < stream_status->plane_count; i++)
1240 stream_status->plane_states[i] = stream_status->plane_states[i + 1];
1241
1242 stream_status->plane_states[stream_status->plane_count] = NULL;
1243
1244 return true;
1245 }
1246
1247 bool dc_rem_all_planes_for_stream(
1248 const struct dc *dc,
1249 struct dc_stream_state *stream,
1250 struct dc_state *context)
1251 {
1252 int i, old_plane_count;
1253 struct dc_stream_status *stream_status = NULL;
1254 struct dc_plane_state *del_planes[MAX_SURFACE_NUM] = { 0 };
1255
1256 for (i = 0; i < context->stream_count; i++)
1257 if (context->streams[i] == stream) {
1258 stream_status = &context->stream_status[i];
1259 break;
1260 }
1261
1262 if (stream_status == NULL) {
1263 dm_error("Existing stream %p not found!\n", stream);
1264 return false;
1265 }
1266
1267 old_plane_count = stream_status->plane_count;
1268
1269 for (i = 0; i < old_plane_count; i++)
1270 del_planes[i] = stream_status->plane_states[i];
1271
1272 for (i = 0; i < old_plane_count; i++)
1273 if (!dc_remove_plane_from_context(dc, stream, del_planes[i], context))
1274 return false;
1275
1276 return true;
1277 }
1278
1279 static bool add_all_planes_for_stream(
1280 const struct dc *dc,
1281 struct dc_stream_state *stream,
1282 const struct dc_validation_set set[],
1283 int set_count,
1284 struct dc_state *context)
1285 {
1286 int i, j;
1287
1288 for (i = 0; i < set_count; i++)
1289 if (set[i].stream == stream)
1290 break;
1291
1292 if (i == set_count) {
1293 dm_error("Stream %p not found in set!\n", stream);
1294 return false;
1295 }
1296
1297 for (j = 0; j < set[i].plane_count; j++)
1298 if (!dc_add_plane_to_context(dc, stream, set[i].plane_states[j], context))
1299 return false;
1300
1301 return true;
1302 }
1303
1304 bool dc_add_all_planes_for_stream(
1305 const struct dc *dc,
1306 struct dc_stream_state *stream,
1307 struct dc_plane_state * const *plane_states,
1308 int plane_count,
1309 struct dc_state *context)
1310 {
1311 struct dc_validation_set set;
1312 int i;
1313
1314 set.stream = stream;
1315 set.plane_count = plane_count;
1316
1317 for (i = 0; i < plane_count; i++)
1318 set.plane_states[i] = plane_states[i];
1319
1320 return add_all_planes_for_stream(dc, stream, &set, 1, context);
1321 }
1322
1323
1324
1325 static bool is_timing_changed(struct dc_stream_state *cur_stream,
1326 struct dc_stream_state *new_stream)
1327 {
1328 if (cur_stream == NULL)
1329 return true;
1330
1331 /* If the sink pointer changed, this is a hotplug and we should do a
1332 * full hw setup.
1333 */
1334 if (cur_stream->sink != new_stream->sink)
1335 return true;
1336
1337 /* If output color space is changed, need to reprogram info frames */
1338 if (cur_stream->output_color_space != new_stream->output_color_space)
1339 return true;
1340
1341 return memcmp(
1342 &cur_stream->timing,
1343 &new_stream->timing,
1344 sizeof(struct dc_crtc_timing)) != 0;
1345 }
1346
1347 static bool are_stream_backends_same(
1348 struct dc_stream_state *stream_a, struct dc_stream_state *stream_b)
1349 {
1350 if (stream_a == stream_b)
1351 return true;
1352
1353 if (stream_a == NULL || stream_b == NULL)
1354 return false;
1355
1356 if (is_timing_changed(stream_a, stream_b))
1357 return false;
1358
1359 return true;
1360 }
1361
1362 bool dc_is_stream_unchanged(
1363 struct dc_stream_state *old_stream, struct dc_stream_state *stream)
1364 {
1365
1366 if (!are_stream_backends_same(old_stream, stream))
1367 return false;
1368
1369 return true;
1370 }
1371
1372 bool dc_is_stream_scaling_unchanged(
1373 struct dc_stream_state *old_stream, struct dc_stream_state *stream)
1374 {
1375 if (old_stream == stream)
1376 return true;
1377
1378 if (old_stream == NULL || stream == NULL)
1379 return false;
1380
1381 if (memcmp(&old_stream->src,
1382 &stream->src,
1383 sizeof(struct rect)) != 0)
1384 return false;
1385
1386 if (memcmp(&old_stream->dst,
1387 &stream->dst,
1388 sizeof(struct rect)) != 0)
1389 return false;
1390
1391 return true;
1392 }
1393
1394 /* Maximum TMDS single link pixel clock 165MHz */
1395 #define TMDS_MAX_PIXEL_CLOCK_IN_KHZ 165000
1396
1397 static void update_stream_engine_usage(
1398 struct resource_context *res_ctx,
1399 const struct resource_pool *pool,
1400 struct stream_encoder *stream_enc,
1401 bool acquired)
1402 {
1403 int i;
1404
1405 for (i = 0; i < pool->stream_enc_count; i++) {
1406 if (pool->stream_enc[i] == stream_enc)
1407 res_ctx->is_stream_enc_acquired[i] = acquired;
1408 }
1409 }
1410
1411 /* TODO: release audio object */
1412 void update_audio_usage(
1413 struct resource_context *res_ctx,
1414 const struct resource_pool *pool,
1415 struct audio *audio,
1416 bool acquired)
1417 {
1418 int i;
1419 for (i = 0; i < pool->audio_count; i++) {
1420 if (pool->audios[i] == audio)
1421 res_ctx->is_audio_acquired[i] = acquired;
1422 }
1423 }
1424
1425 static int acquire_first_free_pipe(
1426 struct resource_context *res_ctx,
1427 const struct resource_pool *pool,
1428 struct dc_stream_state *stream)
1429 {
1430 int i;
1431
1432 for (i = 0; i < pool->pipe_count; i++) {
1433 if (!res_ctx->pipe_ctx[i].stream) {
1434 struct pipe_ctx *pipe_ctx = &res_ctx->pipe_ctx[i];
1435
1436 pipe_ctx->stream_res.tg = pool->timing_generators[i];
1437 pipe_ctx->plane_res.mi = pool->mis[i];
1438 pipe_ctx->plane_res.hubp = pool->hubps[i];
1439 pipe_ctx->plane_res.ipp = pool->ipps[i];
1440 pipe_ctx->plane_res.xfm = pool->transforms[i];
1441 pipe_ctx->plane_res.dpp = pool->dpps[i];
1442 pipe_ctx->stream_res.opp = pool->opps[i];
1443 pipe_ctx->pipe_idx = i;
1444
1445
1446 pipe_ctx->stream = stream;
1447 return i;
1448 }
1449 }
1450 return -1;
1451 }
1452
1453 static struct stream_encoder *find_first_free_match_stream_enc_for_link(
1454 struct resource_context *res_ctx,
1455 const struct resource_pool *pool,
1456 struct dc_stream_state *stream)
1457 {
1458 int i;
1459 int j = -1;
1460 struct dc_link *link = stream->sink->link;
1461
1462 for (i = 0; i < pool->stream_enc_count; i++) {
1463 if (!res_ctx->is_stream_enc_acquired[i] &&
1464 pool->stream_enc[i]) {
1465 /* Remember a free encoder for the second MST display in a
1466 * daisy chain use case */
1467 j = i;
1468 if (pool->stream_enc[i]->id ==
1469 link->link_enc->preferred_engine)
1470 return pool->stream_enc[i];
1471 }
1472 }
1473
1474 /*
1475 * The cases below can happen when the stream encoder is already acquired:
1476 * 1) for the second MST display in a daisy chain, so the preferred engine
1477 * is already taken;
1478 * 2) for another link whose preferred engine was already acquired by some
1479 * MST configuration.
1480 *
1481 * If the signal is DP and the preferred engine was not found, return the last available one.
1482 *
1483 * TODO - This is just a patch-up; a generic solution is
1484 * required for non-DP connectors.
1485 */
1486
1487 if (j >= 0 && dc_is_dp_signal(stream->signal))
1488 return pool->stream_enc[j];
1489
1490 return NULL;
1491 }
1492
1493 static struct audio *find_first_free_audio(
1494 struct resource_context *res_ctx,
1495 const struct resource_pool *pool,
1496 enum engine_id id)
1497 {
1498 int i;
1499 for (i = 0; i < pool->audio_count; i++) {
1500 if ((res_ctx->is_audio_acquired[i] == false) && (res_ctx->is_stream_enc_acquired[i] == true)) {
1501 /* we have enough audio endpoints; find the matching instance */
1502 if (id != i)
1503 continue;
1504
1505 return pool->audios[i];
1506 }
1507 }
1508
1509 /* use engine id to find free audio */
1510 if ((id < pool->audio_count) && (res_ctx->is_audio_acquired[id] == false)) {
1511 return pool->audios[id];
1512 }
1513
1514 /* no matching endpoint found; fall back to first come, first served */
1515 for (i = 0; i < pool->audio_count; i++) {
1516 if (res_ctx->is_audio_acquired[i] == false) {
1517 return pool->audios[i];
1518 }
1519 }
1520 return NULL;
1521 }
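/*
 * Selection order above (descriptive note added for clarity): first a free
 * audio endpoint whose index matches the given engine id and whose stream
 * encoder slot is already acquired, then the free endpoint at
 * index == engine id, and finally the first free endpoint as a
 * first-come, first-served fallback.
 */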
1522
1523 bool resource_is_stream_unchanged(
1524 struct dc_state *old_context, struct dc_stream_state *stream)
1525 {
1526 int i;
1527
1528 for (i = 0; i < old_context->stream_count; i++) {
1529 struct dc_stream_state *old_stream = old_context->streams[i];
1530
1531 if (are_stream_backends_same(old_stream, stream))
1532 return true;
1533 }
1534
1535 return false;
1536 }
1537
1538 enum dc_status dc_add_stream_to_ctx(
1539 struct dc *dc,
1540 struct dc_state *new_ctx,
1541 struct dc_stream_state *stream)
1542 {
1543 struct dc_context *dc_ctx = dc->ctx;
1544 enum dc_status res;
1545
1546 if (new_ctx->stream_count >= dc->res_pool->pipe_count) {
1547 DC_ERROR("Max streams reached, can add stream %p !\n", stream);
1548 return DC_ERROR_UNEXPECTED;
1549 }
1550
1551 new_ctx->streams[new_ctx->stream_count] = stream;
1552 dc_stream_retain(stream);
1553 new_ctx->stream_count++;
1554
1555 res = dc->res_pool->funcs->add_stream_to_ctx(dc, new_ctx, stream);
1556 if (res != DC_OK)
1557 DC_ERROR("Adding stream %p to context failed with err %d!\n", stream, res);
1558
1559 return res;
1560 }
1561
1562 enum dc_status dc_remove_stream_from_ctx(
1563 struct dc *dc,
1564 struct dc_state *new_ctx,
1565 struct dc_stream_state *stream)
1566 {
1567 int i;
1568 struct dc_context *dc_ctx = dc->ctx;
1569 struct pipe_ctx *del_pipe = NULL;
1570
1571 /* Release primary pipe */
1572 for (i = 0; i < MAX_PIPES; i++) {
1573 if (new_ctx->res_ctx.pipe_ctx[i].stream == stream &&
1574 !new_ctx->res_ctx.pipe_ctx[i].top_pipe) {
1575 del_pipe = &new_ctx->res_ctx.pipe_ctx[i];
1576
1577 ASSERT(del_pipe->stream_res.stream_enc);
1578 update_stream_engine_usage(
1579 &new_ctx->res_ctx,
1580 dc->res_pool,
1581 del_pipe->stream_res.stream_enc,
1582 false);
1583
1584 if (del_pipe->stream_res.audio)
1585 update_audio_usage(
1586 &new_ctx->res_ctx,
1587 dc->res_pool,
1588 del_pipe->stream_res.audio,
1589 false);
1590
1591 resource_unreference_clock_source(&new_ctx->res_ctx,
1592 dc->res_pool,
1593 del_pipe->clock_source);
1594
1595 memset(del_pipe, 0, sizeof(*del_pipe));
1596
1597 break;
1598 }
1599 }
1600
1601 if (!del_pipe) {
1602 DC_ERROR("Pipe not found for stream %p !\n", stream);
1603 return DC_ERROR_UNEXPECTED;
1604 }
1605
1606 for (i = 0; i < new_ctx->stream_count; i++)
1607 if (new_ctx->streams[i] == stream)
1608 break;
1609
1610 if (new_ctx->streams[i] != stream) {
1611 DC_ERROR("Context doesn't have stream %p !\n", stream);
1612 return DC_ERROR_UNEXPECTED;
1613 }
1614
1615 dc_stream_release(new_ctx->streams[i]);
1616 new_ctx->stream_count--;
1617
1618 /* Trim back arrays */
1619 for (; i < new_ctx->stream_count; i++) {
1620 new_ctx->streams[i] = new_ctx->streams[i + 1];
1621 new_ctx->stream_status[i] = new_ctx->stream_status[i + 1];
1622 }
1623
1624 new_ctx->streams[new_ctx->stream_count] = NULL;
1625 memset(
1626 &new_ctx->stream_status[new_ctx->stream_count],
1627 0,
1628 sizeof(new_ctx->stream_status[0]));
1629
1630 return DC_OK;
1631 }
1632
1633 static void copy_pipe_ctx(
1634 const struct pipe_ctx *from_pipe_ctx, struct pipe_ctx *to_pipe_ctx)
1635 {
1636 struct dc_plane_state *plane_state = to_pipe_ctx->plane_state;
1637 struct dc_stream_state *stream = to_pipe_ctx->stream;
1638
1639 *to_pipe_ctx = *from_pipe_ctx;
1640 to_pipe_ctx->stream = stream;
1641 if (plane_state != NULL)
1642 to_pipe_ctx->plane_state = plane_state;
1643 }
1644
1645 static struct dc_stream_state *find_pll_sharable_stream(
1646 struct dc_stream_state *stream_needs_pll,
1647 struct dc_state *context)
1648 {
1649 int i;
1650
1651 for (i = 0; i < context->stream_count; i++) {
1652 struct dc_stream_state *stream_has_pll = context->streams[i];
1653
1654 /* We are looking for a non-DP, non-virtual stream */
1655 if (resource_are_streams_timing_synchronizable(
1656 stream_needs_pll, stream_has_pll)
1657 && !dc_is_dp_signal(stream_has_pll->signal)
1658 && stream_has_pll->sink->link->connector_signal
1659 != SIGNAL_TYPE_VIRTUAL)
1660 return stream_has_pll;
1661
1662 }
1663
1664 return NULL;
1665 }
1666
1667 static int get_norm_pix_clk(const struct dc_crtc_timing *timing)
1668 {
1669 uint32_t pix_clk = timing->pix_clk_khz;
1670 uint32_t normalized_pix_clk = pix_clk;
1671
1672 if (timing->pixel_encoding == PIXEL_ENCODING_YCBCR420)
1673 pix_clk /= 2;
1674 if (timing->pixel_encoding != PIXEL_ENCODING_YCBCR422) {
1675 switch (timing->display_color_depth) {
1676 case COLOR_DEPTH_888:
1677 normalized_pix_clk = pix_clk;
1678 break;
1679 case COLOR_DEPTH_101010:
1680 normalized_pix_clk = (pix_clk * 30) / 24;
1681 break;
1682 case COLOR_DEPTH_121212:
1683 normalized_pix_clk = (pix_clk * 36) / 24;
1684 break;
1685 case COLOR_DEPTH_161616:
1686 normalized_pix_clk = (pix_clk * 48) / 24;
1687 break;
1688 default:
1689 ASSERT(0);
1690 break;
1691 }
1692 }
1693 return normalized_pix_clk;
1694 }
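/*
 * Illustrative example (numbers not from the source): a 594000 kHz HDMI
 * timing at 10 bpc RGB normalizes to 594000 * 30 / 24 = 742500 kHz, while a
 * YCbCr 4:2:0 timing has its pixel clock halved before the depth scaling.
 */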
1695
1696 static void calculate_phy_pix_clks(struct dc_stream_state *stream)
1697 {
1698 /* update actual pixel clock on all streams */
1699 if (dc_is_hdmi_signal(stream->signal))
1700 stream->phy_pix_clk = get_norm_pix_clk(
1701 &stream->timing);
1702 else
1703 stream->phy_pix_clk =
1704 stream->timing.pix_clk_khz;
1705 }
1706
1707 enum dc_status resource_map_pool_resources(
1708 const struct dc *dc,
1709 struct dc_state *context,
1710 struct dc_stream_state *stream)
1711 {
1712 const struct resource_pool *pool = dc->res_pool;
1713 int i;
1714 struct dc_context *dc_ctx = dc->ctx;
1715 struct pipe_ctx *pipe_ctx = NULL;
1716 int pipe_idx = -1;
1717
1718 /* TODO Check if this is needed */
1719 /*if (!resource_is_stream_unchanged(old_context, stream)) {
1720 if (stream != NULL && old_context->streams[i] != NULL) {
1721 stream->bit_depth_params =
1722 old_context->streams[i]->bit_depth_params;
1723 stream->clamping = old_context->streams[i]->clamping;
1724 continue;
1725 }
1726 }
1727 */
1728
1729 calculate_phy_pix_clks(stream);
1730
1731 /* acquire new resources */
1732 pipe_idx = acquire_first_free_pipe(&context->res_ctx, pool, stream);
1733
1734 #ifdef CONFIG_DRM_AMD_DC_DCN1_0
1735 if (pipe_idx < 0)
1736 pipe_idx = acquire_first_split_pipe(&context->res_ctx, pool, stream);
1737 #endif
1738
1739 if (pipe_idx < 0)
1740 return DC_NO_CONTROLLER_RESOURCE;
1741
1742 pipe_ctx = &context->res_ctx.pipe_ctx[pipe_idx];
1743
1744 pipe_ctx->stream_res.stream_enc =
1745 find_first_free_match_stream_enc_for_link(
1746 &context->res_ctx, pool, stream);
1747
1748 if (!pipe_ctx->stream_res.stream_enc)
1749 return DC_NO_STREAM_ENG_RESOURCE;
1750
1751 update_stream_engine_usage(
1752 &context->res_ctx, pool,
1753 pipe_ctx->stream_res.stream_enc,
1754 true);
1755
1756 /* TODO: Add a check for ASIC audio support and EDID audio */
1757 if (!stream->sink->converter_disable_audio &&
1758 dc_is_audio_capable_signal(pipe_ctx->stream->signal) &&
1759 stream->audio_info.mode_count) {
1760 pipe_ctx->stream_res.audio = find_first_free_audio(
1761 &context->res_ctx, pool, pipe_ctx->stream_res.stream_enc->id);
1762
1763 /*
1764 * Audio is assigned on a first-come, first-served basis.
1765 * There are ASICs that have fewer audio
1766 * resources than pipes.
1767 */
1768 if (pipe_ctx->stream_res.audio)
1769 update_audio_usage(&context->res_ctx, pool,
1770 pipe_ctx->stream_res.audio, true);
1771 }
1772
1773 for (i = 0; i < context->stream_count; i++)
1774 if (context->streams[i] == stream) {
1775 context->stream_status[i].primary_otg_inst = pipe_ctx->stream_res.tg->inst;
1776 context->stream_status[i].stream_enc_inst = pipe_ctx->stream_res.stream_enc->id;
1777 return DC_OK;
1778 }
1779
1780 DC_ERROR("Stream %p not found in new ctx!\n", stream);
1781 return DC_ERROR_UNEXPECTED;
1782 }
1783
1784 /* first stream in the context is used to populate the rest */
1785 void validate_guaranteed_copy_streams(
1786 struct dc_state *context,
1787 int max_streams)
1788 {
1789 int i;
1790
1791 for (i = 1; i < max_streams; i++) {
1792 context->streams[i] = context->streams[0];
1793
1794 copy_pipe_ctx(&context->res_ctx.pipe_ctx[0],
1795 &context->res_ctx.pipe_ctx[i]);
1796 context->res_ctx.pipe_ctx[i].stream =
1797 context->res_ctx.pipe_ctx[0].stream;
1798
1799 dc_stream_retain(context->streams[i]);
1800 context->stream_count++;
1801 }
1802 }
1803
1804 void dc_resource_state_copy_construct_current(
1805 const struct dc *dc,
1806 struct dc_state *dst_ctx)
1807 {
1808 dc_resource_state_copy_construct(dc->current_state, dst_ctx);
1809 }
1810
1811
1812 void dc_resource_state_construct(
1813 const struct dc *dc,
1814 struct dc_state *dst_ctx)
1815 {
1816 dst_ctx->dis_clk = dc->res_pool->display_clock;
1817 }
1818
1819 enum dc_status dc_validate_global_state(
1820 struct dc *dc,
1821 struct dc_state *new_ctx)
1822 {
1823 enum dc_status result = DC_ERROR_UNEXPECTED;
1824 int i, j;
1825
1826 if (!new_ctx)
1827 return DC_ERROR_UNEXPECTED;
1828
1829 if (dc->res_pool->funcs->validate_global) {
1830 result = dc->res_pool->funcs->validate_global(dc, new_ctx);
1831 if (result != DC_OK)
1832 return result;
1833 }
1834
1835 for (i = 0; i < new_ctx->stream_count; i++) {
1836 struct dc_stream_state *stream = new_ctx->streams[i];
1837
1838 for (j = 0; j < dc->res_pool->pipe_count; j++) {
1839 struct pipe_ctx *pipe_ctx = &new_ctx->res_ctx.pipe_ctx[j];
1840
1841 if (pipe_ctx->stream != stream)
1842 continue;
1843
1844 /* Switch to the DP clock source only if there is
1845 * no non-DP stream that shares the same timing
1846 * with the DP stream.
1847 */
1848 if (dc_is_dp_signal(pipe_ctx->stream->signal) &&
1849 !find_pll_sharable_stream(stream, new_ctx)) {
1850
1851 resource_unreference_clock_source(
1852 &new_ctx->res_ctx,
1853 dc->res_pool,
1854 pipe_ctx->clock_source);
1855
1856 pipe_ctx->clock_source = dc->res_pool->dp_clock_source;
1857 resource_reference_clock_source(
1858 &new_ctx->res_ctx,
1859 dc->res_pool,
1860 pipe_ctx->clock_source);
1861 }
1862 }
1863 }
1864
1865 result = resource_build_scaling_params_for_context(dc, new_ctx);
1866
1867 if (result == DC_OK)
1868 if (!dc->res_pool->funcs->validate_bandwidth(dc, new_ctx))
1869 result = DC_FAIL_BANDWIDTH_VALIDATE;
1870
1871 return result;
1872 }
1873
1874 static void patch_gamut_packet_checksum(
1875 struct encoder_info_packet *gamut_packet)
1876 {
1877 /* For gamut we recalc checksum */
1878 if (gamut_packet->valid) {
1879 uint8_t chk_sum = 0;
1880 uint8_t *ptr;
1881 uint8_t i;
1882
1883 /* Start of the Gamut data. */
1884 ptr = &gamut_packet->sb[3];
1885
1886 for (i = 0; i <= gamut_packet->sb[1]; i++)
1887 chk_sum += ptr[i];
1888
1889 gamut_packet->sb[2] = (uint8_t) (0x100 - chk_sum);
1890 }
1891 }
1892
1893 static void set_avi_info_frame(
1894 struct encoder_info_packet *info_packet,
1895 struct pipe_ctx *pipe_ctx)
1896 {
1897 struct dc_stream_state *stream = pipe_ctx->stream;
1898 enum dc_color_space color_space = COLOR_SPACE_UNKNOWN;
1899 struct info_frame info_frame = { {0} };
1900 uint32_t pixel_encoding = 0;
1901 enum scanning_type scan_type = SCANNING_TYPE_NODATA;
1902 enum dc_aspect_ratio aspect = ASPECT_RATIO_NO_DATA;
1903 bool itc = false;
1904 uint8_t itc_value = 0;
1905 uint8_t cn0_cn1 = 0;
1906 unsigned int cn0_cn1_value = 0;
1907 uint8_t *check_sum = NULL;
1908 uint8_t byte_index = 0;
1909 union hdmi_info_packet *hdmi_info = &info_frame.avi_info_packet.info_packet_hdmi;
1910 union display_content_support support = {0};
1911 unsigned int vic = pipe_ctx->stream->timing.vic;
1912 enum dc_timing_3d_format format;
1913
1914 color_space = pipe_ctx->stream->output_color_space;
1915 if (color_space == COLOR_SPACE_UNKNOWN)
1916 color_space = (stream->timing.pixel_encoding == PIXEL_ENCODING_RGB) ?
1917 COLOR_SPACE_SRGB : COLOR_SPACE_YCBCR709;
1918
1919 /* Initialize header */
1920 hdmi_info->bits.header.info_frame_type = HDMI_INFOFRAME_TYPE_AVI;
1921 /* InfoFrameVersion_3 is defined by CEA861F (Section 6.4), but shall
1922 * not be used in HDMI 2.0 (Section 10.1) */
1923 hdmi_info->bits.header.version = 2;
1924 hdmi_info->bits.header.length = HDMI_AVI_INFOFRAME_SIZE;
1925
1926 /*
1927 * IDO-defined (Y2,Y1,Y0 = 1,1,1) shall not be used by devices built
1928 * according to HDMI 2.0 spec (Section 10.1)
1929 */
1930
1931 switch (stream->timing.pixel_encoding) {
1932 case PIXEL_ENCODING_YCBCR422:
1933 pixel_encoding = 1;
1934 break;
1935
1936 case PIXEL_ENCODING_YCBCR444:
1937 pixel_encoding = 2;
1938 break;
1939 case PIXEL_ENCODING_YCBCR420:
1940 pixel_encoding = 3;
1941 break;
1942
1943 case PIXEL_ENCODING_RGB:
1944 default:
1945 pixel_encoding = 0;
1946 }
1947
1948 /* Y0_Y1_Y2 : The pixel encoding */
1949 /* H14b AVI InfoFrame has extension on Y-field from 2 bits to 3 bits */
1950 hdmi_info->bits.Y0_Y1_Y2 = pixel_encoding;
1951
1952 /* A0 = 1 Active Format Information valid */
1953 hdmi_info->bits.A0 = ACTIVE_FORMAT_VALID;
1954
1955 /* B0, B1 = 3; Bar info data is valid */
1956 hdmi_info->bits.B0_B1 = BAR_INFO_BOTH_VALID;
1957
1958 hdmi_info->bits.SC0_SC1 = PICTURE_SCALING_UNIFORM;
1959
1960 /* S0, S1 : Underscan / Overscan */
1961 /* TODO: un-hardcode scan type */
1962 scan_type = SCANNING_TYPE_UNDERSCAN;
1963 hdmi_info->bits.S0_S1 = scan_type;
1964
1965 /* C0, C1 : Colorimetry */
1966 if (color_space == COLOR_SPACE_YCBCR709 ||
1967 color_space == COLOR_SPACE_YCBCR709_LIMITED)
1968 hdmi_info->bits.C0_C1 = COLORIMETRY_ITU709;
1969 else if (color_space == COLOR_SPACE_YCBCR601 ||
1970 color_space == COLOR_SPACE_YCBCR601_LIMITED)
1971 hdmi_info->bits.C0_C1 = COLORIMETRY_ITU601;
1972 else {
1973 hdmi_info->bits.C0_C1 = COLORIMETRY_NO_DATA;
1974 }
1975 if (color_space == COLOR_SPACE_2020_RGB_FULLRANGE ||
1976 color_space == COLOR_SPACE_2020_RGB_LIMITEDRANGE ||
1977 color_space == COLOR_SPACE_2020_YCBCR) {
1978 hdmi_info->bits.EC0_EC2 = COLORIMETRYEX_BT2020RGBYCBCR;
1979 hdmi_info->bits.C0_C1 = COLORIMETRY_EXTENDED;
1980 } else if (color_space == COLOR_SPACE_ADOBERGB) {
1981 hdmi_info->bits.EC0_EC2 = COLORIMETRYEX_ADOBERGB;
1982 hdmi_info->bits.C0_C1 = COLORIMETRY_EXTENDED;
1983 }
1984
1985 /* TODO: un-hardcode aspect ratio */
1986 aspect = stream->timing.aspect_ratio;
1987
1988 switch (aspect) {
1989 case ASPECT_RATIO_4_3:
1990 case ASPECT_RATIO_16_9:
1991 hdmi_info->bits.M0_M1 = aspect;
1992 break;
1993
1994 case ASPECT_RATIO_NO_DATA:
1995 case ASPECT_RATIO_64_27:
1996 case ASPECT_RATIO_256_135:
1997 default:
1998 hdmi_info->bits.M0_M1 = 0;
1999 }
2000
2001 /* Active Format Aspect ratio - same as Picture Aspect Ratio. */
2002 hdmi_info->bits.R0_R3 = ACTIVE_FORMAT_ASPECT_RATIO_SAME_AS_PICTURE;
2003
2004 /* TODO: un-hardcode cn0_cn1 and itc */
2005
2006 cn0_cn1 = 0;
2007 cn0_cn1_value = 0;
2008
2009 itc = true;
2010 itc_value = 1;
2011
2012 support = stream->sink->edid_caps.content_support;
2013
2014 if (itc) {
2015 if (!support.bits.valid_content_type) {
2016 cn0_cn1_value = 0;
2017 } else {
2018 if (cn0_cn1 == DISPLAY_CONTENT_TYPE_GRAPHICS) {
2019 if (support.bits.graphics_content == 1) {
2020 cn0_cn1_value = 0;
2021 }
2022 } else if (cn0_cn1 == DISPLAY_CONTENT_TYPE_PHOTO) {
2023 if (support.bits.photo_content == 1) {
2024 cn0_cn1_value = 1;
2025 } else {
2026 cn0_cn1_value = 0;
2027 itc_value = 0;
2028 }
2029 } else if (cn0_cn1 == DISPLAY_CONTENT_TYPE_CINEMA) {
2030 if (support.bits.cinema_content == 1) {
2031 cn0_cn1_value = 2;
2032 } else {
2033 cn0_cn1_value = 0;
2034 itc_value = 0;
2035 }
2036 } else if (cn0_cn1 == DISPLAY_CONTENT_TYPE_GAME) {
2037 if (support.bits.game_content == 1) {
2038 cn0_cn1_value = 3;
2039 } else {
2040 cn0_cn1_value = 0;
2041 itc_value = 0;
2042 }
2043 }
2044 }
2045 hdmi_info->bits.CN0_CN1 = cn0_cn1_value;
2046 hdmi_info->bits.ITC = itc_value;
2047 }
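/*
 * Summary of the mapping above (derived from this block of code):
 * CN0_CN1 = 0 selects Graphics, 1 Photo, 2 Cinema and 3 Game. For
 * Photo, Cinema or Game content that the sink's EDID does not
 * advertise, both CN0_CN1 and ITC fall back to 0.
 */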
2048
2049 /* TODO: We should handle YCC quantization,
2050 * but we do not have the matrix calculation. */
2051 if (stream->sink->edid_caps.qs_bit == 1 &&
2052 stream->sink->edid_caps.qy_bit == 1) {
2053 if (color_space == COLOR_SPACE_SRGB ||
2054 color_space == COLOR_SPACE_2020_RGB_FULLRANGE) {
2055 hdmi_info->bits.Q0_Q1 = RGB_QUANTIZATION_FULL_RANGE;
2056 hdmi_info->bits.YQ0_YQ1 = YYC_QUANTIZATION_FULL_RANGE;
2057 } else if (color_space == COLOR_SPACE_SRGB_LIMITED ||
2058 color_space == COLOR_SPACE_2020_RGB_LIMITEDRANGE) {
2059 hdmi_info->bits.Q0_Q1 = RGB_QUANTIZATION_LIMITED_RANGE;
2060 hdmi_info->bits.YQ0_YQ1 = YYC_QUANTIZATION_LIMITED_RANGE;
2061 } else {
2062 hdmi_info->bits.Q0_Q1 = RGB_QUANTIZATION_DEFAULT_RANGE;
2063 hdmi_info->bits.YQ0_YQ1 = YYC_QUANTIZATION_LIMITED_RANGE;
2064 }
2065 } else {
2066 hdmi_info->bits.Q0_Q1 = RGB_QUANTIZATION_DEFAULT_RANGE;
2067 hdmi_info->bits.YQ0_YQ1 = YYC_QUANTIZATION_LIMITED_RANGE;
2068 }
2069
2070 /* VIC */
2071 format = stream->timing.timing_3d_format;
2072 /* TODO: add 3D stereo support */
2073 if (format != TIMING_3D_FORMAT_NONE) {
2074 /* Per the HDMI spec, the HDMI VIC must be converted to a CEA VIC when 3D is enabled */
2075 switch (pipe_ctx->stream->timing.hdmi_vic) {
2076 case 1:
2077 vic = 95;
2078 break;
2079 case 2:
2080 vic = 94;
2081 break;
2082 case 3:
2083 vic = 93;
2084 break;
2085 case 4:
2086 vic = 98;
2087 break;
2088 default:
2089 break;
2090 }
2091 }
2092 hdmi_info->bits.VIC0_VIC7 = vic;
2093
2094 /* pixel repetition
2095 * PR0 - PR3 start from 0 whereas pHwPathMode->mode.timing.flags.pixel
2096 * repetition starts from 1 */
2097 hdmi_info->bits.PR0_PR3 = 0;
2098
2099 /* Bar Info
2100 * barTop: Line Number of End of Top Bar.
2101 * barBottom: Line Number of Start of Bottom Bar.
2102 * barLeft: Pixel Number of End of Left Bar.
2103 * barRight: Pixel Number of Start of Right Bar. */
2104 hdmi_info->bits.bar_top = stream->timing.v_border_top;
2105 hdmi_info->bits.bar_bottom = (stream->timing.v_total
2106 - stream->timing.v_border_bottom + 1);
2107 hdmi_info->bits.bar_left = stream->timing.h_border_left;
2108 hdmi_info->bits.bar_right = (stream->timing.h_total
2109 - stream->timing.h_border_right + 1);
2110
2111 /* check_sum - Calculate AFMT_AVI_INFO0 ~ AFMT_AVI_INFO3 */
2112 check_sum = &info_frame.avi_info_packet.info_packet_hdmi.packet_raw_data.sb[0];
2113
2114 *check_sum = HDMI_INFOFRAME_TYPE_AVI + HDMI_AVI_INFOFRAME_SIZE + 2;
2115
2116 for (byte_index = 1; byte_index <= HDMI_AVI_INFOFRAME_SIZE; byte_index++)
2117 *check_sum += hdmi_info->packet_raw_data.sb[byte_index];
2118
2119 /* one byte complement */
2120 *check_sum = (uint8_t) (0x100 - *check_sum);
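/*
 * With this complement, the sum of all AVI InfoFrame bytes (header,
 * checksum and payload) wraps to 0 modulo 256, which is the invariant
 * the sink checks. For example, a running sum of 0x5A yields a stored
 * checksum byte of 0x100 - 0x5A = 0xA6.
 */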
2121
2122 /* Store in hw_path_mode */
2123 info_packet->hb0 = hdmi_info->packet_raw_data.hb0;
2124 info_packet->hb1 = hdmi_info->packet_raw_data.hb1;
2125 info_packet->hb2 = hdmi_info->packet_raw_data.hb2;
2126
2127 for (byte_index = 0; byte_index < sizeof(info_frame.avi_info_packet.
2128 info_packet_hdmi.packet_raw_data.sb); byte_index++)
2129 info_packet->sb[byte_index] = info_frame.avi_info_packet.
2130 info_packet_hdmi.packet_raw_data.sb[byte_index];
2131
2132 info_packet->valid = true;
2133 }
2134
2135 static void set_vendor_info_packet(
2136 struct encoder_info_packet *info_packet,
2137 struct dc_stream_state *stream)
2138 {
2139 uint32_t length = 0;
2140 bool hdmi_vic_mode = false;
2141 uint8_t checksum = 0;
2142 uint32_t i = 0;
2143 enum dc_timing_3d_format format;
2144 /* TODO: can be different depending on packet content */
2145 /* unsigned int length = pPathMode->dolbyVision ? 24 : 5; */
2146
2147 info_packet->valid = false;
2148
2149 format = stream->timing.timing_3d_format;
2150 if (stream->view_format == VIEW_3D_FORMAT_NONE)
2151 format = TIMING_3D_FORMAT_NONE;
2152
2153 /* Can be different depending on packet content */
2154 length = 5;
2155
2156 if (stream->timing.hdmi_vic != 0
2157 && stream->timing.h_total >= 3840
2158 && stream->timing.v_total >= 2160)
2159 hdmi_vic_mode = true;
2160
2161 /* According to HDMI 1.4a CTS, VSIF should be sent
2162 * for both 3D stereo and HDMI VIC modes.
2163 * For all other modes, there is no VSIF sent. */
2164
2165 if (format == TIMING_3D_FORMAT_NONE && !hdmi_vic_mode)
2166 return;
2167
2168 /* 24bit IEEE Registration identifier (0x000c03). LSB first. */
2169 info_packet->sb[1] = 0x03;
2170 info_packet->sb[2] = 0x0C;
2171 info_packet->sb[3] = 0x00;
2172
2173 /* PB4: 5 lower bits = 0 (reserved). 3 higher bits = HDMI_Video_Format.
2174 * The value for HDMI_Video_Format are:
2175 * 0x0 (0b000) - No additional HDMI video format is presented in this
2176 * packet
2177 * 0x1 (0b001) - Extended resolution format present. 1 byte of HDMI_VIC
2178 * parameter follows
2179 * 0x2 (0b010) - 3D format indication present. 3D_Structure and
2180 * potentially 3D_Ext_Data follows
2181 * 0x3..0x7 (0b011..0b111) - reserved for future use */
2182 if (format != TIMING_3D_FORMAT_NONE)
2183 info_packet->sb[4] = (2 << 5);
2184 else if (hdmi_vic_mode)
2185 info_packet->sb[4] = (1 << 5);
2186
2187 /* PB5: If PB4 claims 3D timing (HDMI_Video_Format = 0x2):
2188 * 4 lower bits = 0 (reserved). 4 higher bits = 3D_Structure.
2189 * The value for 3D_Structure are:
2190 * 0x0 - Frame Packing
2191 * 0x1 - Field Alternative
2192 * 0x2 - Line Alternative
2193 * 0x3 - Side-by-Side (full)
2194 * 0x4 - L + depth
2195 * 0x5 - L + depth + graphics + graphics-depth
2196 * 0x6 - Top-and-Bottom
2197 * 0x7 - Reserved for future use
2198 * 0x8 - Side-by-Side (Half)
2199 * 0x9..0xE - Reserved for future use
2200 * 0xF - Not used */
2201 switch (format) {
2202 case TIMING_3D_FORMAT_HW_FRAME_PACKING:
2203 case TIMING_3D_FORMAT_SW_FRAME_PACKING:
2204 info_packet->sb[5] = (0x0 << 4);
2205 break;
2206
2207 case TIMING_3D_FORMAT_SIDE_BY_SIDE:
2208 case TIMING_3D_FORMAT_SBS_SW_PACKED:
2209 info_packet->sb[5] = (0x8 << 4);
2210 length = 6;
2211 break;
2212
2213 case TIMING_3D_FORMAT_TOP_AND_BOTTOM:
2214 case TIMING_3D_FORMAT_TB_SW_PACKED:
2215 info_packet->sb[5] = (0x6 << 4);
2216 break;
2217
2218 default:
2219 break;
2220 }
2221
2222 /*PB5: If PB4 is set to 0x1 (extended resolution format)
2223 * fill PB5 with the correct HDMI VIC code */
2224 if (hdmi_vic_mode)
2225 info_packet->sb[5] = stream->timing.hdmi_vic;
2226
2227 /* Header */
2228 info_packet->hb0 = HDMI_INFOFRAME_TYPE_VENDOR; /* VSIF packet type. */
2229 info_packet->hb1 = 0x01; /* Version */
2230
2231 /* Bits 4:0 = Length, Bits 7:5 = 0 (reserved) */
2232 info_packet->hb2 = (uint8_t) (length);
2233
2234 /* Calculate checksum */
2235 checksum = 0;
2236 checksum += info_packet->hb0;
2237 checksum += info_packet->hb1;
2238 checksum += info_packet->hb2;
2239
2240 for (i = 1; i <= length; i++)
2241 checksum += info_packet->sb[i];
2242
2243 info_packet->sb[0] = (uint8_t) (0x100 - checksum);
2244
2245 info_packet->valid = true;
2246 }
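/*
 * Illustrative VSIF contents, assuming an HDMI-VIC extended mode such
 * as 4K x 2K @ 30 Hz (stream->timing.hdmi_vic == 1): sb[1..3] carry
 * the IEEE OUI 0x000C03 LSB first, sb[4] = 0x20 (HDMI_Video_Format =
 * 0x1), sb[5] = 0x01 (the HDMI VIC itself), length stays 5, and sb[0]
 * holds the usual 0x100 - sum checksum.
 */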
2247
2248 static void set_spd_info_packet(
2249 struct encoder_info_packet *info_packet,
2250 struct dc_stream_state *stream)
2251 {
2252 /* SPD info packet for FreeSync */
2253
2254 unsigned char checksum = 0;
2255 unsigned int idx, payload_size = 0;
2256
2257 /* Check if Freesync is supported. Return if false. If true,
2258 * set the corresponding bit in the info packet
2259 */
2260 if (stream->freesync_ctx.supported == false)
2261 return;
2262
2263 if (dc_is_hdmi_signal(stream->signal)) {
2264
2265 /* HEADER */
2266
2267 /* HB0 = Packet Type = 0x83 (Source Product
2268 * Descriptor InfoFrame)
2269 */
2270 info_packet->hb0 = HDMI_INFOFRAME_TYPE_SPD;
2271
2272 /* HB1 = Version = 0x01 */
2273 info_packet->hb1 = 0x01;
2274
2275 /* HB2 = [Bits 7:5 = 0] [Bits 4:0 = Length = 0x08] */
2276 info_packet->hb2 = 0x08;
2277
2278 payload_size = 0x08;
2279
2280 } else if (dc_is_dp_signal(stream->signal)) {
2281
2282 /* HEADER */
2283
2284 /* HB0 = Secondary-data Packet ID = 0 - Only non-zero
2285 * when used to associate audio related info packets
2286 */
2287 info_packet->hb0 = 0x00;
2288
2289 /* HB1 = Packet Type = 0x83 (Source Product
2290 * Descriptor InfoFrame)
2291 */
2292 info_packet->hb1 = HDMI_INFOFRAME_TYPE_SPD;
2293
2294 /* HB2 = [Bits 7:0 = Least significant eight bits -
2295 * For INFOFRAME, the value must be 1Bh]
2296 */
2297 info_packet->hb2 = 0x1B;
2298
2299 /* HB3 = [Bits 7:2 = INFOFRAME SDP Version Number = 0x1]
2300 * [Bits 1:0 = Most significant two bits = 0x00]
2301 */
2302 info_packet->hb3 = 0x04;
2303
2304 payload_size = 0x1B;
2305 }
2306
2307 /* PB1 = 0x1A (24bit AMD IEEE OUI (0x00001A) - Byte 0) */
2308 info_packet->sb[1] = 0x1A;
2309
2310 /* PB2 = 0x00 (24bit AMD IEEE OUI (0x00001A) - Byte 1) */
2311 info_packet->sb[2] = 0x00;
2312
2313 /* PB3 = 0x00 (24bit AMD IEEE OUI (0x00001A) - Byte 2) */
2314 info_packet->sb[3] = 0x00;
2315
2316 /* PB4 = Reserved */
2317 info_packet->sb[4] = 0x00;
2318
2319 /* PB5 = Reserved */
2320 info_packet->sb[5] = 0x00;
2321
2322 /* PB6 = [Bits 7:3 = Reserved] */
2323 info_packet->sb[6] = 0x00;
2324
2325 if (stream->freesync_ctx.supported == true)
2326 /* PB6 = [Bit 0 = FreeSync Supported] */
2327 info_packet->sb[6] |= 0x01;
2328
2329 if (stream->freesync_ctx.enabled == true)
2330 /* PB6 = [Bit 1 = FreeSync Enabled] */
2331 info_packet->sb[6] |= 0x02;
2332
2333 if (stream->freesync_ctx.active == true)
2334 /* PB6 = [Bit 2 = FreeSync Active] */
2335 info_packet->sb[6] |= 0x04;
2336
2337 /* PB7 = FreeSync Minimum refresh rate (Hz) */
2338 info_packet->sb[7] = (unsigned char) (stream->freesync_ctx.
2339 min_refresh_in_micro_hz / 1000000);
2340
2341 /* PB8 = FreeSync Maximum refresh rate (Hz)
2342 *
2343 * Note: We do not use the maximum capable refresh rate
2344 * of the panel, because we should never go above the field
2345 * rate of the mode timing set.
2346 */
2347 info_packet->sb[8] = (unsigned char) (stream->freesync_ctx.
2348 nominal_refresh_in_micro_hz / 1000000);
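/*
 * Example of the micro-Hz to Hz conversion above (illustrative
 * numbers): a 40000000 micro-Hz minimum and a 60000000 micro-Hz
 * nominal refresh rate are reported as PB7 = 40 and PB8 = 60.
 */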
2349
2350 /* PB9 - PB27 = Reserved */
2351 for (idx = 9; idx <= 27; idx++)
2352 info_packet->sb[idx] = 0x00;
2353
2354 /* Calculate checksum */
2355 checksum += info_packet->hb0;
2356 checksum += info_packet->hb1;
2357 checksum += info_packet->hb2;
2358 checksum += info_packet->hb3;
2359
2360 for (idx = 1; idx <= payload_size; idx++)
2361 checksum += info_packet->sb[idx];
2362
2363 /* PB0 = Checksum (one byte complement) */
2364 info_packet->sb[0] = (unsigned char) (0x100 - checksum);
2365
2366 info_packet->valid = true;
2367 }
2368
2369 static void set_hdr_static_info_packet(
2370 struct encoder_info_packet *info_packet,
2371 struct dc_plane_state *plane_state,
2372 struct dc_stream_state *stream)
2373 {
2374 uint16_t i = 0;
2375 enum signal_type signal = stream->signal;
2376 struct dc_hdr_static_metadata hdr_metadata;
2377 uint32_t data;
2378
2379 if (!plane_state)
2380 return;
2381
2382 hdr_metadata = plane_state->hdr_static_ctx;
2383
2384 if (!hdr_metadata.hdr_supported)
2385 return;
2386
2387 if (dc_is_hdmi_signal(signal)) {
2388 info_packet->valid = true;
2389
2390 info_packet->hb0 = 0x87;
2391 info_packet->hb1 = 0x01;
2392 info_packet->hb2 = 0x1A;
2393 i = 1;
2394 } else if (dc_is_dp_signal(signal)) {
2395 info_packet->valid = true;
2396
2397 info_packet->hb0 = 0x00;
2398 info_packet->hb1 = 0x87;
2399 info_packet->hb2 = 0x1D;
2400 info_packet->hb3 = (0x13 << 2);
2401 i = 2;
2402 }
2403
2404 data = hdr_metadata.is_hdr;
2405 info_packet->sb[i++] = data ? 0x02 : 0x00;
2406 info_packet->sb[i++] = 0x00;
2407
2408 data = hdr_metadata.chromaticity_green_x / 2;
2409 info_packet->sb[i++] = data & 0xFF;
2410 info_packet->sb[i++] = (data & 0xFF00) >> 8;
2411
2412 data = hdr_metadata.chromaticity_green_y / 2;
2413 info_packet->sb[i++] = data & 0xFF;
2414 info_packet->sb[i++] = (data & 0xFF00) >> 8;
2415
2416 data = hdr_metadata.chromaticity_blue_x / 2;
2417 info_packet->sb[i++] = data & 0xFF;
2418 info_packet->sb[i++] = (data & 0xFF00) >> 8;
2419
2420 data = hdr_metadata.chromaticity_blue_y / 2;
2421 info_packet->sb[i++] = data & 0xFF;
2422 info_packet->sb[i++] = (data & 0xFF00) >> 8;
2423
2424 data = hdr_metadata.chromaticity_red_x / 2;
2425 info_packet->sb[i++] = data & 0xFF;
2426 info_packet->sb[i++] = (data & 0xFF00) >> 8;
2427
2428 data = hdr_metadata.chromaticity_red_y / 2;
2429 info_packet->sb[i++] = data & 0xFF;
2430 info_packet->sb[i++] = (data & 0xFF00) >> 8;
2431
2432 data = hdr_metadata.chromaticity_white_point_x / 2;
2433 info_packet->sb[i++] = data & 0xFF;
2434 info_packet->sb[i++] = (data & 0xFF00) >> 8;
2435
2436 data = hdr_metadata.chromaticity_white_point_y / 2;
2437 info_packet->sb[i++] = data & 0xFF;
2438 info_packet->sb[i++] = (data & 0xFF00) >> 8;
2439
2440 data = hdr_metadata.max_luminance;
2441 info_packet->sb[i++] = data & 0xFF;
2442 info_packet->sb[i++] = (data & 0xFF00) >> 8;
2443
2444 data = hdr_metadata.min_luminance;
2445 info_packet->sb[i++] = data & 0xFF;
2446 info_packet->sb[i++] = (data & 0xFF00) >> 8;
2447
2448 data = hdr_metadata.maximum_content_light_level;
2449 info_packet->sb[i++] = data & 0xFF;
2450 info_packet->sb[i++] = (data & 0xFF00) >> 8;
2451
2452 data = hdr_metadata.maximum_frame_average_light_level;
2453 info_packet->sb[i++] = data & 0xFF;
2454 info_packet->sb[i++] = (data & 0xFF00) >> 8;
2455
2456 if (dc_is_hdmi_signal(signal)) {
2457 uint32_t checksum = 0;
2458
2459 checksum += info_packet->hb0;
2460 checksum += info_packet->hb1;
2461 checksum += info_packet->hb2;
2462
2463 for (i = 1; i <= info_packet->hb2; i++)
2464 checksum += info_packet->sb[i];
2465
2466 info_packet->sb[0] = 0x100 - checksum;
2467 } else if (dc_is_dp_signal(signal)) {
2468 info_packet->sb[0] = 0x01;
2469 info_packet->sb[1] = 0x1A;
2470 }
2471 }
2472
2473 static void set_vsc_info_packet(
2474 struct encoder_info_packet *info_packet,
2475 struct dc_stream_state *stream)
2476 {
2477 unsigned int vscPacketRevision = 0;
2478 unsigned int i;
2479
2480 /* VSC packet set to 2 when DP revision >= 1.2 */
2481 if (stream->sink->link->dpcd_caps.dpcd_rev.raw >= DPCD_REV_12) {
2482 vscPacketRevision = 2;
2483 }
2484
2485 /* VSC packet not needed based on the features
2486 * supported by this DP display
2487 */
2488 if (vscPacketRevision == 0)
2489 return;
2490
2491 if (vscPacketRevision == 0x2) {
2492 /* Secondary-data Packet ID = 0*/
2493 info_packet->hb0 = 0x00;
2494 /* 07h - Packet Type Value indicating Video
2495 * Stream Configuration packet
2496 */
2497 info_packet->hb1 = 0x07;
2498 /* 02h = VSC SDP supporting 3D stereo and PSR
2499 * (applies to eDP v1.3 or higher).
2500 */
2501 info_packet->hb2 = 0x02;
2502 /* 08h = VSC packet supporting 3D stereo + PSR
2503 * (HB2 = 02h).
2504 */
2505 info_packet->hb3 = 0x08;
2506
2507 for (i = 0; i < 28; i++)
2508 info_packet->sb[i] = 0;
2509
2510 info_packet->valid = true;
2511 }
2512
2513 /* TODO: stereo 3D support and extend pixel encoding colorimetry */
2514 }
2515
2516 void dc_resource_state_destruct(struct dc_state *context)
2517 {
2518 int i, j;
2519
2520 for (i = 0; i < context->stream_count; i++) {
2521 for (j = 0; j < context->stream_status[i].plane_count; j++)
2522 dc_plane_state_release(
2523 context->stream_status[i].plane_states[j]);
2524
2525 context->stream_status[i].plane_count = 0;
2526 dc_stream_release(context->streams[i]);
2527 context->streams[i] = NULL;
2528 }
2529 }
2530
2531 /*
2532 * Copy src_ctx into dst_ctx and retain all surfaces and streams referenced
2533 * by the src_ctx
2534 */
2535 void dc_resource_state_copy_construct(
2536 const struct dc_state *src_ctx,
2537 struct dc_state *dst_ctx)
2538 {
2539 int i, j;
2540 struct kref refcount = dst_ctx->refcount;
2541
2542 *dst_ctx = *src_ctx;
2543
2544 for (i = 0; i < MAX_PIPES; i++) {
2545 struct pipe_ctx *cur_pipe = &dst_ctx->res_ctx.pipe_ctx[i];
2546
2547 if (cur_pipe->top_pipe)
2548 cur_pipe->top_pipe = &dst_ctx->res_ctx.pipe_ctx[cur_pipe->top_pipe->pipe_idx];
2549
2550 if (cur_pipe->bottom_pipe)
2551 cur_pipe->bottom_pipe = &dst_ctx->res_ctx.pipe_ctx[cur_pipe->bottom_pipe->pipe_idx];
2552
2553 }
2554
2555 for (i = 0; i < dst_ctx->stream_count; i++) {
2556 dc_stream_retain(dst_ctx->streams[i]);
2557 for (j = 0; j < dst_ctx->stream_status[i].plane_count; j++)
2558 dc_plane_state_retain(
2559 dst_ctx->stream_status[i].plane_states[j]);
2560 }
2561
2562 /* context refcount should not be overridden */
2563 dst_ctx->refcount = refcount;
2564
2565 }
2566
2567 struct clock_source *dc_resource_find_first_free_pll(
2568 struct resource_context *res_ctx,
2569 const struct resource_pool *pool)
2570 {
2571 int i;
2572
2573 for (i = 0; i < pool->clk_src_count; ++i) {
2574 if (res_ctx->clock_source_ref_count[i] == 0)
2575 return pool->clock_sources[i];
2576 }
2577
2578 return NULL;
2579 }
2580
2581 void resource_build_info_frame(struct pipe_ctx *pipe_ctx)
2582 {
2583 enum signal_type signal = SIGNAL_TYPE_NONE;
2584 struct encoder_info_frame *info = &pipe_ctx->stream_res.encoder_info_frame;
2585
2586 /* default all packets to invalid */
2587 info->avi.valid = false;
2588 info->gamut.valid = false;
2589 info->vendor.valid = false;
2590 info->spd.valid = false;
2591 info->hdrsmd.valid = false;
2592 info->vsc.valid = false;
2593
2594 signal = pipe_ctx->stream->signal;
2595
2596 /* HDMI and DP have different info packets */
2597 if (dc_is_hdmi_signal(signal)) {
2598 set_avi_info_frame(&info->avi, pipe_ctx);
2599
2600 set_vendor_info_packet(&info->vendor, pipe_ctx->stream);
2601
2602 set_spd_info_packet(&info->spd, pipe_ctx->stream);
2603
2604 set_hdr_static_info_packet(&info->hdrsmd,
2605 pipe_ctx->plane_state, pipe_ctx->stream);
2606
2607 } else if (dc_is_dp_signal(signal)) {
2608 set_vsc_info_packet(&info->vsc, pipe_ctx->stream);
2609
2610 set_spd_info_packet(&info->spd, pipe_ctx->stream);
2611
2612 set_hdr_static_info_packet(&info->hdrsmd,
2613 pipe_ctx->plane_state, pipe_ctx->stream);
2614 }
2615
2616 patch_gamut_packet_checksum(&info->gamut);
2617 }
2618
2619 enum dc_status resource_map_clock_resources(
2620 const struct dc *dc,
2621 struct dc_state *context,
2622 struct dc_stream_state *stream)
2623 {
2624 /* acquire new resources */
2625 const struct resource_pool *pool = dc->res_pool;
2626 struct pipe_ctx *pipe_ctx = resource_get_head_pipe_for_stream(
2627 &context->res_ctx, stream);
2628
2629 if (!pipe_ctx)
2630 return DC_ERROR_UNEXPECTED;
2631
2632 if (dc_is_dp_signal(pipe_ctx->stream->signal)
2633 || pipe_ctx->stream->signal == SIGNAL_TYPE_VIRTUAL)
2634 pipe_ctx->clock_source = pool->dp_clock_source;
2635 else {
2636 pipe_ctx->clock_source = NULL;
2637
2638 if (!dc->config.disable_disp_pll_sharing)
2639 pipe_ctx->clock_source = resource_find_used_clk_src_for_sharing(
2640 &context->res_ctx,
2641 pipe_ctx);
2642
2643 if (pipe_ctx->clock_source == NULL)
2644 pipe_ctx->clock_source =
2645 dc_resource_find_first_free_pll(
2646 &context->res_ctx,
2647 pool);
2648 }
2649
2650 if (pipe_ctx->clock_source == NULL)
2651 return DC_NO_CLOCK_SOURCE_RESOURCE;
2652
2653 resource_reference_clock_source(
2654 &context->res_ctx, pool,
2655 pipe_ctx->clock_source);
2656
2657 return DC_OK;
2658 }
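/*
 * Selection order recap (as implemented above): DP and virtual streams
 * always take pool->dp_clock_source; other streams reuse a compatible
 * in-use PLL when PLL sharing is allowed, otherwise they take the
 * first unreferenced PLL, and DC_NO_CLOCK_SOURCE_RESOURCE is returned
 * only when none of these yields a clock source.
 */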
2659
2660 /*
2661 * Note: We need to disable output if clock sources change,
2662 * since the BIOS performs optimizations and does not apply the change
2663 * to the PHY unless it is already disabled.
2664 */
2665 bool pipe_need_reprogram(
2666 struct pipe_ctx *pipe_ctx_old,
2667 struct pipe_ctx *pipe_ctx)
2668 {
2669 if (!pipe_ctx_old->stream)
2670 return false;
2671
2672 if (pipe_ctx_old->stream->sink != pipe_ctx->stream->sink)
2673 return true;
2674
2675 if (pipe_ctx_old->stream->signal != pipe_ctx->stream->signal)
2676 return true;
2677
2678 if (pipe_ctx_old->stream_res.audio != pipe_ctx->stream_res.audio)
2679 return true;
2680
2681 if (pipe_ctx_old->clock_source != pipe_ctx->clock_source
2682 && pipe_ctx_old->stream != pipe_ctx->stream)
2683 return true;
2684
2685 if (pipe_ctx_old->stream_res.stream_enc != pipe_ctx->stream_res.stream_enc)
2686 return true;
2687
2688 if (is_timing_changed(pipe_ctx_old->stream, pipe_ctx->stream))
2689 return true;
2690
2691
2692 return false;
2693 }
2694
2695 void resource_build_bit_depth_reduction_params(struct dc_stream_state *stream,
2696 struct bit_depth_reduction_params *fmt_bit_depth)
2697 {
2698 enum dc_dither_option option = stream->dither_option;
2699 enum dc_pixel_encoding pixel_encoding =
2700 stream->timing.pixel_encoding;
2701
2702 memset(fmt_bit_depth, 0, sizeof(*fmt_bit_depth));
2703
2704 if (option == DITHER_OPTION_DEFAULT) {
2705 switch (stream->timing.display_color_depth) {
2706 case COLOR_DEPTH_666:
2707 option = DITHER_OPTION_SPATIAL6;
2708 break;
2709 case COLOR_DEPTH_888:
2710 option = DITHER_OPTION_SPATIAL8;
2711 break;
2712 case COLOR_DEPTH_101010:
2713 option = DITHER_OPTION_SPATIAL10;
2714 break;
2715 default:
2716 option = DITHER_OPTION_DISABLE;
2717 }
2718 }
2719
2720 if (option == DITHER_OPTION_DISABLE)
2721 return;
2722
2723 if (option == DITHER_OPTION_TRUN6) {
2724 fmt_bit_depth->flags.TRUNCATE_ENABLED = 1;
2725 fmt_bit_depth->flags.TRUNCATE_DEPTH = 0;
2726 } else if (option == DITHER_OPTION_TRUN8 ||
2727 option == DITHER_OPTION_TRUN8_SPATIAL6 ||
2728 option == DITHER_OPTION_TRUN8_FM6) {
2729 fmt_bit_depth->flags.TRUNCATE_ENABLED = 1;
2730 fmt_bit_depth->flags.TRUNCATE_DEPTH = 1;
2731 } else if (option == DITHER_OPTION_TRUN10 ||
2732 option == DITHER_OPTION_TRUN10_SPATIAL6 ||
2733 option == DITHER_OPTION_TRUN10_SPATIAL8 ||
2734 option == DITHER_OPTION_TRUN10_FM8 ||
2735 option == DITHER_OPTION_TRUN10_FM6 ||
2736 option == DITHER_OPTION_TRUN10_SPATIAL8_FM6) {
2737 fmt_bit_depth->flags.TRUNCATE_ENABLED = 1;
2738 fmt_bit_depth->flags.TRUNCATE_DEPTH = 2;
2739 }
2740
2741 /* special case - the Formatter can reduce by at most 4 bits.
2742 * When reducing from 12 to 6 bits,
2743 * HW recommends we use trunc with round mode
2744 * (if we did nothing, trunc to 10 bits would be used)
2745 * note that any 12->10 bit reduction is ignored prior to DCE8,
2746 * as the input was 10 bits.
2747 */
2748 if (option == DITHER_OPTION_SPATIAL6_FRAME_RANDOM ||
2749 option == DITHER_OPTION_SPATIAL6 ||
2750 option == DITHER_OPTION_FM6) {
2751 fmt_bit_depth->flags.TRUNCATE_ENABLED = 1;
2752 fmt_bit_depth->flags.TRUNCATE_DEPTH = 2;
2753 fmt_bit_depth->flags.TRUNCATE_MODE = 1;
2754 }
2755
2756 /* spatial dither
2757 * note that spatial modes 1-3 are never used
2758 */
2759 if (option == DITHER_OPTION_SPATIAL6_FRAME_RANDOM ||
2760 option == DITHER_OPTION_SPATIAL6 ||
2761 option == DITHER_OPTION_TRUN10_SPATIAL6 ||
2762 option == DITHER_OPTION_TRUN8_SPATIAL6) {
2763 fmt_bit_depth->flags.SPATIAL_DITHER_ENABLED = 1;
2764 fmt_bit_depth->flags.SPATIAL_DITHER_DEPTH = 0;
2765 fmt_bit_depth->flags.HIGHPASS_RANDOM = 1;
2766 fmt_bit_depth->flags.RGB_RANDOM =
2767 (pixel_encoding == PIXEL_ENCODING_RGB) ? 1 : 0;
2768 } else if (option == DITHER_OPTION_SPATIAL8_FRAME_RANDOM ||
2769 option == DITHER_OPTION_SPATIAL8 ||
2770 option == DITHER_OPTION_SPATIAL8_FM6 ||
2771 option == DITHER_OPTION_TRUN10_SPATIAL8 ||
2772 option == DITHER_OPTION_TRUN10_SPATIAL8_FM6) {
2773 fmt_bit_depth->flags.SPATIAL_DITHER_ENABLED = 1;
2774 fmt_bit_depth->flags.SPATIAL_DITHER_DEPTH = 1;
2775 fmt_bit_depth->flags.HIGHPASS_RANDOM = 1;
2776 fmt_bit_depth->flags.RGB_RANDOM =
2777 (pixel_encoding == PIXEL_ENCODING_RGB) ? 1 : 0;
2778 } else if (option == DITHER_OPTION_SPATIAL10_FRAME_RANDOM ||
2779 option == DITHER_OPTION_SPATIAL10 ||
2780 option == DITHER_OPTION_SPATIAL10_FM8 ||
2781 option == DITHER_OPTION_SPATIAL10_FM6) {
2782 fmt_bit_depth->flags.SPATIAL_DITHER_ENABLED = 1;
2783 fmt_bit_depth->flags.SPATIAL_DITHER_DEPTH = 2;
2784 fmt_bit_depth->flags.HIGHPASS_RANDOM = 1;
2785 fmt_bit_depth->flags.RGB_RANDOM =
2786 (pixel_encoding == PIXEL_ENCODING_RGB) ? 1 : 0;
2787 }
2788
2789 if (option == DITHER_OPTION_SPATIAL6 ||
2790 option == DITHER_OPTION_SPATIAL8 ||
2791 option == DITHER_OPTION_SPATIAL10) {
2792 fmt_bit_depth->flags.FRAME_RANDOM = 0;
2793 } else {
2794 fmt_bit_depth->flags.FRAME_RANDOM = 1;
2795 }
2796
2797 /* temporal dither */
2800 if (option == DITHER_OPTION_FM6 ||
2801 option == DITHER_OPTION_SPATIAL8_FM6 ||
2802 option == DITHER_OPTION_SPATIAL10_FM6 ||
2803 option == DITHER_OPTION_TRUN10_FM6 ||
2804 option == DITHER_OPTION_TRUN8_FM6 ||
2805 option == DITHER_OPTION_TRUN10_SPATIAL8_FM6) {
2806 fmt_bit_depth->flags.FRAME_MODULATION_ENABLED = 1;
2807 fmt_bit_depth->flags.FRAME_MODULATION_DEPTH = 0;
2808 } else if (option == DITHER_OPTION_FM8 ||
2809 option == DITHER_OPTION_SPATIAL10_FM8 ||
2810 option == DITHER_OPTION_TRUN10_FM8) {
2811 fmt_bit_depth->flags.FRAME_MODULATION_ENABLED = 1;
2812 fmt_bit_depth->flags.FRAME_MODULATION_DEPTH = 1;
2813 } else if (option == DITHER_OPTION_FM10) {
2814 fmt_bit_depth->flags.FRAME_MODULATION_ENABLED = 1;
2815 fmt_bit_depth->flags.FRAME_MODULATION_DEPTH = 2;
2816 }
2817
2818 fmt_bit_depth->pixel_encoding = pixel_encoding;
2819 }
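/*
 * Reading the flag encodings back out of the code above (a summary of
 * this function, not a register description): TRUNCATE_DEPTH,
 * SPATIAL_DITHER_DEPTH and FRAME_MODULATION_DEPTH all use 0 for a
 * 6 bpc target, 1 for 8 bpc and 2 for 10 bpc, and FRAME_RANDOM is
 * cleared only for the plain spatial options (SPATIAL6/8/10) that
 * carry no frame-random component.
 */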
2820
2821 enum dc_status dc_validate_stream(struct dc *dc, struct dc_stream_state *stream)
2822 {
2823 struct dc *core_dc = dc;
2824 struct dc_link *link = stream->sink->link;
2825 struct timing_generator *tg = core_dc->res_pool->timing_generators[0];
2826 enum dc_status res = DC_OK;
2827
2828 calculate_phy_pix_clks(stream);
2829
2830 if (!tg->funcs->validate_timing(tg, &stream->timing))
2831 res = DC_FAIL_CONTROLLER_VALIDATE;
2832
2833 if (res == DC_OK)
2834 if (!link->link_enc->funcs->validate_output_with_stream(
2835 link->link_enc, stream))
2836 res = DC_FAIL_ENC_VALIDATE;
2837
2838 /* TODO: validate audio ASIC caps, encoder */
2839
2840 if (res == DC_OK)
2841 res = dc_link_validate_mode_timing(stream,
2842 link,
2843 &stream->timing);
2844
2845 return res;
2846 }
2847
2848 enum dc_status dc_validate_plane(struct dc *dc, const struct dc_plane_state *plane_state)
2849 {
2850 enum dc_status res = DC_OK;
2851
2852 /* TODO For now validates pixel format only */
2853 if (dc->res_pool->funcs->validate_plane)
2854 return dc->res_pool->funcs->validate_plane(plane_state, &dc->caps);
2855
2856 return res;
2857 }