2 * Copyright 2012-15 Advanced Micro Devices, Inc.
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
25 #include "dm_services.h"
28 #include "include/irq_service_interface.h"
29 #include "link_encoder.h"
30 #include "stream_encoder.h"
32 #include "timing_generator.h"
33 #include "transform.h"
34 #include "core_types.h"
35 #include "set_mode_types.h"
36 #include "virtual/virtual_stream_encoder.h"
38 #include "dce80/dce80_resource.h"
39 #include "dce100/dce100_resource.h"
40 #include "dce110/dce110_resource.h"
41 #include "dce112/dce112_resource.h"
42 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
43 #include "dcn10/dcn10_resource.h"
45 #include "dce120/dce120_resource.h"
47 enum dce_version
resource_parse_asic_id(struct hw_asic_id asic_id
)
49 enum dce_version dc_version
= DCE_VERSION_UNKNOWN
;
50 switch (asic_id
.chip_family
) {
54 dc_version
= DCE_VERSION_8_0
;
57 dc_version
= DCE_VERSION_11_0
;
61 if (ASIC_REV_IS_TONGA_P(asic_id
.hw_internal_rev
) ||
62 ASIC_REV_IS_FIJI_P(asic_id
.hw_internal_rev
)) {
63 dc_version
= DCE_VERSION_10_0
;
66 if (ASIC_REV_IS_POLARIS10_P(asic_id
.hw_internal_rev
) ||
67 ASIC_REV_IS_POLARIS11_M(asic_id
.hw_internal_rev
) ||
68 ASIC_REV_IS_POLARIS12_V(asic_id
.hw_internal_rev
)) {
69 dc_version
= DCE_VERSION_11_2
;
73 dc_version
= DCE_VERSION_12_0
;
75 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
77 dc_version
= DCN_VERSION_1_0
;
81 dc_version
= DCE_VERSION_UNKNOWN
;
87 struct resource_pool
*dc_create_resource_pool(
89 int num_virtual_links
,
90 enum dce_version dc_version
,
91 struct hw_asic_id asic_id
)
93 struct resource_pool
*res_pool
= NULL
;
97 res_pool
= dce80_create_resource_pool(
98 num_virtual_links
, dc
);
100 case DCE_VERSION_10_0
:
101 res_pool
= dce100_create_resource_pool(
102 num_virtual_links
, dc
);
104 case DCE_VERSION_11_0
:
105 res_pool
= dce110_create_resource_pool(
106 num_virtual_links
, dc
, asic_id
);
108 case DCE_VERSION_11_2
:
109 res_pool
= dce112_create_resource_pool(
110 num_virtual_links
, dc
);
112 case DCE_VERSION_12_0
:
113 res_pool
= dce120_create_resource_pool(
114 num_virtual_links
, dc
);
117 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
118 case DCN_VERSION_1_0
:
119 res_pool
= dcn10_create_resource_pool(
120 num_virtual_links
, dc
);
128 if (res_pool
!= NULL
) {
129 struct dc_firmware_info fw_info
= { { 0 } };
131 if (dc
->ctx
->dc_bios
->funcs
->get_firmware_info(
132 dc
->ctx
->dc_bios
, &fw_info
) == BP_RESULT_OK
) {
133 res_pool
->ref_clock_inKhz
= fw_info
.pll_info
.crystal_frequency
;
135 ASSERT_CRITICAL(false);
141 void dc_destroy_resource_pool(struct core_dc
*dc
)
145 dc
->res_pool
->funcs
->destroy(&dc
->res_pool
);
152 static void update_num_audio(
153 const struct resource_straps
*straps
,
154 unsigned int *num_audio
,
155 struct audio_support
*aud_support
)
157 if (straps
->hdmi_disable
== 0) {
158 aud_support
->hdmi_audio_native
= true;
159 aud_support
->hdmi_audio_on_dongle
= true;
160 aud_support
->dp_audio
= true;
162 if (straps
->dc_pinstraps_audio
& 0x2) {
163 aud_support
->hdmi_audio_on_dongle
= true;
164 aud_support
->dp_audio
= true;
166 aud_support
->dp_audio
= true;
170 switch (straps
->audio_stream_number
) {
171 case 0: /* multi streams supported */
173 case 1: /* multi streams not supported */
177 DC_ERR("DC: unexpected audio fuse!\n");
181 bool resource_construct(
182 unsigned int num_virtual_links
,
184 struct resource_pool
*pool
,
185 const struct resource_create_funcs
*create_funcs
)
187 struct dc_context
*ctx
= dc
->ctx
;
188 const struct resource_caps
*caps
= pool
->res_cap
;
190 unsigned int num_audio
= caps
->num_audio
;
191 struct resource_straps straps
= {0};
193 if (create_funcs
->read_dce_straps
)
194 create_funcs
->read_dce_straps(dc
->ctx
, &straps
);
196 pool
->audio_count
= 0;
197 if (create_funcs
->create_audio
) {
198 /* find the total number of streams available via the
199 * AZALIA_F0_CODEC_PIN_CONTROL_RESPONSE_CONFIGURATION_DEFAULT
200 * registers (one for each pin) starting from pin 1
201 * up to the max number of audio pins.
202 * We stop on the first pin where
203 * PORT_CONNECTIVITY == 1 (as instructed by HW team).
205 update_num_audio(&straps
, &num_audio
, &pool
->audio_support
);
206 for (i
= 0; i
< pool
->pipe_count
&& i
< num_audio
; i
++) {
207 struct audio
*aud
= create_funcs
->create_audio(ctx
, i
);
210 DC_ERR("DC: failed to create audio!\n");
214 if (!aud
->funcs
->endpoint_valid(aud
)) {
215 aud
->funcs
->destroy(&aud
);
219 pool
->audios
[i
] = aud
;
224 pool
->stream_enc_count
= 0;
225 if (create_funcs
->create_stream_encoder
) {
226 for (i
= 0; i
< caps
->num_stream_encoder
; i
++) {
227 pool
->stream_enc
[i
] = create_funcs
->create_stream_encoder(i
, ctx
);
228 if (pool
->stream_enc
[i
] == NULL
)
229 DC_ERR("DC: failed to create stream_encoder!\n");
230 pool
->stream_enc_count
++;
234 for (i
= 0; i
< num_virtual_links
; i
++) {
235 pool
->stream_enc
[pool
->stream_enc_count
] =
236 virtual_stream_encoder_create(
238 if (pool
->stream_enc
[pool
->stream_enc_count
] == NULL
) {
239 DC_ERR("DC: failed to create stream_encoder!\n");
242 pool
->stream_enc_count
++;
245 dc
->hwseq
= create_funcs
->create_hwseq(ctx
);
251 void resource_unreference_clock_source(
252 struct resource_context
*res_ctx
,
253 const struct resource_pool
*pool
,
254 struct clock_source
**clock_source
)
257 for (i
= 0; i
< pool
->clk_src_count
; i
++) {
258 if (pool
->clock_sources
[i
] != *clock_source
)
261 res_ctx
->clock_source_ref_count
[i
]--;
263 if (res_ctx
->clock_source_ref_count
[i
] == 0)
264 (*clock_source
)->funcs
->cs_power_down(*clock_source
);
269 if (pool
->dp_clock_source
== *clock_source
) {
270 res_ctx
->dp_clock_source_ref_count
--;
272 if (res_ctx
->dp_clock_source_ref_count
== 0)
273 (*clock_source
)->funcs
->cs_power_down(*clock_source
);
275 *clock_source
= NULL
;
278 void resource_reference_clock_source(
279 struct resource_context
*res_ctx
,
280 const struct resource_pool
*pool
,
281 struct clock_source
*clock_source
)
284 for (i
= 0; i
< pool
->clk_src_count
; i
++) {
285 if (pool
->clock_sources
[i
] != clock_source
)
288 res_ctx
->clock_source_ref_count
[i
]++;
292 if (pool
->dp_clock_source
== clock_source
)
293 res_ctx
->dp_clock_source_ref_count
++;
296 bool resource_are_streams_timing_synchronizable(
297 struct dc_stream_state
*stream1
,
298 struct dc_stream_state
*stream2
)
300 if (stream1
->timing
.h_total
!= stream2
->timing
.h_total
)
303 if (stream1
->timing
.v_total
!= stream2
->timing
.v_total
)
306 if (stream1
->timing
.h_addressable
307 != stream2
->timing
.h_addressable
)
310 if (stream1
->timing
.v_addressable
311 != stream2
->timing
.v_addressable
)
314 if (stream1
->timing
.pix_clk_khz
315 != stream2
->timing
.pix_clk_khz
)
318 if (stream1
->phy_pix_clk
!= stream2
->phy_pix_clk
319 && (!dc_is_dp_signal(stream1
->signal
)
320 || !dc_is_dp_signal(stream2
->signal
)))
326 static bool is_sharable_clk_src(
327 const struct pipe_ctx
*pipe_with_clk_src
,
328 const struct pipe_ctx
*pipe
)
330 if (pipe_with_clk_src
->clock_source
== NULL
)
333 if (pipe_with_clk_src
->stream
->signal
== SIGNAL_TYPE_VIRTUAL
)
336 if (dc_is_dp_signal(pipe_with_clk_src
->stream
->signal
))
339 if (dc_is_hdmi_signal(pipe_with_clk_src
->stream
->signal
)
340 && dc_is_dvi_signal(pipe
->stream
->signal
))
343 if (dc_is_hdmi_signal(pipe
->stream
->signal
)
344 && dc_is_dvi_signal(pipe_with_clk_src
->stream
->signal
))
347 if (!resource_are_streams_timing_synchronizable(
348 pipe_with_clk_src
->stream
, pipe
->stream
))
354 struct clock_source
*resource_find_used_clk_src_for_sharing(
355 struct resource_context
*res_ctx
,
356 struct pipe_ctx
*pipe_ctx
)
360 for (i
= 0; i
< MAX_PIPES
; i
++) {
361 if (is_sharable_clk_src(&res_ctx
->pipe_ctx
[i
], pipe_ctx
))
362 return res_ctx
->pipe_ctx
[i
].clock_source
;
368 static enum pixel_format
convert_pixel_format_to_dalsurface(
369 enum surface_pixel_format surface_pixel_format
)
371 enum pixel_format dal_pixel_format
= PIXEL_FORMAT_UNKNOWN
;
373 switch (surface_pixel_format
) {
374 case SURFACE_PIXEL_FORMAT_GRPH_PALETA_256_COLORS
:
375 dal_pixel_format
= PIXEL_FORMAT_INDEX8
;
377 case SURFACE_PIXEL_FORMAT_GRPH_ARGB1555
:
378 dal_pixel_format
= PIXEL_FORMAT_RGB565
;
380 case SURFACE_PIXEL_FORMAT_GRPH_RGB565
:
381 dal_pixel_format
= PIXEL_FORMAT_RGB565
;
383 case SURFACE_PIXEL_FORMAT_GRPH_ARGB8888
:
384 dal_pixel_format
= PIXEL_FORMAT_ARGB8888
;
386 case SURFACE_PIXEL_FORMAT_GRPH_ABGR8888
:
387 dal_pixel_format
= PIXEL_FORMAT_ARGB8888
;
389 case SURFACE_PIXEL_FORMAT_GRPH_ARGB2101010
:
390 dal_pixel_format
= PIXEL_FORMAT_ARGB2101010
;
392 case SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010
:
393 dal_pixel_format
= PIXEL_FORMAT_ARGB2101010
;
395 case SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010_XR_BIAS
:
396 dal_pixel_format
= PIXEL_FORMAT_ARGB2101010_XRBIAS
;
398 case SURFACE_PIXEL_FORMAT_GRPH_ABGR16161616F
:
399 case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616F
:
400 dal_pixel_format
= PIXEL_FORMAT_FP16
;
402 case SURFACE_PIXEL_FORMAT_VIDEO_420_YCbCr
:
403 case SURFACE_PIXEL_FORMAT_VIDEO_420_YCrCb
:
404 dal_pixel_format
= PIXEL_FORMAT_420BPP8
;
406 case SURFACE_PIXEL_FORMAT_VIDEO_420_10bpc_YCbCr
:
407 case SURFACE_PIXEL_FORMAT_VIDEO_420_10bpc_YCrCb
:
408 dal_pixel_format
= PIXEL_FORMAT_420BPP10
;
410 case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616
:
412 dal_pixel_format
= PIXEL_FORMAT_UNKNOWN
;
415 return dal_pixel_format
;
418 static void rect_swap_helper(struct rect
*rect
)
423 rect
->height
= rect
->width
;
431 static void calculate_viewport(struct pipe_ctx
*pipe_ctx
)
433 const struct dc_plane_state
*plane_state
= pipe_ctx
->plane_state
;
434 const struct dc_stream_state
*stream
= pipe_ctx
->stream
;
435 struct scaler_data
*data
= &pipe_ctx
->scl_data
;
436 struct rect surf_src
= plane_state
->src_rect
;
437 struct rect clip
= { 0 };
438 int vpc_div
= (data
->format
== PIXEL_FORMAT_420BPP8
439 || data
->format
== PIXEL_FORMAT_420BPP10
) ? 2 : 1;
440 bool pri_split
= pipe_ctx
->bottom_pipe
&&
441 pipe_ctx
->bottom_pipe
->plane_state
== pipe_ctx
->plane_state
;
442 bool sec_split
= pipe_ctx
->top_pipe
&&
443 pipe_ctx
->top_pipe
->plane_state
== pipe_ctx
->plane_state
;
445 if (stream
->view_format
== VIEW_3D_FORMAT_SIDE_BY_SIDE
||
446 stream
->view_format
== VIEW_3D_FORMAT_TOP_AND_BOTTOM
) {
451 if (pipe_ctx
->plane_state
->rotation
== ROTATION_ANGLE_90
||
452 pipe_ctx
->plane_state
->rotation
== ROTATION_ANGLE_270
)
453 rect_swap_helper(&surf_src
);
455 /* The actual clip is an intersection between stream
456 * source and surface clip
458 clip
.x
= stream
->src
.x
> plane_state
->clip_rect
.x
?
459 stream
->src
.x
: plane_state
->clip_rect
.x
;
461 clip
.width
= stream
->src
.x
+ stream
->src
.width
<
462 plane_state
->clip_rect
.x
+ plane_state
->clip_rect
.width
?
463 stream
->src
.x
+ stream
->src
.width
- clip
.x
:
464 plane_state
->clip_rect
.x
+ plane_state
->clip_rect
.width
- clip
.x
;
466 clip
.y
= stream
->src
.y
> plane_state
->clip_rect
.y
?
467 stream
->src
.y
: plane_state
->clip_rect
.y
;
469 clip
.height
= stream
->src
.y
+ stream
->src
.height
<
470 plane_state
->clip_rect
.y
+ plane_state
->clip_rect
.height
?
471 stream
->src
.y
+ stream
->src
.height
- clip
.y
:
472 plane_state
->clip_rect
.y
+ plane_state
->clip_rect
.height
- clip
.y
;
474 /* offset = surf_src.ofs + (clip.ofs - surface->dst_rect.ofs) * scl_ratio
475 * num_pixels = clip.num_pix * scl_ratio
477 data
->viewport
.x
= surf_src
.x
+ (clip
.x
- plane_state
->dst_rect
.x
) *
478 surf_src
.width
/ plane_state
->dst_rect
.width
;
479 data
->viewport
.width
= clip
.width
*
480 surf_src
.width
/ plane_state
->dst_rect
.width
;
482 data
->viewport
.y
= surf_src
.y
+ (clip
.y
- plane_state
->dst_rect
.y
) *
483 surf_src
.height
/ plane_state
->dst_rect
.height
;
484 data
->viewport
.height
= clip
.height
*
485 surf_src
.height
/ plane_state
->dst_rect
.height
;
487 /* Round down, compensate in init */
488 data
->viewport_c
.x
= data
->viewport
.x
/ vpc_div
;
489 data
->viewport_c
.y
= data
->viewport
.y
/ vpc_div
;
490 data
->inits
.h_c
= (data
->viewport
.x
% vpc_div
) != 0 ?
491 dal_fixed31_32_half
: dal_fixed31_32_zero
;
492 data
->inits
.v_c
= (data
->viewport
.y
% vpc_div
) != 0 ?
493 dal_fixed31_32_half
: dal_fixed31_32_zero
;
494 /* Round up, assume original video size always even dimensions */
495 data
->viewport_c
.width
= (data
->viewport
.width
+ vpc_div
- 1) / vpc_div
;
496 data
->viewport_c
.height
= (data
->viewport
.height
+ vpc_div
- 1) / vpc_div
;
499 if (pri_split
|| sec_split
) {
500 /* HMirror XOR Secondary_pipe XOR Rotation_180 */
501 bool right_view
= (sec_split
!= plane_state
->horizontal_mirror
) !=
502 (plane_state
->rotation
== ROTATION_ANGLE_180
);
504 if (plane_state
->rotation
== ROTATION_ANGLE_90
505 || plane_state
->rotation
== ROTATION_ANGLE_270
)
506 /* Secondary_pipe XOR Rotation_270 */
507 right_view
= (plane_state
->rotation
== ROTATION_ANGLE_270
) != sec_split
;
510 data
->viewport
.width
/= 2;
511 data
->viewport_c
.width
/= 2;
512 data
->viewport
.x
+= data
->viewport
.width
;
513 data
->viewport_c
.x
+= data
->viewport_c
.width
;
514 /* Ceil offset pipe */
515 data
->viewport
.width
+= data
->viewport
.width
% 2;
516 data
->viewport_c
.width
+= data
->viewport_c
.width
% 2;
518 data
->viewport
.width
/= 2;
519 data
->viewport_c
.width
/= 2;
523 if (plane_state
->rotation
== ROTATION_ANGLE_90
||
524 plane_state
->rotation
== ROTATION_ANGLE_270
) {
525 rect_swap_helper(&data
->viewport_c
);
526 rect_swap_helper(&data
->viewport
);
530 static void calculate_recout(struct pipe_ctx
*pipe_ctx
, struct view
*recout_skip
)
532 const struct dc_plane_state
*plane_state
= pipe_ctx
->plane_state
;
533 const struct dc_stream_state
*stream
= pipe_ctx
->stream
;
534 struct rect surf_src
= plane_state
->src_rect
;
535 struct rect surf_clip
= plane_state
->clip_rect
;
536 int recout_full_x
, recout_full_y
;
538 if (pipe_ctx
->plane_state
->rotation
== ROTATION_ANGLE_90
||
539 pipe_ctx
->plane_state
->rotation
== ROTATION_ANGLE_270
)
540 rect_swap_helper(&surf_src
);
542 pipe_ctx
->scl_data
.recout
.x
= stream
->dst
.x
;
543 if (stream
->src
.x
< surf_clip
.x
)
544 pipe_ctx
->scl_data
.recout
.x
+= (surf_clip
.x
545 - stream
->src
.x
) * stream
->dst
.width
548 pipe_ctx
->scl_data
.recout
.width
= surf_clip
.width
*
549 stream
->dst
.width
/ stream
->src
.width
;
550 if (pipe_ctx
->scl_data
.recout
.width
+ pipe_ctx
->scl_data
.recout
.x
>
551 stream
->dst
.x
+ stream
->dst
.width
)
552 pipe_ctx
->scl_data
.recout
.width
=
553 stream
->dst
.x
+ stream
->dst
.width
554 - pipe_ctx
->scl_data
.recout
.x
;
556 pipe_ctx
->scl_data
.recout
.y
= stream
->dst
.y
;
557 if (stream
->src
.y
< surf_clip
.y
)
558 pipe_ctx
->scl_data
.recout
.y
+= (surf_clip
.y
559 - stream
->src
.y
) * stream
->dst
.height
560 / stream
->src
.height
;
562 pipe_ctx
->scl_data
.recout
.height
= surf_clip
.height
*
563 stream
->dst
.height
/ stream
->src
.height
;
564 if (pipe_ctx
->scl_data
.recout
.height
+ pipe_ctx
->scl_data
.recout
.y
>
565 stream
->dst
.y
+ stream
->dst
.height
)
566 pipe_ctx
->scl_data
.recout
.height
=
567 stream
->dst
.y
+ stream
->dst
.height
568 - pipe_ctx
->scl_data
.recout
.y
;
570 /* Handle h & vsplit */
571 if (pipe_ctx
->top_pipe
&& pipe_ctx
->top_pipe
->plane_state
==
572 pipe_ctx
->plane_state
) {
573 if (stream
->view_format
== VIEW_3D_FORMAT_TOP_AND_BOTTOM
) {
574 pipe_ctx
->scl_data
.recout
.height
/= 2;
575 pipe_ctx
->scl_data
.recout
.y
+= pipe_ctx
->scl_data
.recout
.height
;
576 /* Floor primary pipe, ceil 2ndary pipe */
577 pipe_ctx
->scl_data
.recout
.height
+= pipe_ctx
->scl_data
.recout
.height
% 2;
579 pipe_ctx
->scl_data
.recout
.width
/= 2;
580 pipe_ctx
->scl_data
.recout
.x
+= pipe_ctx
->scl_data
.recout
.width
;
581 pipe_ctx
->scl_data
.recout
.width
+= pipe_ctx
->scl_data
.recout
.width
% 2;
583 } else if (pipe_ctx
->bottom_pipe
&&
584 pipe_ctx
->bottom_pipe
->plane_state
== pipe_ctx
->plane_state
) {
585 if (stream
->view_format
== VIEW_3D_FORMAT_TOP_AND_BOTTOM
)
586 pipe_ctx
->scl_data
.recout
.height
/= 2;
588 pipe_ctx
->scl_data
.recout
.width
/= 2;
591 /* Unclipped recout offset = stream dst offset + ((surf dst offset - stream surf_src offset)
592 * * 1/ stream scaling ratio) - (surf surf_src offset * 1/ full scl
595 recout_full_x
= stream
->dst
.x
+ (plane_state
->dst_rect
.x
- stream
->src
.x
)
596 * stream
->dst
.width
/ stream
->src
.width
-
597 surf_src
.x
* plane_state
->dst_rect
.width
/ surf_src
.width
598 * stream
->dst
.width
/ stream
->src
.width
;
599 recout_full_y
= stream
->dst
.y
+ (plane_state
->dst_rect
.y
- stream
->src
.y
)
600 * stream
->dst
.height
/ stream
->src
.height
-
601 surf_src
.y
* plane_state
->dst_rect
.height
/ surf_src
.height
602 * stream
->dst
.height
/ stream
->src
.height
;
604 recout_skip
->width
= pipe_ctx
->scl_data
.recout
.x
- recout_full_x
;
605 recout_skip
->height
= pipe_ctx
->scl_data
.recout
.y
- recout_full_y
;
608 static void calculate_scaling_ratios(struct pipe_ctx
*pipe_ctx
)
610 const struct dc_plane_state
*plane_state
= pipe_ctx
->plane_state
;
611 const struct dc_stream_state
*stream
= pipe_ctx
->stream
;
612 struct rect surf_src
= plane_state
->src_rect
;
613 const int in_w
= stream
->src
.width
;
614 const int in_h
= stream
->src
.height
;
615 const int out_w
= stream
->dst
.width
;
616 const int out_h
= stream
->dst
.height
;
618 if (pipe_ctx
->plane_state
->rotation
== ROTATION_ANGLE_90
||
619 pipe_ctx
->plane_state
->rotation
== ROTATION_ANGLE_270
)
620 rect_swap_helper(&surf_src
);
622 pipe_ctx
->scl_data
.ratios
.horz
= dal_fixed31_32_from_fraction(
624 plane_state
->dst_rect
.width
);
625 pipe_ctx
->scl_data
.ratios
.vert
= dal_fixed31_32_from_fraction(
627 plane_state
->dst_rect
.height
);
629 if (stream
->view_format
== VIEW_3D_FORMAT_SIDE_BY_SIDE
)
630 pipe_ctx
->scl_data
.ratios
.horz
.value
*= 2;
631 else if (stream
->view_format
== VIEW_3D_FORMAT_TOP_AND_BOTTOM
)
632 pipe_ctx
->scl_data
.ratios
.vert
.value
*= 2;
634 pipe_ctx
->scl_data
.ratios
.vert
.value
= div64_s64(
635 pipe_ctx
->scl_data
.ratios
.vert
.value
* in_h
, out_h
);
636 pipe_ctx
->scl_data
.ratios
.horz
.value
= div64_s64(
637 pipe_ctx
->scl_data
.ratios
.horz
.value
* in_w
, out_w
);
639 pipe_ctx
->scl_data
.ratios
.horz_c
= pipe_ctx
->scl_data
.ratios
.horz
;
640 pipe_ctx
->scl_data
.ratios
.vert_c
= pipe_ctx
->scl_data
.ratios
.vert
;
642 if (pipe_ctx
->scl_data
.format
== PIXEL_FORMAT_420BPP8
643 || pipe_ctx
->scl_data
.format
== PIXEL_FORMAT_420BPP10
) {
644 pipe_ctx
->scl_data
.ratios
.horz_c
.value
/= 2;
645 pipe_ctx
->scl_data
.ratios
.vert_c
.value
/= 2;
649 static void calculate_inits_and_adj_vp(struct pipe_ctx
*pipe_ctx
, struct view
*recout_skip
)
651 struct scaler_data
*data
= &pipe_ctx
->scl_data
;
652 struct rect src
= pipe_ctx
->plane_state
->src_rect
;
653 int vpc_div
= (data
->format
== PIXEL_FORMAT_420BPP8
654 || data
->format
== PIXEL_FORMAT_420BPP10
) ? 2 : 1;
657 if (pipe_ctx
->plane_state
->rotation
== ROTATION_ANGLE_90
||
658 pipe_ctx
->plane_state
->rotation
== ROTATION_ANGLE_270
) {
659 rect_swap_helper(&src
);
660 rect_swap_helper(&data
->viewport_c
);
661 rect_swap_helper(&data
->viewport
);
665 * Init calculated according to formula:
666 * init = (scaling_ratio + number_of_taps + 1) / 2
667 * init_bot = init + scaling_ratio
668 * init_c = init + truncated_vp_c_offset(from calculate viewport)
670 data
->inits
.h
= dal_fixed31_32_div_int(
671 dal_fixed31_32_add_int(data
->ratios
.horz
, data
->taps
.h_taps
+ 1), 2);
673 data
->inits
.h_c
= dal_fixed31_32_add(data
->inits
.h_c
, dal_fixed31_32_div_int(
674 dal_fixed31_32_add_int(data
->ratios
.horz_c
, data
->taps
.h_taps_c
+ 1), 2));
676 data
->inits
.v
= dal_fixed31_32_div_int(
677 dal_fixed31_32_add_int(data
->ratios
.vert
, data
->taps
.v_taps
+ 1), 2);
679 data
->inits
.v_c
= dal_fixed31_32_add(data
->inits
.v_c
, dal_fixed31_32_div_int(
680 dal_fixed31_32_add_int(data
->ratios
.vert_c
, data
->taps
.v_taps_c
+ 1), 2));
683 /* Adjust for viewport end clip-off */
684 if ((data
->viewport
.x
+ data
->viewport
.width
) < (src
.x
+ src
.width
)) {
685 int vp_clip
= src
.x
+ src
.width
- data
->viewport
.width
- data
->viewport
.x
;
686 int int_part
= dal_fixed31_32_floor(
687 dal_fixed31_32_sub(data
->inits
.h
, data
->ratios
.horz
));
689 int_part
= int_part
> 0 ? int_part
: 0;
690 data
->viewport
.width
+= int_part
< vp_clip
? int_part
: vp_clip
;
692 if ((data
->viewport
.y
+ data
->viewport
.height
) < (src
.y
+ src
.height
)) {
693 int vp_clip
= src
.y
+ src
.height
- data
->viewport
.height
- data
->viewport
.y
;
694 int int_part
= dal_fixed31_32_floor(
695 dal_fixed31_32_sub(data
->inits
.v
, data
->ratios
.vert
));
697 int_part
= int_part
> 0 ? int_part
: 0;
698 data
->viewport
.height
+= int_part
< vp_clip
? int_part
: vp_clip
;
700 if ((data
->viewport_c
.x
+ data
->viewport_c
.width
) < (src
.x
+ src
.width
) / vpc_div
) {
701 int vp_clip
= (src
.x
+ src
.width
) / vpc_div
-
702 data
->viewport_c
.width
- data
->viewport_c
.x
;
703 int int_part
= dal_fixed31_32_floor(
704 dal_fixed31_32_sub(data
->inits
.h_c
, data
->ratios
.horz_c
));
706 int_part
= int_part
> 0 ? int_part
: 0;
707 data
->viewport_c
.width
+= int_part
< vp_clip
? int_part
: vp_clip
;
709 if ((data
->viewport_c
.y
+ data
->viewport_c
.height
) < (src
.y
+ src
.height
) / vpc_div
) {
710 int vp_clip
= (src
.y
+ src
.height
) / vpc_div
-
711 data
->viewport_c
.height
- data
->viewport_c
.y
;
712 int int_part
= dal_fixed31_32_floor(
713 dal_fixed31_32_sub(data
->inits
.v_c
, data
->ratios
.vert_c
));
715 int_part
= int_part
> 0 ? int_part
: 0;
716 data
->viewport_c
.height
+= int_part
< vp_clip
? int_part
: vp_clip
;
719 /* Adjust for non-0 viewport offset */
720 if (data
->viewport
.x
) {
723 data
->inits
.h
= dal_fixed31_32_add(data
->inits
.h
, dal_fixed31_32_mul_int(
724 data
->ratios
.horz
, recout_skip
->width
));
725 int_part
= dal_fixed31_32_floor(data
->inits
.h
) - data
->viewport
.x
;
726 if (int_part
< data
->taps
.h_taps
) {
727 int int_adj
= data
->viewport
.x
>= (data
->taps
.h_taps
- int_part
) ?
728 (data
->taps
.h_taps
- int_part
) : data
->viewport
.x
;
729 data
->viewport
.x
-= int_adj
;
730 data
->viewport
.width
+= int_adj
;
732 } else if (int_part
> data
->taps
.h_taps
) {
733 data
->viewport
.x
+= int_part
- data
->taps
.h_taps
;
734 data
->viewport
.width
-= int_part
- data
->taps
.h_taps
;
735 int_part
= data
->taps
.h_taps
;
737 data
->inits
.h
.value
&= 0xffffffff;
738 data
->inits
.h
= dal_fixed31_32_add_int(data
->inits
.h
, int_part
);
741 if (data
->viewport_c
.x
) {
744 data
->inits
.h_c
= dal_fixed31_32_add(data
->inits
.h_c
, dal_fixed31_32_mul_int(
745 data
->ratios
.horz_c
, recout_skip
->width
));
746 int_part
= dal_fixed31_32_floor(data
->inits
.h_c
) - data
->viewport_c
.x
;
747 if (int_part
< data
->taps
.h_taps_c
) {
748 int int_adj
= data
->viewport_c
.x
>= (data
->taps
.h_taps_c
- int_part
) ?
749 (data
->taps
.h_taps_c
- int_part
) : data
->viewport_c
.x
;
750 data
->viewport_c
.x
-= int_adj
;
751 data
->viewport_c
.width
+= int_adj
;
753 } else if (int_part
> data
->taps
.h_taps_c
) {
754 data
->viewport_c
.x
+= int_part
- data
->taps
.h_taps_c
;
755 data
->viewport_c
.width
-= int_part
- data
->taps
.h_taps_c
;
756 int_part
= data
->taps
.h_taps_c
;
758 data
->inits
.h_c
.value
&= 0xffffffff;
759 data
->inits
.h_c
= dal_fixed31_32_add_int(data
->inits
.h_c
, int_part
);
762 if (data
->viewport
.y
) {
765 data
->inits
.v
= dal_fixed31_32_add(data
->inits
.v
, dal_fixed31_32_mul_int(
766 data
->ratios
.vert
, recout_skip
->height
));
767 int_part
= dal_fixed31_32_floor(data
->inits
.v
) - data
->viewport
.y
;
768 if (int_part
< data
->taps
.v_taps
) {
769 int int_adj
= data
->viewport
.y
>= (data
->taps
.v_taps
- int_part
) ?
770 (data
->taps
.v_taps
- int_part
) : data
->viewport
.y
;
771 data
->viewport
.y
-= int_adj
;
772 data
->viewport
.height
+= int_adj
;
774 } else if (int_part
> data
->taps
.v_taps
) {
775 data
->viewport
.y
+= int_part
- data
->taps
.v_taps
;
776 data
->viewport
.height
-= int_part
- data
->taps
.v_taps
;
777 int_part
= data
->taps
.v_taps
;
779 data
->inits
.v
.value
&= 0xffffffff;
780 data
->inits
.v
= dal_fixed31_32_add_int(data
->inits
.v
, int_part
);
783 if (data
->viewport_c
.y
) {
786 data
->inits
.v_c
= dal_fixed31_32_add(data
->inits
.v_c
, dal_fixed31_32_mul_int(
787 data
->ratios
.vert_c
, recout_skip
->height
));
788 int_part
= dal_fixed31_32_floor(data
->inits
.v_c
) - data
->viewport_c
.y
;
789 if (int_part
< data
->taps
.v_taps_c
) {
790 int int_adj
= data
->viewport_c
.y
>= (data
->taps
.v_taps_c
- int_part
) ?
791 (data
->taps
.v_taps_c
- int_part
) : data
->viewport_c
.y
;
792 data
->viewport_c
.y
-= int_adj
;
793 data
->viewport_c
.height
+= int_adj
;
795 } else if (int_part
> data
->taps
.v_taps_c
) {
796 data
->viewport_c
.y
+= int_part
- data
->taps
.v_taps_c
;
797 data
->viewport_c
.height
-= int_part
- data
->taps
.v_taps_c
;
798 int_part
= data
->taps
.v_taps_c
;
800 data
->inits
.v_c
.value
&= 0xffffffff;
801 data
->inits
.v_c
= dal_fixed31_32_add_int(data
->inits
.v_c
, int_part
);
804 /* Interlaced inits based on final vert inits */
805 data
->inits
.v_bot
= dal_fixed31_32_add(data
->inits
.v
, data
->ratios
.vert
);
806 data
->inits
.v_c_bot
= dal_fixed31_32_add(data
->inits
.v_c
, data
->ratios
.vert_c
);
808 if (pipe_ctx
->plane_state
->rotation
== ROTATION_ANGLE_90
||
809 pipe_ctx
->plane_state
->rotation
== ROTATION_ANGLE_270
) {
810 rect_swap_helper(&data
->viewport_c
);
811 rect_swap_helper(&data
->viewport
);
815 bool resource_build_scaling_params(struct pipe_ctx
*pipe_ctx
)
817 const struct dc_plane_state
*plane_state
= pipe_ctx
->plane_state
;
818 struct dc_crtc_timing
*timing
= &pipe_ctx
->stream
->timing
;
819 struct view recout_skip
= { 0 };
822 /* Important: scaling ratio calculation requires pixel format,
823 * lb depth calculation requires recout and taps require scaling ratios.
824 * Inits require viewport, taps, ratios and recout of split pipe
826 pipe_ctx
->scl_data
.format
= convert_pixel_format_to_dalsurface(
827 pipe_ctx
->plane_state
->format
);
829 calculate_scaling_ratios(pipe_ctx
);
831 calculate_viewport(pipe_ctx
);
833 if (pipe_ctx
->scl_data
.viewport
.height
< 16 || pipe_ctx
->scl_data
.viewport
.width
< 16)
836 calculate_recout(pipe_ctx
, &recout_skip
);
839 * Setting line buffer pixel depth to 24bpp yields banding
840 * on certain displays, such as the Sharp 4k
842 pipe_ctx
->scl_data
.lb_params
.depth
= LB_PIXEL_DEPTH_30BPP
;
844 pipe_ctx
->scl_data
.h_active
= timing
->h_addressable
;
845 pipe_ctx
->scl_data
.v_active
= timing
->v_addressable
;
847 /* Taps calculations */
848 res
= pipe_ctx
->xfm
->funcs
->transform_get_optimal_number_of_taps(
849 pipe_ctx
->xfm
, &pipe_ctx
->scl_data
, &plane_state
->scaling_quality
);
852 /* Try 24 bpp linebuffer */
853 pipe_ctx
->scl_data
.lb_params
.depth
= LB_PIXEL_DEPTH_24BPP
;
855 res
= pipe_ctx
->xfm
->funcs
->transform_get_optimal_number_of_taps(
856 pipe_ctx
->xfm
, &pipe_ctx
->scl_data
, &plane_state
->scaling_quality
);
860 /* May need to re-check lb size after this in some obscure scenario */
861 calculate_inits_and_adj_vp(pipe_ctx
, &recout_skip
);
863 dm_logger_write(pipe_ctx
->stream
->ctx
->logger
, LOG_SCALER
,
864 "%s: Viewport:\nheight:%d width:%d x:%d "
865 "y:%d\n dst_rect:\nheight:%d width:%d x:%d "
868 pipe_ctx
->scl_data
.viewport
.height
,
869 pipe_ctx
->scl_data
.viewport
.width
,
870 pipe_ctx
->scl_data
.viewport
.x
,
871 pipe_ctx
->scl_data
.viewport
.y
,
872 plane_state
->dst_rect
.height
,
873 plane_state
->dst_rect
.width
,
874 plane_state
->dst_rect
.x
,
875 plane_state
->dst_rect
.y
);
881 enum dc_status
resource_build_scaling_params_for_context(
882 const struct core_dc
*dc
,
883 struct validate_context
*context
)
887 for (i
= 0; i
< MAX_PIPES
; i
++) {
888 if (context
->res_ctx
.pipe_ctx
[i
].plane_state
!= NULL
&&
889 context
->res_ctx
.pipe_ctx
[i
].stream
!= NULL
)
890 if (!resource_build_scaling_params(&context
->res_ctx
.pipe_ctx
[i
]))
891 return DC_FAIL_SCALING
;
897 struct pipe_ctx
*find_idle_secondary_pipe(
898 struct resource_context
*res_ctx
,
899 const struct resource_pool
*pool
)
902 struct pipe_ctx
*secondary_pipe
= NULL
;
905 * search backwards for the second pipe to keep pipe
906 * assignment more consistent
909 for (i
= pool
->pipe_count
- 1; i
>= 0; i
--) {
910 if (res_ctx
->pipe_ctx
[i
].stream
== NULL
) {
911 secondary_pipe
= &res_ctx
->pipe_ctx
[i
];
912 secondary_pipe
->pipe_idx
= i
;
918 return secondary_pipe
;
921 struct pipe_ctx
*resource_get_head_pipe_for_stream(
922 struct resource_context
*res_ctx
,
923 struct dc_stream_state
*stream
)
926 for (i
= 0; i
< MAX_PIPES
; i
++) {
927 if (res_ctx
->pipe_ctx
[i
].stream
== stream
&&
928 res_ctx
->pipe_ctx
[i
].stream_enc
) {
929 return &res_ctx
->pipe_ctx
[i
];
937 * A free_pipe for a stream is defined here as a pipe
938 * that has no surface attached yet
940 static struct pipe_ctx
*acquire_free_pipe_for_stream(
941 struct validate_context
*context
,
942 const struct resource_pool
*pool
,
943 struct dc_stream_state
*stream
)
946 struct resource_context
*res_ctx
= &context
->res_ctx
;
948 struct pipe_ctx
*head_pipe
= NULL
;
950 /* Find head pipe, which has the back end set up*/
952 head_pipe
= resource_get_head_pipe_for_stream(res_ctx
, stream
);
957 if (!head_pipe
->plane_state
)
960 /* Re-use pipe already acquired for this stream if available*/
961 for (i
= pool
->pipe_count
- 1; i
>= 0; i
--) {
962 if (res_ctx
->pipe_ctx
[i
].stream
== stream
&&
963 !res_ctx
->pipe_ctx
[i
].plane_state
) {
964 return &res_ctx
->pipe_ctx
[i
];
969 * At this point we have no re-useable pipe for this stream and we need
970 * to acquire an idle one to satisfy the request
973 if (!pool
->funcs
->acquire_idle_pipe_for_layer
)
976 return pool
->funcs
->acquire_idle_pipe_for_layer(context
, pool
, stream
);
980 static void release_free_pipes_for_stream(
981 struct resource_context
*res_ctx
,
982 struct dc_stream_state
*stream
)
986 for (i
= MAX_PIPES
- 1; i
>= 0; i
--) {
987 /* never release the topmost pipe*/
988 if (res_ctx
->pipe_ctx
[i
].stream
== stream
&&
989 res_ctx
->pipe_ctx
[i
].top_pipe
&&
990 !res_ctx
->pipe_ctx
[i
].plane_state
) {
991 memset(&res_ctx
->pipe_ctx
[i
], 0, sizeof(struct pipe_ctx
));
#if defined(CONFIG_DRM_AMD_DC_DCN1_0)
/*
 * Steal the first secondary (split) pipe found, unlink it from its split
 * chain, reset it to its pool-default hardware blocks and attach @stream.
 * Returns the pipe index, or -1 if no split pipe exists.
 */
static int acquire_first_split_pipe(
		struct resource_context *res_ctx,
		const struct resource_pool *pool,
		struct dc_stream_state *stream)
{
	int i;

	for (i = 0; i < pool->pipe_count; i++) {
		struct pipe_ctx *pipe_ctx = &res_ctx->pipe_ctx[i];

		if (pipe_ctx->top_pipe &&
				pipe_ctx->top_pipe->plane_state == pipe_ctx->plane_state) {
			/* Unlink this pipe from the top/bottom split chain. */
			pipe_ctx->top_pipe->bottom_pipe = pipe_ctx->bottom_pipe;
			if (pipe_ctx->bottom_pipe)
				pipe_ctx->bottom_pipe->top_pipe = pipe_ctx->top_pipe;

			memset(pipe_ctx, 0, sizeof(*pipe_ctx));
			pipe_ctx->tg = pool->timing_generators[i];
			pipe_ctx->mi = pool->mis[i];
			pipe_ctx->ipp = pool->ipps[i];
			pipe_ctx->xfm = pool->transforms[i];
			pipe_ctx->opp = pool->opps[i];
			pipe_ctx->dis_clk = pool->display_clock;
			pipe_ctx->pipe_idx = i;

			pipe_ctx->stream = stream;
			return i;
		}
	}
	return -1;
}
#endif
/*
 * Attach @surface_count plane states to @stream inside @context.
 *
 * Existing surfaces on the stream are released and its pipes detached,
 * then one pipe per new surface is (re)acquired and chained via
 * top_pipe/bottom_pipe in surface order.
 *
 * Returns true on success; false if too many surfaces were requested,
 * the stream is not part of @context, or a free pipe cannot be found.
 */
bool resource_attach_surfaces_to_context(
		struct dc_plane_state * const *plane_states,
		int surface_count,
		struct dc_stream_state *stream,
		struct validate_context *context,
		const struct resource_pool *pool)
{
	int i;
	struct pipe_ctx *tail_pipe;
	struct dc_stream_status *stream_status = NULL;

	if (surface_count > MAX_SURFACE_NUM) {
		dm_error("Surface: can not attach %d surfaces! Maximum is: %d\n",
			surface_count, MAX_SURFACE_NUM);
		return false;
	}

	/* Locate the status entry for @stream within the context. */
	for (i = 0; i < context->stream_count; i++)
		if (context->streams[i] == stream) {
			stream_status = &context->stream_status[i];
			break;
		}

	if (stream_status == NULL) {
		dm_error("Existing stream not found; failed to attach surfaces\n");
		return false;
	}

	/* retain new surfaces */
	for (i = 0; i < surface_count; i++)
		dc_plane_state_retain(plane_states[i]);

	/* detach surfaces from pipes */
	for (i = 0; i < pool->pipe_count; i++)
		if (context->res_ctx.pipe_ctx[i].stream == stream) {
			context->res_ctx.pipe_ctx[i].plane_state = NULL;
			context->res_ctx.pipe_ctx[i].bottom_pipe = NULL;
		}

	/* release existing surfaces*/
	for (i = 0; i < stream_status->plane_count; i++)
		dc_plane_state_release(stream_status->plane_states[i]);

	for (i = surface_count; i < stream_status->plane_count; i++)
		stream_status->plane_states[i] = NULL;

	/* NOTE(review): reconstructed — tail_pipe starts unset; first surface
	 * goes to the head pipe, which already owns the back end. */
	tail_pipe = NULL;
	for (i = 0; i < surface_count; i++) {
		struct dc_plane_state *plane_state = plane_states[i];
		struct pipe_ctx *free_pipe = acquire_free_pipe_for_stream(
				context, pool, stream);

#if defined(CONFIG_DRM_AMD_DC_DCN1_0)
		if (!free_pipe) {
			int pipe_idx = acquire_first_split_pipe(&context->res_ctx, pool, stream);
			if (pipe_idx >= 0)
				free_pipe = &context->res_ctx.pipe_ctx[pipe_idx];
		}
#endif
		if (!free_pipe) {
			stream_status->plane_states[i] = NULL;
			return false;
		}

		free_pipe->plane_state = plane_state;

		if (tail_pipe) {
			/* Secondary pipes inherit the back end of the pipe
			 * above them and are linked into the split chain. */
			free_pipe->tg = tail_pipe->tg;
			free_pipe->opp = tail_pipe->opp;
			free_pipe->stream_enc = tail_pipe->stream_enc;
			free_pipe->audio = tail_pipe->audio;
			free_pipe->clock_source = tail_pipe->clock_source;
			free_pipe->top_pipe = tail_pipe;
			tail_pipe->bottom_pipe = free_pipe;
		}

		tail_pipe = free_pipe;
	}

	release_free_pipes_for_stream(&context->res_ctx, stream);

	/* assign new surfaces*/
	for (i = 0; i < surface_count; i++)
		stream_status->plane_states[i] = plane_states[i];

	stream_status->plane_count = surface_count;

	return true;
}
/*
 * Return true if @new_stream requires a timing reprogram relative to
 * @cur_stream: no previous stream, a different sink (hotplug), a changed
 * output color space, or any byte-level difference in the CRTC timing.
 */
static bool is_timing_changed(struct dc_stream_state *cur_stream,
		struct dc_stream_state *new_stream)
{
	if (cur_stream == NULL)
		return true;

	/* If sink pointer changed, it means this is a hotplug, we should do
	 * full hw setting.
	 */
	if (cur_stream->sink != new_stream->sink)
		return true;

	/* If output color space is changed, need to reprogram info frames */
	if (cur_stream->output_color_space != new_stream->output_color_space)
		return true;

	return memcmp(
		&cur_stream->timing,
		&new_stream->timing,
		sizeof(struct dc_crtc_timing)) != 0;
}
/*
 * Two streams share the same back end when they are the same object, or
 * when both exist and their timings/sink/color space are unchanged.
 */
static bool are_stream_backends_same(
	struct dc_stream_state *stream_a, struct dc_stream_state *stream_b)
{
	if (stream_a == stream_b)
		return true;

	if (stream_a == NULL || stream_b == NULL)
		return false;

	if (is_timing_changed(stream_a, stream_b))
		return false;

	return true;
}
/*
 * Public wrapper: a stream is "unchanged" when its back end matches the
 * old stream's (same timing, sink and color space).
 */
bool dc_is_stream_unchanged(
	struct dc_stream_state *old_stream, struct dc_stream_state *stream)
{
	if (!are_stream_backends_same(old_stream, stream))
		return false;

	return true;
}
/*
 * For each stream in @set, attach surfaces into @context:
 *  - if the stream is unchanged vs. @old_context, carry over the old
 *    surfaces and stream status;
 *  - then attach the surfaces requested by the validation set itself.
 * Returns false as soon as any attach fails.
 */
bool resource_validate_attach_surfaces(
		const struct dc_validation_set set[],
		int set_count,
		const struct validate_context *old_context,
		struct validate_context *context,
		const struct resource_pool *pool)
{
	int i, j;

	for (i = 0; i < set_count; i++) {
		/* Re-use old surfaces for streams that did not change. */
		for (j = 0; old_context && j < old_context->stream_count; j++)
			if (dc_is_stream_unchanged(
					old_context->streams[j],
					context->streams[i])) {
				if (!resource_attach_surfaces_to_context(
						old_context->stream_status[j].plane_states,
						old_context->stream_status[j].plane_count,
						context->streams[i],
						context, pool))
					return false;
				context->stream_status[i] = old_context->stream_status[j];
			}
		/* Then attach whatever the caller explicitly requested. */
		if (set[i].plane_count != 0)
			if (!resource_attach_surfaces_to_context(
					set[i].plane_states,
					set[i].plane_count,
					context->streams[i],
					context, pool))
				return false;
	}

	return true;
}
1203 /* Maximum TMDS single link pixel clock 165MHz */
1204 #define TMDS_MAX_PIXEL_CLOCK_IN_KHZ 165000
1205 #define TMDS_MAX_PIXEL_CLOCK_IN_KHZ_UPMOST 297000
/* Mark @stream_enc as acquired in the resource context bookkeeping. */
static void set_stream_engine_in_use(
		struct resource_context *res_ctx,
		const struct resource_pool *pool,
		struct stream_encoder *stream_enc)
{
	int i;

	for (i = 0; i < pool->stream_enc_count; i++) {
		if (pool->stream_enc[i] == stream_enc)
			res_ctx->is_stream_enc_acquired[i] = true;
	}
}
/* TODO: release audio object */
/* Mark @audio as acquired in the resource context bookkeeping. */
static void set_audio_in_use(
		struct resource_context *res_ctx,
		const struct resource_pool *pool,
		struct audio *audio)
{
	int i;

	for (i = 0; i < pool->audio_count; i++) {
		if (pool->audios[i] == audio)
			res_ctx->is_audio_acquired[i] = true;
	}
}
/*
 * Acquire the lowest-index pipe with no stream assigned, populate it with
 * the pool front-end resources at that index, and bind it to @stream.
 * Returns the pipe index, or a negative value when all pipes are in use.
 */
static int acquire_first_free_pipe(
		struct resource_context *res_ctx,
		const struct resource_pool *pool,
		struct dc_stream_state *stream)
{
	int i;

	for (i = 0; i < pool->pipe_count; i++) {
		if (!res_ctx->pipe_ctx[i].stream) {
			struct pipe_ctx *pipe_ctx = &res_ctx->pipe_ctx[i];

			/* Front-end resources are index-locked to the pipe. */
			pipe_ctx->tg = pool->timing_generators[i];
			pipe_ctx->mi = pool->mis[i];
			pipe_ctx->ipp = pool->ipps[i];
			pipe_ctx->xfm = pool->transforms[i];
			pipe_ctx->opp = pool->opps[i];
			pipe_ctx->dis_clk = pool->display_clock;
			pipe_ctx->pipe_idx = i;

			pipe_ctx->stream = stream;
			return i;
		}
	}
	return -1;
}
/*
 * Find a free stream encoder for @stream's link.
 *
 * Prefers the link encoder's preferred engine; remembers the last free
 * encoder seen so DP streams can fall back to it when the preferred
 * engine is already taken (e.g. by another display in an MST chain).
 * Returns NULL when nothing suitable is free.
 */
static struct stream_encoder *find_first_free_match_stream_enc_for_link(
		struct resource_context *res_ctx,
		const struct resource_pool *pool,
		struct dc_stream_state *stream)
{
	int i;
	int j = -1;
	struct dc_link *link = stream->sink->link;

	for (i = 0; i < pool->stream_enc_count; i++) {
		if (!res_ctx->is_stream_enc_acquired[i] &&
				pool->stream_enc[i]) {
			/* Store first available for MST second display
			 * in daisy chain use case */
			j = i;
			if (pool->stream_enc[i]->id ==
					link->link_enc->preferred_engine)
				return pool->stream_enc[i];
		}
	}

	/*
	 * below can happen in cases when stream encoder is acquired:
	 * 1) for second MST display in chain, so preferred engine already
	 * acquired;
	 * 2) for another link, which preferred engine already acquired by any
	 * MST configuration.
	 *
	 * If signal is of DP type and preferred engine not found, return last available
	 *
	 * TODO - This is just a patch up and a generic solution is
	 * required for non DP connectors.
	 */

	if (j >= 0 && dc_is_dp_signal(stream->signal))
		return pool->stream_enc[j];

	return NULL;
}
/*
 * Return the first audio resource not yet marked acquired, or NULL when
 * every audio endpoint is in use (some ASICs have fewer audios than pipes).
 */
static struct audio *find_first_free_audio(
		struct resource_context *res_ctx,
		const struct resource_pool *pool)
{
	int i;

	for (i = 0; i < pool->audio_count; i++) {
		if (res_ctx->is_audio_acquired[i] == false) {
			return pool->audios[i];
		}
	}

	return NULL;
}
/*
 * Derive stream->signal from the explicit output signal, or failing that
 * from the sink / connector. For DVI, upgrade to dual link when the pixel
 * clock exceeds the single-link TMDS maximum and the sink allows it.
 */
static void update_stream_signal(struct dc_stream_state *stream)
{
	if (stream->output_signal == SIGNAL_TYPE_NONE) {
		struct dc_sink *dc_sink = stream->sink;

		if (dc_sink->sink_signal == SIGNAL_TYPE_NONE)
			stream->signal = stream->sink->link->connector_signal;
		else
			stream->signal = dc_sink->sink_signal;
	} else {
		stream->signal = stream->output_signal;
	}

	if (dc_is_dvi_signal(stream->signal)) {
		/* Dual link only when clock demands it and sink is not
		 * explicitly single-link. */
		if (stream->timing.pix_clk_khz > TMDS_MAX_PIXEL_CLOCK_IN_KHZ_UPMOST &&
			stream->sink->sink_signal != SIGNAL_TYPE_DVI_SINGLE_LINK)
			stream->signal = SIGNAL_TYPE_DVI_DUAL_LINK;
		else
			stream->signal = SIGNAL_TYPE_DVI_SINGLE_LINK;
	}
}
/*
 * True when @stream's back end matches any stream already present in
 * @old_context (so its hardware resources can be reused).
 */
bool resource_is_stream_unchanged(
	struct validate_context *old_context, struct dc_stream_state *stream)
{
	int i;

	for (i = 0; i < old_context->stream_count; i++) {
		struct dc_stream_state *old_stream = old_context->streams[i];

		if (are_stream_backends_same(old_stream, stream))
			return true;
	}

	return false;
}
/*
 * Struct-copy @from_pipe_ctx into @to_pipe_ctx while preserving the
 * destination's own stream and (if set) plane_state pointers.
 */
static void copy_pipe_ctx(
	const struct pipe_ctx *from_pipe_ctx, struct pipe_ctx *to_pipe_ctx)
{
	struct dc_plane_state *plane_state = to_pipe_ctx->plane_state;
	struct dc_stream_state *stream = to_pipe_ctx->stream;

	*to_pipe_ctx = *from_pipe_ctx;
	to_pipe_ctx->stream = stream;
	if (plane_state != NULL)
		to_pipe_ctx->plane_state = plane_state;
}
/*
 * Find a non-DP, non-virtual stream in @context whose timing is
 * synchronizable with @stream_needs_pll, i.e. one it could share a PLL
 * with. Returns NULL when none exists.
 */
static struct dc_stream_state *find_pll_sharable_stream(
		struct dc_stream_state *stream_needs_pll,
		struct validate_context *context)
{
	int i;

	for (i = 0; i < context->stream_count; i++) {
		struct dc_stream_state *stream_has_pll = context->streams[i];

		/* We are looking for non dp, non virtual stream */
		if (resource_are_streams_timing_synchronizable(
			stream_needs_pll, stream_has_pll)
			&& !dc_is_dp_signal(stream_has_pll->signal)
			&& stream_has_pll->sink->link->connector_signal
			!= SIGNAL_TYPE_VIRTUAL)
			return stream_has_pll;
	}

	return NULL;
}
/*
 * Normalize the pixel clock (kHz) for deep color: scale by bits-per-pixel
 * relative to 24bpp. YCbCr 4:2:2 carries deep color without a clock
 * increase, so it is left unscaled.
 */
static int get_norm_pix_clk(const struct dc_crtc_timing *timing)
{
	uint32_t pix_clk = timing->pix_clk_khz;
	uint32_t normalized_pix_clk = pix_clk;

	if (timing->pixel_encoding == PIXEL_ENCODING_YCBCR420)
		pix_clk /= 2; /* 4:2:0 halves the TMDS clock */
	if (timing->pixel_encoding != PIXEL_ENCODING_YCBCR422) {
		switch (timing->display_color_depth) {
		case COLOR_DEPTH_888:
			normalized_pix_clk = pix_clk;
			break;
		case COLOR_DEPTH_101010:
			normalized_pix_clk = (pix_clk * 30) / 24;
			break;
		case COLOR_DEPTH_121212:
			normalized_pix_clk = (pix_clk * 36) / 24;
			break;
		case COLOR_DEPTH_161616:
			normalized_pix_clk = (pix_clk * 48) / 24;
			break;
		default:
			/* NOTE(review): reconstructed default — other depths
			 * fall through with the 24bpp clock. */
			break;
		}
	}

	return normalized_pix_clk;
}
/*
 * Refresh the stream signal, then compute the PHY pixel clock: HDMI gets
 * the deep-color-normalized clock, everything else the raw timing clock.
 */
static void calculate_phy_pix_clks(struct dc_stream_state *stream)
{
	update_stream_signal(stream);

	/* update actual pixel clock on all streams */
	if (dc_is_hdmi_signal(stream->signal))
		stream->phy_pix_clk = get_norm_pix_clk(
			&stream->timing);
	else
		stream->phy_pix_clk =
			stream->timing.pix_clk_khz;
}
/*
 * Map hardware pool resources (pipes, stream encoders, audio, clock
 * sources) to every stream in @context.
 *
 * Pass 1: for streams unchanged vs. @old_context, re-mark the old
 * resources as in-use and copy the old pipe state.
 * Pass 2: for new/changed streams, acquire a free (or split) pipe, a
 * stream encoder and optionally an audio endpoint.
 *
 * Returns DC_OK, or DC_NO_CONTROLLER_RESOURCE / DC_NO_STREAM_ENG_RESOURCE
 * when acquisition fails.
 */
enum dc_status resource_map_pool_resources(
		const struct core_dc *dc,
		struct validate_context *context,
		struct validate_context *old_context)
{
	const struct resource_pool *pool = dc->res_pool;
	int i, j;

	for (i = 0; old_context && i < context->stream_count; i++) {
		struct dc_stream_state *stream = context->streams[i];

		if (!resource_is_stream_unchanged(old_context, stream)) {
			if (stream != NULL && old_context->streams[i] != NULL) {
				/* Carry dithering/clamping settings forward
				 * even for changed streams. */
				stream->bit_depth_params =
						old_context->streams[i]->bit_depth_params;
				stream->clamping = old_context->streams[i]->clamping;
			}
			continue;
		}

		/* mark resources used for stream that is already active */
		for (j = 0; j < pool->pipe_count; j++) {
			struct pipe_ctx *pipe_ctx =
				&context->res_ctx.pipe_ctx[j];
			const struct pipe_ctx *old_pipe_ctx =
				&old_context->res_ctx.pipe_ctx[j];

			if (!are_stream_backends_same(old_pipe_ctx->stream, stream))
				continue;

			if (old_pipe_ctx->top_pipe)
				continue;

			pipe_ctx->stream = stream;
			copy_pipe_ctx(old_pipe_ctx, pipe_ctx);

			/* Split pipe resource, do not acquire back end */
			if (!pipe_ctx->stream_enc)
				continue;

			set_stream_engine_in_use(
				&context->res_ctx, pool,
				pipe_ctx->stream_enc);

			/* Switch to dp clock source only if there is
			 * no non dp stream that shares the same timing
			 * with the dp stream.
			 */
			if (dc_is_dp_signal(pipe_ctx->stream->signal) &&
				!find_pll_sharable_stream(stream, context))
				pipe_ctx->clock_source = pool->dp_clock_source;

			resource_reference_clock_source(
				&context->res_ctx, pool,
				pipe_ctx->clock_source);

			set_audio_in_use(&context->res_ctx, pool,
					pipe_ctx->audio);
		}
	}

	for (i = 0; i < context->stream_count; i++) {
		struct dc_stream_state *stream = context->streams[i];
		struct pipe_ctx *pipe_ctx = NULL;
		int pipe_idx = -1;

		if (old_context && resource_is_stream_unchanged(old_context, stream))
			continue;
		/* acquire new resources */
		pipe_idx = acquire_first_free_pipe(&context->res_ctx, pool, stream);
#if defined(CONFIG_DRM_AMD_DC_DCN1_0)
		if (pipe_idx < 0)
			pipe_idx = acquire_first_split_pipe(&context->res_ctx, pool, stream);
#endif
		if (pipe_idx < 0)
			return DC_NO_CONTROLLER_RESOURCE;

		pipe_ctx = &context->res_ctx.pipe_ctx[pipe_idx];

		pipe_ctx->stream_enc =
			find_first_free_match_stream_enc_for_link(
				&context->res_ctx, pool, stream);

		if (!pipe_ctx->stream_enc)
			return DC_NO_STREAM_ENG_RESOURCE;

		set_stream_engine_in_use(
			&context->res_ctx, pool,
			pipe_ctx->stream_enc);

		/* TODO: Add check if ASIC support and EDID audio */
		if (!stream->sink->converter_disable_audio &&
			dc_is_audio_capable_signal(pipe_ctx->stream->signal) &&
			stream->audio_info.mode_count) {
			pipe_ctx->audio = find_first_free_audio(
				&context->res_ctx, pool);

			/*
			 * Audio assigned in order first come first get.
			 * There are asics which has number of audio
			 * resources less then number of pipes
			 */
			if (pipe_ctx->audio)
				set_audio_in_use(
					&context->res_ctx, pool,
					pipe_ctx->audio);
		}

		context->stream_status[i].primary_otg_inst = pipe_ctx->tg->inst;
	}

	return DC_OK;
}
/* first stream in the context is used to populate the rest */
/*
 * Replicate streams[0] and pipe_ctx[0] into every slot up to @max_streams,
 * retaining the stream once per copy and bumping stream_count accordingly.
 */
void validate_guaranteed_copy_streams(
		struct validate_context *context,
		int max_streams)
{
	int i;

	for (i = 1; i < max_streams; i++) {
		context->streams[i] = context->streams[0];

		copy_pipe_ctx(&context->res_ctx.pipe_ctx[0],
			      &context->res_ctx.pipe_ctx[i]);
		context->res_ctx.pipe_ctx[i].stream =
				context->res_ctx.pipe_ctx[0].stream;

		dc_stream_retain(context->streams[i]);
		context->stream_count++;
	}
}
/*
 * Recompute the gamut packet checksum byte (sb[2]) as the one-byte
 * complement of the sum of the gamut payload starting at sb[3].
 * sb[1] holds the payload length.
 */
static void patch_gamut_packet_checksum(
		struct encoder_info_packet *gamut_packet)
{
	/* For gamut we recalc checksum */
	if (gamut_packet->valid) {
		uint8_t chk_sum = 0;
		uint8_t *ptr;
		uint8_t i;

		/*start of the Gamut data. */
		ptr = &gamut_packet->sb[3];

		for (i = 0; i <= gamut_packet->sb[1]; i++)
			chk_sum += ptr[i];

		gamut_packet->sb[2] = (uint8_t) (0x100 - chk_sum);
	}
}
/*
 * Build the HDMI AVI InfoFrame (CTA-861) for @pipe_ctx's stream: pixel
 * encoding, colorimetry, aspect ratio, content type, quantization range,
 * VIC and bar info, then checksum and copy it into @info_packet.
 */
static void set_avi_info_frame(
		struct encoder_info_packet *info_packet,
		struct pipe_ctx *pipe_ctx)
{
	struct dc_stream_state *stream = pipe_ctx->stream;
	enum dc_color_space color_space = COLOR_SPACE_UNKNOWN;
	struct info_frame info_frame = { {0} };
	uint32_t pixel_encoding = 0;
	enum scanning_type scan_type = SCANNING_TYPE_NODATA;
	enum dc_aspect_ratio aspect = ASPECT_RATIO_NO_DATA;
	bool itc = false;
	uint8_t itc_value = 0;
	uint8_t cn0_cn1 = 0;
	unsigned int cn0_cn1_value = 0;
	uint8_t *check_sum = NULL;
	uint8_t byte_index = 0;
	union hdmi_info_packet *hdmi_info = &info_frame.avi_info_packet.info_packet_hdmi;
	union display_content_support support = {0};
	unsigned int vic = pipe_ctx->stream->timing.vic;
	enum dc_timing_3d_format format;

	color_space = pipe_ctx->stream->output_color_space;
	if (color_space == COLOR_SPACE_UNKNOWN)
		color_space = (stream->timing.pixel_encoding == PIXEL_ENCODING_RGB) ?
			COLOR_SPACE_SRGB :COLOR_SPACE_YCBCR709;

	/* Initialize header */
	hdmi_info->bits.header.info_frame_type = HDMI_INFOFRAME_TYPE_AVI;
	/* InfoFrameVersion_3 is defined by CEA861F (Section 6.4), but shall
	 * not be used in HDMI 2.0 (Section 10.1) */
	hdmi_info->bits.header.version = 2;
	hdmi_info->bits.header.length = HDMI_AVI_INFOFRAME_SIZE;

	/*
	 * IDO-defined (Y2,Y1,Y0 = 1,1,1) shall not be used by devices built
	 * according to HDMI 2.0 spec (Section 10.1)
	 */

	switch (stream->timing.pixel_encoding) {
	case PIXEL_ENCODING_YCBCR422:
		pixel_encoding = 1;
		break;

	case PIXEL_ENCODING_YCBCR444:
		pixel_encoding = 2;
		break;

	case PIXEL_ENCODING_YCBCR420:
		pixel_encoding = 3;
		break;

	case PIXEL_ENCODING_RGB:
	default:
		pixel_encoding = 0;
	}

	/* Y0_Y1_Y2 : The pixel encoding */
	/* H14b AVI InfoFrame has extension on Y-field from 2 bits to 3 bits */
	hdmi_info->bits.Y0_Y1_Y2 = pixel_encoding;

	/* A0 = 1 Active Format Information valid */
	hdmi_info->bits.A0 = ACTIVE_FORMAT_VALID;

	/* B0, B1 = 3; Bar info data is valid */
	hdmi_info->bits.B0_B1 = BAR_INFO_BOTH_VALID;

	hdmi_info->bits.SC0_SC1 = PICTURE_SCALING_UNIFORM;

	/* S0, S1 : Underscan / Overscan */
	/* TODO: un-hardcode scan type */
	scan_type = SCANNING_TYPE_UNDERSCAN;
	hdmi_info->bits.S0_S1 = scan_type;

	/* C0, C1 : Colorimetry */
	if (color_space == COLOR_SPACE_YCBCR709 ||
			color_space == COLOR_SPACE_YCBCR709_LIMITED)
		hdmi_info->bits.C0_C1 = COLORIMETRY_ITU709;
	else if (color_space == COLOR_SPACE_YCBCR601 ||
			color_space == COLOR_SPACE_YCBCR601_LIMITED)
		hdmi_info->bits.C0_C1 = COLORIMETRY_ITU601;
	else
		hdmi_info->bits.C0_C1 = COLORIMETRY_NO_DATA;

	if (color_space == COLOR_SPACE_2020_RGB_FULLRANGE ||
			color_space == COLOR_SPACE_2020_RGB_LIMITEDRANGE ||
			color_space == COLOR_SPACE_2020_YCBCR) {
		hdmi_info->bits.EC0_EC2 = COLORIMETRYEX_BT2020RGBYCBCR;
		hdmi_info->bits.C0_C1 = COLORIMETRY_EXTENDED;
	} else if (color_space == COLOR_SPACE_ADOBERGB) {
		hdmi_info->bits.EC0_EC2 = COLORIMETRYEX_ADOBERGB;
		hdmi_info->bits.C0_C1 = COLORIMETRY_EXTENDED;
	}

	/* TODO: un-hardcode aspect ratio */
	aspect = stream->timing.aspect_ratio;

	switch (aspect) {
	case ASPECT_RATIO_4_3:
	case ASPECT_RATIO_16_9:
		hdmi_info->bits.M0_M1 = aspect;
		break;

	case ASPECT_RATIO_NO_DATA:
	case ASPECT_RATIO_64_27:
	case ASPECT_RATIO_256_135:
	default:
		hdmi_info->bits.M0_M1 = 0;
	}

	/* Active Format Aspect ratio - same as Picture Aspect Ratio. */
	hdmi_info->bits.R0_R3 = ACTIVE_FORMAT_ASPECT_RATIO_SAME_AS_PICTURE;

	/* TODO: un-hardcode cn0_cn1 and itc */
	/* NOTE(review): reconstructed — hardcoded graphics content type with
	 * IT content flag set, downgraded per EDID content support bits. */
	cn0_cn1 = 0;
	cn0_cn1_value = 0;

	itc = true;
	itc_value = 1;

	support = stream->sink->edid_caps.content_support;

	if (itc) {
		if (!support.bits.valid_content_type) {
			cn0_cn1_value = 0;
		} else {
			if (cn0_cn1 == DISPLAY_CONTENT_TYPE_GRAPHICS) {
				if (support.bits.graphics_content == 1) {
					cn0_cn1_value = 0;
				}
			} else if (cn0_cn1 == DISPLAY_CONTENT_TYPE_PHOTO) {
				if (support.bits.photo_content == 1) {
					cn0_cn1_value = 1;
				} else {
					cn0_cn1_value = 0;
					itc_value = 0;
				}
			} else if (cn0_cn1 == DISPLAY_CONTENT_TYPE_CINEMA) {
				if (support.bits.cinema_content == 1) {
					cn0_cn1_value = 2;
				} else {
					cn0_cn1_value = 0;
					itc_value = 0;
				}
			} else if (cn0_cn1 == DISPLAY_CONTENT_TYPE_GAME) {
				if (support.bits.game_content == 1) {
					cn0_cn1_value = 3;
				} else {
					cn0_cn1_value = 0;
					itc_value = 0;
				}
			}
		}
		hdmi_info->bits.CN0_CN1 = cn0_cn1_value;
		hdmi_info->bits.ITC = itc_value;
	}

	/* TODO : We should handle YCC quantization */
	/* but we do not have matrix calculation */
	if (stream->sink->edid_caps.qs_bit == 1 &&
			stream->sink->edid_caps.qy_bit == 1) {
		if (color_space == COLOR_SPACE_SRGB ||
			color_space == COLOR_SPACE_2020_RGB_FULLRANGE) {
			hdmi_info->bits.Q0_Q1   = RGB_QUANTIZATION_FULL_RANGE;
			hdmi_info->bits.YQ0_YQ1 = YYC_QUANTIZATION_FULL_RANGE;
		} else if (color_space == COLOR_SPACE_SRGB_LIMITED ||
					color_space == COLOR_SPACE_2020_RGB_LIMITEDRANGE) {
			hdmi_info->bits.Q0_Q1   = RGB_QUANTIZATION_LIMITED_RANGE;
			hdmi_info->bits.YQ0_YQ1 = YYC_QUANTIZATION_LIMITED_RANGE;
		} else {
			hdmi_info->bits.Q0_Q1   = RGB_QUANTIZATION_DEFAULT_RANGE;
			hdmi_info->bits.YQ0_YQ1 = YYC_QUANTIZATION_LIMITED_RANGE;
		}
	} else {
		hdmi_info->bits.Q0_Q1   = RGB_QUANTIZATION_DEFAULT_RANGE;
		hdmi_info->bits.YQ0_YQ1 = YYC_QUANTIZATION_LIMITED_RANGE;
	}

	format = stream->timing.timing_3d_format;
	/*todo, add 3DStereo support*/
	if (format != TIMING_3D_FORMAT_NONE) {
		// Based on HDMI specs hdmi vic needs to be converted to cea vic when 3D is enabled
		/* NOTE(review): reconstructed HDMI-VIC -> CEA-VIC mapping. */
		switch (pipe_ctx->stream->timing.hdmi_vic) {
		case 1:
			vic = 95;
			break;
		case 2:
			vic = 94;
			break;
		case 3:
			vic = 93;
			break;
		case 4:
			vic = 98;
			break;
		default:
			break;
		}
	}

	hdmi_info->bits.VIC0_VIC7 = vic;

	/* pixel repetition
	 * PR0 - PR3 start from 0 whereas pHwPathMode->mode.timing.flags.pixel
	 * repetition start from 1 */
	hdmi_info->bits.PR0_PR3 = 0;

	/* Bar Info
	 * barTop:    Line Number of End of Top Bar.
	 * barBottom: Line Number of Start of Bottom Bar.
	 * barLeft:   Pixel Number of End of Left Bar.
	 * barRight:  Pixel Number of Start of Right Bar. */
	hdmi_info->bits.bar_top = stream->timing.v_border_top;
	hdmi_info->bits.bar_bottom = (stream->timing.v_total
			- stream->timing.v_border_bottom + 1);
	hdmi_info->bits.bar_left  = stream->timing.h_border_left;
	hdmi_info->bits.bar_right = (stream->timing.h_total
			- stream->timing.h_border_right + 1);

	/* check_sum - Calculate AFMT_AVI_INFO0 ~ AFMT_AVI_INFO3 */
	check_sum = &info_frame.avi_info_packet.info_packet_hdmi.packet_raw_data.sb[0];

	*check_sum = HDMI_INFOFRAME_TYPE_AVI + HDMI_AVI_INFOFRAME_SIZE + 2;

	for (byte_index = 1; byte_index <= HDMI_AVI_INFOFRAME_SIZE; byte_index++)
		*check_sum += hdmi_info->packet_raw_data.sb[byte_index];

	/* one byte complement */
	*check_sum = (uint8_t) (0x100 - *check_sum);

	/* Store in hw_path_mode */
	info_packet->hb0 = hdmi_info->packet_raw_data.hb0;
	info_packet->hb1 = hdmi_info->packet_raw_data.hb1;
	info_packet->hb2 = hdmi_info->packet_raw_data.hb2;

	for (byte_index = 0; byte_index < sizeof(info_frame.avi_info_packet.
				info_packet_hdmi.packet_raw_data.sb); byte_index++)
		info_packet->sb[byte_index] = info_frame.avi_info_packet.
				info_packet_hdmi.packet_raw_data.sb[byte_index];

	info_packet->valid = true;
}
/*
 * Build the HDMI Vendor Specific InfoFrame (H14b VSIF) for 3D stereo and
 * 4K HDMI-VIC modes. For all other modes no VSIF is emitted and
 * info_packet->valid stays false.
 */
static void set_vendor_info_packet(
		struct encoder_info_packet *info_packet,
		struct dc_stream_state *stream)
{
	uint32_t length = 0;
	bool hdmi_vic_mode = false;
	uint8_t checksum = 0;
	uint32_t i = 0;
	enum dc_timing_3d_format format;
	// Can be different depending on packet content /*todo*/
	// unsigned int length = pPathMode->dolbyVision ? 24 : 5;

	info_packet->valid = false;

	format = stream->timing.timing_3d_format;
	if (stream->view_format == VIEW_3D_FORMAT_NONE)
		format = TIMING_3D_FORMAT_NONE;

	/* Can be different depending on packet content */
	length = 5;

	if (stream->timing.hdmi_vic != 0
			&& stream->timing.h_total >= 3840
			&& stream->timing.v_total >= 2160)
		hdmi_vic_mode = true;

	/* According to HDMI 1.4a CTS, VSIF should be sent
	 * for both 3D stereo and HDMI VIC modes.
	 * For all other modes, there is no VSIF sent.  */

	if (format == TIMING_3D_FORMAT_NONE && !hdmi_vic_mode)
		return;

	/* 24bit IEEE Registration identifier (0x000c03). LSB first. */
	info_packet->sb[1] = 0x03;
	info_packet->sb[2] = 0x0C;
	info_packet->sb[3] = 0x00;

	/*PB4: 5 lower bytes = 0 (reserved). 3 higher bits = HDMI_Video_Format.
	 * The value for HDMI_Video_Format are:
	 * 0x0 (0b000) - No additional HDMI video format is presented in this
	 * packet
	 * 0x1 (0b001) - Extended resolution format present. 1 byte of HDMI_VIC
	 * parameter follows
	 * 0x2 (0b010) - 3D format indication present. 3D_Structure and
	 * potentially 3D_Ext_Data follows
	 * 0x3..0x7 (0b011..0b111) - reserved for future use */
	if (format != TIMING_3D_FORMAT_NONE)
		info_packet->sb[4] = (2 << 5);
	else if (hdmi_vic_mode)
		info_packet->sb[4] = (1 << 5);

	/* PB5: If PB4 claims 3D timing (HDMI_Video_Format = 0x2):
	 * 4 lower bites = 0 (reserved). 4 higher bits = 3D_Structure.
	 * The value for 3D_Structure are:
	 * 0x0 - Frame Packing
	 * 0x1 - Field Alternative
	 * 0x2 - Line Alternative
	 * 0x3 - Side-by-Side (full)
	 * 0x4 - L + depth
	 * 0x5 - L + depth + graphics + graphics-depth
	 * 0x6 - Top-and-Bottom
	 * 0x7 - Reserved for future use
	 * 0x8 - Side-by-Side (Half)
	 * 0x9..0xE - Reserved for future use
	 * 0xF - Not used */
	switch (format) {
	case TIMING_3D_FORMAT_HW_FRAME_PACKING:
	case TIMING_3D_FORMAT_SW_FRAME_PACKING:
		info_packet->sb[5] = (0x0 << 4);
		break;

	case TIMING_3D_FORMAT_SIDE_BY_SIDE:
	case TIMING_3D_FORMAT_SBS_SW_PACKED:
		info_packet->sb[5] = (0x8 << 4);
		/* NOTE(review): reconstructed — SbS(half) carries an extra
		 * 3D_Ext_Data byte, hence the longer payload. */
		length = 6;
		break;

	case TIMING_3D_FORMAT_TOP_AND_BOTTOM:
	case TIMING_3D_FORMAT_TB_SW_PACKED:
		info_packet->sb[5] = (0x6 << 4);
		break;

	default:
		break;
	}

	/*PB5: If PB4 is set to 0x1 (extended resolution format)
	 * fill PB5 with the correct HDMI VIC code */
	if (hdmi_vic_mode)
		info_packet->sb[5] = stream->timing.hdmi_vic;

	/* Header */
	info_packet->hb0 = HDMI_INFOFRAME_TYPE_VENDOR; /* VSIF packet type. */
	info_packet->hb1 = 0x01; /* Version */

	/* 4 lower bits = Length, 4 higher bits = 0 (reserved) */
	info_packet->hb2 = (uint8_t) (length);

	/* Calculate checksum */
	checksum = 0;
	checksum += info_packet->hb0;
	checksum += info_packet->hb1;
	checksum += info_packet->hb2;

	for (i = 1; i <= length; i++)
		checksum += info_packet->sb[i];

	info_packet->sb[0] = (uint8_t) (0x100 - checksum);

	info_packet->valid = true;
}
/*
 * Build the SPD InfoFrame carrying AMD FreeSync data (supported/enabled/
 * active flags plus min/max refresh rates). Header layout differs between
 * HDMI (InfoFrame) and DP (SDP). No-op if FreeSync is unsupported.
 */
static void set_spd_info_packet(
		struct encoder_info_packet *info_packet,
		struct dc_stream_state *stream)
{
	/* SPD info packet for FreeSync */

	unsigned char checksum = 0;
	unsigned int idx, payload_size = 0;

	/* Check if Freesync is supported. Return if false. If true,
	 * set the corresponding bit in the info packet
	 */
	if (stream->freesync_ctx.supported == false)
		return;

	if (dc_is_hdmi_signal(stream->signal)) {

		/* HEADER */

		/* HB0  = Packet Type = 0x83 (Source Product
		 *	  Descriptor InfoFrame)
		 */
		info_packet->hb0 = HDMI_INFOFRAME_TYPE_SPD;

		/* HB1  = Version = 0x01 */
		info_packet->hb1 = 0x01;

		/* HB2  = [Bits 7:5 = 0] [Bits 4:0 = Length = 0x08] */
		info_packet->hb2 = 0x08;

		payload_size = 0x08;

	} else if (dc_is_dp_signal(stream->signal)) {

		/* HEADER */

		/* HB0  = Secondary-data Packet ID = 0 - Only non-zero
		 *	  when used to associate audio related info packets
		 */
		info_packet->hb0 = 0x00;

		/* HB1  = Packet Type = 0x83 (Source Product
		 *	  Descriptor InfoFrame)
		 */
		info_packet->hb1 = HDMI_INFOFRAME_TYPE_SPD;

		/* HB2  = [Bits 7:0 = Least significant eight bits -
		 *	  For INFOFRAME, the value must be 1Bh]
		 */
		info_packet->hb2 = 0x1B;

		/* HB3  = [Bits 7:2 = INFOFRAME SDP Version Number = 0x1]
		 *	  [Bits 1:0 = Most significant two bits = 0x00]
		 */
		info_packet->hb3 = 0x04;

		payload_size = 0x1B;
	}

	/* PB1 = 0x1A (24bit AMD IEEE OUI (0x00001A) - Byte 0) */
	info_packet->sb[1] = 0x1A;

	/* PB2 = 0x00 (24bit AMD IEEE OUI (0x00001A) - Byte 1) */
	info_packet->sb[2] = 0x00;

	/* PB3 = 0x00 (24bit AMD IEEE OUI (0x00001A) - Byte 2) */
	info_packet->sb[3] = 0x00;

	/* PB4 = Reserved */
	info_packet->sb[4] = 0x00;

	/* PB5 = Reserved */
	info_packet->sb[5] = 0x00;

	/* PB6 = [Bits 7:3 = Reserved] */
	info_packet->sb[6] = 0x00;

	if (stream->freesync_ctx.supported == true)
		/* PB6 = [Bit 0 = FreeSync Supported] */
		info_packet->sb[6] |= 0x01;

	if (stream->freesync_ctx.enabled == true)
		/* PB6 = [Bit 1 = FreeSync Enabled] */
		info_packet->sb[6] |= 0x02;

	if (stream->freesync_ctx.active == true)
		/* PB6 = [Bit 2 = FreeSync Active] */
		info_packet->sb[6] |= 0x04;

	/* PB7 = FreeSync Minimum refresh rate (Hz) */
	info_packet->sb[7] = (unsigned char) (stream->freesync_ctx.
			min_refresh_in_micro_hz / 1000000);

	/* PB8 = FreeSync Maximum refresh rate (Hz)
	 *
	 * Note: We do not use the maximum capable refresh rate
	 * of the panel, because we should never go above the field
	 * rate of the mode timing set.
	 */
	info_packet->sb[8] = (unsigned char) (stream->freesync_ctx.
			nominal_refresh_in_micro_hz / 1000000);

	/* PB9 - PB27  = Reserved */
	for (idx = 9; idx <= 27; idx++)
		info_packet->sb[idx] = 0x00;

	/* Calculate checksum */
	checksum += info_packet->hb0;
	checksum += info_packet->hb1;
	checksum += info_packet->hb2;
	checksum += info_packet->hb3;

	for (idx = 1; idx <= payload_size; idx++)
		checksum += info_packet->sb[idx];

	/* PB0 = Checksum (one byte complement) */
	info_packet->sb[0] = (unsigned char) (0x100 - checksum);

	info_packet->valid = true;
}
/*
 * Build the HDR static metadata packet (Dynamic Range and Mastering
 * InfoFrame on HDMI, SDP on DP) from the plane's HDR metadata: mastering
 * display chromaticities, luminance limits, MaxCLL/MaxFALL.
 * No-op if the plane has no HDR support.
 */
static void set_hdr_static_info_packet(
		struct encoder_info_packet *info_packet,
		struct dc_plane_state *plane_state,
		struct dc_stream_state *stream)
{
	uint16_t i = 0;
	enum signal_type signal = stream->signal;
	struct dc_hdr_static_metadata hdr_metadata;
	uint32_t data;

	if (!plane_state)
		return;

	hdr_metadata = plane_state->hdr_static_ctx;

	if (!hdr_metadata.hdr_supported)
		return;

	if (dc_is_hdmi_signal(signal)) {
		info_packet->valid = true;

		info_packet->hb0 = 0x87;
		info_packet->hb1 = 0x01;
		info_packet->hb2 = 0x1A;
		/* HDMI payload starts after the checksum byte sb[0]. */
		i = 1;
	} else if (dc_is_dp_signal(signal)) {
		info_packet->valid = true;

		info_packet->hb0 = 0x00;
		info_packet->hb1 = 0x87;
		info_packet->hb2 = 0x1D;
		info_packet->hb3 = (0x13 << 2);
		/* DP payload starts after the two identifier bytes. */
		i = 2;
	}

	/* EOTF: 0x02 = SMPTE ST2084 when the surface is HDR. */
	data = hdr_metadata.is_hdr;
	info_packet->sb[i++] = data ? 0x02 : 0x00;
	info_packet->sb[i++] = 0x00;

	/* Mastering display chromaticities, little-endian 16-bit each. */
	data = hdr_metadata.chromaticity_green_x / 2;
	info_packet->sb[i++] = data & 0xFF;
	info_packet->sb[i++] = (data & 0xFF00) >> 8;

	data = hdr_metadata.chromaticity_green_y / 2;
	info_packet->sb[i++] = data & 0xFF;
	info_packet->sb[i++] = (data & 0xFF00) >> 8;

	data = hdr_metadata.chromaticity_blue_x / 2;
	info_packet->sb[i++] = data & 0xFF;
	info_packet->sb[i++] = (data & 0xFF00) >> 8;

	data = hdr_metadata.chromaticity_blue_y / 2;
	info_packet->sb[i++] = data & 0xFF;
	info_packet->sb[i++] = (data & 0xFF00) >> 8;

	data = hdr_metadata.chromaticity_red_x / 2;
	info_packet->sb[i++] = data & 0xFF;
	info_packet->sb[i++] = (data & 0xFF00) >> 8;

	data = hdr_metadata.chromaticity_red_y / 2;
	info_packet->sb[i++] = data & 0xFF;
	info_packet->sb[i++] = (data & 0xFF00) >> 8;

	data = hdr_metadata.chromaticity_white_point_x / 2;
	info_packet->sb[i++] = data & 0xFF;
	info_packet->sb[i++] = (data & 0xFF00) >> 8;

	data = hdr_metadata.chromaticity_white_point_y / 2;
	info_packet->sb[i++] = data & 0xFF;
	info_packet->sb[i++] = (data & 0xFF00) >> 8;

	data = hdr_metadata.max_luminance;
	info_packet->sb[i++] = data & 0xFF;
	info_packet->sb[i++] = (data & 0xFF00) >> 8;

	data = hdr_metadata.min_luminance;
	info_packet->sb[i++] = data & 0xFF;
	info_packet->sb[i++] = (data & 0xFF00) >> 8;

	data = hdr_metadata.maximum_content_light_level;
	info_packet->sb[i++] = data & 0xFF;
	info_packet->sb[i++] = (data & 0xFF00) >> 8;

	data = hdr_metadata.maximum_frame_average_light_level;
	info_packet->sb[i++] = data & 0xFF;
	info_packet->sb[i++] = (data & 0xFF00) >> 8;

	if (dc_is_hdmi_signal(signal)) {
		uint32_t checksum = 0;

		checksum += info_packet->hb0;
		checksum += info_packet->hb1;
		checksum += info_packet->hb2;

		for (i = 1; i <= info_packet->hb2; i++)
			checksum += info_packet->sb[i];

		info_packet->sb[0] = 0x100 - checksum;
	} else if (dc_is_dp_signal(signal)) {
		info_packet->sb[0] = 0x01;
		info_packet->sb[1] = 0x1A;
	}
}
/*
 * Build the DP VSC SDP. Currently only emitted (revision 2) when PSR is
 * enabled on the stream's link; otherwise the packet is left invalid.
 */
static void set_vsc_info_packet(
		struct encoder_info_packet *info_packet,
		struct dc_stream_state *stream)
{
	unsigned int vscPacketRevision = 0;
	unsigned int i;

	if (stream->sink->link->psr_enabled) {
		vscPacketRevision = 2;
	}

	/* VSC packet not needed based on the features
	 * supported by this DP display
	 */
	if (vscPacketRevision == 0)
		return;

	if (vscPacketRevision == 0x2) {
		/* Secondary-data Packet ID = 0*/
		info_packet->hb0 = 0x00;
		/* 07h - Packet Type Value indicating Video
		 * Stream Configuration packet
		 */
		info_packet->hb1 = 0x07;
		/* 02h = VSC SDP supporting 3D stereo and PSR
		 * (applies to eDP v1.3 or higher).
		 */
		info_packet->hb2 = 0x02;
		/* 08h = VSC packet supporting 3D stereo + PSR
		 * (HB2 = 02h).
		 */
		info_packet->hb3 = 0x08;

		for (i = 0; i < 28; i++)
			info_packet->sb[i] = 0;

		info_packet->valid = true;
	}

	/*TODO: stereo 3D support and extend pixel encoding colorimetry*/
}
/*
 * Drop every reference held by @context: release each stream's planes,
 * zero its plane count, then release and clear the stream itself.
 */
void dc_resource_validate_ctx_destruct(struct validate_context *context)
{
	int i, j;

	for (i = 0; i < context->stream_count; i++) {
		for (j = 0; j < context->stream_status[i].plane_count; j++)
			dc_plane_state_release(
				context->stream_status[i].plane_states[j]);

		context->stream_status[i].plane_count = 0;
		dc_stream_release(context->streams[i]);
		context->streams[i] = NULL;
	}
}
/*
 * Copy src_ctx into dst_ctx and retain all surfaces and streams referenced
 * by it.
 */
void dc_resource_validate_ctx_copy_construct(
		const struct validate_context *src_ctx,
		struct validate_context *dst_ctx)
{
	int i, j;
	/* save the destination's own refcount: the struct assignment
	 * below would otherwise clobber it with src_ctx's value */
	int ref_count = dst_ctx->ref_count;

	*dst_ctx = *src_ctx;

	/* Rebase intra-context pipe links.  After the shallow copy the
	 * top/bottom pipe pointers still point into src_ctx's pipe array;
	 * redirect each one to the slot with the same pipe_idx in dst_ctx. */
	for (i = 0; i < MAX_PIPES; i++) {
		struct pipe_ctx *cur_pipe = &dst_ctx->res_ctx.pipe_ctx[i];

		if (cur_pipe->top_pipe)
			cur_pipe->top_pipe =
				&dst_ctx->res_ctx.pipe_ctx[cur_pipe->top_pipe->pipe_idx];

		if (cur_pipe->bottom_pipe)
			cur_pipe->bottom_pipe =
				&dst_ctx->res_ctx.pipe_ctx[cur_pipe->bottom_pipe->pipe_idx];
	}

	/* take a reference on every stream and plane now shared with src_ctx */
	for (i = 0; i < dst_ctx->stream_count; i++) {
		dc_stream_retain(dst_ctx->streams[i]);
		for (j = 0; j < dst_ctx->stream_status[i].plane_count; j++)
			dc_plane_state_retain(
				dst_ctx->stream_status[i].plane_states[j]);
	}

	/* context refcount should not be overridden */
	dst_ctx->ref_count = ref_count;
}
2253 struct clock_source
*dc_resource_find_first_free_pll(
2254 struct resource_context
*res_ctx
,
2255 const struct resource_pool
*pool
)
2259 for (i
= 0; i
< pool
->clk_src_count
; ++i
) {
2260 if (res_ctx
->clock_source_ref_count
[i
] == 0)
2261 return pool
->clock_sources
[i
];
/*
 * Populate pipe_ctx->encoder_info_frame for the stream's signal type.
 *
 * All packets start out invalid; HDMI and DP then get their respective
 * sets of info packets built.  Other signal types keep everything
 * invalid.
 */
void resource_build_info_frame(struct pipe_ctx *pipe_ctx)
{
	enum signal_type signal = SIGNAL_TYPE_NONE;
	struct encoder_info_frame *info = &pipe_ctx->encoder_info_frame;

	/* default all packets to invalid */
	info->avi.valid = false;
	info->gamut.valid = false;
	info->vendor.valid = false;
	info->spd.valid = false;
	info->hdrsmd.valid = false;
	info->vsc.valid = false;

	signal = pipe_ctx->stream->signal;

	/* HDMi and DP have different info packets*/
	if (dc_is_hdmi_signal(signal)) {
		set_avi_info_frame(&info->avi, pipe_ctx);

		set_vendor_info_packet(&info->vendor, pipe_ctx->stream);

		set_spd_info_packet(&info->spd, pipe_ctx->stream);

		set_hdr_static_info_packet(&info->hdrsmd,
				pipe_ctx->plane_state, pipe_ctx->stream);

	} else if (dc_is_dp_signal(signal)) {
		set_vsc_info_packet(&info->vsc, pipe_ctx->stream);

		set_spd_info_packet(&info->spd, pipe_ctx->stream);

		set_hdr_static_info_packet(&info->hdrsmd,
				pipe_ctx->plane_state, pipe_ctx->stream);
	}

	/* gamut checksum is patched unconditionally, even while the
	 * packet is marked invalid */
	patch_gamut_packet_checksum(&info->gamut);
}
/*
 * Assign a clock source (PLL) to every pipe driving a stream in @context.
 *
 * Streams unchanged from @old_context are skipped (they keep their
 * existing assignment).  DP and virtual signals use the pool's dedicated
 * DP clock source; everything else first tries to share an already-used
 * clock source (unless PLL sharing is disabled by config) and then falls
 * back to the first free PLL.  Returns DC_NO_CLOCK_SOURCE_RESOURCE when
 * no PLL can be found for a pipe, DC_OK otherwise.
 */
enum dc_status resource_map_clock_resources(
		const struct core_dc *dc,
		struct validate_context *context,
		struct validate_context *old_context)
{
	int i, j;
	const struct resource_pool *pool = dc->res_pool;

	/* acquire new resources */
	for (i = 0; i < context->stream_count; i++) {
		struct dc_stream_state *stream = context->streams[i];

		if (old_context && resource_is_stream_unchanged(old_context, stream))
			continue;

		for (j = 0; j < MAX_PIPES; j++) {
			struct pipe_ctx *pipe_ctx =
				&context->res_ctx.pipe_ctx[j];

			if (context->res_ctx.pipe_ctx[j].stream != stream)
				continue;

			if (dc_is_dp_signal(pipe_ctx->stream->signal)
				|| pipe_ctx->stream->signal == SIGNAL_TYPE_VIRTUAL)
				pipe_ctx->clock_source = pool->dp_clock_source;
			else {
				pipe_ctx->clock_source = NULL;

				/* NOTE(review): argument lists of the two
				 * calls below were reconstructed — confirm
				 * against the helpers' prototypes */
				if (!dc->public.config.disable_disp_pll_sharing)
					resource_find_used_clk_src_for_sharing(
							&context->res_ctx,
							pool,
							pipe_ctx);

				if (pipe_ctx->clock_source == NULL)
					pipe_ctx->clock_source =
						dc_resource_find_first_free_pll(
								&context->res_ctx,
								pool);
			}

			if (pipe_ctx->clock_source == NULL)
				return DC_NO_CLOCK_SOURCE_RESOURCE;

			resource_reference_clock_source(
					&context->res_ctx, pool,
					pipe_ctx->clock_source);

			/* only one cs per stream regardless of mpo */
			break;
		}
	}

	return DC_OK;
}
/*
 * Note: We need to disable output if clock sources change,
 * since bios does optimization and doesn't apply if changing
 * PHY when not already disabled.
 *
 * Returns true when @pipe_ctx differs from @pipe_ctx_old in any way
 * that requires a full pipe reprogram (sink, signal, audio, clock
 * source, stream encoder or timing change).
 */
bool pipe_need_reprogram(
		struct pipe_ctx *pipe_ctx_old,
		struct pipe_ctx *pipe_ctx)
{
	/* old pipe drove nothing: no reprogram needed */
	if (!pipe_ctx_old->stream)
		return false;

	if (pipe_ctx_old->stream->sink != pipe_ctx->stream->sink)
		return true;

	if (pipe_ctx_old->stream->signal != pipe_ctx->stream->signal)
		return true;

	if (pipe_ctx_old->audio != pipe_ctx->audio)
		return true;

	/* a clock-source change only matters when the stream changed too */
	if (pipe_ctx_old->clock_source != pipe_ctx->clock_source
			&& pipe_ctx_old->stream != pipe_ctx->stream)
		return true;

	if (pipe_ctx_old->stream_enc != pipe_ctx->stream_enc)
		return true;

	if (is_timing_changed(pipe_ctx_old->stream, pipe_ctx->stream))
		return true;

	return false;
}
/*
 * Translate a stream's dither_option into FMT (formatter) bit-depth
 * reduction settings: truncation, spatial dithering and temporal
 * (frame modulation) dithering.
 *
 * Depth encoding used throughout: 0 = 6 bit, 1 = 8 bit, 2 = 10 bit.
 * fmt_bit_depth is fully zeroed first, so DITHER_OPTION_DISABLE leaves
 * everything off.
 */
void resource_build_bit_depth_reduction_params(struct dc_stream_state *stream,
		struct bit_depth_reduction_params *fmt_bit_depth)
{
	enum dc_dither_option option = stream->dither_option;
	enum dc_pixel_encoding pixel_encoding =
			stream->timing.pixel_encoding;

	memset(fmt_bit_depth, 0, sizeof(*fmt_bit_depth));

	if (option == DITHER_OPTION_DISABLE)
		return;

	/* truncation to 6/8/10 bits, per the TRUN* family of options */
	if (option == DITHER_OPTION_TRUN6) {
		fmt_bit_depth->flags.TRUNCATE_ENABLED = 1;
		fmt_bit_depth->flags.TRUNCATE_DEPTH = 0;
	} else if (option == DITHER_OPTION_TRUN8 ||
			option == DITHER_OPTION_TRUN8_SPATIAL6 ||
			option == DITHER_OPTION_TRUN8_FM6) {
		fmt_bit_depth->flags.TRUNCATE_ENABLED = 1;
		fmt_bit_depth->flags.TRUNCATE_DEPTH = 1;
	} else if (option == DITHER_OPTION_TRUN10 ||
			option == DITHER_OPTION_TRUN10_SPATIAL6 ||
			option == DITHER_OPTION_TRUN10_SPATIAL8 ||
			option == DITHER_OPTION_TRUN10_FM8 ||
			option == DITHER_OPTION_TRUN10_FM6 ||
			option == DITHER_OPTION_TRUN10_SPATIAL8_FM6) {
		fmt_bit_depth->flags.TRUNCATE_ENABLED = 1;
		fmt_bit_depth->flags.TRUNCATE_DEPTH = 2;
	}

	/* special case - Formatter can only reduce by 4 bits at most.
	 * When reducing from 12 to 6 bits,
	 * HW recommends we use trunc with round mode
	 * (if we did nothing, trunc to 10 bits would be used)
	 * note that any 12->10 bit reduction is ignored prior to DCE8,
	 * as the input was 10 bits.
	 */
	if (option == DITHER_OPTION_SPATIAL6_FRAME_RANDOM ||
			option == DITHER_OPTION_SPATIAL6 ||
			option == DITHER_OPTION_FM6) {
		fmt_bit_depth->flags.TRUNCATE_ENABLED = 1;
		fmt_bit_depth->flags.TRUNCATE_DEPTH = 2;
		fmt_bit_depth->flags.TRUNCATE_MODE = 1;
	}

	/* spatial dither
	 * note that spatial modes 1-3 are never used
	 */
	if (option == DITHER_OPTION_SPATIAL6_FRAME_RANDOM ||
			option == DITHER_OPTION_SPATIAL6 ||
			option == DITHER_OPTION_TRUN10_SPATIAL6 ||
			option == DITHER_OPTION_TRUN8_SPATIAL6) {
		fmt_bit_depth->flags.SPATIAL_DITHER_ENABLED = 1;
		fmt_bit_depth->flags.SPATIAL_DITHER_DEPTH = 0;
		fmt_bit_depth->flags.HIGHPASS_RANDOM = 1;
		/* RGB random only applies to RGB pixel encoding */
		fmt_bit_depth->flags.RGB_RANDOM =
				(pixel_encoding == PIXEL_ENCODING_RGB) ? 1 : 0;
	} else if (option == DITHER_OPTION_SPATIAL8_FRAME_RANDOM ||
			option == DITHER_OPTION_SPATIAL8 ||
			option == DITHER_OPTION_SPATIAL8_FM6 ||
			option == DITHER_OPTION_TRUN10_SPATIAL8 ||
			option == DITHER_OPTION_TRUN10_SPATIAL8_FM6) {
		fmt_bit_depth->flags.SPATIAL_DITHER_ENABLED = 1;
		fmt_bit_depth->flags.SPATIAL_DITHER_DEPTH = 1;
		fmt_bit_depth->flags.HIGHPASS_RANDOM = 1;
		fmt_bit_depth->flags.RGB_RANDOM =
				(pixel_encoding == PIXEL_ENCODING_RGB) ? 1 : 0;
	} else if (option == DITHER_OPTION_SPATIAL10_FRAME_RANDOM ||
			option == DITHER_OPTION_SPATIAL10 ||
			option == DITHER_OPTION_SPATIAL10_FM8 ||
			option == DITHER_OPTION_SPATIAL10_FM6) {
		fmt_bit_depth->flags.SPATIAL_DITHER_ENABLED = 1;
		fmt_bit_depth->flags.SPATIAL_DITHER_DEPTH = 2;
		fmt_bit_depth->flags.HIGHPASS_RANDOM = 1;
		fmt_bit_depth->flags.RGB_RANDOM =
				(pixel_encoding == PIXEL_ENCODING_RGB) ? 1 : 0;
	}

	/* plain SPATIAL* options use a fixed pattern; all other spatial
	 * variants randomize per frame */
	if (option == DITHER_OPTION_SPATIAL6 ||
			option == DITHER_OPTION_SPATIAL8 ||
			option == DITHER_OPTION_SPATIAL10) {
		fmt_bit_depth->flags.FRAME_RANDOM = 0;
	} else {
		fmt_bit_depth->flags.FRAME_RANDOM = 1;
	}

	//////////////////////
	//// temporal dither
	//////////////////////
	if (option == DITHER_OPTION_FM6 ||
			option == DITHER_OPTION_SPATIAL8_FM6 ||
			option == DITHER_OPTION_SPATIAL10_FM6 ||
			option == DITHER_OPTION_TRUN10_FM6 ||
			option == DITHER_OPTION_TRUN8_FM6 ||
			option == DITHER_OPTION_TRUN10_SPATIAL8_FM6) {
		fmt_bit_depth->flags.FRAME_MODULATION_ENABLED = 1;
		fmt_bit_depth->flags.FRAME_MODULATION_DEPTH = 0;
	} else if (option == DITHER_OPTION_FM8 ||
			option == DITHER_OPTION_SPATIAL10_FM8 ||
			option == DITHER_OPTION_TRUN10_FM8) {
		fmt_bit_depth->flags.FRAME_MODULATION_ENABLED = 1;
		fmt_bit_depth->flags.FRAME_MODULATION_DEPTH = 1;
	} else if (option == DITHER_OPTION_FM10) {
		fmt_bit_depth->flags.FRAME_MODULATION_ENABLED = 1;
		fmt_bit_depth->flags.FRAME_MODULATION_DEPTH = 2;
	}

	fmt_bit_depth->pixel_encoding = pixel_encoding;
}
/*
 * Validate @stream against timing-generator, link-encoder and link
 * timing constraints.  Logs an error and returns false on the first
 * failed check; true when everything passes.
 */
bool dc_validate_stream(const struct dc *dc, struct dc_stream_state *stream)
{
	struct core_dc *core_dc = DC_TO_CORE(dc);
	/* dc_ctx is consumed implicitly by the DC_ERROR macro below */
	struct dc_context *dc_ctx = core_dc->ctx;
	struct dc_link *link = stream->sink->link;
	/* any TG in the pool suffices for timing validation; use TG 0 */
	struct timing_generator *tg = core_dc->res_pool->timing_generators[0];
	enum dc_status res = DC_OK;

	calculate_phy_pix_clks(stream);

	if (!tg->funcs->validate_timing(tg, &stream->timing))
		res = DC_FAIL_CONTROLLER_VALIDATE;

	/* NOTE(review): the guards between checks and the argument list of
	 * dc_link_validate_mode_timing were reconstructed — confirm against
	 * the helper prototypes */
	if (res == DC_OK)
		if (!link->link_enc->funcs->validate_output_with_stream(
				link->link_enc, stream))
			res = DC_FAIL_ENC_VALIDATE;

	/* TODO: validate audio ASIC caps, encoder */

	if (res == DC_OK)
		res = dc_link_validate_mode_timing(stream,
				link,
				&stream->timing);

	if (res != DC_OK)
		DC_ERROR("Failed validation for stream %p, err:%d, !\n",
				stream, res);

	return res == DC_OK;
}
2537 bool dc_validate_plane(const struct dc
*dc
, const struct dc_plane_state
*plane_state
)
2539 struct core_dc
*core_dc
= DC_TO_CORE(dc
);
2541 /* TODO For now validates pixel format only */
2542 if (core_dc
->res_pool
->funcs
->validate_plane
)
2543 return core_dc
->res_pool
->funcs
->validate_plane(plane_state
) == DC_OK
;