drivers/gpu/drm/amd/display/dc/core/dc_link_dp.c
1 /* Copyright 2015 Advanced Micro Devices, Inc. */
2 #include "dm_services.h"
3 #include "dc.h"
4 #include "dc_link_dp.h"
5 #include "dm_helpers.h"
6
7 #include "inc/core_types.h"
8 #include "link_hwss.h"
9 #include "dc_link_ddc.h"
10 #include "core_status.h"
11 #include "dpcd_defs.h"
12
13 #include "core_dc.h"
14
15 /* maximum pre-emphasis level allowed for each voltage swing level*/
16 static const enum dc_pre_emphasis voltage_swing_to_pre_emphasis[] = {
17 PRE_EMPHASIS_LEVEL3,
18 PRE_EMPHASIS_LEVEL2,
19 PRE_EMPHASIS_LEVEL1,
20 PRE_EMPHASIS_DISABLED };
21
22 enum {
23 POST_LT_ADJ_REQ_LIMIT = 6,
24 POST_LT_ADJ_REQ_TIMEOUT = 200
25 };
26
27 enum {
28 LINK_TRAINING_MAX_RETRY_COUNT = 5,
29 /* to avoid an infinite loop wherein the receiver
30 * switches between different VS levels
31 */
32 LINK_TRAINING_MAX_CR_RETRY = 100
33 };
34
35 static const struct dc_link_settings link_training_fallback_table[] = {
36 /* 3240 Mbytes/sec*/
37 { LANE_COUNT_FOUR, LINK_RATE_HIGH3, LINK_SPREAD_DISABLED },
38 /* 2160 Mbytes/sec*/
39 { LANE_COUNT_FOUR, LINK_RATE_HIGH2, LINK_SPREAD_DISABLED },
40 /* 1080 Mbytes/sec*/
41 { LANE_COUNT_FOUR, LINK_RATE_HIGH, LINK_SPREAD_DISABLED },
42 /* 648 Mbytes/sec*/
43 { LANE_COUNT_FOUR, LINK_RATE_LOW, LINK_SPREAD_DISABLED },
44 /* 1620 Mbytes/sec*/
45 { LANE_COUNT_TWO, LINK_RATE_HIGH3, LINK_SPREAD_DISABLED },
46 /* 1080 Mbytes/sec*/
47 { LANE_COUNT_TWO, LINK_RATE_HIGH2, LINK_SPREAD_DISABLED },
48 /* 540 Mbytes/sec*/
49 { LANE_COUNT_TWO, LINK_RATE_HIGH, LINK_SPREAD_DISABLED },
50 /* 324 Mbytes/sec*/
51 { LANE_COUNT_TWO, LINK_RATE_LOW, LINK_SPREAD_DISABLED },
52 /* 810 Mbytes/sec*/
53 { LANE_COUNT_ONE, LINK_RATE_HIGH3, LINK_SPREAD_DISABLED },
54 /* 540 Mbytes/sec*/
55 { LANE_COUNT_ONE, LINK_RATE_HIGH2, LINK_SPREAD_DISABLED },
56 /* 270 Mbytes/sec*/
57 { LANE_COUNT_ONE, LINK_RATE_HIGH, LINK_SPREAD_DISABLED },
58 /* 162 Mbytes/sec*/
59 { LANE_COUNT_ONE, LINK_RATE_LOW, LINK_SPREAD_DISABLED } };
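/*
 * Worked example for the bandwidth figures above: the per-lane payload
 * rate is the raw link rate times the 8b/10b coding efficiency (0.8),
 * i.e. RBR 1.62 Gbps -> 162 MB/s, HBR 2.7 Gbps -> 270 MB/s,
 * HBR2 5.4 Gbps -> 540 MB/s, HBR3 8.1 Gbps -> 810 MB/s per lane.
 * A table entry is that per-lane figure times the lane count,
 * e.g. { LANE_COUNT_FOUR, LINK_RATE_HIGH2 } -> 4 * 540 = 2160 Mbytes/sec.
 */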
60
61 static void wait_for_training_aux_rd_interval(
62 struct core_link* link,
63 uint32_t default_wait_in_micro_secs)
64 {
65 union training_aux_rd_interval training_rd_interval;
66
67 /* overwrite the delay if rev > 1.1*/
68 if (link->dpcd_caps.dpcd_rev.raw >= DPCD_REV_12) {
69 /* DP 1.2 or later - retrieve delay through
70 * "DPCD_ADDR_TRAINING_AUX_RD_INTERVAL" register */
71 core_link_read_dpcd(
72 link,
73 DPCD_ADDRESS_TRAINING_AUX_RD_INTERVAL,
74 (uint8_t *)&training_rd_interval,
75 sizeof(training_rd_interval));
76
77 if (training_rd_interval.bits.TRAINIG_AUX_RD_INTERVAL)
78 default_wait_in_micro_secs =
79 training_rd_interval.bits.TRAINIG_AUX_RD_INTERVAL * 4000;
80 }
81
82 udelay(default_wait_in_micro_secs);
83
84 dm_logger_write(link->ctx->logger, LOG_HW_LINK_TRAINING,
85 "%s:\n wait = %d\n",
86 __func__,
87 default_wait_in_micro_secs);
88 }
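/*
 * Example of the delay selection above (illustrative): for a DP 1.2+
 * sink whose TRAINING_AUX_RD_INTERVAL field reads back as 2, the wait
 * becomes 2 * 4000 = 8000 us; if the field reads back as 0, the
 * caller-supplied default (100 us for clock recovery, 400 us for
 * channel equalization in this file) is used instead.
 */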
89
90 static void dpcd_set_training_pattern(
91 struct core_link* link,
92 union dpcd_training_pattern dpcd_pattern)
93 {
94 core_link_write_dpcd(
95 link,
96 DPCD_ADDRESS_TRAINING_PATTERN_SET,
97 &dpcd_pattern.raw,
98 1);
99
100 dm_logger_write(link->ctx->logger, LOG_HW_LINK_TRAINING,
101 "%s\n %x pattern = %x\n",
102 __func__,
103 DPCD_ADDRESS_TRAINING_PATTERN_SET,
104 dpcd_pattern.v1_4.TRAINING_PATTERN_SET);
105 }
106
107 static void dpcd_set_link_settings(
108 struct core_link* link,
109 const struct link_training_settings *lt_settings)
110 {
111 uint8_t rate = (uint8_t)
112 (lt_settings->link_settings.link_rate);
113
114 union down_spread_ctrl downspread = {{0}};
115 union lane_count_set lane_count_set = {{0}};
116 uint8_t link_set_buffer[2];
117
118 downspread.raw = (uint8_t)
119 (lt_settings->link_settings.link_spread);
120
121 lane_count_set.bits.LANE_COUNT_SET =
122 lt_settings->link_settings.lane_count;
123
124 lane_count_set.bits.ENHANCED_FRAMING = 1;
125
126 lane_count_set.bits.POST_LT_ADJ_REQ_GRANTED =
127 link->dpcd_caps.max_ln_count.bits.POST_LT_ADJ_REQ_SUPPORTED;
128
129 link_set_buffer[0] = rate;
130 link_set_buffer[1] = lane_count_set.raw;
131
132 core_link_write_dpcd(link, DPCD_ADDRESS_LINK_BW_SET,
133 link_set_buffer, 2);
134 core_link_write_dpcd(link, DPCD_ADDRESS_DOWNSPREAD_CNTL,
135 &downspread.raw, sizeof(downspread));
136
137 dm_logger_write(link->ctx->logger, LOG_HW_LINK_TRAINING,
138 "%s\n %x rate = %x\n %x lane = %x\n %x spread = %x\n",
139 __func__,
140 DPCD_ADDRESS_LINK_BW_SET,
141 lt_settings->link_settings.link_rate,
142 DPCD_ADDRESS_LANE_COUNT_SET,
143 lt_settings->link_settings.lane_count,
144 DPCD_ADDRESS_DOWNSPREAD_CNTL,
145 lt_settings->link_settings.link_spread);
146
147 }
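/*
 * Layout of the 2-byte burst written above (assuming the usual DPCD
 * map where LANE_COUNT_SET immediately follows LINK_BW_SET):
 *   link_set_buffer[0] -> DPCD_ADDRESS_LINK_BW_SET    (link rate code)
 *   link_set_buffer[1] -> DPCD_ADDRESS_LANE_COUNT_SET (lane count plus
 *                         ENHANCED_FRAMING and POST_LT_ADJ_REQ_GRANTED)
 * DOWNSPREAD_CNTL is then written separately.
 */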
148
149 static enum dpcd_training_patterns
150 hw_training_pattern_to_dpcd_training_pattern(
151 struct core_link* link,
152 enum hw_dp_training_pattern pattern)
153 {
154 enum dpcd_training_patterns dpcd_tr_pattern =
155 DPCD_TRAINING_PATTERN_VIDEOIDLE;
156
157 switch (pattern) {
158 case HW_DP_TRAINING_PATTERN_1:
159 dpcd_tr_pattern = DPCD_TRAINING_PATTERN_1;
160 break;
161 case HW_DP_TRAINING_PATTERN_2:
162 dpcd_tr_pattern = DPCD_TRAINING_PATTERN_2;
163 break;
164 case HW_DP_TRAINING_PATTERN_3:
165 dpcd_tr_pattern = DPCD_TRAINING_PATTERN_3;
166 break;
167 case HW_DP_TRAINING_PATTERN_4:
168 dpcd_tr_pattern = DPCD_TRAINING_PATTERN_4;
169 break;
170 default:
171 ASSERT(0);
172 dm_logger_write(link->ctx->logger, LOG_HW_LINK_TRAINING,
173 "%s: Invalid HW Training pattern: %d\n",
174 __func__, pattern);
175 break;
176 }
177
178 return dpcd_tr_pattern;
179
180 }
181
182 static void dpcd_set_lt_pattern_and_lane_settings(
183 struct core_link* link,
184 const struct link_training_settings *lt_settings,
185 enum hw_dp_training_pattern pattern)
186 {
187 union dpcd_training_lane dpcd_lane[LANE_COUNT_DP_MAX] = {{{0}}};
188 const uint32_t dpcd_base_lt_offset =
189 DPCD_ADDRESS_TRAINING_PATTERN_SET;
190 uint8_t dpcd_lt_buffer[5] = {0};
191 union dpcd_training_pattern dpcd_pattern = {{0}};
192 uint32_t lane;
193 uint32_t size_in_bytes;
194 bool edp_workaround = false; /* TODO link_prop.INTERNAL */
195
196 /*****************************************************************
197 * DpcdAddress_TrainingPatternSet
198 *****************************************************************/
199 dpcd_pattern.v1_4.TRAINING_PATTERN_SET =
200 hw_training_pattern_to_dpcd_training_pattern(link, pattern);
201
202 dpcd_lt_buffer[DPCD_ADDRESS_TRAINING_PATTERN_SET - dpcd_base_lt_offset]
203 = dpcd_pattern.raw;
204
205 dm_logger_write(link->ctx->logger, LOG_HW_LINK_TRAINING,
206 "%s\n %x pattern = %x\n",
207 __func__,
208 DPCD_ADDRESS_TRAINING_PATTERN_SET,
209 dpcd_pattern.v1_4.TRAINING_PATTERN_SET);
210
211 /*****************************************************************
212 * DpcdAddress_Lane0Set -> DpcdAddress_Lane3Set
213 *****************************************************************/
214 for (lane = 0; lane <
215 (uint32_t)(lt_settings->link_settings.lane_count); lane++) {
216
217 dpcd_lane[lane].bits.VOLTAGE_SWING_SET =
218 (uint8_t)(lt_settings->lane_settings[lane].VOLTAGE_SWING);
219 dpcd_lane[lane].bits.PRE_EMPHASIS_SET =
220 (uint8_t)(lt_settings->lane_settings[lane].PRE_EMPHASIS);
221
222 dpcd_lane[lane].bits.MAX_SWING_REACHED =
223 (lt_settings->lane_settings[lane].VOLTAGE_SWING ==
224 VOLTAGE_SWING_MAX_LEVEL ? 1 : 0);
225 dpcd_lane[lane].bits.MAX_PRE_EMPHASIS_REACHED =
226 (lt_settings->lane_settings[lane].PRE_EMPHASIS ==
227 PRE_EMPHASIS_MAX_LEVEL ? 1 : 0);
228 }
229
230 /* concatenate everything into one buffer*/
231
232 size_in_bytes = lt_settings->link_settings.lane_count * sizeof(dpcd_lane[0]);
233
234 /* 0x00103 - 0x00102: offset of LANE0_SET from the LT base address */
235 memmove(
236 &dpcd_lt_buffer[DPCD_ADDRESS_LANE0_SET - dpcd_base_lt_offset],
237 dpcd_lane,
238 size_in_bytes);
239
240 dm_logger_write(link->ctx->logger, LOG_HW_LINK_TRAINING,
241 "%s:\n %x VS set = %x PE set = %x \
242 max VS Reached = %x max PE Reached = %x\n",
243 __func__,
244 DPCD_ADDRESS_LANE0_SET,
245 dpcd_lane[0].bits.VOLTAGE_SWING_SET,
246 dpcd_lane[0].bits.PRE_EMPHASIS_SET,
247 dpcd_lane[0].bits.MAX_SWING_REACHED,
248 dpcd_lane[0].bits.MAX_PRE_EMPHASIS_REACHED);
249
250 if (edp_workaround) {
251 /* for eDP write in 2 parts because the 5-byte burst is
252 * causing issues on some eDP panels (EPR#366724)
253 */
254 core_link_write_dpcd(
255 link,
256 DPCD_ADDRESS_TRAINING_PATTERN_SET,
257 &dpcd_pattern.raw,
258 sizeof(dpcd_pattern.raw) );
259
260 core_link_write_dpcd(
261 link,
262 DPCD_ADDRESS_LANE0_SET,
263 (uint8_t *)(dpcd_lane),
264 size_in_bytes);
265
266 } else
267 /* write it all in (1 + number-of-lanes)-byte burst*/
268 core_link_write_dpcd(
269 link,
270 dpcd_base_lt_offset,
271 dpcd_lt_buffer,
272 size_in_bytes + sizeof(dpcd_pattern.raw) );
273
274 link->public.cur_lane_setting = lt_settings->lane_settings[0];
275 }
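/*
 * Illustrative layout of dpcd_lt_buffer above for a 4-lane link
 * (assuming the standard DPCD training block starting at 0x102):
 *   [0]    -> TRAINING_PATTERN_SET (0x102)
 *   [1..4] -> TRAINING_LANE0_SET..TRAINING_LANE3_SET (0x103..0x106)
 * which is why the non-eDP path writes size_in_bytes +
 * sizeof(dpcd_pattern.raw) bytes in a single burst.
 */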
276
277 static bool is_cr_done(enum dc_lane_count ln_count,
278 union lane_status *dpcd_lane_status)
279 {
280 bool done = true;
281 uint32_t lane;
282 /* are all LANEx_CR_DONE bits set? */
283 for (lane = 0; lane < (uint32_t)(ln_count); lane++) {
284 if (!dpcd_lane_status[lane].bits.CR_DONE_0)
285 done = false;
286 }
287 return done;
288
289 }
290
291 static bool is_ch_eq_done(enum dc_lane_count ln_count,
292 union lane_status *dpcd_lane_status,
293 union lane_align_status_updated *lane_status_updated)
294 {
295 bool done = true;
296 uint32_t lane;
297 if (!lane_status_updated->bits.INTERLANE_ALIGN_DONE)
298 done = false;
299 else {
300 for (lane = 0; lane < (uint32_t)(ln_count); lane++) {
301 if (!dpcd_lane_status[lane].bits.SYMBOL_LOCKED_0 ||
302 !dpcd_lane_status[lane].bits.CHANNEL_EQ_DONE_0)
303 done = false;
304 }
305 }
306 return done;
307
308 }
309
310 static void update_drive_settings(
311 struct link_training_settings *dest,
312 struct link_training_settings src)
313 {
314 uint32_t lane;
315 for (lane = 0; lane < src.link_settings.lane_count; lane++) {
316 dest->lane_settings[lane].VOLTAGE_SWING =
317 src.lane_settings[lane].VOLTAGE_SWING;
318 dest->lane_settings[lane].PRE_EMPHASIS =
319 src.lane_settings[lane].PRE_EMPHASIS;
320 dest->lane_settings[lane].POST_CURSOR2 =
321 src.lane_settings[lane].POST_CURSOR2;
322 }
323 }
324
325 static uint8_t get_nibble_at_index(const uint8_t *buf,
326 uint32_t index)
327 {
328 uint8_t nibble;
329 nibble = buf[index / 2];
330
331 if (index % 2)
332 nibble >>= 4;
333 else
334 nibble &= 0x0F;
335
336 return nibble;
337 }
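/*
 * Example: with buf[0] == 0x4A, get_nibble_at_index(buf, 0) returns the
 * low nibble 0x0A and get_nibble_at_index(buf, 1) returns the high
 * nibble 0x04; this matches the DPCD packing of two per-lane fields
 * per byte (e.g. lane 0/1 status in one byte, lane 2/3 in the next).
 */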
338
339 static enum dc_pre_emphasis get_max_pre_emphasis_for_voltage_swing(
340 enum dc_voltage_swing voltage)
341 {
342 enum dc_pre_emphasis pre_emphasis;
343 pre_emphasis = PRE_EMPHASIS_MAX_LEVEL;
344
345 if (voltage <= VOLTAGE_SWING_MAX_LEVEL)
346 pre_emphasis = voltage_swing_to_pre_emphasis[voltage];
347
348 return pre_emphasis;
349
350 }
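/*
 * With the voltage_swing_to_pre_emphasis[] table at the top of this
 * file, the mapping is: swing level 0 -> up to PRE_EMPHASIS_LEVEL3,
 * level 1 -> up to LEVEL2, level 2 -> up to LEVEL1, and level 3
 * (max swing) -> PRE_EMPHASIS_DISABLED, i.e. higher swing leaves less
 * headroom for pre-emphasis.
 */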
351
352 static void find_max_drive_settings(
353 const struct link_training_settings *link_training_setting,
354 struct link_training_settings *max_lt_setting)
355 {
356 uint32_t lane;
357 struct dc_lane_settings max_requested;
358
359 max_requested.VOLTAGE_SWING =
360 link_training_setting->
361 lane_settings[0].VOLTAGE_SWING;
362 max_requested.PRE_EMPHASIS =
363 link_training_setting->
364 lane_settings[0].PRE_EMPHASIS;
365 /*max_requested.postCursor2 =
366 * link_training_setting->laneSettings[0].postCursor2;*/
367
368 /* Determine what the maximum of the requested settings are*/
369 for (lane = 1; lane < link_training_setting->link_settings.lane_count;
370 lane++) {
371 if (link_training_setting->lane_settings[lane].VOLTAGE_SWING >
372 max_requested.VOLTAGE_SWING)
373
374 max_requested.VOLTAGE_SWING =
375 link_training_setting->
376 lane_settings[lane].VOLTAGE_SWING;
377
378 if (link_training_setting->lane_settings[lane].PRE_EMPHASIS >
379 max_requested.PRE_EMPHASIS)
380 max_requested.PRE_EMPHASIS =
381 link_training_setting->
382 lane_settings[lane].PRE_EMPHASIS;
383
384 /*
385 if (link_training_setting->laneSettings[lane].postCursor2 >
386 max_requested.postCursor2)
387 {
388 max_requested.postCursor2 =
389 link_training_setting->laneSettings[lane].postCursor2;
390 }
391 */
392 }
393
394 /* make sure the requested settings are
395 * not higher than maximum settings*/
396 if (max_requested.VOLTAGE_SWING > VOLTAGE_SWING_MAX_LEVEL)
397 max_requested.VOLTAGE_SWING = VOLTAGE_SWING_MAX_LEVEL;
398
399 if (max_requested.PRE_EMPHASIS > PRE_EMPHASIS_MAX_LEVEL)
400 max_requested.PRE_EMPHASIS = PRE_EMPHASIS_MAX_LEVEL;
401 /*
402 if (max_requested.postCursor2 > PostCursor2_MaxLevel)
403 max_requested.postCursor2 = PostCursor2_MaxLevel;
404 */
405
406 /* make sure the pre-emphasis matches the voltage swing*/
407 if (max_requested.PRE_EMPHASIS >
408 get_max_pre_emphasis_for_voltage_swing(
409 max_requested.VOLTAGE_SWING))
410 max_requested.PRE_EMPHASIS =
411 get_max_pre_emphasis_for_voltage_swing(
412 max_requested.VOLTAGE_SWING);
413
414 /*
415 * Post Cursor2 levels are completely independent from
416 * pre-emphasis (Post Cursor1) levels. But Post Cursor2 levels
417 * can only be applied to each allowable combination of voltage
418 * swing and pre-emphasis levels */
419 /* if ( max_requested.postCursor2 >
420 * getMaxPostCursor2ForVoltageSwing(max_requested.voltageSwing))
421 * max_requested.postCursor2 =
422 * getMaxPostCursor2ForVoltageSwing(max_requested.voltageSwing);
423 */
424
425 max_lt_setting->link_settings.link_rate =
426 link_training_setting->link_settings.link_rate;
427 max_lt_setting->link_settings.lane_count =
428 link_training_setting->link_settings.lane_count;
429 max_lt_setting->link_settings.link_spread =
430 link_training_setting->link_settings.link_spread;
431
432 for (lane = 0; lane <
433 link_training_setting->link_settings.lane_count;
434 lane++) {
435 max_lt_setting->lane_settings[lane].VOLTAGE_SWING =
436 max_requested.VOLTAGE_SWING;
437 max_lt_setting->lane_settings[lane].PRE_EMPHASIS =
438 max_requested.PRE_EMPHASIS;
439 /*max_lt_setting->laneSettings[lane].postCursor2 =
440 * max_requested.postCursor2;
441 */
442 }
443
444 }
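/*
 * Worked example (hypothetical request): if lane 0 asks for VS1/PE2 and
 * lane 1 asks for VS2/PE0, the per-field maxima are VS2 and PE2; PE2 is
 * then clamped by get_max_pre_emphasis_for_voltage_swing(VS2) to PE1,
 * and every lane in max_lt_setting is programmed with VS2/PE1.
 */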
445
446 static void get_lane_status_and_drive_settings(
447 struct core_link* link,
448 const struct link_training_settings *link_training_setting,
449 union lane_status *ln_status,
450 union lane_align_status_updated *ln_status_updated,
451 struct link_training_settings *req_settings)
452 {
453 uint8_t dpcd_buf[6] = {0};
454 union lane_adjust dpcd_lane_adjust[LANE_COUNT_DP_MAX] = {{{0}}};
455 struct link_training_settings request_settings = {{0}};
456 uint32_t lane;
457
458 memset(req_settings, '\0', sizeof(struct link_training_settings));
459
460 core_link_read_dpcd(
461 link,
462 DPCD_ADDRESS_LANE_01_STATUS,
463 (uint8_t *)(dpcd_buf),
464 sizeof(dpcd_buf));
465
466 for (lane = 0; lane <
467 (uint32_t)(link_training_setting->link_settings.lane_count);
468 lane++) {
469
470 ln_status[lane].raw =
471 get_nibble_at_index(&dpcd_buf[0], lane);
472 dpcd_lane_adjust[lane].raw =
473 get_nibble_at_index(&dpcd_buf[4], lane);
474 }
475
476 ln_status_updated->raw = dpcd_buf[2];
477
478 dm_logger_write(link->ctx->logger, LOG_HW_LINK_TRAINING,
479 "%s:\n%x Lane01Status = %x\n %x Lane23Status = %x\n ",
480 __func__,
481 DPCD_ADDRESS_LANE_01_STATUS, dpcd_buf[0],
482 DPCD_ADDRESS_LANE_23_STATUS, dpcd_buf[1]);
483
484 dm_logger_write(link->ctx->logger, LOG_HW_LINK_TRAINING,
485 "%s:\n %x Lane01AdjustRequest = %x\n %x Lane23AdjustRequest = %x\n",
486 __func__,
487 DPCD_ADDRESS_ADJUST_REQUEST_LANE0_1,
488 dpcd_buf[4],
489 DPCD_ADDRESS_ADJUST_REQUEST_LANE2_3,
490 dpcd_buf[5]);
491
492 /*copy to req_settings*/
493 request_settings.link_settings.lane_count =
494 link_training_setting->link_settings.lane_count;
495 request_settings.link_settings.link_rate =
496 link_training_setting->link_settings.link_rate;
497 request_settings.link_settings.link_spread =
498 link_training_setting->link_settings.link_spread;
499
500 for (lane = 0; lane <
501 (uint32_t)(link_training_setting->link_settings.lane_count);
502 lane++) {
503
504 request_settings.lane_settings[lane].VOLTAGE_SWING =
505 (enum dc_voltage_swing)(dpcd_lane_adjust[lane].bits.
506 VOLTAGE_SWING_LANE);
507 request_settings.lane_settings[lane].PRE_EMPHASIS =
508 (enum dc_pre_emphasis)(dpcd_lane_adjust[lane].bits.
509 PRE_EMPHASIS_LANE);
510 }
511
512 /* Note: for POST_CURSOR2, read the adjusted
513 * postcursor2 settings from
514 * DpcdAddress_AdjustRequestPostCursor2 =
515 * 0x020C (not implemented yet) */
516
517 /* we find the maximum of the requested settings across all lanes*/
518 /* and set this maximum for all lanes*/
519 find_max_drive_settings(&request_settings, req_settings);
520
521 /* if post cursor 2 is needed in the future,
522 * read DpcdAddress_AdjustRequestPostCursor2 = 0x020C
523 */
524
525 }
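/*
 * For reference, the 6-byte dpcd_buf read above maps onto the standard
 * DPCD status block (assuming the usual addresses):
 *   [0] 0x202 LANE0_1_STATUS         [1] 0x203 LANE2_3_STATUS
 *   [2] 0x204 LANE_ALIGN_STATUS_UPDATED
 *   [3] 0x205 SINK_STATUS
 *   [4] 0x206 ADJUST_REQUEST_LANE0_1 [5] 0x207 ADJUST_REQUEST_LANE2_3
 * which is why ln_status comes from dpcd_buf[0] and the adjust
 * requests from dpcd_buf[4].
 */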
526
527 static void dpcd_set_lane_settings(
528 struct core_link* link,
529 const struct link_training_settings *link_training_setting)
530 {
531 union dpcd_training_lane dpcd_lane[LANE_COUNT_DP_MAX] = {{{0}}};
532 uint32_t lane;
533
534 for (lane = 0; lane <
535 (uint32_t)(link_training_setting->
536 link_settings.lane_count);
537 lane++) {
538 dpcd_lane[lane].bits.VOLTAGE_SWING_SET =
539 (uint8_t)(link_training_setting->
540 lane_settings[lane].VOLTAGE_SWING);
541 dpcd_lane[lane].bits.PRE_EMPHASIS_SET =
542 (uint8_t)(link_training_setting->
543 lane_settings[lane].PRE_EMPHASIS);
544 dpcd_lane[lane].bits.MAX_SWING_REACHED =
545 (link_training_setting->
546 lane_settings[lane].VOLTAGE_SWING ==
547 VOLTAGE_SWING_MAX_LEVEL ? 1 : 0);
548 dpcd_lane[lane].bits.MAX_PRE_EMPHASIS_REACHED =
549 (link_training_setting->
550 lane_settings[lane].PRE_EMPHASIS ==
551 PRE_EMPHASIS_MAX_LEVEL ? 1 : 0);
552 }
553
554 core_link_write_dpcd(link,
555 DPCD_ADDRESS_LANE0_SET,
556 (uint8_t *)(dpcd_lane),
557 link_training_setting->link_settings.lane_count);
558
559 /*
560 if (LTSettings.link.rate == LinkRate_High2)
561 {
562 DpcdTrainingLaneSet2 dpcd_lane2[lane_count_DPMax] = {0};
563 for ( uint32_t lane = 0;
564 lane < lane_count_DPMax; lane++)
565 {
566 dpcd_lane2[lane].bits.post_cursor2_set =
567 static_cast<unsigned char>(
568 LTSettings.laneSettings[lane].postCursor2);
569 dpcd_lane2[lane].bits.max_post_cursor2_reached = 0;
570 }
571 m_pDpcdAccessSrv->WriteDpcdData(
572 DpcdAddress_Lane0Set2,
573 reinterpret_cast<unsigned char*>(dpcd_lane2),
574 LTSettings.link.lanes);
575 }
576 */
577
578 dm_logger_write(link->ctx->logger, LOG_HW_LINK_TRAINING,
579 "%s\n %x VS set = %x PE set = %x \
580 max VS Reached = %x max PE Reached = %x\n",
581 __func__,
582 DPCD_ADDRESS_LANE0_SET,
583 dpcd_lane[0].bits.VOLTAGE_SWING_SET,
584 dpcd_lane[0].bits.PRE_EMPHASIS_SET,
585 dpcd_lane[0].bits.MAX_SWING_REACHED,
586 dpcd_lane[0].bits.MAX_PRE_EMPHASIS_REACHED);
587
588 link->public.cur_lane_setting = link_training_setting->lane_settings[0];
589
590 }
591
592 static bool is_max_vs_reached(
593 const struct link_training_settings *lt_settings)
594 {
595 uint32_t lane;
596 for (lane = 0; lane <
597 (uint32_t)(lt_settings->link_settings.lane_count);
598 lane++) {
599 if (lt_settings->lane_settings[lane].VOLTAGE_SWING
600 == VOLTAGE_SWING_MAX_LEVEL)
601 return true;
602 }
603 return false;
604
605 }
606
607 void dc_link_dp_set_drive_settings(
608 struct dc_link *link,
609 struct link_training_settings *lt_settings)
610 {
611 struct core_link *core_link = DC_LINK_TO_CORE(link);
612 /* program ASIC PHY settings*/
613 dp_set_hw_lane_settings(core_link, lt_settings);
614
615 /* Notify DP sink the PHY settings from source */
616 dpcd_set_lane_settings(core_link, lt_settings);
617 }
618
619 static bool perform_post_lt_adj_req_sequence(
620 struct core_link *link,
621 struct link_training_settings *lt_settings)
622 {
623 enum dc_lane_count lane_count =
624 lt_settings->link_settings.lane_count;
625
626 uint32_t adj_req_count;
627 uint32_t adj_req_timer;
628 bool req_drv_setting_changed;
629 uint32_t lane;
630
631 req_drv_setting_changed = false;
632 for (adj_req_count = 0; adj_req_count < POST_LT_ADJ_REQ_LIMIT;
633 adj_req_count++) {
634
635 req_drv_setting_changed = false;
636
637 for (adj_req_timer = 0;
638 adj_req_timer < POST_LT_ADJ_REQ_TIMEOUT;
639 adj_req_timer++) {
640
641 struct link_training_settings req_settings;
642 union lane_status dpcd_lane_status[LANE_COUNT_DP_MAX];
643 union lane_align_status_updated
644 dpcd_lane_status_updated;
645
646 get_lane_status_and_drive_settings(
647 link,
648 lt_settings,
649 dpcd_lane_status,
650 &dpcd_lane_status_updated,
651 &req_settings);
652
653 if (dpcd_lane_status_updated.bits.
654 POST_LT_ADJ_REQ_IN_PROGRESS == 0)
655 return true;
656
657 if (!is_cr_done(lane_count, dpcd_lane_status))
658 return false;
659
660 if (!is_ch_eq_done(
661 lane_count,
662 dpcd_lane_status,
663 &dpcd_lane_status_updated))
664 return false;
665
666 for (lane = 0; lane < (uint32_t)(lane_count); lane++) {
667
668 if (lt_settings->
669 lane_settings[lane].VOLTAGE_SWING !=
670 req_settings.lane_settings[lane].
671 VOLTAGE_SWING ||
672 lt_settings->lane_settings[lane].PRE_EMPHASIS !=
673 req_settings.lane_settings[lane].PRE_EMPHASIS) {
674
675 req_drv_setting_changed = true;
676 break;
677 }
678 }
679
680 if (req_drv_setting_changed) {
681 update_drive_settings(
682 lt_settings, req_settings);
683
684 dc_link_dp_set_drive_settings(&link->public,
685 lt_settings);
686 break;
687 }
688
689 msleep(1);
690 }
691
692 if (!req_drv_setting_changed) {
693 dm_logger_write(link->ctx->logger, LOG_WARNING,
694 "%s: Post Link Training Adjust Request Timed out\n",
695 __func__);
696
697 ASSERT(0);
698 return true;
699 }
700 }
701 dm_logger_write(link->ctx->logger, LOG_WARNING,
702 "%s: Post Link Training Adjust Request limit reached\n",
703 __func__);
704
705 ASSERT(0);
706 return true;
707
708 }
709
710 static enum hw_dp_training_pattern get_supported_tp(struct core_link *link)
711 {
712 enum hw_dp_training_pattern highest_tp = HW_DP_TRAINING_PATTERN_2;
713 struct encoder_feature_support *features = &link->link_enc->features;
714 struct dpcd_caps *dpcd_caps = &link->dpcd_caps;
715
716 if (features->flags.bits.IS_TPS3_CAPABLE)
717 highest_tp = HW_DP_TRAINING_PATTERN_3;
718
719 if (features->flags.bits.IS_TPS4_CAPABLE)
720 highest_tp = HW_DP_TRAINING_PATTERN_4;
721
722 if (dpcd_caps->max_down_spread.bits.TPS4_SUPPORTED &&
723 highest_tp >= HW_DP_TRAINING_PATTERN_4)
724 return HW_DP_TRAINING_PATTERN_4;
725
726 if (dpcd_caps->max_ln_count.bits.TPS3_SUPPORTED &&
727 highest_tp >= HW_DP_TRAINING_PATTERN_3)
728 return HW_DP_TRAINING_PATTERN_3;
729
730 return HW_DP_TRAINING_PATTERN_2;
731 }
732
733 static bool perform_channel_equalization_sequence(
734 struct core_link *link,
735 struct link_training_settings *lt_settings)
736 {
737 struct link_training_settings req_settings;
738 enum hw_dp_training_pattern hw_tr_pattern;
739 uint32_t retries_ch_eq;
740 enum dc_lane_count lane_count = lt_settings->link_settings.lane_count;
741 union lane_align_status_updated dpcd_lane_status_updated = {{0}};
742 union lane_status dpcd_lane_status[LANE_COUNT_DP_MAX] = {{{0}}};
743
744 hw_tr_pattern = get_supported_tp(link);
745
746 dp_set_hw_training_pattern(link, hw_tr_pattern);
747
748 for (retries_ch_eq = 0; retries_ch_eq <= LINK_TRAINING_MAX_RETRY_COUNT;
749 retries_ch_eq++) {
750
751 dp_set_hw_lane_settings(link, lt_settings);
752
753 /* 2. update DPCD*/
754 if (!retries_ch_eq)
755 /* EPR #361076 - write as a 5-byte burst,
756 * but only for the first iteration */
757 dpcd_set_lt_pattern_and_lane_settings(
758 link,
759 lt_settings,
760 hw_tr_pattern);
761 else
762 dpcd_set_lane_settings(link, lt_settings);
763
764 /* 3. wait for receiver to lock-on*/
765 wait_for_training_aux_rd_interval(link, 400);
766
767 /* 4. Read lane status and requested
768 * drive settings as set by the sink*/
769
770 get_lane_status_and_drive_settings(
771 link,
772 lt_settings,
773 dpcd_lane_status,
774 &dpcd_lane_status_updated,
775 &req_settings);
776
777 /* 5. check CR done*/
778 if (!is_cr_done(lane_count, dpcd_lane_status))
779 return false;
780
781 /* 6. check CHEQ done*/
782 if (is_ch_eq_done(lane_count,
783 dpcd_lane_status,
784 &dpcd_lane_status_updated))
785 return true;
786
787 /* 7. update VS/PE/PC2 in lt_settings*/
788 update_drive_settings(lt_settings, req_settings);
789 }
790
791 return false;
792
793 }
794
795 static bool perform_clock_recovery_sequence(
796 struct core_link *link,
797 struct link_training_settings *lt_settings)
798 {
799 uint32_t retries_cr;
800 uint32_t retry_count;
801 uint32_t lane;
802 struct link_training_settings req_settings;
803 enum dc_lane_count lane_count =
804 lt_settings->link_settings.lane_count;
805 enum hw_dp_training_pattern hw_tr_pattern = HW_DP_TRAINING_PATTERN_1;
806 union lane_status dpcd_lane_status[LANE_COUNT_DP_MAX];
807 union lane_align_status_updated dpcd_lane_status_updated;
808
809 retries_cr = 0;
810 retry_count = 0;
811 /* initial drive setting (VS/PE/PC2)*/
812 for (lane = 0; lane < LANE_COUNT_DP_MAX; lane++) {
813 lt_settings->lane_settings[lane].VOLTAGE_SWING =
814 VOLTAGE_SWING_LEVEL0;
815 lt_settings->lane_settings[lane].PRE_EMPHASIS =
816 PRE_EMPHASIS_DISABLED;
817 lt_settings->lane_settings[lane].POST_CURSOR2 =
818 POST_CURSOR2_DISABLED;
819 }
820
821 dp_set_hw_training_pattern(link, hw_tr_pattern);
822
823 /* The Synaptics MST hub can put the LT into an infinite
824 * loop by switching the VS between level 0 and level 1
825 * continuously; here we try for CR lock for
826 * LINK_TRAINING_MAX_CR_RETRY attempts.
827 */
828 while ((retries_cr < LINK_TRAINING_MAX_RETRY_COUNT) &&
829 (retry_count < LINK_TRAINING_MAX_CR_RETRY)) {
830
831 memset(&dpcd_lane_status, '\0', sizeof(dpcd_lane_status));
832 memset(&dpcd_lane_status_updated, '\0',
833 sizeof(dpcd_lane_status_updated));
834
835 /* 1. call HWSS to set lane settings*/
836 dp_set_hw_lane_settings(
837 link,
838 lt_settings);
839
840 /* 2. update DPCD of the receiver*/
841 if (!retries_cr)
842 /* EPR #361076 - write as a 5-byte burst,
843 * but only for the first iteration. */
844 dpcd_set_lt_pattern_and_lane_settings(
845 link,
846 lt_settings,
847 hw_tr_pattern);
848 else
849 dpcd_set_lane_settings(
850 link,
851 lt_settings);
852
853 /* 3. wait for receiver to lock-on*/
854 wait_for_training_aux_rd_interval(
855 link,
856 100);
857
858 /* 4. Read lane status and requested drive
859 * settings as set by the sink
860 */
861 get_lane_status_and_drive_settings(
862 link,
863 lt_settings,
864 dpcd_lane_status,
865 &dpcd_lane_status_updated,
866 &req_settings);
867
868 /* 5. check CR done*/
869 if (is_cr_done(lane_count, dpcd_lane_status))
870 return true;
871
872 /* 6. max VS reached*/
873 if (is_max_vs_reached(lt_settings))
874 return false;
875
876 /* 7. same voltage*/
877 /* Note: VS same for all lanes,
878 * so comparing first lane is sufficient*/
879 if (lt_settings->lane_settings[0].VOLTAGE_SWING ==
880 req_settings.lane_settings[0].VOLTAGE_SWING)
881 retries_cr++;
882 else
883 retries_cr = 0;
884
885 /* 8. update VS/PE/PC2 in lt_settings*/
886 update_drive_settings(lt_settings, req_settings);
887
888 retry_count++;
889 }
890
891 if (retry_count >= LINK_TRAINING_MAX_CR_RETRY) {
892 ASSERT(0);
893 dm_logger_write(link->ctx->logger, LOG_ERROR,
894 "%s: Link Training Error, could not \
895 get CR after %d tries. \
896 Possibly voltage swing issue", __func__,
897 LINK_TRAINING_MAX_CR_RETRY);
898
899 }
900
901 return false;
902 }
903
904 static inline bool perform_link_training_int(
905 struct core_link *link,
906 struct link_training_settings *lt_settings,
907 bool status)
908 {
909 union lane_count_set lane_count_set = { {0} };
910 union dpcd_training_pattern dpcd_pattern = { {0} };
911
912 /* 3. set training not in progress*/
913 dpcd_pattern.v1_4.TRAINING_PATTERN_SET = DPCD_TRAINING_PATTERN_VIDEOIDLE;
914 dpcd_set_training_pattern(link, dpcd_pattern);
915
916 /* 4. mainlink output idle pattern*/
917 dp_set_hw_test_pattern(link, DP_TEST_PATTERN_VIDEO_MODE, NULL, 0);
918
919 /*
920 * 5. post training adjust if required
921 * If the upstream DPTX and downstream DPRX both support TPS4,
922 * TPS4 must be used instead of POST_LT_ADJ_REQ.
923 */
924 if (link->dpcd_caps.max_ln_count.bits.POST_LT_ADJ_REQ_SUPPORTED != 1 &&
925 get_supported_tp(link) == HW_DP_TRAINING_PATTERN_4)
926 return status;
927
928 if (status &&
929 perform_post_lt_adj_req_sequence(link, lt_settings) == false)
930 status = false;
931
932 lane_count_set.bits.LANE_COUNT_SET = lt_settings->link_settings.lane_count;
933 lane_count_set.bits.ENHANCED_FRAMING = 1;
934 lane_count_set.bits.POST_LT_ADJ_REQ_GRANTED = 0;
935
936 core_link_write_dpcd(
937 link,
938 DPCD_ADDRESS_LANE_COUNT_SET,
939 &lane_count_set.raw,
940 sizeof(lane_count_set));
941
942 return status;
943 }
944
945 bool dc_link_dp_perform_link_training(
946 struct dc_link *link,
947 const struct dc_link_settings *link_setting,
948 bool skip_video_pattern)
949 {
950 struct core_link *core_link = DC_LINK_TO_CORE(link);
951 bool status;
952
953 char *link_rate = "Unknown";
954 struct link_training_settings lt_settings;
955
956 status = false;
957 memset(&lt_settings, '\0', sizeof(lt_settings));
958
959 lt_settings.link_settings.link_rate = link_setting->link_rate;
960 lt_settings.link_settings.lane_count = link_setting->lane_count;
961
962 /*@todo[vdevulap] move SS to LS, should not be handled by displaypath*/
963
964 /* TODO hard coded to SS for now
965 * lt_settings.link_settings.link_spread =
966 * dal_display_path_is_ss_supported(
967 * path_mode->display_path) ?
968 * LINK_SPREAD_05_DOWNSPREAD_30KHZ :
969 * LINK_SPREAD_DISABLED;
970 */
971 lt_settings.link_settings.link_spread = LINK_SPREAD_05_DOWNSPREAD_30KHZ;
972
973 /* 1. set link rate, lane count and spread*/
974 dpcd_set_link_settings(core_link, &lt_settings);
975
976 /* 2. perform link training (this also resets the
977 * 'link training done' state to false) */
978 if (perform_clock_recovery_sequence(core_link, &lt_settings)) {
979
980 if (perform_channel_equalization_sequence(core_link,
981 &lt_settings))
982 status = true;
983 }
984
985 if (status || !skip_video_pattern)
986 status = perform_link_training_int(core_link,
987 &lt_settings, status);
988
989 /* 6. print status message*/
990 switch (lt_settings.link_settings.link_rate) {
991
992 case LINK_RATE_LOW:
993 link_rate = "RBR";
994 break;
995 case LINK_RATE_HIGH:
996 link_rate = "HBR";
997 break;
998 case LINK_RATE_HIGH2:
999 link_rate = "HBR2";
1000 break;
1001 case LINK_RATE_RBR2:
1002 link_rate = "RBR2";
1003 break;
1004 case LINK_RATE_HIGH3:
1005 link_rate = "HBR3";
1006 break;
1007 default:
1008 break;
1009 }
1010
1011 /* Connectivity log: link training */
1012 CONN_MSG_LT(core_link, "%sx%d %s VS=%d, PE=%d",
1013 link_rate,
1014 lt_settings.link_settings.lane_count,
1015 status ? "pass" : "fail",
1016 lt_settings.lane_settings[0].VOLTAGE_SWING,
1017 lt_settings.lane_settings[0].PRE_EMPHASIS);
1018
1019 return status;
1020 }
1021
1022
1023 bool perform_link_training_with_retries(
1024 struct core_link *link,
1025 const struct dc_link_settings *link_setting,
1026 bool skip_video_pattern,
1027 int attempts)
1028 {
1029 uint8_t j;
1030 uint8_t delay_between_attempts = LINK_TRAINING_RETRY_DELAY;
1031
1032 for (j = 0; j < attempts; ++j) {
1033
1034 if (dc_link_dp_perform_link_training(
1035 &link->public,
1036 link_setting,
1037 skip_video_pattern))
1038 return true;
1039
1040 msleep(delay_between_attempts);
1041 delay_between_attempts += LINK_TRAINING_RETRY_DELAY;
1042 }
1043
1044 return false;
1045 }
1046
1047 /* TODO: add more checks to see if the link supports the requested link configuration */
1048 static bool is_link_setting_supported(
1049 const struct dc_link_settings *link_setting,
1050 const struct dc_link_settings *max_link_setting)
1051 {
1052 if (link_setting->lane_count > max_link_setting->lane_count ||
1053 link_setting->link_rate > max_link_setting->link_rate)
1054 return false;
1055 return true;
1056 }
1057
1058 static uint32_t get_link_training_fallback_table_len(
1059 struct core_link *link)
1060 {
1061 return ARRAY_SIZE(link_training_fallback_table);
1062 }
1063
1064 static const struct dc_link_settings *get_link_training_fallback_table(
1065 struct core_link *link, uint32_t i)
1066 {
1067 return &link_training_fallback_table[i];
1068 }
1069
1070 static bool exceeded_limit_link_setting(
1071 const struct dc_link_settings *link_setting,
1072 const struct dc_link_settings *limit_link_setting)
1073 {
1074 return (link_setting->lane_count * link_setting->link_rate
1075 > limit_link_setting->lane_count * limit_link_setting->link_rate ?
1076 true : false);
1077 }
1078
1079 static struct dc_link_settings get_max_link_cap(struct core_link *link)
1080 {
1081 /* Set Default link settings */
1082 struct dc_link_settings max_link_cap = {LANE_COUNT_FOUR, LINK_RATE_HIGH,
1083 LINK_SPREAD_05_DOWNSPREAD_30KHZ};
1084
1085 /* Higher link settings based on feature supported */
1086 if (link->link_enc->features.flags.bits.IS_HBR2_CAPABLE)
1087 max_link_cap.link_rate = LINK_RATE_HIGH2;
1088
1089 if (link->link_enc->features.flags.bits.IS_HBR3_CAPABLE)
1090 max_link_cap.link_rate = LINK_RATE_HIGH3;
1091
1092 /* Lower link settings based on sink's link cap */
1093 if (link->public.reported_link_cap.lane_count < max_link_cap.lane_count)
1094 max_link_cap.lane_count =
1095 link->public.reported_link_cap.lane_count;
1096 if (link->public.reported_link_cap.link_rate < max_link_cap.link_rate)
1097 max_link_cap.link_rate =
1098 link->public.reported_link_cap.link_rate;
1099 if (link->public.reported_link_cap.link_spread <
1100 max_link_cap.link_spread)
1101 max_link_cap.link_spread =
1102 link->public.reported_link_cap.link_spread;
1103 return max_link_cap;
1104 }
1105
1106 bool dp_hbr_verify_link_cap(
1107 struct core_link *link,
1108 struct dc_link_settings *known_limit_link_setting)
1109 {
1110 struct dc_link_settings max_link_cap = {0};
1111 bool success;
1112 bool skip_link_training;
1113 const struct dc_link_settings *cur;
1114 bool skip_video_pattern;
1115 uint32_t i;
1116 struct clock_source *dp_cs;
1117 enum clock_source_id dp_cs_id = CLOCK_SOURCE_ID_EXTERNAL;
1118
1119 success = false;
1120 skip_link_training = false;
1121
1122 max_link_cap = get_max_link_cap(link);
1123
1124 /* TODO implement override and monitor patch later */
1125
1126 /* try to train the link from high to low to
1127 * find the physical link capability
1128 */
1129 /* disable any PHY setup possibly done by BIOS; it will be done by the driver itself */
1130 dp_disable_link_phy(link, link->public.connector_signal);
1131
1132 dp_cs = link->dc->res_pool->dp_clock_source;
1133
1134 if (dp_cs)
1135 dp_cs_id = dp_cs->id;
1136 else {
1137 /*
1138 * dp clock source is not initialized for some reason.
1139 * Should not happen, CLOCK_SOURCE_ID_EXTERNAL will be used
1140 */
1141 ASSERT(dp_cs);
1142 }
1143
1144 for (i = 0; i < get_link_training_fallback_table_len(link) &&
1145 !success; i++) {
1146 cur = get_link_training_fallback_table(link, i);
1147
1148 if (known_limit_link_setting->lane_count != LANE_COUNT_UNKNOWN &&
1149 exceeded_limit_link_setting(cur,
1150 known_limit_link_setting))
1151 continue;
1152
1153 if (!is_link_setting_supported(cur, &max_link_cap))
1154 continue;
1155
1156 skip_video_pattern = true;
1157 if (cur->link_rate == LINK_RATE_LOW)
1158 skip_video_pattern = false;
1159
1160 dp_enable_link_phy(
1161 link,
1162 link->public.connector_signal,
1163 dp_cs_id,
1164 cur);
1165
1166 if (skip_link_training)
1167 success = true;
1168 else {
1169 success = dc_link_dp_perform_link_training(
1170 &link->public,
1171 cur,
1172 skip_video_pattern);
1173 }
1174
1175 if (success)
1176 link->public.verified_link_cap = *cur;
1177
1178 /* always disable the link before trying another
1179 * setting or before returning; we'll enable it later
1180 * based on the actual mode we're driving
1181 */
1182 dp_disable_link_phy(link, link->public.connector_signal);
1183 }
1184
1185 /* Link Training failed for all Link Settings
1186 * (Lane Count is still unknown)
1187 */
1188 if (!success) {
1189 /* If all LT fails for all settings,
1190 * set verified = failed safe (1 lane low)
1191 */
1192 link->public.verified_link_cap.lane_count = LANE_COUNT_ONE;
1193 link->public.verified_link_cap.link_rate = LINK_RATE_LOW;
1194
1195 link->public.verified_link_cap.link_spread =
1196 LINK_SPREAD_DISABLED;
1197 }
1198
1199 link->public.max_link_setting = link->public.verified_link_cap;
1200
1201 return success;
1202 }
1203
1204 static uint32_t bandwidth_in_kbps_from_timing(
1205 const struct dc_crtc_timing *timing)
1206 {
1207 uint32_t bits_per_channel = 0;
1208 uint32_t kbps;
1209 switch (timing->display_color_depth) {
1210
1211 case COLOR_DEPTH_666:
1212 bits_per_channel = 6;
1213 break;
1214 case COLOR_DEPTH_888:
1215 bits_per_channel = 8;
1216 break;
1217 case COLOR_DEPTH_101010:
1218 bits_per_channel = 10;
1219 break;
1220 case COLOR_DEPTH_121212:
1221 bits_per_channel = 12;
1222 break;
1223 case COLOR_DEPTH_141414:
1224 bits_per_channel = 14;
1225 break;
1226 case COLOR_DEPTH_161616:
1227 bits_per_channel = 16;
1228 break;
1229 default:
1230 break;
1231 }
1232 ASSERT(bits_per_channel != 0);
1233
1234 kbps = timing->pix_clk_khz;
1235 kbps *= bits_per_channel;
1236
1237 if (timing->flags.Y_ONLY != 1)
1238 /* only Y-only reduces the bandwidth to 1/3 compared to RGB */
1239 kbps *= 3;
1240
1241 return kbps;
1242
1243 }
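/*
 * Worked example: a 1920x1080@60 RGB 8bpc timing with pix_clk_khz =
 * 148500 gives 148500 * 8 * 3 = 3564000 kbps of required stream
 * bandwidth; the same timing as Y-only would skip the final "* 3".
 */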
1244
1245 static uint32_t bandwidth_in_kbps_from_link_settings(
1246 const struct dc_link_settings *link_setting)
1247 {
1248 uint32_t link_rate_in_kbps = link_setting->link_rate *
1249 LINK_RATE_REF_FREQ_IN_KHZ;
1250
1251 uint32_t lane_count = link_setting->lane_count;
1252 uint32_t kbps = link_rate_in_kbps;
1253 kbps *= lane_count;
1254 kbps *= 8; /* 8 bits per byte*/
1255
1256 return kbps;
1257
1258 }
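/*
 * Worked example (assuming LINK_RATE_REF_FREQ_IN_KHZ is the 27 MHz
 * link-symbol reference, so link_rate codes are in 0.27 Gbps units):
 * HBR2 (code 20) x 4 lanes -> 20 * 27000 kHz = 540000 ksymbols/s per
 * lane, * 4 lanes * 8 payload bits per 8b/10b symbol = 17280000 kbps,
 * comfortably above the 3564000 kbps 1080p example above.
 */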
1259
1260 bool dp_validate_mode_timing(
1261 struct core_link *link,
1262 const struct dc_crtc_timing *timing)
1263 {
1264 uint32_t req_bw;
1265 uint32_t max_bw;
1266
1267 const struct dc_link_settings *link_setting;
1268
1269 /*always DP fail safe mode*/
1270 if (timing->pix_clk_khz == (uint32_t)25175 &&
1271 timing->h_addressable == (uint32_t)640 &&
1272 timing->v_addressable == (uint32_t)480)
1273 return true;
1274
1275 /* We always use verified link settings */
1276 link_setting = &link->public.verified_link_cap;
1277
1278 /* TODO: DYNAMIC_VALIDATION needs to be implemented */
1279 /*if (flags.DYNAMIC_VALIDATION == 1 &&
1280 link->public.verified_link_cap.lane_count != LANE_COUNT_UNKNOWN)
1281 link_setting = &link->public.verified_link_cap;
1282 */
1283
1284 req_bw = bandwidth_in_kbps_from_timing(timing);
1285 max_bw = bandwidth_in_kbps_from_link_settings(link_setting);
1286
1287 if (req_bw <= max_bw) {
1288 /* remember the biggest mode here; during
1289 * initial link training (to get
1290 * verified_link_cap), LS sends an event to the
1291 * upper layer that it cannot train at the
1292 * reported cap, and the upper layer re-enumerates modes.
1293 * This is not necessary if the lower
1294 * verified_link_cap is enough to drive
1295 * all the modes */
1296
1297 /* TODO: DYNAMIC_VALIDATION needs to be implemented */
1298 /* if (flags.DYNAMIC_VALIDATION == 1)
1299 dpsst->max_req_bw_for_verified_linkcap = dal_max(
1300 dpsst->max_req_bw_for_verified_linkcap, req_bw); */
1301 return true;
1302 } else
1303 return false;
1304 }
1305
1306 void decide_link_settings(struct core_stream *stream,
1307 struct dc_link_settings *link_setting)
1308 {
1309
1310 const struct dc_link_settings *cur_ls;
1311 struct core_link* link;
1312 uint32_t req_bw;
1313 uint32_t link_bw;
1314 uint32_t i;
1315
1316 req_bw = bandwidth_in_kbps_from_timing(
1317 &stream->public.timing);
1318
1319 /* if a preferred setting is specified through AMDDP, use it if it's enough
1320 * to drive the mode
1321 */
1322 link = stream->sink->link;
1323
1324 if ((link->public.reported_link_cap.lane_count != LANE_COUNT_UNKNOWN) &&
1325 (link->public.reported_link_cap.link_rate <=
1326 link->public.verified_link_cap.link_rate)) {
1327
1328 link_bw = bandwidth_in_kbps_from_link_settings(
1329 &link->public.reported_link_cap);
1330
1331 if (req_bw < link_bw) {
1332 *link_setting = link->public.reported_link_cap;
1333 return;
1334 }
1335 }
1336
1337 /* search for first suitable setting for the requested
1338 * bandwidth
1339 */
1340 for (i = 0; i < get_link_training_fallback_table_len(link); i++) {
1341
1342 cur_ls = get_link_training_fallback_table(link, i);
1343
1344 link_bw =
1345 bandwidth_in_kbps_from_link_settings(
1346 cur_ls);
1347
1348 if (req_bw < link_bw) {
1349 if (is_link_setting_supported(
1350 cur_ls,
1351 &link->public.max_link_setting)) {
1352 *link_setting = *cur_ls;
1353 return;
1354 }
1355 }
1356 }
1357
1358 BREAK_TO_DEBUGGER();
1359 ASSERT(link->public.verified_link_cap.lane_count !=
1360 LANE_COUNT_UNKNOWN);
1361
1362 *link_setting = link->public.verified_link_cap;
1363 }
1364
1365 /*************************Short Pulse IRQ***************************/
1366
1367 static bool hpd_rx_irq_check_link_loss_status(
1368 struct core_link *link,
1369 union hpd_irq_data *hpd_irq_dpcd_data)
1370 {
1371 uint8_t irq_reg_rx_power_state;
1372 enum dc_status dpcd_result = DC_ERROR_UNEXPECTED;
1373 union lane_status lane_status;
1374 uint32_t lane;
1375 bool sink_status_changed;
1376 bool return_code;
1377
1378 sink_status_changed = false;
1379 return_code = false;
1380
1381 if (link->public.cur_link_settings.lane_count == 0)
1382 return return_code;
1383 /*1. Check that we can handle interrupt: Not in FS DOS,
1384 * Not in "Display Timeout" state, Link is trained.
1385 */
1386
1387 dpcd_result = core_link_read_dpcd(link,
1388 DPCD_ADDRESS_POWER_STATE,
1389 &irq_reg_rx_power_state,
1390 sizeof(irq_reg_rx_power_state));
1391
1392 if (dpcd_result != DC_OK) {
1393 irq_reg_rx_power_state = DP_PWR_STATE_D0;
1394 dm_logger_write(link->ctx->logger, LOG_HW_HPD_IRQ,
1395 "%s: DPCD read failed to obtain power state.\n",
1396 __func__);
1397 }
1398
1399 if (irq_reg_rx_power_state == DP_PWR_STATE_D0) {
1400
1401 /*2. Check that Link Status changed, before re-training.*/
1402
1403 /*parse lane status*/
1404 for (lane = 0;
1405 lane < link->public.cur_link_settings.lane_count;
1406 lane++) {
1407
1408 /* check whether the status of lanes 0,1
1409 * changed, via DpcdAddress_Lane01Status (0x202) */
1410 lane_status.raw = get_nibble_at_index(
1411 &hpd_irq_dpcd_data->bytes.lane01_status.raw,
1412 lane);
1413
1414 if (!lane_status.bits.CHANNEL_EQ_DONE_0 ||
1415 !lane_status.bits.CR_DONE_0 ||
1416 !lane_status.bits.SYMBOL_LOCKED_0) {
1417 /* if channel equalization, clock
1418 * recovery or symbol lock is dropped,
1419 * consider the link as dropped, i.e.
1420 * the DP sink status has changed */
1421 sink_status_changed = true;
1422 break;
1423 }
1424
1425 }
1426
1427 /* Check interlane align.*/
1428 if (sink_status_changed ||
1429 !hpd_irq_dpcd_data->bytes.lane_status_updated.bits.
1430 INTERLANE_ALIGN_DONE) {
1431
1432 dm_logger_write(link->ctx->logger, LOG_HW_HPD_IRQ,
1433 "%s: Link Status changed.\n",
1434 __func__);
1435
1436 return_code = true;
1437 }
1438 }
1439
1440 return return_code;
1441 }
1442
1443 static enum dc_status read_hpd_rx_irq_data(
1444 struct core_link *link,
1445 union hpd_irq_data *irq_data)
1446 {
1447 /* The HW reads 16 bytes from 200h on HPD,
1448 * but if we get an AUX_DEFER, the HW cannot retry
1449 * and this causes the CTS tests 4.3.2.1 - 4.3.2.4 to
1450 * fail, so we now explicitly read 6 bytes which is
1451 * the req from the above mentioned test cases.
1452 */
1453 return core_link_read_dpcd(
1454 link,
1455 DPCD_ADDRESS_SINK_COUNT,
1456 irq_data->raw,
1457 sizeof(union hpd_irq_data));
1458 }
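/*
 * The 6 bytes starting at DPCD_ADDRESS_SINK_COUNT therefore cover
 * (assuming the standard DPCD layout): 0x200 SINK_COUNT,
 * 0x201 DEVICE_SERVICE_IRQ_VECTOR, 0x202/0x203 lane 0-3 status,
 * 0x204 LANE_ALIGN_STATUS_UPDATED and 0x205 SINK_STATUS - exactly the
 * fields that hpd_rx_irq_check_link_loss_status() and
 * dc_link_handle_hpd_rx_irq() look at.
 */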
1459
1460 static bool allow_hpd_rx_irq(const struct core_link *link)
1461 {
1462 /*
1463 * Don't handle RX IRQ unless one of following is met:
1464 * 1) The link is established (cur_link_settings != unknown)
1465 * 2) We kicked off MST detection
1466 * 3) We know we're dealing with an active dongle
1467 */
1468
1469 if ((link->public.cur_link_settings.lane_count != LANE_COUNT_UNKNOWN) ||
1470 (link->public.type == dc_connection_mst_branch) ||
1471 is_dp_active_dongle(link))
1472 return true;
1473
1474 return false;
1475 }
1476
1477 static bool handle_hpd_irq_psr_sink(const struct core_link *link)
1478 {
1479 union dpcd_psr_configuration psr_configuration;
1480
1481 if (link->public.psr_caps.psr_version == 0)
1482 return false;
1483
1484 dal_ddc_service_read_dpcd_data(
1485 link->ddc,
1486 368 /*DpcdAddress_PSR_Enable_Cfg*/,
1487 &psr_configuration.raw,
1488 sizeof(psr_configuration.raw));
1489
1490 if (psr_configuration.bits.ENABLE) {
1491 unsigned char dpcdbuf[3] = {0};
1492 union psr_error_status psr_error_status;
1493 union psr_sink_psr_status psr_sink_psr_status;
1494
1495 dal_ddc_service_read_dpcd_data(
1496 link->ddc,
1497 0x2006 /*DpcdAddress_PSR_Error_Status*/,
1498 (unsigned char *) dpcdbuf,
1499 sizeof(dpcdbuf));
1500
1501 /*DPCD 2006h ERROR STATUS*/
1502 psr_error_status.raw = dpcdbuf[0];
1503 /*DPCD 2008h SINK PANEL SELF REFRESH STATUS*/
1504 psr_sink_psr_status.raw = dpcdbuf[2];
1505
1506 if (psr_error_status.bits.LINK_CRC_ERROR ||
1507 psr_error_status.bits.RFB_STORAGE_ERROR) {
1508 /* Acknowledge and clear error bits */
1509 dal_ddc_service_write_dpcd_data(
1510 link->ddc,
1511 8198 /*DpcdAddress_PSR_Error_Status*/,
1512 &psr_error_status.raw,
1513 sizeof(psr_error_status.raw));
1514
1515 /* PSR error, disable and re-enable PSR */
1516 dc_link_set_psr_enable(&link->public, false);
1517 dc_link_set_psr_enable(&link->public, true);
1518
1519 return true;
1520 } else if (psr_sink_psr_status.bits.SINK_SELF_REFRESH_STATUS ==
1521 PSR_SINK_STATE_ACTIVE_DISPLAY_FROM_SINK_RFB){
1522 /* No error is detected, PSR is active.
1523 * We should return with IRQ_HPD handled without
1524 * checking for loss of sync since PSR would have
1525 * powered down main link.
1526 */
1527 return true;
1528 }
1529 }
1530 return false;
1531 }
1532
1533 static void dp_test_send_link_training(struct core_link *link)
1534 {
1535 struct dc_link_settings link_settings;
1536
1537 core_link_read_dpcd(
1538 link,
1539 DPCD_ADDRESS_TEST_LANE_COUNT,
1540 (unsigned char *)(&link_settings.lane_count),
1541 1);
1542 core_link_read_dpcd(
1543 link,
1544 DPCD_ADDRESS_TEST_LINK_RATE,
1545 (unsigned char *)(&link_settings.link_rate),
1546 1);
1547
1548 /* Set preferred link settings */
1549 link->public.verified_link_cap.lane_count = link_settings.lane_count;
1550 link->public.verified_link_cap.link_rate = link_settings.link_rate;
1551
1552 dp_retrain_link(link);
1553 }
1554
1555 static void dp_test_send_phy_test_pattern(struct core_link *link)
1556 {
1557 union phy_test_pattern dpcd_test_pattern;
1558 union lane_adjust dpcd_lane_adjustment[2];
1559 unsigned char dpcd_post_cursor_2_adjustment = 0;
1560 unsigned char test_80_bit_pattern[
1561 (DPCD_ADDRESS_TEST_80BIT_CUSTOM_PATTERN_79_72 -
1562 DPCD_ADDRESS_TEST_80BIT_CUSTOM_PATTERN_7_0)+1] = {0};
1563 enum dp_test_pattern test_pattern;
1564 struct dc_link_training_settings link_settings;
1565 union lane_adjust dpcd_lane_adjust;
1566 unsigned int lane;
1567 struct link_training_settings link_training_settings;
1568 int i = 0;
1569
1570 dpcd_test_pattern.raw = 0;
1571 memset(dpcd_lane_adjustment, 0, sizeof(dpcd_lane_adjustment));
1572 memset(&link_settings, 0, sizeof(link_settings));
1573
1574 /* get phy test pattern and pattern parameters from DP receiver */
1575 core_link_read_dpcd(
1576 link,
1577 DPCD_ADDRESS_TEST_PHY_PATTERN,
1578 &dpcd_test_pattern.raw,
1579 sizeof(dpcd_test_pattern));
1580 core_link_read_dpcd(
1581 link,
1582 DPCD_ADDRESS_ADJUST_REQUEST_LANE0_1,
1583 &dpcd_lane_adjustment[0].raw,
1584 sizeof(dpcd_lane_adjustment));
1585
1586 /*get post cursor 2 parameters
1587 * For DP 1.1a or earlier, this DPCD register's value is 0
1588 * For DP 1.2 or later:
1589 * Bits 1:0 = POST_CURSOR2_LANE0; Bits 3:2 = POST_CURSOR2_LANE1
1590 * Bits 5:4 = POST_CURSOR2_LANE2; Bits 7:6 = POST_CURSOR2_LANE3
1591 */
1592 core_link_read_dpcd(
1593 link,
1594 DPCD_ADDRESS_ADJUST_REQUEST_POST_CURSOR2,
1595 &dpcd_post_cursor_2_adjustment,
1596 sizeof(dpcd_post_cursor_2_adjustment));
1597
1598 /* translate request */
1599 switch (dpcd_test_pattern.bits.PATTERN) {
1600 case PHY_TEST_PATTERN_D10_2:
1601 test_pattern = DP_TEST_PATTERN_D102;
1602 break;
1603 case PHY_TEST_PATTERN_SYMBOL_ERROR:
1604 test_pattern = DP_TEST_PATTERN_SYMBOL_ERROR;
1605 break;
1606 case PHY_TEST_PATTERN_PRBS7:
1607 test_pattern = DP_TEST_PATTERN_PRBS7;
1608 break;
1609 case PHY_TEST_PATTERN_80BIT_CUSTOM:
1610 test_pattern = DP_TEST_PATTERN_80BIT_CUSTOM;
1611 break;
1612 case PHY_TEST_PATTERN_HBR2_COMPLIANCE_EYE:
1613 test_pattern = DP_TEST_PATTERN_HBR2_COMPLIANCE_EYE;
1614 break;
1615 default:
1616 test_pattern = DP_TEST_PATTERN_VIDEO_MODE;
1617 break;
1618 }
1619
1620 if (test_pattern == DP_TEST_PATTERN_80BIT_CUSTOM)
1621 core_link_read_dpcd(
1622 link,
1623 DPCD_ADDRESS_TEST_80BIT_CUSTOM_PATTERN_7_0,
1624 test_80_bit_pattern,
1625 sizeof(test_80_bit_pattern));
1626
1627 /* prepare link training settings */
1628 link_settings.link = link->public.cur_link_settings;
1629
1630 for (lane = 0; lane <
1631 (unsigned int)(link->public.cur_link_settings.lane_count);
1632 lane++) {
1633 dpcd_lane_adjust.raw =
1634 get_nibble_at_index(&dpcd_lane_adjustment[0].raw, lane);
1635 link_settings.lane_settings[lane].VOLTAGE_SWING =
1636 (enum dc_voltage_swing)
1637 (dpcd_lane_adjust.bits.VOLTAGE_SWING_LANE);
1638 link_settings.lane_settings[lane].PRE_EMPHASIS =
1639 (enum dc_pre_emphasis)
1640 (dpcd_lane_adjust.bits.PRE_EMPHASIS_LANE);
1641 link_settings.lane_settings[lane].POST_CURSOR2 =
1642 (enum dc_post_cursor2)
1643 ((dpcd_post_cursor_2_adjustment >> (lane * 2)) & 0x03);
1644 }
1645
1646 for (i = 0; i < 4; i++)
1647 link_training_settings.lane_settings[i] =
1648 link_settings.lane_settings[i];
1649 link_training_settings.link_settings = link_settings.link;
1650 link_training_settings.allow_invalid_msa_timing_param = false;
1651 /*Usage: Measure DP physical lane signal
1652 * by DP SI test equipment automatically.
1653 * PHY test pattern request is generated by equipment via HPD interrupt.
1654 * HPD needs to be active all the time.
1655 * Do not touch it.
1656 * forward request to DS
1657 */
1658 dc_link_dp_set_test_pattern(
1659 &link->public,
1660 test_pattern,
1661 &link_training_settings,
1662 test_80_bit_pattern,
1663 (DPCD_ADDRESS_TEST_80BIT_CUSTOM_PATTERN_79_72 -
1664 DPCD_ADDRESS_TEST_80BIT_CUSTOM_PATTERN_7_0)+1);
1665 }
1666
1667 static void dp_test_send_link_test_pattern(struct core_link *link)
1668 {
1669 union link_test_pattern dpcd_test_pattern;
1670 union test_misc dpcd_test_params;
1671 enum dp_test_pattern test_pattern;
1672
1673 memset(&dpcd_test_pattern, 0, sizeof(dpcd_test_pattern));
1674 memset(&dpcd_test_params, 0, sizeof(dpcd_test_params));
1675
1676 /* get link test pattern and pattern parameters */
1677 core_link_read_dpcd(
1678 link,
1679 DPCD_ADDRESS_TEST_PATTERN,
1680 &dpcd_test_pattern.raw,
1681 sizeof(dpcd_test_pattern));
1682 core_link_read_dpcd(
1683 link,
1684 DPCD_ADDRESS_TEST_MISC1,
1685 &dpcd_test_params.raw,
1686 sizeof(dpcd_test_params));
1687
1688 switch (dpcd_test_pattern.bits.PATTERN) {
1689 case LINK_TEST_PATTERN_COLOR_RAMP:
1690 test_pattern = DP_TEST_PATTERN_COLOR_RAMP;
1691 break;
1692 case LINK_TEST_PATTERN_VERTICAL_BARS:
1693 test_pattern = DP_TEST_PATTERN_VERTICAL_BARS;
1694 break; /* black and white */
1695 case LINK_TEST_PATTERN_COLOR_SQUARES:
1696 test_pattern = (dpcd_test_params.bits.DYN_RANGE ==
1697 TEST_DYN_RANGE_VESA ?
1698 DP_TEST_PATTERN_COLOR_SQUARES :
1699 DP_TEST_PATTERN_COLOR_SQUARES_CEA);
1700 break;
1701 default:
1702 test_pattern = DP_TEST_PATTERN_VIDEO_MODE;
1703 break;
1704 }
1705
1706 dc_link_dp_set_test_pattern(
1707 &link->public,
1708 test_pattern,
1709 NULL,
1710 NULL,
1711 0);
1712 }
1713
1714 static void handle_automated_test(struct core_link *link)
1715 {
1716 union test_request test_request;
1717 union test_response test_response;
1718
1719 memset(&test_request, 0, sizeof(test_request));
1720 memset(&test_response, 0, sizeof(test_response));
1721
1722 core_link_read_dpcd(
1723 link,
1724 DPCD_ADDRESS_TEST_REQUEST,
1725 &test_request.raw,
1726 sizeof(union test_request));
1727 if (test_request.bits.LINK_TRAINING) {
1728 /* ACK first to let DP RX test box monitor LT sequence */
1729 test_response.bits.ACK = 1;
1730 core_link_write_dpcd(
1731 link,
1732 DPCD_ADDRESS_TEST_RESPONSE,
1733 &test_response.raw,
1734 sizeof(test_response));
1735 dp_test_send_link_training(link);
1736 /* no further acknowledgement is needed */
1737 test_response.bits.ACK = 0;
1738 }
1739 if (test_request.bits.LINK_TEST_PATTRN) {
1740 dp_test_send_link_test_pattern(link);
1741 link->public.compliance_test_state.bits.
1742 SET_TEST_PATTERN_PENDING = 1;
1743 }
1744 if (test_request.bits.PHY_TEST_PATTERN) {
1745 dp_test_send_phy_test_pattern(link);
1746 test_response.bits.ACK = 1;
1747 }
1748 if (!test_request.raw)
1749 /* no requests, revert all test signals
1750 * TODO: revert all test signals
1751 */
1752 test_response.bits.ACK = 1;
1753 /* send request acknowledgment */
1754 if (test_response.bits.ACK)
1755 core_link_write_dpcd(
1756 link,
1757 DPCD_ADDRESS_TEST_RESPONSE,
1758 &test_response.raw,
1759 sizeof(test_response));
1760 }
1761
1762 bool dc_link_handle_hpd_rx_irq(const struct dc_link *dc_link)
1763 {
1764 struct core_link *link = DC_LINK_TO_LINK(dc_link);
1765 union hpd_irq_data hpd_irq_dpcd_data = {{{{0}}}};
1766 union device_service_irq device_service_clear = {0};
1767 enum dc_status result = DDC_RESULT_UNKNOWN;
1768 bool status = false;
1769 /* For use cases related to downstream connection status change,
1770 * PSR and device auto test, refer to function handle_sst_hpd_irq
1771 * in DAL2.1*/
1772
1773 dm_logger_write(link->ctx->logger, LOG_HW_HPD_IRQ,
1774 "%s: Got short pulse HPD on link %d\n",
1775 __func__, link->public.link_index);
1776
1777 /* All the "handle_hpd_irq_xxx()" methods
1778 * should be called only after
1779 * dal_dpsst_ls_read_hpd_irq_data.
1780 * The order of calls is important, too.
1781 */
1782 result = read_hpd_rx_irq_data(link, &hpd_irq_dpcd_data);
1783
1784 if (result != DC_OK) {
1785 dm_logger_write(link->ctx->logger, LOG_HW_HPD_IRQ,
1786 "%s: DPCD read failed to obtain irq data\n",
1787 __func__);
1788 return false;
1789 }
1790
1791 if (hpd_irq_dpcd_data.bytes.device_service_irq.bits.AUTOMATED_TEST) {
1792 device_service_clear.bits.AUTOMATED_TEST = 1;
1793 core_link_write_dpcd(
1794 link,
1795 DPCD_ADDRESS_DEVICE_SERVICE_IRQ_VECTOR,
1796 &device_service_clear.raw,
1797 sizeof(device_service_clear.raw));
1798 device_service_clear.raw = 0;
1799 handle_automated_test(link);
1800 return false;
1801 }
1802
1803 if (!allow_hpd_rx_irq(link)) {
1804 dm_logger_write(link->ctx->logger, LOG_HW_HPD_IRQ,
1805 "%s: skipping HPD handling on %d\n",
1806 __func__, link->public.link_index);
1807 return false;
1808 }
1809
1810 if (handle_hpd_irq_psr_sink(link))
1811 /* PSR-related error was detected and handled */
1812 return true;
1813
1814 /* If PSR-related error handled, Main link may be off,
1815 * so do not handle as a normal sink status change interrupt.
1816 */
1817
1818 /* check if we have MST msg and return since we poll for it */
1819 if (hpd_irq_dpcd_data.bytes.device_service_irq.
1820 bits.DOWN_REP_MSG_RDY ||
1821 hpd_irq_dpcd_data.bytes.device_service_irq.
1822 bits.UP_REQ_MSG_RDY)
1823 return false;
1824
1825 /* For now we only handle 'Downstream port status' case.
1826 * If the sink count changed, it means the
1827 * downstream port status changed, so
1828 * DM should call DC to do the detection. */
1829 if (hpd_rx_irq_check_link_loss_status(
1830 link,
1831 &hpd_irq_dpcd_data)) {
1832 /* Connectivity log: link loss */
1833 CONN_DATA_LINK_LOSS(link,
1834 hpd_irq_dpcd_data.raw,
1835 sizeof(hpd_irq_dpcd_data),
1836 "Status: ");
1837
1838 perform_link_training_with_retries(link,
1839 &link->public.cur_link_settings,
1840 true, LINK_TRAINING_ATTEMPTS);
1841
1842 status = false;
1843 }
1844
1845 if (link->public.type == dc_connection_active_dongle &&
1846 hpd_irq_dpcd_data.bytes.sink_cnt.bits.SINK_COUNT
1847 != link->dpcd_sink_count)
1848 status = true;
1849
1850 /* reasons for HPD RX:
1851 * 1. Link Loss - ie Re-train the Link
1852 * 2. MST sideband message
1853 * 3. Automated Test - ie. Internal Commit
1854 * 4. CP (copy protection) - (not interesting for DM???)
1855 * 5. DRR
1856 * 6. Downstream Port status changed
1857 * - i.e. Detect - this is the only one
1858 * which is interesting for DM because
1859 * it must call dc_link_detect.
1860 */
1861 return status;
1862 }
1863
1864 /*query dpcd for version and mst cap addresses*/
1865 bool is_mst_supported(struct core_link *link)
1866 {
1867 bool mst = false;
1868 enum dc_status st = DC_OK;
1869 union dpcd_rev rev;
1870 union mstm_cap cap;
1871
1872 rev.raw = 0;
1873 cap.raw = 0;
1874
1875 st = core_link_read_dpcd(link, DPCD_ADDRESS_DPCD_REV, &rev.raw,
1876 sizeof(rev));
1877
1878 if (st == DC_OK && rev.raw >= DPCD_REV_12) {
1879
1880 st = core_link_read_dpcd(link, DPCD_ADDRESS_MSTM_CAP,
1881 &cap.raw, sizeof(cap));
1882 if (st == DC_OK && cap.bits.MST_CAP == 1)
1883 mst = true;
1884 }
1885 return mst;
1886
1887 }
1888
1889 bool is_dp_active_dongle(const struct core_link *link)
1890 {
1891 enum display_dongle_type dongle_type = link->dpcd_caps.dongle_type;
1892
1893 return (dongle_type == DISPLAY_DONGLE_DP_VGA_CONVERTER) ||
1894 (dongle_type == DISPLAY_DONGLE_DP_DVI_CONVERTER) ||
1895 (dongle_type == DISPLAY_DONGLE_DP_HDMI_CONVERTER);
1896 }
1897
1898 static void get_active_converter_info(
1899 uint8_t data, struct core_link *link)
1900 {
1901 union dp_downstream_port_present ds_port = { .byte = data };
1902
1903 /* decode converter info*/
1904 if (!ds_port.fields.PORT_PRESENT) {
1905 link->dpcd_caps.dongle_type = DISPLAY_DONGLE_NONE;
1906 ddc_service_set_dongle_type(link->ddc,
1907 link->dpcd_caps.dongle_type);
1908 return;
1909 }
1910
1911 switch (ds_port.fields.PORT_TYPE) {
1912 case DOWNSTREAM_VGA:
1913 link->dpcd_caps.dongle_type = DISPLAY_DONGLE_DP_VGA_CONVERTER;
1914 break;
1915 case DOWNSTREAM_DVI_HDMI:
1916 /* At this point we don't know whether it is DVI or HDMI;
1917 * assume DVI. */
1918 link->dpcd_caps.dongle_type = DISPLAY_DONGLE_DP_DVI_CONVERTER;
1919 break;
1920 default:
1921 link->dpcd_caps.dongle_type = DISPLAY_DONGLE_NONE;
1922 break;
1923 }
1924
1925 if (link->dpcd_caps.dpcd_rev.raw >= DCS_DPCD_REV_11) {
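/* Read the downstream port 0 capability bytes at DPCD 0x80 (four
 * bytes when detailed cap info is available) to refine the coarse
 * PORT_TYPE classification made above.
 */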
1926 uint8_t det_caps[4];
1927 union dwnstream_port_caps_byte0 *port_caps =
1928 (union dwnstream_port_caps_byte0 *)det_caps;
1929 core_link_read_dpcd(link, DPCD_ADDRESS_DWN_STRM_PORT0_CAPS,
1930 det_caps, sizeof(det_caps));
1931
1932 switch (port_caps->bits.DWN_STRM_PORTX_TYPE) {
1933 case DOWN_STREAM_DETAILED_VGA:
1934 link->dpcd_caps.dongle_type =
1935 DISPLAY_DONGLE_DP_VGA_CONVERTER;
1936 break;
1937 case DOWN_STREAM_DETAILED_DVI:
1938 link->dpcd_caps.dongle_type =
1939 DISPLAY_DONGLE_DP_DVI_CONVERTER;
1940 break;
1941 case DOWN_STREAM_DETAILED_HDMI:
1942 link->dpcd_caps.dongle_type =
1943 DISPLAY_DONGLE_DP_HDMI_CONVERTER;
1944
1945 if (ds_port.fields.DETAILED_CAPS) {
1946
1947 union dwnstream_port_caps_byte3_hdmi
1948 hdmi_caps = {.raw = det_caps[3] };
1949
1950 link->dpcd_caps.is_dp_hdmi_s3d_converter =
1951 hdmi_caps.bits.FRAME_SEQ_TO_FRAME_PACK;
1952 }
1953 break;
1954 }
1955 }
1956
1957 ddc_service_set_dongle_type(link->ddc, link->dpcd_caps.dongle_type);
1958
1959 {
1960 struct dp_device_vendor_id dp_id;
1961
1962 /* read IEEE branch device id */
1963 core_link_read_dpcd(
1964 link,
1965 DPCD_ADDRESS_BRANCH_DEVICE_ID_START,
1966 (uint8_t *)&dp_id,
1967 sizeof(dp_id));
1968
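/* Pack the 3-byte IEEE OUI MSB-first; dp_wa_power_up_0010FA() keys
 * off this id to decide which dongles need the receiver kept powered.
 */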
1969 link->dpcd_caps.branch_dev_id =
1970 (dp_id.ieee_oui[0] << 16) +
1971 (dp_id.ieee_oui[1] << 8) +
1972 dp_id.ieee_oui[2];
1973
1974 memmove(
1975 link->dpcd_caps.branch_dev_name,
1976 dp_id.ieee_device_id,
1977 sizeof(dp_id.ieee_device_id));
1978 }
1979
1980 {
1981 struct dp_sink_hw_fw_revision dp_hw_fw_revision;
1982
1983 core_link_read_dpcd(
1984 link,
1985 DPCD_ADDRESS_BRANCH_REVISION_START,
1986 (uint8_t *)&dp_hw_fw_revision,
1987 sizeof(dp_hw_fw_revision));
1988
1989 link->dpcd_caps.branch_hw_revision =
1990 dp_hw_fw_revision.ieee_hw_rev;
1991 }
1992 }
1993
1994 static void dp_wa_power_up_0010FA(struct core_link *link, uint8_t *dpcd_data,
1995 int length)
1996 {
1997 int retry = 0;
1998 union dp_downstream_port_present ds_port = { 0 };
1999
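/* A DPCD_REV of zero means the earlier capability read returned no
 * valid data; power up the receiver and retry the read a few times
 * before giving up.
 */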
2000 if (!link->dpcd_caps.dpcd_rev.raw) {
2001 do {
2002 dp_receiver_power_ctrl(link, true);
2003 core_link_read_dpcd(link, DPCD_ADDRESS_DPCD_REV,
2004 dpcd_data, length);
2005 link->dpcd_caps.dpcd_rev.raw = dpcd_data[
2006 DPCD_ADDRESS_DPCD_REV -
2007 DPCD_ADDRESS_DPCD_REV];
2008 } while (retry++ < 4 && !link->dpcd_caps.dpcd_rev.raw);
2009 }
2010
2011 ds_port.byte = dpcd_data[DPCD_ADDRESS_DOWNSTREAM_PORT_PRESENT -
2012 DPCD_ADDRESS_DPCD_REV];
2013
2014 if (link->dpcd_caps.dongle_type == DISPLAY_DONGLE_DP_VGA_CONVERTER) {
2015 switch (link->dpcd_caps.branch_dev_id) {
2016 /* Some active dongles (DP-VGA, DP-DLDVI converters) power down
2017 * all internal circuits, including AUX communication, which
2018 * prevents reading the DPCD table and EDID (a spec violation).
2019 * The encoder will skip DP RX power down on disable_output to
2020 * keep the receiver powered at all times. */
2021 case DP_BRANCH_DEVICE_ID_1:
2022 case DP_BRANCH_DEVICE_ID_4:
2023 link->wa_flags.dp_keep_receiver_powered = true;
2024 break;
2025
2026 /* TODO: May need work around for other dongles. */
2027 default:
2028 link->wa_flags.dp_keep_receiver_powered = false;
2029 break;
2030 }
2031 } else
2032 link->wa_flags.dp_keep_receiver_powered = false;
2033 }
2034
2035 static void retrieve_psr_link_cap(struct core_link *link,
2036 enum edp_revision edp_revision)
2037 {
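/* PSR capability registers (DPCD 0x070/0x071) are only meaningful on
 * eDP 1.3 and later panels.
 */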
2038 if (edp_revision >= EDP_REVISION_13) {
2039 core_link_read_dpcd(link,
2040 DPCD_ADDRESS_PSR_SUPPORT_VER,
2041 (uint8_t *)(&link->public.psr_caps),
2042 sizeof(link->public.psr_caps));
2043 if (link->public.psr_caps.psr_version != 0) {
2044 unsigned char psr_capability = 0;
2045
2046 core_link_read_dpcd(link,
2047 DPCD_ADDRESS_PSR_CAPABILITY,
2048 &psr_capability,
2049 sizeof(psr_capability));
2050 /* Bit 0 determines whether fast link training is
2051 * required on PSR exit. If set to 0, link training
2052 * is required. If set to 1, sink must lock within
2053 * five Idle Patterns after Main Link is turned on.
2054 */
2055 link->public.psr_caps.psr_exit_link_training_required
2056 = !(psr_capability & 0x1);
2057
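/* Bits 3:1 encode the sink's required RFB setup time:
 * setup time = 55 us * (6 - value), i.e. 330 us down to 0 us.
 */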
2058 psr_capability = (psr_capability >> 1) & 0x7;
2059 link->public.psr_caps.psr_rfb_setup_time =
2060 55 * (6 - psr_capability);
2061 }
2062 }
2063 }
2064
2065 static void retrieve_link_cap(struct core_link *link)
2066 {
2067 uint8_t dpcd_data[DPCD_ADDRESS_TRAINING_AUX_RD_INTERVAL - DPCD_ADDRESS_DPCD_REV + 1];
2068
2069 union down_stream_port_count down_strm_port_count;
2070 union edp_configuration_cap edp_config_cap;
2071 union dp_downstream_port_present ds_port = { 0 };
2072
2073 memset(dpcd_data, '\0', sizeof(dpcd_data));
2074 memset(&down_strm_port_count,
2075 '\0', sizeof(union down_stream_port_count));
2076 memset(&edp_config_cap, '\0',
2077 sizeof(union edp_configuration_cap));
2078
2079 core_link_read_dpcd(
2080 link,
2081 DPCD_ADDRESS_DPCD_REV,
2082 dpcd_data,
2083 sizeof(dpcd_data));
2084
2085 link->dpcd_caps.dpcd_rev.raw =
2086 dpcd_data[DPCD_ADDRESS_DPCD_REV - DPCD_ADDRESS_DPCD_REV];
2087
2088 {
2089 union training_aux_rd_interval aux_rd_interval;
2090
2091 aux_rd_interval.raw = dpcd_data[
2092 DPCD_ADDRESS_TRAINING_AUX_RD_INTERVAL - DPCD_ADDRESS_DPCD_REV];
2093
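/* If the extended receiver capability field is present, the real
 * capabilities live in the extended block starting at DPCD 0x2200,
 * so re-read the same range from there.
 */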
2094 if (aux_rd_interval.bits.EXT_RECIEVER_CAP_FIELD_PRESENT == 1) {
2095 core_link_read_dpcd(
2096 link,
2097 DPCD_ADDRESS_DP13_DPCD_REV,
2098 dpcd_data,
2099 sizeof(dpcd_data));
2100 }
2101 }
2102
2103 ds_port.byte = dpcd_data[DPCD_ADDRESS_DOWNSTREAM_PORT_PRESENT -
2104 DPCD_ADDRESS_DPCD_REV];
2105
2106 get_active_converter_info(ds_port.byte, link);
2107
2108 dp_wa_power_up_0010FA(link, dpcd_data, sizeof(dpcd_data));
2109
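/* down_strm_port_count is zero-initialized above and never filled
 * from dpcd_data here, so IGNORE_MSA_TIMING_PARAM always reads as 0.
 * A minimal fix, assuming a DPCD_ADDRESS_DOWN_STREAM_PORT_COUNT
 * define (DPCD 0x007) exists alongside the other offsets used here:
 *
 *	down_strm_port_count.raw = dpcd_data[
 *		DPCD_ADDRESS_DOWN_STREAM_PORT_COUNT - DPCD_ADDRESS_DPCD_REV];
 */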
2110 link->dpcd_caps.allow_invalid_MSA_timing_param =
2111 down_strm_port_count.bits.IGNORE_MSA_TIMING_PARAM;
2112
2113 link->dpcd_caps.max_ln_count.raw = dpcd_data[
2114 DPCD_ADDRESS_MAX_LANE_COUNT - DPCD_ADDRESS_DPCD_REV];
2115
2116 link->dpcd_caps.max_down_spread.raw = dpcd_data[
2117 DPCD_ADDRESS_MAX_DOWNSPREAD - DPCD_ADDRESS_DPCD_REV];
2118
2119 link->public.reported_link_cap.lane_count =
2120 link->dpcd_caps.max_ln_count.bits.MAX_LANE_COUNT;
2121 link->public.reported_link_cap.link_rate = dpcd_data[
2122 DPCD_ADDRESS_MAX_LINK_RATE - DPCD_ADDRESS_DPCD_REV];
2123 link->public.reported_link_cap.link_spread =
2124 link->dpcd_caps.max_down_spread.bits.MAX_DOWN_SPREAD ?
2125 LINK_SPREAD_05_DOWNSPREAD_30KHZ : LINK_SPREAD_DISABLED;
2126
2127 edp_config_cap.raw = dpcd_data[
2128 DPCD_ADDRESS_EDP_CONFIG_CAP - DPCD_ADDRESS_DPCD_REV];
2129 link->dpcd_caps.panel_mode_edp =
2130 edp_config_cap.bits.ALT_SCRAMBLER_RESET;
2131
2132 link->edp_revision = DPCD_EDP_REVISION_EDP_UNKNOWN;
2133
2134 link->public.test_pattern_enabled = false;
2135 link->public.compliance_test_state.raw = 0;
2136
2137 link->public.psr_caps.psr_exit_link_training_required = false;
2138 link->public.psr_caps.psr_frame_capture_indication_req = false;
2139 link->public.psr_caps.psr_rfb_setup_time = 0;
2140 link->public.psr_caps.psr_sdp_transmit_line_num_deadline = 0;
2141 link->public.psr_caps.psr_version = 0;
2142
2143 /* read sink count */
2144 core_link_read_dpcd(link,
2145 DPCD_ADDRESS_SINK_COUNT,
2146 &link->dpcd_caps.sink_count.raw,
2147 sizeof(link->dpcd_caps.sink_count.raw));
2148
2149 /* Display control registers starting at DPCD 700h are only valid and
2150 * enabled if this eDP config cap bit is set. */
2151 if (edp_config_cap.bits.DPCD_DISPLAY_CONTROL_CAPABLE) {
2152 /* Read the Panel's eDP revision at DPCD 700h. */
2153 core_link_read_dpcd(link,
2154 DPCD_ADDRESS_EDP_REV,
2155 (uint8_t *)(&link->edp_revision),
2156 sizeof(link->edp_revision));
2157 }
2158
2159 /* Connectivity log: detection */
2160 CONN_DATA_DETECT(link, dpcd_data, sizeof(dpcd_data), "Rx Caps: ");
2161
2162 /* TODO: Confirm whether retrieve_psr_link_cap is needed. */
2163 retrieve_psr_link_cap(link, link->edp_revision);
2164 }
2165
2166 void detect_dp_sink_caps(struct core_link *link)
2167 {
2168 retrieve_link_cap(link);
2169
2170 /* dc init_hw has already powered up the encoder using the
2171 * connector's default signal. For native DP there is no
2172 * need to power up the encoder again. If the signal is not
2173 * native DP, hw_init may need to check the signal or power
2174 * up the encoder here.
2175 */
2176
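/* For MST, the reported caps are taken as the verified caps directly;
 * for SST, dp_hbr_verify_link_cap() confirms them by training the
 * link, which may settle on lower settings than reported.
 */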
2177 if (is_mst_supported(link)) {
2178 link->public.verified_link_cap = link->public.reported_link_cap;
2179 } else {
2180 dp_hbr_verify_link_cap(link,
2181 &link->public.reported_link_cap);
2182 }
2183 /* TODO save sink caps in link->sink */
2184 }
2185
2186 void dc_link_dp_enable_hpd(const struct dc_link *link)
2187 {
2188 struct core_link *core_link = DC_LINK_TO_CORE(link);
2189 struct link_encoder *encoder = core_link->link_enc;
2190
2191 if (encoder != NULL && encoder->funcs->enable_hpd != NULL)
2192 encoder->funcs->enable_hpd(encoder);
2193 }
2194
2195 void dc_link_dp_disable_hpd(const struct dc_link *link)
2196 {
2197 struct core_link *core_link = DC_LINK_TO_CORE(link);
2198 struct link_encoder *encoder = core_link->link_enc;
2199
2200 if (encoder != NULL && encoder->funcs->enable_hpd != NULL)
2201 encoder->funcs->disable_hpd(encoder);
2202 }
2203
2204 static bool is_dp_phy_pattern(enum dp_test_pattern test_pattern)
2205 {
2206 if (test_pattern == DP_TEST_PATTERN_D102 ||
2207 test_pattern == DP_TEST_PATTERN_SYMBOL_ERROR ||
2208 test_pattern == DP_TEST_PATTERN_PRBS7 ||
2209 test_pattern == DP_TEST_PATTERN_80BIT_CUSTOM ||
2210 test_pattern == DP_TEST_PATTERN_HBR2_COMPLIANCE_EYE ||
2211 test_pattern == DP_TEST_PATTERN_TRAINING_PATTERN1 ||
2212 test_pattern == DP_TEST_PATTERN_TRAINING_PATTERN2 ||
2213 test_pattern == DP_TEST_PATTERN_TRAINING_PATTERN3 ||
2214 test_pattern == DP_TEST_PATTERN_TRAINING_PATTERN4 ||
2215 test_pattern == DP_TEST_PATTERN_VIDEO_MODE)
2216 return true;
2217 else
2218 return false;
2219 }
2220
2221 static void set_crtc_test_pattern(struct core_link *link,
2222 struct pipe_ctx *pipe_ctx,
2223 enum dp_test_pattern test_pattern)
2224 {
2225 enum controller_dp_test_pattern controller_test_pattern;
2226 enum dc_color_depth color_depth = pipe_ctx->
2227 stream->public.timing.display_color_depth;
2228 struct bit_depth_reduction_params params;
2229
2230 memset(&params, 0, sizeof(params));
2231
2232 switch (test_pattern) {
2233 case DP_TEST_PATTERN_COLOR_SQUARES:
2234 controller_test_pattern =
2235 CONTROLLER_DP_TEST_PATTERN_COLORSQUARES;
2236 break;
2237 case DP_TEST_PATTERN_COLOR_SQUARES_CEA:
2238 controller_test_pattern =
2239 CONTROLLER_DP_TEST_PATTERN_COLORSQUARES_CEA;
2240 break;
2241 case DP_TEST_PATTERN_VERTICAL_BARS:
2242 controller_test_pattern =
2243 CONTROLLER_DP_TEST_PATTERN_VERTICALBARS;
2244 break;
2245 case DP_TEST_PATTERN_HORIZONTAL_BARS:
2246 controller_test_pattern =
2247 CONTROLLER_DP_TEST_PATTERN_HORIZONTALBARS;
2248 break;
2249 case DP_TEST_PATTERN_COLOR_RAMP:
2250 controller_test_pattern =
2251 CONTROLLER_DP_TEST_PATTERN_COLORRAMP;
2252 break;
2253 default:
2254 controller_test_pattern =
2255 CONTROLLER_DP_TEST_PATTERN_VIDEOMODE;
2256 break;
2257 }
2258
2259 switch (test_pattern) {
2260 case DP_TEST_PATTERN_COLOR_SQUARES:
2261 case DP_TEST_PATTERN_COLOR_SQUARES_CEA:
2262 case DP_TEST_PATTERN_VERTICAL_BARS:
2263 case DP_TEST_PATTERN_HORIZONTAL_BARS:
2264 case DP_TEST_PATTERN_COLOR_RAMP:
2265 {
2266 /* disable bit depth reduction */
2267 pipe_ctx->stream->bit_depth_params = params;
2268 pipe_ctx->opp->funcs->
2269 opp_program_bit_depth_reduction(pipe_ctx->opp, &params);
2270
2271 pipe_ctx->tg->funcs->set_test_pattern(pipe_ctx->tg,
2272 controller_test_pattern, color_depth);
2273 }
2274 break;
2275 case DP_TEST_PATTERN_VIDEO_MODE:
2276 {
2277 /* restore bit depth reduction */
2278 link->dc->current_context->res_ctx.pool->funcs->
2279 build_bit_depth_reduction_params(pipe_ctx->stream,
2280 &params);
2281 pipe_ctx->stream->bit_depth_params = params;
2282 pipe_ctx->opp->funcs->
2283 opp_program_bit_depth_reduction(pipe_ctx->opp, &params);
2284
2285 pipe_ctx->tg->funcs->set_test_pattern(pipe_ctx->tg,
2286 CONTROLLER_DP_TEST_PATTERN_VIDEOMODE,
2287 color_depth);
2288 }
2289 break;
2290
2291 default:
2292 break;
2293 }
2294 }
2295
2296 bool dc_link_dp_set_test_pattern(
2297 const struct dc_link *link,
2298 enum dp_test_pattern test_pattern,
2299 const struct link_training_settings *p_link_settings,
2300 const unsigned char *p_custom_pattern,
2301 unsigned int cust_pattern_size)
2302 {
2303 struct core_link *core_link = DC_LINK_TO_CORE(link);
2304 struct pipe_ctx *pipes =
2305 core_link->dc->current_context->res_ctx.pipe_ctx;
2306 struct pipe_ctx pipe_ctx = pipes[0];
2307 unsigned int lane;
2308 unsigned int i;
2309 unsigned char link_qual_pattern[LANE_COUNT_DP_MAX] = {0};
2310 union dpcd_training_pattern training_pattern;
2311 union test_response test_response;
2312 enum dpcd_phy_test_patterns pattern;
2313
2314 memset(&training_pattern, 0, sizeof(training_pattern));
2315 memset(&test_response, 0, sizeof(test_response));
2316
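/* Find the pipe that drives a stream on this link; pipe_ctx defaults
 * to pipes[0] if no match is found.
 */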
2317 for (i = 0; i < MAX_PIPES; i++) {
2318 if (pipes[i].stream->sink->link == core_link) {
2319 pipe_ctx = pipes[i];
2320 break;
2321 }
2322 }
2323
2324 /* Reset the CRTC test pattern if it is currently running and the
2325 * request is VideoMode. Reset the DP PHY test pattern if it is
2326 * currently running and the request is VideoMode.
2327 */
2328 if (core_link->public.test_pattern_enabled && test_pattern ==
2329 DP_TEST_PATTERN_VIDEO_MODE) {
2330 /* Set CRTC Test Pattern */
2331 set_crtc_test_pattern(core_link, &pipe_ctx, test_pattern);
2332 dp_set_hw_test_pattern(core_link, test_pattern,
2333 (uint8_t *)p_custom_pattern,
2334 (uint32_t)cust_pattern_size);
2335
2336 /* Unblank Stream */
2337 core_link->dc->hwss.unblank_stream(
2338 &pipe_ctx,
2339 &core_link->public.verified_link_cap);
2340 /* TODO:m_pHwss->MuteAudioEndpoint
2341 * (pPathMode->pDisplayPath, false);
2342 */
2343
2344 /* Reset Test Pattern state */
2345 core_link->public.test_pattern_enabled = false;
2346
2347 return true;
2348 }
2349
2350 /* Check for PHY Test Patterns */
2351 if (is_dp_phy_pattern(test_pattern)) {
2352 /* Set DPCD Lane Settings before running test pattern */
2353 if (p_link_settings != NULL) {
2354 dp_set_hw_lane_settings(core_link, p_link_settings);
2355 dpcd_set_lane_settings(core_link, p_link_settings);
2356 }
2357
2358 /* Blank stream if running test pattern */
2359 if (test_pattern != DP_TEST_PATTERN_VIDEO_MODE) {
2360 /*TODO:
2361 * m_pHwss->
2362 * MuteAudioEndpoint(pPathMode->pDisplayPath, true);
2363 */
2364 /* Blank stream */
2365 pipes->stream_enc->funcs->dp_blank(pipe_ctx.stream_enc);
2366 }
2367
2368 dp_set_hw_test_pattern(core_link, test_pattern,
2369 (uint8_t *)p_custom_pattern,
2370 (uint32_t)cust_pattern_size);
2371
2372 if (test_pattern != DP_TEST_PATTERN_VIDEO_MODE) {
2373 /* Set Test Pattern state */
2374 core_link->public.test_pattern_enabled = true;
2375 if (p_link_settings != NULL)
2376 dpcd_set_link_settings(core_link,
2377 p_link_settings);
2378 }
2379
2380 switch (test_pattern) {
2381 case DP_TEST_PATTERN_VIDEO_MODE:
2382 pattern = PHY_TEST_PATTERN_NONE;
2383 break;
2384 case DP_TEST_PATTERN_D102:
2385 pattern = PHY_TEST_PATTERN_D10_2;
2386 break;
2387 case DP_TEST_PATTERN_SYMBOL_ERROR:
2388 pattern = PHY_TEST_PATTERN_SYMBOL_ERROR;
2389 break;
2390 case DP_TEST_PATTERN_PRBS7:
2391 pattern = PHY_TEST_PATTERN_PRBS7;
2392 break;
2393 case DP_TEST_PATTERN_80BIT_CUSTOM:
2394 pattern = PHY_TEST_PATTERN_80BIT_CUSTOM;
2395 break;
2396 case DP_TEST_PATTERN_HBR2_COMPLIANCE_EYE:
2397 pattern = PHY_TEST_PATTERN_HBR2_COMPLIANCE_EYE;
2398 break;
2399 default:
2400 return false;
2401 }
2402
2403 if (test_pattern == DP_TEST_PATTERN_VIDEO_MODE
2404 /*TODO:&& !pPathMode->pDisplayPath->IsTargetPoweredOn()*/)
2405 return false;
2406
2407 if (core_link->dpcd_caps.dpcd_rev.raw >= DPCD_REV_12) {
2408 /* Tell the receiver that we are sending a qualification
2409 * pattern. For DP 1.2 or later, the receiver's link quality
2410 * pattern is set using the DPCD LINK_QUAL_LANEx_SET
2411 * registers (0x10B~0x10E).
2412 */
2413 for (lane = 0; lane < LANE_COUNT_DP_MAX; lane++)
2414 link_qual_pattern[lane] =
2415 (unsigned char)(pattern);
2416
2417 core_link_write_dpcd(core_link,
2418 DPCD_ADDRESS_LINK_QUAL_LANE0_SET,
2419 link_qual_pattern,
2420 sizeof(link_qual_pattern));
2421 } else if (core_link->dpcd_caps.dpcd_rev.raw >= DPCD_REV_10 ||
2422 core_link->dpcd_caps.dpcd_rev.raw == 0) {
2423 /* Tell the receiver that we are sending a qualification
2424 * pattern. For DP 1.1a or earlier, the receiver's link
2425 * quality pattern is set using the
2426 * DPCD TRAINING_PATTERN_SET -> LINK_QUAL_PATTERN_SET
2427 * field of register 0x102. We use the v1.3 layout when
2428 * setting the test pattern for DP 1.1.
2429 */
2430 core_link_read_dpcd(core_link,
2431 DPCD_ADDRESS_TRAINING_PATTERN_SET,
2432 &training_pattern.raw,
2433 sizeof(training_pattern));
2434 training_pattern.v1_3.LINK_QUAL_PATTERN_SET = pattern;
2435 core_link_write_dpcd(core_link,
2436 DPCD_ADDRESS_TRAINING_PATTERN_SET,
2437 &training_pattern.raw,
2438 sizeof(training_pattern));
2439 }
2440 } else {
2441 /* CRTC Patterns */
2442 set_crtc_test_pattern(core_link, &pipe_ctx, test_pattern);
2443 /* Set Test Pattern state */
2444 core_link->public.test_pattern_enabled = true;
2445
2446 /* If this was called because of a compliance test request,
2447 * respond with an ACK here.
2448 */
2449 if (core_link->public.compliance_test_state.bits.
2450 SET_TEST_PATTERN_PENDING == 1) {
2451 core_link->public.compliance_test_state.bits.
2452 SET_TEST_PATTERN_PENDING = 0;
2453 test_response.bits.ACK = 1;
2454 core_link_write_dpcd(core_link,
2455 DPCD_ADDRESS_TEST_RESPONSE,
2456 &test_response.raw,
2457 sizeof(test_response));
2458 }
2459 }
2460
2461 return true;
2462 }