/*
 * Copyright 2012-16 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: AMD
 *
 */

#include "dce_clocks.h"
#include "dm_services.h"
#include "reg_helper.h"
#include "fixed32_32.h"
#include "bios_parser_interface.h"
#include "dc.h"
#if defined(CONFIG_DRM_AMD_DC_DCN1_0)
#include "dcn_calcs.h"
#include "core_dc.h"
#endif

#define TO_DCE_CLOCKS(clocks)\
	container_of(clocks, struct dce_disp_clk, base)

#define REG(reg) \
	(clk_dce->regs->reg)

#undef FN
#define FN(reg_name, field_name) \
	clk_dce->clk_shift->field_name, clk_dce->clk_mask->field_name

#define CTX \
	clk_dce->base.ctx

/* Max clock values for each state indexed by "enum clocks_state": */
static struct state_dependent_clocks dce80_max_clks_by_state[] = {
/* ClocksStateInvalid - should not be used */
{ .display_clk_khz = 0, .pixel_clk_khz = 0 },
/* ClocksStateUltraLow - not expected to be used for DCE 8.0 */
{ .display_clk_khz = 0, .pixel_clk_khz = 0 },
/* ClocksStateLow */
{ .display_clk_khz = 352000, .pixel_clk_khz = 330000 },
/* ClocksStateNominal */
{ .display_clk_khz = 600000, .pixel_clk_khz = 400000 },
/* ClocksStatePerformance */
{ .display_clk_khz = 600000, .pixel_clk_khz = 400000 } };

static struct state_dependent_clocks dce110_max_clks_by_state[] = {
/* ClocksStateInvalid - should not be used */
{ .display_clk_khz = 0, .pixel_clk_khz = 0 },
/* ClocksStateUltraLow - currently not supposed to be used, per the HW design team */
{ .display_clk_khz = 352000, .pixel_clk_khz = 330000 },
/* ClocksStateLow */
{ .display_clk_khz = 352000, .pixel_clk_khz = 330000 },
/* ClocksStateNominal */
{ .display_clk_khz = 467000, .pixel_clk_khz = 400000 },
/* ClocksStatePerformance */
{ .display_clk_khz = 643000, .pixel_clk_khz = 400000 } };

static struct state_dependent_clocks dce112_max_clks_by_state[] = {
/* ClocksStateInvalid - should not be used */
{ .display_clk_khz = 0, .pixel_clk_khz = 0 },
/* ClocksStateUltraLow - currently not supposed to be used, per the HW design team */
{ .display_clk_khz = 389189, .pixel_clk_khz = 346672 },
/* ClocksStateLow */
{ .display_clk_khz = 459000, .pixel_clk_khz = 400000 },
/* ClocksStateNominal */
{ .display_clk_khz = 667000, .pixel_clk_khz = 600000 },
/* ClocksStatePerformance */
{ .display_clk_khz = 1132000, .pixel_clk_khz = 600000 } };

static struct state_dependent_clocks dce120_max_clks_by_state[] = {
/* ClocksStateInvalid - should not be used */
{ .display_clk_khz = 0, .pixel_clk_khz = 0 },
/* ClocksStateUltraLow - currently not supposed to be used, per the HW design team */
{ .display_clk_khz = 0, .pixel_clk_khz = 0 },
/* ClocksStateLow */
{ .display_clk_khz = 460000, .pixel_clk_khz = 400000 },
/* ClocksStateNominal */
{ .display_clk_khz = 670000, .pixel_clk_khz = 600000 },
/* ClocksStatePerformance */
{ .display_clk_khz = 1133000, .pixel_clk_khz = 600000 } };

/* Starting point for each divider range. */
enum dce_divider_range_start {
	DIVIDER_RANGE_01_START = 200, /* 2.00 */
	DIVIDER_RANGE_02_START = 1600, /* 16.00 */
	DIVIDER_RANGE_03_START = 3200, /* 32.00 */
	DIVIDER_RANGE_SCALE_FACTOR = 100 /* Results are scaled up by 100. */
};

/* Ranges for divider identifiers (Divider ID or DID)
 * mmDENTIST_DISPCLK_CNTL.DENTIST_DISPCLK_WDIVIDER */
enum dce_divider_id_register_setting {
	DIVIDER_RANGE_01_BASE_DIVIDER_ID = 0X08,
	DIVIDER_RANGE_02_BASE_DIVIDER_ID = 0X40,
	DIVIDER_RANGE_03_BASE_DIVIDER_ID = 0X60,
	DIVIDER_RANGE_MAX_DIVIDER_ID = 0X80
};

/* Step size between each divider within a range.
 * Incrementing the DENTIST_DISPCLK_WDIVIDER by one
 * will increment the divider by this much. */
enum dce_divider_range_step_size {
	DIVIDER_RANGE_01_STEP_SIZE = 25, /* 0.25 */
	DIVIDER_RANGE_02_STEP_SIZE = 50, /* 0.50 */
	DIVIDER_RANGE_03_STEP_SIZE = 100 /* 1.00 */
};
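
/* Worked example (derived from the enums above): DID 0x41 lies in range 2,
 * so the scaled divider is
 * (0x41 - DIVIDER_RANGE_02_BASE_DIVIDER_ID) * DIVIDER_RANGE_02_STEP_SIZE
 * + DIVIDER_RANGE_02_START = 1 * 50 + 1600 = 1650, i.e. a divider of 16.50
 * once divided by DIVIDER_RANGE_SCALE_FACTOR. The ranges are contiguous:
 * (0x40 - 0x08) * 25 + 200 = 1600, which is exactly DIVIDER_RANGE_02_START.
 */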

static bool dce_divider_range_construct(
	struct dce_divider_range *div_range,
	int range_start,
	int range_step,
	int did_min,
	int did_max)
{
	div_range->div_range_start = range_start;
	div_range->div_range_step = range_step;
	div_range->did_min = did_min;
	div_range->did_max = did_max;

	if (div_range->div_range_step == 0) {
		div_range->div_range_step = 1;
		/* div_range_step cannot be zero */
		BREAK_TO_DEBUGGER();
	}
	/* Calculate this based on the other inputs.
	 * See DividerRange.h for an explanation of the relationship
	 * between a divider ID (DID) and a divider:
	 * Number of Divider IDs = (Maximum Divider ID - Minimum Divider ID)
	 * Maximum divider identified in this range =
	 *	(Number of Divider IDs) * Step size between dividers
	 *	+ The start of this range.
	 */
	div_range->div_range_end = (did_max - did_min) * range_step
		+ range_start;
	return true;
}

static int dce_divider_range_calc_divider(
	struct dce_divider_range *div_range,
	int did)
{
	/* Is this DID within our range? */
	if ((did < div_range->did_min) || (did >= div_range->did_max))
		return INVALID_DIVIDER;

	return ((did - div_range->did_min) * div_range->div_range_step)
		+ div_range->div_range_start;
}

static int dce_divider_range_get_divider(
	struct dce_divider_range *div_range,
	int ranges_num,
	int did)
{
	int div = INVALID_DIVIDER;
	int i;

	for (i = 0; i < ranges_num; i++) {
		/* Calculate the divider for the given divider ID */
		div = dce_divider_range_calc_divider(&div_range[i], did);
		/* Found a valid divider */
		if (div != INVALID_DIVIDER)
			break;
	}
	return div;
}

static int dce_clocks_get_dp_ref_freq(struct display_clock *clk)
{
	struct dce_disp_clk *clk_dce = TO_DCE_CLOCKS(clk);
	int dprefclk_wdivider;
	int dprefclk_src_sel;
	int dp_ref_clk_khz = 600000;
	int target_div = INVALID_DIVIDER;

	/* ASSERT DP Reference Clock source is from DFS */
	REG_GET(DPREFCLK_CNTL, DPREFCLK_SRC_SEL, &dprefclk_src_sel);
	ASSERT(dprefclk_src_sel == 0);

	/* Read mmDENTIST_DISPCLK_CNTL to get the currently
	 * programmed DID DENTIST_DPREFCLK_WDIVIDER */
	REG_GET(DENTIST_DISPCLK_CNTL, DENTIST_DPREFCLK_WDIVIDER, &dprefclk_wdivider);

	/* Convert DENTIST_DPREFCLK_WDIVIDER to an actual divider */
	target_div = dce_divider_range_get_divider(
		clk_dce->divider_ranges,
		DIVIDER_RANGE_MAX,
		dprefclk_wdivider);

	if (target_div != INVALID_DIVIDER) {
		/* Calculate the current DFS clock, in kHz. */
		dp_ref_clk_khz = (DIVIDER_RANGE_SCALE_FACTOR
			* clk_dce->dentist_vco_freq_khz) / target_div;
	}

	/* SW will adjust the DP REF Clock average value for all purposes
	 * (DP DTO / DP Audio DTO and DP GTC) if the clock is spread, in all
	 * of these cases:
	 * - SS enabled on the DP Ref clock and HW de-spreading enabled with
	 *   SW calculations for DS_INCR/DS_MODULO (this is planned to be the
	 *   default case)
	 * - SS enabled on the DP Ref clock and HW de-spreading enabled with
	 *   HW calculations (not planned to be used, but the average clock
	 *   should still be valid)
	 * - SS enabled on the DP Ref clock and HW de-spreading disabled
	 *   (should not be the case with CIK); then SW should program all
	 *   rates generated according to the average value (as with previous
	 *   ASICs)
	 */
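	/* Example with hypothetical spread-spectrum values: with
	 * dprefclk_ss_percentage = 50 and the default dprefclk_ss_divider of
	 * 1000, the downspread factor computed below is
	 * 50 / (1000 * 200) = 0.00025, so a 600000 kHz reference would be
	 * reported as floor(600000 * (1 - 0.00025)) = 599850 kHz.
	 */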
	if (clk_dce->ss_on_dprefclk && clk_dce->dprefclk_ss_divider != 0) {
		struct fixed32_32 ss_percentage = dal_fixed32_32_div_int(
				dal_fixed32_32_from_fraction(
						clk_dce->dprefclk_ss_percentage,
						clk_dce->dprefclk_ss_divider), 200);
		struct fixed32_32 adj_dp_ref_clk_khz;

		ss_percentage = dal_fixed32_32_sub(dal_fixed32_32_one,
				ss_percentage);
		adj_dp_ref_clk_khz =
			dal_fixed32_32_mul_int(
				ss_percentage,
				dp_ref_clk_khz);
		dp_ref_clk_khz = dal_fixed32_32_floor(adj_dp_ref_clk_khz);
	}

	return dp_ref_clk_khz;
}

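/* Illustrative walk-through using the dce110 table above: a request of
 * 400000 kHz display clock and 350000 kHz pixel clock satisfies
 * ClocksStateNominal (467000/400000) but exceeds ClocksStateLow
 * (352000/330000), so the loop below breaks at the Low state and returns
 * Nominal as the lowest state that satisfies both clocks.
 */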
static enum dm_pp_clocks_state dce_get_required_clocks_state(
	struct display_clock *clk,
	struct state_dependent_clocks *req_clocks)
{
	struct dce_disp_clk *clk_dce = TO_DCE_CLOCKS(clk);
	int i;
	enum dm_pp_clocks_state low_req_clk;

	/* Iterate from highest supported to lowest valid state, and update
	 * lowest RequiredState with the lowest state that satisfies
	 * all required clocks
	 */
	for (i = clk->max_clks_state; i >= DM_PP_CLOCKS_STATE_ULTRA_LOW; i--)
		if (req_clocks->display_clk_khz >
				clk_dce->max_clks_by_state[i].display_clk_khz
			|| req_clocks->pixel_clk_khz >
				clk_dce->max_clks_by_state[i].pixel_clk_khz)
			break;

	low_req_clk = i + 1;
	if (low_req_clk > clk->max_clks_state) {
		dm_logger_write(clk->ctx->logger, LOG_WARNING,
				"%s: clocks unsupported", __func__);
		low_req_clk = DM_PP_CLOCKS_STATE_INVALID;
	}

	return low_req_clk;
}

static bool dce_clock_set_min_clocks_state(
	struct display_clock *clk,
	enum dm_pp_clocks_state clocks_state)
{
	struct dm_pp_power_level_change_request level_change_req = {
			clocks_state };

	if (clocks_state > clk->max_clks_state) {
		/* Requested state exceeds max supported state. */
		dm_logger_write(clk->ctx->logger, LOG_WARNING,
				"Requested state exceeds max supported state");
		return false;
	} else if (clocks_state == clk->cur_min_clks_state) {
		/* If we're trying to set the same state, just return
		 * since nothing needs to be done */
		return true;
	}

	/* Notify PPLIB of the power level change request */
	if (dm_pp_apply_power_level_change_request(clk->ctx, &level_change_req))
		clk->cur_min_clks_state = clocks_state;

	return true;
}

static void dce_set_clock(
	struct display_clock *clk,
	int requested_clk_khz)
{
	struct dce_disp_clk *clk_dce = TO_DCE_CLOCKS(clk);
	struct bp_pixel_clock_parameters pxl_clk_params = { 0 };
	struct dc_bios *bp = clk->ctx->dc_bios;

	/* Make sure the requested clock isn't lower than the minimum threshold */
	if (requested_clk_khz > 0)
		requested_clk_khz = max(requested_clk_khz,
				clk_dce->dentist_vco_freq_khz / 64);
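	/* With the default 3600000 kHz DENTIST VCO frequency (see
	 * dce_clock_read_integrated_info()), this floor works out to
	 * 3600000 / 64 = 56250 kHz. */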

	/* Prepare to program the display clock */
	pxl_clk_params.target_pixel_clock = requested_clk_khz;
	pxl_clk_params.pll_id = CLOCK_SOURCE_ID_DFS;

	bp->funcs->program_display_engine_pll(bp, &pxl_clk_params);

	if (clk_dce->dfs_bypass_enabled) {

		/* Cache the fixed display clock */
		clk_dce->dfs_bypass_disp_clk =
			pxl_clk_params.dfs_bypass_display_clock;
	}

	/* From power down / HW reset we need to mark the clock state as
	 * ClocksStateNominal, so that on resume we will call the pplib
	 * voltage regulator. */
	if (requested_clk_khz == 0)
		clk->cur_min_clks_state = DM_PP_CLOCKS_STATE_NOMINAL;
}

#define PSR_SET_WAITLOOP 0x31

union dce110_dmcu_psr_config_data_wait_loop_reg1 {
	struct {
		unsigned int wait_loop:16; /* [15:0] */
		unsigned int reserved:16; /* [31:16] */
	} bits;
	unsigned int u32;
};

static void dce_psr_wait_loop(
	struct dce_disp_clk *clk_dce, unsigned int display_clk_khz)
{
	struct dc_context *ctx = clk_dce->base.ctx;
	union dce110_dmcu_psr_config_data_wait_loop_reg1 masterCmdData1;

	/* waitDMCUReadyForCmd */
	REG_WAIT(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 0, 100, 100);

	masterCmdData1.u32 = 0;
	masterCmdData1.bits.wait_loop = display_clk_khz / 1000 / 7;
	dm_write_reg(ctx, REG(MASTER_COMM_DATA_REG1), masterCmdData1.u32);

	/* setDMCUParam_Cmd */
	REG_UPDATE(MASTER_COMM_CMD_REG, MASTER_COMM_CMD_REG_BYTE0, PSR_SET_WAITLOOP);

	/* notifyDMCUMsg */
	REG_UPDATE(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 1);
}

static void dce_psr_set_clock(
	struct display_clock *clk,
	int requested_clk_khz)
{
	struct dce_disp_clk *clk_dce = TO_DCE_CLOCKS(clk);

	dce_set_clock(clk, requested_clk_khz);
	dce_psr_wait_loop(clk_dce, requested_clk_khz);
}

static void dce112_set_clock(
	struct display_clock *clk,
	int requested_clk_khz)
{
	struct dce_disp_clk *clk_dce = TO_DCE_CLOCKS(clk);
	struct bp_set_dce_clock_parameters dce_clk_params;
	struct dc_bios *bp = clk->ctx->dc_bios;

	/* Prepare to program the display clock */
	memset(&dce_clk_params, 0, sizeof(dce_clk_params));

	/* Make sure the requested clock isn't lower than the minimum threshold */
	if (requested_clk_khz > 0)
		requested_clk_khz = max(requested_clk_khz,
				clk_dce->dentist_vco_freq_khz / 62);

	dce_clk_params.target_clock_frequency = requested_clk_khz;
	dce_clk_params.pll_id = CLOCK_SOURCE_ID_DFS;
	dce_clk_params.clock_type = DCECLOCK_TYPE_DISPLAY_CLOCK;

	bp->funcs->set_dce_clock(bp, &dce_clk_params);

	/* From power down / HW reset we need to mark the clock state as
	 * ClocksStateNominal, so that on resume we will call the pplib
	 * voltage regulator. */
	if (requested_clk_khz == 0)
		clk->cur_min_clks_state = DM_PP_CLOCKS_STATE_NOMINAL;

	/* Program the DP ref clock.
	 * VBIOS will determine the DPREFCLK frequency, so we don't set it. */
	dce_clk_params.target_clock_frequency = 0;
	dce_clk_params.clock_type = DCECLOCK_TYPE_DPREFCLK;
	dce_clk_params.flags.USE_GENLOCK_AS_SOURCE_FOR_DPREFCLK =
			(dce_clk_params.pll_id ==
					CLOCK_SOURCE_COMBO_DISPLAY_PLL0);

	bp->funcs->set_dce_clock(bp, &dce_clk_params);

#if defined(CONFIG_DRM_AMD_DC_DCN1_0)
	dce_psr_wait_loop(clk_dce, requested_clk_khz);
#endif

}

static void dce_clock_read_integrated_info(struct dce_disp_clk *clk_dce)
{
	struct dc_debug *debug = &clk_dce->base.ctx->dc->debug;
	struct dc_bios *bp = clk_dce->base.ctx->dc_bios;
	struct integrated_info info = { { { 0 } } };
	struct firmware_info fw_info = { { 0 } };
	int i;

	if (bp->integrated_info)
		info = *bp->integrated_info;

	clk_dce->dentist_vco_freq_khz = info.dentist_vco_freq;
	if (clk_dce->dentist_vco_freq_khz == 0) {
		bp->funcs->get_firmware_info(bp, &fw_info);
		clk_dce->dentist_vco_freq_khz =
			fw_info.smu_gpu_pll_output_freq;
		if (clk_dce->dentist_vco_freq_khz == 0)
			clk_dce->dentist_vco_freq_khz = 3600000;
	}

	/* Update the maximum display clock for each power state */
	for (i = 0; i < NUMBER_OF_DISP_CLK_VOLTAGE; ++i) {
		enum dm_pp_clocks_state clk_state = DM_PP_CLOCKS_STATE_INVALID;

		switch (i) {
		case 0:
			clk_state = DM_PP_CLOCKS_STATE_ULTRA_LOW;
			break;

		case 1:
			clk_state = DM_PP_CLOCKS_STATE_LOW;
			break;

		case 2:
			clk_state = DM_PP_CLOCKS_STATE_NOMINAL;
			break;

		case 3:
			clk_state = DM_PP_CLOCKS_STATE_PERFORMANCE;
			break;

		default:
			clk_state = DM_PP_CLOCKS_STATE_INVALID;
			break;
		}

		/* Do not allow a bad VBIOS/SBIOS to override with invalid
		 * values; check for > 100 MHz */
		if (info.disp_clk_voltage[i].max_supported_clk >= 100000)
			clk_dce->max_clks_by_state[clk_state].display_clk_khz =
				info.disp_clk_voltage[i].max_supported_clk;
	}

	if (!debug->disable_dfs_bypass && bp->integrated_info)
		if (bp->integrated_info->gpu_cap_info & DFS_BYPASS_ENABLE)
			clk_dce->dfs_bypass_enabled = true;

	clk_dce->use_max_disp_clk = debug->max_disp_clk;
}

static void dce_clock_read_ss_info(struct dce_disp_clk *clk_dce)
{
	struct dc_bios *bp = clk_dce->base.ctx->dc_bios;
	int ss_info_num = bp->funcs->get_ss_entry_number(
			bp, AS_SIGNAL_TYPE_GPU_PLL);

	if (ss_info_num) {
		struct spread_spectrum_info info = { { 0 } };
		enum bp_result result = bp->funcs->get_spread_spectrum_info(
				bp, AS_SIGNAL_TYPE_GPU_PLL, 0, &info);

		/* The VBIOS keeps an entry for GPU PLL SS even if SS is not
		 * enabled, so a non-zero SSInfo.spreadSpectrumPercentage is
		 * the sign that SS is actually enabled.
		 */
		if (result == BP_RESULT_OK &&
				info.spread_spectrum_percentage != 0) {
			clk_dce->ss_on_dprefclk = true;
			clk_dce->dprefclk_ss_divider = info.spread_percentage_divider;

			if (info.type.CENTER_MODE == 0) {
				/* TODO: Currently for the DP reference clock
				 * we only need the SS percentage for
				 * downspread */
				clk_dce->dprefclk_ss_percentage =
						info.spread_spectrum_percentage;
			}

			return;
		}

		result = bp->funcs->get_spread_spectrum_info(
				bp, AS_SIGNAL_TYPE_DISPLAY_PORT, 0, &info);

		/* The VBIOS keeps an entry for DPREFCLK SS even if SS is not
		 * enabled, so a non-zero SSInfo.spreadSpectrumPercentage is
		 * the sign that SS is actually enabled.
		 */
		if (result == BP_RESULT_OK &&
				info.spread_spectrum_percentage != 0) {
			clk_dce->ss_on_dprefclk = true;
			clk_dce->dprefclk_ss_divider = info.spread_percentage_divider;

			if (info.type.CENTER_MODE == 0) {
				/* Currently for the DP reference clock we
				 * only need the SS percentage for
				 * downspread */
				clk_dce->dprefclk_ss_percentage =
						info.spread_spectrum_percentage;
			}
		}
	}
}

static bool dce_apply_clock_voltage_request(
	struct display_clock *clk,
	enum dm_pp_clock_type clocks_type,
	int clocks_in_khz,
	bool pre_mode_set,
	bool update_dp_phyclk)
{
	bool send_request = false;
	struct dm_pp_clock_for_voltage_req clock_voltage_req = {0};

	switch (clocks_type) {
	case DM_PP_CLOCK_TYPE_DISPLAY_CLK:
	case DM_PP_CLOCK_TYPE_PIXELCLK:
	case DM_PP_CLOCK_TYPE_DISPLAYPHYCLK:
		break;
	default:
		BREAK_TO_DEBUGGER();
		return false;
	}

	clock_voltage_req.clk_type = clocks_type;
	clock_voltage_req.clocks_in_khz = clocks_in_khz;

	/* to pplib */
	if (pre_mode_set) {
		switch (clocks_type) {
		case DM_PP_CLOCK_TYPE_DISPLAY_CLK:
			if (clocks_in_khz > clk->cur_clocks_value.dispclk_in_khz) {
				clk->cur_clocks_value.dispclk_notify_pplib_done = true;
				send_request = true;
			} else
				clk->cur_clocks_value.dispclk_notify_pplib_done = false;
			/* update the current clock value whether the clock increases or decreases */
			clk->cur_clocks_value.dispclk_in_khz = clocks_in_khz;
			break;
		case DM_PP_CLOCK_TYPE_PIXELCLK:
			if (clocks_in_khz > clk->cur_clocks_value.max_pixelclk_in_khz) {
				clk->cur_clocks_value.pixelclk_notify_pplib_done = true;
				send_request = true;
			} else
				clk->cur_clocks_value.pixelclk_notify_pplib_done = false;
			/* update the current clock value whether the clock increases or decreases */
			clk->cur_clocks_value.max_pixelclk_in_khz = clocks_in_khz;
			break;
		case DM_PP_CLOCK_TYPE_DISPLAYPHYCLK:
			if (clocks_in_khz > clk->cur_clocks_value.max_non_dp_phyclk_in_khz) {
				clk->cur_clocks_value.phyclk_notigy_pplib_done = true;
				send_request = true;
			} else
				clk->cur_clocks_value.phyclk_notigy_pplib_done = false;
			/* update the current clock value whether the clock increases or decreases */
			clk->cur_clocks_value.max_non_dp_phyclk_in_khz = clocks_in_khz;
			break;
		default:
			ASSERT(0);
			break;
		}

	} else {
		switch (clocks_type) {
		case DM_PP_CLOCK_TYPE_DISPLAY_CLK:
			if (!clk->cur_clocks_value.dispclk_notify_pplib_done)
				send_request = true;
			break;
		case DM_PP_CLOCK_TYPE_PIXELCLK:
			if (!clk->cur_clocks_value.pixelclk_notify_pplib_done)
				send_request = true;
			break;
		case DM_PP_CLOCK_TYPE_DISPLAYPHYCLK:
			if (!clk->cur_clocks_value.phyclk_notigy_pplib_done)
				send_request = true;
			break;
		default:
			ASSERT(0);
			break;
		}
	}
	if (send_request) {
#if defined(CONFIG_DRM_AMD_DC_DCN1_0)
		struct core_dc *core_dc = DC_TO_CORE(clk->ctx->dc);
		/* use the dcfclk to request voltage */
		clock_voltage_req.clk_type = DM_PP_CLOCK_TYPE_DCFCLK;
		clock_voltage_req.clocks_in_khz =
			dcn_find_dcfclk_suits_all(core_dc, &clk->cur_clocks_value);
#endif
		dm_pp_apply_clock_for_voltage_request(
				clk->ctx, &clock_voltage_req);
	}
	if (update_dp_phyclk && (clocks_in_khz >
			clk->cur_clocks_value.max_dp_phyclk_in_khz))
		clk->cur_clocks_value.max_dp_phyclk_in_khz = clocks_in_khz;

	return true;
}

static const struct display_clock_funcs dce120_funcs = {
	.get_dp_ref_clk_frequency = dce_clocks_get_dp_ref_freq,
	.apply_clock_voltage_request = dce_apply_clock_voltage_request,
	.set_clock = dce112_set_clock
};

static const struct display_clock_funcs dce112_funcs = {
	.get_dp_ref_clk_frequency = dce_clocks_get_dp_ref_freq,
	.get_required_clocks_state = dce_get_required_clocks_state,
	.set_min_clocks_state = dce_clock_set_min_clocks_state,
	.set_clock = dce112_set_clock
};

static const struct display_clock_funcs dce110_funcs = {
	.get_dp_ref_clk_frequency = dce_clocks_get_dp_ref_freq,
	.get_required_clocks_state = dce_get_required_clocks_state,
	.set_min_clocks_state = dce_clock_set_min_clocks_state,
	.set_clock = dce_psr_set_clock
};

static const struct display_clock_funcs dce_funcs = {
	.get_dp_ref_clk_frequency = dce_clocks_get_dp_ref_freq,
	.get_required_clocks_state = dce_get_required_clocks_state,
	.set_min_clocks_state = dce_clock_set_min_clocks_state,
	.set_clock = dce_set_clock
};

static void dce_disp_clk_construct(
	struct dce_disp_clk *clk_dce,
	struct dc_context *ctx,
	const struct dce_disp_clk_registers *regs,
	const struct dce_disp_clk_shift *clk_shift,
	const struct dce_disp_clk_mask *clk_mask)
{
	struct display_clock *base = &clk_dce->base;

	base->ctx = ctx;
	base->funcs = &dce_funcs;

	clk_dce->regs = regs;
	clk_dce->clk_shift = clk_shift;
	clk_dce->clk_mask = clk_mask;

	clk_dce->dfs_bypass_disp_clk = 0;

	clk_dce->dprefclk_ss_percentage = 0;
	clk_dce->dprefclk_ss_divider = 1000;
	clk_dce->ss_on_dprefclk = false;

	base->max_clks_state = DM_PP_CLOCKS_STATE_NOMINAL;
	base->cur_min_clks_state = DM_PP_CLOCKS_STATE_INVALID;

	dce_clock_read_integrated_info(clk_dce);
	dce_clock_read_ss_info(clk_dce);

	dce_divider_range_construct(
		&clk_dce->divider_ranges[DIVIDER_RANGE_01],
		DIVIDER_RANGE_01_START,
		DIVIDER_RANGE_01_STEP_SIZE,
		DIVIDER_RANGE_01_BASE_DIVIDER_ID,
		DIVIDER_RANGE_02_BASE_DIVIDER_ID);
	dce_divider_range_construct(
		&clk_dce->divider_ranges[DIVIDER_RANGE_02],
		DIVIDER_RANGE_02_START,
		DIVIDER_RANGE_02_STEP_SIZE,
		DIVIDER_RANGE_02_BASE_DIVIDER_ID,
		DIVIDER_RANGE_03_BASE_DIVIDER_ID);
	dce_divider_range_construct(
		&clk_dce->divider_ranges[DIVIDER_RANGE_03],
		DIVIDER_RANGE_03_START,
		DIVIDER_RANGE_03_STEP_SIZE,
		DIVIDER_RANGE_03_BASE_DIVIDER_ID,
		DIVIDER_RANGE_MAX_DIVIDER_ID);
}

struct display_clock *dce_disp_clk_create(
	struct dc_context *ctx,
	const struct dce_disp_clk_registers *regs,
	const struct dce_disp_clk_shift *clk_shift,
	const struct dce_disp_clk_mask *clk_mask)
{
	struct dce_disp_clk *clk_dce = dm_alloc(sizeof(*clk_dce));

	if (clk_dce == NULL) {
		BREAK_TO_DEBUGGER();
		return NULL;
	}

	memcpy(clk_dce->max_clks_by_state,
		dce80_max_clks_by_state,
		sizeof(dce80_max_clks_by_state));

	dce_disp_clk_construct(
		clk_dce, ctx, regs, clk_shift, clk_mask);

	return &clk_dce->base;
}

struct display_clock *dce110_disp_clk_create(
	struct dc_context *ctx,
	const struct dce_disp_clk_registers *regs,
	const struct dce_disp_clk_shift *clk_shift,
	const struct dce_disp_clk_mask *clk_mask)
{
	struct dce_disp_clk *clk_dce = dm_alloc(sizeof(*clk_dce));

	if (clk_dce == NULL) {
		BREAK_TO_DEBUGGER();
		return NULL;
	}

	memcpy(clk_dce->max_clks_by_state,
		dce110_max_clks_by_state,
		sizeof(dce110_max_clks_by_state));

	dce_disp_clk_construct(
		clk_dce, ctx, regs, clk_shift, clk_mask);

	clk_dce->base.funcs = &dce110_funcs;

	return &clk_dce->base;
}

struct display_clock *dce112_disp_clk_create(
	struct dc_context *ctx,
	const struct dce_disp_clk_registers *regs,
	const struct dce_disp_clk_shift *clk_shift,
	const struct dce_disp_clk_mask *clk_mask)
{
	struct dce_disp_clk *clk_dce = dm_alloc(sizeof(*clk_dce));

	if (clk_dce == NULL) {
		BREAK_TO_DEBUGGER();
		return NULL;
	}

	memcpy(clk_dce->max_clks_by_state,
		dce112_max_clks_by_state,
		sizeof(dce112_max_clks_by_state));

	dce_disp_clk_construct(
		clk_dce, ctx, regs, clk_shift, clk_mask);

	clk_dce->base.funcs = &dce112_funcs;

	return &clk_dce->base;
}

struct display_clock *dce120_disp_clk_create(
	struct dc_context *ctx,
	const struct dce_disp_clk_registers *regs,
	const struct dce_disp_clk_shift *clk_shift,
	const struct dce_disp_clk_mask *clk_mask)
{
	struct dce_disp_clk *clk_dce = dm_alloc(sizeof(*clk_dce));
	struct dm_pp_clock_levels_with_voltage clk_level_info = {0};

	if (clk_dce == NULL) {
		BREAK_TO_DEBUGGER();
		return NULL;
	}

	memcpy(clk_dce->max_clks_by_state,
		dce120_max_clks_by_state,
		sizeof(dce120_max_clks_by_state));

	dce_disp_clk_construct(
		clk_dce, ctx, regs, clk_shift, clk_mask);

	clk_dce->base.funcs = &dce120_funcs;

	/* new in dce120 */
	if (!ctx->dc->debug.disable_pplib_clock_request &&
			dm_pp_get_clock_levels_by_type_with_voltage(
				ctx, DM_PP_CLOCK_TYPE_DISPLAY_CLK, &clk_level_info)
			&& clk_level_info.num_levels)
		clk_dce->max_displ_clk_in_khz =
			clk_level_info.data[clk_level_info.num_levels - 1].clocks_in_khz;
	else
		clk_dce->max_displ_clk_in_khz = 1133000;

	return &clk_dce->base;
}

void dce_disp_clk_destroy(struct display_clock **disp_clk)
{
	struct dce_disp_clk *clk_dce = TO_DCE_CLOCKS(*disp_clk);

	dm_free(clk_dce);
	*disp_clk = NULL;
}
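
/* Typical usage sketch (hypothetical caller, e.g. a DCE resource
 * constructor; the register/shift/mask variables below are placeholders):
 *
 *	struct display_clock *disp_clk =
 *		dce110_disp_clk_create(ctx, &disp_clk_regs,
 *				&disp_clk_shift, &disp_clk_mask);
 *	if (disp_clk != NULL)
 *		disp_clk->funcs->set_clock(disp_clk, 300000);
 *	...
 *	dce_disp_clk_destroy(&disp_clk);
 */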