]>
Commit | Line | Data |
---|---|---|
9a70eba7 DL |
1 | /* |
2 | * Copyright 2012-16 Advanced Micro Devices, Inc. | |
3 | * | |
4 | * Permission is hereby granted, free of charge, to any person obtaining a | |
5 | * copy of this software and associated documentation files (the "Software"), | |
6 | * to deal in the Software without restriction, including without limitation | |
7 | * the rights to use, copy, modify, merge, publish, distribute, sublicense, | |
8 | * and/or sell copies of the Software, and to permit persons to whom the | |
9 | * Software is furnished to do so, subject to the following conditions: | |
10 | * | |
11 | * The above copyright notice and this permission notice shall be included in | |
12 | * all copies or substantial portions of the Software. | |
13 | * | |
14 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | |
15 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | |
16 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL | |
17 | * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR | |
18 | * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, | |
19 | * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR | |
20 | * OTHER DEALINGS IN THE SOFTWARE. | |
21 | * | |
22 | * Authors: AMD | |
23 | * | |
24 | */ | |
25 | ||
26 | #include "dce_clocks.h" | |
27 | #include "dm_services.h" | |
28 | #include "reg_helper.h" | |
29 | #include "fixed32_32.h" | |
30 | #include "bios_parser_interface.h" | |
31 | #include "dc.h" | |
15a27de2 HW |
32 | #include "core_dc.h" |
33 | #include "dce_abm.h" | |
9f72f51d | 34 | #include "dmcu.h" |
ff5ef992 AD |
35 | #if defined(CONFIG_DRM_AMD_DC_DCN1_0) |
36 | #include "dcn_calcs.h" | |
37 | #include "core_dc.h" | |
38 | #endif | |
9a70eba7 | 39 | |
15a27de2 HW |
40 | |
41 | ||
e11b86ad DL |
42 | #define TO_DCE_CLOCKS(clocks)\ |
43 | container_of(clocks, struct dce_disp_clk, base) | |
9a70eba7 DL |
44 | |
45 | #define REG(reg) \ | |
46 | (clk_dce->regs->reg) | |
47 | ||
48 | #undef FN | |
49 | #define FN(reg_name, field_name) \ | |
50 | clk_dce->clk_shift->field_name, clk_dce->clk_mask->field_name | |
51 | ||
52 | #define CTX \ | |
53 | clk_dce->base.ctx | |
54 | ||
e11b86ad DL |
/* Max clock values for each state indexed by "enum clocks_state"
 * (values in kHz): */
static struct state_dependent_clocks dce80_max_clks_by_state[] = {
/* ClocksStateInvalid - should not be used */
{ .display_clk_khz = 0, .pixel_clk_khz = 0 },
/* ClocksStateUltraLow - not expected to be used for DCE 8.0 */
{ .display_clk_khz = 0, .pixel_clk_khz = 0 },
/* ClocksStateLow */
{ .display_clk_khz = 352000, .pixel_clk_khz = 330000},
/* ClocksStateNominal */
{ .display_clk_khz = 600000, .pixel_clk_khz = 400000 },
/* ClocksStatePerformance */
{ .display_clk_khz = 600000, .pixel_clk_khz = 400000 } };
67 | ||
/* DCE 11.0 per-state clock limits (kHz), same indexing as dce80 table. */
static struct state_dependent_clocks dce110_max_clks_by_state[] = {
/*ClocksStateInvalid - should not be used*/
{ .display_clk_khz = 0, .pixel_clk_khz = 0 },
/*ClocksStateUltraLow - currently by HW design team not supposed to be used*/
{ .display_clk_khz = 352000, .pixel_clk_khz = 330000 },
/*ClocksStateLow*/
{ .display_clk_khz = 352000, .pixel_clk_khz = 330000 },
/*ClocksStateNominal*/
{ .display_clk_khz = 467000, .pixel_clk_khz = 400000 },
/*ClocksStatePerformance*/
{ .display_clk_khz = 643000, .pixel_clk_khz = 400000 } };
79 | ||
/* DCE 11.2 per-state clock limits (kHz), same indexing as dce80 table. */
static struct state_dependent_clocks dce112_max_clks_by_state[] = {
/*ClocksStateInvalid - should not be used*/
{ .display_clk_khz = 0, .pixel_clk_khz = 0 },
/*ClocksStateUltraLow - currently by HW design team not supposed to be used*/
{ .display_clk_khz = 389189, .pixel_clk_khz = 346672 },
/*ClocksStateLow*/
{ .display_clk_khz = 459000, .pixel_clk_khz = 400000 },
/*ClocksStateNominal*/
{ .display_clk_khz = 667000, .pixel_clk_khz = 600000 },
/*ClocksStatePerformance*/
{ .display_clk_khz = 1132000, .pixel_clk_khz = 600000 } };
91 | ||
/* DCE 12.0 per-state clock limits (kHz), same indexing as dce80 table. */
static struct state_dependent_clocks dce120_max_clks_by_state[] = {
/*ClocksStateInvalid - should not be used*/
{ .display_clk_khz = 0, .pixel_clk_khz = 0 },
/*ClocksStateUltraLow - currently by HW design team not supposed to be used*/
{ .display_clk_khz = 0, .pixel_clk_khz = 0 },
/*ClocksStateLow*/
{ .display_clk_khz = 460000, .pixel_clk_khz = 400000 },
/*ClocksStateNominal*/
{ .display_clk_khz = 670000, .pixel_clk_khz = 600000 },
/*ClocksStatePerformance*/
{ .display_clk_khz = 1133000, .pixel_clk_khz = 600000 } };
2c8ad2d5 | 103 | |
/* Starting point for each divider range. All values are the real
 * divider scaled up by DIVIDER_RANGE_SCALE_FACTOR (x100). */
enum dce_divider_range_start {
	DIVIDER_RANGE_01_START = 200, /* 2.00*/
	DIVIDER_RANGE_02_START = 1600, /* 16.00*/
	DIVIDER_RANGE_03_START = 3200, /* 32.00*/
	DIVIDER_RANGE_SCALE_FACTOR = 100 /* Results are scaled up by 100.*/
};
111 | ||
/* Ranges for divider identifiers (Divider ID or DID) as programmed in
 * mmDENTIST_DISPCLK_CNTL.DENTIST_DISPCLK_WDIVIDER. Each BASE value is
 * the first DID of its range; MAX is one past the last valid DID. */
enum dce_divider_id_register_setting {
	DIVIDER_RANGE_01_BASE_DIVIDER_ID = 0X08,
	DIVIDER_RANGE_02_BASE_DIVIDER_ID = 0X40,
	DIVIDER_RANGE_03_BASE_DIVIDER_ID = 0X60,
	DIVIDER_RANGE_MAX_DIVIDER_ID = 0X80
};
120 | ||
/* Step size between each divider within a range, scaled x100.
 * Incrementing DENTIST_DISPCLK_WDIVIDER by one increments the
 * effective divider by this much. */
enum dce_divider_range_step_size {
	DIVIDER_RANGE_01_STEP_SIZE = 25, /* 0.25*/
	DIVIDER_RANGE_02_STEP_SIZE = 50, /* 0.50*/
	DIVIDER_RANGE_03_STEP_SIZE = 100 /* 1.00 */
};
129 | ||
e11b86ad DL |
130 | static bool dce_divider_range_construct( |
131 | struct dce_divider_range *div_range, | |
132 | int range_start, | |
133 | int range_step, | |
134 | int did_min, | |
135 | int did_max) | |
136 | { | |
137 | div_range->div_range_start = range_start; | |
138 | div_range->div_range_step = range_step; | |
139 | div_range->did_min = did_min; | |
140 | div_range->did_max = did_max; | |
141 | ||
142 | if (div_range->div_range_step == 0) { | |
143 | div_range->div_range_step = 1; | |
144 | /*div_range_step cannot be zero*/ | |
145 | BREAK_TO_DEBUGGER(); | |
146 | } | |
147 | /* Calculate this based on the other inputs.*/ | |
148 | /* See DividerRange.h for explanation of */ | |
149 | /* the relationship between divider id (DID) and a divider.*/ | |
150 | /* Number of Divider IDs = (Maximum Divider ID - Minimum Divider ID)*/ | |
151 | /* Maximum divider identified in this range = | |
152 | * (Number of Divider IDs)*Step size between dividers | |
153 | * + The start of this range.*/ | |
154 | div_range->div_range_end = (did_max - did_min) * range_step | |
155 | + range_start; | |
156 | return true; | |
157 | } | |
158 | ||
159 | static int dce_divider_range_calc_divider( | |
160 | struct dce_divider_range *div_range, | |
161 | int did) | |
162 | { | |
163 | /* Is this DID within our range?*/ | |
164 | if ((did < div_range->did_min) || (did >= div_range->did_max)) | |
165 | return INVALID_DIVIDER; | |
166 | ||
167 | return ((did - div_range->did_min) * div_range->div_range_step) | |
168 | + div_range->div_range_start; | |
169 | ||
170 | } | |
171 | ||
e11b86ad DL |
172 | static int dce_divider_range_get_divider( |
173 | struct dce_divider_range *div_range, | |
174 | int ranges_num, | |
175 | int did) | |
176 | { | |
177 | int div = INVALID_DIVIDER; | |
178 | int i; | |
9a70eba7 | 179 | |
e11b86ad DL |
180 | for (i = 0; i < ranges_num; i++) { |
181 | /* Calculate divider with given divider ID*/ | |
182 | div = dce_divider_range_calc_divider(&div_range[i], did); | |
183 | /* Found a valid return divider*/ | |
184 | if (div != INVALID_DIVIDER) | |
185 | break; | |
186 | } | |
187 | return div; | |
188 | } | |
189 | ||
/* Compute the current DP reference clock in kHz.
 *
 * Reads the DENTIST DPREFCLK divider from hardware, derives the DFS
 * output from the VCO frequency, and (when spread spectrum is enabled
 * on DPREFCLK) returns the SS-adjusted average clock.
 * Falls back to 600000 kHz when the programmed DID is not recognized.
 */
static int dce_clocks_get_dp_ref_freq(struct display_clock *clk)
{
	struct dce_disp_clk *clk_dce = TO_DCE_CLOCKS(clk);
	int dprefclk_wdivider;
	int dprefclk_src_sel;
	int dp_ref_clk_khz = 600000; /* default when divider lookup fails */
	int target_div = INVALID_DIVIDER;

	/* ASSERT DP Reference Clock source is from DFS*/
	REG_GET(DPREFCLK_CNTL, DPREFCLK_SRC_SEL, &dprefclk_src_sel);
	ASSERT(dprefclk_src_sel == 0);

	/* Read the mmDENTIST_DISPCLK_CNTL to get the currently
	 * programmed DID DENTIST_DPREFCLK_WDIVIDER*/
	REG_GET(DENTIST_DISPCLK_CNTL, DENTIST_DPREFCLK_WDIVIDER, &dprefclk_wdivider);

	/* Convert DENTIST_DPREFCLK_WDIVIDER to actual divider (x100) */
	target_div = dce_divider_range_get_divider(
		clk_dce->divider_ranges,
		DIVIDER_RANGE_MAX,
		dprefclk_wdivider);

	if (target_div != INVALID_DIVIDER) {
		/* Calculate the current DFS clock, in kHz.
		 * SCALE_FACTOR cancels the x100 scaling of target_div. */
		dp_ref_clk_khz = (DIVIDER_RANGE_SCALE_FACTOR
			* clk_dce->dentist_vco_freq_khz) / target_div;
	}

	/* SW will adjust DP REF Clock average value for all purposes
	 * (DP DTO / DP Audio DTO and DP GTC)
	if clock is spread for all cases:
	-if SS enabled on DP Ref clock and HW de-spreading enabled with SW
	calculations for DS_INCR/DS_MODULO (this is planned to be default case)
	-if SS enabled on DP Ref clock and HW de-spreading enabled with HW
	calculations (not planned to be used, but average clock should still
	be valid)
	-if SS enabled on DP Ref clock and HW de-spreading disabled
	(should not be case with CIK) then SW should program all rates
	generated according to average value (case as with previous ASICs)
	*/
	if (clk_dce->ss_on_dprefclk && clk_dce->dprefclk_ss_divider != 0) {
		/* ss_percentage = percentage / divider / 200:
		 * half the peak-to-peak downspread, as a fraction. */
		struct fixed32_32 ss_percentage = dal_fixed32_32_div_int(
				dal_fixed32_32_from_fraction(
					clk_dce->dprefclk_ss_percentage,
					clk_dce->dprefclk_ss_divider), 200);
		struct fixed32_32 adj_dp_ref_clk_khz;

		/* Scale the nominal clock down to its spread average. */
		ss_percentage = dal_fixed32_32_sub(dal_fixed32_32_one,
								ss_percentage);
		adj_dp_ref_clk_khz =
			dal_fixed32_32_mul_int(
				ss_percentage,
				dp_ref_clk_khz);
		dp_ref_clk_khz = dal_fixed32_32_floor(adj_dp_ref_clk_khz);
	}

	return dp_ref_clk_khz;
}
248 | ||
249 | static enum dm_pp_clocks_state dce_get_required_clocks_state( | |
250 | struct display_clock *clk, | |
251 | struct state_dependent_clocks *req_clocks) | |
252 | { | |
253 | struct dce_disp_clk *clk_dce = TO_DCE_CLOCKS(clk); | |
254 | int i; | |
255 | enum dm_pp_clocks_state low_req_clk; | |
256 | ||
257 | /* Iterate from highest supported to lowest valid state, and update | |
258 | * lowest RequiredState with the lowest state that satisfies | |
259 | * all required clocks | |
260 | */ | |
261 | for (i = clk->max_clks_state; i >= DM_PP_CLOCKS_STATE_ULTRA_LOW; i--) | |
262 | if (req_clocks->display_clk_khz > | |
263 | clk_dce->max_clks_by_state[i].display_clk_khz | |
264 | || req_clocks->pixel_clk_khz > | |
265 | clk_dce->max_clks_by_state[i].pixel_clk_khz) | |
266 | break; | |
267 | ||
268 | low_req_clk = i + 1; | |
269 | if (low_req_clk > clk->max_clks_state) { | |
270 | dm_logger_write(clk->ctx->logger, LOG_WARNING, | |
271 | "%s: clocks unsupported", __func__); | |
272 | low_req_clk = DM_PP_CLOCKS_STATE_INVALID; | |
273 | } | |
274 | ||
275 | return low_req_clk; | |
276 | } | |
277 | ||
278 | static bool dce_clock_set_min_clocks_state( | |
279 | struct display_clock *clk, | |
280 | enum dm_pp_clocks_state clocks_state) | |
281 | { | |
282 | struct dm_pp_power_level_change_request level_change_req = { | |
283 | clocks_state }; | |
284 | ||
285 | if (clocks_state > clk->max_clks_state) { | |
286 | /*Requested state exceeds max supported state.*/ | |
287 | dm_logger_write(clk->ctx->logger, LOG_WARNING, | |
288 | "Requested state exceeds max supported state"); | |
289 | return false; | |
290 | } else if (clocks_state == clk->cur_min_clks_state) { | |
291 | /*if we're trying to set the same state, we can just return | |
292 | * since nothing needs to be done*/ | |
293 | return true; | |
294 | } | |
295 | ||
296 | /* get max clock state from PPLIB */ | |
297 | if (dm_pp_apply_power_level_change_request(clk->ctx, &level_change_req)) | |
298 | clk->cur_min_clks_state = clocks_state; | |
299 | ||
300 | return true; | |
301 | } | |
302 | ||
/* Program the display engine PLL to @requested_clk_khz via VBIOS.
 *
 * A non-zero request is clamped up to VCO/64 (hardware minimum).
 * A zero request marks the clock state as Nominal so that resume from
 * power-down goes through the PPLIB voltage regulator again.
 */
static void dce_set_clock(
	struct display_clock *clk,
	int requested_clk_khz)
{
	struct dce_disp_clk *clk_dce = TO_DCE_CLOCKS(clk);
	struct bp_pixel_clock_parameters pxl_clk_params = { 0 };
	struct dc_bios *bp = clk->ctx->dc_bios;

	/* Make sure requested clock isn't lower than minimum threshold*/
	if (requested_clk_khz > 0)
		requested_clk_khz = max(requested_clk_khz,
				clk_dce->dentist_vco_freq_khz / 64);

	/* Prepare to program display clock*/
	pxl_clk_params.target_pixel_clock = requested_clk_khz;
	pxl_clk_params.pll_id = CLOCK_SOURCE_ID_DFS;

	bp->funcs->program_display_engine_pll(bp, &pxl_clk_params);

	if (clk_dce->dfs_bypass_enabled) {

		/* Cache the fixed display clock reported back by VBIOS. */
		clk_dce->dfs_bypass_disp_clk =
			pxl_clk_params.dfs_bypass_display_clock;
	}

	/* from power down, we need mark the clock state as ClocksStateNominal
	 * from HWReset, so when resume we will call pplib voltage regulator.*/
	if (requested_clk_khz == 0)
		clk->cur_min_clks_state = DM_PP_CLOCKS_STATE_NOMINAL;
}
334 | ||
9a70eba7 DL |
335 | static void dce_psr_set_clock( |
336 | struct display_clock *clk, | |
e11b86ad | 337 | int requested_clk_khz) |
9a70eba7 DL |
338 | { |
339 | struct dce_disp_clk *clk_dce = TO_DCE_CLOCKS(clk); | |
9f72f51d AZ |
340 | struct dc_context *ctx = clk_dce->base.ctx; |
341 | struct core_dc *core_dc = DC_TO_CORE(ctx->dc); | |
342 | struct dmcu *dmcu = core_dc->res_pool->dmcu; | |
9a70eba7 DL |
343 | |
344 | dce_set_clock(clk, requested_clk_khz); | |
9f72f51d AZ |
345 | |
346 | dmcu->funcs->set_psr_wait_loop(dmcu, requested_clk_khz / 1000 / 7); | |
9a70eba7 DL |
347 | } |
348 | ||
/* DCE 11.2+ set_clock: program DISPCLK and DPREFCLK through the
 * VBIOS SetDceClock command table, then retune the DMCU PSR wait
 * loop when the DMCU is up.
 *
 * A non-zero request is clamped up to VCO/62 (hardware minimum for
 * this generation). A zero request marks the clock state Nominal so
 * resume from power-down re-engages the PPLIB voltage regulator.
 */
static void dce112_set_clock(
	struct display_clock *clk,
	int requested_clk_khz)
{
	struct dce_disp_clk *clk_dce = TO_DCE_CLOCKS(clk);
	struct bp_set_dce_clock_parameters dce_clk_params;
	struct dc_bios *bp = clk->ctx->dc_bios;
	struct core_dc *core_dc = DC_TO_CORE(clk->ctx->dc);
	struct abm *abm = core_dc->res_pool->abm;
	struct dmcu *dmcu = core_dc->res_pool->dmcu;

	/* Prepare to program display clock*/
	memset(&dce_clk_params, 0, sizeof(dce_clk_params));

	/* Make sure requested clock isn't lower than minimum threshold*/
	if (requested_clk_khz > 0)
		requested_clk_khz = max(requested_clk_khz,
				clk_dce->dentist_vco_freq_khz / 62);

	dce_clk_params.target_clock_frequency = requested_clk_khz;
	dce_clk_params.pll_id = CLOCK_SOURCE_ID_DFS;
	dce_clk_params.clock_type = DCECLOCK_TYPE_DISPLAY_CLOCK;

	bp->funcs->set_dce_clock(bp, &dce_clk_params);

	/* from power down, we need mark the clock state as ClocksStateNominal
	 * from HWReset, so when resume we will call pplib voltage regulator.*/
	if (requested_clk_khz == 0)
		clk->cur_min_clks_state = DM_PP_CLOCKS_STATE_NOMINAL;

	/*Program DP ref Clock*/
	/*VBIOS will determine DPREFCLK frequency, so we don't set it*/
	dce_clk_params.target_clock_frequency = 0;
	dce_clk_params.clock_type = DCECLOCK_TYPE_DPREFCLK;
	dce_clk_params.flags.USE_GENLOCK_AS_SOURCE_FOR_DPREFCLK =
			(dce_clk_params.pll_id ==
					CLOCK_SOURCE_COMBO_DISPLAY_PLL0);

	bp->funcs->set_dce_clock(bp, &dce_clk_params);

	/* Retune the DMCU PSR wait loop only when firmware is running. */
	if (abm->funcs->is_dmcu_initialized(abm))
		dmcu->funcs->set_psr_wait_loop(dmcu,
				requested_clk_khz / 1000 / 7);

}
394 | ||
395 | static void dce_clock_read_integrated_info(struct dce_disp_clk *clk_dce) | |
396 | { | |
397 | struct dc_debug *debug = &clk_dce->base.ctx->dc->debug; | |
398 | struct dc_bios *bp = clk_dce->base.ctx->dc_bios; | |
c2e218dd HW |
399 | struct integrated_info info = { { { 0 } } }; |
400 | struct firmware_info fw_info = { { 0 } }; | |
9a70eba7 DL |
401 | int i; |
402 | ||
403 | if (bp->integrated_info) | |
404 | info = *bp->integrated_info; | |
405 | ||
406 | clk_dce->dentist_vco_freq_khz = info.dentist_vco_freq; | |
407 | if (clk_dce->dentist_vco_freq_khz == 0) { | |
408 | bp->funcs->get_firmware_info(bp, &fw_info); | |
409 | clk_dce->dentist_vco_freq_khz = | |
410 | fw_info.smu_gpu_pll_output_freq; | |
411 | if (clk_dce->dentist_vco_freq_khz == 0) | |
412 | clk_dce->dentist_vco_freq_khz = 3600000; | |
413 | } | |
414 | ||
415 | /*update the maximum display clock for each power state*/ | |
416 | for (i = 0; i < NUMBER_OF_DISP_CLK_VOLTAGE; ++i) { | |
417 | enum dm_pp_clocks_state clk_state = DM_PP_CLOCKS_STATE_INVALID; | |
418 | ||
419 | switch (i) { | |
420 | case 0: | |
421 | clk_state = DM_PP_CLOCKS_STATE_ULTRA_LOW; | |
422 | break; | |
423 | ||
424 | case 1: | |
425 | clk_state = DM_PP_CLOCKS_STATE_LOW; | |
426 | break; | |
427 | ||
428 | case 2: | |
429 | clk_state = DM_PP_CLOCKS_STATE_NOMINAL; | |
430 | break; | |
431 | ||
432 | case 3: | |
433 | clk_state = DM_PP_CLOCKS_STATE_PERFORMANCE; | |
434 | break; | |
435 | ||
436 | default: | |
437 | clk_state = DM_PP_CLOCKS_STATE_INVALID; | |
438 | break; | |
439 | } | |
440 | ||
441 | /*Do not allow bad VBIOS/SBIOS to override with invalid values, | |
442 | * check for > 100MHz*/ | |
443 | if (info.disp_clk_voltage[i].max_supported_clk >= 100000) | |
444 | clk_dce->max_clks_by_state[clk_state].display_clk_khz = | |
445 | info.disp_clk_voltage[i].max_supported_clk; | |
446 | } | |
447 | ||
85944914 | 448 | if (!debug->disable_dfs_bypass && bp->integrated_info) |
9a70eba7 DL |
449 | if (bp->integrated_info->gpu_cap_info & DFS_BYPASS_ENABLE) |
450 | clk_dce->dfs_bypass_enabled = true; | |
451 | ||
452 | clk_dce->use_max_disp_clk = debug->max_disp_clk; | |
453 | } | |
454 | ||
455 | static void dce_clock_read_ss_info(struct dce_disp_clk *clk_dce) | |
456 | { | |
457 | struct dc_bios *bp = clk_dce->base.ctx->dc_bios; | |
458 | int ss_info_num = bp->funcs->get_ss_entry_number( | |
459 | bp, AS_SIGNAL_TYPE_GPU_PLL); | |
460 | ||
461 | if (ss_info_num) { | |
c2e218dd | 462 | struct spread_spectrum_info info = { { 0 } }; |
9a70eba7 DL |
463 | enum bp_result result = bp->funcs->get_spread_spectrum_info( |
464 | bp, AS_SIGNAL_TYPE_GPU_PLL, 0, &info); | |
465 | ||
466 | /* Based on VBIOS, VBIOS will keep entry for GPU PLL SS | |
467 | * even if SS not enabled and in that case | |
468 | * SSInfo.spreadSpectrumPercentage !=0 would be sign | |
469 | * that SS is enabled | |
470 | */ | |
471 | if (result == BP_RESULT_OK && | |
472 | info.spread_spectrum_percentage != 0) { | |
7d091f7a HW |
473 | clk_dce->ss_on_dprefclk = true; |
474 | clk_dce->dprefclk_ss_divider = info.spread_percentage_divider; | |
9a70eba7 DL |
475 | |
476 | if (info.type.CENTER_MODE == 0) { | |
7d091f7a | 477 | /* TODO: Currently for DP Reference clock we |
9a70eba7 DL |
478 | * need only SS percentage for |
479 | * downspread */ | |
7d091f7a | 480 | clk_dce->dprefclk_ss_percentage = |
9a70eba7 DL |
481 | info.spread_spectrum_percentage; |
482 | } | |
7d091f7a HW |
483 | |
484 | return; | |
9a70eba7 DL |
485 | } |
486 | ||
7d091f7a HW |
487 | result = bp->funcs->get_spread_spectrum_info( |
488 | bp, AS_SIGNAL_TYPE_DISPLAY_PORT, 0, &info); | |
489 | ||
490 | /* Based on VBIOS, VBIOS will keep entry for DPREFCLK SS | |
491 | * even if SS not enabled and in that case | |
492 | * SSInfo.spreadSpectrumPercentage !=0 would be sign | |
493 | * that SS is enabled | |
494 | */ | |
495 | if (result == BP_RESULT_OK && | |
496 | info.spread_spectrum_percentage != 0) { | |
497 | clk_dce->ss_on_dprefclk = true; | |
498 | clk_dce->dprefclk_ss_divider = info.spread_percentage_divider; | |
499 | ||
500 | if (info.type.CENTER_MODE == 0) { | |
501 | /* Currently for DP Reference clock we | |
502 | * need only SS percentage for | |
503 | * downspread */ | |
504 | clk_dce->dprefclk_ss_percentage = | |
505 | info.spread_spectrum_percentage; | |
506 | } | |
507 | } | |
9a70eba7 DL |
508 | } |
509 | } | |
510 | ||
/* Notify PPLIB of a clock requirement so voltage can follow.
 *
 * Two-phase protocol: during pre_mode_set, a request is sent only when
 * the clock is being raised (voltage must rise before the mode change)
 * and the cached value is updated either way; after mode set, the
 * deferred (lowering) requests are sent. Returns false only for an
 * unsupported clock type.
 */
static bool dce_apply_clock_voltage_request(
		struct display_clock *clk,
		enum dm_pp_clock_type clocks_type,
		int clocks_in_khz,
		bool pre_mode_set,
		bool update_dp_phyclk)
{
	bool send_request = false;
	struct dm_pp_clock_for_voltage_req clock_voltage_req = {0};

	switch (clocks_type) {
	case DM_PP_CLOCK_TYPE_DISPLAY_CLK:
	case DM_PP_CLOCK_TYPE_PIXELCLK:
	case DM_PP_CLOCK_TYPE_DISPLAYPHYCLK:
		break;
	default:
		/* Unsupported clock type for this interface. */
		BREAK_TO_DEBUGGER();
		return false;
	}

	clock_voltage_req.clk_type = clocks_type;
	clock_voltage_req.clocks_in_khz = clocks_in_khz;

	/* to pplib */
	if (pre_mode_set) {
		switch (clocks_type) {
		case DM_PP_CLOCK_TYPE_DISPLAY_CLK:
			/* Raising: notify now; lowering: defer to post-set. */
			if (clocks_in_khz > clk->cur_clocks_value.dispclk_in_khz) {
				clk->cur_clocks_value.dispclk_notify_pplib_done = true;
				send_request = true;
			} else
				clk->cur_clocks_value.dispclk_notify_pplib_done = false;
			/* no matter increase or decrease clock, update current clock value */
			clk->cur_clocks_value.dispclk_in_khz = clocks_in_khz;
			break;
		case DM_PP_CLOCK_TYPE_PIXELCLK:
			if (clocks_in_khz > clk->cur_clocks_value.max_pixelclk_in_khz) {
				clk->cur_clocks_value.pixelclk_notify_pplib_done = true;
				send_request = true;
			} else
				clk->cur_clocks_value.pixelclk_notify_pplib_done = false;
			/* no matter increase or decrease clock, update current clock value */
			clk->cur_clocks_value.max_pixelclk_in_khz = clocks_in_khz;
			break;
		case DM_PP_CLOCK_TYPE_DISPLAYPHYCLK:
			if (clocks_in_khz > clk->cur_clocks_value.max_non_dp_phyclk_in_khz) {
				clk->cur_clocks_value.phyclk_notigy_pplib_done = true;
				send_request = true;
			} else
				clk->cur_clocks_value.phyclk_notigy_pplib_done = false;
			/* no matter increase or decrease clock, update current clock value */
			clk->cur_clocks_value.max_non_dp_phyclk_in_khz = clocks_in_khz;
			break;
		default:
			ASSERT(0);
			break;
		}

	} else {
		/* Post mode set: send the requests deferred above. */
		switch (clocks_type) {
		case DM_PP_CLOCK_TYPE_DISPLAY_CLK:
			if (!clk->cur_clocks_value.dispclk_notify_pplib_done)
				send_request = true;
			break;
		case DM_PP_CLOCK_TYPE_PIXELCLK:
			if (!clk->cur_clocks_value.pixelclk_notify_pplib_done)
				send_request = true;
			break;
		case DM_PP_CLOCK_TYPE_DISPLAYPHYCLK:
			if (!clk->cur_clocks_value.phyclk_notigy_pplib_done)
				send_request = true;
			break;
		default:
			ASSERT(0);
			break;
		}
	}
	if (send_request) {
#if defined(CONFIG_DRM_AMD_DC_DCN1_0)
		struct core_dc *core_dc = DC_TO_CORE(clk->ctx->dc);
		/* On DCN, request voltage via DCFCLK instead: pick a
		 * DCFCLK that satisfies all currently cached clocks. */
		clock_voltage_req.clk_type = DM_PP_CLOCK_TYPE_DCFCLK;
		clock_voltage_req.clocks_in_khz =
			dcn_find_dcfclk_suits_all(core_dc, &clk->cur_clocks_value);
#endif
		dm_pp_apply_clock_for_voltage_request(
			clk->ctx, &clock_voltage_req);
	}
	if (update_dp_phyclk && (clocks_in_khz >
	clk->cur_clocks_value.max_dp_phyclk_in_khz))
		clk->cur_clocks_value.max_dp_phyclk_in_khz = clocks_in_khz;

	return true;
}
605 | ||
fd8cc371 | 606 | |
/* DCE 12.0 vtable: PPLIB voltage requests replace the per-state
 * min-clock interface used by older DCE generations. */
static const struct display_clock_funcs dce120_funcs = {
	.get_dp_ref_clk_frequency = dce_clocks_get_dp_ref_freq,
	.apply_clock_voltage_request = dce_apply_clock_voltage_request,
	.set_clock = dce112_set_clock
};
2c8ad2d5 | 612 | |
9a70eba7 DL |
/* DCE 11.2 vtable: state-based min-clock interface, 11.2 clock path. */
static const struct display_clock_funcs dce112_funcs = {
	.get_dp_ref_clk_frequency = dce_clocks_get_dp_ref_freq,
	.get_required_clocks_state = dce_get_required_clocks_state,
	.set_min_clocks_state = dce_clock_set_min_clocks_state,
	.set_clock = dce112_set_clock
};
619 | ||
/* DCE 11.0 vtable: adds the PSR wait-loop update on clock changes. */
static const struct display_clock_funcs dce110_funcs = {
	.get_dp_ref_clk_frequency = dce_clocks_get_dp_ref_freq,
	.get_required_clocks_state = dce_get_required_clocks_state,
	.set_min_clocks_state = dce_clock_set_min_clocks_state,
	.set_clock = dce_psr_set_clock
};
626 | ||
/* Default (DCE 8.0) vtable. */
static const struct display_clock_funcs dce_funcs = {
	.get_dp_ref_clk_frequency = dce_clocks_get_dp_ref_freq,
	.get_required_clocks_state = dce_get_required_clocks_state,
	.set_min_clocks_state = dce_clock_set_min_clocks_state,
	.set_clock = dce_set_clock
};
633 | ||
/* Common constructor: wire up registers/masks, set SS defaults, read
 * VBIOS integrated and spread-spectrum info, and build the three
 * DENTIST DID-to-divider mapping ranges.
 * Callers overwrite base->funcs afterwards for non-DCE8 variants. */
static void dce_disp_clk_construct(
	struct dce_disp_clk *clk_dce,
	struct dc_context *ctx,
	const struct dce_disp_clk_registers *regs,
	const struct dce_disp_clk_shift *clk_shift,
	const struct dce_disp_clk_mask *clk_mask)
{
	struct display_clock *base = &clk_dce->base;

	base->ctx = ctx;
	base->funcs = &dce_funcs;

	clk_dce->regs = regs;
	clk_dce->clk_shift = clk_shift;
	clk_dce->clk_mask = clk_mask;

	clk_dce->dfs_bypass_disp_clk = 0;

	/* SS defaults: disabled, divider 1000 so the percentage math in
	 * dce_clocks_get_dp_ref_freq stays well-defined. */
	clk_dce->dprefclk_ss_percentage = 0;
	clk_dce->dprefclk_ss_divider = 1000;
	clk_dce->ss_on_dprefclk = false;

	base->max_clks_state = DM_PP_CLOCKS_STATE_NOMINAL;
	base->cur_min_clks_state = DM_PP_CLOCKS_STATE_INVALID;

	dce_clock_read_integrated_info(clk_dce);
	dce_clock_read_ss_info(clk_dce);

	/* Three DENTIST divider ranges; each range ends where the next
	 * one's base DID begins. */
	dce_divider_range_construct(
		&clk_dce->divider_ranges[DIVIDER_RANGE_01],
		DIVIDER_RANGE_01_START,
		DIVIDER_RANGE_01_STEP_SIZE,
		DIVIDER_RANGE_01_BASE_DIVIDER_ID,
		DIVIDER_RANGE_02_BASE_DIVIDER_ID);
	dce_divider_range_construct(
		&clk_dce->divider_ranges[DIVIDER_RANGE_02],
		DIVIDER_RANGE_02_START,
		DIVIDER_RANGE_02_STEP_SIZE,
		DIVIDER_RANGE_02_BASE_DIVIDER_ID,
		DIVIDER_RANGE_03_BASE_DIVIDER_ID);
	dce_divider_range_construct(
		&clk_dce->divider_ranges[DIVIDER_RANGE_03],
		DIVIDER_RANGE_03_START,
		DIVIDER_RANGE_03_STEP_SIZE,
		DIVIDER_RANGE_03_BASE_DIVIDER_ID,
		DIVIDER_RANGE_MAX_DIVIDER_ID);
}
681 | ||
682 | struct display_clock *dce_disp_clk_create( | |
683 | struct dc_context *ctx, | |
684 | const struct dce_disp_clk_registers *regs, | |
685 | const struct dce_disp_clk_shift *clk_shift, | |
686 | const struct dce_disp_clk_mask *clk_mask) | |
687 | { | |
688 | struct dce_disp_clk *clk_dce = dm_alloc(sizeof(*clk_dce)); | |
689 | ||
690 | if (clk_dce == NULL) { | |
691 | BREAK_TO_DEBUGGER(); | |
692 | return NULL; | |
693 | } | |
694 | ||
e11b86ad DL |
695 | memcpy(clk_dce->max_clks_by_state, |
696 | dce80_max_clks_by_state, | |
697 | sizeof(dce80_max_clks_by_state)); | |
698 | ||
9a70eba7 DL |
699 | dce_disp_clk_construct( |
700 | clk_dce, ctx, regs, clk_shift, clk_mask); | |
701 | ||
702 | return &clk_dce->base; | |
703 | } | |
704 | ||
705 | struct display_clock *dce110_disp_clk_create( | |
706 | struct dc_context *ctx, | |
707 | const struct dce_disp_clk_registers *regs, | |
708 | const struct dce_disp_clk_shift *clk_shift, | |
709 | const struct dce_disp_clk_mask *clk_mask) | |
710 | { | |
711 | struct dce_disp_clk *clk_dce = dm_alloc(sizeof(*clk_dce)); | |
712 | ||
713 | if (clk_dce == NULL) { | |
714 | BREAK_TO_DEBUGGER(); | |
715 | return NULL; | |
716 | } | |
717 | ||
e11b86ad DL |
718 | memcpy(clk_dce->max_clks_by_state, |
719 | dce110_max_clks_by_state, | |
720 | sizeof(dce110_max_clks_by_state)); | |
721 | ||
9a70eba7 DL |
722 | dce_disp_clk_construct( |
723 | clk_dce, ctx, regs, clk_shift, clk_mask); | |
724 | ||
725 | clk_dce->base.funcs = &dce110_funcs; | |
726 | ||
727 | return &clk_dce->base; | |
728 | } | |
729 | ||
730 | struct display_clock *dce112_disp_clk_create( | |
731 | struct dc_context *ctx, | |
732 | const struct dce_disp_clk_registers *regs, | |
733 | const struct dce_disp_clk_shift *clk_shift, | |
734 | const struct dce_disp_clk_mask *clk_mask) | |
735 | { | |
736 | struct dce_disp_clk *clk_dce = dm_alloc(sizeof(*clk_dce)); | |
737 | ||
738 | if (clk_dce == NULL) { | |
739 | BREAK_TO_DEBUGGER(); | |
740 | return NULL; | |
741 | } | |
742 | ||
e11b86ad DL |
743 | memcpy(clk_dce->max_clks_by_state, |
744 | dce112_max_clks_by_state, | |
745 | sizeof(dce112_max_clks_by_state)); | |
746 | ||
9a70eba7 DL |
747 | dce_disp_clk_construct( |
748 | clk_dce, ctx, regs, clk_shift, clk_mask); | |
749 | ||
750 | clk_dce->base.funcs = &dce112_funcs; | |
751 | ||
752 | return &clk_dce->base; | |
753 | } | |
754 | ||
2c8ad2d5 AD |
755 | struct display_clock *dce120_disp_clk_create( |
756 | struct dc_context *ctx, | |
757 | const struct dce_disp_clk_registers *regs, | |
758 | const struct dce_disp_clk_shift *clk_shift, | |
759 | const struct dce_disp_clk_mask *clk_mask) | |
760 | { | |
761 | struct dce_disp_clk *clk_dce = dm_alloc(sizeof(*clk_dce)); | |
762 | struct dm_pp_clock_levels_with_voltage clk_level_info = {0}; | |
763 | ||
764 | if (clk_dce == NULL) { | |
765 | BREAK_TO_DEBUGGER(); | |
766 | return NULL; | |
767 | } | |
768 | ||
769 | memcpy(clk_dce->max_clks_by_state, | |
770 | dce120_max_clks_by_state, | |
771 | sizeof(dce120_max_clks_by_state)); | |
772 | ||
773 | dce_disp_clk_construct( | |
774 | clk_dce, ctx, regs, clk_shift, clk_mask); | |
775 | ||
776 | clk_dce->base.funcs = &dce120_funcs; | |
777 | ||
778 | /* new in dce120 */ | |
779 | if (!ctx->dc->debug.disable_pplib_clock_request && | |
780 | dm_pp_get_clock_levels_by_type_with_voltage( | |
781 | ctx, DM_PP_CLOCK_TYPE_DISPLAY_CLK, &clk_level_info) | |
782 | && clk_level_info.num_levels) | |
783 | clk_dce->max_displ_clk_in_khz = | |
784 | clk_level_info.data[clk_level_info.num_levels - 1].clocks_in_khz; | |
785 | else | |
786 | clk_dce->max_displ_clk_in_khz = 1133000; | |
787 | ||
788 | return &clk_dce->base; | |
789 | } | |
2c8ad2d5 | 790 | |
9a70eba7 DL |
791 | void dce_disp_clk_destroy(struct display_clock **disp_clk) |
792 | { | |
793 | struct dce_disp_clk *clk_dce = TO_DCE_CLOCKS(*disp_clk); | |
794 | ||
795 | dm_free(clk_dce); | |
796 | *disp_clk = NULL; | |
797 | } |