drm/radeon: avoid turning off spread spectrum for used pll
1 /*
2 * Copyright 2007-8 Advanced Micro Devices, Inc.
3 * Copyright 2008 Red Hat Inc.
4 *
5 * Permission is hereby granted, free of charge, to any person obtaining a
6 * copy of this software and associated documentation files (the "Software"),
7 * to deal in the Software without restriction, including without limitation
8 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
9 * and/or sell copies of the Software, and to permit persons to whom the
10 * Software is furnished to do so, subject to the following conditions:
11 *
12 * The above copyright notice and this permission notice shall be included in
13 * all copies or substantial portions of the Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
19 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
20 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
21 * OTHER DEALINGS IN THE SOFTWARE.
22 *
23 * Authors: Dave Airlie
24 * Alex Deucher
25 */
26 #include <drm/drmP.h>
27 #include <drm/drm_crtc_helper.h>
28 #include <drm/radeon_drm.h>
29 #include <drm/drm_fixed.h>
30 #include "radeon.h"
31 #include "atom.h"
32 #include "atom-bits.h"
33
34 static void atombios_overscan_setup(struct drm_crtc *crtc,
35 struct drm_display_mode *mode,
36 struct drm_display_mode *adjusted_mode)
37 {
38 struct drm_device *dev = crtc->dev;
39 struct radeon_device *rdev = dev->dev_private;
40 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
41 SET_CRTC_OVERSCAN_PS_ALLOCATION args;
42 int index = GetIndexIntoMasterTable(COMMAND, SetCRTC_OverScan);
43 int a1, a2;
44
45 memset(&args, 0, sizeof(args));
46
47 args.ucCRTC = radeon_crtc->crtc_id;
48
49 switch (radeon_crtc->rmx_type) {
50 case RMX_CENTER:
51 args.usOverscanTop = cpu_to_le16((adjusted_mode->crtc_vdisplay - mode->crtc_vdisplay) / 2);
52 args.usOverscanBottom = cpu_to_le16((adjusted_mode->crtc_vdisplay - mode->crtc_vdisplay) / 2);
53 args.usOverscanLeft = cpu_to_le16((adjusted_mode->crtc_hdisplay - mode->crtc_hdisplay) / 2);
54 args.usOverscanRight = cpu_to_le16((adjusted_mode->crtc_hdisplay - mode->crtc_hdisplay) / 2);
55 break;
56 case RMX_ASPECT:
57 a1 = mode->crtc_vdisplay * adjusted_mode->crtc_hdisplay;
58 a2 = adjusted_mode->crtc_vdisplay * mode->crtc_hdisplay;
59
60 if (a1 > a2) {
61 args.usOverscanLeft = cpu_to_le16((adjusted_mode->crtc_hdisplay - (a2 / mode->crtc_vdisplay)) / 2);
62 args.usOverscanRight = cpu_to_le16((adjusted_mode->crtc_hdisplay - (a2 / mode->crtc_vdisplay)) / 2);
63 } else if (a2 > a1) {
64 args.usOverscanTop = cpu_to_le16((adjusted_mode->crtc_vdisplay - (a1 / mode->crtc_hdisplay)) / 2);
65 args.usOverscanBottom = cpu_to_le16((adjusted_mode->crtc_vdisplay - (a1 / mode->crtc_hdisplay)) / 2);
66 }
67 break;
68 case RMX_FULL:
69 default:
70 args.usOverscanRight = cpu_to_le16(radeon_crtc->h_border);
71 args.usOverscanLeft = cpu_to_le16(radeon_crtc->h_border);
72 args.usOverscanBottom = cpu_to_le16(radeon_crtc->v_border);
73 args.usOverscanTop = cpu_to_le16(radeon_crtc->v_border);
74 break;
75 }
76 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
77 }
78
79 static void atombios_scaler_setup(struct drm_crtc *crtc)
80 {
81 struct drm_device *dev = crtc->dev;
82 struct radeon_device *rdev = dev->dev_private;
83 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
84 ENABLE_SCALER_PS_ALLOCATION args;
85 int index = GetIndexIntoMasterTable(COMMAND, EnableScaler);
86
87 /* fixme - fill in enc_priv for atom dac */
88 enum radeon_tv_std tv_std = TV_STD_NTSC;
89 bool is_tv = false, is_cv = false;
90 struct drm_encoder *encoder;
91
92 if (!ASIC_IS_AVIVO(rdev) && radeon_crtc->crtc_id)
93 return;
94
95 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
96 /* find tv std */
97 if (encoder->crtc == crtc) {
98 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
99 if (radeon_encoder->active_device & ATOM_DEVICE_TV_SUPPORT) {
100 struct radeon_encoder_atom_dac *tv_dac = radeon_encoder->enc_priv;
101 tv_std = tv_dac->tv_std;
102 is_tv = true;
103 }
104 }
105 }
106
107 memset(&args, 0, sizeof(args));
108
109 args.ucScaler = radeon_crtc->crtc_id;
110
111 if (is_tv) {
112 switch (tv_std) {
113 case TV_STD_NTSC:
114 default:
115 args.ucTVStandard = ATOM_TV_NTSC;
116 break;
117 case TV_STD_PAL:
118 args.ucTVStandard = ATOM_TV_PAL;
119 break;
120 case TV_STD_PAL_M:
121 args.ucTVStandard = ATOM_TV_PALM;
122 break;
123 case TV_STD_PAL_60:
124 args.ucTVStandard = ATOM_TV_PAL60;
125 break;
126 case TV_STD_NTSC_J:
127 args.ucTVStandard = ATOM_TV_NTSCJ;
128 break;
129 case TV_STD_SCART_PAL:
130 args.ucTVStandard = ATOM_TV_PAL; /* ??? */
131 break;
132 case TV_STD_SECAM:
133 args.ucTVStandard = ATOM_TV_SECAM;
134 break;
135 case TV_STD_PAL_CN:
136 args.ucTVStandard = ATOM_TV_PALCN;
137 break;
138 }
139 args.ucEnable = SCALER_ENABLE_MULTITAP_MODE;
140 } else if (is_cv) {
141 args.ucTVStandard = ATOM_TV_CV;
142 args.ucEnable = SCALER_ENABLE_MULTITAP_MODE;
143 } else {
144 switch (radeon_crtc->rmx_type) {
145 case RMX_FULL:
146 args.ucEnable = ATOM_SCALER_EXPANSION;
147 break;
148 case RMX_CENTER:
149 args.ucEnable = ATOM_SCALER_CENTER;
150 break;
151 case RMX_ASPECT:
152 args.ucEnable = ATOM_SCALER_EXPANSION;
153 break;
154 default:
155 if (ASIC_IS_AVIVO(rdev))
156 args.ucEnable = ATOM_SCALER_DISABLE;
157 else
158 args.ucEnable = ATOM_SCALER_CENTER;
159 break;
160 }
161 }
162 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
163 if ((is_tv || is_cv)
164 && rdev->family >= CHIP_RV515 && rdev->family <= CHIP_R580) {
165 atom_rv515_force_tv_scaler(rdev, radeon_crtc);
166 }
167 }
168
169 static void atombios_lock_crtc(struct drm_crtc *crtc, int lock)
170 {
171 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
172 struct drm_device *dev = crtc->dev;
173 struct radeon_device *rdev = dev->dev_private;
174 int index =
175 GetIndexIntoMasterTable(COMMAND, UpdateCRTC_DoubleBufferRegisters);
176 ENABLE_CRTC_PS_ALLOCATION args;
177
178 memset(&args, 0, sizeof(args));
179
180 args.ucCRTC = radeon_crtc->crtc_id;
181 args.ucEnable = lock;
182
183 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
184 }
185
186 static void atombios_enable_crtc(struct drm_crtc *crtc, int state)
187 {
188 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
189 struct drm_device *dev = crtc->dev;
190 struct radeon_device *rdev = dev->dev_private;
191 int index = GetIndexIntoMasterTable(COMMAND, EnableCRTC);
192 ENABLE_CRTC_PS_ALLOCATION args;
193
194 memset(&args, 0, sizeof(args));
195
196 args.ucCRTC = radeon_crtc->crtc_id;
197 args.ucEnable = state;
198
199 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
200 }
201
202 static void atombios_enable_crtc_memreq(struct drm_crtc *crtc, int state)
203 {
204 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
205 struct drm_device *dev = crtc->dev;
206 struct radeon_device *rdev = dev->dev_private;
207 int index = GetIndexIntoMasterTable(COMMAND, EnableCRTCMemReq);
208 ENABLE_CRTC_PS_ALLOCATION args;
209
210 memset(&args, 0, sizeof(args));
211
212 args.ucCRTC = radeon_crtc->crtc_id;
213 args.ucEnable = state;
214
215 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
216 }
217
218 static void atombios_blank_crtc(struct drm_crtc *crtc, int state)
219 {
220 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
221 struct drm_device *dev = crtc->dev;
222 struct radeon_device *rdev = dev->dev_private;
223 int index = GetIndexIntoMasterTable(COMMAND, BlankCRTC);
224 BLANK_CRTC_PS_ALLOCATION args;
225
226 memset(&args, 0, sizeof(args));
227
228 args.ucCRTC = radeon_crtc->crtc_id;
229 args.ucBlanking = state;
230
231 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
232 }
233
234 static void atombios_powergate_crtc(struct drm_crtc *crtc, int state)
235 {
236 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
237 struct drm_device *dev = crtc->dev;
238 struct radeon_device *rdev = dev->dev_private;
239 int index = GetIndexIntoMasterTable(COMMAND, EnableDispPowerGating);
240 ENABLE_DISP_POWER_GATING_PARAMETERS_V2_1 args;
241
242 memset(&args, 0, sizeof(args));
243
244 args.ucDispPipeId = radeon_crtc->crtc_id;
245 args.ucEnable = state;
246
247 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
248 }
249
250 void atombios_crtc_dpms(struct drm_crtc *crtc, int mode)
251 {
252 struct drm_device *dev = crtc->dev;
253 struct radeon_device *rdev = dev->dev_private;
254 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
255
256 switch (mode) {
257 case DRM_MODE_DPMS_ON:
258 radeon_crtc->enabled = true;
259 /* adjust pm to dpms changes BEFORE enabling crtcs */
260 radeon_pm_compute_clocks(rdev);
261 /* disable crtc pair power gating before programming */
262 if (ASIC_IS_DCE6(rdev) && !radeon_crtc->in_mode_set)
263 atombios_powergate_crtc(crtc, ATOM_DISABLE);
264 atombios_enable_crtc(crtc, ATOM_ENABLE);
265 if (ASIC_IS_DCE3(rdev) && !ASIC_IS_DCE6(rdev))
266 atombios_enable_crtc_memreq(crtc, ATOM_ENABLE);
267 atombios_blank_crtc(crtc, ATOM_DISABLE);
268 drm_vblank_post_modeset(dev, radeon_crtc->crtc_id);
269 radeon_crtc_load_lut(crtc);
270 break;
271 case DRM_MODE_DPMS_STANDBY:
272 case DRM_MODE_DPMS_SUSPEND:
273 case DRM_MODE_DPMS_OFF:
274 drm_vblank_pre_modeset(dev, radeon_crtc->crtc_id);
275 if (radeon_crtc->enabled)
276 atombios_blank_crtc(crtc, ATOM_ENABLE);
277 if (ASIC_IS_DCE3(rdev) && !ASIC_IS_DCE6(rdev))
278 atombios_enable_crtc_memreq(crtc, ATOM_DISABLE);
279 atombios_enable_crtc(crtc, ATOM_DISABLE);
280 radeon_crtc->enabled = false;
281 /* power gating is per-pair */
282 if (ASIC_IS_DCE6(rdev) && !radeon_crtc->in_mode_set) {
283 struct drm_crtc *other_crtc;
284 struct radeon_crtc *other_radeon_crtc;
285 list_for_each_entry(other_crtc, &rdev->ddev->mode_config.crtc_list, head) {
286 other_radeon_crtc = to_radeon_crtc(other_crtc);
287 if (((radeon_crtc->crtc_id == 0) && (other_radeon_crtc->crtc_id == 1)) ||
288 ((radeon_crtc->crtc_id == 1) && (other_radeon_crtc->crtc_id == 0)) ||
289 ((radeon_crtc->crtc_id == 2) && (other_radeon_crtc->crtc_id == 3)) ||
290 ((radeon_crtc->crtc_id == 3) && (other_radeon_crtc->crtc_id == 2)) ||
291 ((radeon_crtc->crtc_id == 4) && (other_radeon_crtc->crtc_id == 5)) ||
292 ((radeon_crtc->crtc_id == 5) && (other_radeon_crtc->crtc_id == 4))) {
293 /* if both crtcs in the pair are off, enable power gating */
294 if (other_radeon_crtc->enabled == false)
295 atombios_powergate_crtc(crtc, ATOM_ENABLE);
296 break;
297 }
298 }
299 }
300 /* adjust pm to dpms changes AFTER disabling crtcs */
301 radeon_pm_compute_clocks(rdev);
302 break;
303 }
304 }
305
306 static void
307 atombios_set_crtc_dtd_timing(struct drm_crtc *crtc,
308 struct drm_display_mode *mode)
309 {
310 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
311 struct drm_device *dev = crtc->dev;
312 struct radeon_device *rdev = dev->dev_private;
313 SET_CRTC_USING_DTD_TIMING_PARAMETERS args;
314 int index = GetIndexIntoMasterTable(COMMAND, SetCRTC_UsingDTDTiming);
315 u16 misc = 0;
316
317 memset(&args, 0, sizeof(args));
318 args.usH_Size = cpu_to_le16(mode->crtc_hdisplay - (radeon_crtc->h_border * 2));
319 args.usH_Blanking_Time =
320 cpu_to_le16(mode->crtc_hblank_end - mode->crtc_hdisplay + (radeon_crtc->h_border * 2));
321 args.usV_Size = cpu_to_le16(mode->crtc_vdisplay - (radeon_crtc->v_border * 2));
322 args.usV_Blanking_Time =
323 cpu_to_le16(mode->crtc_vblank_end - mode->crtc_vdisplay + (radeon_crtc->v_border * 2));
324 args.usH_SyncOffset =
325 cpu_to_le16(mode->crtc_hsync_start - mode->crtc_hdisplay + radeon_crtc->h_border);
326 args.usH_SyncWidth =
327 cpu_to_le16(mode->crtc_hsync_end - mode->crtc_hsync_start);
328 args.usV_SyncOffset =
329 cpu_to_le16(mode->crtc_vsync_start - mode->crtc_vdisplay + radeon_crtc->v_border);
330 args.usV_SyncWidth =
331 cpu_to_le16(mode->crtc_vsync_end - mode->crtc_vsync_start);
332 args.ucH_Border = radeon_crtc->h_border;
333 args.ucV_Border = radeon_crtc->v_border;
334
335 if (mode->flags & DRM_MODE_FLAG_NVSYNC)
336 misc |= ATOM_VSYNC_POLARITY;
337 if (mode->flags & DRM_MODE_FLAG_NHSYNC)
338 misc |= ATOM_HSYNC_POLARITY;
339 if (mode->flags & DRM_MODE_FLAG_CSYNC)
340 misc |= ATOM_COMPOSITESYNC;
341 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
342 misc |= ATOM_INTERLACE;
343 if (mode->flags & DRM_MODE_FLAG_DBLSCAN)
344 misc |= ATOM_DOUBLE_CLOCK_MODE;
345
346 args.susModeMiscInfo.usAccess = cpu_to_le16(misc);
347 args.ucCRTC = radeon_crtc->crtc_id;
348
349 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
350 }
351
352 static void atombios_crtc_set_timing(struct drm_crtc *crtc,
353 struct drm_display_mode *mode)
354 {
355 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
356 struct drm_device *dev = crtc->dev;
357 struct radeon_device *rdev = dev->dev_private;
358 SET_CRTC_TIMING_PARAMETERS_PS_ALLOCATION args;
359 int index = GetIndexIntoMasterTable(COMMAND, SetCRTC_Timing);
360 u16 misc = 0;
361
362 memset(&args, 0, sizeof(args));
363 args.usH_Total = cpu_to_le16(mode->crtc_htotal);
364 args.usH_Disp = cpu_to_le16(mode->crtc_hdisplay);
365 args.usH_SyncStart = cpu_to_le16(mode->crtc_hsync_start);
366 args.usH_SyncWidth =
367 cpu_to_le16(mode->crtc_hsync_end - mode->crtc_hsync_start);
368 args.usV_Total = cpu_to_le16(mode->crtc_vtotal);
369 args.usV_Disp = cpu_to_le16(mode->crtc_vdisplay);
370 args.usV_SyncStart = cpu_to_le16(mode->crtc_vsync_start);
371 args.usV_SyncWidth =
372 cpu_to_le16(mode->crtc_vsync_end - mode->crtc_vsync_start);
373
374 args.ucOverscanRight = radeon_crtc->h_border;
375 args.ucOverscanLeft = radeon_crtc->h_border;
376 args.ucOverscanBottom = radeon_crtc->v_border;
377 args.ucOverscanTop = radeon_crtc->v_border;
378
379 if (mode->flags & DRM_MODE_FLAG_NVSYNC)
380 misc |= ATOM_VSYNC_POLARITY;
381 if (mode->flags & DRM_MODE_FLAG_NHSYNC)
382 misc |= ATOM_HSYNC_POLARITY;
383 if (mode->flags & DRM_MODE_FLAG_CSYNC)
384 misc |= ATOM_COMPOSITESYNC;
385 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
386 misc |= ATOM_INTERLACE;
387 if (mode->flags & DRM_MODE_FLAG_DBLSCAN)
388 misc |= ATOM_DOUBLE_CLOCK_MODE;
389
390 args.susModeMiscInfo.usAccess = cpu_to_le16(misc);
391 args.ucCRTC = radeon_crtc->crtc_id;
392
393 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
394 }
395
396 static void atombios_disable_ss(struct radeon_device *rdev, int pll_id)
397 {
398 u32 ss_cntl;
399
400 if (ASIC_IS_DCE4(rdev)) {
401 switch (pll_id) {
402 case ATOM_PPLL1:
403 ss_cntl = RREG32(EVERGREEN_P1PLL_SS_CNTL);
404 ss_cntl &= ~EVERGREEN_PxPLL_SS_EN;
405 WREG32(EVERGREEN_P1PLL_SS_CNTL, ss_cntl);
406 break;
407 case ATOM_PPLL2:
408 ss_cntl = RREG32(EVERGREEN_P2PLL_SS_CNTL);
409 ss_cntl &= ~EVERGREEN_PxPLL_SS_EN;
410 WREG32(EVERGREEN_P2PLL_SS_CNTL, ss_cntl);
411 break;
412 case ATOM_DCPLL:
413 case ATOM_PPLL_INVALID:
414 return;
415 }
416 } else if (ASIC_IS_AVIVO(rdev)) {
417 switch (pll_id) {
418 case ATOM_PPLL1:
419 ss_cntl = RREG32(AVIVO_P1PLL_INT_SS_CNTL);
420 ss_cntl &= ~1;
421 WREG32(AVIVO_P1PLL_INT_SS_CNTL, ss_cntl);
422 break;
423 case ATOM_PPLL2:
424 ss_cntl = RREG32(AVIVO_P2PLL_INT_SS_CNTL);
425 ss_cntl &= ~1;
426 WREG32(AVIVO_P2PLL_INT_SS_CNTL, ss_cntl);
427 break;
428 case ATOM_DCPLL:
429 case ATOM_PPLL_INVALID:
430 return;
431 }
432 }
433 }
434
435
436 union atom_enable_ss {
437 ENABLE_LVDS_SS_PARAMETERS lvds_ss;
438 ENABLE_LVDS_SS_PARAMETERS_V2 lvds_ss_2;
439 ENABLE_SPREAD_SPECTRUM_ON_PPLL_PS_ALLOCATION v1;
440 ENABLE_SPREAD_SPECTRUM_ON_PPLL_V2 v2;
441 ENABLE_SPREAD_SPECTRUM_ON_PPLL_V3 v3;
442 };
443
444 static void atombios_crtc_program_ss(struct radeon_device *rdev,
445 int enable,
446 int pll_id,
447 int crtc_id,
448 struct radeon_atom_ss *ss)
449 {
450 unsigned i;
451 int index = GetIndexIntoMasterTable(COMMAND, EnableSpreadSpectrumOnPPLL);
452 union atom_enable_ss args;
453
454 if (!enable) {
455 for (i = 0; i < 6; i++) {
456 if (rdev->mode_info.crtcs[i] &&
457 rdev->mode_info.crtcs[i]->enabled &&
458 i != crtc_id &&
459 pll_id == rdev->mode_info.crtcs[i]->pll_id) {
460 /* another crtc is using this pll; don't turn
461 * off spread spectrum, as it might turn off the
462 * display on the active crtc
463 */
464 return;
465 }
466 }
467 }
468
469 memset(&args, 0, sizeof(args));
470
471 if (ASIC_IS_DCE5(rdev)) {
472 args.v3.usSpreadSpectrumAmountFrac = cpu_to_le16(0);
473 args.v3.ucSpreadSpectrumType = ss->type & ATOM_SS_CENTRE_SPREAD_MODE_MASK;
474 switch (pll_id) {
475 case ATOM_PPLL1:
476 args.v3.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V3_P1PLL;
477 break;
478 case ATOM_PPLL2:
479 args.v3.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V3_P2PLL;
480 break;
481 case ATOM_DCPLL:
482 args.v3.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V3_DCPLL;
483 break;
484 case ATOM_PPLL_INVALID:
485 return;
486 }
487 args.v3.usSpreadSpectrumAmount = cpu_to_le16(ss->amount);
488 args.v3.usSpreadSpectrumStep = cpu_to_le16(ss->step);
489 args.v3.ucEnable = enable;
490 if ((ss->percentage == 0) || (ss->type & ATOM_EXTERNAL_SS_MASK) || ASIC_IS_DCE61(rdev))
491 args.v3.ucEnable = ATOM_DISABLE;
492 } else if (ASIC_IS_DCE4(rdev)) {
493 args.v2.usSpreadSpectrumPercentage = cpu_to_le16(ss->percentage);
494 args.v2.ucSpreadSpectrumType = ss->type & ATOM_SS_CENTRE_SPREAD_MODE_MASK;
495 switch (pll_id) {
496 case ATOM_PPLL1:
497 args.v2.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V2_P1PLL;
498 break;
499 case ATOM_PPLL2:
500 args.v2.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V2_P2PLL;
501 break;
502 case ATOM_DCPLL:
503 args.v2.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V2_DCPLL;
504 break;
505 case ATOM_PPLL_INVALID:
506 return;
507 }
508 args.v2.usSpreadSpectrumAmount = cpu_to_le16(ss->amount);
509 args.v2.usSpreadSpectrumStep = cpu_to_le16(ss->step);
510 args.v2.ucEnable = enable;
511 if ((ss->percentage == 0) || (ss->type & ATOM_EXTERNAL_SS_MASK) || ASIC_IS_DCE41(rdev))
512 args.v2.ucEnable = ATOM_DISABLE;
513 } else if (ASIC_IS_DCE3(rdev)) {
514 args.v1.usSpreadSpectrumPercentage = cpu_to_le16(ss->percentage);
515 args.v1.ucSpreadSpectrumType = ss->type & ATOM_SS_CENTRE_SPREAD_MODE_MASK;
516 args.v1.ucSpreadSpectrumStep = ss->step;
517 args.v1.ucSpreadSpectrumDelay = ss->delay;
518 args.v1.ucSpreadSpectrumRange = ss->range;
519 args.v1.ucPpll = pll_id;
520 args.v1.ucEnable = enable;
521 } else if (ASIC_IS_AVIVO(rdev)) {
522 if ((enable == ATOM_DISABLE) || (ss->percentage == 0) ||
523 (ss->type & ATOM_EXTERNAL_SS_MASK)) {
524 atombios_disable_ss(rdev, pll_id);
525 return;
526 }
527 args.lvds_ss_2.usSpreadSpectrumPercentage = cpu_to_le16(ss->percentage);
528 args.lvds_ss_2.ucSpreadSpectrumType = ss->type & ATOM_SS_CENTRE_SPREAD_MODE_MASK;
529 args.lvds_ss_2.ucSpreadSpectrumStep = ss->step;
530 args.lvds_ss_2.ucSpreadSpectrumDelay = ss->delay;
531 args.lvds_ss_2.ucSpreadSpectrumRange = ss->range;
532 args.lvds_ss_2.ucEnable = enable;
533 } else {
534 if ((enable == ATOM_DISABLE) || (ss->percentage == 0) ||
535 (ss->type & ATOM_EXTERNAL_SS_MASK)) {
536 atombios_disable_ss(rdev, pll_id);
537 return;
538 }
539 args.lvds_ss.usSpreadSpectrumPercentage = cpu_to_le16(ss->percentage);
540 args.lvds_ss.ucSpreadSpectrumType = ss->type & ATOM_SS_CENTRE_SPREAD_MODE_MASK;
541 args.lvds_ss.ucSpreadSpectrumStepSize_Delay = (ss->step & 3) << 2;
542 args.lvds_ss.ucSpreadSpectrumStepSize_Delay |= (ss->delay & 7) << 4;
543 args.lvds_ss.ucEnable = enable;
544 }
545 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
546 }
547
548 union adjust_pixel_clock {
549 ADJUST_DISPLAY_PLL_PS_ALLOCATION v1;
550 ADJUST_DISPLAY_PLL_PS_ALLOCATION_V3 v3;
551 };
552
553 static u32 atombios_adjust_pll(struct drm_crtc *crtc,
554 struct drm_display_mode *mode,
555 struct radeon_pll *pll,
556 bool ss_enabled,
557 struct radeon_atom_ss *ss)
558 {
559 struct drm_device *dev = crtc->dev;
560 struct radeon_device *rdev = dev->dev_private;
561 struct drm_encoder *encoder = NULL;
562 struct radeon_encoder *radeon_encoder = NULL;
563 struct drm_connector *connector = NULL;
564 u32 adjusted_clock = mode->clock;
565 int encoder_mode = 0;
566 u32 dp_clock = mode->clock;
567 int bpc = 8;
568 bool is_duallink = false;
569
570 /* reset the pll flags */
571 pll->flags = 0;
572
573 if (ASIC_IS_AVIVO(rdev)) {
574 if ((rdev->family == CHIP_RS600) ||
575 (rdev->family == CHIP_RS690) ||
576 (rdev->family == CHIP_RS740))
577 pll->flags |= (/*RADEON_PLL_USE_FRAC_FB_DIV |*/
578 RADEON_PLL_PREFER_CLOSEST_LOWER);
579
580 if (ASIC_IS_DCE32(rdev) && mode->clock > 200000) /* range limits??? */
581 pll->flags |= RADEON_PLL_PREFER_HIGH_FB_DIV;
582 else
583 pll->flags |= RADEON_PLL_PREFER_LOW_REF_DIV;
584
585 if (rdev->family < CHIP_RV770)
586 pll->flags |= RADEON_PLL_PREFER_MINM_OVER_MAXP;
587 /* use frac fb div on APUs */
588 if (ASIC_IS_DCE41(rdev) || ASIC_IS_DCE61(rdev))
589 pll->flags |= RADEON_PLL_USE_FRAC_FB_DIV;
590 } else {
591 pll->flags |= RADEON_PLL_LEGACY;
592
593 if (mode->clock > 200000) /* range limits??? */
594 pll->flags |= RADEON_PLL_PREFER_HIGH_FB_DIV;
595 else
596 pll->flags |= RADEON_PLL_PREFER_LOW_REF_DIV;
597 }
598
599 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
600 if (encoder->crtc == crtc) {
601 radeon_encoder = to_radeon_encoder(encoder);
602 connector = radeon_get_connector_for_encoder(encoder);
603 bpc = radeon_get_monitor_bpc(connector);
604 encoder_mode = atombios_get_encoder_mode(encoder);
605 is_duallink = radeon_dig_monitor_is_duallink(encoder, mode->clock);
606 if ((radeon_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT | ATOM_DEVICE_DFP_SUPPORT)) ||
607 (radeon_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)) {
608 if (connector) {
609 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
610 struct radeon_connector_atom_dig *dig_connector =
611 radeon_connector->con_priv;
612
613 dp_clock = dig_connector->dp_clock;
614 }
615 }
616
617 /* use recommended ref_div for ss */
618 if (radeon_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
619 if (ss_enabled) {
620 if (ss->refdiv) {
621 pll->flags |= RADEON_PLL_USE_REF_DIV;
622 pll->reference_div = ss->refdiv;
623 if (ASIC_IS_AVIVO(rdev))
624 pll->flags |= RADEON_PLL_USE_FRAC_FB_DIV;
625 }
626 }
627 }
628
629 if (ASIC_IS_AVIVO(rdev)) {
630 /* DVO wants 2x pixel clock if the DVO chip is in 12 bit mode */
631 if (radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1)
632 adjusted_clock = mode->clock * 2;
633 if (radeon_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
634 pll->flags |= RADEON_PLL_PREFER_CLOSEST_LOWER;
635 if (radeon_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
636 pll->flags |= RADEON_PLL_IS_LCD;
637 } else {
638 if (encoder->encoder_type != DRM_MODE_ENCODER_DAC)
639 pll->flags |= RADEON_PLL_NO_ODD_POST_DIV;
640 if (encoder->encoder_type == DRM_MODE_ENCODER_LVDS)
641 pll->flags |= RADEON_PLL_USE_REF_DIV;
642 }
643 break;
644 }
645 }
646
647 /* DCE3+ has an AdjustDisplayPll that will adjust the pixel clock
648 * as needed, based on the encoder/transmitter, to work around
649 * special hw requirements.
650 */
651 if (ASIC_IS_DCE3(rdev)) {
652 union adjust_pixel_clock args;
653 u8 frev, crev;
654 int index;
655
656 index = GetIndexIntoMasterTable(COMMAND, AdjustDisplayPll);
657 if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev,
658 &crev))
659 return adjusted_clock;
660
661 memset(&args, 0, sizeof(args));
662
663 switch (frev) {
664 case 1:
665 switch (crev) {
666 case 1:
667 case 2:
668 args.v1.usPixelClock = cpu_to_le16(mode->clock / 10);
669 args.v1.ucTransmitterID = radeon_encoder->encoder_id;
670 args.v1.ucEncodeMode = encoder_mode;
671 if (ss_enabled && ss->percentage)
672 args.v1.ucConfig |=
673 ADJUST_DISPLAY_CONFIG_SS_ENABLE;
674
675 atom_execute_table(rdev->mode_info.atom_context,
676 index, (uint32_t *)&args);
677 adjusted_clock = le16_to_cpu(args.v1.usPixelClock) * 10;
678 break;
679 case 3:
680 args.v3.sInput.usPixelClock = cpu_to_le16(mode->clock / 10);
681 args.v3.sInput.ucTransmitterID = radeon_encoder->encoder_id;
682 args.v3.sInput.ucEncodeMode = encoder_mode;
683 args.v3.sInput.ucDispPllConfig = 0;
684 if (ss_enabled && ss->percentage)
685 args.v3.sInput.ucDispPllConfig |=
686 DISPPLL_CONFIG_SS_ENABLE;
687 if (ENCODER_MODE_IS_DP(encoder_mode)) {
688 args.v3.sInput.ucDispPllConfig |=
689 DISPPLL_CONFIG_COHERENT_MODE;
690 /* 16200 or 27000 */
691 args.v3.sInput.usPixelClock = cpu_to_le16(dp_clock / 10);
692 } else if (radeon_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
693 struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;
694 if (encoder_mode == ATOM_ENCODER_MODE_HDMI)
695 /* deep color support */
696 args.v3.sInput.usPixelClock =
697 cpu_to_le16((mode->clock * bpc / 8) / 10);
698 if (dig->coherent_mode)
699 args.v3.sInput.ucDispPllConfig |=
700 DISPPLL_CONFIG_COHERENT_MODE;
701 if (is_duallink)
702 args.v3.sInput.ucDispPllConfig |=
703 DISPPLL_CONFIG_DUAL_LINK;
704 }
705 if (radeon_encoder_get_dp_bridge_encoder_id(encoder) !=
706 ENCODER_OBJECT_ID_NONE)
707 args.v3.sInput.ucExtTransmitterID =
708 radeon_encoder_get_dp_bridge_encoder_id(encoder);
709 else
710 args.v3.sInput.ucExtTransmitterID = 0;
711
712 atom_execute_table(rdev->mode_info.atom_context,
713 index, (uint32_t *)&args);
714 adjusted_clock = le32_to_cpu(args.v3.sOutput.ulDispPllFreq) * 10;
715 if (args.v3.sOutput.ucRefDiv) {
716 pll->flags |= RADEON_PLL_USE_FRAC_FB_DIV;
717 pll->flags |= RADEON_PLL_USE_REF_DIV;
718 pll->reference_div = args.v3.sOutput.ucRefDiv;
719 }
720 if (args.v3.sOutput.ucPostDiv) {
721 pll->flags |= RADEON_PLL_USE_FRAC_FB_DIV;
722 pll->flags |= RADEON_PLL_USE_POST_DIV;
723 pll->post_div = args.v3.sOutput.ucPostDiv;
724 }
725 break;
726 default:
727 DRM_ERROR("Unknown table version %d %d\n", frev, crev);
728 return adjusted_clock;
729 }
730 break;
731 default:
732 DRM_ERROR("Unknown table version %d %d\n", frev, crev);
733 return adjusted_clock;
734 }
735 }
736 return adjusted_clock;
737 }
738
739 union set_pixel_clock {
740 SET_PIXEL_CLOCK_PS_ALLOCATION base;
741 PIXEL_CLOCK_PARAMETERS v1;
742 PIXEL_CLOCK_PARAMETERS_V2 v2;
743 PIXEL_CLOCK_PARAMETERS_V3 v3;
744 PIXEL_CLOCK_PARAMETERS_V5 v5;
745 PIXEL_CLOCK_PARAMETERS_V6 v6;
746 };
747
748 /* on DCE5, make sure the voltage is high enough to support the
749 * required disp clk.
750 */
751 static void atombios_crtc_set_disp_eng_pll(struct radeon_device *rdev,
752 u32 dispclk)
753 {
754 u8 frev, crev;
755 int index;
756 union set_pixel_clock args;
757
758 memset(&args, 0, sizeof(args));
759
760 index = GetIndexIntoMasterTable(COMMAND, SetPixelClock);
761 if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev,
762 &crev))
763 return;
764
765 switch (frev) {
766 case 1:
767 switch (crev) {
768 case 5:
769 /* if the default dcpll clock is specified,
770 * SetPixelClock provides the dividers
771 */
772 args.v5.ucCRTC = ATOM_CRTC_INVALID;
773 args.v5.usPixelClock = cpu_to_le16(dispclk);
774 args.v5.ucPpll = ATOM_DCPLL;
775 break;
776 case 6:
777 /* if the default dcpll clock is specified,
778 * SetPixelClock provides the dividers
779 */
780 args.v6.ulDispEngClkFreq = cpu_to_le32(dispclk);
781 if (ASIC_IS_DCE61(rdev))
782 args.v6.ucPpll = ATOM_EXT_PLL1;
783 else if (ASIC_IS_DCE6(rdev))
784 args.v6.ucPpll = ATOM_PPLL0;
785 else
786 args.v6.ucPpll = ATOM_DCPLL;
787 break;
788 default:
789 DRM_ERROR("Unknown table version %d %d\n", frev, crev);
790 return;
791 }
792 break;
793 default:
794 DRM_ERROR("Unknown table version %d %d\n", frev, crev);
795 return;
796 }
797 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
798 }
799
800 static void atombios_crtc_program_pll(struct drm_crtc *crtc,
801 u32 crtc_id,
802 int pll_id,
803 u32 encoder_mode,
804 u32 encoder_id,
805 u32 clock,
806 u32 ref_div,
807 u32 fb_div,
808 u32 frac_fb_div,
809 u32 post_div,
810 int bpc,
811 bool ss_enabled,
812 struct radeon_atom_ss *ss)
813 {
814 struct drm_device *dev = crtc->dev;
815 struct radeon_device *rdev = dev->dev_private;
816 u8 frev, crev;
817 int index = GetIndexIntoMasterTable(COMMAND, SetPixelClock);
818 union set_pixel_clock args;
819
820 memset(&args, 0, sizeof(args));
821
822 if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev,
823 &crev))
824 return;
825
826 switch (frev) {
827 case 1:
828 switch (crev) {
829 case 1:
830 if (clock == ATOM_DISABLE)
831 return;
832 args.v1.usPixelClock = cpu_to_le16(clock / 10);
833 args.v1.usRefDiv = cpu_to_le16(ref_div);
834 args.v1.usFbDiv = cpu_to_le16(fb_div);
835 args.v1.ucFracFbDiv = frac_fb_div;
836 args.v1.ucPostDiv = post_div;
837 args.v1.ucPpll = pll_id;
838 args.v1.ucCRTC = crtc_id;
839 args.v1.ucRefDivSrc = 1;
840 break;
841 case 2:
842 args.v2.usPixelClock = cpu_to_le16(clock / 10);
843 args.v2.usRefDiv = cpu_to_le16(ref_div);
844 args.v2.usFbDiv = cpu_to_le16(fb_div);
845 args.v2.ucFracFbDiv = frac_fb_div;
846 args.v2.ucPostDiv = post_div;
847 args.v2.ucPpll = pll_id;
848 args.v2.ucCRTC = crtc_id;
849 args.v2.ucRefDivSrc = 1;
850 break;
851 case 3:
852 args.v3.usPixelClock = cpu_to_le16(clock / 10);
853 args.v3.usRefDiv = cpu_to_le16(ref_div);
854 args.v3.usFbDiv = cpu_to_le16(fb_div);
855 args.v3.ucFracFbDiv = frac_fb_div;
856 args.v3.ucPostDiv = post_div;
857 args.v3.ucPpll = pll_id;
858 args.v3.ucMiscInfo = (pll_id << 2);
859 if (ss_enabled && (ss->type & ATOM_EXTERNAL_SS_MASK))
860 args.v3.ucMiscInfo |= PIXEL_CLOCK_MISC_REF_DIV_SRC;
861 args.v3.ucTransmitterId = encoder_id;
862 args.v3.ucEncoderMode = encoder_mode;
863 break;
864 case 5:
865 args.v5.ucCRTC = crtc_id;
866 args.v5.usPixelClock = cpu_to_le16(clock / 10);
867 args.v5.ucRefDiv = ref_div;
868 args.v5.usFbDiv = cpu_to_le16(fb_div);
869 args.v5.ulFbDivDecFrac = cpu_to_le32(frac_fb_div * 100000);
870 args.v5.ucPostDiv = post_div;
871 args.v5.ucMiscInfo = 0; /* HDMI depth, etc. */
872 if (ss_enabled && (ss->type & ATOM_EXTERNAL_SS_MASK))
873 args.v5.ucMiscInfo |= PIXEL_CLOCK_V5_MISC_REF_DIV_SRC;
874 switch (bpc) {
875 case 8:
876 default:
877 args.v5.ucMiscInfo |= PIXEL_CLOCK_V5_MISC_HDMI_24BPP;
878 break;
879 case 10:
880 args.v5.ucMiscInfo |= PIXEL_CLOCK_V5_MISC_HDMI_30BPP;
881 break;
882 }
883 args.v5.ucTransmitterID = encoder_id;
884 args.v5.ucEncoderMode = encoder_mode;
885 args.v5.ucPpll = pll_id;
886 break;
887 case 6:
888 args.v6.ulDispEngClkFreq = cpu_to_le32(crtc_id << 24 | clock / 10);
889 args.v6.ucRefDiv = ref_div;
890 args.v6.usFbDiv = cpu_to_le16(fb_div);
891 args.v6.ulFbDivDecFrac = cpu_to_le32(frac_fb_div * 100000);
892 args.v6.ucPostDiv = post_div;
893 args.v6.ucMiscInfo = 0; /* HDMI depth, etc. */
894 if (ss_enabled && (ss->type & ATOM_EXTERNAL_SS_MASK))
895 args.v6.ucMiscInfo |= PIXEL_CLOCK_V6_MISC_REF_DIV_SRC;
896 switch (bpc) {
897 case 8:
898 default:
899 args.v6.ucMiscInfo |= PIXEL_CLOCK_V6_MISC_HDMI_24BPP;
900 break;
901 case 10:
902 args.v6.ucMiscInfo |= PIXEL_CLOCK_V6_MISC_HDMI_30BPP;
903 break;
904 case 12:
905 args.v6.ucMiscInfo |= PIXEL_CLOCK_V6_MISC_HDMI_36BPP;
906 break;
907 case 16:
908 args.v6.ucMiscInfo |= PIXEL_CLOCK_V6_MISC_HDMI_48BPP;
909 break;
910 }
911 args.v6.ucTransmitterID = encoder_id;
912 args.v6.ucEncoderMode = encoder_mode;
913 args.v6.ucPpll = pll_id;
914 break;
915 default:
916 DRM_ERROR("Unknown table version %d %d\n", frev, crev);
917 return;
918 }
919 break;
920 default:
921 DRM_ERROR("Unknown table version %d %d\n", frev, crev);
922 return;
923 }
924
925 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
926 }
927
928 static void atombios_crtc_set_pll(struct drm_crtc *crtc, struct drm_display_mode *mode)
929 {
930 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
931 struct drm_device *dev = crtc->dev;
932 struct radeon_device *rdev = dev->dev_private;
933 struct drm_encoder *encoder = NULL;
934 struct radeon_encoder *radeon_encoder = NULL;
935 u32 pll_clock = mode->clock;
936 u32 ref_div = 0, fb_div = 0, frac_fb_div = 0, post_div = 0;
937 struct radeon_pll *pll;
938 u32 adjusted_clock;
939 int encoder_mode = 0;
940 struct radeon_atom_ss ss;
941 bool ss_enabled = false;
942 int bpc = 8;
943
944 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
945 if (encoder->crtc == crtc) {
946 radeon_encoder = to_radeon_encoder(encoder);
947 encoder_mode = atombios_get_encoder_mode(encoder);
948 break;
949 }
950 }
951
952 if (!radeon_encoder)
953 return;
954
955 switch (radeon_crtc->pll_id) {
956 case ATOM_PPLL1:
957 pll = &rdev->clock.p1pll;
958 break;
959 case ATOM_PPLL2:
960 pll = &rdev->clock.p2pll;
961 break;
962 case ATOM_DCPLL:
963 case ATOM_PPLL_INVALID:
964 default:
965 pll = &rdev->clock.dcpll;
966 break;
967 }
968
969 if ((radeon_encoder->active_device & (ATOM_DEVICE_LCD_SUPPORT | ATOM_DEVICE_DFP_SUPPORT)) ||
970 (radeon_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)) {
971 struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;
972 struct drm_connector *connector =
973 radeon_get_connector_for_encoder(encoder);
974 struct radeon_connector *radeon_connector =
975 to_radeon_connector(connector);
976 struct radeon_connector_atom_dig *dig_connector =
977 radeon_connector->con_priv;
978 int dp_clock;
979 bpc = radeon_get_monitor_bpc(connector);
980
981 switch (encoder_mode) {
982 case ATOM_ENCODER_MODE_DP_MST:
983 case ATOM_ENCODER_MODE_DP:
984 /* DP/eDP */
985 dp_clock = dig_connector->dp_clock / 10;
986 if (ASIC_IS_DCE4(rdev))
987 ss_enabled =
988 radeon_atombios_get_asic_ss_info(rdev, &ss,
989 ASIC_INTERNAL_SS_ON_DP,
990 dp_clock);
991 else {
992 if (dp_clock == 16200) {
993 ss_enabled =
994 radeon_atombios_get_ppll_ss_info(rdev, &ss,
995 ATOM_DP_SS_ID2);
996 if (!ss_enabled)
997 ss_enabled =
998 radeon_atombios_get_ppll_ss_info(rdev, &ss,
999 ATOM_DP_SS_ID1);
1000 } else
1001 ss_enabled =
1002 radeon_atombios_get_ppll_ss_info(rdev, &ss,
1003 ATOM_DP_SS_ID1);
1004 }
1005 break;
1006 case ATOM_ENCODER_MODE_LVDS:
1007 if (ASIC_IS_DCE4(rdev))
1008 ss_enabled = radeon_atombios_get_asic_ss_info(rdev, &ss,
1009 dig->lcd_ss_id,
1010 mode->clock / 10);
1011 else
1012 ss_enabled = radeon_atombios_get_ppll_ss_info(rdev, &ss,
1013 dig->lcd_ss_id);
1014 break;
1015 case ATOM_ENCODER_MODE_DVI:
1016 if (ASIC_IS_DCE4(rdev))
1017 ss_enabled =
1018 radeon_atombios_get_asic_ss_info(rdev, &ss,
1019 ASIC_INTERNAL_SS_ON_TMDS,
1020 mode->clock / 10);
1021 break;
1022 case ATOM_ENCODER_MODE_HDMI:
1023 if (ASIC_IS_DCE4(rdev))
1024 ss_enabled =
1025 radeon_atombios_get_asic_ss_info(rdev, &ss,
1026 ASIC_INTERNAL_SS_ON_HDMI,
1027 mode->clock / 10);
1028 break;
1029 default:
1030 break;
1031 }
1032 }
1033
1034 /* adjust pixel clock as needed */
1035 adjusted_clock = atombios_adjust_pll(crtc, mode, pll, ss_enabled, &ss);
1036
1037 if (radeon_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1038 /* TV seems to prefer the legacy algo on some boards */
1039 radeon_compute_pll_legacy(pll, adjusted_clock, &pll_clock, &fb_div, &frac_fb_div,
1040 &ref_div, &post_div);
1041 else if (ASIC_IS_AVIVO(rdev))
1042 radeon_compute_pll_avivo(pll, adjusted_clock, &pll_clock, &fb_div, &frac_fb_div,
1043 &ref_div, &post_div);
1044 else
1045 radeon_compute_pll_legacy(pll, adjusted_clock, &pll_clock, &fb_div, &frac_fb_div,
1046 &ref_div, &post_div);
1047
1048 atombios_crtc_program_ss(rdev, ATOM_DISABLE, radeon_crtc->pll_id, radeon_crtc->crtc_id, &ss);
1049
1050 atombios_crtc_program_pll(crtc, radeon_crtc->crtc_id, radeon_crtc->pll_id,
1051 encoder_mode, radeon_encoder->encoder_id, mode->clock,
1052 ref_div, fb_div, frac_fb_div, post_div, bpc, ss_enabled, &ss);
1053
1054 if (ss_enabled) {
1055 /* calculate ss amount and step size */
1056 if (ASIC_IS_DCE4(rdev)) {
1057 u32 step_size;
1058 u32 amount = (((fb_div * 10) + frac_fb_div) * ss.percentage) / 10000;
1059 ss.amount = (amount / 10) & ATOM_PPLL_SS_AMOUNT_V2_FBDIV_MASK;
1060 ss.amount |= ((amount - (amount / 10)) << ATOM_PPLL_SS_AMOUNT_V2_NFRAC_SHIFT) &
1061 ATOM_PPLL_SS_AMOUNT_V2_NFRAC_MASK;
1062 if (ss.type & ATOM_PPLL_SS_TYPE_V2_CENTRE_SPREAD)
1063 step_size = (4 * amount * ref_div * (ss.rate * 2048)) /
1064 (125 * 25 * pll->reference_freq / 100);
1065 else
1066 step_size = (2 * amount * ref_div * (ss.rate * 2048)) /
1067 (125 * 25 * pll->reference_freq / 100);
1068 ss.step = step_size;
1069 }
1070
1071 atombios_crtc_program_ss(rdev, ATOM_ENABLE, radeon_crtc->pll_id, radeon_crtc->crtc_id, &ss);
1072 }
1073 }
1074
1075 static int dce4_crtc_do_set_base(struct drm_crtc *crtc,
1076 struct drm_framebuffer *fb,
1077 int x, int y, int atomic)
1078 {
1079 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1080 struct drm_device *dev = crtc->dev;
1081 struct radeon_device *rdev = dev->dev_private;
1082 struct radeon_framebuffer *radeon_fb;
1083 struct drm_framebuffer *target_fb;
1084 struct drm_gem_object *obj;
1085 struct radeon_bo *rbo;
1086 uint64_t fb_location;
1087 uint32_t fb_format, fb_pitch_pixels, tiling_flags;
1088 unsigned bankw, bankh, mtaspect, tile_split;
1089 u32 fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_NONE);
1090 u32 tmp, viewport_w, viewport_h;
1091 int r;
1092
1093 /* no fb bound */
1094 if (!atomic && !crtc->fb) {
1095 DRM_DEBUG_KMS("No FB bound\n");
1096 return 0;
1097 }
1098
1099 if (atomic) {
1100 radeon_fb = to_radeon_framebuffer(fb);
1101 target_fb = fb;
1102 }
1103 else {
1104 radeon_fb = to_radeon_framebuffer(crtc->fb);
1105 target_fb = crtc->fb;
1106 }
1107
1108 /* If atomic, assume fb object is pinned & idle & fenced and
1109 * just update base pointers
1110 */
1111 obj = radeon_fb->obj;
1112 rbo = gem_to_radeon_bo(obj);
1113 r = radeon_bo_reserve(rbo, false);
1114 if (unlikely(r != 0))
1115 return r;
1116
1117 if (atomic)
1118 fb_location = radeon_bo_gpu_offset(rbo);
1119 else {
1120 r = radeon_bo_pin(rbo, RADEON_GEM_DOMAIN_VRAM, &fb_location);
1121 if (unlikely(r != 0)) {
1122 radeon_bo_unreserve(rbo);
1123 return -EINVAL;
1124 }
1125 }
1126
1127 radeon_bo_get_tiling_flags(rbo, &tiling_flags, NULL);
1128 radeon_bo_unreserve(rbo);
1129
1130 switch (target_fb->bits_per_pixel) {
1131 case 8:
1132 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_8BPP) |
1133 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_INDEXED));
1134 break;
1135 case 15:
1136 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_16BPP) |
1137 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB1555));
1138 break;
1139 case 16:
1140 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_16BPP) |
1141 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB565));
1142 #ifdef __BIG_ENDIAN
1143 fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_8IN16);
1144 #endif
1145 break;
1146 case 24:
1147 case 32:
1148 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_32BPP) |
1149 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB8888));
1150 #ifdef __BIG_ENDIAN
1151 fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_8IN32);
1152 #endif
1153 break;
1154 default:
1155 DRM_ERROR("Unsupported screen depth %d\n",
1156 target_fb->bits_per_pixel);
1157 return -EINVAL;
1158 }
1159
1160 if (tiling_flags & RADEON_TILING_MACRO) {
1161 if (rdev->family >= CHIP_TAHITI)
1162 tmp = rdev->config.si.tile_config;
1163 else if (rdev->family >= CHIP_CAYMAN)
1164 tmp = rdev->config.cayman.tile_config;
1165 else
1166 tmp = rdev->config.evergreen.tile_config;
1167
1168 switch ((tmp & 0xf0) >> 4) {
1169 case 0: /* 4 banks */
1170 fb_format |= EVERGREEN_GRPH_NUM_BANKS(EVERGREEN_ADDR_SURF_4_BANK);
1171 break;
1172 case 1: /* 8 banks */
1173 default:
1174 fb_format |= EVERGREEN_GRPH_NUM_BANKS(EVERGREEN_ADDR_SURF_8_BANK);
1175 break;
1176 case 2: /* 16 banks */
1177 fb_format |= EVERGREEN_GRPH_NUM_BANKS(EVERGREEN_ADDR_SURF_16_BANK);
1178 break;
1179 }
1180
1181 fb_format |= EVERGREEN_GRPH_ARRAY_MODE(EVERGREEN_GRPH_ARRAY_2D_TILED_THIN1);
1182
1183 evergreen_tiling_fields(tiling_flags, &bankw, &bankh, &mtaspect, &tile_split);
1184 fb_format |= EVERGREEN_GRPH_TILE_SPLIT(tile_split);
1185 fb_format |= EVERGREEN_GRPH_BANK_WIDTH(bankw);
1186 fb_format |= EVERGREEN_GRPH_BANK_HEIGHT(bankh);
1187 fb_format |= EVERGREEN_GRPH_MACRO_TILE_ASPECT(mtaspect);
1188 } else if (tiling_flags & RADEON_TILING_MICRO)
1189 fb_format |= EVERGREEN_GRPH_ARRAY_MODE(EVERGREEN_GRPH_ARRAY_1D_TILED_THIN1);
1190
1191 if ((rdev->family == CHIP_TAHITI) ||
1192 (rdev->family == CHIP_PITCAIRN))
1193 fb_format |= SI_GRPH_PIPE_CONFIG(SI_ADDR_SURF_P8_32x32_8x16);
1194 else if (rdev->family == CHIP_VERDE)
1195 fb_format |= SI_GRPH_PIPE_CONFIG(SI_ADDR_SURF_P4_8x16);
1196
1197 switch (radeon_crtc->crtc_id) {
1198 case 0:
1199 WREG32(AVIVO_D1VGA_CONTROL, 0);
1200 break;
1201 case 1:
1202 WREG32(AVIVO_D2VGA_CONTROL, 0);
1203 break;
1204 case 2:
1205 WREG32(EVERGREEN_D3VGA_CONTROL, 0);
1206 break;
1207 case 3:
1208 WREG32(EVERGREEN_D4VGA_CONTROL, 0);
1209 break;
1210 case 4:
1211 WREG32(EVERGREEN_D5VGA_CONTROL, 0);
1212 break;
1213 case 5:
1214 WREG32(EVERGREEN_D6VGA_CONTROL, 0);
1215 break;
1216 default:
1217 break;
1218 }
1219
1220 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
1221 upper_32_bits(fb_location));
1222 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
1223 upper_32_bits(fb_location));
1224 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1225 (u32)fb_location & EVERGREEN_GRPH_SURFACE_ADDRESS_MASK);
1226 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1227 (u32) fb_location & EVERGREEN_GRPH_SURFACE_ADDRESS_MASK);
1228 WREG32(EVERGREEN_GRPH_CONTROL + radeon_crtc->crtc_offset, fb_format);
1229 WREG32(EVERGREEN_GRPH_SWAP_CONTROL + radeon_crtc->crtc_offset, fb_swap);
1230
1231 WREG32(EVERGREEN_GRPH_SURFACE_OFFSET_X + radeon_crtc->crtc_offset, 0);
1232 WREG32(EVERGREEN_GRPH_SURFACE_OFFSET_Y + radeon_crtc->crtc_offset, 0);
1233 WREG32(EVERGREEN_GRPH_X_START + radeon_crtc->crtc_offset, 0);
1234 WREG32(EVERGREEN_GRPH_Y_START + radeon_crtc->crtc_offset, 0);
1235 WREG32(EVERGREEN_GRPH_X_END + radeon_crtc->crtc_offset, target_fb->width);
1236 WREG32(EVERGREEN_GRPH_Y_END + radeon_crtc->crtc_offset, target_fb->height);
1237
1238 fb_pitch_pixels = target_fb->pitches[0] / (target_fb->bits_per_pixel / 8);
1239 WREG32(EVERGREEN_GRPH_PITCH + radeon_crtc->crtc_offset, fb_pitch_pixels);
1240 WREG32(EVERGREEN_GRPH_ENABLE + radeon_crtc->crtc_offset, 1);
1241
1242 WREG32(EVERGREEN_DESKTOP_HEIGHT + radeon_crtc->crtc_offset,
1243 target_fb->height);
1244 x &= ~3;
1245 y &= ~1;
1246 WREG32(EVERGREEN_VIEWPORT_START + radeon_crtc->crtc_offset,
1247 (x << 16) | y);
1248 viewport_w = crtc->mode.hdisplay;
1249 viewport_h = (crtc->mode.vdisplay + 1) & ~1;
1250 WREG32(EVERGREEN_VIEWPORT_SIZE + radeon_crtc->crtc_offset,
1251 (viewport_w << 16) | viewport_h);
1252
1253 /* pageflip setup */
1254 /* make sure flip is at vb rather than hb */
1255 tmp = RREG32(EVERGREEN_GRPH_FLIP_CONTROL + radeon_crtc->crtc_offset);
1256 tmp &= ~EVERGREEN_GRPH_SURFACE_UPDATE_H_RETRACE_EN;
1257 WREG32(EVERGREEN_GRPH_FLIP_CONTROL + radeon_crtc->crtc_offset, tmp);
1258
1259 /* set pageflip to happen anywhere in vblank interval */
1260 WREG32(EVERGREEN_MASTER_UPDATE_MODE + radeon_crtc->crtc_offset, 0);
1261
1262 if (!atomic && fb && fb != crtc->fb) {
1263 radeon_fb = to_radeon_framebuffer(fb);
1264 rbo = gem_to_radeon_bo(radeon_fb->obj);
1265 r = radeon_bo_reserve(rbo, false);
1266 if (unlikely(r != 0))
1267 return r;
1268 radeon_bo_unpin(rbo);
1269 radeon_bo_unreserve(rbo);
1270 }
1271
1272 /* Bytes per pixel may have changed */
1273 radeon_bandwidth_update(rdev);
1274
1275 return 0;
1276 }
1277
1278 static int avivo_crtc_do_set_base(struct drm_crtc *crtc,
1279 struct drm_framebuffer *fb,
1280 int x, int y, int atomic)
1281 {
1282 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1283 struct drm_device *dev = crtc->dev;
1284 struct radeon_device *rdev = dev->dev_private;
1285 struct radeon_framebuffer *radeon_fb;
1286 struct drm_gem_object *obj;
1287 struct radeon_bo *rbo;
1288 struct drm_framebuffer *target_fb;
1289 uint64_t fb_location;
1290 uint32_t fb_format, fb_pitch_pixels, tiling_flags;
1291 u32 fb_swap = R600_D1GRPH_SWAP_ENDIAN_NONE;
1292 u32 tmp, viewport_w, viewport_h;
1293 int r;
1294
1295 /* no fb bound */
1296 if (!atomic && !crtc->fb) {
1297 DRM_DEBUG_KMS("No FB bound\n");
1298 return 0;
1299 }
1300
1301 if (atomic) {
1302 radeon_fb = to_radeon_framebuffer(fb);
1303 target_fb = fb;
1304 }
1305 else {
1306 radeon_fb = to_radeon_framebuffer(crtc->fb);
1307 target_fb = crtc->fb;
1308 }
1309
1310 obj = radeon_fb->obj;
1311 rbo = gem_to_radeon_bo(obj);
1312 r = radeon_bo_reserve(rbo, false);
1313 if (unlikely(r != 0))
1314 return r;
1315
1316 /* If atomic, assume fb object is pinned & idle & fenced and
1317 * just update base pointers
1318 */
1319 if (atomic)
1320 fb_location = radeon_bo_gpu_offset(rbo);
1321 else {
1322 r = radeon_bo_pin(rbo, RADEON_GEM_DOMAIN_VRAM, &fb_location);
1323 if (unlikely(r != 0)) {
1324 radeon_bo_unreserve(rbo);
1325 return -EINVAL;
1326 }
1327 }
1328 radeon_bo_get_tiling_flags(rbo, &tiling_flags, NULL);
1329 radeon_bo_unreserve(rbo);
1330
1331 switch (target_fb->bits_per_pixel) {
1332 case 8:
1333 fb_format =
1334 AVIVO_D1GRPH_CONTROL_DEPTH_8BPP |
1335 AVIVO_D1GRPH_CONTROL_8BPP_INDEXED;
1336 break;
1337 case 15:
1338 fb_format =
1339 AVIVO_D1GRPH_CONTROL_DEPTH_16BPP |
1340 AVIVO_D1GRPH_CONTROL_16BPP_ARGB1555;
1341 break;
1342 case 16:
1343 fb_format =
1344 AVIVO_D1GRPH_CONTROL_DEPTH_16BPP |
1345 AVIVO_D1GRPH_CONTROL_16BPP_RGB565;
1346 #ifdef __BIG_ENDIAN
1347 fb_swap = R600_D1GRPH_SWAP_ENDIAN_16BIT;
1348 #endif
1349 break;
1350 case 24:
1351 case 32:
1352 fb_format =
1353 AVIVO_D1GRPH_CONTROL_DEPTH_32BPP |
1354 AVIVO_D1GRPH_CONTROL_32BPP_ARGB8888;
1355 #ifdef __BIG_ENDIAN
1356 fb_swap = R600_D1GRPH_SWAP_ENDIAN_32BIT;
1357 #endif
1358 break;
1359 default:
1360 DRM_ERROR("Unsupported screen depth %d\n",
1361 target_fb->bits_per_pixel);
1362 return -EINVAL;
1363 }
1364
1365 if (rdev->family >= CHIP_R600) {
1366 if (tiling_flags & RADEON_TILING_MACRO)
1367 fb_format |= R600_D1GRPH_ARRAY_MODE_2D_TILED_THIN1;
1368 else if (tiling_flags & RADEON_TILING_MICRO)
1369 fb_format |= R600_D1GRPH_ARRAY_MODE_1D_TILED_THIN1;
1370 } else {
1371 if (tiling_flags & RADEON_TILING_MACRO)
1372 fb_format |= AVIVO_D1GRPH_MACRO_ADDRESS_MODE;
1373
1374 if (tiling_flags & RADEON_TILING_MICRO)
1375 fb_format |= AVIVO_D1GRPH_TILED;
1376 }
1377
1378 if (radeon_crtc->crtc_id == 0)
1379 WREG32(AVIVO_D1VGA_CONTROL, 0);
1380 else
1381 WREG32(AVIVO_D2VGA_CONTROL, 0);
1382
1383 if (rdev->family >= CHIP_RV770) {
1384 if (radeon_crtc->crtc_id) {
1385 WREG32(R700_D2GRPH_PRIMARY_SURFACE_ADDRESS_HIGH, upper_32_bits(fb_location));
1386 WREG32(R700_D2GRPH_SECONDARY_SURFACE_ADDRESS_HIGH, upper_32_bits(fb_location));
1387 } else {
1388 WREG32(R700_D1GRPH_PRIMARY_SURFACE_ADDRESS_HIGH, upper_32_bits(fb_location));
1389 WREG32(R700_D1GRPH_SECONDARY_SURFACE_ADDRESS_HIGH, upper_32_bits(fb_location));
1390 }
1391 }
1392 WREG32(AVIVO_D1GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1393 (u32) fb_location);
1394 WREG32(AVIVO_D1GRPH_SECONDARY_SURFACE_ADDRESS +
1395 radeon_crtc->crtc_offset, (u32) fb_location);
1396 WREG32(AVIVO_D1GRPH_CONTROL + radeon_crtc->crtc_offset, fb_format);
1397 if (rdev->family >= CHIP_R600)
1398 WREG32(R600_D1GRPH_SWAP_CONTROL + radeon_crtc->crtc_offset, fb_swap);
1399
1400 WREG32(AVIVO_D1GRPH_SURFACE_OFFSET_X + radeon_crtc->crtc_offset, 0);
1401 WREG32(AVIVO_D1GRPH_SURFACE_OFFSET_Y + radeon_crtc->crtc_offset, 0);
1402 WREG32(AVIVO_D1GRPH_X_START + radeon_crtc->crtc_offset, 0);
1403 WREG32(AVIVO_D1GRPH_Y_START + radeon_crtc->crtc_offset, 0);
1404 WREG32(AVIVO_D1GRPH_X_END + radeon_crtc->crtc_offset, target_fb->width);
1405 WREG32(AVIVO_D1GRPH_Y_END + radeon_crtc->crtc_offset, target_fb->height);
1406
1407 fb_pitch_pixels = target_fb->pitches[0] / (target_fb->bits_per_pixel / 8);
1408 WREG32(AVIVO_D1GRPH_PITCH + radeon_crtc->crtc_offset, fb_pitch_pixels);
1409 WREG32(AVIVO_D1GRPH_ENABLE + radeon_crtc->crtc_offset, 1);
1410
1411 WREG32(AVIVO_D1MODE_DESKTOP_HEIGHT + radeon_crtc->crtc_offset,
1412 target_fb->height);
1413 x &= ~3;
1414 y &= ~1;
1415 WREG32(AVIVO_D1MODE_VIEWPORT_START + radeon_crtc->crtc_offset,
1416 (x << 16) | y);
1417 viewport_w = crtc->mode.hdisplay;
1418 viewport_h = (crtc->mode.vdisplay + 1) & ~1;
1419 WREG32(AVIVO_D1MODE_VIEWPORT_SIZE + radeon_crtc->crtc_offset,
1420 (viewport_w << 16) | viewport_h);
1421
1422 /* pageflip setup */
1423 /* make sure flip is at vb rather than hb */
1424 tmp = RREG32(AVIVO_D1GRPH_FLIP_CONTROL + radeon_crtc->crtc_offset);
1425 tmp &= ~AVIVO_D1GRPH_SURFACE_UPDATE_H_RETRACE_EN;
1426 WREG32(AVIVO_D1GRPH_FLIP_CONTROL + radeon_crtc->crtc_offset, tmp);
1427
1428 /* set pageflip to happen anywhere in vblank interval */
1429 WREG32(AVIVO_D1MODE_MASTER_UPDATE_MODE + radeon_crtc->crtc_offset, 0);
1430
1431 if (!atomic && fb && fb != crtc->fb) {
1432 radeon_fb = to_radeon_framebuffer(fb);
1433 rbo = gem_to_radeon_bo(radeon_fb->obj);
1434 r = radeon_bo_reserve(rbo, false);
1435 if (unlikely(r != 0))
1436 return r;
1437 radeon_bo_unpin(rbo);
1438 radeon_bo_unreserve(rbo);
1439 }
1440
1441 /* Bytes per pixel may have changed */
1442 radeon_bandwidth_update(rdev);
1443
1444 return 0;
1445 }
1446
1447 int atombios_crtc_set_base(struct drm_crtc *crtc, int x, int y,
1448 struct drm_framebuffer *old_fb)
1449 {
1450 struct drm_device *dev = crtc->dev;
1451 struct radeon_device *rdev = dev->dev_private;
1452
1453 if (ASIC_IS_DCE4(rdev))
1454 return dce4_crtc_do_set_base(crtc, old_fb, x, y, 0);
1455 else if (ASIC_IS_AVIVO(rdev))
1456 return avivo_crtc_do_set_base(crtc, old_fb, x, y, 0);
1457 else
1458 return radeon_crtc_do_set_base(crtc, old_fb, x, y, 0);
1459 }
1460
1461 int atombios_crtc_set_base_atomic(struct drm_crtc *crtc,
1462 struct drm_framebuffer *fb,
1463 int x, int y, enum mode_set_atomic state)
1464 {
1465 struct drm_device *dev = crtc->dev;
1466 struct radeon_device *rdev = dev->dev_private;
1467
1468 if (ASIC_IS_DCE4(rdev))
1469 return dce4_crtc_do_set_base(crtc, fb, x, y, 1);
1470 else if (ASIC_IS_AVIVO(rdev))
1471 return avivo_crtc_do_set_base(crtc, fb, x, y, 1);
1472 else
1473 return radeon_crtc_do_set_base(crtc, fb, x, y, 1);
1474 }
1475
1476 /* properly set additional regs when using atombios */
1477 static void radeon_legacy_atom_fixup(struct drm_crtc *crtc)
1478 {
1479 struct drm_device *dev = crtc->dev;
1480 struct radeon_device *rdev = dev->dev_private;
1481 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1482 u32 disp_merge_cntl;
1483
1484 switch (radeon_crtc->crtc_id) {
1485 case 0:
1486 disp_merge_cntl = RREG32(RADEON_DISP_MERGE_CNTL);
1487 disp_merge_cntl &= ~RADEON_DISP_RGB_OFFSET_EN;
1488 WREG32(RADEON_DISP_MERGE_CNTL, disp_merge_cntl);
1489 break;
1490 case 1:
1491 disp_merge_cntl = RREG32(RADEON_DISP2_MERGE_CNTL);
1492 disp_merge_cntl &= ~RADEON_DISP2_RGB_OFFSET_EN;
1493 WREG32(RADEON_DISP2_MERGE_CNTL, disp_merge_cntl);
1494 WREG32(RADEON_FP_H2_SYNC_STRT_WID, RREG32(RADEON_CRTC2_H_SYNC_STRT_WID));
1495 WREG32(RADEON_FP_V2_SYNC_STRT_WID, RREG32(RADEON_CRTC2_V_SYNC_STRT_WID));
1496 break;
1497 }
1498 }
1499
1500 static int radeon_atom_pick_pll(struct drm_crtc *crtc)
1501 {
1502 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1503 struct drm_device *dev = crtc->dev;
1504 struct radeon_device *rdev = dev->dev_private;
1505 struct drm_encoder *test_encoder;
1506 struct drm_crtc *test_crtc;
1507 uint32_t pll_in_use = 0;
1508
1509 if (ASIC_IS_DCE61(rdev)) {
1510 list_for_each_entry(test_encoder, &dev->mode_config.encoder_list, head) {
1511 if (test_encoder->crtc && (test_encoder->crtc == crtc)) {
1512 struct radeon_encoder *test_radeon_encoder =
1513 to_radeon_encoder(test_encoder);
1514 struct radeon_encoder_atom_dig *dig =
1515 test_radeon_encoder->enc_priv;
1516
1517 if ((test_radeon_encoder->encoder_id ==
1518 ENCODER_OBJECT_ID_INTERNAL_UNIPHY) &&
1519 (dig->linkb == false)) /* UNIPHY A uses PPLL2 */
1520 return ATOM_PPLL2;
1521 }
1522 }
1523 /* UNIPHY B/C/D/E/F */
1524 list_for_each_entry(test_crtc, &dev->mode_config.crtc_list, head) {
1525 struct radeon_crtc *radeon_test_crtc;
1526
1527 if (crtc == test_crtc)
1528 continue;
1529
1530 radeon_test_crtc = to_radeon_crtc(test_crtc);
1531 if ((radeon_test_crtc->pll_id == ATOM_PPLL0) ||
1532 (radeon_test_crtc->pll_id == ATOM_PPLL1))
1533 pll_in_use |= (1 << radeon_test_crtc->pll_id);
1534 }
1535 if (!(pll_in_use & 4))
1536 return ATOM_PPLL0;
1537 return ATOM_PPLL1;
1538 } else if (ASIC_IS_DCE4(rdev)) {
1539 list_for_each_entry(test_encoder, &dev->mode_config.encoder_list, head) {
1540 if (test_encoder->crtc && (test_encoder->crtc == crtc)) {
1541 /* in DP mode, the DP ref clock can come from PPLL, DCPLL, or ext clock,
1542 * depending on the asic:
1543 * DCE4: PPLL or ext clock
1544 * DCE5: DCPLL or ext clock
1545 *
1546 * Setting ATOM_PPLL_INVALID will cause SetPixelClock to skip
1547 * PPLL/DCPLL programming and only program the DP DTO for the
1548 * crtc virtual pixel clock.
1549 */
1550 if (ENCODER_MODE_IS_DP(atombios_get_encoder_mode(test_encoder))) {
1551 if (rdev->clock.dp_extclk)
1552 return ATOM_PPLL_INVALID;
1553 else if (ASIC_IS_DCE6(rdev))
1554 return ATOM_PPLL0;
1555 else if (ASIC_IS_DCE5(rdev))
1556 return ATOM_DCPLL;
1557 }
1558 }
1559 }
1560
1561 /* otherwise, pick one of the plls */
1562 list_for_each_entry(test_crtc, &dev->mode_config.crtc_list, head) {
1563 struct radeon_crtc *radeon_test_crtc;
1564
1565 if (crtc == test_crtc)
1566 continue;
1567
1568 radeon_test_crtc = to_radeon_crtc(test_crtc);
1569 if ((radeon_test_crtc->pll_id >= ATOM_PPLL1) &&
1570 (radeon_test_crtc->pll_id <= ATOM_PPLL2))
1571 pll_in_use |= (1 << radeon_test_crtc->pll_id);
1572 }
1573 if (!(pll_in_use & 1))
1574 return ATOM_PPLL1;
1575 return ATOM_PPLL2;
1576 } else
1577 return radeon_crtc->crtc_id;
1578
1579 }
1580
1581 void radeon_atom_disp_eng_pll_init(struct radeon_device *rdev)
1582 {
1583 /* always set DCPLL */
1584 if (ASIC_IS_DCE6(rdev))
1585 atombios_crtc_set_disp_eng_pll(rdev, rdev->clock.default_dispclk);
1586 else if (ASIC_IS_DCE4(rdev)) {
1587 struct radeon_atom_ss ss;
1588 bool ss_enabled = radeon_atombios_get_asic_ss_info(rdev, &ss,
1589 ASIC_INTERNAL_SS_ON_DCPLL,
1590 rdev->clock.default_dispclk);
1591 if (ss_enabled)
1592 atombios_crtc_program_ss(rdev, ATOM_DISABLE, ATOM_DCPLL, -1, &ss);
1593 /* XXX: DCE5, make sure voltage, dispclk is high enough */
1594 atombios_crtc_set_disp_eng_pll(rdev, rdev->clock.default_dispclk);
1595 if (ss_enabled)
1596 atombios_crtc_program_ss(rdev, ATOM_ENABLE, ATOM_DCPLL, -1, &ss);
1597 }
1598
1599 }
1600
1601 int atombios_crtc_mode_set(struct drm_crtc *crtc,
1602 struct drm_display_mode *mode,
1603 struct drm_display_mode *adjusted_mode,
1604 int x, int y, struct drm_framebuffer *old_fb)
1605 {
1606 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1607 struct drm_device *dev = crtc->dev;
1608 struct radeon_device *rdev = dev->dev_private;
1609 struct drm_encoder *encoder;
1610 bool is_tvcv = false;
1611
1612 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
1613 /* find tv std */
1614 if (encoder->crtc == crtc) {
1615 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
1616 if (radeon_encoder->active_device &
1617 (ATOM_DEVICE_TV_SUPPORT | ATOM_DEVICE_CV_SUPPORT))
1618 is_tvcv = true;
1619 }
1620 }
1621
1622 atombios_crtc_set_pll(crtc, adjusted_mode);
1623
1624 if (ASIC_IS_DCE4(rdev))
1625 atombios_set_crtc_dtd_timing(crtc, adjusted_mode);
1626 else if (ASIC_IS_AVIVO(rdev)) {
1627 if (is_tvcv)
1628 atombios_crtc_set_timing(crtc, adjusted_mode);
1629 else
1630 atombios_set_crtc_dtd_timing(crtc, adjusted_mode);
1631 } else {
1632 atombios_crtc_set_timing(crtc, adjusted_mode);
1633 if (radeon_crtc->crtc_id == 0)
1634 atombios_set_crtc_dtd_timing(crtc, adjusted_mode);
1635 radeon_legacy_atom_fixup(crtc);
1636 }
1637 atombios_crtc_set_base(crtc, x, y, old_fb);
1638 atombios_overscan_setup(crtc, mode, adjusted_mode);
1639 atombios_scaler_setup(crtc);
1640 return 0;
1641 }
1642
1643 static bool atombios_crtc_mode_fixup(struct drm_crtc *crtc,
1644 const struct drm_display_mode *mode,
1645 struct drm_display_mode *adjusted_mode)
1646 {
1647 if (!radeon_crtc_scaling_mode_fixup(crtc, mode, adjusted_mode))
1648 return false;
1649 return true;
1650 }
1651
1652 static void atombios_crtc_prepare(struct drm_crtc *crtc)
1653 {
1654 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1655 struct drm_device *dev = crtc->dev;
1656 struct radeon_device *rdev = dev->dev_private;
1657
1658 radeon_crtc->in_mode_set = true;
1659 /* pick pll */
1660 radeon_crtc->pll_id = radeon_atom_pick_pll(crtc);
1661
1662 /* disable crtc pair power gating before programming */
1663 if (ASIC_IS_DCE6(rdev))
1664 atombios_powergate_crtc(crtc, ATOM_DISABLE);
1665
1666 atombios_lock_crtc(crtc, ATOM_ENABLE);
1667 atombios_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);
1668 }
1669
1670 static void atombios_crtc_commit(struct drm_crtc *crtc)
1671 {
1672 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1673
1674 atombios_crtc_dpms(crtc, DRM_MODE_DPMS_ON);
1675 atombios_lock_crtc(crtc, ATOM_DISABLE);
1676 radeon_crtc->in_mode_set = false;
1677 }
1678
1679 static void atombios_crtc_disable(struct drm_crtc *crtc)
1680 {
1681 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1682 struct drm_device *dev = crtc->dev;
1683 struct radeon_device *rdev = dev->dev_private;
1684 struct radeon_atom_ss ss;
1685
1686 atombios_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);
1687
1688 switch (radeon_crtc->pll_id) {
1689 case ATOM_PPLL1:
1690 case ATOM_PPLL2:
1691 /* disable the ppll */
1692 atombios_crtc_program_pll(crtc, radeon_crtc->crtc_id, radeon_crtc->pll_id,
1693 0, 0, ATOM_DISABLE, 0, 0, 0, 0, 0, false, &ss);
1694 break;
1695 case ATOM_PPLL0:
1696 /* disable the ppll */
1697 if (ASIC_IS_DCE61(rdev))
1698 atombios_crtc_program_pll(crtc, radeon_crtc->crtc_id, radeon_crtc->pll_id,
1699 0, 0, ATOM_DISABLE, 0, 0, 0, 0, 0, false, &ss);
1700 break;
1701 default:
1702 break;
1703 }
1704 radeon_crtc->pll_id = -1;
1705 }
1706
1707 static const struct drm_crtc_helper_funcs atombios_helper_funcs = {
1708 .dpms = atombios_crtc_dpms,
1709 .mode_fixup = atombios_crtc_mode_fixup,
1710 .mode_set = atombios_crtc_mode_set,
1711 .mode_set_base = atombios_crtc_set_base,
1712 .mode_set_base_atomic = atombios_crtc_set_base_atomic,
1713 .prepare = atombios_crtc_prepare,
1714 .commit = atombios_crtc_commit,
1715 .load_lut = radeon_crtc_load_lut,
1716 .disable = atombios_crtc_disable,
1717 };
1718
1719 void radeon_atombios_init_crtc(struct drm_device *dev,
1720 struct radeon_crtc *radeon_crtc)
1721 {
1722 struct radeon_device *rdev = dev->dev_private;
1723
1724 if (ASIC_IS_DCE4(rdev)) {
1725 switch (radeon_crtc->crtc_id) {
1726 case 0:
1727 default:
1728 radeon_crtc->crtc_offset = EVERGREEN_CRTC0_REGISTER_OFFSET;
1729 break;
1730 case 1:
1731 radeon_crtc->crtc_offset = EVERGREEN_CRTC1_REGISTER_OFFSET;
1732 break;
1733 case 2:
1734 radeon_crtc->crtc_offset = EVERGREEN_CRTC2_REGISTER_OFFSET;
1735 break;
1736 case 3:
1737 radeon_crtc->crtc_offset = EVERGREEN_CRTC3_REGISTER_OFFSET;
1738 break;
1739 case 4:
1740 radeon_crtc->crtc_offset = EVERGREEN_CRTC4_REGISTER_OFFSET;
1741 break;
1742 case 5:
1743 radeon_crtc->crtc_offset = EVERGREEN_CRTC5_REGISTER_OFFSET;
1744 break;
1745 }
1746 } else {
1747 if (radeon_crtc->crtc_id == 1)
1748 radeon_crtc->crtc_offset =
1749 AVIVO_D2CRTC_H_TOTAL - AVIVO_D1CRTC_H_TOTAL;
1750 else
1751 radeon_crtc->crtc_offset = 0;
1752 }
1753 radeon_crtc->pll_id = -1;
1754 drm_crtc_helper_add(&radeon_crtc->base, &atombios_helper_funcs);
1755 }