]>
Commit | Line | Data |
---|---|---|
c6e14f40 ED |
1 | /* |
2 | * Copyright 2014 Advanced Micro Devices, Inc. | |
3 | * | |
4 | * Permission is hereby granted, free of charge, to any person obtaining a | |
5 | * copy of this software and associated documentation files (the "Software"), | |
6 | * to deal in the Software without restriction, including without limitation | |
7 | * the rights to use, copy, modify, merge, publish, distribute, sublicense, | |
8 | * and/or sell copies of the Software, and to permit persons to whom the | |
9 | * Software is furnished to do so, subject to the following conditions: | |
10 | * | |
11 | * The above copyright notice and this permission notice shall be included in | |
12 | * all copies or substantial portions of the Software. | |
13 | * | |
14 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | |
15 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | |
16 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL | |
17 | * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR | |
18 | * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, | |
19 | * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR | |
20 | * OTHER DEALINGS IN THE SOFTWARE. | |
21 | * | |
22 | */ | |
23 | #include "drmP.h" | |
24 | #include "amdgpu.h" | |
25 | #include "amdgpu_pm.h" | |
26 | #include "amdgpu_i2c.h" | |
27 | #include "atom.h" | |
c6e14f40 ED |
28 | #include "amdgpu_pll.h" |
29 | #include "amdgpu_connectors.h" | |
a1d37046 AD |
30 | #ifdef CONFIG_DRM_AMDGPU_SI |
31 | #include "dce_v6_0.h" | |
32 | #endif | |
83c9b025 ED |
33 | #ifdef CONFIG_DRM_AMDGPU_CIK |
34 | #include "dce_v8_0.h" | |
35 | #endif | |
36 | #include "dce_v10_0.h" | |
37 | #include "dce_v11_0.h" | |
46ac3622 | 38 | #include "dce_virtual.h" |
c6e14f40 | 39 | |
623fea18 AD |
40 | #define DCE_VIRTUAL_VBLANK_PERIOD 16666666 |
41 | ||
42 | ||
c6e14f40 ED |
43 | static void dce_virtual_set_display_funcs(struct amdgpu_device *adev); |
44 | static void dce_virtual_set_irq_funcs(struct amdgpu_device *adev); | |
66264ba8 AD |
45 | static int dce_virtual_connector_encoder_init(struct amdgpu_device *adev, |
46 | int index); | |
c6e14f40 | 47 | |
8e6de75b ED |
/**
 * dce_virtual_vblank_wait - vblank wait asic callback.
 *
 * @adev: amdgpu_device pointer
 * @crtc: crtc to wait for vblank on
 *
 * No-op: a virtual CRTC has no real scanout hardware to wait on.
 */
static void dce_virtual_vblank_wait(struct amdgpu_device *adev, int crtc)
{
	/* nothing to wait for on a virtual display */
}
60 | ||
/* There is no hardware frame counter on a virtual CRTC; always report 0.
 * (sw_init also sets max_vblank_count = 0 so DRM uses software counting.) */
static u32 dce_virtual_vblank_get_counter(struct amdgpu_device *adev, int crtc)
{
	return 0;
}
65 | ||
66 | static void dce_virtual_page_flip(struct amdgpu_device *adev, | |
67 | int crtc_id, u64 crtc_base, bool async) | |
68 | { | |
69 | return; | |
70 | } | |
71 | ||
/* Scanout-position query: a virtual CRTC has no raster position, so the
 * outputs are zeroed and -EINVAL tells callers the query is unsupported. */
static int dce_virtual_crtc_get_scanoutpos(struct amdgpu_device *adev, int crtc,
					   u32 *vbl, u32 *position)
{
	*vbl = 0;
	*position = 0;

	return -EINVAL;
}
80 | ||
/* Hotplug sense: the virtual connector is always "connected". */
static bool dce_virtual_hpd_sense(struct amdgpu_device *adev,
				  enum amdgpu_hpd_id hpd)
{
	return true;
}
86 | ||
87 | static void dce_virtual_hpd_set_polarity(struct amdgpu_device *adev, | |
88 | enum amdgpu_hpd_id hpd) | |
89 | { | |
90 | return; | |
91 | } | |
92 | ||
/* No HPD GPIO register exists on the virtual display; report 0. */
static u32 dce_virtual_hpd_get_gpio_reg(struct amdgpu_device *adev)
{
	return 0;
}
97 | ||
4d446656 | 98 | static void dce_virtual_stop_mc_access(struct amdgpu_device *adev, |
8e6de75b ED |
99 | struct amdgpu_mode_mc_save *save) |
100 | { | |
83c9b025 | 101 | switch (adev->asic_type) { |
a1d37046 AD |
102 | #ifdef CONFIG_DRM_AMDGPU_SI |
103 | case CHIP_TAHITI: | |
104 | case CHIP_PITCAIRN: | |
105 | case CHIP_VERDE: | |
106 | case CHIP_OLAND: | |
107 | dce_v6_0_disable_dce(adev); | |
108 | break; | |
109 | #endif | |
8cb619d8 | 110 | #ifdef CONFIG_DRM_AMDGPU_CIK |
83c9b025 ED |
111 | case CHIP_BONAIRE: |
112 | case CHIP_HAWAII: | |
113 | case CHIP_KAVERI: | |
114 | case CHIP_KABINI: | |
115 | case CHIP_MULLINS: | |
83c9b025 | 116 | dce_v8_0_disable_dce(adev); |
83c9b025 | 117 | break; |
8cb619d8 | 118 | #endif |
83c9b025 ED |
119 | case CHIP_FIJI: |
120 | case CHIP_TONGA: | |
121 | dce_v10_0_disable_dce(adev); | |
122 | break; | |
123 | case CHIP_CARRIZO: | |
124 | case CHIP_STONEY: | |
125 | case CHIP_POLARIS11: | |
126 | case CHIP_POLARIS10: | |
127 | dce_v11_0_disable_dce(adev); | |
128 | break; | |
2579de43 | 129 | case CHIP_TOPAZ: |
a1d37046 AD |
130 | #ifdef CONFIG_DRM_AMDGPU_SI |
131 | case CHIP_HAINAN: | |
132 | #endif | |
2579de43 AD |
133 | /* no DCE */ |
134 | return; | |
83c9b025 | 135 | default: |
2579de43 | 136 | DRM_ERROR("Virtual display unsupported ASIC type: 0x%X\n", adev->asic_type); |
83c9b025 ED |
137 | } |
138 | ||
8e6de75b ED |
139 | return; |
140 | } | |
/* Nothing was saved by stop_mc_access, so there is nothing to restore. */
static void dce_virtual_resume_mc_access(struct amdgpu_device *adev,
					 struct amdgpu_mode_mc_save *save)
{
	/* intentionally empty */
}
146 | ||
4d446656 | 147 | static void dce_virtual_set_vga_render_state(struct amdgpu_device *adev, |
8e6de75b ED |
148 | bool render) |
149 | { | |
150 | return; | |
151 | } | |
152 | ||
/**
 * dce_virtual_bandwidth_update - program display watermarks
 *
 * @adev: amdgpu_device pointer
 *
 * No watermarks or line-buffer allocation exist for a virtual display,
 * so this callback is a no-op.
 */
static void dce_virtual_bandwidth_update(struct amdgpu_device *adev)
{
	/* intentionally empty */
}
165 | ||
0d43f3bc ED |
/* Gamma-set CRTC callback: cache the userspace 16-bit LUT entries,
 * truncated to 10 bits (>> 6), in the amdgpu_crtc.  No hardware is
 * programmed on a virtual display. */
static int dce_virtual_crtc_gamma_set(struct drm_crtc *crtc, u16 *red,
				      u16 *green, u16 *blue, uint32_t size)
{
	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);
	int i;

	/* userspace palettes are always correct as is */
	for (i = 0; i < size; i++) {
		amdgpu_crtc->lut_r[i] = red[i] >> 6;
		amdgpu_crtc->lut_g[i] = green[i] >> 6;
		amdgpu_crtc->lut_b[i] = blue[i] >> 6;
	}

	return 0;
}
181 | ||
/* CRTC destroy callback: detach from DRM core, then free the
 * amdgpu_crtc allocated in dce_virtual_crtc_init(). */
static void dce_virtual_crtc_destroy(struct drm_crtc *crtc)
{
	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);

	drm_crtc_cleanup(crtc);
	kfree(amdgpu_crtc);
}
189 | ||
c6e14f40 ED |
/* CRTC function table.  No hardware cursor on the virtual display, so
 * the cursor hooks are explicitly NULL; config/flip go through the
 * common amdgpu helpers. */
static const struct drm_crtc_funcs dce_virtual_crtc_funcs = {
	.cursor_set2 = NULL,
	.cursor_move = NULL,
	.gamma_set = dce_virtual_crtc_gamma_set,
	.set_config = amdgpu_crtc_set_config,
	.destroy = dce_virtual_crtc_destroy,
	.page_flip_target = amdgpu_crtc_page_flip_target,
};
198 | ||
f1f5ef92 ED |
/* DPMS CRTC callback: only bookkeeping is needed on a virtual display -
 * track the enabled flag and keep the (software) vblank machinery in
 * the matching state. */
static void dce_virtual_crtc_dpms(struct drm_crtc *crtc, int mode)
{
	struct drm_device *dev = crtc->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);
	unsigned type;

	switch (mode) {
	case DRM_MODE_DPMS_ON:
		amdgpu_crtc->enabled = true;
		/* Make sure VBLANK interrupts are still enabled */
		type = amdgpu_crtc_idx_to_irq_type(adev, amdgpu_crtc->crtc_id);
		amdgpu_irq_update(adev, &adev->crtc_irq, type);
		drm_crtc_vblank_on(crtc);
		break;
	case DRM_MODE_DPMS_STANDBY:
	case DRM_MODE_DPMS_SUSPEND:
	case DRM_MODE_DPMS_OFF:
		/* any power-saving state just turns the crtc "off" */
		drm_crtc_vblank_off(crtc);
		amdgpu_crtc->enabled = false;
		break;
	}
}
222 | ||
223 | ||
/* Helper prepare hook: turn the crtc off before a mode set. */
static void dce_virtual_crtc_prepare(struct drm_crtc *crtc)
{
	dce_virtual_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);
}
228 | ||
/* Helper commit hook: turn the crtc back on after a mode set. */
static void dce_virtual_crtc_commit(struct drm_crtc *crtc)
{
	dce_virtual_crtc_dpms(crtc, DRM_MODE_DPMS_ON);
}
233 | ||
/* Helper disable hook: power the crtc down, unpin the currently
 * scanned-out buffer object (if any), and reset the crtc's pll/
 * encoder/connector bookkeeping. */
static void dce_virtual_crtc_disable(struct drm_crtc *crtc)
{
	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);

	dce_virtual_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);
	if (crtc->primary->fb) {
		int r;
		struct amdgpu_framebuffer *amdgpu_fb;
		struct amdgpu_bo *abo;

		/* the framebuffer's BO was pinned for scanout; release it */
		amdgpu_fb = to_amdgpu_framebuffer(crtc->primary->fb);
		abo = gem_to_amdgpu_bo(amdgpu_fb->obj);
		r = amdgpu_bo_reserve(abo, false);
		if (unlikely(r))
			DRM_ERROR("failed to reserve abo before unpin\n");
		else {
			amdgpu_bo_unpin(abo);
			amdgpu_bo_unreserve(abo);
		}
	}

	amdgpu_crtc->pll_id = ATOM_PPLL_INVALID;
	amdgpu_crtc->encoder = NULL;
	amdgpu_crtc->connector = NULL;
}
259 | ||
/* Mode-set hook: no registers to program; just cache the adjusted mode
 * on the crtc so other code (e.g. dpm) can read the current timing. */
static int dce_virtual_crtc_mode_set(struct drm_crtc *crtc,
				     struct drm_display_mode *mode,
				     struct drm_display_mode *adjusted_mode,
				     int x, int y, struct drm_framebuffer *old_fb)
{
	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);

	/* update the hw mode for dpm */
	amdgpu_crtc->hw_mode = *adjusted_mode;

	return 0;
}
272 | ||
/* Mode fixup hook: every requested mode is acceptable as-is. */
static bool dce_virtual_crtc_mode_fixup(struct drm_crtc *crtc,
					const struct drm_display_mode *mode,
					struct drm_display_mode *adjusted_mode)
{
	return true;
}
279 | ||
280 | ||
/* Set-base hook: no scanout base register exists; always succeed. */
static int dce_virtual_crtc_set_base(struct drm_crtc *crtc, int x, int y,
				     struct drm_framebuffer *old_fb)
{
	return 0;
}
286 | ||
/* Load-LUT hook: no gamma hardware to program on a virtual crtc. */
static void dce_virtual_crtc_load_lut(struct drm_crtc *crtc)
{
	/* intentionally empty */
}
291 | ||
/* Atomic set-base hook: nothing to flip; always succeed. */
static int dce_virtual_crtc_set_base_atomic(struct drm_crtc *crtc,
					    struct drm_framebuffer *fb,
					    int x, int y, enum mode_set_atomic state)
{
	return 0;
}
298 | ||
/* CRTC helper table wiring up the (mostly no-op) hooks above. */
static const struct drm_crtc_helper_funcs dce_virtual_crtc_helper_funcs = {
	.dpms = dce_virtual_crtc_dpms,
	.mode_fixup = dce_virtual_crtc_mode_fixup,
	.mode_set = dce_virtual_crtc_mode_set,
	.mode_set_base = dce_virtual_crtc_set_base,
	.mode_set_base_atomic = dce_virtual_crtc_set_base_atomic,
	.prepare = dce_virtual_crtc_prepare,
	.commit = dce_virtual_crtc_commit,
	.load_lut = dce_virtual_crtc_load_lut,
	.disable = dce_virtual_crtc_disable,
};
310 | ||
/* Allocate and register one virtual CRTC with the DRM core.
 *
 * @adev:  amdgpu_device pointer
 * @index: crtc index (also used as the crtc_id)
 *
 * Returns 0 on success, -ENOMEM if the crtc cannot be allocated.
 */
static int dce_virtual_crtc_init(struct amdgpu_device *adev, int index)
{
	struct amdgpu_crtc *amdgpu_crtc;
	int i;

	/* trailing space for AMDGPUFB_CONN_LIMIT connector pointers */
	amdgpu_crtc = kzalloc(sizeof(struct amdgpu_crtc) +
			      (AMDGPUFB_CONN_LIMIT * sizeof(struct drm_connector *)), GFP_KERNEL);
	if (amdgpu_crtc == NULL)
		return -ENOMEM;

	drm_crtc_init(adev->ddev, &amdgpu_crtc->base, &dce_virtual_crtc_funcs);

	drm_mode_crtc_set_gamma_size(&amdgpu_crtc->base, 256);
	amdgpu_crtc->crtc_id = index;
	adev->mode_info.crtcs[index] = amdgpu_crtc;

	/* identity-ish default LUT: 8-bit index widened to 10 bits */
	for (i = 0; i < 256; i++) {
		amdgpu_crtc->lut_r[i] = i << 2;
		amdgpu_crtc->lut_g[i] = i << 2;
		amdgpu_crtc->lut_b[i] = i << 2;
	}

	amdgpu_crtc->pll_id = ATOM_PPLL_INVALID;
	amdgpu_crtc->encoder = NULL;
	amdgpu_crtc->connector = NULL;
	/* software vsync timer is started lazily by the irq state hook */
	amdgpu_crtc->vsync_timer_enabled = AMDGPU_IRQ_STATE_DISABLE;
	drm_crtc_helper_add(&amdgpu_crtc->base, &dce_virtual_crtc_helper_funcs);

	return 0;
}
341 | ||
/* IP-block early_init: install the display/irq function tables and
 * declare a minimal hpd/dig configuration. */
static int dce_virtual_early_init(void *handle)
{
	struct amdgpu_device *adev = (struct amdgpu_device *)handle;

	dce_virtual_set_display_funcs(adev);
	dce_virtual_set_irq_funcs(adev);

	adev->mode_info.num_hpd = 1;
	adev->mode_info.num_dig = 1;
	return 0;
}
353 | ||
66264ba8 AD |
/* best_encoder connector hook: prefer an attached encoder of type
 * DRM_MODE_ENCODER_VIRTUAL; otherwise fall back to the connector's
 * first attached encoder, or NULL if there is none. */
static struct drm_encoder *
dce_virtual_encoder(struct drm_connector *connector)
{
	int enc_id = connector->encoder_ids[0];
	struct drm_encoder *encoder;
	int i;

	for (i = 0; i < DRM_CONNECTOR_MAX_ENCODER; i++) {
		if (connector->encoder_ids[i] == 0)
			break;

		encoder = drm_encoder_find(connector->dev, connector->encoder_ids[i]);
		if (!encoder)
			continue;

		if (encoder->encoder_type == DRM_MODE_ENCODER_VIRTUAL)
			return encoder;
	}

	/* pick the first one */
	if (enc_id)
		return drm_encoder_find(connector->dev, enc_id);
	return NULL;
}
378 | ||
379 | static int dce_virtual_get_modes(struct drm_connector *connector) | |
380 | { | |
381 | struct drm_device *dev = connector->dev; | |
382 | struct drm_display_mode *mode = NULL; | |
383 | unsigned i; | |
384 | static const struct mode_size { | |
385 | int w; | |
386 | int h; | |
387 | } common_modes[17] = { | |
388 | { 640, 480}, | |
389 | { 720, 480}, | |
390 | { 800, 600}, | |
391 | { 848, 480}, | |
392 | {1024, 768}, | |
393 | {1152, 768}, | |
394 | {1280, 720}, | |
395 | {1280, 800}, | |
396 | {1280, 854}, | |
397 | {1280, 960}, | |
398 | {1280, 1024}, | |
399 | {1440, 900}, | |
400 | {1400, 1050}, | |
401 | {1680, 1050}, | |
402 | {1600, 1200}, | |
403 | {1920, 1080}, | |
404 | {1920, 1200} | |
405 | }; | |
406 | ||
407 | for (i = 0; i < 17; i++) { | |
408 | mode = drm_cvt_mode(dev, common_modes[i].w, common_modes[i].h, 60, false, false, false); | |
409 | drm_mode_probed_add(connector, mode); | |
410 | } | |
c6e14f40 | 411 | |
66264ba8 AD |
412 | return 0; |
413 | } | |
c6e14f40 | 414 | |
66264ba8 AD |
/* mode_valid connector hook: a virtual display can show anything. */
static int dce_virtual_mode_valid(struct drm_connector *connector,
				  struct drm_display_mode *mode)
{
	return MODE_OK;
}
c6e14f40 | 420 | |
66264ba8 AD |
/* Connector DPMS hook: no power state to change; always succeed. */
static int
dce_virtual_dpms(struct drm_connector *connector, int mode)
{
	return 0;
}
426 | ||
66264ba8 AD |
/* Connector property hook: properties are accepted but ignored. */
static int
dce_virtual_set_property(struct drm_connector *connector,
			 struct drm_property *property,
			 uint64_t val)
{
	return 0;
}
434 | ||
/* Connector destroy hook: unregister, clean up, and free the connector
 * allocated in dce_virtual_connector_encoder_init(). */
static void dce_virtual_destroy(struct drm_connector *connector)
{
	drm_connector_unregister(connector);
	drm_connector_cleanup(connector);
	kfree(connector);
}
441 | ||
/* Connector force hook: nothing to force on a virtual connector. */
static void dce_virtual_force(struct drm_connector *connector)
{
	/* intentionally empty */
}
446 | ||
/* Connector helper table for the virtual connector. */
static const struct drm_connector_helper_funcs dce_virtual_connector_helper_funcs = {
	.get_modes = dce_virtual_get_modes,
	.mode_valid = dce_virtual_mode_valid,
	.best_encoder = dce_virtual_encoder,
};
452 | ||
/* Connector function table for the virtual connector. */
static const struct drm_connector_funcs dce_virtual_connector_funcs = {
	.dpms = dce_virtual_dpms,
	.fill_modes = drm_helper_probe_single_connector_modes,
	.set_property = dce_virtual_set_property,
	.destroy = dce_virtual_destroy,
	.force = dce_virtual_force,
};
460 | ||
c6e14f40 ED |
461 | static int dce_virtual_sw_init(void *handle) |
462 | { | |
463 | int r, i; | |
464 | struct amdgpu_device *adev = (struct amdgpu_device *)handle; | |
465 | ||
466 | r = amdgpu_irq_add_id(adev, 229, &adev->crtc_irq); | |
467 | if (r) | |
468 | return r; | |
469 | ||
041aa658 ED |
470 | adev->ddev->max_vblank_count = 0; |
471 | ||
c6e14f40 ED |
472 | adev->ddev->mode_config.funcs = &amdgpu_mode_funcs; |
473 | ||
474 | adev->ddev->mode_config.max_width = 16384; | |
475 | adev->ddev->mode_config.max_height = 16384; | |
476 | ||
477 | adev->ddev->mode_config.preferred_depth = 24; | |
478 | adev->ddev->mode_config.prefer_shadow = 1; | |
479 | ||
480 | adev->ddev->mode_config.fb_base = adev->mc.aper_base; | |
481 | ||
482 | r = amdgpu_modeset_create_props(adev); | |
483 | if (r) | |
484 | return r; | |
485 | ||
486 | adev->ddev->mode_config.max_width = 16384; | |
487 | adev->ddev->mode_config.max_height = 16384; | |
488 | ||
66264ba8 | 489 | /* allocate crtcs, encoders, connectors */ |
c6e14f40 ED |
490 | for (i = 0; i < adev->mode_info.num_crtc; i++) { |
491 | r = dce_virtual_crtc_init(adev, i); | |
492 | if (r) | |
493 | return r; | |
66264ba8 AD |
494 | r = dce_virtual_connector_encoder_init(adev, i); |
495 | if (r) | |
496 | return r; | |
c6e14f40 ED |
497 | } |
498 | ||
c6e14f40 ED |
499 | drm_kms_helper_poll_init(adev->ddev); |
500 | ||
501 | adev->mode_info.mode_config_initialized = true; | |
502 | return 0; | |
503 | } | |
504 | ||
/* IP-block sw_fini: tear down everything sw_init created.  The crtcs,
 * encoders and connectors are freed via their destroy callbacks when
 * drm_mode_config_cleanup() runs. */
static int dce_virtual_sw_fini(void *handle)
{
	struct amdgpu_device *adev = (struct amdgpu_device *)handle;

	kfree(adev->mode_info.bios_hardcoded_edid);

	drm_kms_helper_poll_fini(adev->ddev);

	drm_mode_config_cleanup(adev->ddev);
	adev->mode_info.mode_config_initialized = false;
	return 0;
}
517 | ||
/* The remaining IP-block callbacks are trivial for the virtual display:
 * there is no hardware to start, stop, reset, or gate. */

static int dce_virtual_hw_init(void *handle)
{
	return 0;
}

static int dce_virtual_hw_fini(void *handle)
{
	return 0;
}

/* suspend/resume just delegate to the (no-op) hw hooks for symmetry */
static int dce_virtual_suspend(void *handle)
{
	return dce_virtual_hw_fini(handle);
}

static int dce_virtual_resume(void *handle)
{
	return dce_virtual_hw_init(handle);
}

static bool dce_virtual_is_idle(void *handle)
{
	return true;
}

static int dce_virtual_wait_for_idle(void *handle)
{
	return 0;
}

static int dce_virtual_soft_reset(void *handle)
{
	return 0;
}

static int dce_virtual_set_clockgating_state(void *handle,
					     enum amd_clockgating_state state)
{
	return 0;
}

static int dce_virtual_set_powergating_state(void *handle,
					     enum amd_powergating_state state)
{
	return 0;
}
564 | ||
/* IP-block function table exported via dce_virtual_ip_block below. */
static const struct amd_ip_funcs dce_virtual_ip_funcs = {
	.name = "dce_virtual",
	.early_init = dce_virtual_early_init,
	.late_init = NULL,
	.sw_init = dce_virtual_sw_init,
	.sw_fini = dce_virtual_sw_fini,
	.hw_init = dce_virtual_hw_init,
	.hw_fini = dce_virtual_hw_fini,
	.suspend = dce_virtual_suspend,
	.resume = dce_virtual_resume,
	.is_idle = dce_virtual_is_idle,
	.wait_for_idle = dce_virtual_wait_for_idle,
	.soft_reset = dce_virtual_soft_reset,
	.set_clockgating_state = dce_virtual_set_clockgating_state,
	.set_powergating_state = dce_virtual_set_powergating_state,
};
581 | ||
8e6de75b ED |
582 | /* these are handled by the primary encoders */ |
583 | static void dce_virtual_encoder_prepare(struct drm_encoder *encoder) | |
584 | { | |
585 | return; | |
586 | } | |
587 | ||
588 | static void dce_virtual_encoder_commit(struct drm_encoder *encoder) | |
589 | { | |
590 | return; | |
591 | } | |
592 | ||
593 | static void | |
594 | dce_virtual_encoder_mode_set(struct drm_encoder *encoder, | |
66264ba8 AD |
595 | struct drm_display_mode *mode, |
596 | struct drm_display_mode *adjusted_mode) | |
8e6de75b ED |
597 | { |
598 | return; | |
599 | } | |
600 | ||
601 | static void dce_virtual_encoder_disable(struct drm_encoder *encoder) | |
602 | { | |
603 | return; | |
604 | } | |
605 | ||
606 | static void | |
607 | dce_virtual_encoder_dpms(struct drm_encoder *encoder, int mode) | |
608 | { | |
609 | return; | |
610 | } | |
611 | ||
612 | static bool dce_virtual_encoder_mode_fixup(struct drm_encoder *encoder, | |
613 | const struct drm_display_mode *mode, | |
614 | struct drm_display_mode *adjusted_mode) | |
615 | { | |
8e6de75b ED |
616 | return true; |
617 | } | |
618 | ||
619 | static const struct drm_encoder_helper_funcs dce_virtual_encoder_helper_funcs = { | |
620 | .dpms = dce_virtual_encoder_dpms, | |
621 | .mode_fixup = dce_virtual_encoder_mode_fixup, | |
622 | .prepare = dce_virtual_encoder_prepare, | |
623 | .mode_set = dce_virtual_encoder_mode_set, | |
624 | .commit = dce_virtual_encoder_commit, | |
625 | .disable = dce_virtual_encoder_disable, | |
626 | }; | |
627 | ||
/* Encoder destroy callback: free the private data and the encoder
 * allocated in dce_virtual_connector_encoder_init(). */
static void dce_virtual_encoder_destroy(struct drm_encoder *encoder)
{
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);

	kfree(amdgpu_encoder->enc_priv);
	drm_encoder_cleanup(encoder);
	kfree(amdgpu_encoder);
}

static const struct drm_encoder_funcs dce_virtual_encoder_funcs = {
	.destroy = dce_virtual_encoder_destroy,
};
640 | ||
66264ba8 AD |
641 | static int dce_virtual_connector_encoder_init(struct amdgpu_device *adev, |
642 | int index) | |
8e6de75b | 643 | { |
8e6de75b | 644 | struct drm_encoder *encoder; |
66264ba8 | 645 | struct drm_connector *connector; |
8e6de75b | 646 | |
66264ba8 AD |
647 | /* add a new encoder */ |
648 | encoder = kzalloc(sizeof(struct drm_encoder), GFP_KERNEL); | |
649 | if (!encoder) | |
650 | return -ENOMEM; | |
651 | encoder->possible_crtcs = 1 << index; | |
652 | drm_encoder_init(adev->ddev, encoder, &dce_virtual_encoder_funcs, | |
653 | DRM_MODE_ENCODER_VIRTUAL, NULL); | |
654 | drm_encoder_helper_add(encoder, &dce_virtual_encoder_helper_funcs); | |
655 | ||
656 | connector = kzalloc(sizeof(struct drm_connector), GFP_KERNEL); | |
657 | if (!connector) { | |
658 | kfree(encoder); | |
659 | return -ENOMEM; | |
8e6de75b ED |
660 | } |
661 | ||
66264ba8 AD |
662 | /* add a new connector */ |
663 | drm_connector_init(adev->ddev, connector, &dce_virtual_connector_funcs, | |
664 | DRM_MODE_CONNECTOR_VIRTUAL); | |
665 | drm_connector_helper_add(connector, &dce_virtual_connector_helper_funcs); | |
666 | connector->display_info.subpixel_order = SubPixelHorizontalRGB; | |
667 | connector->interlace_allowed = false; | |
668 | connector->doublescan_allowed = false; | |
669 | drm_connector_register(connector); | |
8e6de75b | 670 | |
66264ba8 AD |
671 | /* link them */ |
672 | drm_mode_connector_attach_encoder(connector, encoder); | |
673 | ||
674 | return 0; | |
8e6de75b ED |
675 | } |
676 | ||
/* Display function table: backlight and encoder/connector add hooks are
 * NULL because the virtual display has no backlight and builds its own
 * connector/encoder pairs in sw_init. */
static const struct amdgpu_display_funcs dce_virtual_display_funcs = {
	.set_vga_render_state = &dce_virtual_set_vga_render_state,
	.bandwidth_update = &dce_virtual_bandwidth_update,
	.vblank_get_counter = &dce_virtual_vblank_get_counter,
	.vblank_wait = &dce_virtual_vblank_wait,
	.backlight_set_level = NULL,
	.backlight_get_level = NULL,
	.hpd_sense = &dce_virtual_hpd_sense,
	.hpd_set_polarity = &dce_virtual_hpd_set_polarity,
	.hpd_get_gpio_reg = &dce_virtual_hpd_get_gpio_reg,
	.page_flip = &dce_virtual_page_flip,
	.page_flip_get_scanoutpos = &dce_virtual_crtc_get_scanoutpos,
	.add_encoder = NULL,
	.add_connector = NULL,
	.stop_mc_access = &dce_virtual_stop_mc_access,
	.resume_mc_access = &dce_virtual_resume_mc_access,
};
694 | ||
/* Install the display function table unless one is already set. */
static void dce_virtual_set_display_funcs(struct amdgpu_device *adev)
{
	if (adev->mode_info.funcs == NULL)
		adev->mode_info.funcs = &dce_virtual_display_funcs;
}
700 | ||
9405e47d AD |
701 | static int dce_virtual_pageflip(struct amdgpu_device *adev, |
702 | unsigned crtc_id) | |
703 | { | |
704 | unsigned long flags; | |
705 | struct amdgpu_crtc *amdgpu_crtc; | |
706 | struct amdgpu_flip_work *works; | |
707 | ||
708 | amdgpu_crtc = adev->mode_info.crtcs[crtc_id]; | |
709 | ||
710 | if (crtc_id >= adev->mode_info.num_crtc) { | |
711 | DRM_ERROR("invalid pageflip crtc %d\n", crtc_id); | |
712 | return -EINVAL; | |
713 | } | |
714 | ||
715 | /* IRQ could occur when in initial stage */ | |
716 | if (amdgpu_crtc == NULL) | |
717 | return 0; | |
718 | ||
719 | spin_lock_irqsave(&adev->ddev->event_lock, flags); | |
720 | works = amdgpu_crtc->pflip_works; | |
721 | if (amdgpu_crtc->pflip_status != AMDGPU_FLIP_SUBMITTED) { | |
722 | DRM_DEBUG_DRIVER("amdgpu_crtc->pflip_status = %d != " | |
723 | "AMDGPU_FLIP_SUBMITTED(%d)\n", | |
724 | amdgpu_crtc->pflip_status, | |
725 | AMDGPU_FLIP_SUBMITTED); | |
726 | spin_unlock_irqrestore(&adev->ddev->event_lock, flags); | |
727 | return 0; | |
728 | } | |
729 | ||
730 | /* page flip completed. clean up */ | |
731 | amdgpu_crtc->pflip_status = AMDGPU_FLIP_NONE; | |
732 | amdgpu_crtc->pflip_works = NULL; | |
733 | ||
734 | /* wakeup usersapce */ | |
735 | if (works->event) | |
736 | drm_crtc_send_vblank_event(&amdgpu_crtc->base, works->event); | |
737 | ||
738 | spin_unlock_irqrestore(&adev->ddev->event_lock, flags); | |
739 | ||
740 | drm_crtc_vblank_put(&amdgpu_crtc->base); | |
741 | schedule_work(&works->unpin_work); | |
742 | ||
743 | return 0; | |
744 | } | |
745 | ||
46ac3622 ED |
/* hrtimer callback emulating a vblank interrupt: raise the DRM vblank
 * event, complete any pending page flip, then re-arm the timer for the
 * next period.  Note the timer is restarted manually via hrtimer_start,
 * so HRTIMER_NORESTART is the correct return value here. */
static enum hrtimer_restart dce_virtual_vblank_timer_handle(struct hrtimer *vblank_timer)
{
	struct amdgpu_crtc *amdgpu_crtc = container_of(vblank_timer,
						       struct amdgpu_crtc, vblank_timer);
	struct drm_device *ddev = amdgpu_crtc->base.dev;
	struct amdgpu_device *adev = ddev->dev_private;

	drm_handle_vblank(ddev, amdgpu_crtc->crtc_id);
	dce_virtual_pageflip(adev, amdgpu_crtc->crtc_id);
	hrtimer_start(vblank_timer, DCE_VIRTUAL_VBLANK_PERIOD,
		      HRTIMER_MODE_REL);

	return HRTIMER_NORESTART;
}
760 | ||
/* Enable/disable the software vblank emulation for one crtc.  Enabling
 * starts a periodic hrtimer (DCE_VIRTUAL_VBLANK_PERIOD); disabling
 * cancels it.  Transitions are guarded by vsync_timer_enabled so a
 * repeated enable/disable does not re-init or double-cancel the timer. */
static void dce_virtual_set_crtc_vblank_interrupt_state(struct amdgpu_device *adev,
							int crtc,
							enum amdgpu_interrupt_state state)
{
	if (crtc >= adev->mode_info.num_crtc) {
		DRM_DEBUG("invalid crtc %d\n", crtc);
		return;
	}

	if (state && !adev->mode_info.crtcs[crtc]->vsync_timer_enabled) {
		DRM_DEBUG("Enable software vsync timer\n");
		hrtimer_init(&adev->mode_info.crtcs[crtc]->vblank_timer,
			     CLOCK_MONOTONIC, HRTIMER_MODE_REL);
		hrtimer_set_expires(&adev->mode_info.crtcs[crtc]->vblank_timer,
				    DCE_VIRTUAL_VBLANK_PERIOD);
		adev->mode_info.crtcs[crtc]->vblank_timer.function =
			dce_virtual_vblank_timer_handle;
		hrtimer_start(&adev->mode_info.crtcs[crtc]->vblank_timer,
			      DCE_VIRTUAL_VBLANK_PERIOD, HRTIMER_MODE_REL);
	} else if (!state && adev->mode_info.crtcs[crtc]->vsync_timer_enabled) {
		DRM_DEBUG("Disable software vsync timer\n");
		hrtimer_cancel(&adev->mode_info.crtcs[crtc]->vblank_timer);
	}

	adev->mode_info.crtcs[crtc]->vsync_timer_enabled = state;
	DRM_DEBUG("[FM]set crtc %d vblank interrupt state %d\n", crtc, state);
}
788 | ||
46ac3622 | 789 | |
/* IRQ-source set callback: @type is the crtc vblank irq index; forward
 * it to the software vblank state machine after range-checking. */
static int dce_virtual_set_crtc_irq_state(struct amdgpu_device *adev,
					  struct amdgpu_irq_src *source,
					  unsigned type,
					  enum amdgpu_interrupt_state state)
{
	if (type > AMDGPU_CRTC_IRQ_VBLANK6)
		return -EINVAL;

	dce_virtual_set_crtc_vblank_interrupt_state(adev, type, state);

	return 0;
}
802 | ||
/* CRTC irq source: no real interrupt to process (vblank is emulated by
 * the hrtimer), hence .process is NULL. */
static const struct amdgpu_irq_src_funcs dce_virtual_crtc_irq_funcs = {
	.set = dce_virtual_set_crtc_irq_state,
	.process = NULL,
};
807 | ||
c6e14f40 ED |
/* Install the crtc irq source function table. */
static void dce_virtual_set_irq_funcs(struct amdgpu_device *adev)
{
	adev->crtc_irq.num_types = AMDGPU_CRTC_IRQ_LAST;
	adev->crtc_irq.funcs = &dce_virtual_crtc_irq_funcs;
}
813 | ||
a1255107 AD |
/* Exported IP block descriptor for the virtual display engine (v1.0.0). */
const struct amdgpu_ip_block_version dce_virtual_ip_block =
{
	.type = AMD_IP_BLOCK_TYPE_DCE,
	.major = 1,
	.minor = 0,
	.rev = 0,
	.funcs = &dce_virtual_ip_funcs,
};