// SPDX-License-Identifier: GPL-2.0-only
/*
 * (C) COPYRIGHT 2016 ARM Limited. All rights reserved.
 * Author: Liviu Dudau <Liviu.Dudau@arm.com>
 *
 * ARM Mali DP500/DP550/DP650 driver (crtc operations)
 */

#include <drm/drmP.h>
#include <drm/drm_atomic.h>
#include <drm/drm_atomic_helper.h>
#include <drm/drm_crtc.h>
#include <drm/drm_probe_helper.h>
#include <linux/clk.h>
#include <linux/pm_runtime.h>
#include <video/videomode.h>

#include "malidp_drv.h"
#include "malidp_hw.h"

static enum drm_mode_status malidp_crtc_mode_valid(struct drm_crtc *crtc,
						   const struct drm_display_mode *mode)
{
	struct malidp_drm *malidp = crtc_to_malidp_device(crtc);
	struct malidp_hw_device *hwdev = malidp->dev;

	/*
	 * check that the hardware can drive the required clock rate,
	 * but skip the check if the clock is meant to be disabled (req_rate = 0)
	 */
	long rate, req_rate = mode->crtc_clock * 1000;

	if (req_rate) {
		rate = clk_round_rate(hwdev->pxlclk, req_rate);
		if (rate != req_rate) {
			DRM_DEBUG_DRIVER("pxlclk doesn't support %ld Hz\n",
					 req_rate);
			return MODE_NOCLOCK;
		}
	}

	return MODE_OK;
}
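
/*
 * Worked example (illustrative, not from the original source): a 1080p mode
 * with crtc_clock = 148500 kHz requests req_rate = 148500000 Hz. The mode is
 * accepted only if clk_round_rate() reports that the pixel clock can produce
 * exactly that rate; anything else returns MODE_NOCLOCK.
 */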

static void malidp_crtc_atomic_enable(struct drm_crtc *crtc,
				      struct drm_crtc_state *old_state)
{
	struct malidp_drm *malidp = crtc_to_malidp_device(crtc);
	struct malidp_hw_device *hwdev = malidp->dev;
	struct videomode vm;
	int err = pm_runtime_get_sync(crtc->dev->dev);

	if (err < 0) {
		DRM_DEBUG_DRIVER("Failed to enable runtime power management: %d\n", err);
		return;
	}

	drm_display_mode_to_videomode(&crtc->state->adjusted_mode, &vm);
	clk_prepare_enable(hwdev->pxlclk);

	/* We rely on firmware to set mclk to a sensible level. */
	clk_set_rate(hwdev->pxlclk, crtc->state->adjusted_mode.crtc_clock * 1000);

	hwdev->hw->modeset(hwdev, &vm);
	hwdev->hw->leave_config_mode(hwdev);
	drm_crtc_vblank_on(crtc);
}

static void malidp_crtc_atomic_disable(struct drm_crtc *crtc,
				       struct drm_crtc_state *old_state)
{
	struct malidp_drm *malidp = crtc_to_malidp_device(crtc);
	struct malidp_hw_device *hwdev = malidp->dev;
	int err;

	/* always disable planes on the CRTC that is being turned off */
	drm_atomic_helper_disable_planes_on_crtc(old_state, false);

	drm_crtc_vblank_off(crtc);
	hwdev->hw->enter_config_mode(hwdev);

	clk_disable_unprepare(hwdev->pxlclk);

	err = pm_runtime_put(crtc->dev->dev);
	if (err < 0) {
		DRM_DEBUG_DRIVER("Failed to disable runtime power management: %d\n", err);
	}
}

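/*
 * Input segment boundaries for the gamma coefficient table: the 64 segments
 * below cover the 12-bit input range 0..4095. Segments grow wider as the
 * input value increases (a single input value per segment in sector 0, up to
 * 128 values per segment in sector 6), so the finest control is available
 * near black.
 */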
static const struct gamma_curve_segment {
	u16 start;
	u16 end;
} segments[MALIDP_COEFFTAB_NUM_COEFFS] = {
	/* sector 0 */
	{ 0, 0 }, { 1, 1 }, { 2, 2 }, { 3, 3 },
	{ 4, 4 }, { 5, 5 }, { 6, 6 }, { 7, 7 },
	{ 8, 8 }, { 9, 9 }, { 10, 10 }, { 11, 11 },
	{ 12, 12 }, { 13, 13 }, { 14, 14 }, { 15, 15 },
	/* sector 1 */
	{ 16, 19 }, { 20, 23 }, { 24, 27 }, { 28, 31 },
	/* sector 2 */
	{ 32, 39 }, { 40, 47 }, { 48, 55 }, { 56, 63 },
	/* sector 3 */
	{ 64, 79 }, { 80, 95 }, { 96, 111 }, { 112, 127 },
	/* sector 4 */
	{ 128, 159 }, { 160, 191 }, { 192, 223 }, { 224, 255 },
	/* sector 5 */
	{ 256, 319 }, { 320, 383 }, { 384, 447 }, { 448, 511 },
	/* sector 6 */
	{ 512, 639 }, { 640, 767 }, { 768, 895 }, { 896, 1023 },
	{ 1024, 1151 }, { 1152, 1279 }, { 1280, 1407 }, { 1408, 1535 },
	{ 1536, 1663 }, { 1664, 1791 }, { 1792, 1919 }, { 1920, 2047 },
	{ 2048, 2175 }, { 2176, 2303 }, { 2304, 2431 }, { 2432, 2559 },
	{ 2560, 2687 }, { 2688, 2815 }, { 2816, 2943 }, { 2944, 3071 },
	{ 3072, 3199 }, { 3200, 3327 }, { 3328, 3455 }, { 3456, 3583 },
	{ 3584, 3711 }, { 3712, 3839 }, { 3840, 3967 }, { 3968, 4095 },
};

#define DE_COEFTAB_DATA(a, b) ((((a) & 0xfff) << 16) | (((b) & 0xfff)))

static void malidp_generate_gamma_table(struct drm_property_blob *lut_blob,
					u32 coeffs[MALIDP_COEFFTAB_NUM_COEFFS])
{
	struct drm_color_lut *lut = (struct drm_color_lut *)lut_blob->data;
	int i;

	for (i = 0; i < MALIDP_COEFFTAB_NUM_COEFFS; ++i) {
		u32 a, b, delta_in, out_start, out_end;

		delta_in = segments[i].end - segments[i].start;
		/* DP has 12-bit internal precision for its LUTs. */
		out_start = drm_color_lut_extract(lut[segments[i].start].green,
						  12);
		out_end = drm_color_lut_extract(lut[segments[i].end].green, 12);
		a = (delta_in == 0) ? 0 : ((out_end - out_start) * 256) / delta_in;
		b = out_start;
		coeffs[i] = DE_COEFTAB_DATA(a, b);
	}
}
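
/*
 * Worked example (illustrative, not from the original source): for a linear
 * LUT where entry i stores (i << 4) in 16 bits, drm_color_lut_extract(..., 12)
 * yields i. Segment { 16, 19 } then gives delta_in = 3, out_start = 16 and
 * out_end = 19, so a = (3 * 256) / 3 = 256 (slope 1.0 scaled by 256) and
 * b = 16, packed as DE_COEFTAB_DATA(256, 16) = 0x1000010.
 */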

/*
 * Check if there is a new gamma LUT and if it is of an acceptable size. Also,
 * reject any LUTs that use distinct red, green, and blue curves.
 */
static int malidp_crtc_atomic_check_gamma(struct drm_crtc *crtc,
					  struct drm_crtc_state *state)
{
	struct malidp_crtc_state *mc = to_malidp_crtc_state(state);
	struct drm_color_lut *lut;
	size_t lut_size;
	int i;

	if (!state->color_mgmt_changed || !state->gamma_lut)
		return 0;

	if (crtc->state->gamma_lut &&
	    (crtc->state->gamma_lut->base.id == state->gamma_lut->base.id))
		return 0;

	if (state->gamma_lut->length % sizeof(struct drm_color_lut))
		return -EINVAL;

	lut_size = state->gamma_lut->length / sizeof(struct drm_color_lut);
	if (lut_size != MALIDP_GAMMA_LUT_SIZE)
		return -EINVAL;

	lut = (struct drm_color_lut *)state->gamma_lut->data;
	for (i = 0; i < lut_size; ++i)
		if (!((lut[i].red == lut[i].green) &&
		      (lut[i].red == lut[i].blue)))
			return -EINVAL;

	if (!state->mode_changed) {
		int ret;

		state->mode_changed = true;
		/*
		 * Kerneldoc for drm_atomic_helper_check_modeset mandates that
		 * it be invoked when the driver sets ->mode_changed. Since
		 * changing the gamma LUT doesn't depend on any external
		 * resources, it is safe to call it only once.
		 */
		ret = drm_atomic_helper_check_modeset(crtc->dev, state->state);
		if (ret)
			return ret;
	}

	malidp_generate_gamma_table(state->gamma_lut, mc->gamma_coeffs);
	return 0;
}
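
/*
 * Illustrative userspace sketch (not part of the driver; assumes libdrm and
 * an atomic-capable compositor): build a LUT that passes the checks above,
 * i.e. MALIDP_GAMMA_LUT_SIZE (4096) entries with identical red, green and
 * blue channels, and wrap it in a property blob for the CRTC's GAMMA_LUT
 * property:
 *
 *	struct drm_color_lut lut[4096];
 *	uint32_t blob_id;
 *	int i;
 *
 *	for (i = 0; i < 4096; i++) {
 *		uint16_t v = i << 4;	// linear ramp as an example curve
 *		lut[i].red = lut[i].green = lut[i].blue = v;
 *	}
 *	drmModeCreatePropertyBlob(fd, lut, sizeof(lut), &blob_id);
 *	// ...then attach blob_id to GAMMA_LUT in an atomic commit
 */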

/*
 * Check if there is a new CTM and if it contains valid input. Valid here means
 * that the number is inside the representable range for a Q3.12 number,
 * excluding truncating the fractional part of the input data.
 *
 * The COLORADJ registers can be changed atomically.
 */
static int malidp_crtc_atomic_check_ctm(struct drm_crtc *crtc,
					struct drm_crtc_state *state)
{
	struct malidp_crtc_state *mc = to_malidp_crtc_state(state);
	struct drm_color_ctm *ctm;
	int i;

	if (!state->color_mgmt_changed)
		return 0;

	if (!state->ctm)
		return 0;

	if (crtc->state->ctm && (crtc->state->ctm->base.id ==
				 state->ctm->base.id))
		return 0;

	/*
	 * The size of the ctm is checked in
	 * drm_atomic_replace_property_blob_from_id.
	 */
	ctm = (struct drm_color_ctm *)state->ctm->data;
	for (i = 0; i < ARRAY_SIZE(ctm->matrix); ++i) {
		/* Convert from S31.32 to Q3.12. */
		s64 val = ctm->matrix[i];
		u32 mag = ((((u64)val) & ~BIT_ULL(63)) >> 20) &
			  GENMASK_ULL(14, 0);

		/*
		 * Convert to 2s complement and check the destination's top bit
		 * for overflow. NB: Can't check before converting or it'd
		 * incorrectly reject the case:
		 *   sign == 1
		 *   mag == 0x2000
		 */
		if (val & BIT_ULL(63))
			mag = ~mag + 1;
		if (!!(val & BIT_ULL(63)) != !!(mag & BIT(14)))
			return -EINVAL;
		mc->coloradj_coeffs[i] = mag;
	}

	return 0;
}
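
/*
 * Worked example (illustrative, not from the original source): CTM entries
 * are S31.32 sign-magnitude, so 1.0 is 1ULL << 32 and extracts to
 * mag = 0x1000 (1.0 in Q3.12), while 0.5 (0x80000000) extracts to 0x800.
 * A coefficient of 4.0 extracts to 0x4000: its top bit is set even though
 * the sign bit of the input is not, so it is rejected as outside the Q3.12
 * range.
 */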

static int malidp_crtc_atomic_check_scaling(struct drm_crtc *crtc,
					    struct drm_crtc_state *state)
{
	struct malidp_drm *malidp = crtc_to_malidp_device(crtc);
	struct malidp_hw_device *hwdev = malidp->dev;
	struct malidp_crtc_state *cs = to_malidp_crtc_state(state);
	struct malidp_se_config *s = &cs->scaler_config;
	struct drm_plane *plane;
	struct videomode vm;
	const struct drm_plane_state *pstate;
	u32 h_upscale_factor = 0; /* U16.16 */
	u32 v_upscale_factor = 0; /* U16.16 */
	u8 scaling = cs->scaled_planes_mask;
	int ret;

	if (!scaling) {
		s->scale_enable = false;
		goto mclk_calc;
	}

	/* The scaling engine can only handle one plane at a time. */
	if (scaling & (scaling - 1))
		return -EINVAL;

	drm_atomic_crtc_state_for_each_plane_state(plane, pstate, state) {
		struct malidp_plane *mp = to_malidp_plane(plane);
		u32 phase;

		if (!(mp->layer->id & scaling))
			continue;

		/*
		 * Convert crtc_[w|h] to U32.32, then divide by U16.16 src_[w|h]
		 * to get the U16.16 result.
		 */
		h_upscale_factor = div_u64((u64)pstate->crtc_w << 32,
					   pstate->src_w);
		v_upscale_factor = div_u64((u64)pstate->crtc_h << 32,
					   pstate->src_h);

		s->enhancer_enable = ((h_upscale_factor >> 16) >= 2 ||
				      (v_upscale_factor >> 16) >= 2);

		if (pstate->rotation & MALIDP_ROTATED_MASK) {
			s->input_w = pstate->src_h >> 16;
			s->input_h = pstate->src_w >> 16;
		} else {
			s->input_w = pstate->src_w >> 16;
			s->input_h = pstate->src_h >> 16;
		}

		s->output_w = pstate->crtc_w;
		s->output_h = pstate->crtc_h;

#define SE_N_PHASE 4
#define SE_SHIFT_N_PHASE 12
		/* Calculate initial_phase and delta_phase for horizontal. */
		phase = s->input_w;
		s->h_init_phase =
				((phase << SE_N_PHASE) / s->output_w + 1) / 2;

		phase = s->input_w;
		phase <<= (SE_SHIFT_N_PHASE + SE_N_PHASE);
		s->h_delta_phase = phase / s->output_w;

		/* Same for vertical. */
		phase = s->input_h;
		s->v_init_phase =
				((phase << SE_N_PHASE) / s->output_h + 1) / 2;

		phase = s->input_h;
		phase <<= (SE_SHIFT_N_PHASE + SE_N_PHASE);
		s->v_delta_phase = phase / s->output_h;
#undef SE_N_PHASE
#undef SE_SHIFT_N_PHASE
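		/*
		 * Worked example (illustrative, not from the original source):
		 * upscaling input_w = 1920 to output_w = 3840 gives
		 * h_init_phase = ((1920 << 4) / 3840 + 1) / 2 = 4 and
		 * h_delta_phase = (1920 << 16) / 3840 = 0x8000, i.e. the
		 * scaler advances through the source by half a pixel per
		 * output pixel.
		 */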
		s->plane_src_id = mp->layer->id;
	}

	s->scale_enable = true;
	s->hcoeff = malidp_se_select_coeffs(h_upscale_factor);
	s->vcoeff = malidp_se_select_coeffs(v_upscale_factor);

mclk_calc:
	drm_display_mode_to_videomode(&state->adjusted_mode, &vm);
	ret = hwdev->hw->se_calc_mclk(hwdev, s, &vm);
	if (ret < 0)
		return -EINVAL;
	return 0;
}

static int malidp_crtc_atomic_check(struct drm_crtc *crtc,
				    struct drm_crtc_state *state)
{
	struct malidp_drm *malidp = crtc_to_malidp_device(crtc);
	struct malidp_hw_device *hwdev = malidp->dev;
	struct drm_plane *plane;
	const struct drm_plane_state *pstate;
	u32 rot_mem_free, rot_mem_usable;
	int rotated_planes = 0;
	int ret;

	/*
	 * Check if there is enough rotation memory available for planes
	 * that need 90° and 270° rotation or planes that are compressed.
	 * Each plane has set its required memory size in the ->plane_check()
	 * callback, here we only make sure that the sums are less than the
	 * total usable memory.
	 *
	 * The rotation memory allocation algorithm (for each plane):
	 *  a. If no more rotated or compressed planes exist, all remaining
	 *     rotation memory in the bank is available for use by the plane.
	 *  b. If other rotated or compressed planes exist, and the plane's
	 *     layer ID is DE_VIDEO1, it can use all the memory from the first
	 *     bank if a secondary rotation memory bank is available, otherwise
	 *     it can use up to half the bank's memory.
	 *  c. If other rotated or compressed planes exist, and the plane's
	 *     layer ID is not DE_VIDEO1, it can use half of the available
	 *     memory.
	 *
	 * Note: this algorithm assumes that the order in which the planes are
	 * checked always has the DE_VIDEO1 plane first in the list if it is
	 * rotated. Because that is how we create the planes in the first
	 * place, things work under the current DRM version, but if the order
	 * in which drm_atomic_crtc_state_for_each_plane() iterates over planes
	 * ever changes, we need to pre-sort the planes before validation.
	 */
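
	/*
	 * Illustrative example (with made-up bank sizes, not from the original
	 * source): if both rotation memory banks hold 32 KiB and two planes
	 * need rotation memory, the pool is 64 KiB. A DE_VIDEO1 plane
	 * processed first may claim the whole first bank (32 KiB); the
	 * remaining plane, being the last one, can then use whatever is left
	 * (32 KiB).
	 */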

	/* first count the number of rotated planes */
	drm_atomic_crtc_state_for_each_plane_state(plane, pstate, state) {
		struct drm_framebuffer *fb = pstate->fb;

		if ((pstate->rotation & MALIDP_ROTATED_MASK) || fb->modifier)
			rotated_planes++;
	}

	rot_mem_free = hwdev->rotation_memory[0];
	/*
	 * if we have more than 1 plane using rotation memory, use the second
	 * block of rotation memory as well
	 */
	if (rotated_planes > 1)
		rot_mem_free += hwdev->rotation_memory[1];

	/* now validate the rotation memory requirements */
	drm_atomic_crtc_state_for_each_plane_state(plane, pstate, state) {
		struct malidp_plane *mp = to_malidp_plane(plane);
		struct malidp_plane_state *ms = to_malidp_plane_state(pstate);
		struct drm_framebuffer *fb = pstate->fb;

		if ((pstate->rotation & MALIDP_ROTATED_MASK) || fb->modifier) {
			/* process current plane */
			rotated_planes--;

			if (!rotated_planes) {
				/* no more rotated planes, we can use what's left */
				rot_mem_usable = rot_mem_free;
			} else {
				if ((mp->layer->id != DE_VIDEO1) ||
				    (hwdev->rotation_memory[1] == 0))
					rot_mem_usable = rot_mem_free / 2;
				else
					rot_mem_usable = hwdev->rotation_memory[0];
			}

			rot_mem_free -= rot_mem_usable;

			if (ms->rotmem_size > rot_mem_usable)
				return -EINVAL;
		}
	}

	/* If only the writeback routing has changed, we don't need a modeset */
	if (state->connectors_changed) {
		u32 old_mask = crtc->state->connector_mask;
		u32 new_mask = state->connector_mask;

		if ((old_mask ^ new_mask) ==
		    (1 << drm_connector_index(&malidp->mw_connector.base)))
			state->connectors_changed = false;
	}

	ret = malidp_crtc_atomic_check_gamma(crtc, state);
	ret = ret ? ret : malidp_crtc_atomic_check_ctm(crtc, state);
	ret = ret ? ret : malidp_crtc_atomic_check_scaling(crtc, state);

	return ret;
}

static const struct drm_crtc_helper_funcs malidp_crtc_helper_funcs = {
	.mode_valid = malidp_crtc_mode_valid,
	.atomic_check = malidp_crtc_atomic_check,
	.atomic_enable = malidp_crtc_atomic_enable,
	.atomic_disable = malidp_crtc_atomic_disable,
};

static struct drm_crtc_state *malidp_crtc_duplicate_state(struct drm_crtc *crtc)
{
	struct malidp_crtc_state *state, *old_state;

	if (WARN_ON(!crtc->state))
		return NULL;

	old_state = to_malidp_crtc_state(crtc->state);
	state = kmalloc(sizeof(*state), GFP_KERNEL);
	if (!state)
		return NULL;

	__drm_atomic_helper_crtc_duplicate_state(crtc, &state->base);
	memcpy(state->gamma_coeffs, old_state->gamma_coeffs,
	       sizeof(state->gamma_coeffs));
	memcpy(state->coloradj_coeffs, old_state->coloradj_coeffs,
	       sizeof(state->coloradj_coeffs));
	memcpy(&state->scaler_config, &old_state->scaler_config,
	       sizeof(state->scaler_config));
	state->scaled_planes_mask = 0;

	return &state->base;
}

static void malidp_crtc_reset(struct drm_crtc *crtc)
{
	struct malidp_crtc_state *state = NULL;

	if (crtc->state) {
		state = to_malidp_crtc_state(crtc->state);
		__drm_atomic_helper_crtc_destroy_state(crtc->state);
	}

	kfree(state);
	state = kzalloc(sizeof(*state), GFP_KERNEL);
	if (state) {
		crtc->state = &state->base;
		crtc->state->crtc = crtc;
	}
}

static void malidp_crtc_destroy_state(struct drm_crtc *crtc,
				      struct drm_crtc_state *state)
{
	struct malidp_crtc_state *mali_state = NULL;

	if (state) {
		mali_state = to_malidp_crtc_state(state);
		__drm_atomic_helper_crtc_destroy_state(state);
	}

	kfree(mali_state);
}

static int malidp_crtc_enable_vblank(struct drm_crtc *crtc)
{
	struct malidp_drm *malidp = crtc_to_malidp_device(crtc);
	struct malidp_hw_device *hwdev = malidp->dev;

	malidp_hw_enable_irq(hwdev, MALIDP_DE_BLOCK,
			     hwdev->hw->map.de_irq_map.vsync_irq);
	return 0;
}

static void malidp_crtc_disable_vblank(struct drm_crtc *crtc)
{
	struct malidp_drm *malidp = crtc_to_malidp_device(crtc);
	struct malidp_hw_device *hwdev = malidp->dev;

	malidp_hw_disable_irq(hwdev, MALIDP_DE_BLOCK,
			      hwdev->hw->map.de_irq_map.vsync_irq);
}

static const struct drm_crtc_funcs malidp_crtc_funcs = {
	.gamma_set = drm_atomic_helper_legacy_gamma_set,
	.destroy = drm_crtc_cleanup,
	.set_config = drm_atomic_helper_set_config,
	.page_flip = drm_atomic_helper_page_flip,
	.reset = malidp_crtc_reset,
	.atomic_duplicate_state = malidp_crtc_duplicate_state,
	.atomic_destroy_state = malidp_crtc_destroy_state,
	.enable_vblank = malidp_crtc_enable_vblank,
	.disable_vblank = malidp_crtc_disable_vblank,
};

int malidp_crtc_init(struct drm_device *drm)
{
	struct malidp_drm *malidp = drm->dev_private;
	struct drm_plane *primary = NULL, *plane;
	int ret;

	ret = malidp_de_planes_init(drm);
	if (ret < 0) {
		DRM_ERROR("Failed to initialise planes\n");
		return ret;
	}

	drm_for_each_plane(plane, drm) {
		if (plane->type == DRM_PLANE_TYPE_PRIMARY) {
			primary = plane;
			break;
		}
	}

	if (!primary) {
		DRM_ERROR("no primary plane found\n");
		return -EINVAL;
	}

	ret = drm_crtc_init_with_planes(drm, &malidp->crtc, primary, NULL,
					&malidp_crtc_funcs, NULL);
	if (ret)
		return ret;

	drm_crtc_helper_add(&malidp->crtc, &malidp_crtc_helper_funcs);
	drm_mode_crtc_set_gamma_size(&malidp->crtc, MALIDP_GAMMA_LUT_SIZE);
	/* No inverse-gamma: it is per-plane. */
	drm_crtc_enable_color_mgmt(&malidp->crtc, 0, true, MALIDP_GAMMA_LUT_SIZE);

	malidp_se_set_enh_coeffs(malidp->dev);

	return 0;
}