*/
static void intel_update_watermarks(struct drm_i915_private *dev_priv)
{
- if (dev_priv->wm_disp.update_wm)
- dev_priv->wm_disp.update_wm(dev_priv);
+ if (dev_priv->wm_disp->update_wm)
+ dev_priv->wm_disp->update_wm(dev_priv);
}
static int intel_compute_pipe_wm(struct intel_atomic_state *state,
struct intel_crtc *crtc)
{
struct drm_i915_private *dev_priv = to_i915(state->base.dev);
- if (dev_priv->wm_disp.compute_pipe_wm)
- return dev_priv->wm_disp.compute_pipe_wm(state, crtc);
+ if (dev_priv->wm_disp->compute_pipe_wm)
+ return dev_priv->wm_disp->compute_pipe_wm(state, crtc);
return 0;
}
static int intel_compute_intermediate_wm(struct intel_atomic_state *state,
struct intel_crtc *crtc)
{
struct drm_i915_private *dev_priv = to_i915(state->base.dev);
- if (!dev_priv->wm_disp.compute_intermediate_wm)
+ if (!dev_priv->wm_disp->compute_intermediate_wm)
return 0;
if (drm_WARN_ON(&dev_priv->drm,
- !dev_priv->wm_disp.compute_pipe_wm))
+ !dev_priv->wm_disp->compute_pipe_wm))
return 0;
- return dev_priv->wm_disp.compute_intermediate_wm(state, crtc);
+ return dev_priv->wm_disp->compute_intermediate_wm(state, crtc);
}
static bool intel_initial_watermarks(struct intel_atomic_state *state,
struct intel_crtc *crtc)
{
struct drm_i915_private *dev_priv = to_i915(state->base.dev);
- if (dev_priv->wm_disp.initial_watermarks) {
- dev_priv->wm_disp.initial_watermarks(state, crtc);
+ if (dev_priv->wm_disp->initial_watermarks) {
+ dev_priv->wm_disp->initial_watermarks(state, crtc);
return true;
}
return false;
}
static void intel_atomic_update_watermarks(struct intel_atomic_state *state,
struct intel_crtc *crtc)
{
struct drm_i915_private *dev_priv = to_i915(state->base.dev);
- if (dev_priv->wm_disp.atomic_update_watermarks)
- dev_priv->wm_disp.atomic_update_watermarks(state, crtc);
+ if (dev_priv->wm_disp->atomic_update_watermarks)
+ dev_priv->wm_disp->atomic_update_watermarks(state, crtc);
}
static void intel_optimize_watermarks(struct intel_atomic_state *state,
struct intel_crtc *crtc)
{
struct drm_i915_private *dev_priv = to_i915(state->base.dev);
- if (dev_priv->wm_disp.optimize_watermarks)
- dev_priv->wm_disp.optimize_watermarks(state, crtc);
+ if (dev_priv->wm_disp->optimize_watermarks)
+ dev_priv->wm_disp->optimize_watermarks(state, crtc);
}
static int intel_compute_global_watermarks(struct intel_atomic_state *state)
{
struct drm_i915_private *dev_priv = to_i915(state->base.dev);
- if (dev_priv->wm_disp.compute_global_watermarks)
- return dev_priv->wm_disp.compute_global_watermarks(state);
+ if (dev_priv->wm_disp->compute_global_watermarks)
+ return dev_priv->wm_disp->compute_global_watermarks(state);
return 0;
}
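All of the wrappers above share one shape: NULL-check an optional hook in the const vtable that dev_priv->wm_disp now points at, call it when present, and otherwise fall back to a harmless default. The following standalone sketch illustrates that dispatch pattern in plain C; it is not i915 code, and every name in it is hypothetical.

/* Minimal sketch of the NULL-checked vtable dispatch used above
 * (hypothetical names, not i915 code). */
#include <stdio.h>

struct wm_funcs {
	void (*update_wm)(void *priv);	/* optional per-platform hook */
};

struct device_priv {
	const struct wm_funcs *wm;	/* shared, read-only vtable */
};

static void example_update_wm(void *priv)
{
	(void)priv;
	printf("platform watermark update\n");
}

static const struct wm_funcs example_funcs = {
	.update_wm = example_update_wm,
};

static const struct wm_funcs example_nop_funcs = {
	/* no hooks: every wrapper call becomes a no-op */
};

/* Same shape as intel_update_watermarks(): safe on any platform. */
static void update_watermarks(struct device_priv *priv)
{
	if (priv->wm->update_wm)
		priv->wm->update_wm(priv);
}

int main(void)
{
	struct device_priv a = { .wm = &example_funcs };
	struct device_priv b = { .wm = &example_nop_funcs };

	update_watermarks(&a);	/* dispatches to the platform hook */
	update_watermarks(&b);	/* silently does nothing */
	return 0;
}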
if (DISPLAY_VER(dev_priv) != 2)
intel_set_cpu_fifo_underrun_reporting(dev_priv, pipe, false);
- if (!dev_priv->wm_disp.initial_watermarks)
+ if (!dev_priv->wm_disp->initial_watermarks)
intel_update_watermarks(dev_priv);
/* clock the pipe down to 640x480@60 to potentially save power */
int i;
/* Only supported on platforms that use atomic watermark design */
- if (!dev_priv->wm_disp.optimize_watermarks)
+ if (!dev_priv->wm_disp->optimize_watermarks)
return;
state = drm_atomic_state_alloc(&dev_priv->drm);
}
}
+static const struct drm_i915_wm_disp_funcs skl_wm_funcs = {
+ .compute_global_watermarks = skl_compute_wm,
+};
+
+static const struct drm_i915_wm_disp_funcs ilk_wm_funcs = {
+ .compute_pipe_wm = ilk_compute_pipe_wm,
+ .compute_intermediate_wm = ilk_compute_intermediate_wm,
+ .initial_watermarks = ilk_initial_watermarks,
+ .optimize_watermarks = ilk_optimize_watermarks,
+};
+
+static const struct drm_i915_wm_disp_funcs vlv_wm_funcs = {
+ .compute_pipe_wm = vlv_compute_pipe_wm,
+ .compute_intermediate_wm = vlv_compute_intermediate_wm,
+ .initial_watermarks = vlv_initial_watermarks,
+ .optimize_watermarks = vlv_optimize_watermarks,
+ .atomic_update_watermarks = vlv_atomic_update_fifo,
+};
+
+static const struct drm_i915_wm_disp_funcs g4x_wm_funcs = {
+ .compute_pipe_wm = g4x_compute_pipe_wm,
+ .compute_intermediate_wm = g4x_compute_intermediate_wm,
+ .initial_watermarks = g4x_initial_watermarks,
+ .optimize_watermarks = g4x_optimize_watermarks,
+};
+
+static const struct drm_i915_wm_disp_funcs pnv_wm_funcs = {
+ .update_wm = pnv_update_wm,
+};
+
+static const struct drm_i915_wm_disp_funcs i965_wm_funcs = {
+ .update_wm = i965_update_wm,
+};
+
+static const struct drm_i915_wm_disp_funcs i9xx_wm_funcs = {
+ .update_wm = i9xx_update_wm,
+};
+
+static const struct drm_i915_wm_disp_funcs i845_wm_funcs = {
+ .update_wm = i845_update_wm,
+};
+
+static const struct drm_i915_wm_disp_funcs nop_funcs = {
+};
+
/* Set up chip specific power management-related functions */
void intel_init_pm(struct drm_i915_private *dev_priv)
{
/* For FIFO watermark updates */
if (DISPLAY_VER(dev_priv) >= 9) {
skl_setup_wm_latency(dev_priv);
- dev_priv->wm_disp.compute_global_watermarks = skl_compute_wm;
+ dev_priv->wm_disp = &skl_wm_funcs;
} else if (HAS_PCH_SPLIT(dev_priv)) {
ilk_setup_wm_latency(dev_priv);
if ((DISPLAY_VER(dev_priv) == 5 && dev_priv->wm.pri_latency[1] &&
dev_priv->wm.spr_latency[1] && dev_priv->wm.cur_latency[1]) ||
(DISPLAY_VER(dev_priv) != 5 && dev_priv->wm.pri_latency[0] &&
dev_priv->wm.spr_latency[0] && dev_priv->wm.cur_latency[0])) {
- dev_priv->wm_disp.compute_pipe_wm = ilk_compute_pipe_wm;
- dev_priv->wm_disp.compute_intermediate_wm =
- ilk_compute_intermediate_wm;
- dev_priv->wm_disp.initial_watermarks =
- ilk_initial_watermarks;
- dev_priv->wm_disp.optimize_watermarks =
- ilk_optimize_watermarks;
+ dev_priv->wm_disp = &ilk_wm_funcs;
} else {
drm_dbg_kms(&dev_priv->drm,
"Failed to read display plane latency. "
"Disable CxSR\n");
+ dev_priv->wm_disp = &nop_funcs;
}
} else if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
vlv_setup_wm_latency(dev_priv);
- dev_priv->wm_disp.compute_pipe_wm = vlv_compute_pipe_wm;
- dev_priv->wm_disp.compute_intermediate_wm = vlv_compute_intermediate_wm;
- dev_priv->wm_disp.initial_watermarks = vlv_initial_watermarks;
- dev_priv->wm_disp.optimize_watermarks = vlv_optimize_watermarks;
- dev_priv->wm_disp.atomic_update_watermarks = vlv_atomic_update_fifo;
+ dev_priv->wm_disp = &vlv_wm_funcs;
} else if (IS_G4X(dev_priv)) {
g4x_setup_wm_latency(dev_priv);
- dev_priv->wm_disp.compute_pipe_wm = g4x_compute_pipe_wm;
- dev_priv->wm_disp.compute_intermediate_wm = g4x_compute_intermediate_wm;
- dev_priv->wm_disp.initial_watermarks = g4x_initial_watermarks;
- dev_priv->wm_disp.optimize_watermarks = g4x_optimize_watermarks;
+ dev_priv->wm_disp = &g4x_wm_funcs;
} else if (IS_PINEVIEW(dev_priv)) {
if (!intel_get_cxsr_latency(!IS_MOBILE(dev_priv),
dev_priv->is_ddr3,
dev_priv->fsb_freq, dev_priv->mem_freq)) {
/* Disable CxSR and never update its watermark again */
intel_set_memory_cxsr(dev_priv, false);
- dev_priv->wm_disp.update_wm = NULL;
+ dev_priv->wm_disp = &nop_funcs;
} else
- dev_priv->wm_disp.update_wm = pnv_update_wm;
+ dev_priv->wm_disp = &pnv_wm_funcs;
} else if (DISPLAY_VER(dev_priv) == 4) {
- dev_priv->wm_disp.update_wm = i965_update_wm;
+ dev_priv->wm_disp = &i965_wm_funcs;
} else if (DISPLAY_VER(dev_priv) == 3) {
- dev_priv->wm_disp.update_wm = i9xx_update_wm;
+ dev_priv->wm_disp = &i9xx_wm_funcs;
} else if (DISPLAY_VER(dev_priv) == 2) {
if (INTEL_NUM_PIPES(dev_priv) == 1)
- dev_priv->wm_disp.update_wm = i845_update_wm;
+ dev_priv->wm_disp = &i845_wm_funcs;
else
- dev_priv->wm_disp.update_wm = i9xx_update_wm;
+ dev_priv->wm_disp = &i9xx_wm_funcs;
} else {
drm_err(&dev_priv->drm,
"unexpected fall-through in %s\n", __func__);
+ dev_priv->wm_disp = &nop_funcs;
}
}
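For completeness, here is a hedged, self-contained sketch of the selection step that intel_init_pm() now performs: exactly one static const table is installed per platform, and the nop table stands in whenever watermark handling must be disabled, so wm_disp is never left NULL. The names below are hypothetical, not real i915 identifiers.

/* Sketch of init-time vtable selection with a nop fallback
 * (hypothetical names, not i915 code). */
#include <stdbool.h>
#include <stdio.h>

struct wm_funcs {
	void (*update_wm)(void *priv);
};

struct device_priv {
	const struct wm_funcs *wm;
	int display_ver;
	bool latency_ok;
};

static void new_platform_update_wm(void *priv)
{
	(void)priv;
	printf("new platform watermark path\n");
}

static void legacy_update_wm(void *priv)
{
	(void)priv;
	printf("legacy watermark path\n");
}

static const struct wm_funcs new_platform_funcs = {
	.update_wm = new_platform_update_wm,
};

static const struct wm_funcs legacy_funcs = {
	.update_wm = legacy_update_wm,
};

static const struct wm_funcs example_nop_funcs = {
	/* deliberately empty: disables watermark updates safely */
};

/* Mirrors the shape of intel_init_pm(): pick exactly one const table. */
static void init_pm(struct device_priv *priv)
{
	if (priv->display_ver >= 9)
		priv->wm = &new_platform_funcs;
	else if (priv->latency_ok)
		priv->wm = &legacy_funcs;
	else
		priv->wm = &example_nop_funcs;	/* never leave wm NULL */
}

int main(void)
{
	struct device_priv dev = { .display_ver = 5, .latency_ok = false };

	init_pm(&dev);
	if (dev.wm->update_wm)	/* same NULL-check as the wrappers */
		dev.wm->update_wm(&dev);
	return 0;
}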