drm/i915: make sure eDP PLL is enabled at the right time

We need to make sure the eDP PLL is enabled before the pipes or planes,
so do it as part of the DP prepare mode set function.

Signed-off-by: Jesse Barnes <jbarnes@virtuousgeek.org>

+60 -39
+2 -37
drivers/gpu/drm/i915/intel_display.c
··· 1622 return 0; 1623 } 1624 1625 - static void ironlake_disable_pll_edp (struct drm_crtc *crtc) 1626 - { 1627 - struct drm_device *dev = crtc->dev; 1628 - struct drm_i915_private *dev_priv = dev->dev_private; 1629 - u32 dpa_ctl; 1630 - 1631 - DRM_DEBUG_KMS("\n"); 1632 - dpa_ctl = I915_READ(DP_A); 1633 - dpa_ctl &= ~DP_PLL_ENABLE; 1634 - I915_WRITE(DP_A, dpa_ctl); 1635 - } 1636 - 1637 - static void ironlake_enable_pll_edp (struct drm_crtc *crtc) 1638 - { 1639 - struct drm_device *dev = crtc->dev; 1640 - struct drm_i915_private *dev_priv = dev->dev_private; 1641 - u32 dpa_ctl; 1642 - 1643 - dpa_ctl = I915_READ(DP_A); 1644 - dpa_ctl |= DP_PLL_ENABLE; 1645 - I915_WRITE(DP_A, dpa_ctl); 1646 - POSTING_READ(DP_A); 1647 - udelay(200); 1648 - } 1649 - 1650 - 1651 static void ironlake_set_pll_edp (struct drm_crtc *crtc, int clock) 1652 { 1653 struct drm_device *dev = crtc->dev; ··· 1914 } 1915 } 1916 1917 - if (HAS_eDP) { 1918 - /* enable eDP PLL */ 1919 - ironlake_enable_pll_edp(crtc); 1920 - } else { 1921 1922 /* enable PCH FDI RX PLL, wait warmup plus DMI latency */ 1923 temp = I915_READ(fdi_rx_reg); ··· 2212 temp = I915_READ(pch_dpll_reg); 2213 I915_WRITE(pch_dpll_reg, temp & ~DPLL_VCO_ENABLE); 2214 I915_READ(pch_dpll_reg); 2215 - 2216 - if (HAS_eDP) { 2217 - ironlake_disable_pll_edp(crtc); 2218 - } 2219 2220 /* Switch from PCDclk to Rawclk */ 2221 temp = I915_READ(fdi_rx_reg); ··· 3897 dpll_reg = pch_dpll_reg; 3898 } 3899 3900 - if (is_edp) { 3901 - ironlake_disable_pll_edp(crtc); 3902 - } else if ((dpll & DPLL_VCO_ENABLE)) { 3903 I915_WRITE(fp_reg, fp); 3904 I915_WRITE(dpll_reg, dpll & ~DPLL_VCO_ENABLE); 3905 I915_READ(dpll_reg);
··· 1622 return 0; 1623 } 1624 1625 static void ironlake_set_pll_edp (struct drm_crtc *crtc, int clock) 1626 { 1627 struct drm_device *dev = crtc->dev; ··· 1940 } 1941 } 1942 1943 + if (!HAS_eDP) { 1944 1945 /* enable PCH FDI RX PLL, wait warmup plus DMI latency */ 1946 temp = I915_READ(fdi_rx_reg); ··· 2241 temp = I915_READ(pch_dpll_reg); 2242 I915_WRITE(pch_dpll_reg, temp & ~DPLL_VCO_ENABLE); 2243 I915_READ(pch_dpll_reg); 2244 2245 /* Switch from PCDclk to Rawclk */ 2246 temp = I915_READ(fdi_rx_reg); ··· 3930 dpll_reg = pch_dpll_reg; 3931 } 3932 3933 + if (!is_edp) { 3934 I915_WRITE(fp_reg, fp); 3935 I915_WRITE(dpll_reg, dpll & ~DPLL_VCO_ENABLE); 3936 I915_READ(dpll_reg);
+58 -2
drivers/gpu/drm/i915/intel_dp.c
··· 831 I915_WRITE(PCH_PP_CONTROL, pp); 832 } 833 834 static void 835 intel_dp_dpms(struct drm_encoder *encoder, int mode) 836 { ··· 900 } 901 if (dp_reg & DP_PORT_EN) 902 intel_dp_link_down(intel_dp); 903 } else { 904 if (!(dp_reg & DP_PORT_EN)) { 905 if (IS_eDP(intel_dp) || IS_PCH_eDP(intel_dp)) ··· 1483 static const struct drm_encoder_helper_funcs intel_dp_helper_funcs = { 1484 .dpms = intel_dp_dpms, 1485 .mode_fixup = intel_dp_mode_fixup, 1486 - .prepare = intel_encoder_prepare, 1487 .mode_set = intel_dp_mode_set, 1488 - .commit = intel_encoder_commit, 1489 }; 1490 1491 static const struct drm_connector_funcs intel_dp_connector_funcs = {
··· 831 I915_WRITE(PCH_PP_CONTROL, pp); 832 } 833 834 + static void ironlake_edp_pll_on(struct drm_encoder *encoder) 835 + { 836 + struct drm_device *dev = encoder->dev; 837 + struct drm_i915_private *dev_priv = dev->dev_private; 838 + u32 dpa_ctl; 839 + 840 + DRM_DEBUG_KMS("\n"); 841 + dpa_ctl = I915_READ(DP_A); 842 + dpa_ctl |= DP_PLL_ENABLE; 843 + I915_WRITE(DP_A, dpa_ctl); POSTING_READ(DP_A); udelay(200); 844 + } 845 + 846 + static void ironlake_edp_pll_off(struct drm_encoder *encoder) 847 + { 848 + struct drm_device *dev = encoder->dev; 849 + struct drm_i915_private *dev_priv = dev->dev_private; 850 + u32 dpa_ctl; 851 + 852 + dpa_ctl = I915_READ(DP_A); 853 + dpa_ctl &= ~DP_PLL_ENABLE; 854 + I915_WRITE(DP_A, dpa_ctl); 855 + udelay(200); 856 + } 857 + 858 + static void intel_dp_prepare(struct drm_encoder *encoder) 859 + { 860 + struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 861 + struct drm_device *dev = encoder->dev; 862 + struct drm_i915_private *dev_priv = dev->dev_private; 863 + uint32_t dp_reg = I915_READ(intel_dp->output_reg); 864 + 865 + if (IS_eDP(intel_dp)) { 866 + ironlake_edp_backlight_off(dev); 867 + ironlake_edp_panel_on(dev); 868 + ironlake_edp_pll_on(encoder); 869 + } 870 + if (dp_reg & DP_PORT_EN) 871 + intel_dp_link_down(intel_dp); 872 + } 873 + 874 + static void intel_dp_commit(struct drm_encoder *encoder) 875 + { 876 + struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 877 + struct drm_device *dev = encoder->dev; 878 + struct drm_i915_private *dev_priv = dev->dev_private; 879 + uint32_t dp_reg = I915_READ(intel_dp->output_reg); 880 + 881 + if (!(dp_reg & DP_PORT_EN)) { 882 + intel_dp_link_train(intel_dp); 883 + } 884 + if (IS_eDP(intel_dp) || IS_PCH_eDP(intel_dp)) 885 + ironlake_edp_backlight_on(dev); 886 + } 887 + 888 static void 889 intel_dp_dpms(struct drm_encoder *encoder, int mode) 890 { ··· 846 } 847 if (dp_reg & DP_PORT_EN) 848 intel_dp_link_down(intel_dp); 849 + if (IS_eDP(intel_dp) || IS_PCH_eDP(intel_dp)) 850 + ironlake_edp_pll_off(encoder); 851 } else { 
852 if (!(dp_reg & DP_PORT_EN)) { 853 if (IS_eDP(intel_dp) || IS_PCH_eDP(intel_dp)) ··· 1427 static const struct drm_encoder_helper_funcs intel_dp_helper_funcs = { 1428 .dpms = intel_dp_dpms, 1429 .mode_fixup = intel_dp_mode_fixup, 1430 + .prepare = intel_dp_prepare, 1431 .mode_set = intel_dp_mode_set, 1432 + .commit = intel_dp_commit, 1433 }; 1434 1435 static const struct drm_connector_funcs intel_dp_connector_funcs = {