Commit 7ff89ca2 authored by Ville Syrjälä

drm/i915: Move most cdclk/rawclk related code to intel_cdclk.c

Let's try to shrink intel_display.c a bit by moving the cdclk/rawclk
stuff to a new file. It's all reasonably self-contained, so we don't
even have to add that many non-static symbols.

We'll also take the opportunity to shuffle around the functions a bit
to get things in a more consistent order based on the platform.

v2: Add kernel-docs (Ander)
v3: Deal with IS_GEN9_BC()
v4: Deal with i945gm_get_cdclk()
Signed-off-by: Ville Syrjälä <ville.syrjala@linux.intel.com>
Reviewed-by: Ander Conselvan de Oliveira <conselvan2@gmail.com>
Link: http://patchwork.freedesktop.org/patch/msgid/20170207183305.19656-1-ville.syrjala@linux.intel.com
parent 4717e8bb
@@ -213,6 +213,15 @@ Video BIOS Table (VBT)
 .. kernel-doc:: drivers/gpu/drm/i915/intel_vbt_defs.h
    :internal:
 
+Display clocks
+--------------
+
+.. kernel-doc:: drivers/gpu/drm/i915/intel_cdclk.c
+   :doc: CDCLK / RAWCLK
+
+.. kernel-doc:: drivers/gpu/drm/i915/intel_cdclk.c
+   :internal:
+
 Display PLLs
 ------------
@@ -72,6 +72,7 @@ i915-y += intel_audio.o \
	  intel_atomic.o \
	  intel_atomic_plane.o \
	  intel_bios.o \
+	  intel_cdclk.o \
	  intel_color.o \
	  intel_display.o \
	  intel_dpio_phy.o \
/*
* Copyright © 2006-2017 Intel Corporation
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice (including the next
* paragraph) shall be included in all copies or substantial portions of the
* Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
#include "intel_drv.h"
/**
* DOC: CDCLK / RAWCLK
*
* The display engine uses several different clocks to do its work. There
* are two main clocks involved that aren't directly related to the actual
* pixel clock or any symbol/bit clock of the actual output port. These
* are the core display clock (CDCLK) and RAWCLK.
*
* CDCLK clocks most of the display pipe logic, and thus its frequency
* must be high enough to support the rate at which pixels are flowing
* through the pipes. Downscaling must also be accounted for, as that increases
* the effective pixel rate.
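*
* For example, on VLV the driver keeps the pixel rate below ~90% of the
* CDCLK frequency (~95% on CHV); see vlv_calc_cdclk() and
* intel_compute_max_dotclk() below.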
*
* On several platforms the CDCLK frequency can be changed dynamically
* to minimize power consumption for a given display configuration.
* Typically changes to the CDCLK frequency require all the display pipes
* to be shut down while the frequency is being changed.
*
* On SKL+ the DMC will toggle the CDCLK off/on during DC5/6 entry/exit.
* DMC will not change the active CDCLK frequency however, so that part
* will still be performed by the driver directly.
*
* RAWCLK is a fixed frequency clock, often used by various auxiliary
* blocks such as AUX CH or backlight PWM. Hence the only thing we
* really need to know about RAWCLK is its frequency so that various
* dividers can be programmed correctly.
*/
static int fixed_133mhz_get_cdclk(struct drm_i915_private *dev_priv)
{
return 133333;
}
static int fixed_200mhz_get_cdclk(struct drm_i915_private *dev_priv)
{
return 200000;
}
static int fixed_266mhz_get_cdclk(struct drm_i915_private *dev_priv)
{
return 266667;
}
static int fixed_333mhz_get_cdclk(struct drm_i915_private *dev_priv)
{
return 333333;
}
static int fixed_400mhz_get_cdclk(struct drm_i915_private *dev_priv)
{
return 400000;
}
static int fixed_450mhz_get_cdclk(struct drm_i915_private *dev_priv)
{
return 450000;
}
static int i85x_get_cdclk(struct drm_i915_private *dev_priv)
{
struct pci_dev *pdev = dev_priv->drm.pdev;
u16 hpllcc = 0;
/*
* 852GM/852GMV only supports 133 MHz and the HPLLCC
* encoding is different :(
* FIXME is this the right way to detect 852GM/852GMV?
*/
if (pdev->revision == 0x1)
return 133333;
pci_bus_read_config_word(pdev->bus,
PCI_DEVFN(0, 3), HPLLCC, &hpllcc);
/* Assume that the hardware is in the high speed state. This
* should be the default.
*/
switch (hpllcc & GC_CLOCK_CONTROL_MASK) {
case GC_CLOCK_133_200:
case GC_CLOCK_133_200_2:
case GC_CLOCK_100_200:
return 200000;
case GC_CLOCK_166_250:
return 250000;
case GC_CLOCK_100_133:
return 133333;
case GC_CLOCK_133_266:
case GC_CLOCK_133_266_2:
case GC_CLOCK_166_266:
return 266667;
}
/* Shouldn't happen */
return 0;
}
static int i915gm_get_cdclk(struct drm_i915_private *dev_priv)
{
struct pci_dev *pdev = dev_priv->drm.pdev;
u16 gcfgc = 0;
pci_read_config_word(pdev, GCFGC, &gcfgc);
if (gcfgc & GC_LOW_FREQUENCY_ENABLE)
return 133333;
switch (gcfgc & GC_DISPLAY_CLOCK_MASK) {
case GC_DISPLAY_CLOCK_333_320_MHZ:
return 333333;
default:
case GC_DISPLAY_CLOCK_190_200_MHZ:
return 190000;
}
}
static int i945gm_get_cdclk(struct drm_i915_private *dev_priv)
{
struct pci_dev *pdev = dev_priv->drm.pdev;
u16 gcfgc = 0;
pci_read_config_word(pdev, GCFGC, &gcfgc);
if (gcfgc & GC_LOW_FREQUENCY_ENABLE)
return 133333;
switch (gcfgc & GC_DISPLAY_CLOCK_MASK) {
case GC_DISPLAY_CLOCK_333_320_MHZ:
return 320000;
default:
case GC_DISPLAY_CLOCK_190_200_MHZ:
return 200000;
}
}
static unsigned int intel_hpll_vco(struct drm_i915_private *dev_priv)
{
static const unsigned int blb_vco[8] = {
[0] = 3200000,
[1] = 4000000,
[2] = 5333333,
[3] = 4800000,
[4] = 6400000,
};
static const unsigned int pnv_vco[8] = {
[0] = 3200000,
[1] = 4000000,
[2] = 5333333,
[3] = 4800000,
[4] = 2666667,
};
static const unsigned int cl_vco[8] = {
[0] = 3200000,
[1] = 4000000,
[2] = 5333333,
[3] = 6400000,
[4] = 3333333,
[5] = 3566667,
[6] = 4266667,
};
static const unsigned int elk_vco[8] = {
[0] = 3200000,
[1] = 4000000,
[2] = 5333333,
[3] = 4800000,
};
static const unsigned int ctg_vco[8] = {
[0] = 3200000,
[1] = 4000000,
[2] = 5333333,
[3] = 6400000,
[4] = 2666667,
[5] = 4266667,
};
const unsigned int *vco_table;
unsigned int vco;
uint8_t tmp = 0;
/* FIXME other chipsets? */
if (IS_GM45(dev_priv))
vco_table = ctg_vco;
else if (IS_G4X(dev_priv))
vco_table = elk_vco;
else if (IS_I965GM(dev_priv))
vco_table = cl_vco;
else if (IS_PINEVIEW(dev_priv))
vco_table = pnv_vco;
else if (IS_G33(dev_priv))
vco_table = blb_vco;
else
return 0;
tmp = I915_READ(IS_MOBILE(dev_priv) ? HPLLVCO_MOBILE : HPLLVCO);
vco = vco_table[tmp & 0x7];
if (vco == 0)
DRM_ERROR("Bad HPLL VCO (HPLLVCO=0x%02x)\n", tmp);
else
DRM_DEBUG_KMS("HPLL VCO %u kHz\n", vco);
return vco;
}
static int g33_get_cdclk(struct drm_i915_private *dev_priv)
{
struct pci_dev *pdev = dev_priv->drm.pdev;
static const uint8_t div_3200[] = { 12, 10, 8, 7, 5, 16 };
static const uint8_t div_4000[] = { 14, 12, 10, 8, 6, 20 };
static const uint8_t div_4800[] = { 20, 14, 12, 10, 8, 24 };
static const uint8_t div_5333[] = { 20, 16, 12, 12, 8, 28 };
const uint8_t *div_table;
unsigned int cdclk_sel, vco = intel_hpll_vco(dev_priv);
uint16_t tmp = 0;
pci_read_config_word(pdev, GCFGC, &tmp);
cdclk_sel = (tmp >> 4) & 0x7;
if (cdclk_sel >= ARRAY_SIZE(div_3200))
goto fail;
switch (vco) {
case 3200000:
div_table = div_3200;
break;
case 4000000:
div_table = div_4000;
break;
case 4800000:
div_table = div_4800;
break;
case 5333333:
div_table = div_5333;
break;
default:
goto fail;
}
return DIV_ROUND_CLOSEST(vco, div_table[cdclk_sel]);
fail:
DRM_ERROR("Unable to determine CDCLK. HPLL VCO=%u kHz, CFGC=0x%08x\n",
vco, tmp);
return 190476;
}
static int pnv_get_cdclk(struct drm_i915_private *dev_priv)
{
struct pci_dev *pdev = dev_priv->drm.pdev;
u16 gcfgc = 0;
pci_read_config_word(pdev, GCFGC, &gcfgc);
switch (gcfgc & GC_DISPLAY_CLOCK_MASK) {
case GC_DISPLAY_CLOCK_267_MHZ_PNV:
return 266667;
case GC_DISPLAY_CLOCK_333_MHZ_PNV:
return 333333;
case GC_DISPLAY_CLOCK_444_MHZ_PNV:
return 444444;
case GC_DISPLAY_CLOCK_200_MHZ_PNV:
return 200000;
default:
DRM_ERROR("Unknown pnv display core clock 0x%04x\n", gcfgc);
case GC_DISPLAY_CLOCK_133_MHZ_PNV:
return 133333;
case GC_DISPLAY_CLOCK_167_MHZ_PNV:
return 166667;
}
}
static int i965gm_get_cdclk(struct drm_i915_private *dev_priv)
{
struct pci_dev *pdev = dev_priv->drm.pdev;
static const uint8_t div_3200[] = { 16, 10, 8 };
static const uint8_t div_4000[] = { 20, 12, 10 };
static const uint8_t div_5333[] = { 24, 16, 14 };
const uint8_t *div_table;
unsigned int cdclk_sel, vco = intel_hpll_vco(dev_priv);
uint16_t tmp = 0;
pci_read_config_word(pdev, GCFGC, &tmp);
cdclk_sel = ((tmp >> 8) & 0x1f) - 1;
if (cdclk_sel >= ARRAY_SIZE(div_3200))
goto fail;
switch (vco) {
case 3200000:
div_table = div_3200;
break;
case 4000000:
div_table = div_4000;
break;
case 5333333:
div_table = div_5333;
break;
default:
goto fail;
}
return DIV_ROUND_CLOSEST(vco, div_table[cdclk_sel]);
fail:
DRM_ERROR("Unable to determine CDCLK. HPLL VCO=%u kHz, CFGC=0x%04x\n",
vco, tmp);
return 200000;
}
static int gm45_get_cdclk(struct drm_i915_private *dev_priv)
{
struct pci_dev *pdev = dev_priv->drm.pdev;
unsigned int cdclk_sel, vco = intel_hpll_vco(dev_priv);
uint16_t tmp = 0;
pci_read_config_word(pdev, GCFGC, &tmp);
cdclk_sel = (tmp >> 12) & 0x1;
switch (vco) {
case 2666667:
case 4000000:
case 5333333:
return cdclk_sel ? 333333 : 222222;
case 3200000:
return cdclk_sel ? 320000 : 228571;
default:
DRM_ERROR("Unable to determine CDCLK. HPLL VCO=%u, CFGC=0x%04x\n",
vco, tmp);
return 222222;
}
}
static int hsw_get_cdclk(struct drm_i915_private *dev_priv)
{
uint32_t lcpll = I915_READ(LCPLL_CTL);
uint32_t freq = lcpll & LCPLL_CLK_FREQ_MASK;
if (lcpll & LCPLL_CD_SOURCE_FCLK)
return 800000;
else if (I915_READ(FUSE_STRAP) & HSW_CDCLK_LIMIT)
return 450000;
else if (freq == LCPLL_CLK_FREQ_450)
return 450000;
else if (IS_HSW_ULT(dev_priv))
return 337500;
else
return 540000;
}
static int vlv_calc_cdclk(struct drm_i915_private *dev_priv,
int max_pixclk)
{
int freq_320 = (dev_priv->hpll_freq << 1) % 320000 != 0 ?
333333 : 320000;
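	/*
	 * i.e. 320 MHz is used only when it can be produced from 2*HPLL
	 * with an exact integer divider; otherwise fall back to 333.33 MHz.
	 */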
int limit = IS_CHERRYVIEW(dev_priv) ? 95 : 90;
/*
* We seem to get an unstable or solid color picture at 200MHz.
* Not sure what's wrong. For now use 200MHz only when all pipes
* are off.
*/
if (!IS_CHERRYVIEW(dev_priv) &&
max_pixclk > freq_320*limit/100)
return 400000;
else if (max_pixclk > 266667*limit/100)
return freq_320;
else if (max_pixclk > 0)
return 266667;
else
return 200000;
}
static int vlv_get_cdclk(struct drm_i915_private *dev_priv)
{
return vlv_get_cck_clock_hpll(dev_priv, "cdclk",
CCK_DISPLAY_CLOCK_CONTROL);
}
static void vlv_program_pfi_credits(struct drm_i915_private *dev_priv)
{
unsigned int credits, default_credits;
if (IS_CHERRYVIEW(dev_priv))
default_credits = PFI_CREDIT(12);
else
default_credits = PFI_CREDIT(8);
if (dev_priv->cdclk_freq >= dev_priv->czclk_freq) {
/* CHV suggested value is 31 or 63 */
if (IS_CHERRYVIEW(dev_priv))
credits = PFI_CREDIT_63;
else
credits = PFI_CREDIT(15);
} else {
credits = default_credits;
}
/*
* WA - write default credits before re-programming
* FIXME: should we also set the resend bit here?
*/
I915_WRITE(GCI_CONTROL, VGA_FAST_MODE_DISABLE |
default_credits);
I915_WRITE(GCI_CONTROL, VGA_FAST_MODE_DISABLE |
credits | PFI_CREDIT_RESEND);
/*
* FIXME is this guaranteed to clear
* immediately or should we poll for it?
*/
WARN_ON(I915_READ(GCI_CONTROL) & PFI_CREDIT_RESEND);
}
static void vlv_set_cdclk(struct drm_device *dev, int cdclk)
{
struct drm_i915_private *dev_priv = to_i915(dev);
u32 val, cmd;
WARN_ON(dev_priv->display.get_cdclk(dev_priv) != dev_priv->cdclk_freq);
if (cdclk >= 320000) /* jump to highest voltage for 400MHz too */
cmd = 2;
else if (cdclk == 266667)
cmd = 1;
else
cmd = 0;
mutex_lock(&dev_priv->rps.hw_lock);
val = vlv_punit_read(dev_priv, PUNIT_REG_DSPFREQ);
val &= ~DSPFREQGUAR_MASK;
val |= (cmd << DSPFREQGUAR_SHIFT);
vlv_punit_write(dev_priv, PUNIT_REG_DSPFREQ, val);
if (wait_for((vlv_punit_read(dev_priv, PUNIT_REG_DSPFREQ) &
DSPFREQSTAT_MASK) == (cmd << DSPFREQSTAT_SHIFT),
50)) {
DRM_ERROR("timed out waiting for CDclk change\n");
}
mutex_unlock(&dev_priv->rps.hw_lock);
mutex_lock(&dev_priv->sb_lock);
if (cdclk == 400000) {
u32 divider;
divider = DIV_ROUND_CLOSEST(dev_priv->hpll_freq << 1,
cdclk) - 1;
/* adjust cdclk divider */
val = vlv_cck_read(dev_priv, CCK_DISPLAY_CLOCK_CONTROL);
val &= ~CCK_FREQUENCY_VALUES;
val |= divider;
vlv_cck_write(dev_priv, CCK_DISPLAY_CLOCK_CONTROL, val);
if (wait_for((vlv_cck_read(dev_priv, CCK_DISPLAY_CLOCK_CONTROL) &
CCK_FREQUENCY_STATUS) == (divider << CCK_FREQUENCY_STATUS_SHIFT),
50))
DRM_ERROR("timed out waiting for CDclk change\n");
}
/* adjust self-refresh exit latency value */
val = vlv_bunit_read(dev_priv, BUNIT_REG_BISOC);
val &= ~0x7f;
/*
* For high bandwidth configs, we set a higher latency in the bunit
* so that the core display fetch happens in time to avoid underruns.
*/
if (cdclk == 400000)
val |= 4500 / 250; /* 4.5 usec */
else
val |= 3000 / 250; /* 3.0 usec */
vlv_bunit_write(dev_priv, BUNIT_REG_BISOC, val);
mutex_unlock(&dev_priv->sb_lock);
intel_update_cdclk(dev_priv);
}
static void chv_set_cdclk(struct drm_device *dev, int cdclk)
{
struct drm_i915_private *dev_priv = to_i915(dev);
u32 val, cmd;
WARN_ON(dev_priv->display.get_cdclk(dev_priv) != dev_priv->cdclk_freq);
switch (cdclk) {
case 333333:
case 320000:
case 266667:
case 200000:
break;
default:
MISSING_CASE(cdclk);
return;
}
/*
* Specs are full of misinformation, but testing on actual
* hardware has shown that we just need to write the desired
* CCK divider into the Punit register.
*/
cmd = DIV_ROUND_CLOSEST(dev_priv->hpll_freq << 1, cdclk) - 1;
mutex_lock(&dev_priv->rps.hw_lock);
val = vlv_punit_read(dev_priv, PUNIT_REG_DSPFREQ);
val &= ~DSPFREQGUAR_MASK_CHV;
val |= (cmd << DSPFREQGUAR_SHIFT_CHV);
vlv_punit_write(dev_priv, PUNIT_REG_DSPFREQ, val);
if (wait_for((vlv_punit_read(dev_priv, PUNIT_REG_DSPFREQ) &
DSPFREQSTAT_MASK_CHV) == (cmd << DSPFREQSTAT_SHIFT_CHV),
50)) {
DRM_ERROR("timed out waiting for CDclk change\n");
}
mutex_unlock(&dev_priv->rps.hw_lock);
intel_update_cdclk(dev_priv);
}
static int bdw_calc_cdclk(int max_pixclk)
{
if (max_pixclk > 540000)
return 675000;
else if (max_pixclk > 450000)
return 540000;
else if (max_pixclk > 337500)
return 450000;
else
return 337500;
}
static int bdw_get_cdclk(struct drm_i915_private *dev_priv)
{
uint32_t lcpll = I915_READ(LCPLL_CTL);
uint32_t freq = lcpll & LCPLL_CLK_FREQ_MASK;
if (lcpll & LCPLL_CD_SOURCE_FCLK)
return 800000;
else if (I915_READ(FUSE_STRAP) & HSW_CDCLK_LIMIT)
return 450000;
else if (freq == LCPLL_CLK_FREQ_450)
return 450000;
else if (freq == LCPLL_CLK_FREQ_54O_BDW)
return 540000;
else if (freq == LCPLL_CLK_FREQ_337_5_BDW)
return 337500;
else
return 675000;
}
static void bdw_set_cdclk(struct drm_device *dev, int cdclk)
{
struct drm_i915_private *dev_priv = to_i915(dev);
uint32_t val, data;
int ret;
if (WARN((I915_READ(LCPLL_CTL) &
(LCPLL_PLL_DISABLE | LCPLL_PLL_LOCK |
LCPLL_CD_CLOCK_DISABLE | LCPLL_ROOT_CD_CLOCK_DISABLE |
LCPLL_CD2X_CLOCK_DISABLE | LCPLL_POWER_DOWN_ALLOW |
LCPLL_CD_SOURCE_FCLK)) != LCPLL_PLL_LOCK,
"trying to change cdclk frequency with cdclk not enabled\n"))
return;
mutex_lock(&dev_priv->rps.hw_lock);
ret = sandybridge_pcode_write(dev_priv,
BDW_PCODE_DISPLAY_FREQ_CHANGE_REQ, 0x0);
mutex_unlock(&dev_priv->rps.hw_lock);
if (ret) {
DRM_ERROR("failed to inform pcode about cdclk change\n");
return;
}
val = I915_READ(LCPLL_CTL);
val |= LCPLL_CD_SOURCE_FCLK;
I915_WRITE(LCPLL_CTL, val);
if (wait_for_us(I915_READ(LCPLL_CTL) &
LCPLL_CD_SOURCE_FCLK_DONE, 1))
DRM_ERROR("Switching to FCLK failed\n");
val = I915_READ(LCPLL_CTL);
val &= ~LCPLL_CLK_FREQ_MASK;
switch (cdclk) {
case 450000:
val |= LCPLL_CLK_FREQ_450;
data = 0;
break;
case 540000:
val |= LCPLL_CLK_FREQ_54O_BDW;
data = 1;
break;
case 337500:
val |= LCPLL_CLK_FREQ_337_5_BDW;
data = 2;
break;
case 675000:
val |= LCPLL_CLK_FREQ_675_BDW;
data = 3;
break;
default:
WARN(1, "invalid cdclk frequency\n");
return;
}
I915_WRITE(LCPLL_CTL, val);
val = I915_READ(LCPLL_CTL);
val &= ~LCPLL_CD_SOURCE_FCLK;
I915_WRITE(LCPLL_CTL, val);
if (wait_for_us((I915_READ(LCPLL_CTL) &
LCPLL_CD_SOURCE_FCLK_DONE) == 0, 1))
DRM_ERROR("Switching back to LCPLL failed\n");
mutex_lock(&dev_priv->rps.hw_lock);
sandybridge_pcode_write(dev_priv, HSW_PCODE_DE_WRITE_FREQ_REQ, data);
mutex_unlock(&dev_priv->rps.hw_lock);
I915_WRITE(CDCLK_FREQ, DIV_ROUND_CLOSEST(cdclk, 1000) - 1);
intel_update_cdclk(dev_priv);
WARN(cdclk != dev_priv->cdclk_freq,
"cdclk requested %d kHz but got %d kHz\n",
cdclk, dev_priv->cdclk_freq);
}
static int skl_calc_cdclk(int max_pixclk, int vco)
{
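	/*
	 * With the 8640 MHz VCO the cdclks are vco/14, /16, /20 and /28;
	 * with the 8100 MHz VCO they are vco/12, /15, /18 and /24.
	 */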
if (vco == 8640000) {
if (max_pixclk > 540000)
return 617143;
else if (max_pixclk > 432000)
return 540000;
else if (max_pixclk > 308571)
return 432000;
else
return 308571;
} else {
if (max_pixclk > 540000)
return 675000;
else if (max_pixclk > 450000)
return 540000;
else if (max_pixclk > 337500)
return 450000;
else
return 337500;
}
}
static void skl_dpll0_update(struct drm_i915_private *dev_priv)
{
u32 val;
dev_priv->cdclk_pll.ref = 24000;
dev_priv->cdclk_pll.vco = 0;
val = I915_READ(LCPLL1_CTL);
if ((val & LCPLL_PLL_ENABLE) == 0)
return;
if (WARN_ON((val & LCPLL_PLL_LOCK) == 0))
return;
val = I915_READ(DPLL_CTRL1);
if (WARN_ON((val & (DPLL_CTRL1_HDMI_MODE(SKL_DPLL0) |
DPLL_CTRL1_SSC(SKL_DPLL0) |
DPLL_CTRL1_OVERRIDE(SKL_DPLL0))) !=
DPLL_CTRL1_OVERRIDE(SKL_DPLL0)))
return;
switch (val & DPLL_CTRL1_LINK_RATE_MASK(SKL_DPLL0)) {
case DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_810, SKL_DPLL0):
case DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_1350, SKL_DPLL0):
case DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_1620, SKL_DPLL0):
case DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_2700, SKL_DPLL0):
dev_priv->cdclk_pll.vco = 8100000;
break;
case DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_1080, SKL_DPLL0):
case DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_2160, SKL_DPLL0):
dev_priv->cdclk_pll.vco = 8640000;
break;
default:
MISSING_CASE(val & DPLL_CTRL1_LINK_RATE_MASK(SKL_DPLL0));
break;
}
}
static int skl_get_cdclk(struct drm_i915_private *dev_priv)
{
u32 cdctl;
skl_dpll0_update(dev_priv);
if (dev_priv->cdclk_pll.vco == 0)
return dev_priv->cdclk_pll.ref;
cdctl = I915_READ(CDCLK_CTL);
if (dev_priv->cdclk_pll.vco == 8640000) {
switch (cdctl & CDCLK_FREQ_SEL_MASK) {
case CDCLK_FREQ_450_432:
return 432000;
case CDCLK_FREQ_337_308:
return 308571;
case CDCLK_FREQ_540:
return 540000;
case CDCLK_FREQ_675_617:
return 617143;
default:
MISSING_CASE(cdctl & CDCLK_FREQ_SEL_MASK);
}
} else {
switch (cdctl & CDCLK_FREQ_SEL_MASK) {
case CDCLK_FREQ_450_432:
return 450000;
case CDCLK_FREQ_337_308:
return 337500;
case CDCLK_FREQ_540:
return 540000;
case CDCLK_FREQ_675_617:
return 675000;
default:
MISSING_CASE(cdctl & CDCLK_FREQ_SEL_MASK);
}
}
return dev_priv->cdclk_pll.ref;
}
/* convert from kHz to .1 fixpoint MHz with -1MHz offset */
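/* e.g. cdclk = 337500 kHz -> (337500 - 1000) / 500 = 673,
 * i.e. 336.5 MHz encoded with one binary fractional bit */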
static int skl_cdclk_decimal(int cdclk)
{
return DIV_ROUND_CLOSEST(cdclk - 1000, 500);
}
static void skl_set_preferred_cdclk_vco(struct drm_i915_private *dev_priv,
int vco)
{
bool changed = dev_priv->skl_preferred_vco_freq != vco;
dev_priv->skl_preferred_vco_freq = vco;
if (changed)
intel_update_max_cdclk(dev_priv);
}
static void skl_dpll0_enable(struct drm_i915_private *dev_priv, int vco)
{
int min_cdclk = skl_calc_cdclk(0, vco);
u32 val;
WARN_ON(vco != 8100000 && vco != 8640000);
/* select the minimum CDCLK before enabling DPLL 0 */
val = CDCLK_FREQ_337_308 | skl_cdclk_decimal(min_cdclk);
I915_WRITE(CDCLK_CTL, val);
POSTING_READ(CDCLK_CTL);
/*
* We always enable DPLL0 with the lowest link rate possible, but still
* taking into account the VCO required to operate the eDP panel at the
* desired frequency. The usual DP link rates operate with a VCO of
* 8100 while the eDP 1.4 alternate link rates need a VCO of 8640.
* The modeset code is responsible for the selection of the exact link
* rate later on, with the constraint of choosing a frequency that
* works with vco.
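* (The 810/1350/1620/2700 link rates are integer divisions of the 8100
* VCO, e.g. 8100 / 3 = 2700, while the eDP 1.4 intermediate 1080/2160
* rates divide out of the 8640 VCO, e.g. 8640 / 4 = 2160.)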
*/
val = I915_READ(DPLL_CTRL1);
val &= ~(DPLL_CTRL1_HDMI_MODE(SKL_DPLL0) | DPLL_CTRL1_SSC(SKL_DPLL0) |
DPLL_CTRL1_LINK_RATE_MASK(SKL_DPLL0));
val |= DPLL_CTRL1_OVERRIDE(SKL_DPLL0);
if (vco == 8640000)
val |= DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_1080,
SKL_DPLL0);
else
val |= DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_810,
SKL_DPLL0);
I915_WRITE(DPLL_CTRL1, val);
POSTING_READ(DPLL_CTRL1);
I915_WRITE(LCPLL1_CTL, I915_READ(LCPLL1_CTL) | LCPLL_PLL_ENABLE);
if (intel_wait_for_register(dev_priv,
LCPLL1_CTL, LCPLL_PLL_LOCK, LCPLL_PLL_LOCK,
5))
DRM_ERROR("DPLL0 not locked\n");
dev_priv->cdclk_pll.vco = vco;
/* We'll want to keep using the current vco from now on. */
skl_set_preferred_cdclk_vco(dev_priv, vco);
}
static void skl_dpll0_disable(struct drm_i915_private *dev_priv)
{
I915_WRITE(LCPLL1_CTL, I915_READ(LCPLL1_CTL) & ~LCPLL_PLL_ENABLE);
if (intel_wait_for_register(dev_priv,
LCPLL1_CTL, LCPLL_PLL_LOCK, 0,
1))
DRM_ERROR("Couldn't disable DPLL0\n");
dev_priv->cdclk_pll.vco = 0;
}
static void skl_set_cdclk(struct drm_i915_private *dev_priv,
int cdclk, int vco)
{
u32 freq_select, pcu_ack;
int ret;
WARN_ON((cdclk == 24000) != (vco == 0));
DRM_DEBUG_DRIVER("Changing CDCLK to %d kHz (VCO %d kHz)\n",
cdclk, vco);
mutex_lock(&dev_priv->rps.hw_lock);
ret = skl_pcode_request(dev_priv, SKL_PCODE_CDCLK_CONTROL,
SKL_CDCLK_PREPARE_FOR_CHANGE,
SKL_CDCLK_READY_FOR_CHANGE,
SKL_CDCLK_READY_FOR_CHANGE, 3);
mutex_unlock(&dev_priv->rps.hw_lock);
if (ret) {
DRM_ERROR("Failed to inform PCU about cdclk change (%d)\n",
ret);
return;
}
/* set CDCLK_CTL */
switch (cdclk) {
case 450000:
case 432000:
freq_select = CDCLK_FREQ_450_432;
pcu_ack = 1;
break;
case 540000:
freq_select = CDCLK_FREQ_540;
pcu_ack = 2;
break;
case 308571:
case 337500:
default:
freq_select = CDCLK_FREQ_337_308;
pcu_ack = 0;
break;
case 617143:
case 675000:
freq_select = CDCLK_FREQ_675_617;
pcu_ack = 3;
break;
}
if (dev_priv->cdclk_pll.vco != 0 &&
dev_priv->cdclk_pll.vco != vco)
skl_dpll0_disable(dev_priv);
if (dev_priv->cdclk_pll.vco != vco)
skl_dpll0_enable(dev_priv, vco);
I915_WRITE(CDCLK_CTL, freq_select | skl_cdclk_decimal(cdclk));
POSTING_READ(CDCLK_CTL);
/* inform PCU of the change */
mutex_lock(&dev_priv->rps.hw_lock);
sandybridge_pcode_write(dev_priv, SKL_PCODE_CDCLK_CONTROL, pcu_ack);
mutex_unlock(&dev_priv->rps.hw_lock);
intel_update_cdclk(dev_priv);
}
static void skl_sanitize_cdclk(struct drm_i915_private *dev_priv)
{
uint32_t cdctl, expected;
/*
* check if the pre-os initialized the display
* There is SWF18 scratchpad register defined which is set by the
* pre-os which can be used by the OS drivers to check the status
*/
if ((I915_READ(SWF_ILK(0x18)) & 0x00FFFFFF) == 0)
goto sanitize;
intel_update_cdclk(dev_priv);
/* Is PLL enabled and locked ? */
if (dev_priv->cdclk_pll.vco == 0 ||
dev_priv->cdclk_freq == dev_priv->cdclk_pll.ref)
goto sanitize;
/* DPLL okay; verify the cdclock
*
* Noticed in some instances that the freq selection is correct but
* decimal part is programmed wrong from BIOS where pre-os does not
* enable display. Verify the same as well.
*/
cdctl = I915_READ(CDCLK_CTL);
expected = (cdctl & CDCLK_FREQ_SEL_MASK) |
skl_cdclk_decimal(dev_priv->cdclk_freq);
if (cdctl == expected)
/* All well; nothing to sanitize */
return;
sanitize:
DRM_DEBUG_KMS("Sanitizing cdclk programmed by pre-os\n");
/* force cdclk programming */
dev_priv->cdclk_freq = 0;
/* force full PLL disable + enable */
dev_priv->cdclk_pll.vco = -1;
}
/**
* skl_init_cdclk - Initialize CDCLK on SKL
* @dev_priv: i915 device
*
* Initialize CDCLK for SKL and derivatives. This is generally
* done only during the display core initialization sequence,
* after which the DMC will take care of turning CDCLK off/on
* as needed.
*/
void skl_init_cdclk(struct drm_i915_private *dev_priv)
{
int cdclk, vco;
skl_sanitize_cdclk(dev_priv);
if (dev_priv->cdclk_freq != 0 && dev_priv->cdclk_pll.vco != 0) {
/*
* Use the current vco as our initial
* guess as to what the preferred vco is.
*/
if (dev_priv->skl_preferred_vco_freq == 0)
skl_set_preferred_cdclk_vco(dev_priv,
dev_priv->cdclk_pll.vco);
return;
}
vco = dev_priv->skl_preferred_vco_freq;
if (vco == 0)
vco = 8100000;
cdclk = skl_calc_cdclk(0, vco);
skl_set_cdclk(dev_priv, cdclk, vco);
}
/**
* skl_uninit_cdclk - Uninitialize CDCLK on SKL
* @dev_priv: i915 device
*
* Uninitialize CDCLK for SKL and derivatives. This is done only
* during the display core uninitialization sequence.
*/
void skl_uninit_cdclk(struct drm_i915_private *dev_priv)
{
skl_set_cdclk(dev_priv, dev_priv->cdclk_pll.ref, 0);
}
static int bxt_calc_cdclk(int max_pixclk)
{
if (max_pixclk > 576000)
return 624000;
else if (max_pixclk > 384000)
return 576000;
else if (max_pixclk > 288000)
return 384000;
else if (max_pixclk > 144000)
return 288000;
else
return 144000;
}
static int glk_calc_cdclk(int max_pixclk)
{
if (max_pixclk > 2 * 158400)
return 316800;
else if (max_pixclk > 2 * 79200)
return 158400;
else
return 79200;
}
static int bxt_de_pll_vco(struct drm_i915_private *dev_priv, int cdclk)
{
int ratio;
if (cdclk == dev_priv->cdclk_pll.ref)
return 0;
switch (cdclk) {
default:
MISSING_CASE(cdclk);
case 144000:
case 288000:
case 384000:
case 576000:
ratio = 60;
break;
case 624000:
ratio = 65;
break;
}
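	/*
	 * e.g. a 576000 kHz cdclk uses ratio 60: 19200 kHz * 60 gives a
	 * 1152000 kHz VCO, which bxt_set_cdclk() then runs through the
	 * CD2X divider 1 path (cdclk = vco / 2 / 1).
	 */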
return dev_priv->cdclk_pll.ref * ratio;
}
static int glk_de_pll_vco(struct drm_i915_private *dev_priv, int cdclk)
{
int ratio;
if (cdclk == dev_priv->cdclk_pll.ref)
return 0;
switch (cdclk) {
default:
MISSING_CASE(cdclk);
case 79200:
case 158400:
case 316800:
ratio = 33;
break;
}
return dev_priv->cdclk_pll.ref * ratio;
}
static void bxt_de_pll_update(struct drm_i915_private *dev_priv)
{
u32 val;
dev_priv->cdclk_pll.ref = 19200;
dev_priv->cdclk_pll.vco = 0;
val = I915_READ(BXT_DE_PLL_ENABLE);
if ((val & BXT_DE_PLL_PLL_ENABLE) == 0)
return;
if (WARN_ON((val & BXT_DE_PLL_LOCK) == 0))
return;
val = I915_READ(BXT_DE_PLL_CTL);
dev_priv->cdclk_pll.vco = (val & BXT_DE_PLL_RATIO_MASK) *
dev_priv->cdclk_pll.ref;
}
static int bxt_get_cdclk(struct drm_i915_private *dev_priv)
{
u32 divider;
int div, vco;
bxt_de_pll_update(dev_priv);
vco = dev_priv->cdclk_pll.vco;
if (vco == 0)
return dev_priv->cdclk_pll.ref;
divider = I915_READ(CDCLK_CTL) & BXT_CDCLK_CD2X_DIV_SEL_MASK;
switch (divider) {
case BXT_CDCLK_CD2X_DIV_SEL_1:
div = 2;
break;
case BXT_CDCLK_CD2X_DIV_SEL_1_5:
WARN(IS_GEMINILAKE(dev_priv), "Unsupported divider\n");
div = 3;
break;
case BXT_CDCLK_CD2X_DIV_SEL_2:
div = 4;
break;
case BXT_CDCLK_CD2X_DIV_SEL_4:
div = 8;
break;
default:
MISSING_CASE(divider);
return dev_priv->cdclk_pll.ref;
}
return DIV_ROUND_CLOSEST(vco, div);
}
static void bxt_de_pll_disable(struct drm_i915_private *dev_priv)
{
I915_WRITE(BXT_DE_PLL_ENABLE, 0);
/* Timeout 200us */
if (intel_wait_for_register(dev_priv,
BXT_DE_PLL_ENABLE, BXT_DE_PLL_LOCK, 0,
1))
DRM_ERROR("timeout waiting for DE PLL unlock\n");
dev_priv->cdclk_pll.vco = 0;
}
static void bxt_de_pll_enable(struct drm_i915_private *dev_priv, int vco)
{
int ratio = DIV_ROUND_CLOSEST(vco, dev_priv->cdclk_pll.ref);
u32 val;
val = I915_READ(BXT_DE_PLL_CTL);
val &= ~BXT_DE_PLL_RATIO_MASK;
val |= BXT_DE_PLL_RATIO(ratio);
I915_WRITE(BXT_DE_PLL_CTL, val);
I915_WRITE(BXT_DE_PLL_ENABLE, BXT_DE_PLL_PLL_ENABLE);
/* Timeout 200us */
if (intel_wait_for_register(dev_priv,
BXT_DE_PLL_ENABLE,
BXT_DE_PLL_LOCK,
BXT_DE_PLL_LOCK,
1))
DRM_ERROR("timeout waiting for DE PLL lock\n");
dev_priv->cdclk_pll.vco = vco;
}
static void bxt_set_cdclk(struct drm_i915_private *dev_priv, int cdclk)
{
u32 val, divider;
int vco, ret;
if (IS_GEMINILAKE(dev_priv))
vco = glk_de_pll_vco(dev_priv, cdclk);
else
vco = bxt_de_pll_vco(dev_priv, cdclk);
DRM_DEBUG_DRIVER("Changing CDCLK to %d kHz (VCO %d kHz)\n",
cdclk, vco);
/* cdclk = vco / 2 / div{1,1.5,2,4} */
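	/* e.g. GLK: 316800 kHz cdclk = 633600 kHz VCO (19200 * 33) / 2 / 1 */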
switch (DIV_ROUND_CLOSEST(vco, cdclk)) {
case 8:
divider = BXT_CDCLK_CD2X_DIV_SEL_4;
break;
case 4:
divider = BXT_CDCLK_CD2X_DIV_SEL_2;
break;
case 3:
WARN(IS_GEMINILAKE(dev_priv), "Unsupported divider\n");
divider = BXT_CDCLK_CD2X_DIV_SEL_1_5;
break;
case 2:
divider = BXT_CDCLK_CD2X_DIV_SEL_1;
break;
default:
WARN_ON(cdclk != dev_priv->cdclk_pll.ref);
WARN_ON(vco != 0);
divider = BXT_CDCLK_CD2X_DIV_SEL_1;
break;
}
/* Inform power controller of upcoming frequency change */
mutex_lock(&dev_priv->rps.hw_lock);
ret = sandybridge_pcode_write(dev_priv, HSW_PCODE_DE_WRITE_FREQ_REQ,
0x80000000);
mutex_unlock(&dev_priv->rps.hw_lock);
if (ret) {
DRM_ERROR("PCode CDCLK freq change notify failed (err %d, freq %d)\n",
ret, cdclk);
return;
}
if (dev_priv->cdclk_pll.vco != 0 &&
dev_priv->cdclk_pll.vco != vco)
bxt_de_pll_disable(dev_priv);
if (dev_priv->cdclk_pll.vco != vco)
bxt_de_pll_enable(dev_priv, vco);
val = divider | skl_cdclk_decimal(cdclk);
/*
* FIXME if only the cd2x divider needs changing, it could be done
* without shutting off the pipe (if only one pipe is active).
*/
val |= BXT_CDCLK_CD2X_PIPE_NONE;
/*
* Disable SSA Precharge when CD clock frequency < 500 MHz,
* enable otherwise.
*/
if (cdclk >= 500000)
val |= BXT_CDCLK_SSA_PRECHARGE_ENABLE;
I915_WRITE(CDCLK_CTL, val);
mutex_lock(&dev_priv->rps.hw_lock);
ret = sandybridge_pcode_write(dev_priv, HSW_PCODE_DE_WRITE_FREQ_REQ,
DIV_ROUND_UP(cdclk, 25000));
mutex_unlock(&dev_priv->rps.hw_lock);
if (ret) {
DRM_ERROR("PCode CDCLK freq set failed, (err %d, freq %d)\n",
ret, cdclk);
return;
}
intel_update_cdclk(dev_priv);
}
static void bxt_sanitize_cdclk(struct drm_i915_private *dev_priv)
{
u32 cdctl, expected;
intel_update_cdclk(dev_priv);
if (dev_priv->cdclk_pll.vco == 0 ||
dev_priv->cdclk_freq == dev_priv->cdclk_pll.ref)
goto sanitize;
/* DPLL okay; verify the cdclock
*
* Some BIOS versions leave an incorrect decimal frequency value and
* set reserved MBZ bits in CDCLK_CTL at least during exiting from S4,
* so sanitize this register.
*/
cdctl = I915_READ(CDCLK_CTL);
/*
* Let's ignore the pipe field, since BIOS could have configured the
* dividers both synching to an active pipe, or asynchronously
* (PIPE_NONE).
*/
cdctl &= ~BXT_CDCLK_CD2X_PIPE_NONE;
expected = (cdctl & BXT_CDCLK_CD2X_DIV_SEL_MASK) |
skl_cdclk_decimal(dev_priv->cdclk_freq);
/*
* Disable SSA Precharge when CD clock frequency < 500 MHz,
* enable otherwise.
*/
if (dev_priv->cdclk_freq >= 500000)
expected |= BXT_CDCLK_SSA_PRECHARGE_ENABLE;
if (cdctl == expected)
/* All well; nothing to sanitize */
return;
sanitize:
DRM_DEBUG_KMS("Sanitizing cdclk programmed by pre-os\n");
/* force cdclk programming */
dev_priv->cdclk_freq = 0;
/* force full PLL disable + enable */
dev_priv->cdclk_pll.vco = -1;
}
/**
* bxt_init_cdclk - Initialize CDCLK on BXT
* @dev_priv: i915 device
*
* Initialize CDCLK for BXT and derivatives. This is generally
* done only during the display core initialization sequence,
* after which the DMC will take care of turning CDCLK off/on
* as needed.
*/
void bxt_init_cdclk(struct drm_i915_private *dev_priv)
{
int cdclk;
bxt_sanitize_cdclk(dev_priv);
if (dev_priv->cdclk_freq != 0 && dev_priv->cdclk_pll.vco != 0)
return;
/*
* FIXME:
* - The initial CDCLK needs to be read from VBT.
* Need to make this change after VBT has changes for BXT.
*/
if (IS_GEMINILAKE(dev_priv))
cdclk = glk_calc_cdclk(0);
else
cdclk = bxt_calc_cdclk(0);
bxt_set_cdclk(dev_priv, cdclk);
}
/**
* bxt_uninit_cdclk - Uninitialize CDCLK on BXT
* @dev_priv: i915 device
*
* Uninitialize CDCLK for BXT and derivatives. This is done only
* during the display core uninitialization sequence.
*/
void bxt_uninit_cdclk(struct drm_i915_private *dev_priv)
{
bxt_set_cdclk(dev_priv, dev_priv->cdclk_pll.ref);
}
static int bdw_adjust_min_pipe_pixel_rate(struct intel_crtc_state *crtc_state,
int pixel_rate)
{
struct drm_i915_private *dev_priv =
to_i915(crtc_state->base.crtc->dev);
/* pixel rate mustn't exceed 95% of cdclk with IPS on BDW */
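	/* e.g. a 300000 kHz pipe pixel rate needs a cdclk of at least
	 * DIV_ROUND_UP(300000 * 100, 95) = 315790 kHz with IPS enabled
	 */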
if (IS_BROADWELL(dev_priv) && crtc_state->ips_enabled)
pixel_rate = DIV_ROUND_UP(pixel_rate * 100, 95);
/* BSpec says "Do not use DisplayPort with CDCLK less than
* 432 MHz, audio enabled, port width x4, and link rate
* HBR2 (5.4 GHz), or else there may be audio corruption or
* screen corruption."
*/
if (intel_crtc_has_dp_encoder(crtc_state) &&
crtc_state->has_audio &&
crtc_state->port_clock >= 540000 &&
crtc_state->lane_count == 4)
pixel_rate = max(432000, pixel_rate);
return pixel_rate;
}
/* compute the max rate for new configuration */
static int intel_max_pixel_rate(struct drm_atomic_state *state)
{
struct intel_atomic_state *intel_state = to_intel_atomic_state(state);
struct drm_i915_private *dev_priv = to_i915(state->dev);
struct drm_crtc *crtc;
struct drm_crtc_state *cstate;
struct intel_crtc_state *crtc_state;
unsigned int max_pixel_rate = 0, i;
enum pipe pipe;
memcpy(intel_state->min_pixclk, dev_priv->min_pixclk,
sizeof(intel_state->min_pixclk));
for_each_crtc_in_state(state, crtc, cstate, i) {
int pixel_rate;
crtc_state = to_intel_crtc_state(cstate);
if (!crtc_state->base.enable) {
intel_state->min_pixclk[i] = 0;
continue;
}
pixel_rate = crtc_state->pixel_rate;
if (IS_BROADWELL(dev_priv) || IS_GEN9(dev_priv))
pixel_rate =
bdw_adjust_min_pipe_pixel_rate(crtc_state,
pixel_rate);
intel_state->min_pixclk[i] = pixel_rate;
}
for_each_pipe(dev_priv, pipe)
max_pixel_rate = max(intel_state->min_pixclk[pipe],
max_pixel_rate);
return max_pixel_rate;
}
static int vlv_modeset_calc_cdclk(struct drm_atomic_state *state)
{
struct drm_device *dev = state->dev;
struct drm_i915_private *dev_priv = to_i915(dev);
int max_pixclk = intel_max_pixel_rate(state);
struct intel_atomic_state *intel_state =
to_intel_atomic_state(state);
intel_state->cdclk = intel_state->dev_cdclk =
vlv_calc_cdclk(dev_priv, max_pixclk);
if (!intel_state->active_crtcs)
intel_state->dev_cdclk = vlv_calc_cdclk(dev_priv, 0);
return 0;
}
static void vlv_modeset_commit_cdclk(struct drm_atomic_state *old_state)
{
struct drm_device *dev = old_state->dev;
struct drm_i915_private *dev_priv = to_i915(dev);
struct intel_atomic_state *old_intel_state =
to_intel_atomic_state(old_state);
unsigned int req_cdclk = old_intel_state->dev_cdclk;
/*
* FIXME: We can end up here with all power domains off, yet
* with a CDCLK frequency other than the minimum. To account
* for this take the PIPE-A power domain, which covers the HW
* blocks needed for the following programming. This can be
* removed once it's guaranteed that we get here either with
* the minimum CDCLK set, or the required power domains
* enabled.
*/
intel_display_power_get(dev_priv, POWER_DOMAIN_PIPE_A);
if (IS_CHERRYVIEW(dev_priv))
chv_set_cdclk(dev, req_cdclk);
else
vlv_set_cdclk(dev, req_cdclk);
vlv_program_pfi_credits(dev_priv);
intel_display_power_put(dev_priv, POWER_DOMAIN_PIPE_A);
}
static int bdw_modeset_calc_cdclk(struct drm_atomic_state *state)
{
struct drm_i915_private *dev_priv = to_i915(state->dev);
struct intel_atomic_state *intel_state = to_intel_atomic_state(state);
int max_pixclk = intel_max_pixel_rate(state);
int cdclk;
/*
* FIXME should also account for plane ratio
* once 64bpp pixel formats are supported.
*/
cdclk = bdw_calc_cdclk(max_pixclk);
if (cdclk > dev_priv->max_cdclk_freq) {
DRM_DEBUG_KMS("requested cdclk (%d kHz) exceeds max (%d kHz)\n",
cdclk, dev_priv->max_cdclk_freq);
return -EINVAL;
}
intel_state->cdclk = intel_state->dev_cdclk = cdclk;
if (!intel_state->active_crtcs)
intel_state->dev_cdclk = bdw_calc_cdclk(0);
return 0;
}
static void bdw_modeset_commit_cdclk(struct drm_atomic_state *old_state)
{
struct drm_device *dev = old_state->dev;
struct intel_atomic_state *old_intel_state =
to_intel_atomic_state(old_state);
unsigned int req_cdclk = old_intel_state->dev_cdclk;
bdw_set_cdclk(dev, req_cdclk);
}
static int skl_modeset_calc_cdclk(struct drm_atomic_state *state)
{
struct intel_atomic_state *intel_state = to_intel_atomic_state(state);
struct drm_i915_private *dev_priv = to_i915(state->dev);
const int max_pixclk = intel_max_pixel_rate(state);
int vco = intel_state->cdclk_pll_vco;
int cdclk;
/*
* FIXME should also account for plane ratio
* once 64bpp pixel formats are supported.
*/
cdclk = skl_calc_cdclk(max_pixclk, vco);
/*
* FIXME move the cdclk calculation to
* compute_config() so we can fail gracefully.
*/
if (cdclk > dev_priv->max_cdclk_freq) {
DRM_ERROR("requested cdclk (%d kHz) exceeds max (%d kHz)\n",
cdclk, dev_priv->max_cdclk_freq);
cdclk = dev_priv->max_cdclk_freq;
}
intel_state->cdclk = intel_state->dev_cdclk = cdclk;
if (!intel_state->active_crtcs)
intel_state->dev_cdclk = skl_calc_cdclk(0, vco);
return 0;
}
static void skl_modeset_commit_cdclk(struct drm_atomic_state *old_state)
{
struct drm_i915_private *dev_priv = to_i915(old_state->dev);
struct intel_atomic_state *intel_state =
to_intel_atomic_state(old_state);
unsigned int req_cdclk = intel_state->dev_cdclk;
unsigned int req_vco = intel_state->cdclk_pll_vco;
skl_set_cdclk(dev_priv, req_cdclk, req_vco);
}
static int bxt_modeset_calc_cdclk(struct drm_atomic_state *state)
{
struct drm_i915_private *dev_priv = to_i915(state->dev);
int max_pixclk = intel_max_pixel_rate(state);
struct intel_atomic_state *intel_state =
to_intel_atomic_state(state);
int cdclk;
if (IS_GEMINILAKE(dev_priv))
cdclk = glk_calc_cdclk(max_pixclk);
else
cdclk = bxt_calc_cdclk(max_pixclk);
intel_state->cdclk = intel_state->dev_cdclk = cdclk;
if (!intel_state->active_crtcs) {
if (IS_GEMINILAKE(dev_priv))
cdclk = glk_calc_cdclk(0);
else
cdclk = bxt_calc_cdclk(0);
intel_state->dev_cdclk = cdclk;
}
return 0;
}
static void bxt_modeset_commit_cdclk(struct drm_atomic_state *old_state)
{
struct drm_device *dev = old_state->dev;
struct intel_atomic_state *old_intel_state =
to_intel_atomic_state(old_state);
unsigned int req_cdclk = old_intel_state->dev_cdclk;
bxt_set_cdclk(to_i915(dev), req_cdclk);
}
static int intel_compute_max_dotclk(struct drm_i915_private *dev_priv)
{
int max_cdclk_freq = dev_priv->max_cdclk_freq;
if (IS_GEMINILAKE(dev_priv))
return 2 * max_cdclk_freq;
else if (INTEL_INFO(dev_priv)->gen >= 9 ||
IS_HASWELL(dev_priv) || IS_BROADWELL(dev_priv))
return max_cdclk_freq;
else if (IS_CHERRYVIEW(dev_priv))
return max_cdclk_freq*95/100;
else if (INTEL_INFO(dev_priv)->gen < 4)
return 2*max_cdclk_freq*90/100;
else
return max_cdclk_freq*90/100;
}
/**
* intel_update_max_cdclk - Determine the maximum supported CDCLK frequency
* @dev_priv: i915 device
*
* Determine the maximum CDCLK frequency the platform supports, and also
* derive the maximum dot clock frequency the maximum CDCLK frequency
* allows.
*/
void intel_update_max_cdclk(struct drm_i915_private *dev_priv)
{
if (IS_GEN9_BC(dev_priv)) {
u32 limit = I915_READ(SKL_DFSM) & SKL_DFSM_CDCLK_LIMIT_MASK;
int max_cdclk, vco;
vco = dev_priv->skl_preferred_vco_freq;
WARN_ON(vco != 8100000 && vco != 8640000);
/*
* Use the lower (vco 8640) cdclk values as a
* first guess. skl_calc_cdclk() will correct it
* if the preferred vco is 8100 instead.
*/
if (limit == SKL_DFSM_CDCLK_LIMIT_675)
max_cdclk = 617143;
else if (limit == SKL_DFSM_CDCLK_LIMIT_540)
max_cdclk = 540000;
else if (limit == SKL_DFSM_CDCLK_LIMIT_450)
max_cdclk = 432000;
else
max_cdclk = 308571;
dev_priv->max_cdclk_freq = skl_calc_cdclk(max_cdclk, vco);
} else if (IS_GEMINILAKE(dev_priv)) {
dev_priv->max_cdclk_freq = 316800;
} else if (IS_BROXTON(dev_priv)) {
dev_priv->max_cdclk_freq = 624000;
} else if (IS_BROADWELL(dev_priv)) {
/*
* FIXME with extra cooling we can allow
* 540 MHz for ULX and 675 Mhz for ULT.
* How can we know if extra cooling is
* available? PCI ID, VTB, something else?
*/
if (I915_READ(FUSE_STRAP) & HSW_CDCLK_LIMIT)
dev_priv->max_cdclk_freq = 450000;
else if (IS_BDW_ULX(dev_priv))
dev_priv->max_cdclk_freq = 450000;
else if (IS_BDW_ULT(dev_priv))
dev_priv->max_cdclk_freq = 540000;
else
dev_priv->max_cdclk_freq = 675000;
} else if (IS_CHERRYVIEW(dev_priv)) {
dev_priv->max_cdclk_freq = 320000;
} else if (IS_VALLEYVIEW(dev_priv)) {
dev_priv->max_cdclk_freq = 400000;
} else {
/* otherwise assume cdclk is fixed */
dev_priv->max_cdclk_freq = dev_priv->cdclk_freq;
}
dev_priv->max_dotclk_freq = intel_compute_max_dotclk(dev_priv);
DRM_DEBUG_DRIVER("Max CD clock rate: %d kHz\n",
dev_priv->max_cdclk_freq);
DRM_DEBUG_DRIVER("Max dotclock rate: %d kHz\n",
dev_priv->max_dotclk_freq);
}
/**
* intel_update_cdclk - Determine the current CDCLK frequency
* @dev_priv: i915 device
*
* Determine the current CDCLK frequency.
*/
void intel_update_cdclk(struct drm_i915_private *dev_priv)
{
dev_priv->cdclk_freq = dev_priv->display.get_cdclk(dev_priv);
if (INTEL_GEN(dev_priv) >= 9)
DRM_DEBUG_DRIVER("Current CD clock rate: %d kHz, VCO: %d kHz, ref: %d kHz\n",
dev_priv->cdclk_freq, dev_priv->cdclk_pll.vco,
dev_priv->cdclk_pll.ref);
else
DRM_DEBUG_DRIVER("Current CD clock rate: %d kHz\n",
dev_priv->cdclk_freq);
/*
* 9:0 CMBUS [sic] CDCLK frequency (cdfreq):
* Programmng [sic] note: bit[9:2] should be programmed to the number
* of cdclk that generates 4MHz reference clock freq which is used to
* generate GMBus clock. This will vary with the cdclk freq.
*/
if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
I915_WRITE(GMBUSFREQ_VLV,
DIV_ROUND_UP(dev_priv->cdclk_freq, 1000));
}
static int pch_rawclk(struct drm_i915_private *dev_priv)
{
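	/* the RAWCLK_FREQ field is in MHz, hence * 1000 to get kHz */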
return (I915_READ(PCH_RAWCLK_FREQ) & RAWCLK_FREQ_MASK) * 1000;
}
static int vlv_hrawclk(struct drm_i915_private *dev_priv)
{
/* RAWCLK_FREQ_VLV register updated from power well code */
return vlv_get_cck_clock_hpll(dev_priv, "hrawclk",
CCK_DISPLAY_REF_CLOCK_CONTROL);
}
static int g4x_hrawclk(struct drm_i915_private *dev_priv)
{
uint32_t clkcfg;
/* hrawclock is 1/4 the FSB frequency */
clkcfg = I915_READ(CLKCFG);
switch (clkcfg & CLKCFG_FSB_MASK) {
case CLKCFG_FSB_400:
return 100000;
case CLKCFG_FSB_533:
return 133333;
case CLKCFG_FSB_667:
return 166667;
case CLKCFG_FSB_800:
return 200000;
case CLKCFG_FSB_1067:
return 266667;
case CLKCFG_FSB_1333:
return 333333;
/* these two are just a guess; one of them might be right */
case CLKCFG_FSB_1600:
case CLKCFG_FSB_1600_ALT:
return 400000;
default:
return 133333;
}
}
/**
* intel_update_rawclk - Determine the current RAWCLK frequency
* @dev_priv: i915 device
*
* Determine the current RAWCLK frequency. RAWCLK is a fixed
* frequency clock, so this needs to be done only once.
*/
void intel_update_rawclk(struct drm_i915_private *dev_priv)
{
if (HAS_PCH_SPLIT(dev_priv))
dev_priv->rawclk_freq = pch_rawclk(dev_priv);
else if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
dev_priv->rawclk_freq = vlv_hrawclk(dev_priv);
else if (IS_G4X(dev_priv) || IS_PINEVIEW(dev_priv))
dev_priv->rawclk_freq = g4x_hrawclk(dev_priv);
else
/* no rawclk on other platforms, or no need to know it */
return;
DRM_DEBUG_DRIVER("rawclk rate: %d kHz\n", dev_priv->rawclk_freq);
}
/**
* intel_init_cdclk_hooks - Initialize CDCLK related modesetting hooks
* @dev_priv: i915 device
*/
void intel_init_cdclk_hooks(struct drm_i915_private *dev_priv)
{
if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
dev_priv->display.modeset_commit_cdclk =
vlv_modeset_commit_cdclk;
dev_priv->display.modeset_calc_cdclk =
vlv_modeset_calc_cdclk;
} else if (IS_BROADWELL(dev_priv)) {
dev_priv->display.modeset_commit_cdclk =
bdw_modeset_commit_cdclk;
dev_priv->display.modeset_calc_cdclk =
bdw_modeset_calc_cdclk;
} else if (IS_GEN9_LP(dev_priv)) {
dev_priv->display.modeset_commit_cdclk =
bxt_modeset_commit_cdclk;
dev_priv->display.modeset_calc_cdclk =
bxt_modeset_calc_cdclk;
} else if (IS_GEN9_BC(dev_priv)) {
dev_priv->display.modeset_commit_cdclk =
skl_modeset_commit_cdclk;
dev_priv->display.modeset_calc_cdclk =
skl_modeset_calc_cdclk;
}
if (IS_GEN9_BC(dev_priv))
dev_priv->display.get_cdclk = skl_get_cdclk;
else if (IS_GEN9_LP(dev_priv))
dev_priv->display.get_cdclk = bxt_get_cdclk;
else if (IS_BROADWELL(dev_priv))
dev_priv->display.get_cdclk = bdw_get_cdclk;
else if (IS_HASWELL(dev_priv))
dev_priv->display.get_cdclk = hsw_get_cdclk;
else if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
dev_priv->display.get_cdclk = vlv_get_cdclk;
else if (IS_GEN6(dev_priv) || IS_IVYBRIDGE(dev_priv))
dev_priv->display.get_cdclk = fixed_400mhz_get_cdclk;
else if (IS_GEN5(dev_priv))
dev_priv->display.get_cdclk = fixed_450mhz_get_cdclk;
else if (IS_GM45(dev_priv))
dev_priv->display.get_cdclk = gm45_get_cdclk;
else if (IS_G4X(dev_priv))
dev_priv->display.get_cdclk = g33_get_cdclk;
else if (IS_I965GM(dev_priv))
dev_priv->display.get_cdclk = i965gm_get_cdclk;
else if (IS_I965G(dev_priv))
dev_priv->display.get_cdclk = fixed_400mhz_get_cdclk;
else if (IS_PINEVIEW(dev_priv))
dev_priv->display.get_cdclk = pnv_get_cdclk;
else if (IS_G33(dev_priv))
dev_priv->display.get_cdclk = g33_get_cdclk;
else if (IS_I945GM(dev_priv))
dev_priv->display.get_cdclk = i945gm_get_cdclk;
else if (IS_I945G(dev_priv))
dev_priv->display.get_cdclk = fixed_400mhz_get_cdclk;
else if (IS_I915GM(dev_priv))
dev_priv->display.get_cdclk = i915gm_get_cdclk;
else if (IS_I915G(dev_priv))
dev_priv->display.get_cdclk = fixed_333mhz_get_cdclk;
else if (IS_I865G(dev_priv))
dev_priv->display.get_cdclk = fixed_266mhz_get_cdclk;
else if (IS_I85X(dev_priv))
dev_priv->display.get_cdclk = i85x_get_cdclk;
else if (IS_I845G(dev_priv))
dev_priv->display.get_cdclk = fixed_200mhz_get_cdclk;
else { /* 830 */
WARN(!IS_I830(dev_priv),
"Unknown platform. Assuming 133 MHz CDCLK\n");
dev_priv->display.get_cdclk = fixed_133mhz_get_cdclk;
}
}
@@ -122,9 +122,6 @@ static void ironlake_pfit_disable(struct intel_crtc *crtc, bool force);
 static void ironlake_pfit_enable(struct intel_crtc *crtc);
 static void intel_modeset_setup_hw_state(struct drm_device *dev);
 static void intel_pre_disable_primary_noatomic(struct drm_crtc *crtc);
-static int intel_max_pixel_rate(struct drm_atomic_state *state);
-static int glk_calc_cdclk(int max_pixclk);
-static int bxt_calc_cdclk(int max_pixclk);
 
 struct intel_limit {
	struct {
@@ -170,7 +167,7 @@ int vlv_get_cck_clock(struct drm_i915_private *dev_priv,
	return DIV_ROUND_CLOSEST(ref_freq << 1, divider + 1);
 }
 
-static int vlv_get_cck_clock_hpll(struct drm_i915_private *dev_priv,
+int vlv_get_cck_clock_hpll(struct drm_i915_private *dev_priv,
			      const char *name, u32 reg)
 {
	if (dev_priv->hpll_freq == 0)
@@ -180,63 +177,6 @@ static int vlv_get_cck_clock_hpll(struct drm_i915_private *dev_priv,
		   dev_priv->hpll_freq);
 }
 
-static int
-intel_pch_rawclk(struct drm_i915_private *dev_priv)
-{
-	return (I915_READ(PCH_RAWCLK_FREQ) & RAWCLK_FREQ_MASK) * 1000;
-}
-
-static int
-intel_vlv_hrawclk(struct drm_i915_private *dev_priv)
-{
-	/* RAWCLK_FREQ_VLV register updated from power well code */
-	return vlv_get_cck_clock_hpll(dev_priv, "hrawclk",
-				      CCK_DISPLAY_REF_CLOCK_CONTROL);
-}
-
-static int
-intel_g4x_hrawclk(struct drm_i915_private *dev_priv)
-{
-	uint32_t clkcfg;
-
-	/* hrawclock is 1/4 the FSB frequency */
-	clkcfg = I915_READ(CLKCFG);
-	switch (clkcfg & CLKCFG_FSB_MASK) {
-	case CLKCFG_FSB_400:
-		return 100000;
-	case CLKCFG_FSB_533:
-		return 133333;
-	case CLKCFG_FSB_667:
-		return 166667;
-	case CLKCFG_FSB_800:
-		return 200000;
-	case CLKCFG_FSB_1067:
-		return 266667;
-	case CLKCFG_FSB_1333:
-		return 333333;
-	/* these two are just a guess; one of them might be right */
-	case CLKCFG_FSB_1600:
-	case CLKCFG_FSB_1600_ALT:
-		return 400000;
-	default:
-		return 133333;
-	}
-}
-
-void intel_update_rawclk(struct drm_i915_private *dev_priv)
-{
-	if (HAS_PCH_SPLIT(dev_priv))
-		dev_priv->rawclk_freq = intel_pch_rawclk(dev_priv);
-	else if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
-		dev_priv->rawclk_freq = intel_vlv_hrawclk(dev_priv);
-	else if (IS_G4X(dev_priv) || IS_PINEVIEW(dev_priv))
-		dev_priv->rawclk_freq = intel_g4x_hrawclk(dev_priv);
-	else
-		return; /* no rawclk on other platforms, or no need to know it */
-
-	DRM_DEBUG_DRIVER("rawclk rate: %d kHz\n", dev_priv->rawclk_freq);
-}
-
 static void intel_update_czclk(struct drm_i915_private *dev_priv)
 {
	if (!(IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)))
...@@ -5791,1085 +5731,203 @@ static void modeset_put_power_domains(struct drm_i915_private *dev_priv, ...@@ -5791,1085 +5731,203 @@ static void modeset_put_power_domains(struct drm_i915_private *dev_priv,
intel_display_power_put(dev_priv, domain); intel_display_power_put(dev_priv, domain);
} }
static int intel_compute_max_dotclk(struct drm_i915_private *dev_priv) static void valleyview_crtc_enable(struct intel_crtc_state *pipe_config,
struct drm_atomic_state *old_state)
{ {
int max_cdclk_freq = dev_priv->max_cdclk_freq; struct drm_crtc *crtc = pipe_config->base.crtc;
struct drm_device *dev = crtc->dev;
struct drm_i915_private *dev_priv = to_i915(dev);
struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
int pipe = intel_crtc->pipe;
if (IS_GEMINILAKE(dev_priv)) if (WARN_ON(intel_crtc->active))
return 2 * max_cdclk_freq; return;
else if (INTEL_INFO(dev_priv)->gen >= 9 ||
IS_HASWELL(dev_priv) || IS_BROADWELL(dev_priv))
return max_cdclk_freq;
else if (IS_CHERRYVIEW(dev_priv))
return max_cdclk_freq*95/100;
else if (INTEL_INFO(dev_priv)->gen < 4)
return 2*max_cdclk_freq*90/100;
else
return max_cdclk_freq*90/100;
}
static int skl_calc_cdclk(int max_pixclk, int vco); if (intel_crtc_has_dp_encoder(intel_crtc->config))
intel_dp_set_m_n(intel_crtc, M1_N1);
static void intel_update_max_cdclk(struct drm_i915_private *dev_priv) intel_set_pipe_timings(intel_crtc);
{ intel_set_pipe_src_size(intel_crtc);
if (IS_GEN9_BC(dev_priv)) {
u32 limit = I915_READ(SKL_DFSM) & SKL_DFSM_CDCLK_LIMIT_MASK;
int max_cdclk, vco;
vco = dev_priv->skl_preferred_vco_freq; if (IS_CHERRYVIEW(dev_priv) && pipe == PIPE_B) {
WARN_ON(vco != 8100000 && vco != 8640000); struct drm_i915_private *dev_priv = to_i915(dev);
/* I915_WRITE(CHV_BLEND(pipe), CHV_BLEND_LEGACY);
* Use the lower (vco 8640) cdclk values as a I915_WRITE(CHV_CANVAS(pipe), 0);
* first guess. skl_calc_cdclk() will correct it
* if the preferred vco is 8100 instead.
*/
if (limit == SKL_DFSM_CDCLK_LIMIT_675)
max_cdclk = 617143;
else if (limit == SKL_DFSM_CDCLK_LIMIT_540)
max_cdclk = 540000;
else if (limit == SKL_DFSM_CDCLK_LIMIT_450)
max_cdclk = 432000;
else
max_cdclk = 308571;
dev_priv->max_cdclk_freq = skl_calc_cdclk(max_cdclk, vco);
} else if (IS_GEMINILAKE(dev_priv)) {
dev_priv->max_cdclk_freq = 316800;
} else if (IS_BROXTON(dev_priv)) {
dev_priv->max_cdclk_freq = 624000;
} else if (IS_BROADWELL(dev_priv)) {
/*
* FIXME with extra cooling we can allow
* 540 MHz for ULX and 675 Mhz for ULT.
* How can we know if extra cooling is
* available? PCI ID, VTB, something else?
*/
if (I915_READ(FUSE_STRAP) & HSW_CDCLK_LIMIT)
dev_priv->max_cdclk_freq = 450000;
else if (IS_BDW_ULX(dev_priv))
dev_priv->max_cdclk_freq = 450000;
else if (IS_BDW_ULT(dev_priv))
dev_priv->max_cdclk_freq = 540000;
else
dev_priv->max_cdclk_freq = 675000;
} else if (IS_CHERRYVIEW(dev_priv)) {
dev_priv->max_cdclk_freq = 320000;
} else if (IS_VALLEYVIEW(dev_priv)) {
dev_priv->max_cdclk_freq = 400000;
} else {
/* otherwise assume cdclk is fixed */
dev_priv->max_cdclk_freq = dev_priv->cdclk_freq;
} }
dev_priv->max_dotclk_freq = intel_compute_max_dotclk(dev_priv); i9xx_set_pipeconf(intel_crtc);
DRM_DEBUG_DRIVER("Max CD clock rate: %d kHz\n",
dev_priv->max_cdclk_freq);
DRM_DEBUG_DRIVER("Max dotclock rate: %d kHz\n",
dev_priv->max_dotclk_freq);
}
static void intel_update_cdclk(struct drm_i915_private *dev_priv)
{
dev_priv->cdclk_freq = dev_priv->display.get_cdclk(dev_priv);
if (INTEL_GEN(dev_priv) >= 9)
DRM_DEBUG_DRIVER("Current CD clock rate: %d kHz, VCO: %d kHz, ref: %d kHz\n",
dev_priv->cdclk_freq, dev_priv->cdclk_pll.vco,
dev_priv->cdclk_pll.ref);
else
DRM_DEBUG_DRIVER("Current CD clock rate: %d kHz\n",
dev_priv->cdclk_freq);
/*
* 9:0 CMBUS [sic] CDCLK frequency (cdfreq):
* Programmng [sic] note: bit[9:2] should be programmed to the number
* of cdclk that generates 4MHz reference clock freq which is used to
* generate GMBus clock. This will vary with the cdclk freq.
*/
if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
I915_WRITE(GMBUSFREQ_VLV, DIV_ROUND_UP(dev_priv->cdclk_freq, 1000));
}
/* convert from kHz to .1 fixpoint MHz with -1MHz offset */ intel_crtc->active = true;
static int skl_cdclk_decimal(int cdclk)
{
return DIV_ROUND_CLOSEST(cdclk - 1000, 500);
}
static int bxt_de_pll_vco(struct drm_i915_private *dev_priv, int cdclk) intel_set_cpu_fifo_underrun_reporting(dev_priv, pipe, true);
{
int ratio;
if (cdclk == dev_priv->cdclk_pll.ref) intel_encoders_pre_pll_enable(crtc, pipe_config, old_state);
return 0;
switch (cdclk) { if (IS_CHERRYVIEW(dev_priv)) {
default: chv_prepare_pll(intel_crtc, intel_crtc->config);
MISSING_CASE(cdclk); chv_enable_pll(intel_crtc, intel_crtc->config);
case 144000: } else {
case 288000: vlv_prepare_pll(intel_crtc, intel_crtc->config);
case 384000: vlv_enable_pll(intel_crtc, intel_crtc->config);
case 576000:
ratio = 60;
break;
case 624000:
ratio = 65;
break;
} }
return dev_priv->cdclk_pll.ref * ratio; intel_encoders_pre_enable(crtc, pipe_config, old_state);
}
static int glk_de_pll_vco(struct drm_i915_private *dev_priv, int cdclk) i9xx_pfit_enable(intel_crtc);
{
int ratio;
if (cdclk == dev_priv->cdclk_pll.ref) intel_color_load_luts(&pipe_config->base);
return 0;
switch (cdclk) { intel_update_watermarks(intel_crtc);
default: intel_enable_pipe(intel_crtc);
MISSING_CASE(cdclk);
case 79200: assert_vblank_disabled(crtc);
case 158400: drm_crtc_vblank_on(crtc);
case 316800:
ratio = 33;
break;
}
return dev_priv->cdclk_pll.ref * ratio; intel_encoders_enable(crtc, pipe_config, old_state);
} }
static void bxt_de_pll_disable(struct drm_i915_private *dev_priv) static void i9xx_set_pll_dividers(struct intel_crtc *crtc)
{ {
I915_WRITE(BXT_DE_PLL_ENABLE, 0); struct drm_device *dev = crtc->base.dev;
struct drm_i915_private *dev_priv = to_i915(dev);
/* Timeout 200us */
if (intel_wait_for_register(dev_priv,
BXT_DE_PLL_ENABLE, BXT_DE_PLL_LOCK, 0,
1))
DRM_ERROR("timeout waiting for DE PLL unlock\n");
dev_priv->cdclk_pll.vco = 0; I915_WRITE(FP0(crtc->pipe), crtc->config->dpll_hw_state.fp0);
I915_WRITE(FP1(crtc->pipe), crtc->config->dpll_hw_state.fp1);
} }
static void bxt_de_pll_enable(struct drm_i915_private *dev_priv, int vco) static void i9xx_crtc_enable(struct intel_crtc_state *pipe_config,
struct drm_atomic_state *old_state)
{ {
int ratio = DIV_ROUND_CLOSEST(vco, dev_priv->cdclk_pll.ref); struct drm_crtc *crtc = pipe_config->base.crtc;
u32 val; struct drm_device *dev = crtc->dev;
struct drm_i915_private *dev_priv = to_i915(dev);
struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
enum pipe pipe = intel_crtc->pipe;
val = I915_READ(BXT_DE_PLL_CTL); if (WARN_ON(intel_crtc->active))
val &= ~BXT_DE_PLL_RATIO_MASK; return;
val |= BXT_DE_PLL_RATIO(ratio);
I915_WRITE(BXT_DE_PLL_CTL, val);
I915_WRITE(BXT_DE_PLL_ENABLE, BXT_DE_PLL_PLL_ENABLE); i9xx_set_pll_dividers(intel_crtc);
/* Timeout 200us */ if (intel_crtc_has_dp_encoder(intel_crtc->config))
if (intel_wait_for_register(dev_priv, intel_dp_set_m_n(intel_crtc, M1_N1);
BXT_DE_PLL_ENABLE,
BXT_DE_PLL_LOCK,
BXT_DE_PLL_LOCK,
1))
DRM_ERROR("timeout waiting for DE PLL lock\n");
dev_priv->cdclk_pll.vco = vco; intel_set_pipe_timings(intel_crtc);
} intel_set_pipe_src_size(intel_crtc);
static void bxt_set_cdclk(struct drm_i915_private *dev_priv, int cdclk)
{
u32 val, divider;
int vco, ret;

if (IS_GEMINILAKE(dev_priv))
vco = glk_de_pll_vco(dev_priv, cdclk);
else
vco = bxt_de_pll_vco(dev_priv, cdclk);

DRM_DEBUG_DRIVER("Changing CDCLK to %d kHz (VCO %d kHz)\n", cdclk, vco);

/* cdclk = vco / 2 / div{1,1.5,2,4} */
switch (DIV_ROUND_CLOSEST(vco, cdclk)) {
case 8:
divider = BXT_CDCLK_CD2X_DIV_SEL_4;
break;
case 4:
divider = BXT_CDCLK_CD2X_DIV_SEL_2;
break;
case 3:
WARN(IS_GEMINILAKE(dev_priv), "Unsupported divider\n");
divider = BXT_CDCLK_CD2X_DIV_SEL_1_5;
break;
case 2:
divider = BXT_CDCLK_CD2X_DIV_SEL_1;
break;
default:
WARN_ON(cdclk != dev_priv->cdclk_pll.ref);
WARN_ON(vco != 0);
divider = BXT_CDCLK_CD2X_DIV_SEL_1;
break;
}

/* Inform power controller of upcoming frequency change */
mutex_lock(&dev_priv->rps.hw_lock);
ret = sandybridge_pcode_write(dev_priv, HSW_PCODE_DE_WRITE_FREQ_REQ,
0x80000000);
mutex_unlock(&dev_priv->rps.hw_lock);

if (ret) {
DRM_ERROR("PCode CDCLK freq change notify failed (err %d, freq %d)\n",
ret, cdclk);
return;
}

if (dev_priv->cdclk_pll.vco != 0 &&
dev_priv->cdclk_pll.vco != vco)
bxt_de_pll_disable(dev_priv);

if (dev_priv->cdclk_pll.vco != vco)
bxt_de_pll_enable(dev_priv, vco);

val = divider | skl_cdclk_decimal(cdclk);
/*
* FIXME if only the cd2x divider needs changing, it could be done
* without shutting off the pipe (if only one pipe is active).
*/
val |= BXT_CDCLK_CD2X_PIPE_NONE;
/*
* Disable SSA Precharge when CD clock frequency < 500 MHz,
* enable otherwise.
*/
if (cdclk >= 500000)
val |= BXT_CDCLK_SSA_PRECHARGE_ENABLE;
I915_WRITE(CDCLK_CTL, val);

mutex_lock(&dev_priv->rps.hw_lock);
ret = sandybridge_pcode_write(dev_priv, HSW_PCODE_DE_WRITE_FREQ_REQ,
DIV_ROUND_UP(cdclk, 25000));
mutex_unlock(&dev_priv->rps.hw_lock);

if (ret) {
DRM_ERROR("PCode CDCLK freq set failed, (err %d, freq %d)\n",
ret, cdclk);
return;
}

intel_update_cdclk(dev_priv);
}
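Editor's note: the CD2X divider selection in bxt_set_cdclk() is pure arithmetic on the VCO/CDCLK ratio. A hedged userspace sketch of just that switch (DIV_ROUND_CLOSEST is simplified here for positive values, and the strings only echo the register field names used above):

#include <stdio.h>

#define DIV_ROUND_CLOSEST(x, d) (((x) + (d) / 2) / (d))	/* positive values only */

static const char *cd2x_div_sel(int vco, int cdclk)
{
	switch (DIV_ROUND_CLOSEST(vco, cdclk)) {
	case 8: return "CD2X_DIV_SEL_4";
	case 4: return "CD2X_DIV_SEL_2";
	case 3: return "CD2X_DIV_SEL_1_5 (not valid on GLK)";
	case 2: return "CD2X_DIV_SEL_1";
	default: return "PLL off, cdclk = ref";
	}
}

int main(void)
{
	int vco = 1152000;	/* 19200 kHz reference * ratio 60 */
	int cdclk[] = { 144000, 288000, 384000, 576000 };
	unsigned int i;

	for (i = 0; i < sizeof(cdclk) / sizeof(cdclk[0]); i++)
		printf("vco %d kHz, cdclk %6d kHz -> %s\n",
		       vco, cdclk[i], cd2x_div_sel(vco, cdclk[i]));
	return 0;
}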
static void bxt_sanitize_cdclk(struct drm_i915_private *dev_priv)
{
u32 cdctl, expected;

intel_update_cdclk(dev_priv);

if (dev_priv->cdclk_pll.vco == 0 ||
dev_priv->cdclk_freq == dev_priv->cdclk_pll.ref)
goto sanitize;

/* DPLL okay; verify the cdclock
*
* Some BIOS versions leave an incorrect decimal frequency value and
* set reserved MBZ bits in CDCLK_CTL at least during exiting from S4,
* so sanitize this register.
*/
cdctl = I915_READ(CDCLK_CTL);
/*
* Let's ignore the pipe field, since BIOS could have configured the
* dividers both synching to an active pipe, or asynchronously
* (PIPE_NONE).
*/
cdctl &= ~BXT_CDCLK_CD2X_PIPE_NONE;

expected = (cdctl & BXT_CDCLK_CD2X_DIV_SEL_MASK) |
skl_cdclk_decimal(dev_priv->cdclk_freq);
/*
* Disable SSA Precharge when CD clock frequency < 500 MHz,
* enable otherwise.
*/
if (dev_priv->cdclk_freq >= 500000)
expected |= BXT_CDCLK_SSA_PRECHARGE_ENABLE;

if (cdctl == expected)
/* All well; nothing to sanitize */
return;

sanitize:
DRM_DEBUG_KMS("Sanitizing cdclk programmed by pre-os\n");

/* force cdclk programming */
dev_priv->cdclk_freq = 0;

/* force full PLL disable + enable */
dev_priv->cdclk_pll.vco = -1;
}
void bxt_init_cdclk(struct drm_i915_private *dev_priv)
{
int cdclk;

bxt_sanitize_cdclk(dev_priv);

if (dev_priv->cdclk_freq != 0 && dev_priv->cdclk_pll.vco != 0)
return;

/*
* FIXME:
* - The initial CDCLK needs to be read from VBT.
* Need to make this change after VBT has changes for BXT.
*/
if (IS_GEMINILAKE(dev_priv))
cdclk = glk_calc_cdclk(0);
else
cdclk = bxt_calc_cdclk(0);

bxt_set_cdclk(dev_priv, cdclk);
}

void bxt_uninit_cdclk(struct drm_i915_private *dev_priv)
{
bxt_set_cdclk(dev_priv, dev_priv->cdclk_pll.ref);
}
static int skl_calc_cdclk(int max_pixclk, int vco)
{
if (vco == 8640000) {
if (max_pixclk > 540000)
return 617143;
else if (max_pixclk > 432000)
return 540000;
else if (max_pixclk > 308571)
return 432000;
else
return 308571;
} else {
if (max_pixclk > 540000)
return 675000;
else if (max_pixclk > 450000)
return 540000;
else if (max_pixclk > 337500)
return 450000;
else
return 337500;
}
}
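Editor's note: every cdclk value returned by skl_calc_cdclk() is DPLL0's VCO divided by two and then by a small divider. The quick arithmetic check below only uses the numbers visible in the function above (it is an illustration, not driver code):

#include <stdio.h>

int main(void)
{
	int vco8640[] = { 308571, 432000, 540000, 617143 };
	int vco8100[] = { 337500, 450000, 540000, 675000 };
	unsigned int i;

	for (i = 0; i < 4; i++)
		printf("vco 8640000 kHz: cdclk %6d = vco / 2 / %.1f\n",
		       vco8640[i], 8640000.0 / 2.0 / vco8640[i]);
	for (i = 0; i < 4; i++)
		printf("vco 8100000 kHz: cdclk %6d = vco / 2 / %.1f\n",
		       vco8100[i], 8100000.0 / 2.0 / vco8100[i]);
	return 0;
}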
static void
skl_dpll0_update(struct drm_i915_private *dev_priv)
{
u32 val;

dev_priv->cdclk_pll.ref = 24000;
dev_priv->cdclk_pll.vco = 0;

val = I915_READ(LCPLL1_CTL);
if ((val & LCPLL_PLL_ENABLE) == 0)
return;

if (WARN_ON((val & LCPLL_PLL_LOCK) == 0))
return;

val = I915_READ(DPLL_CTRL1);

if (WARN_ON((val & (DPLL_CTRL1_HDMI_MODE(SKL_DPLL0) |
DPLL_CTRL1_SSC(SKL_DPLL0) |
DPLL_CTRL1_OVERRIDE(SKL_DPLL0))) !=
DPLL_CTRL1_OVERRIDE(SKL_DPLL0)))
return;

switch (val & DPLL_CTRL1_LINK_RATE_MASK(SKL_DPLL0)) {
case DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_810, SKL_DPLL0):
case DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_1350, SKL_DPLL0):
case DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_1620, SKL_DPLL0):
case DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_2700, SKL_DPLL0):
dev_priv->cdclk_pll.vco = 8100000;
break;
case DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_1080, SKL_DPLL0):
case DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_2160, SKL_DPLL0):
dev_priv->cdclk_pll.vco = 8640000;
break;
default:
MISSING_CASE(val & DPLL_CTRL1_LINK_RATE_MASK(SKL_DPLL0));
break;
} }
}
void skl_set_preferred_cdclk_vco(struct drm_i915_private *dev_priv, int vco)
{
bool changed = dev_priv->skl_preferred_vco_freq != vco;

dev_priv->skl_preferred_vco_freq = vco;
if (changed)
intel_update_max_cdclk(dev_priv);
}
static void
skl_dpll0_enable(struct drm_i915_private *dev_priv, int vco)
{
int min_cdclk = skl_calc_cdclk(0, vco);
u32 val;
WARN_ON(vco != 8100000 && vco != 8640000);
/* select the minimum CDCLK before enabling DPLL 0 */
val = CDCLK_FREQ_337_308 | skl_cdclk_decimal(min_cdclk);
I915_WRITE(CDCLK_CTL, val);
POSTING_READ(CDCLK_CTL);
/*
* We always enable DPLL0 with the lowest link rate possible, but still
* taking into account the VCO required to operate the eDP panel at the
* desired frequency. The usual DP link rates operate with a VCO of
* 8100 while the eDP 1.4 alternate link rates need a VCO of 8640.
* The modeset code is responsible for the selection of the exact link
* rate later on, with the constraint of choosing a frequency that
* works with vco.
*/
val = I915_READ(DPLL_CTRL1);
val &= ~(DPLL_CTRL1_HDMI_MODE(SKL_DPLL0) | DPLL_CTRL1_SSC(SKL_DPLL0) |
DPLL_CTRL1_LINK_RATE_MASK(SKL_DPLL0));
val |= DPLL_CTRL1_OVERRIDE(SKL_DPLL0);
if (vco == 8640000)
val |= DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_1080,
SKL_DPLL0);
else
val |= DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_810,
SKL_DPLL0);
I915_WRITE(DPLL_CTRL1, val);
POSTING_READ(DPLL_CTRL1);
I915_WRITE(LCPLL1_CTL, I915_READ(LCPLL1_CTL) | LCPLL_PLL_ENABLE);
if (intel_wait_for_register(dev_priv,
LCPLL1_CTL, LCPLL_PLL_LOCK, LCPLL_PLL_LOCK,
5))
DRM_ERROR("DPLL0 not locked\n");
dev_priv->cdclk_pll.vco = vco;
/* We'll want to keep using the current vco from now on. */
skl_set_preferred_cdclk_vco(dev_priv, vco);
}
static void
skl_dpll0_disable(struct drm_i915_private *dev_priv)
{
I915_WRITE(LCPLL1_CTL, I915_READ(LCPLL1_CTL) & ~LCPLL_PLL_ENABLE);
if (intel_wait_for_register(dev_priv,
LCPLL1_CTL, LCPLL_PLL_LOCK, 0,
1))
DRM_ERROR("Couldn't disable DPLL0\n");
dev_priv->cdclk_pll.vco = 0;
}
static void skl_set_cdclk(struct drm_i915_private *dev_priv, int cdclk, int vco)
{
u32 freq_select, pcu_ack;
int ret;
WARN_ON((cdclk == 24000) != (vco == 0));
DRM_DEBUG_DRIVER("Changing CDCLK to %d kHz (VCO %d kHz)\n", cdclk, vco);
mutex_lock(&dev_priv->rps.hw_lock);
ret = skl_pcode_request(dev_priv, SKL_PCODE_CDCLK_CONTROL,
SKL_CDCLK_PREPARE_FOR_CHANGE,
SKL_CDCLK_READY_FOR_CHANGE,
SKL_CDCLK_READY_FOR_CHANGE, 3);
mutex_unlock(&dev_priv->rps.hw_lock);
if (ret) {
DRM_ERROR("Failed to inform PCU about cdclk change (%d)\n",
ret);
return;
}
/* set CDCLK_CTL */
switch (cdclk) {
case 450000:
case 432000:
freq_select = CDCLK_FREQ_450_432;
pcu_ack = 1;
break;
case 540000:
freq_select = CDCLK_FREQ_540;
pcu_ack = 2;
break;
case 308571:
case 337500:
default:
freq_select = CDCLK_FREQ_337_308;
pcu_ack = 0;
break;
case 617143:
case 675000:
freq_select = CDCLK_FREQ_675_617;
pcu_ack = 3;
break;
}
if (dev_priv->cdclk_pll.vco != 0 &&
dev_priv->cdclk_pll.vco != vco)
skl_dpll0_disable(dev_priv);
if (dev_priv->cdclk_pll.vco != vco)
skl_dpll0_enable(dev_priv, vco);
I915_WRITE(CDCLK_CTL, freq_select | skl_cdclk_decimal(cdclk));
POSTING_READ(CDCLK_CTL);
/* inform PCU of the change */
mutex_lock(&dev_priv->rps.hw_lock);
sandybridge_pcode_write(dev_priv, SKL_PCODE_CDCLK_CONTROL, pcu_ack);
mutex_unlock(&dev_priv->rps.hw_lock);
intel_update_cdclk(dev_priv);
}
static void skl_sanitize_cdclk(struct drm_i915_private *dev_priv);
void skl_uninit_cdclk(struct drm_i915_private *dev_priv)
{
skl_set_cdclk(dev_priv, dev_priv->cdclk_pll.ref, 0);
}
void skl_init_cdclk(struct drm_i915_private *dev_priv)
{
int cdclk, vco;
skl_sanitize_cdclk(dev_priv);
if (dev_priv->cdclk_freq != 0 && dev_priv->cdclk_pll.vco != 0) {
/*
* Use the current vco as our initial
* guess as to what the preferred vco is.
*/
if (dev_priv->skl_preferred_vco_freq == 0)
skl_set_preferred_cdclk_vco(dev_priv,
dev_priv->cdclk_pll.vco);
return;
}
vco = dev_priv->skl_preferred_vco_freq;
if (vco == 0)
vco = 8100000;
cdclk = skl_calc_cdclk(0, vco);
skl_set_cdclk(dev_priv, cdclk, vco);
}
static void skl_sanitize_cdclk(struct drm_i915_private *dev_priv)
{
uint32_t cdctl, expected;
/*
* Check if the pre-os initialized the display.
* There is a SWF18 scratchpad register defined which is set by the
* pre-os and which can be used by the OS drivers to check the status.
*/
if ((I915_READ(SWF_ILK(0x18)) & 0x00FFFFFF) == 0)
goto sanitize;
intel_update_cdclk(dev_priv);
/* Is PLL enabled and locked ? */
if (dev_priv->cdclk_pll.vco == 0 ||
dev_priv->cdclk_freq == dev_priv->cdclk_pll.ref)
goto sanitize;
/* DPLL okay; verify the cdclock
*
* Noticed in some instances that the freq selection is correct but
* the decimal part is programmed wrong by the BIOS when the pre-os
* does not enable the display. Verify that as well.
*/
cdctl = I915_READ(CDCLK_CTL);
expected = (cdctl & CDCLK_FREQ_SEL_MASK) |
skl_cdclk_decimal(dev_priv->cdclk_freq);
if (cdctl == expected)
/* All well; nothing to sanitize */
return;
sanitize:
DRM_DEBUG_KMS("Sanitizing cdclk programmed by pre-os\n");
/* force cdclk programming */
dev_priv->cdclk_freq = 0;
/* force full PLL disable + enable */
dev_priv->cdclk_pll.vco = -1;
}
/* Adjust CDclk dividers to allow high res or save power if possible */
static void valleyview_set_cdclk(struct drm_device *dev, int cdclk)
{
struct drm_i915_private *dev_priv = to_i915(dev);
u32 val, cmd;
WARN_ON(dev_priv->display.get_cdclk(dev_priv) != dev_priv->cdclk_freq);
if (cdclk >= 320000) /* jump to highest voltage for 400MHz too */
cmd = 2;
else if (cdclk == 266667)
cmd = 1;
else
cmd = 0;
mutex_lock(&dev_priv->rps.hw_lock);
val = vlv_punit_read(dev_priv, PUNIT_REG_DSPFREQ);
val &= ~DSPFREQGUAR_MASK;
val |= (cmd << DSPFREQGUAR_SHIFT);
vlv_punit_write(dev_priv, PUNIT_REG_DSPFREQ, val);
if (wait_for((vlv_punit_read(dev_priv, PUNIT_REG_DSPFREQ) &
DSPFREQSTAT_MASK) == (cmd << DSPFREQSTAT_SHIFT),
50)) {
DRM_ERROR("timed out waiting for CDclk change\n");
}
mutex_unlock(&dev_priv->rps.hw_lock);
mutex_lock(&dev_priv->sb_lock);
if (cdclk == 400000) {
u32 divider;
divider = DIV_ROUND_CLOSEST(dev_priv->hpll_freq << 1, cdclk) - 1;
/* adjust cdclk divider */
val = vlv_cck_read(dev_priv, CCK_DISPLAY_CLOCK_CONTROL);
val &= ~CCK_FREQUENCY_VALUES;
val |= divider;
vlv_cck_write(dev_priv, CCK_DISPLAY_CLOCK_CONTROL, val);
if (wait_for((vlv_cck_read(dev_priv, CCK_DISPLAY_CLOCK_CONTROL) &
CCK_FREQUENCY_STATUS) == (divider << CCK_FREQUENCY_STATUS_SHIFT),
50))
DRM_ERROR("timed out waiting for CDclk change\n");
}
/* adjust self-refresh exit latency value */
val = vlv_bunit_read(dev_priv, BUNIT_REG_BISOC);
val &= ~0x7f;
/*
* For high bandwidth configs, we set a higher latency in the bunit
* so that the core display fetch happens in time to avoid underruns.
*/
if (cdclk == 400000)
val |= 4500 / 250; /* 4.5 usec */
else
val |= 3000 / 250; /* 3.0 usec */
vlv_bunit_write(dev_priv, BUNIT_REG_BISOC, val);
mutex_unlock(&dev_priv->sb_lock);
intel_update_cdclk(dev_priv);
}
static void cherryview_set_cdclk(struct drm_device *dev, int cdclk)
{
struct drm_i915_private *dev_priv = to_i915(dev);
u32 val, cmd;
WARN_ON(dev_priv->display.get_cdclk(dev_priv) != dev_priv->cdclk_freq);
switch (cdclk) {
case 333333:
case 320000:
case 266667:
case 200000:
break;
default:
MISSING_CASE(cdclk);
return;
}
/*
* Specs are full of misinformation, but testing on actual
* hardware has shown that we just need to write the desired
* CCK divider into the Punit register.
*/
cmd = DIV_ROUND_CLOSEST(dev_priv->hpll_freq << 1, cdclk) - 1;
mutex_lock(&dev_priv->rps.hw_lock);
val = vlv_punit_read(dev_priv, PUNIT_REG_DSPFREQ);
val &= ~DSPFREQGUAR_MASK_CHV;
val |= (cmd << DSPFREQGUAR_SHIFT_CHV);
vlv_punit_write(dev_priv, PUNIT_REG_DSPFREQ, val);
if (wait_for((vlv_punit_read(dev_priv, PUNIT_REG_DSPFREQ) &
DSPFREQSTAT_MASK_CHV) == (cmd << DSPFREQSTAT_SHIFT_CHV),
50)) {
DRM_ERROR("timed out waiting for CDclk change\n");
}
mutex_unlock(&dev_priv->rps.hw_lock);
intel_update_cdclk(dev_priv);
}
static int valleyview_calc_cdclk(struct drm_i915_private *dev_priv,
int max_pixclk)
{
int freq_320 = (dev_priv->hpll_freq << 1) % 320000 != 0 ? 333333 : 320000;
int limit = IS_CHERRYVIEW(dev_priv) ? 95 : 90;
/*
* Really only a few cases to deal with, as only 4 CDclks are supported:
* 200MHz
* 267MHz
* 320/333MHz (depends on HPLL freq)
* 400MHz (VLV only)
* So we check to see whether we're above 90% (VLV) or 95% (CHV)
* of the lower bin and adjust if needed.
*
* We seem to get an unstable or solid color picture at 200MHz.
* Not sure what's wrong. For now use 200MHz only when all pipes
* are off.
*/
if (!IS_CHERRYVIEW(dev_priv) &&
max_pixclk > freq_320*limit/100)
return 400000;
else if (max_pixclk > 266667*limit/100)
return freq_320;
else if (max_pixclk > 0)
return 266667;
else
return 200000;
}
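Editor's note: as the comment above says, valleyview_calc_cdclk() bumps to the next bin once the pixel rate exceeds 90% (VLV) or 95% (CHV) of the lower bin. A small sketch of that threshold check, using only the bins listed above (the 320 vs 333 MHz choice depends on the HPLL frequency, assumed here to make 320000 exact):

#include <stdio.h>

static int vlv_calc_cdclk(int max_pixclk, int is_chv, int freq_320)
{
	int limit = is_chv ? 95 : 90;	/* percent of the lower bin we allow */

	if (!is_chv && max_pixclk > freq_320 * limit / 100)
		return 400000;		/* VLV only */
	else if (max_pixclk > 266667 * limit / 100)
		return freq_320;
	else if (max_pixclk > 0)
		return 266667;
	else
		return 200000;		/* all pipes off */
}

int main(void)
{
	int pixclk[] = { 0, 148500, 241500, 297000, 308000 };
	unsigned int i;

	for (i = 0; i < sizeof(pixclk) / sizeof(pixclk[0]); i++)
		printf("pixclk %6d kHz -> vlv %6d kHz, chv %6d kHz\n",
		       pixclk[i],
		       vlv_calc_cdclk(pixclk[i], 0, 320000),
		       vlv_calc_cdclk(pixclk[i], 1, 320000));
	return 0;
}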
static int glk_calc_cdclk(int max_pixclk)
{
if (max_pixclk > 2 * 158400)
return 316800;
else if (max_pixclk > 2 * 79200)
return 158400;
else
return 79200;
}
static int bxt_calc_cdclk(int max_pixclk)
{
if (max_pixclk > 576000)
return 624000;
else if (max_pixclk > 384000)
return 576000;
else if (max_pixclk > 288000)
return 384000;
else if (max_pixclk > 144000)
return 288000;
else
return 144000;
}
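Editor's note: as written above, the GLK comparisons allow a pixel rate of up to twice the selected cdclk, while the BXT table keeps cdclk at or above the pixel rate. A tiny check of both tables exactly as they appear above:

#include <stdio.h>

static int glk_calc_cdclk(int max_pixclk)
{
	if (max_pixclk > 2 * 158400)
		return 316800;
	else if (max_pixclk > 2 * 79200)
		return 158400;
	else
		return 79200;
}

static int bxt_calc_cdclk(int max_pixclk)
{
	if (max_pixclk > 576000)
		return 624000;
	else if (max_pixclk > 384000)
		return 576000;
	else if (max_pixclk > 288000)
		return 384000;
	else if (max_pixclk > 144000)
		return 288000;
	else
		return 144000;
}

int main(void)
{
	int pixclk[] = { 0, 148500, 297000, 594000 };
	unsigned int i;

	for (i = 0; i < sizeof(pixclk) / sizeof(pixclk[0]); i++)
		printf("pixclk %6d kHz -> glk %6d kHz, bxt %6d kHz\n",
		       pixclk[i], glk_calc_cdclk(pixclk[i]), bxt_calc_cdclk(pixclk[i]));
	return 0;
}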
static int valleyview_modeset_calc_cdclk(struct drm_atomic_state *state)
{
struct drm_device *dev = state->dev;
struct drm_i915_private *dev_priv = to_i915(dev);
int max_pixclk = intel_max_pixel_rate(state);
struct intel_atomic_state *intel_state =
to_intel_atomic_state(state);
intel_state->cdclk = intel_state->dev_cdclk =
valleyview_calc_cdclk(dev_priv, max_pixclk);
if (!intel_state->active_crtcs)
intel_state->dev_cdclk = valleyview_calc_cdclk(dev_priv, 0);
return 0;
}
static int bxt_modeset_calc_cdclk(struct drm_atomic_state *state)
{
struct drm_i915_private *dev_priv = to_i915(state->dev);
int max_pixclk = intel_max_pixel_rate(state);
struct intel_atomic_state *intel_state =
to_intel_atomic_state(state);
int cdclk;
if (IS_GEMINILAKE(dev_priv))
cdclk = glk_calc_cdclk(max_pixclk);
else
cdclk = bxt_calc_cdclk(max_pixclk);
intel_state->cdclk = intel_state->dev_cdclk = cdclk;
if (!intel_state->active_crtcs) {
if (IS_GEMINILAKE(dev_priv))
cdclk = glk_calc_cdclk(0);
else
cdclk = bxt_calc_cdclk(0);
intel_state->dev_cdclk = cdclk;
}
return 0;
}
static void vlv_program_pfi_credits(struct drm_i915_private *dev_priv)
{
unsigned int credits, default_credits;
if (IS_CHERRYVIEW(dev_priv))
default_credits = PFI_CREDIT(12);
else
default_credits = PFI_CREDIT(8);
if (dev_priv->cdclk_freq >= dev_priv->czclk_freq) {
/* CHV suggested value is 31 or 63 */
if (IS_CHERRYVIEW(dev_priv))
credits = PFI_CREDIT_63;
else
credits = PFI_CREDIT(15);
} else {
credits = default_credits;
}
/*
* WA - write default credits before re-programming
* FIXME: should we also set the resend bit here?
*/
I915_WRITE(GCI_CONTROL, VGA_FAST_MODE_DISABLE |
default_credits);
I915_WRITE(GCI_CONTROL, VGA_FAST_MODE_DISABLE |
credits | PFI_CREDIT_RESEND);
/*
* FIXME is this guaranteed to clear
* immediately or should we poll for it?
*/
WARN_ON(I915_READ(GCI_CONTROL) & PFI_CREDIT_RESEND);
}
static void valleyview_modeset_commit_cdclk(struct drm_atomic_state *old_state)
{
struct drm_device *dev = old_state->dev;
struct drm_i915_private *dev_priv = to_i915(dev);
struct intel_atomic_state *old_intel_state =
to_intel_atomic_state(old_state);
unsigned req_cdclk = old_intel_state->dev_cdclk;
/*
* FIXME: We can end up here with all power domains off, yet
* with a CDCLK frequency other than the minimum. To account
* for this take the PIPE-A power domain, which covers the HW
* blocks needed for the following programming. This can be
* removed once it's guaranteed that we get here either with
* the minimum CDCLK set, or the required power domains
* enabled.
*/
intel_display_power_get(dev_priv, POWER_DOMAIN_PIPE_A);
if (IS_CHERRYVIEW(dev_priv))
cherryview_set_cdclk(dev, req_cdclk);
else
valleyview_set_cdclk(dev, req_cdclk);
vlv_program_pfi_credits(dev_priv);
intel_display_power_put(dev_priv, POWER_DOMAIN_PIPE_A);
}
static void valleyview_crtc_enable(struct intel_crtc_state *pipe_config,
struct drm_atomic_state *old_state)
{
struct drm_crtc *crtc = pipe_config->base.crtc;
struct drm_device *dev = crtc->dev;
struct drm_i915_private *dev_priv = to_i915(dev);
struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
int pipe = intel_crtc->pipe;
if (WARN_ON(intel_crtc->active))
return;
if (intel_crtc_has_dp_encoder(intel_crtc->config))
intel_dp_set_m_n(intel_crtc, M1_N1);
intel_set_pipe_timings(intel_crtc);
intel_set_pipe_src_size(intel_crtc);
if (IS_CHERRYVIEW(dev_priv) && pipe == PIPE_B) {
struct drm_i915_private *dev_priv = to_i915(dev);
I915_WRITE(CHV_BLEND(pipe), CHV_BLEND_LEGACY);
I915_WRITE(CHV_CANVAS(pipe), 0);
}
i9xx_set_pipeconf(intel_crtc);
intel_crtc->active = true;
intel_set_cpu_fifo_underrun_reporting(dev_priv, pipe, true);
intel_encoders_pre_pll_enable(crtc, pipe_config, old_state);
if (IS_CHERRYVIEW(dev_priv)) {
chv_prepare_pll(intel_crtc, intel_crtc->config);
chv_enable_pll(intel_crtc, intel_crtc->config);
} else {
vlv_prepare_pll(intel_crtc, intel_crtc->config);
vlv_enable_pll(intel_crtc, intel_crtc->config);
}
intel_encoders_pre_enable(crtc, pipe_config, old_state);
i9xx_pfit_enable(intel_crtc);
intel_color_load_luts(&pipe_config->base);
intel_update_watermarks(intel_crtc);
intel_enable_pipe(intel_crtc);
assert_vblank_disabled(crtc);
drm_crtc_vblank_on(crtc);
intel_encoders_enable(crtc, pipe_config, old_state);
}
static void i9xx_set_pll_dividers(struct intel_crtc *crtc)
{
struct drm_device *dev = crtc->base.dev;
struct drm_i915_private *dev_priv = to_i915(dev);
I915_WRITE(FP0(crtc->pipe), crtc->config->dpll_hw_state.fp0);
I915_WRITE(FP1(crtc->pipe), crtc->config->dpll_hw_state.fp1);
}
static void i9xx_crtc_enable(struct intel_crtc_state *pipe_config,
struct drm_atomic_state *old_state)
{
struct drm_crtc *crtc = pipe_config->base.crtc;
struct drm_device *dev = crtc->dev;
struct drm_i915_private *dev_priv = to_i915(dev);
struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
enum pipe pipe = intel_crtc->pipe;
if (WARN_ON(intel_crtc->active))
return;
i9xx_set_pll_dividers(intel_crtc);
if (intel_crtc_has_dp_encoder(intel_crtc->config))
intel_dp_set_m_n(intel_crtc, M1_N1);
intel_set_pipe_timings(intel_crtc);
intel_set_pipe_src_size(intel_crtc);
i9xx_set_pipeconf(intel_crtc);
intel_crtc->active = true;
if (!IS_GEN2(dev_priv))
intel_set_cpu_fifo_underrun_reporting(dev_priv, pipe, true);
intel_encoders_pre_enable(crtc, pipe_config, old_state);
i9xx_enable_pll(intel_crtc);
i9xx_pfit_enable(intel_crtc);
intel_color_load_luts(&pipe_config->base);
intel_update_watermarks(intel_crtc);
intel_enable_pipe(intel_crtc);
assert_vblank_disabled(crtc);
drm_crtc_vblank_on(crtc);
intel_encoders_enable(crtc, pipe_config, old_state);
}
static void i9xx_pfit_disable(struct intel_crtc *crtc)
{
struct drm_device *dev = crtc->base.dev;
struct drm_i915_private *dev_priv = to_i915(dev);
if (!crtc->config->gmch_pfit.control)
return;
assert_pipe_disabled(dev_priv, crtc->pipe);
DRM_DEBUG_DRIVER("disabling pfit, current: 0x%08x\n",
I915_READ(PFIT_CONTROL));
I915_WRITE(PFIT_CONTROL, 0);
}
static void i9xx_crtc_disable(struct intel_crtc_state *old_crtc_state,
struct drm_atomic_state *old_state)
{
struct drm_crtc *crtc = old_crtc_state->base.crtc;
struct drm_device *dev = crtc->dev;
struct drm_i915_private *dev_priv = to_i915(dev);
struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
int pipe = intel_crtc->pipe;
/*
* On gen2 planes are double buffered but the pipe isn't, so we must
* wait for planes to fully turn off before disabling the pipe.
*/
if (IS_GEN2(dev_priv))
intel_wait_for_vblank(dev_priv, pipe);
intel_encoders_disable(crtc, old_crtc_state, old_state);
drm_crtc_vblank_off(crtc);
assert_vblank_disabled(crtc);
intel_disable_pipe(intel_crtc);
i9xx_pfit_disable(intel_crtc);
intel_encoders_post_disable(crtc, old_crtc_state, old_state);
if (!intel_crtc_has_type(intel_crtc->config, INTEL_OUTPUT_DSI)) {
if (IS_CHERRYVIEW(dev_priv))
chv_disable_pll(dev_priv, pipe);
else if (IS_VALLEYVIEW(dev_priv))
vlv_disable_pll(dev_priv, pipe);
else
i9xx_disable_pll(intel_crtc);
}
intel_encoders_post_pll_disable(crtc, old_crtc_state, old_state);
if (!IS_GEN2(dev_priv))
intel_set_cpu_fifo_underrun_reporting(dev_priv, pipe, false);
}
static void intel_crtc_disable_noatomic(struct drm_crtc *crtc)
{
struct intel_encoder *encoder;
struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
struct drm_i915_private *dev_priv = to_i915(crtc->dev);
enum intel_display_power_domain domain;
unsigned long domains;
struct drm_atomic_state *state;
struct intel_crtc_state *crtc_state;
int ret;
if (!intel_crtc->active)
return;
if (crtc->primary->state->visible) {
WARN_ON(intel_crtc->flip_work);
intel_pre_disable_primary_noatomic(crtc);
intel_crtc_disable_planes(crtc, 1 << drm_plane_index(crtc->primary));
crtc->primary->state->visible = false;
}
state = drm_atomic_state_alloc(crtc->dev);
if (!state) {
DRM_DEBUG_KMS("failed to disable [CRTC:%d:%s], out of memory",
crtc->base.id, crtc->name);
return;
}
state->acquire_ctx = crtc->dev->mode_config.acquire_ctx;
/* Everything's already locked, -EDEADLK can't happen. */
crtc_state = intel_atomic_get_crtc_state(state, intel_crtc);
...@@ -7147,546 +6205,104 @@ static bool pipe_config_supports_ips(struct drm_i915_private *dev_priv,
return true;

/*
* We compare against max which means we must take
* the increased cdclk requirement into account when
* calculating the new cdclk.
*
* Should measure whether using a lower cdclk w/o IPS
*/
return pipe_config->pixel_rate <=
dev_priv->max_cdclk_freq * 95 / 100;
}
static void hsw_compute_ips_config(struct intel_crtc *crtc,
struct intel_crtc_state *pipe_config)
{
struct drm_device *dev = crtc->base.dev;
struct drm_i915_private *dev_priv = to_i915(dev);
pipe_config->ips_enabled = i915.enable_ips &&
hsw_crtc_supports_ips(crtc) &&
pipe_config_supports_ips(dev_priv, pipe_config);
}
static bool intel_crtc_supports_double_wide(const struct intel_crtc *crtc)
{
const struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
/* GDG double wide on either pipe, otherwise pipe A only */
return INTEL_INFO(dev_priv)->gen < 4 &&
(crtc->pipe == PIPE_A || IS_I915G(dev_priv));
}
static void intel_crtc_compute_pixel_rate(struct intel_crtc_state *crtc_state)
{
struct drm_i915_private *dev_priv = to_i915(crtc_state->base.crtc->dev);
if (HAS_GMCH_DISPLAY(dev_priv))
/* FIXME calculate proper pipe pixel rate for GMCH pfit */
crtc_state->pixel_rate =
crtc_state->base.adjusted_mode.crtc_clock;
else
crtc_state->pixel_rate =
ilk_pipe_pixel_rate(crtc_state);
}
static int intel_crtc_compute_config(struct intel_crtc *crtc,
struct intel_crtc_state *pipe_config)
{
struct drm_device *dev = crtc->base.dev;
struct drm_i915_private *dev_priv = to_i915(dev);
const struct drm_display_mode *adjusted_mode = &pipe_config->base.adjusted_mode;
int clock_limit = dev_priv->max_dotclk_freq;
if (INTEL_GEN(dev_priv) < 4) {
clock_limit = dev_priv->max_cdclk_freq * 9 / 10;
/*
* Enable double wide mode when the dot clock
* is > 90% of the (display) core speed.
*/
if (intel_crtc_supports_double_wide(crtc) &&
adjusted_mode->crtc_clock > clock_limit) {
clock_limit = dev_priv->max_dotclk_freq;
pipe_config->double_wide = true;
}
}
if (adjusted_mode->crtc_clock > clock_limit) {
DRM_DEBUG_KMS("requested pixel clock (%d kHz) too high (max: %d kHz, double wide: %s)\n",
adjusted_mode->crtc_clock, clock_limit,
yesno(pipe_config->double_wide));
return -EINVAL;
}
/*
* Pipe horizontal size must be even in:
* - DVO ganged mode
* - LVDS dual channel mode
* - Double wide pipe
*/
if ((intel_crtc_has_type(pipe_config, INTEL_OUTPUT_LVDS) &&
intel_is_dual_link_lvds(dev)) || pipe_config->double_wide)
pipe_config->pipe_src_w &= ~1;
/* Cantiga+ cannot handle modes with a hsync front porch of 0.
* WaPruneModeWithIncorrectHsyncOffset:ctg,elk,ilk,snb,ivb,vlv,hsw.
*/
if ((INTEL_GEN(dev_priv) > 4 || IS_G4X(dev_priv)) &&
adjusted_mode->crtc_hsync_start == adjusted_mode->crtc_hdisplay)
return -EINVAL;
intel_crtc_compute_pixel_rate(pipe_config);
if (HAS_IPS(dev_priv))
hsw_compute_ips_config(crtc, pipe_config);
if (pipe_config->has_pch_encoder)
return ironlake_fdi_compute_config(crtc, pipe_config);
return 0;
}
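Editor's note: on gen2/3 the dot clock limit in intel_crtc_compute_config() is 90% of the max cdclk, and double wide mode raises it back to the max dot clock. A sketch of just that clamp decision (the platform clock numbers are made-up inputs, not real limits):

#include <stdio.h>

struct limits {
	int clock_limit;	/* kHz */
	int double_wide;
};

/* Mirrors the gen < 4 branch of intel_crtc_compute_config() above. */
static struct limits gen3_clock_limit(int crtc_clock, int max_cdclk,
				      int max_dotclk, int supports_double_wide)
{
	struct limits l = { .clock_limit = max_cdclk * 9 / 10, .double_wide = 0 };

	if (supports_double_wide && crtc_clock > l.clock_limit) {
		l.clock_limit = max_dotclk;
		l.double_wide = 1;
	}
	return l;
}

int main(void)
{
	/* hypothetical platform: 320 MHz cdclk, 400 MHz max dot clock */
	int clocks[] = { 148500, 287000, 340000 };
	unsigned int i;

	for (i = 0; i < sizeof(clocks) / sizeof(clocks[0]); i++) {
		struct limits l = gen3_clock_limit(clocks[i], 320000, 400000, 1);
		printf("dotclock %6d kHz -> limit %6d kHz, double wide: %s\n",
		       clocks[i], l.clock_limit, l.double_wide ? "yes" : "no");
	}
	return 0;
}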
static int skylake_get_cdclk(struct drm_i915_private *dev_priv)
{
u32 cdctl;
skl_dpll0_update(dev_priv);
if (dev_priv->cdclk_pll.vco == 0)
return dev_priv->cdclk_pll.ref;
cdctl = I915_READ(CDCLK_CTL);
if (dev_priv->cdclk_pll.vco == 8640000) {
switch (cdctl & CDCLK_FREQ_SEL_MASK) {
case CDCLK_FREQ_450_432:
return 432000;
case CDCLK_FREQ_337_308:
return 308571;
case CDCLK_FREQ_540:
return 540000;
case CDCLK_FREQ_675_617:
return 617143;
default:
MISSING_CASE(cdctl & CDCLK_FREQ_SEL_MASK);
}
} else {
switch (cdctl & CDCLK_FREQ_SEL_MASK) {
case CDCLK_FREQ_450_432:
return 450000;
case CDCLK_FREQ_337_308:
return 337500;
case CDCLK_FREQ_540:
return 540000;
case CDCLK_FREQ_675_617:
return 675000;
default:
MISSING_CASE(cdctl & CDCLK_FREQ_SEL_MASK);
}
}
return dev_priv->cdclk_pll.ref;
}
static void bxt_de_pll_update(struct drm_i915_private *dev_priv)
{
u32 val;
dev_priv->cdclk_pll.ref = 19200;
dev_priv->cdclk_pll.vco = 0;
val = I915_READ(BXT_DE_PLL_ENABLE);
if ((val & BXT_DE_PLL_PLL_ENABLE) == 0)
return;
if (WARN_ON((val & BXT_DE_PLL_LOCK) == 0))
return;
val = I915_READ(BXT_DE_PLL_CTL);
dev_priv->cdclk_pll.vco = (val & BXT_DE_PLL_RATIO_MASK) *
dev_priv->cdclk_pll.ref;
}
static int broxton_get_cdclk(struct drm_i915_private *dev_priv)
{
u32 divider;
int div, vco;
bxt_de_pll_update(dev_priv);
vco = dev_priv->cdclk_pll.vco;
if (vco == 0)
return dev_priv->cdclk_pll.ref;
divider = I915_READ(CDCLK_CTL) & BXT_CDCLK_CD2X_DIV_SEL_MASK;
switch (divider) {
case BXT_CDCLK_CD2X_DIV_SEL_1:
div = 2;
break;
case BXT_CDCLK_CD2X_DIV_SEL_1_5:
WARN(IS_GEMINILAKE(dev_priv), "Unsupported divider\n");
div = 3;
break;
case BXT_CDCLK_CD2X_DIV_SEL_2:
div = 4;
break;
case BXT_CDCLK_CD2X_DIV_SEL_4:
div = 8;
break;
default:
MISSING_CASE(divider);
return dev_priv->cdclk_pll.ref;
}
return DIV_ROUND_CLOSEST(vco, div);
}
static int broadwell_get_cdclk(struct drm_i915_private *dev_priv)
{
uint32_t lcpll = I915_READ(LCPLL_CTL);
uint32_t freq = lcpll & LCPLL_CLK_FREQ_MASK;
if (lcpll & LCPLL_CD_SOURCE_FCLK)
return 800000;
else if (I915_READ(FUSE_STRAP) & HSW_CDCLK_LIMIT)
return 450000;
else if (freq == LCPLL_CLK_FREQ_450)
return 450000;
else if (freq == LCPLL_CLK_FREQ_54O_BDW)
return 540000;
else if (freq == LCPLL_CLK_FREQ_337_5_BDW)
return 337500;
else
return 675000;
}
static int haswell_get_cdclk(struct drm_i915_private *dev_priv)
{
uint32_t lcpll = I915_READ(LCPLL_CTL);
uint32_t freq = lcpll & LCPLL_CLK_FREQ_MASK;
if (lcpll & LCPLL_CD_SOURCE_FCLK)
return 800000;
else if (I915_READ(FUSE_STRAP) & HSW_CDCLK_LIMIT)
return 450000;
else if (freq == LCPLL_CLK_FREQ_450)
return 450000;
else if (IS_HSW_ULT(dev_priv))
return 337500;
else
return 540000;
}
static int valleyview_get_cdclk(struct drm_i915_private *dev_priv)
{
return vlv_get_cck_clock_hpll(dev_priv, "cdclk",
CCK_DISPLAY_CLOCK_CONTROL);
}
static int fixed_450mhz_get_cdclk(struct drm_i915_private *dev_priv)
{
return 450000;
}
static int fixed_400mhz_get_cdclk(struct drm_i915_private *dev_priv)
{
return 400000;
}
static int i945gm_get_cdclk(struct drm_i915_private *dev_priv)
{
struct pci_dev *pdev = dev_priv->drm.pdev;
u16 gcfgc = 0;
pci_read_config_word(pdev, GCFGC, &gcfgc);
if (gcfgc & GC_LOW_FREQUENCY_ENABLE)
return 133333;
else {
switch (gcfgc & GC_DISPLAY_CLOCK_MASK) {
case GC_DISPLAY_CLOCK_333_320_MHZ:
return 320000;
default:
case GC_DISPLAY_CLOCK_190_200_MHZ:
return 200000;
}
}
}
static int fixed_333mhz_get_cdclk(struct drm_i915_private *dev_priv)
{
return 333333;
}
static int fixed_200mhz_get_cdclk(struct drm_i915_private *dev_priv)
{
return 200000;
}
static int pnv_get_cdclk(struct drm_i915_private *dev_priv)
{
struct pci_dev *pdev = dev_priv->drm.pdev;
u16 gcfgc = 0;
pci_read_config_word(pdev, GCFGC, &gcfgc);
switch (gcfgc & GC_DISPLAY_CLOCK_MASK) {
case GC_DISPLAY_CLOCK_267_MHZ_PNV:
return 266667;
case GC_DISPLAY_CLOCK_333_MHZ_PNV:
return 333333;
case GC_DISPLAY_CLOCK_444_MHZ_PNV:
return 444444;
case GC_DISPLAY_CLOCK_200_MHZ_PNV:
return 200000;
default:
DRM_ERROR("Unknown pnv display core clock 0x%04x\n", gcfgc);
case GC_DISPLAY_CLOCK_133_MHZ_PNV:
return 133333;
case GC_DISPLAY_CLOCK_167_MHZ_PNV:
return 166667;
}
}
static int i915gm_get_cdclk(struct drm_i915_private *dev_priv)
{
struct pci_dev *pdev = dev_priv->drm.pdev;
u16 gcfgc = 0;
pci_read_config_word(pdev, GCFGC, &gcfgc);
if (gcfgc & GC_LOW_FREQUENCY_ENABLE)
return 133333;
else {
switch (gcfgc & GC_DISPLAY_CLOCK_MASK) {
case GC_DISPLAY_CLOCK_333_320_MHZ:
return 333333;
default:
case GC_DISPLAY_CLOCK_190_200_MHZ:
return 190000;
}
}
}
static int fixed_266mhz_get_cdclk(struct drm_i915_private *dev_priv)
{
return 266667;
}
static int i85x_get_cdclk(struct drm_i915_private *dev_priv)
{
struct pci_dev *pdev = dev_priv->drm.pdev;
u16 hpllcc = 0;
/*
* 852GM/852GMV only supports 133 MHz and the HPLLCC
* encoding is different :(
* FIXME is this the right way to detect 852GM/852GMV?
*/
if (pdev->revision == 0x1)
return 133333;
pci_bus_read_config_word(pdev->bus,
PCI_DEVFN(0, 3), HPLLCC, &hpllcc);
/* Assume that the hardware is in the high speed state. This
* should be the default.
*/
switch (hpllcc & GC_CLOCK_CONTROL_MASK) {
case GC_CLOCK_133_200:
case GC_CLOCK_133_200_2:
case GC_CLOCK_100_200:
return 200000;
case GC_CLOCK_166_250:
return 250000;
case GC_CLOCK_100_133:
return 133333;
case GC_CLOCK_133_266:
case GC_CLOCK_133_266_2:
case GC_CLOCK_166_266:
return 266667;
}
/* Shouldn't happen */
return 0;
}

static int fixed_133mhz_get_cdclk(struct drm_i915_private *dev_priv)
{
return 133333;
}
static unsigned int intel_hpll_vco(struct drm_i915_private *dev_priv)
{
static const unsigned int blb_vco[8] = {
[0] = 3200000,
[1] = 4000000,
[2] = 5333333,
[3] = 4800000,
[4] = 6400000,
};
static const unsigned int pnv_vco[8] = {
[0] = 3200000,
[1] = 4000000,
[2] = 5333333,
[3] = 4800000,
[4] = 2666667,
};
static const unsigned int cl_vco[8] = {
[0] = 3200000,
[1] = 4000000,
[2] = 5333333,
[3] = 6400000,
[4] = 3333333,
[5] = 3566667,
[6] = 4266667,
};
static const unsigned int elk_vco[8] = {
[0] = 3200000,
[1] = 4000000,
[2] = 5333333,
[3] = 4800000,
};
static const unsigned int ctg_vco[8] = {
[0] = 3200000,
[1] = 4000000,
[2] = 5333333,
[3] = 6400000,
[4] = 2666667,
[5] = 4266667,
};
const unsigned int *vco_table;
unsigned int vco;
uint8_t tmp = 0;
/* FIXME other chipsets? */
if (IS_GM45(dev_priv))
vco_table = ctg_vco;
else if (IS_G4X(dev_priv))
vco_table = elk_vco;
else if (IS_I965GM(dev_priv))
vco_table = cl_vco;
else if (IS_PINEVIEW(dev_priv))
vco_table = pnv_vco;
else if (IS_G33(dev_priv))
vco_table = blb_vco;
else
return 0;
tmp = I915_READ(IS_MOBILE(dev_priv) ? HPLLVCO_MOBILE : HPLLVCO);
vco = vco_table[tmp & 0x7];
if (vco == 0)
DRM_ERROR("Bad HPLL VCO (HPLLVCO=0x%02x)\n", tmp);
else
DRM_DEBUG_KMS("HPLL VCO %u kHz\n", vco);
return vco;
}

static int gm45_get_cdclk(struct drm_i915_private *dev_priv)
{
struct pci_dev *pdev = dev_priv->drm.pdev;
unsigned int cdclk_sel, vco = intel_hpll_vco(dev_priv);
uint16_t tmp = 0;
pci_read_config_word(pdev, GCFGC, &tmp);
cdclk_sel = (tmp >> 12) & 0x1;
switch (vco) {
case 2666667:
case 4000000:
case 5333333:
return cdclk_sel ? 333333 : 222222;
case 3200000:
return cdclk_sel ? 320000 : 228571;
default:
DRM_ERROR("Unable to determine CDCLK. HPLL VCO=%u, CFGC=0x%04x\n", vco, tmp);
return 222222;
}
}
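Editor's note: gm45_get_cdclk() above reads bit 12 of GCFGC as a selector and maps it against the HPLL VCO returned by intel_hpll_vco(). A standalone sketch of that decode (the GCFGC values are arbitrary examples, not dumps from real hardware):

#include <stdio.h>

static int gm45_cdclk(unsigned int vco, unsigned short gcfgc)
{
	unsigned int cdclk_sel = (gcfgc >> 12) & 0x1;

	switch (vco) {
	case 2666667:
	case 4000000:
	case 5333333:
		return cdclk_sel ? 333333 : 222222;
	case 3200000:
		return cdclk_sel ? 320000 : 228571;
	default:
		return 222222;	/* the driver warns and falls back here */
	}
}

int main(void)
{
	printf("vco 5333333, sel 0 -> %d kHz\n", gm45_cdclk(5333333, 0x0000));
	printf("vco 5333333, sel 1 -> %d kHz\n", gm45_cdclk(5333333, 0x1000));
	printf("vco 3200000, sel 1 -> %d kHz\n", gm45_cdclk(3200000, 0x1000));
	return 0;
}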
static int i965gm_get_cdclk(struct drm_i915_private *dev_priv)
{
struct pci_dev *pdev = dev_priv->drm.pdev;
static const uint8_t div_3200[] = { 16, 10, 8 };
static const uint8_t div_4000[] = { 20, 12, 10 };
static const uint8_t div_5333[] = { 24, 16, 14 };
const uint8_t *div_table;
unsigned int cdclk_sel, vco = intel_hpll_vco(dev_priv);
uint16_t tmp = 0;
pci_read_config_word(pdev, GCFGC, &tmp);
cdclk_sel = ((tmp >> 8) & 0x1f) - 1;
if (cdclk_sel >= ARRAY_SIZE(div_3200))
goto fail;

switch (vco) {
case 3200000:
div_table = div_3200;
break;
case 4000000:
div_table = div_4000;
break;
case 5333333:
div_table = div_5333;
break;
default:
goto fail;
}

return DIV_ROUND_CLOSEST(vco, div_table[cdclk_sel]);

fail:
DRM_ERROR("Unable to determine CDCLK. HPLL VCO=%u kHz, CFGC=0x%04x\n", vco, tmp);
return 200000;
}
static int g33_get_cdclk(struct drm_i915_private *dev_priv)
{
struct pci_dev *pdev = dev_priv->drm.pdev;
static const uint8_t div_3200[] = { 12, 10, 8, 7, 5, 16 };
static const uint8_t div_4000[] = { 14, 12, 10, 8, 6, 20 };
static const uint8_t div_4800[] = { 20, 14, 12, 10, 8, 24 };
static const uint8_t div_5333[] = { 20, 16, 12, 12, 8, 28 };
const uint8_t *div_table;
unsigned int cdclk_sel, vco = intel_hpll_vco(dev_priv);
uint16_t tmp = 0;
pci_read_config_word(pdev, GCFGC, &tmp);

cdclk_sel = (tmp >> 4) & 0x7;

if (cdclk_sel >= ARRAY_SIZE(div_3200))
goto fail;

switch (vco) {
case 3200000:
div_table = div_3200;
break;
case 4000000:
div_table = div_4000;
break;
case 4800000:
div_table = div_4800;
break;
case 5333333:
div_table = div_5333;
break;
default:
goto fail;
}
return DIV_ROUND_CLOSEST(vco, div_table[cdclk_sel]);

fail:
DRM_ERROR("Unable to determine CDCLK. HPLL VCO=%u kHz, CFGC=0x%08x\n", vco, tmp);
return 190476;
}

static void
...@@ -10250,245 +8866,6 @@ void hsw_disable_pc8(struct drm_i915_private *dev_priv)
}
}
static void bxt_modeset_commit_cdclk(struct drm_atomic_state *old_state)
{
struct drm_device *dev = old_state->dev;
struct intel_atomic_state *old_intel_state =
to_intel_atomic_state(old_state);
unsigned int req_cdclk = old_intel_state->dev_cdclk;
bxt_set_cdclk(to_i915(dev), req_cdclk);
}
static int bdw_adjust_min_pipe_pixel_rate(struct intel_crtc_state *crtc_state,
int pixel_rate)
{
struct drm_i915_private *dev_priv = to_i915(crtc_state->base.crtc->dev);
/* pixel rate mustn't exceed 95% of cdclk with IPS on BDW */
if (IS_BROADWELL(dev_priv) && crtc_state->ips_enabled)
pixel_rate = DIV_ROUND_UP(pixel_rate * 100, 95);
/* BSpec says "Do not use DisplayPort with CDCLK less than
* 432 MHz, audio enabled, port width x4, and link rate
* HBR2 (5.4 GHz), or else there may be audio corruption or
* screen corruption."
*/
if (intel_crtc_has_dp_encoder(crtc_state) &&
crtc_state->has_audio &&
crtc_state->port_clock >= 540000 &&
crtc_state->lane_count == 4)
pixel_rate = max(432000, pixel_rate);
return pixel_rate;
}
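Editor's note: the two adjustments in bdw_adjust_min_pipe_pixel_rate() are easy to check by hand: IPS inflates the requirement by 100/95, and the DP-audio workaround puts a 432 MHz floor under it. A quick sketch with DIV_ROUND_UP open-coded (the example rates are arbitrary):

#include <stdio.h>

#define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

static int bdw_min_pixel_rate(int pixel_rate, int ips_enabled,
			      int dp_audio_hbr2_x4)
{
	/* pixel rate mustn't exceed 95% of cdclk with IPS on BDW */
	if (ips_enabled)
		pixel_rate = DIV_ROUND_UP(pixel_rate * 100, 95);

	/* DP + audio + 4 lanes + HBR2 needs at least 432 MHz cdclk */
	if (dp_audio_hbr2_x4 && pixel_rate < 432000)
		pixel_rate = 432000;

	return pixel_rate;
}

int main(void)
{
	printf("297000 kHz, IPS      -> %d kHz\n", bdw_min_pixel_rate(297000, 1, 0));
	printf("297000 kHz, DP audio -> %d kHz\n", bdw_min_pixel_rate(297000, 0, 1));
	printf("533250 kHz, IPS      -> %d kHz\n", bdw_min_pixel_rate(533250, 1, 0));
	return 0;
}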
/* compute the max rate for new configuration */
static int intel_max_pixel_rate(struct drm_atomic_state *state)
{
struct intel_atomic_state *intel_state = to_intel_atomic_state(state);
struct drm_i915_private *dev_priv = to_i915(state->dev);
struct drm_crtc *crtc;
struct drm_crtc_state *cstate;
struct intel_crtc_state *crtc_state;
unsigned max_pixel_rate = 0, i;
enum pipe pipe;
memcpy(intel_state->min_pixclk, dev_priv->min_pixclk,
sizeof(intel_state->min_pixclk));
for_each_crtc_in_state(state, crtc, cstate, i) {
int pixel_rate;
crtc_state = to_intel_crtc_state(cstate);
if (!crtc_state->base.enable) {
intel_state->min_pixclk[i] = 0;
continue;
}
pixel_rate = crtc_state->pixel_rate;
if (IS_BROADWELL(dev_priv) || IS_GEN9(dev_priv))
pixel_rate = bdw_adjust_min_pipe_pixel_rate(crtc_state,
pixel_rate);
intel_state->min_pixclk[i] = pixel_rate;
}
for_each_pipe(dev_priv, pipe)
max_pixel_rate = max(intel_state->min_pixclk[pipe], max_pixel_rate);
return max_pixel_rate;
}
static void broadwell_set_cdclk(struct drm_device *dev, int cdclk)
{
struct drm_i915_private *dev_priv = to_i915(dev);
uint32_t val, data;
int ret;
if (WARN((I915_READ(LCPLL_CTL) &
(LCPLL_PLL_DISABLE | LCPLL_PLL_LOCK |
LCPLL_CD_CLOCK_DISABLE | LCPLL_ROOT_CD_CLOCK_DISABLE |
LCPLL_CD2X_CLOCK_DISABLE | LCPLL_POWER_DOWN_ALLOW |
LCPLL_CD_SOURCE_FCLK)) != LCPLL_PLL_LOCK,
"trying to change cdclk frequency with cdclk not enabled\n"))
return;
mutex_lock(&dev_priv->rps.hw_lock);
ret = sandybridge_pcode_write(dev_priv,
BDW_PCODE_DISPLAY_FREQ_CHANGE_REQ, 0x0);
mutex_unlock(&dev_priv->rps.hw_lock);
if (ret) {
DRM_ERROR("failed to inform pcode about cdclk change\n");
return;
}
val = I915_READ(LCPLL_CTL);
val |= LCPLL_CD_SOURCE_FCLK;
I915_WRITE(LCPLL_CTL, val);
if (wait_for_us(I915_READ(LCPLL_CTL) &
LCPLL_CD_SOURCE_FCLK_DONE, 1))
DRM_ERROR("Switching to FCLK failed\n");
val = I915_READ(LCPLL_CTL);
val &= ~LCPLL_CLK_FREQ_MASK;
switch (cdclk) {
case 450000:
val |= LCPLL_CLK_FREQ_450;
data = 0;
break;
case 540000:
val |= LCPLL_CLK_FREQ_54O_BDW;
data = 1;
break;
case 337500:
val |= LCPLL_CLK_FREQ_337_5_BDW;
data = 2;
break;
case 675000:
val |= LCPLL_CLK_FREQ_675_BDW;
data = 3;
break;
default:
WARN(1, "invalid cdclk frequency\n");
return;
}
I915_WRITE(LCPLL_CTL, val);
val = I915_READ(LCPLL_CTL);
val &= ~LCPLL_CD_SOURCE_FCLK;
I915_WRITE(LCPLL_CTL, val);
if (wait_for_us((I915_READ(LCPLL_CTL) &
LCPLL_CD_SOURCE_FCLK_DONE) == 0, 1))
DRM_ERROR("Switching back to LCPLL failed\n");
mutex_lock(&dev_priv->rps.hw_lock);
sandybridge_pcode_write(dev_priv, HSW_PCODE_DE_WRITE_FREQ_REQ, data);
mutex_unlock(&dev_priv->rps.hw_lock);
I915_WRITE(CDCLK_FREQ, DIV_ROUND_CLOSEST(cdclk, 1000) - 1);
intel_update_cdclk(dev_priv);
WARN(cdclk != dev_priv->cdclk_freq,
"cdclk requested %d kHz but got %d kHz\n",
cdclk, dev_priv->cdclk_freq);
}
static int broadwell_calc_cdclk(int max_pixclk)
{
if (max_pixclk > 540000)
return 675000;
else if (max_pixclk > 450000)
return 540000;
else if (max_pixclk > 337500)
return 450000;
else
return 337500;
}
static int broadwell_modeset_calc_cdclk(struct drm_atomic_state *state)
{
struct drm_i915_private *dev_priv = to_i915(state->dev);
struct intel_atomic_state *intel_state = to_intel_atomic_state(state);
int max_pixclk = intel_max_pixel_rate(state);
int cdclk;
/*
* FIXME should also account for plane ratio
* once 64bpp pixel formats are supported.
*/
cdclk = broadwell_calc_cdclk(max_pixclk);
if (cdclk > dev_priv->max_cdclk_freq) {
DRM_DEBUG_KMS("requested cdclk (%d kHz) exceeds max (%d kHz)\n",
cdclk, dev_priv->max_cdclk_freq);
return -EINVAL;
}
intel_state->cdclk = intel_state->dev_cdclk = cdclk;
if (!intel_state->active_crtcs)
intel_state->dev_cdclk = broadwell_calc_cdclk(0);
return 0;
}
static void broadwell_modeset_commit_cdclk(struct drm_atomic_state *old_state)
{
struct drm_device *dev = old_state->dev;
struct intel_atomic_state *old_intel_state =
to_intel_atomic_state(old_state);
unsigned req_cdclk = old_intel_state->dev_cdclk;
broadwell_set_cdclk(dev, req_cdclk);
}
static int skl_modeset_calc_cdclk(struct drm_atomic_state *state)
{
struct intel_atomic_state *intel_state = to_intel_atomic_state(state);
struct drm_i915_private *dev_priv = to_i915(state->dev);
const int max_pixclk = intel_max_pixel_rate(state);
int vco = intel_state->cdclk_pll_vco;
int cdclk;
/*
* FIXME should also account for plane ratio
* once 64bpp pixel formats are supported.
*/
cdclk = skl_calc_cdclk(max_pixclk, vco);
/*
* FIXME move the cdclk calculation to
* compute_config() so we can fail gracefully.
*/
if (cdclk > dev_priv->max_cdclk_freq) {
DRM_ERROR("requested cdclk (%d kHz) exceeds max (%d kHz)\n",
cdclk, dev_priv->max_cdclk_freq);
cdclk = dev_priv->max_cdclk_freq;
}
intel_state->cdclk = intel_state->dev_cdclk = cdclk;
if (!intel_state->active_crtcs)
intel_state->dev_cdclk = skl_calc_cdclk(0, vco);
return 0;
}
static void skl_modeset_commit_cdclk(struct drm_atomic_state *old_state)
{
struct drm_i915_private *dev_priv = to_i915(old_state->dev);
struct intel_atomic_state *intel_state = to_intel_atomic_state(old_state);
unsigned int req_cdclk = intel_state->dev_cdclk;
unsigned int req_vco = intel_state->cdclk_pll_vco;
skl_set_cdclk(dev_priv, req_cdclk, req_vco);
}
static int haswell_crtc_compute_clock(struct intel_crtc *crtc,
struct intel_crtc_state *crtc_state)
{
...@@ -16170,6 +14547,8 @@ static const struct drm_mode_config_funcs intel_mode_funcs = {
*/
void intel_init_display_hooks(struct drm_i915_private *dev_priv)
{
intel_init_cdclk_hooks(dev_priv);

if (INTEL_INFO(dev_priv)->gen >= 9) {
dev_priv->display.get_pipe_config = haswell_get_pipe_config;
dev_priv->display.get_initial_plane_config =
...@@ -16238,52 +14617,6 @@ void intel_init_display_hooks(struct drm_i915_private *dev_priv)
dev_priv->display.crtc_disable = i9xx_crtc_disable;
}
/* Returns the core display clock speed */
if (IS_GEN9_BC(dev_priv))
dev_priv->display.get_cdclk = skylake_get_cdclk;
else if (IS_GEN9_LP(dev_priv))
dev_priv->display.get_cdclk = broxton_get_cdclk;
else if (IS_BROADWELL(dev_priv))
dev_priv->display.get_cdclk = broadwell_get_cdclk;
else if (IS_HASWELL(dev_priv))
dev_priv->display.get_cdclk = haswell_get_cdclk;
else if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
dev_priv->display.get_cdclk = valleyview_get_cdclk;
else if (IS_GEN6(dev_priv) || IS_IVYBRIDGE(dev_priv))
dev_priv->display.get_cdclk = fixed_400mhz_get_cdclk;
else if (IS_GEN5(dev_priv))
dev_priv->display.get_cdclk = fixed_450mhz_get_cdclk;
else if (IS_GM45(dev_priv))
dev_priv->display.get_cdclk = gm45_get_cdclk;
else if (IS_G4X(dev_priv))
dev_priv->display.get_cdclk = g33_get_cdclk;
else if (IS_I965GM(dev_priv))
dev_priv->display.get_cdclk = i965gm_get_cdclk;
else if (IS_I965G(dev_priv))
dev_priv->display.get_cdclk = fixed_400mhz_get_cdclk;
else if (IS_PINEVIEW(dev_priv))
dev_priv->display.get_cdclk = pnv_get_cdclk;
else if (IS_G33(dev_priv))
dev_priv->display.get_cdclk = g33_get_cdclk;
else if (IS_I945GM(dev_priv))
dev_priv->display.get_cdclk = i945gm_get_cdclk;
else if (IS_I945G(dev_priv))
dev_priv->display.get_cdclk = fixed_400mhz_get_cdclk;
else if (IS_I915GM(dev_priv))
dev_priv->display.get_cdclk = i915gm_get_cdclk;
else if (IS_I915G(dev_priv))
dev_priv->display.get_cdclk = fixed_333mhz_get_cdclk;
else if (IS_I865G(dev_priv))
dev_priv->display.get_cdclk = fixed_266mhz_get_cdclk;
else if (IS_I85X(dev_priv))
dev_priv->display.get_cdclk = i85x_get_cdclk;
else if (IS_I845G(dev_priv))
dev_priv->display.get_cdclk = fixed_200mhz_get_cdclk;
else { /* 830 */
WARN(!IS_I830(dev_priv), "Unknown platform. Assuming 133 MHz CDCLK\n");
dev_priv->display.get_cdclk = fixed_133mhz_get_cdclk;
}
if (IS_GEN5(dev_priv)) {
dev_priv->display.fdi_link_train = ironlake_fdi_link_train;
} else if (IS_GEN6(dev_priv)) {
...@@ -16295,28 +14628,6 @@ void intel_init_display_hooks(struct drm_i915_private *dev_priv)
dev_priv->display.fdi_link_train = hsw_fdi_link_train;
}
if (IS_BROADWELL(dev_priv)) {
dev_priv->display.modeset_commit_cdclk =
broadwell_modeset_commit_cdclk;
dev_priv->display.modeset_calc_cdclk =
broadwell_modeset_calc_cdclk;
} else if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
dev_priv->display.modeset_commit_cdclk =
valleyview_modeset_commit_cdclk;
dev_priv->display.modeset_calc_cdclk =
valleyview_modeset_calc_cdclk;
} else if (IS_GEN9_LP(dev_priv)) {
dev_priv->display.modeset_commit_cdclk =
bxt_modeset_commit_cdclk;
dev_priv->display.modeset_calc_cdclk =
bxt_modeset_calc_cdclk;
} else if (IS_GEN9_BC(dev_priv)) {
dev_priv->display.modeset_commit_cdclk =
skl_modeset_commit_cdclk;
dev_priv->display.modeset_calc_cdclk =
skl_modeset_calc_cdclk;
}
if (dev_priv->info.gen >= 9)
dev_priv->display.update_crtcs = skl_update_crtcs;
else
...
...@@ -1246,12 +1246,19 @@ void intel_audio_codec_disable(struct intel_encoder *encoder);
void i915_audio_component_init(struct drm_i915_private *dev_priv);
void i915_audio_component_cleanup(struct drm_i915_private *dev_priv);
/* intel_cdclk.c */
void intel_init_cdclk_hooks(struct drm_i915_private *dev_priv);
void intel_update_max_cdclk(struct drm_i915_private *dev_priv);
void intel_update_cdclk(struct drm_i915_private *dev_priv);
void intel_update_rawclk(struct drm_i915_private *dev_priv);
/* intel_display.c */
enum transcoder intel_crtc_pch_transcoder(struct intel_crtc *crtc);
void skl_set_preferred_cdclk_vco(struct drm_i915_private *dev_priv, int vco);
void intel_update_rawclk(struct drm_i915_private *dev_priv);
int vlv_get_cck_clock(struct drm_i915_private *dev_priv,
const char *name, u32 reg, int ref_freq);
int vlv_get_cck_clock_hpll(struct drm_i915_private *dev_priv,
const char *name, u32 reg);
void lpt_disable_pch_transcoder(struct drm_i915_private *dev_priv);
void lpt_disable_iclkip(struct drm_i915_private *dev_priv);
extern const struct drm_plane_funcs intel_plane_funcs;
...