summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorMike Isely <isely@pobox.com>2008-04-09 14:15:03 +0800
committerZhenyu Wang <zhenyu.z.wang@intel.com>2008-04-10 09:10:49 +0800
commit592c5faf031253a8b3274f811a136fd9e42c1a18 (patch)
tree7928d2b91a398c160202498ef2d4c79a094c4efd
parenta83e55e1131025dc049ae219d5e2d3861066f4b7 (diff)
Implement support for 24 bit pixel format
The Intel driver appears to be coded to only work with displays expecting 18 bit pixels. However I have an application using a LCD display that expects pixel data in 24 bit format. The difference is only 2 bits in a single GPU register. This patch implements that change, controlled by a new driver option, "LVDS24Bit". The default value is false, which is the previous behavior. When set to true, then 24 bit panels should work (at least the one I'm testing here does). Fd.o bug #15201 Signed-off-by: Mike Isely <isely@pobox.com> (cherry picked from commit e031cc02e65acfbafb48136dad414751e04425c5)
-rw-r--r--man/intel.man17
-rw-r--r--src/i830.h2
-rw-r--r--src/i830_display.c19
-rw-r--r--src/i830_driver.c8
4 files changed, 42 insertions, 4 deletions
diff --git a/man/intel.man b/man/intel.man
index 69af25e8..8a8b7a09 100644
--- a/man/intel.man
+++ b/man/intel.man
@@ -183,6 +183,23 @@ causing a crash. If you find that your platform needs this option, please file
a bug against xf86-video-intel at http://bugs.freedesktop.org which includes
the output of 'lspci -v' and 'lspci -vn'.
.TP
+.BI "Option \*qLVDS24Bit\*q \*q" boolean \*q
+Specify 24 bit pixel format (i.e. 8 bits per color) to be used for the
+LVDS output. Some newer LCD panels expect pixels to be formatted and
+sent as 8 bits per color channel instead of the more common 6 bits per
+color channel. Set this option to true to enable the newer format.
+Note that this concept is entirely different and independent from the
+frame buffer color depth - which is still controlled in the usual way
+within the X server. This option instead selects the physical format
+/ sequencing of the digital bits sent to the display. Setting the
+frame buffer color depth is really a matter of preference by the user,
+while setting the pixel format here is a requirement of the connected
+hardware. Leaving this unset implies the default value of false,
+which is almost always going to be the right choice. If, on the
+other hand, your LVDS-connected display is extremely washed out
+(e.g. white on a lighter white), trying this option might clear the
+problem.
+.TP
.BI "Option \*qXvMC\*q \*q" boolean \*q
Enable XvMC driver. Current support MPEG2 MC on 915/945 and G33 series.
User should provide absolute path to libIntelXvMC.so in XvMCConfig file.
diff --git a/src/i830.h b/src/i830.h
index bd41a2f9..834e4dcd 100644
--- a/src/i830.h
+++ b/src/i830.h
@@ -548,6 +548,8 @@ typedef struct _I830Rec {
/* Broken-out options. */
OptionInfoPtr Options;
+ Bool lvds_24_bit_mode;
+
Bool StolenOnly;
Bool swfSaved;
diff --git a/src/i830_display.c b/src/i830_display.c
index 4f3f8ef9..4091e792 100644
--- a/src/i830_display.c
+++ b/src/i830_display.c
@@ -1289,10 +1289,21 @@ i830_crtc_mode_set(xf86CrtcPtr crtc, DisplayModePtr mode,
else
lvds &= ~(LVDS_B0B3_POWER_UP | LVDS_CLKB_POWER_UP);
- /* It would be nice to set 24 vs 18-bit mode (LVDS_A3_POWER_UP)
- * appropriately here, but we need to look more thoroughly into how
- * panels behave in the two modes.
- */
+ if (pI830->lvds_24_bit_mode) {
+ /* Option set which requests 24-bit mode
+ * (LVDS_A3_POWER_UP, as opposed to 18-bit mode) here; we
+ * still need to look more thoroughly into how panels
+ * behave in the two modes. This option enables that
+ * experimentation.
+ */
+ xf86DrvMsg(pScrn->scrnIndex, X_INFO,
+ "Selecting less common 24 bit TMDS pixel format.\n");
+ lvds |= LVDS_A3_POWER_UP;
+ lvds |= LVDS_DATA_FORMAT_DOT_ONE;
+ } else {
+ xf86DrvMsg(pScrn->scrnIndex, X_INFO,
+ "Selecting standard 18 bit TMDS pixel format.\n");
+ }
/* Enable dithering if we're in 18-bit mode. */
if (IS_I965G(pI830))
diff --git a/src/i830_driver.c b/src/i830_driver.c
index 5866d672..155e7a98 100644
--- a/src/i830_driver.c
+++ b/src/i830_driver.c
@@ -296,6 +296,7 @@ typedef enum {
OPTION_COLOR_KEY,
OPTION_CHECKDEVICES,
OPTION_MODEDEBUG,
+ OPTION_LVDS24BITMODE,
OPTION_FBC,
OPTION_TILING,
#ifdef XF86DRI_MM
@@ -322,6 +323,7 @@ static OptionInfoRec I830Options[] = {
{OPTION_VIDEO_KEY, "VideoKey", OPTV_INTEGER, {0}, FALSE},
{OPTION_CHECKDEVICES, "CheckDevices",OPTV_BOOLEAN, {0}, FALSE},
{OPTION_MODEDEBUG, "ModeDebug", OPTV_BOOLEAN, {0}, FALSE},
+ {OPTION_LVDS24BITMODE, "LVDS24Bit", OPTV_BOOLEAN, {0}, FALSE},
{OPTION_FBC, "FramebufferCompression", OPTV_BOOLEAN, {0}, TRUE},
{OPTION_TILING, "Tiling", OPTV_BOOLEAN, {0}, TRUE},
#ifdef XF86DRI_MM
@@ -1398,6 +1400,12 @@ I830PreInit(ScrnInfoPtr pScrn, int flags)
pI830->debug_modes = FALSE;
}
+ if (xf86ReturnOptValBool(pI830->Options, OPTION_LVDS24BITMODE, FALSE)) {
+ pI830->lvds_24_bit_mode = TRUE;
+ } else {
+ pI830->lvds_24_bit_mode = FALSE;
+ }
+
if (xf86ReturnOptValBool(pI830->Options, OPTION_FORCEENABLEPIPEA, FALSE))
pI830->quirk_flag |= QUIRK_PIPEA_FORCE;