/*
* Samsung SoC MIPI DSI Master driver.
*
* Copyright (c) 2014 Samsung Electronics Co., Ltd
*
* Contacts: Tomasz Figa <t.figa@samsung.com>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License version 2 as
* published by the Free Software Foundation.
*/
#include <drm/drmP.h>
#include <drm/drm_crtc_helper.h>
#include <drm/drm_mipi_dsi.h>
#include <drm/drm_panel.h>
#include <drm/drm_atomic_helper.h>
#include <linux/clk.h>
#include <linux/gpio/consumer.h>
#include <linux/irq.h>
#include <linux/of_device.h>
#include <linux/of_gpio.h>
#include <linux/of_graph.h>
#include <linux/phy/phy.h>
#include <linux/regulator/consumer.h>
#include <linux/component.h>
#include <video/mipi_display.h>
#include <video/videomode.h>
#include "exynos_drm_crtc.h"
#include "exynos_drm_drv.h"
/* returns true iff both arguments logically differ */
#define NEQV(a, b) (!(a) ^ !(b))
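/* e.g. NEQV(0, 5) == 1 (exactly one argument is zero), NEQV(3, 7) == 0 */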
/* DSIM_STATUS */
#define DSIM_STOP_STATE_DAT(x) (((x) & 0xf) << 0)
#define DSIM_STOP_STATE_CLK (1 << 8)
#define DSIM_TX_READY_HS_CLK (1 << 10)
#define DSIM_PLL_STABLE (1 << 31)
/* DSIM_SWRST */
#define DSIM_FUNCRST (1 << 16)
#define DSIM_SWRST (1 << 0)
/* DSIM_TIMEOUT */
#define DSIM_LPDR_TIMEOUT(x) ((x) << 0)
#define DSIM_BTA_TIMEOUT(x) ((x) << 16)
/* DSIM_CLKCTRL */
#define DSIM_ESC_PRESCALER(x) (((x) & 0xffff) << 0)
#define DSIM_ESC_PRESCALER_MASK (0xffff << 0)
#define DSIM_LANE_ESC_CLK_EN_CLK (1 << 19)
#define DSIM_LANE_ESC_CLK_EN_DATA(x) (((x) & 0xf) << 20)
#define DSIM_LANE_ESC_CLK_EN_DATA_MASK (0xf << 20)
#define DSIM_BYTE_CLKEN (1 << 24)
#define DSIM_BYTE_CLK_SRC(x) (((x) & 0x3) << 25)
#define DSIM_BYTE_CLK_SRC_MASK (0x3 << 25)
#define DSIM_PLL_BYPASS (1 << 27)
#define DSIM_ESC_CLKEN (1 << 28)
#define DSIM_TX_REQUEST_HSCLK (1 << 31)
/* DSIM_CONFIG */
#define DSIM_LANE_EN_CLK (1 << 0)
#define DSIM_LANE_EN(x) (((x) & 0xf) << 1)
#define DSIM_NUM_OF_DATA_LANE(x) (((x) & 0x3) << 5)
#define DSIM_SUB_PIX_FORMAT(x) (((x) & 0x7) << 8)
#define DSIM_MAIN_PIX_FORMAT_MASK (0x7 << 12)
#define DSIM_MAIN_PIX_FORMAT_RGB888 (0x7 << 12)
#define DSIM_MAIN_PIX_FORMAT_RGB666 (0x6 << 12)
#define DSIM_MAIN_PIX_FORMAT_RGB666_P (0x5 << 12)
#define DSIM_MAIN_PIX_FORMAT_RGB565 (0x4 << 12)
#define DSIM_SUB_VC(x) (((x) & 0x3) << 16)
#define DSIM_MAIN_VC(x) (((x) & 0x3) << 18)
#define DSIM_HSA_MODE (1 << 20)
#define DSIM_HBP_MODE (1 << 21)
#define DSIM_HFP_MODE (1 << 22)
#define DSIM_HSE_MODE (1 << 23)
#define DSIM_AUTO_MODE (1 << 24)
#define DSIM_VIDEO_MODE (1 << 25)
#define DSIM_BURST_MODE (1 << 26)
#define DSIM_SYNC_INFORM (1 << 27)
#define DSIM_EOT_DISABLE (1 << 28)
#define DSIM_MFLUSH_VS (1 << 29)
/* This flag is valid only for exynos3250/3472/4415/5260/5430 */
#define DSIM_CLKLANE_STOP (1 << 30)
/* DSIM_ESCMODE */
#define DSIM_TX_TRIGGER_RST (1 << 4)
#define DSIM_TX_LPDT_LP (1 << 6)
#define DSIM_CMD_LPDT_LP (1 << 7)
#define DSIM_FORCE_BTA (1 << 16)
#define DSIM_FORCE_STOP_STATE (1 << 20)
#define DSIM_STOP_STATE_CNT(x) (((x) & 0x7ff) << 21)
#define DSIM_STOP_STATE_CNT_MASK (0x7ff << 21)
/* DSIM_MDRESOL */
#define DSIM_MAIN_STAND_BY (1 << 31)
#define DSIM_MAIN_VRESOL(x, num_bits) (((x) & ((1 << (num_bits)) - 1)) << 16)
#define DSIM_MAIN_HRESOL(x, num_bits) (((x) & ((1 << (num_bits)) - 1)) << 0)
/* DSIM_MVPORCH */
#define DSIM_CMD_ALLOW(x) ((x) << 28)
#define DSIM_STABLE_VFP(x) ((x) << 16)
#define DSIM_MAIN_VBP(x) ((x) << 0)
#define DSIM_CMD_ALLOW_MASK (0xf << 28)
#define DSIM_STABLE_VFP_MASK (0x7ff << 16)
#define DSIM_MAIN_VBP_MASK (0x7ff << 0)
/* DSIM_MHPORCH */
#define DSIM_MAIN_HFP(x) ((x) << 16)
#define DSIM_MAIN_HBP(x) ((x) << 0)
#define DSIM_MAIN_HFP_MASK ((0xffff) << 16)
#define DSIM_MAIN_HBP_MASK ((0xffff) << 0)
/* DSIM_MSYNC */
#define DSIM_MAIN_VSA(x) ((x) << 22)
#define DSIM_MAIN_HSA(x) ((x) << 0)
#define DSIM_MAIN_VSA_MASK ((0x3ff) << 22)
#define DSIM_MAIN_HSA_MASK ((0xffff) << 0)
/* DSIM_SDRESOL */
#define DSIM_SUB_STANDY(x) ((x) << 31)
#define DSIM_SUB_VRESOL(x) ((x) << 16)
#define DSIM_SUB_HRESOL(x) ((x) << 0)
#define DSIM_SUB_STANDY_MASK ((0x1) << 31)
#define DSIM_SUB_VRESOL_MASK ((0x7ff) << 16)
#define DSIM_SUB_HRESOL_MASK ((0x7ff) << 0)
/* DSIM_INTSRC */
#define DSIM_INT_PLL_STABLE (1 << 31)
#define DSIM_INT_SW_RST_RELEASE (1 << 30)
#define DSIM_INT_SFR_FIFO_EMPTY (1 << 29)
#define DSIM_INT_SFR_HDR_FIFO_EMPTY (1 << 28)
#define DSIM_INT_BTA (1 << 25)
#define DSIM_INT_FRAME_DONE (1 << 24)
#define DSIM_INT_RX_TIMEOUT (1 << 21)
#define DSIM_INT_BTA_TIMEOUT (1 << 20)
#define DSIM_INT_RX_DONE (1 << 18)
#define DSIM_INT_RX_TE (1 << 17)
#define DSIM_INT_RX_ACK (1 << 16)
#define DSIM_INT_RX_ECC_ERR (1 << 15)
#define DSIM_INT_RX_CRC_ERR (1 << 14)
/* DSIM_FIFOCTRL */
#define DSIM_RX_DATA_FULL (1 << 25)
#define DSIM_RX_DATA_EMPTY (1 << 24)
#define DSIM_SFR_HEADER_FULL (1 << 23)
#define DSIM_SFR_HEADER_EMPTY (1 << 22)
#define DSIM_SFR_PAYLOAD_FULL (1 << 21)
#define DSIM_SFR_PAYLOAD_EMPTY (1 << 20)
#define DSIM_I80_HEADER_FULL (1 << 19)
#define DSIM_I80_HEADER_EMPTY (1 << 18)
#define DSIM_I80_PAYLOAD_FULL (1 << 17)
#define DSIM_I80_PAYLOAD_EMPTY (1 << 16)
#define DSIM_SD_HEADER_FULL (1 << 15)
#define DSIM_SD_HEADER_EMPTY (1 << 14)
#define DSIM_SD_PAYLOAD_FULL (1 << 13)
#define DSIM_SD_PAYLOAD_EMPTY (1 << 12)
#define DSIM_MD_HEADER_FULL (1 << 11)
#define DSIM_MD_HEADER_EMPTY (1 << 10)
#define DSIM_MD_PAYLOAD_FULL (1 << 9)
#define DSIM_MD_PAYLOAD_EMPTY (1 << 8)
#define DSIM_RX_FIFO (1 << 4)
#define DSIM_SFR_FIFO (1 << 3)
#define DSIM_I80_FIFO (1 << 2)
#define DSIM_SD_FIFO (1 << 1)
#define DSIM_MD_FIFO (1 << 0)
/* DSIM_PHYACCHR */
#define DSIM_AFC_EN (1 << 14)
#define DSIM_AFC_CTL(x) (((x) & 0x7) << 5)
/* DSIM_PLLCTRL */
#define DSIM_FREQ_BAND(x) ((x) << 24)
#define DSIM_PLL_EN (1 << 23)
#define DSIM_PLL_P(x) ((x) << 13)
#define DSIM_PLL_M(x) ((x) << 4)
#define DSIM_PLL_S(x) ((x) << 1)
/* DSIM_PHYCTRL */
#define DSIM_PHYCTRL_ULPS_EXIT(x) (((x) & 0x1ff) << 0)
#define DSIM_PHYCTRL_B_DPHYCTL_VREG_LP (1 << 30)
#define DSIM_PHYCTRL_B_DPHYCTL_SLEW_UP (1 << 14)
/* DSIM_PHYTIMING */
#define DSIM_PHYTIMING_LPX(x) ((x) << 8)
#define DSIM_PHYTIMING_HS_EXIT(x) ((x) << 0)
/* DSIM_PHYTIMING1 */
#define DSIM_PHYTIMING1_CLK_PREPARE(x) ((x) << 24)
#define DSIM_PHYTIMING1_CLK_ZERO(x) ((x) << 16)
#define DSIM_PHYTIMING1_CLK_POST(x) ((x) << 8)
#define DSIM_PHYTIMING1_CLK_TRAIL(x) ((x) << 0)
/* DSIM_PHYTIMING2 */
#define DSIM_PHYTIMING2_HS_PREPARE(x) ((x) << 16)
#define DSIM_PHYTIMING2_HS_ZERO(x) ((x) << 8)
#define DSIM_PHYTIMING2_HS_TRAIL(x) ((x) << 0)
#define DSI_MAX_BUS_WIDTH 4
#define DSI_NUM_VIRTUAL_CHANNELS 4
#define DSI_TX_FIFO_SIZE 2048
#define DSI_RX_FIFO_SIZE 256
#define DSI_XFER_TIMEOUT_MS 100
#define DSI_RX_FIFO_EMPTY 0x30800002
#define OLD_SCLK_MIPI_CLK_NAME "pll_clk"
#define REG_ADDR(dsi, reg_idx) ((dsi)->reg_base + \
dsi->driver_data->reg_ofs[(reg_idx)])
#define DSI_WRITE(dsi, reg_idx, val) writel((val), \
REG_ADDR((dsi), (reg_idx)))
#define DSI_READ(dsi, reg_idx) readl(REG_ADDR((dsi), (reg_idx)))
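/*
* Register accesses are routed through the per-variant reg_ofs[] table so
* that the same code can drive both the classic Exynos register layout
* (exynos_reg_ofs) and the relocated Exynos5433 layout (exynos5433_reg_ofs).
*/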
static char *clk_names[5] = { "bus_clk", "sclk_mipi",
"phyclk_mipidphy0_bitclkdiv8", "phyclk_mipidphy0_rxclkesc0",
"sclk_rgb_vclk_to_dsim0" };
enum exynos_dsi_transfer_type {
EXYNOS_DSI_TX,
EXYNOS_DSI_RX,
};
struct exynos_dsi_transfer {
struct list_head list;
struct completion completed;
int result;
u8 data_id;
u8 data[2];
u16 flags;
const u8 *tx_payload;
u16 tx_len;
u16 tx_done;
u8 *rx_payload;
u16 rx_len;
u16 rx_done;
};
#define DSIM_STATE_ENABLED BIT(0)
#define DSIM_STATE_INITIALIZED BIT(1)
#define DSIM_STATE_CMD_LPM BIT(2)
#define DSIM_STATE_VIDOUT_AVAILABLE BIT(3)
struct exynos_dsi_driver_data {
unsigned int *reg_ofs;
unsigned int plltmr_reg;
unsigned int has_freqband:1;
unsigned int has_clklane_stop:1;
unsigned int num_clks;
unsigned int max_freq;
unsigned int wait_for_reset;
unsigned int num_bits_resol;
unsigned int *reg_values;
};
struct exynos_dsi {
struct drm_encoder encoder;
struct mipi_dsi_host dsi_host;
struct drm_connector connector;
struct device_node *panel_node;
struct drm_panel *panel;
struct device *dev;
void __iomem *reg_base;
struct phy *phy;
struct clk **clks;
struct regulator_bulk_data supplies[2];
int irq;
int te_gpio;
u32 pll_clk_rate;
u32 burst_clk_rate;
u32 esc_clk_rate;
u32 lanes;
u32 mode_flags;
u32 format;
struct videomode vm;
int state;
struct drm_property *brightness;
struct completion completed;
spinlock_t transfer_lock; /* protects transfer_list */
struct list_head transfer_list;
struct exynos_dsi_driver_data *driver_data;
struct device_node *bridge_node;
};
#define host_to_dsi(host) container_of(host, struct exynos_dsi, dsi_host)
#define connector_to_dsi(c) container_of(c, struct exynos_dsi, connector)
static inline struct exynos_dsi *encoder_to_dsi(struct drm_encoder *e)
{
return container_of(e, struct exynos_dsi, encoder);
}
enum reg_idx {
DSIM_STATUS_REG, /* Status register */
DSIM_SWRST_REG, /* Software reset register */
DSIM_CLKCTRL_REG, /* Clock control register */
DSIM_TIMEOUT_REG, /* Time out register */
DSIM_CONFIG_REG, /* Configuration register */
DSIM_ESCMODE_REG, /* Escape mode register */
DSIM_MDRESOL_REG, /* Main display resolution register */
DSIM_MVPORCH_REG, /* Main display Vporch register */
DSIM_MHPORCH_REG, /* Main display Hporch register */
DSIM_MSYNC_REG, /* Main display sync area register */
DSIM_INTSRC_REG, /* Interrupt source register */
DSIM_INTMSK_REG, /* Interrupt mask register */
DSIM_PKTHDR_REG, /* Packet Header FIFO register */
DSIM_PAYLOAD_REG, /* Payload FIFO register */
DSIM_RXFIFO_REG, /* Read FIFO register */
DSIM_FIFOCTRL_REG, /* FIFO status and control register */
DSIM_PLLCTRL_REG, /* PLL control register */
DSIM_PHYCTRL_REG, /* D-PHY control register */
DSIM_PHYTIMING_REG, /* D-PHY timing register */
DSIM_PHYTIMING1_REG, /* D-PHY timing register 1 */
DSIM_PHYTIMING2_REG, /* D-PHY timing register 2 */
NUM_REGS
};
static unsigned int exynos_reg_ofs[] = {
[DSIM_STATUS_REG] = 0x00,
[DSIM_SWRST_REG] = 0x04,
[DSIM_CLKCTRL_REG] = 0x08,
[DSIM_TIMEOUT_REG] = 0x0c,
[DSIM_CONFIG_REG] = 0x10,
[DSIM_ESCMODE_REG] = 0x14,
[DSIM_MDRESOL_REG] = 0x18,
[DSIM_MVPORCH_REG] = 0x1c,
[DSIM_MHPORCH_REG] = 0x20,
[DSIM_MSYNC_REG] = 0x24,
[DSIM_INTSRC_REG] = 0x2c,
[DSIM_INTMSK_REG] = 0x30,
[DSIM_PKTHDR_REG] = 0x34,
[DSIM_PAYLOAD_REG] = 0x38,
[DSIM_RXFIFO_REG] = 0x3c,
[DSIM_FIFOCTRL_REG] = 0x44,
[DSIM_PLLCTRL_REG] = 0x4c,
[DSIM_PHYCTRL_REG] = 0x5c,
[DSIM_PHYTIMING_REG] = 0x64,
[DSIM_PHYTIMING1_REG] = 0x68,
[DSIM_PHYTIMING2_REG] = 0x6c,
};
static unsigned int exynos5433_reg_ofs[] = {
[DSIM_STATUS_REG] = 0x04,
[DSIM_SWRST_REG] = 0x0C,
[DSIM_CLKCTRL_REG] = 0x10,
[DSIM_TIMEOUT_REG] = 0x14,
[DSIM_CONFIG_REG] = 0x18,
[DSIM_ESCMODE_REG] = 0x1C,
[DSIM_MDRESOL_REG] = 0x20,
[DSIM_MVPORCH_REG] = 0x24,
[DSIM_MHPORCH_REG] = 0x28,
[DSIM_MSYNC_REG] = 0x2C,
[DSIM_INTSRC_REG] = 0x34,
[DSIM_INTMSK_REG] = 0x38,
[DSIM_PKTHDR_REG] = 0x3C,
[DSIM_PAYLOAD_REG] = 0x40,
[DSIM_RXFIFO_REG] = 0x44,
[DSIM_FIFOCTRL_REG] = 0x4C,
[DSIM_PLLCTRL_REG] = 0x94,
[DSIM_PHYCTRL_REG] = 0xA4,
[DSIM_PHYTIMING_REG] = 0xB4,
[DSIM_PHYTIMING1_REG] = 0xB8,
[DSIM_PHYTIMING2_REG] = 0xBC,
};
enum reg_value_idx {
RESET_TYPE,
PLL_TIMER,
STOP_STATE_CNT,
PHYCTRL_ULPS_EXIT,
PHYCTRL_VREG_LP,
PHYCTRL_SLEW_UP,
PHYTIMING_LPX,
PHYTIMING_HS_EXIT,
PHYTIMING_CLK_PREPARE,
PHYTIMING_CLK_ZERO,
PHYTIMING_CLK_POST,
PHYTIMING_CLK_TRAIL,
PHYTIMING_HS_PREPARE,
PHYTIMING_HS_ZERO,
PHYTIMING_HS_TRAIL
};
static unsigned int reg_values[] = {
[RESET_TYPE] = DSIM_SWRST,
[PLL_TIMER] = 500,
[STOP_STATE_CNT] = 0xf,
[PHYCTRL_ULPS_EXIT] = DSIM_PHYCTRL_ULPS_EXIT(0x0af),
[PHYCTRL_VREG_LP] = 0,
[PHYCTRL_SLEW_UP] = 0,
[PHYTIMING_LPX] = DSIM_PHYTIMING_LPX(0x06),
[PHYTIMING_HS_EXIT] = DSIM_PHYTIMING_HS_EXIT(0x0b),
[PHYTIMING_CLK_PREPARE] = DSIM_PHYTIMING1_CLK_PREPARE(0x07),
[PHYTIMING_CLK_ZERO] = DSIM_PHYTIMING1_CLK_ZERO(0x27),
[PHYTIMING_CLK_POST] = DSIM_PHYTIMING1_CLK_POST(0x0d),
[PHYTIMING_CLK_TRAIL] = DSIM_PHYTIMING1_CLK_TRAIL(0x08),
[PHYTIMING_HS_PREPARE] = DSIM_PHYTIMING2_HS_PREPARE(0x09),
[PHYTIMING_HS_ZERO] = DSIM_PHYTIMING2_HS_ZERO(0x0d),
[PHYTIMING_HS_TRAIL] = DSIM_PHYTIMING2_HS_TRAIL(0x0b),
};
static unsigned int exynos5433_reg_values[] = {
[RESET_TYPE] = DSIM_FUNCRST,
[PLL_TIMER] = 22200,
[STOP_STATE_CNT] = 0xa,
[PHYCTRL_ULPS_EXIT] = DSIM_PHYCTRL_ULPS_EXIT(0x190),
[PHYCTRL_VREG_LP] = DSIM_PHYCTRL_B_DPHYCTL_VREG_LP,
[PHYCTRL_SLEW_UP] = DSIM_PHYCTRL_B_DPHYCTL_SLEW_UP,
[PHYTIMING_LPX] = DSIM_PHYTIMING_LPX(0x07),
[PHYTIMING_HS_EXIT] = DSIM_PHYTIMING_HS_EXIT(0x0c),
[PHYTIMING_CLK_PREPARE] = DSIM_PHYTIMING1_CLK_PREPARE(0x09),
[PHYTIMING_CLK_ZERO] = DSIM_PHYTIMING1_CLK_ZERO(0x2d),
[PHYTIMING_CLK_POST] = DSIM_PHYTIMING1_CLK_POST(0x0e),
[PHYTIMING_CLK_TRAIL] = DSIM_PHYTIMING1_CLK_TRAIL(0x09),
[PHYTIMING_HS_PREPARE] = DSIM_PHYTIMING2_HS_PREPARE(0x0b),
[PHYTIMING_HS_ZERO] = DSIM_PHYTIMING2_HS_ZERO(0x10),
[PHYTIMING_HS_TRAIL] = DSIM_PHYTIMING2_HS_TRAIL(0x0c),
};
static struct exynos_dsi_driver_data exynos3_dsi_driver_data = {
.reg_ofs = exynos_reg_ofs,
.plltmr_reg = 0x50,
.has_freqband = 1,
.has_clklane_stop = 1,
.num_clks = 2,
.max_freq = 1000,
.wait_for_reset = 1,
.num_bits_resol = 11,
.reg_values = reg_values,
};
static struct exynos_dsi_driver_data exynos4_dsi_driver_data = {
.reg_ofs = exynos_reg_ofs,
.plltmr_reg = 0x50,
.has_freqband = 1,
.has_clklane_stop = 1,
.num_clks = 2,
.max_freq = 1000,
.wait_for_reset = 1,
.num_bits_resol = 11,
.reg_values = reg_values,
};
static struct exynos_dsi_driver_data exynos4415_dsi_driver_data = {
.reg_ofs = exynos_reg_ofs,
.plltmr_reg = 0x58,
.has_clklane_stop = 1,
.num_clks = 2,
.max_freq = 1000,
.wait_for_reset = 1,
.num_bits_resol = 11,
.reg_values = reg_values,
};
static struct exynos_dsi_driver_data exynos5_dsi_driver_data = {
.reg_ofs = exynos_reg_ofs,
.plltmr_reg = 0x58,
.num_clks = 2,
.max_freq = 1000,
.wait_for_reset = 1,
.num_bits_resol = 11,
.reg_values = reg_values,
};
static struct exynos_dsi_driver_data exynos5433_dsi_driver_data = {
.reg_ofs = exynos5433_reg_ofs,
.plltmr_reg = 0xa0,
.has_clklane_stop = 1,
.num_clks = 5,
.max_freq = 1500,
.wait_for_reset = 0,
.num_bits_resol = 12,
.reg_values = exynos5433_reg_values,
};
static struct of_device_id exynos_dsi_of_match[] = {
{ .compatible = "samsung,exynos3250-mipi-dsi",
.data = &exynos3_dsi_driver_data },
{ .compatible = "samsung,exynos4210-mipi-dsi",
.data = &exynos4_dsi_driver_data },
{ .compatible = "samsung,exynos4415-mipi-dsi",
.data = &exynos4415_dsi_driver_data },
{ .compatible = "samsung,exynos5410-mipi-dsi",
.data = &exynos5_dsi_driver_data },
{ .compatible = "samsung,exynos5433-mipi-dsi",
.data = &exynos5433_dsi_driver_data },
{ }
};
static inline struct exynos_dsi_driver_data *exynos_dsi_get_driver_data(
struct platform_device *pdev)
{
const struct of_device_id *of_id =
of_match_device(exynos_dsi_of_match, &pdev->dev);
return (struct exynos_dsi_driver_data *)of_id->data;
}
static void exynos_dsi_wait_for_reset(struct exynos_dsi *dsi)
{
if (wait_for_completion_timeout(&dsi->completed, msecs_to_jiffies(300)))
return;
dev_err(dsi->dev, "timeout waiting for reset\n");
}
static void exynos_dsi_reset(struct exynos_dsi *dsi)
{
struct exynos_dsi_driver_data *driver_data = dsi->driver_data;
reinit_completion(&dsi->completed);
DSI_WRITE(dsi, DSIM_SWRST_REG, driver_data->reg_values[RESET_TYPE]);
}
#ifndef MHZ
#define MHZ (1000*1000)
#endif
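/*
* Find PLL dividers such that
*
* fout = fin * M / (P * 2^S)
*
* with 41 <= M <= 125 and the intermediate frequency fin * M / P kept
* between 500 MHz and driver_data->max_freq MHz. Returns the closest
* achievable output frequency, or 0 if no valid P/M/S combination exists.
*/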
static unsigned long exynos_dsi_pll_find_pms(struct exynos_dsi *dsi,
unsigned long fin, unsigned long fout, u8 *p, u16 *m, u8 *s)
{
struct exynos_dsi_driver_data *driver_data = dsi->driver_data;
unsigned long best_freq = 0;
u32 min_delta = 0xffffffff;
u8 p_min, p_max;
u8 _p, uninitialized_var(best_p);
u16 _m, uninitialized_var(best_m);
u8 _s, uninitialized_var(best_s);
p_min = DIV_ROUND_UP(fin, (12 * MHZ));
p_max = fin / (6 * MHZ);
for (_p = p_min; _p <= p_max; ++_p) {
for (_s = 0; _s <= 5; ++_s) {
u64 tmp;
u32 delta;
tmp = (u64)fout * (_p << _s);
do_div(tmp, fin);
_m = tmp;
if (_m < 41 || _m > 125)
continue;
tmp = (u64)_m * fin;
do_div(tmp, _p);
if (tmp < 500 * MHZ ||
tmp > driver_data->max_freq * MHZ)
continue;
tmp = (u64)_m * fin;
do_div(tmp, _p << _s);
delta = abs(fout - tmp);
if (delta < min_delta) {
best_p = _p;
best_m = _m;
best_s = _s;
min_delta = delta;
best_freq = tmp;
}
}
}
if (best_freq) {
*p = best_p;
*m = best_m;
*s = best_s;
}
return best_freq;
}
static unsigned long exynos_dsi_set_pll(struct exynos_dsi *dsi,
unsigned long freq)
{
struct exynos_dsi_driver_data *driver_data = dsi->driver_data;
unsigned long fin, fout;
int timeout;
u8 p, s;
u16 m;
u32 reg;
fin = dsi->pll_clk_rate;
fout = exynos_dsi_pll_find_pms(dsi, fin, freq, &p, &m, &s);
if (!fout) {
dev_err(dsi->dev,
"failed to find PLL PMS for requested frequency\n");
return 0;
}
dev_dbg(dsi->dev, "PLL freq %lu, (p %d, m %d, s %d)\n", fout, p, m, s);
writel(driver_data->reg_values[PLL_TIMER],
dsi->reg_base + driver_data->plltmr_reg);
reg = DSIM_PLL_EN | DSIM_PLL_P(p) | DSIM_PLL_M(m) | DSIM_PLL_S(s);
if (driver_data->has_freqband) {
static const unsigned long freq_bands[] = {
100 * MHZ, 120 * MHZ, 160 * MHZ, 200 * MHZ,
270 * MHZ, 320 * MHZ, 390 * MHZ, 450 * MHZ,
510 * MHZ, 560 * MHZ, 640 * MHZ, 690 * MHZ,
770 * MHZ, 870 * MHZ, 950 * MHZ,
};
int band;
for (band = 0; band < ARRAY_SIZE(freq_bands); ++band)
if (fout < freq_bands[band])
break;
dev_dbg(dsi->dev, "band %d\n", band);
reg |= DSIM_FREQ_BAND(band);
}
DSI_WRITE(dsi, DSIM_PLLCTRL_REG, reg);
timeout = 1000;
do {
if (timeout-- == 0) {
dev_err(dsi->dev, "PLL failed to stabilize\n");
return 0;
}
reg = DSI_READ(dsi, DSIM_STATUS_REG);
} while ((reg & DSIM_PLL_STABLE) == 0);
return fout;
}
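/*
* Derive the link clocks from the requested burst (HS) clock: the byte
* clock is the HS clock divided by 8, and the escape clock is the byte
* clock divided by a prescaler chosen so that it stays at or below
* 20 MHz (the usual D-PHY escape-mode clock limit).
*/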
static int exynos_dsi_enable_clock(struct exynos_dsi *dsi)
{
unsigned long hs_clk, byte_clk, esc_clk;
unsigned long esc_div;
u32 reg;
hs_clk = exynos_dsi_set_pll(dsi, dsi->burst_clk_rate);
if (!hs_clk) {
dev_err(dsi->dev, "failed to configure DSI PLL\n");
return -EFAULT;
}
byte_clk = hs_clk / 8;
esc_div = DIV_ROUND_UP(byte_clk, dsi->esc_clk_rate);
esc_clk = byte_clk / esc_div;
if (esc_clk > 20 * MHZ) {
++esc_div;
esc_clk = byte_clk / esc_div;
}
dev_dbg(dsi->dev, "hs_clk = %lu, byte_clk = %lu, esc_clk = %lu\n",
hs_clk, byte_clk, esc_clk);
reg = DSI_READ(dsi, DSIM_CLKCTRL_REG);
reg &= ~(DSIM_ESC_PRESCALER_MASK | DSIM_LANE_ESC_CLK_EN_CLK
| DSIM_LANE_ESC_CLK_EN_DATA_MASK | DSIM_PLL_BYPASS
| DSIM_BYTE_CLK_SRC_MASK);
reg |= DSIM_ESC_CLKEN | DSIM_BYTE_CLKEN
| DSIM_ESC_PRESCALER(esc_div)
| DSIM_LANE_ESC_CLK_EN_CLK
| DSIM_LANE_ESC_CLK_EN_DATA(BIT(dsi->lanes) - 1)
| DSIM_BYTE_CLK_SRC(0)
| DSIM_TX_REQUEST_HSCLK;
DSI_WRITE(dsi, DSIM_CLKCTRL_REG, reg);
return 0;
}
static void exynos_dsi_set_phy_ctrl(struct exynos_dsi *dsi)
{
struct exynos_dsi_driver_data *driver_data = dsi->driver_data;
unsigned int *reg_values = driver_data->reg_values;
u32 reg;
if (driver_data->has_freqband)
return;
/* B D-PHY: D-PHY Master & Slave Analog Block control */
reg = reg_values[PHYCTRL_ULPS_EXIT] | reg_values[PHYCTRL_VREG_LP] |
reg_values[PHYCTRL_SLEW_UP];
DSI_WRITE(dsi, DSIM_PHYCTRL_REG, reg);
/*
* T LPX: Transmitted length of any Low-Power state period
* T HS-EXIT: Time that the transmitter drives LP-11 following a HS
* burst
*/
reg = reg_values[PHYTIMING_LPX] | reg_values[PHYTIMING_HS_EXIT];
DSI_WRITE(dsi, DSIM_PHYTIMING_REG, reg);
/*
* T CLK-PREPARE: Time that the transmitter drives the Clock Lane LP-00
* Line state immediately before the HS-0 Line state starting the
* HS transmission
* T CLK-ZERO: Time that the transmitter drives the HS-0 state prior to
* transmitting the Clock.
* T CLK-POST: Time that the transmitter continues to send HS clock
* after the last associated Data Lane has transitioned to LP Mode.
* The interval is defined as the period from the end of T HS-TRAIL
* to the beginning of T CLK-TRAIL.
* T CLK-TRAIL: Time that the transmitter drives the HS-0 state after
* the last payload clock bit of a HS transmission burst
*/
reg = reg_values[PHYTIMING_CLK_PREPARE] |
reg_values[PHYTIMING_CLK_ZERO] |
reg_values[PHYTIMING_CLK_POST] |
reg_values[PHYTIMING_CLK_TRAIL];
DSI_WRITE(dsi, DSIM_PHYTIMING1_REG, reg);
/*
* T HS-PREPARE: Time that the transmitter drives the Data Lane LP-00
* Line state immediately before the HS-0 Line state starting the
* HS transmission
* T HS-ZERO: Time that the transmitter drives the HS-0 state prior to
* transmitting the Sync sequence.
* T HS-TRAIL: Time that the transmitter drives the flipped differential
* state after last payload data bit of a HS transmission burst
*/
reg = reg_values[PHYTIMING_HS_PREPARE] | reg_values[PHYTIMING_HS_ZERO] |
reg_values[PHYTIMING_HS_TRAIL];
DSI_WRITE(dsi, DSIM_PHYTIMING2_REG, reg);
}
static void exynos_dsi_disable_clock(struct exynos_dsi *dsi)
{
u32 reg;
reg = DSI_READ(dsi, DSIM_CLKCTRL_REG);
reg &= ~(DSIM_LANE_ESC_CLK_EN_CLK | DSIM_LANE_ESC_CLK_EN_DATA_MASK
| DSIM_ESC_CLKEN | DSIM_BYTE_CLKEN);
DSI_WRITE(dsi, DSIM_CLKCTRL_REG, reg);
reg = DSI_READ(dsi, DSIM_PLLCTRL_REG);
reg &= ~DSIM_PLL_EN;
DSI_WRITE(dsi, DSIM_PLLCTRL_REG, reg);
}
static void exynos_dsi_enable_lane(struct exynos_dsi *dsi, u32 lane)
{
u32 reg = DSI_READ(dsi, DSIM_CONFIG_REG);
reg |= (DSIM_NUM_OF_DATA_LANE(dsi->lanes - 1) | DSIM_LANE_EN_CLK |
DSIM_LANE_EN(lane));
DSI_WRITE(dsi, DSIM_CONFIG_REG, reg);
}
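/*
* Bring up the DSI link: reset the FIFO pointers, program the video/command
* mode flags and pixel format into DSIM_CONFIG, enable the requested data
* lanes and poll DSIM_STATUS until the clock and data lanes report stop
* state (or HS-clock-ready for the clock lane).
*/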
static int exynos_dsi_init_link(struct exynos_dsi *dsi)
{
struct exynos_dsi_driver_data *driver_data = dsi->driver_data;
int timeout;
u32 reg;
u32 lanes_mask;
/* Initialize FIFO pointers */
reg = DSI_READ(dsi, DSIM_FIFOCTRL_REG);
reg &= ~0x1f;
DSI_WRITE(dsi, DSIM_FIFOCTRL_REG, reg);
usleep_range(9000, 11000);
reg |= 0x1f;
DSI_WRITE(dsi, DSIM_FIFOCTRL_REG, reg);
usleep_range(9000, 11000);
/* DSI configuration */
reg = 0;
/*
* The MIPI_DSI_MODE_VIDEO bit of mode_flags selects the display
* configuration: if it is set, the link operates in video mode,
* otherwise it operates in command mode.
*/
if (dsi->mode_flags & MIPI_DSI_MODE_VIDEO) {
reg |= DSIM_VIDEO_MODE;
/*
* The user manual states that the following bits are ignored in
* command mode.
*/
if (!(dsi->mode_flags & MIPI_DSI_MODE_VSYNC_FLUSH))
reg |= DSIM_MFLUSH_VS;
if (dsi->mode_flags & MIPI_DSI_MODE_VIDEO_SYNC_PULSE)
reg |= DSIM_SYNC_INFORM;
if (dsi->mode_flags & MIPI_DSI_MODE_VIDEO_BURST)
reg |= DSIM_BURST_MODE;
if (dsi->mode_flags & MIPI_DSI_MODE_VIDEO_AUTO_VERT)
reg |= DSIM_AUTO_MODE;
if (dsi->mode_flags & MIPI_DSI_MODE_VIDEO_HSE)
reg |= DSIM_HSE_MODE;
if (!(dsi->mode_flags & MIPI_DSI_MODE_VIDEO_HFP))
reg |= DSIM_HFP_MODE;
if (!(dsi->mode_flags & MIPI_DSI_MODE_VIDEO_HBP))
reg |= DSIM_HBP_MODE;
if (!(dsi->mode_flags & MIPI_DSI_MODE_VIDEO_HSA))
reg |= DSIM_HSA_MODE;
}
if (!(dsi->mode_flags & MIPI_DSI_MODE_EOT_PACKET))
reg |= DSIM_EOT_DISABLE;
switch (dsi->format) {
case MIPI_DSI_FMT_RGB888:
reg |= DSIM_MAIN_PIX_FORMAT_RGB888;
break;
case MIPI_DSI_FMT_RGB666:
reg |= DSIM_MAIN_PIX_FORMAT_RGB666;
break;
case MIPI_DSI_FMT_RGB666_PACKED:
reg |= DSIM_MAIN_PIX_FORMAT_RGB666_P;
break;
case MIPI_DSI_FMT_RGB565:
reg |= DSIM_MAIN_PIX_FORMAT_RGB565;
break;
default:
dev_err(dsi->dev, "invalid pixel format\n");
return -EINVAL;
}
/*
* Use non-continuous clock mode if the peripheral requests it and
* the host controller supports it.
*
* In non-continuous clock mode, the host controller turns off the
* HS clock between high-speed transmissions to reduce power
* consumption.
*/
if (driver_data->has_clklane_stop &&
dsi->mode_flags & MIPI_DSI_CLOCK_NON_CONTINUOUS) {
reg |= DSIM_CLKLANE_STOP;
}
DSI_WRITE(dsi, DSIM_CONFIG_REG, reg);
lanes_mask = BIT(dsi->lanes) - 1;
exynos_dsi_enable_lane(dsi, lanes_mask);
/* Check that the clock and data lanes are in stop state */
timeout = 100;
do {
if (timeout-- == 0) {
dev_err(dsi->dev, "waiting for bus lanes timed out\n");
return -EFAULT;
}
reg = DSI_READ(dsi, DSIM_STATUS_REG);
if ((reg & DSIM_STOP_STATE_DAT(lanes_mask))
!= DSIM_STOP_STATE_DAT(lanes_mask))
continue;
} while (!(reg & (DSIM_STOP_STATE_CLK | DSIM_TX_READY_HS_CLK)));
reg = DSI_READ(dsi, DSIM_ESCMODE_REG);
reg &= ~DSIM_STOP_STATE_CNT_MASK;
reg |= DSIM_STOP_STATE_CNT(driver_data->reg_values[STOP_STATE_CNT]);
DSI_WRITE(dsi, DSIM_ESCMODE_REG, reg);
reg = DSIM_BTA_TIMEOUT(0xff) | DSIM_LPDR_TIMEOUT(0xffff);
DSI_WRITE(dsi, DSIM_TIMEOUT_REG, reg);
return 0;
}
static void exynos_dsi_set_display_mode(struct exynos_dsi *dsi)
{
struct videomode *vm = &dsi->vm;
unsigned int num_bits_resol = dsi->driver_data->num_bits_resol;
u32 reg;
if (dsi->mode_flags & MIPI_DSI_MODE_VIDEO) {
reg = DSIM_CMD_ALLOW(0xf)
| DSIM_STABLE_VFP(vm->vfront_porch)
| DSIM_MAIN_VBP(vm->vback_porch);
DSI_WRITE(dsi, DSIM_MVPORCH_REG, reg);
reg = DSIM_MAIN_HFP(vm->hfront_porch)
| DSIM_MAIN_HBP(vm->hback_porch);
DSI_WRITE(dsi, DSIM_MHPORCH_REG, reg);
reg = DSIM_MAIN_VSA(vm->vsync_len)
| DSIM_MAIN_HSA(vm->hsync_len);
DSI_WRITE(dsi, DSIM_MSYNC_REG, reg);
}
reg = DSIM_MAIN_HRESOL(vm->hactive, num_bits_resol) |
DSIM_MAIN_VRESOL(vm->vactive, num_bits_resol);
DSI_WRITE(dsi, DSIM_MDRESOL_REG, reg);
dev_dbg(dsi->dev, "LCD size = %dx%d\n", vm->hactive, vm->vactive);
}
static void exynos_dsi_set_display_enable(struct exynos_dsi *dsi, bool enable)
{
u32 reg;
reg = DSI_READ(dsi, DSIM_MDRESOL_REG);
if (enable)
reg |= DSIM_MAIN_STAND_BY;
else
reg &= ~DSIM_MAIN_STAND_BY;
DSI_WRITE(dsi, DSIM_MDRESOL_REG, reg);
}
static int exynos_dsi_wait_for_hdr_fifo(struct exynos_dsi *dsi)
{
int timeout = 2000;
do {
u32 reg = DSI_READ(dsi, DSIM_FIFOCTRL_REG);
if (!(reg & DSIM_SFR_HEADER_FULL))
return 0;
if (!cond_resched())
usleep_range(950, 1050);
} while (--timeout);
return -ETIMEDOUT;
}
static void exynos_dsi_set_cmd_lpm(struct exynos_dsi *dsi, bool lpm)
{
u32 v = DSI_READ(dsi, DSIM_ESCMODE_REG);
if (lpm)
v |= DSIM_CMD_LPDT_LP;
else
v &= ~DSIM_CMD_LPDT_LP;
DSI_WRITE(dsi, DSIM_ESCMODE_REG, v);
}
static void exynos_dsi_force_bta(struct exynos_dsi *dsi)
{
u32 v = DSI_READ(dsi, DSIM_ESCMODE_REG);
v |= DSIM_FORCE_BTA;
DSI_WRITE(dsi, DSIM_ESCMODE_REG, v);
}
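/*
* Push up to DSI_TX_FIFO_SIZE bytes of payload into the payload FIFO,
* packed little-endian four bytes per register write, and on the first
* chunk of a transfer write the packet header
* (data_id | data[0] << 8 | data[1] << 16) to start transmission.
*/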
static void exynos_dsi_send_to_fifo(struct exynos_dsi *dsi,
struct exynos_dsi_transfer *xfer)
{
struct device *dev = dsi->dev;
const u8 *payload = xfer->tx_payload + xfer->tx_done;
u16 length = xfer->tx_len - xfer->tx_done;
bool first = !xfer->tx_done;
u32 reg;
dev_dbg(dev, "< xfer %p: tx len %u, done %u, rx len %u, done %u\n",
xfer, xfer->tx_len, xfer->tx_done, xfer->rx_len, xfer->rx_done);
if (length > DSI_TX_FIFO_SIZE)
length = DSI_TX_FIFO_SIZE;
xfer->tx_done += length;
/* Send payload */
while (length >= 4) {
reg = (payload[3] << 24) | (payload[2] << 16)
| (payload[1] << 8) | payload[0];
DSI_WRITE(dsi, DSIM_PAYLOAD_REG, reg);
payload += 4;
length -= 4;
}
reg = 0;
switch (length) {
case 3:
reg |= payload[2] << 16;
/* Fall through */
case 2:
reg |= payload[1] << 8;
/* Fall through */
case 1:
reg |= payload[0];
DSI_WRITE(dsi, DSIM_PAYLOAD_REG, reg);
break;
case 0:
/* Do nothing */
break;
}
/* Send packet header */
if (!first)
return;
reg = (xfer->data[1] << 16) | (xfer->data[0] << 8) | xfer->data_id;
if (exynos_dsi_wait_for_hdr_fifo(dsi)) {
dev_err(dev, "waiting for header FIFO timed out\n");
return;
}
if (NEQV(xfer->flags & MIPI_DSI_MSG_USE_LPM,
dsi->state & DSIM_STATE_CMD_LPM)) {
exynos_dsi_set_cmd_lpm(dsi, xfer->flags & MIPI_DSI_MSG_USE_LPM);
dsi->state ^= DSIM_STATE_CMD_LPM;
}
DSI_WRITE(dsi, DSIM_PKTHDR_REG, reg);
if (xfer->flags & MIPI_DSI_MSG_REQ_ACK)
exynos_dsi_force_bta(dsi);
}
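/*
* Drain the RX FIFO: the first word is the packet header. Short read
* responses carry their one or two data bytes directly in the header,
* long responses provide a 16-bit payload length followed by payload
* words, and acknowledge/error reports are only logged.
*/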
static void exynos_dsi_read_from_fifo(struct exynos_dsi *dsi,
struct exynos_dsi_transfer *xfer)
{
u8 *payload = xfer->rx_payload + xfer->rx_done;
bool first = !xfer->rx_done;
struct device *dev = dsi->dev;
u16 length;
u32 reg;
if (first) {
reg = DSI_READ(dsi, DSIM_RXFIFO_REG);
switch (reg & 0x3f) {
case MIPI_DSI_RX_GENERIC_SHORT_READ_RESPONSE_2BYTE:
case MIPI_DSI_RX_DCS_SHORT_READ_RESPONSE_2BYTE:
if (xfer->rx_len >= 2) {
payload[1] = reg >> 16;
++xfer->rx_done;
}
/* Fall through */
case MIPI_DSI_RX_GENERIC_SHORT_READ_RESPONSE_1BYTE:
case MIPI_DSI_RX_DCS_SHORT_READ_RESPONSE_1BYTE:
payload[0] = reg >> 8;
++xfer->rx_done;
xfer->rx_len = xfer->rx_done;
xfer->result = 0;
goto clear_fifo;
case MIPI_DSI_RX_ACKNOWLEDGE_AND_ERROR_REPORT:
dev_err(dev, "DSI Error Report: 0x%04x\n",
(reg >> 8) & 0xffff);
xfer->result = 0;
goto clear_fifo;
}
length = (reg >> 8) & 0xffff;
if (length > xfer->rx_len) {
dev_err(dev,
"response too long (%u > %u bytes), stripping\n",
length, xfer->rx_len);
length = xfer->rx_len;
} else if (length < xfer->rx_len)
xfer->rx_len = length;
}
length = xfer->rx_len - xfer->rx_done;
xfer->rx_done += length;
/* Receive payload */
while (length >= 4) {
reg = DSI_READ(dsi, DSIM_RXFIFO_REG);
payload[0] = (reg >> 0) & 0xff;
payload[1] = (reg >> 8) & 0xff;
payload[2] = (reg >> 16) & 0xff;
payload[3] = (reg >> 24) & 0xff;
payload += 4;
length -= 4;
}
if (length) {
reg = DSI_READ(dsi, DSIM_RXFIFO_REG);
switch (length) {
case 3:
payload[2] = (reg >> 16) & 0xff;
/* Fall through */
case 2:
payload[1] = (reg >> 8) & 0xff;
/* Fall through */
case 1:
payload[0] = reg & 0xff;
}
}
if (xfer->rx_done == xfer->rx_len)
xfer->result = 0;
clear_fifo:
length = DSI_RX_FIFO_SIZE / 4;
do {
reg = DSI_READ(dsi, DSIM_RXFIFO_REG);
if (reg == DSI_RX_FIFO_EMPTY)
break;
} while (--length);
}
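/*
* Transfer queue handling: pending transfers live on dsi->transfer_list,
* protected by transfer_lock. exynos_dsi_transfer_start() feeds the head
* of the queue into the FIFO; completion is driven from the interrupt
* handler through exynos_dsi_transfer_finish().
*/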
static void exynos_dsi_transfer_start(struct exynos_dsi *dsi)
{
unsigned long flags;
struct exynos_dsi_transfer *xfer;
bool start = false;
again:
spin_lock_irqsave(&dsi->transfer_lock, flags);
if (list_empty(&dsi->transfer_list)) {
spin_unlock_irqrestore(&dsi->transfer_lock, flags);
return;
}
xfer = list_first_entry(&dsi->transfer_list,
struct exynos_dsi_transfer, list);
spin_unlock_irqrestore(&dsi->transfer_lock, flags);
if (xfer->tx_len && xfer->tx_done == xfer->tx_len)
/* waiting for RX */
return;
exynos_dsi_send_to_fifo(dsi, xfer);
if (xfer->tx_len || xfer->rx_len)
return;
xfer->result = 0;
complete(&xfer->completed);
spin_lock_irqsave(&dsi->transfer_lock, flags);
list_del_init(&xfer->list);
start = !list_empty(&dsi->transfer_list);
spin_unlock_irqrestore(&dsi->transfer_lock, flags);
if (start)
goto again;
}
static bool exynos_dsi_transfer_finish(struct exynos_dsi *dsi)
{
struct exynos_dsi_transfer *xfer;
unsigned long flags;
bool start = true;
spin_lock_irqsave(&dsi->transfer_lock, flags);
if (list_empty(&dsi->transfer_list)) {
spin_unlock_irqrestore(&dsi->transfer_lock, flags);
return false;
}
xfer = list_first_entry(&dsi->transfer_list,
struct exynos_dsi_transfer, list);
spin_unlock_irqrestore(&dsi->transfer_lock, flags);
dev_dbg(dsi->dev,
"> xfer %p, tx_len %u, tx_done %u, rx_len %u, rx_done %u\n",
xfer, xfer->tx_len, xfer->tx_done, xfer->rx_len, xfer->rx_done);
if (xfer->tx_done != xfer->tx_len)
return true;
if (xfer->rx_done != xfer->rx_len)
exynos_dsi_read_from_fifo(dsi, xfer);
if (xfer->rx_done != xfer->rx_len)
return true;
spin_lock_irqsave(&dsi->transfer_lock, flags);
list_del_init(&xfer->list);
start = !list_empty(&dsi->transfer_list);
spin_unlock_irqrestore(&dsi->transfer_lock, flags);
if (!xfer->rx_len)
xfer->result = 0;
complete(&xfer->completed);
return start;
}
static void exynos_dsi_remove_transfer(struct exynos_dsi *dsi,
struct exynos_dsi_transfer *xfer)
{
unsigned long flags;
bool start;
spin_lock_irqsave(&dsi->transfer_lock, flags);
if (!list_empty(&dsi->transfer_list) &&
xfer == list_first_entry(&dsi->transfer_list,
struct exynos_dsi_transfer, list)) {
list_del_init(&xfer->list);
start = !list_empty(&dsi->transfer_list);
spin_unlock_irqrestore(&dsi->transfer_lock, flags);
if (start)
exynos_dsi_transfer_start(dsi);
return;
}
list_del_init(&xfer->list);
spin_unlock_irqrestore(&dsi->transfer_lock, flags);
}
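/*
* Queue a transfer and wait up to DSI_XFER_TIMEOUT_MS for the interrupt
* handler to complete it. On timeout the transfer is removed from the
* queue and -ETIMEDOUT is returned.
*/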
static int exynos_dsi_transfer(struct exynos_dsi *dsi,
struct exynos_dsi_transfer *xfer)
{
unsigned long flags;
bool stopped;
xfer->tx_done = 0;
xfer->rx_done = 0;
xfer->result = -ETIMEDOUT;
init_completion(&xfer->completed);
spin_lock_irqsave(&dsi->transfer_lock, flags);
stopped = list_empty(&dsi->transfer_list);
list_add_tail(&xfer->list, &dsi->transfer_list);
spin_unlock_irqrestore(&dsi->transfer_lock, flags);
if (stopped)
exynos_dsi_transfer_start(dsi);
wait_for_completion_timeout(&xfer->completed,
msecs_to_jiffies(DSI_XFER_TIMEOUT_MS));
if (xfer->result == -ETIMEDOUT) {
exynos_dsi_remove_transfer(dsi, xfer);
dev_err(dsi->dev, "xfer timed out: %*ph %*ph\n", 2, xfer->data,
xfer->tx_len, xfer->tx_payload);
return -ETIMEDOUT;
}
/* Also covers hardware timeout condition */
return xfer->result;
}
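/*
* Main DSIM interrupt handler: on DSIM_INT_SW_RST_RELEASE unmask the
* interrupts of interest and signal reset completion; on FIFO-empty,
* RX-done, frame-done or PLL-stable interrupts advance the transfer queue.
*/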
static irqreturn_t exynos_dsi_irq(int irq, void *dev_id)
{
struct exynos_dsi *dsi = dev_id;
u32 status;
status = DSI_READ(dsi, DSIM_INTSRC_REG);
if (!status) {
static unsigned long int j;
if (printk_timed_ratelimit(&j, 500))
dev_warn(dsi->dev, "spurious interrupt\n");
return IRQ_HANDLED;
}
DSI_WRITE(dsi, DSIM_INTSRC_REG, status);
if (status & DSIM_INT_SW_RST_RELEASE) {
u32 mask = ~(DSIM_INT_RX_DONE | DSIM_INT_SFR_FIFO_EMPTY |
DSIM_INT_SFR_HDR_FIFO_EMPTY | DSIM_INT_FRAME_DONE |
DSIM_INT_RX_ECC_ERR | DSIM_INT_SW_RST_RELEASE);
DSI_WRITE(dsi, DSIM_INTMSK_REG, mask);
complete(&dsi->completed);
return IRQ_HANDLED;
}
if (!(status & (DSIM_INT_RX_DONE | DSIM_INT_SFR_FIFO_EMPTY |
DSIM_INT_FRAME_DONE | DSIM_INT_PLL_STABLE)))
return IRQ_HANDLED;
if (exynos_dsi_transfer_finish(dsi))
exynos_dsi_transfer_start(dsi);
return IRQ_HANDLED;
}
static irqreturn_t exynos_dsi_te_irq_handler(int irq, void *dev_id)
{
struct exynos_dsi *dsi = (struct exynos_dsi *)dev_id;
struct drm_encoder *encoder = &dsi->encoder;
if (dsi->state & DSIM_STATE_VIDOUT_AVAILABLE)
exynos_drm_crtc_te_handler(encoder->crtc);
return IRQ_HANDLED;
}
static void exynos_dsi_enable_irq(struct exynos_dsi *dsi)
{
enable_irq(dsi->irq);
if (gpio_is_valid(dsi->te_gpio))
enable_irq(gpio_to_irq(dsi->te_gpio));
}
static void exynos_dsi_disable_irq(struct exynos_dsi *dsi)
{
if (gpio_is_valid(dsi->te_gpio))
disable_irq(gpio_to_irq(dsi->te_gpio));
disable_irq(dsi->irq);
}
static int exynos_dsi_init(struct exynos_dsi *dsi)
{
struct exynos_dsi_driver_data *driver_data = dsi->driver_data;
exynos_dsi_reset(dsi);
exynos_dsi_enable_irq(dsi);
if (driver_data->reg_values[RESET_TYPE] == DSIM_FUNCRST)
exynos_dsi_enable_lane(dsi, BIT(dsi->lanes) - 1);
exynos_dsi_enable_clock(dsi);
if (driver_data->wait_for_reset)
exynos_dsi_wait_for_reset(dsi);
exynos_dsi_set_phy_ctrl(dsi);
exynos_dsi_init_link(dsi);
return 0;
}
static int exynos_dsi_register_te_irq(struct exynos_dsi *dsi)
{
int ret;
int te_gpio_irq;
dsi->te_gpio = of_get_named_gpio(dsi->panel_node, "te-gpios", 0);
if (!gpio_is_valid(dsi->te_gpio)) {
dev_err(dsi->dev, "no te-gpios specified\n");
ret = dsi->te_gpio;
goto out;
}
ret = gpio_request(dsi->te_gpio, "te_gpio");
if (ret) {
dev_err(dsi->dev, "gpio request failed with %d\n", ret);
goto out;
}
te_gpio_irq = gpio_to_irq(dsi->te_gpio);
irq_set_status_flags(te_gpio_irq, IRQ_NOAUTOEN);
ret = request_threaded_irq(te_gpio_irq, exynos_dsi_te_irq_handler, NULL,
IRQF_TRIGGER_RISING, "TE", dsi);
if (ret) {
dev_err(dsi->dev, "request interrupt failed with %d\n", ret);
gpio_free(dsi->te_gpio);
goto out;
}
out:
return ret;
}
static void exynos_dsi_unregister_te_irq(struct exynos_dsi *dsi)
{
if (gpio_is_valid(dsi->te_gpio)) {
free_irq(gpio_to_irq(dsi->te_gpio), dsi);
gpio_free(dsi->te_gpio);
dsi->te_gpio = -ENOENT;
}
}
static int exynos_dsi_host_attach(struct mipi_dsi_host *host,
struct mipi_dsi_device *device)
{
struct exynos_dsi *dsi = host_to_dsi(host);
dsi->lanes = device->lanes;
dsi->format = device->format;
dsi->mode_flags = device->mode_flags;
dsi->panel_node = device->dev.of_node;
/*
* This is a temporary solution and should be replaced with a more
* generic approach.
*
* If the attached panel device operates in command mode, the DSI host
* should register a TE interrupt handler.
*/
if (!(dsi->mode_flags & MIPI_DSI_MODE_VIDEO)) {
int ret = exynos_dsi_register_te_irq(dsi);
if (ret)
return ret;
}
if (dsi->connector.dev)
drm_helper_hpd_irq_event(dsi->connector.dev);
return 0;
}
static int exynos_dsi_host_detach(struct mipi_dsi_host *host,
struct mipi_dsi_device *device)
{
struct exynos_dsi *dsi = host_to_dsi(host);
exynos_dsi_unregister_te_irq(dsi);
dsi->panel_node = NULL;
if (dsi->connector.dev)
drm_helper_hpd_irq_event(dsi->connector.dev);
return 0;
}
/* distinguish between short and long DSI packet types */
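/*
* e.g. MIPI_DSI_DCS_SHORT_WRITE (0x05) has a low nibble of 5 and is short,
* while MIPI_DSI_DCS_LONG_WRITE (0x39) has a low nibble of 9 and is long.
*/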
static bool exynos_dsi_is_short_dsi_type(u8 type)
{
return (type & 0x0f) <= 8;
}
static ssize_t exynos_dsi_host_transfer(struct mipi_dsi_host *host,
const struct mipi_dsi_msg *msg)
{
struct exynos_dsi *dsi = host_to_dsi(host);
struct exynos_dsi_transfer xfer;
int ret;
if (!(dsi->state & DSIM_STATE_ENABLED))
return -EINVAL;
if (!(dsi->state & DSIM_STATE_INITIALIZED)) {
ret = exynos_dsi_init(dsi);
if (ret)
return ret;
dsi->state |= DSIM_STATE_INITIALIZED;
}
if (msg->tx_len == 0)
return -EINVAL;
xfer.data_id = msg->type | (msg->channel << 6);
if (exynos_dsi_is_short_dsi_type(msg->type)) {
const char *tx_buf = msg->tx_buf;
if (msg->tx_len > 2)
return -EINVAL;
xfer.tx_len = 0;
xfer.data[0] = tx_buf[0];
xfer.data[1] = (msg->tx_len == 2) ? tx_buf[1] : 0;
} else {
xfer.tx_len = msg->tx_len;
xfer.data[0] = msg->tx_len & 0xff;
xfer.data[1] = msg->tx_len >> 8;
xfer.tx_payload = msg->tx_buf;
}
xfer.rx_len = msg->rx_len;
xfer.rx_payload = msg->rx_buf;
xfer.flags = msg->flags;
ret = exynos_dsi_transfer(dsi, &xfer);
return (ret < 0) ? ret : xfer.rx_done;
}
static const struct mipi_dsi_host_ops exynos_dsi_ops = {
.attach = exynos_dsi_host_attach,
.detach = exynos_dsi_host_detach,
.transfer = exynos_dsi_host_transfer,
};
static int exynos_dsi_poweron(struct exynos_dsi *dsi)
{
struct exynos_dsi_driver_data *driver_data = dsi->driver_data;
int ret, i;
ret = regulator_bulk_enable(ARRAY_SIZE(dsi->supplies), dsi->supplies);
if (ret < 0) {
dev_err(dsi->dev, "cannot enable regulators %d\n", ret);
return ret;
}
for (i = 0; i < driver_data->num_clks; i++) {
ret = clk_prepare_enable(dsi->clks[i]);
if (ret < 0)
goto err_clk;
}
ret = phy_power_on(dsi->phy);
if (ret < 0) {
dev_err(dsi->dev, "cannot enable phy %d\n", ret);
goto err_clk;
}
return 0;
err_clk:
while (--i > -1)
clk_disable_unprepare(dsi->clks[i]);
regulator_bulk_disable(ARRAY_SIZE(dsi->supplies), dsi->supplies);
return ret;
}
static void exynos_dsi_poweroff(struct exynos_dsi *dsi)
{
struct exynos_dsi_driver_data *driver_data = dsi->driver_data;
int ret, i;
usleep_range(10000, 20000);
if (dsi->state & DSIM_STATE_INITIALIZED) {
dsi->state &= ~DSIM_STATE_INITIALIZED;
exynos_dsi_disable_clock(dsi);
exynos_dsi_disable_irq(dsi);
}
dsi->state &= ~DSIM_STATE_CMD_LPM;
phy_power_off(dsi->phy);
for (i = driver_data->num_clks - 1; i > -1; i--)
clk_disable_unprepare(dsi->clks[i]);
ret = regulator_bulk_disable(ARRAY_SIZE(dsi->supplies), dsi->supplies);
if (ret < 0)
dev_err(dsi->dev, "cannot disable regulators %d\n", ret);
}
static void exynos_dsi_enable(struct drm_encoder *encoder)
{
struct exynos_dsi *dsi = encoder_to_dsi(encoder);
int ret;
if (dsi->state & DSIM_STATE_ENABLED)
return;
ret = exynos_dsi_poweron(dsi);
if (ret < 0)
return;
dsi->state |= DSIM_STATE_ENABLED;
ret = drm_panel_prepare(dsi->panel);
if (ret < 0) {
dsi->state &= ~DSIM_STATE_ENABLED;
exynos_dsi_poweroff(dsi);
return;
}
exynos_dsi_set_display_mode(dsi);
exynos_dsi_set_display_enable(dsi, true);
ret = drm_panel_enable(dsi->panel);
if (ret < 0) {
dsi->state &= ~DSIM_STATE_ENABLED;
exynos_dsi_set_display_enable(dsi, false);
drm_panel_unprepare(dsi->panel);
exynos_dsi_poweroff(dsi);
return;
}
dsi->state |= DSIM_STATE_VIDOUT_AVAILABLE;
}
static void exynos_dsi_disable(struct drm_encoder *encoder)
{
struct exynos_dsi *dsi = encoder_to_dsi(encoder);
if (!(dsi->state & DSIM_STATE_ENABLED))
return;
dsi->state &= ~DSIM_STATE_VIDOUT_AVAILABLE;
drm_panel_disable(dsi->panel);
exynos_dsi_set_display_enable(dsi, false);
drm_panel_unprepare(dsi->panel);
dsi->state &= ~DSIM_STATE_ENABLED;
exynos_dsi_poweroff(dsi);
}
static enum drm_connector_status
exynos_dsi_detect(struct drm_connector *connector, bool force)
{
struct exynos_dsi *dsi = connector_to_dsi(connector);
if (!dsi->panel) {
dsi->panel = of_drm_find_panel(dsi->panel_node);
if (dsi->panel)
drm_panel_attach(dsi->panel, &dsi->connector);
} else if (!dsi->panel_node) {
struct drm_encoder *encoder;
encoder = platform_get_drvdata(to_platform_device(dsi->dev));
exynos_dsi_disable(encoder);
drm_panel_detach(dsi->panel);
dsi->panel = NULL;
}
if (dsi->panel)
return connector_status_connected;
return connector_status_disconnected;
}
static void exynos_dsi_connector_destroy(struct drm_connector *connector)
{
drm_connector_unregister(connector);
drm_connector_cleanup(connector);
connector->dev = NULL;
}
static struct drm_connector_funcs exynos_dsi_connector_funcs = {
.dpms = drm_atomic_helper_connector_dpms,
.detect = exynos_dsi_detect,
.fill_modes = drm_helper_probe_single_connector_modes,
.destroy = exynos_dsi_connector_destroy,
.reset = drm_atomic_helper_connector_reset,
.atomic_duplicate_state = drm_atomic_helper_connector_duplicate_state,
.atomic_destroy_state = drm_atomic_helper_connector_destroy_state,
};
static int exynos_dsi_get_modes(struct drm_connector *connector)
{
struct exynos_dsi *dsi = connector_to_dsi(connector);
if (dsi->panel)
return dsi->panel->funcs->get_modes(dsi->panel);
return 0;
}
static struct drm_encoder *
exynos_dsi_best_encoder(struct drm_connector *connector)
{
struct exynos_dsi *dsi = connector_to_dsi(connector);
return &dsi->encoder;
}
static struct drm_connector_helper_funcs exynos_dsi_connector_helper_funcs = {
.get_modes = exynos_dsi_get_modes,
.best_encoder = exynos_dsi_best_encoder,
};
static int exynos_dsi_create_connector(struct drm_encoder *encoder)
{
struct exynos_dsi *dsi = encoder_to_dsi(encoder);
struct drm_connector *connector = &dsi->connector;
int ret;
connector->polled = DRM_CONNECTOR_POLL_HPD;
ret = drm_connector_init(encoder->dev, connector,
&exynos_dsi_connector_funcs,
DRM_MODE_CONNECTOR_DSI);
if (ret) {
DRM_ERROR("Failed to initialize connector with drm\n");
return ret;
}
drm_connector_helper_add(connector, &exynos_dsi_connector_helper_funcs);
drm_connector_register(connector);
drm_mode_connector_attach_encoder(connector, encoder);
return 0;
}
static bool exynos_dsi_mode_fixup(struct drm_encoder *encoder,
const struct drm_display_mode *mode,
struct drm_display_mode *adjusted_mode)
{
return true;
}
static void exynos_dsi_mode_set(struct drm_encoder *encoder,
struct drm_display_mode *mode,
struct drm_display_mode *adjusted_mode)
{
struct exynos_dsi *dsi = encoder_to_dsi(encoder);
struct videomode *vm = &dsi->vm;
struct drm_display_mode *m = adjusted_mode;
vm->hactive = m->hdisplay;
vm->vactive = m->vdisplay;
vm->vfront_porch = m->vsync_start - m->vdisplay;
vm->vback_porch = m->vtotal - m->vsync_end;
vm->vsync_len = m->vsync_end - m->vsync_start;
vm->hfront_porch = m->hsync_start - m->hdisplay;
vm->hback_porch = m->htotal - m->hsync_end;
vm->hsync_len = m->hsync_end - m->hsync_start;
}
static struct drm_encoder_helper_funcs exynos_dsi_encoder_helper_funcs = {
.mode_fixup = exynos_dsi_mode_fixup,
.mode_set = exynos_dsi_mode_set,
.enable = exynos_dsi_enable,
.disable = exynos_dsi_disable,
};
static struct drm_encoder_funcs exynos_dsi_encoder_funcs = {
.destroy = drm_encoder_cleanup,
};
MODULE_DEVICE_TABLE(of, exynos_dsi_of_match);
/* of_* functions will be removed after merge of of_graph patches */
static struct device_node *
of_get_child_by_name_reg(struct device_node *parent, const char *name, u32 reg)
{
struct device_node *np;
for_each_child_of_node(parent, np) {
u32 r;
if (!np->name || of_node_cmp(np->name, name))
continue;
if (of_property_read_u32(np, "reg", &r) < 0)
r = 0;
if (reg == r)
break;
}
return np;
}
static struct device_node *of_graph_get_port_by_reg(struct device_node *parent,
u32 reg)
{
struct device_node *ports, *port;
ports = of_get_child_by_name(parent, "ports");
if (ports)
parent = ports;
port = of_get_child_by_name_reg(parent, "port", reg);
of_node_put(ports);
return port;
}
static struct device_node *
of_graph_get_endpoint_by_reg(struct device_node *port, u32 reg)
{
return of_get_child_by_name_reg(port, "endpoint", reg);
}
static int exynos_dsi_of_read_u32(const struct device_node *np,
const char *propname, u32 *out_value)
{
int ret = of_property_read_u32(np, propname, out_value);
if (ret < 0)
pr_err("%s: failed to get '%s' property\n", np->full_name,
propname);
return ret;
}
enum {
DSI_PORT_IN,
DSI_PORT_OUT
};
static int exynos_dsi_parse_dt(struct exynos_dsi *dsi)
{
struct device *dev = dsi->dev;
struct device_node *node = dev->of_node;
struct device_node *port, *ep;
int ret;
ret = exynos_dsi_of_read_u32(node, "samsung,pll-clock-frequency",
&dsi->pll_clk_rate);
if (ret < 0)
return ret;
port = of_graph_get_port_by_reg(node, DSI_PORT_OUT);
if (!port) {
dev_err(dev, "no output port specified\n");
return -EINVAL;
}
ep = of_graph_get_endpoint_by_reg(port, 0);
of_node_put(port);
if (!ep) {
dev_err(dev, "no endpoint specified in output port\n");
return -EINVAL;
}
ret = exynos_dsi_of_read_u32(ep, "samsung,burst-clock-frequency",
&dsi->burst_clk_rate);
if (ret < 0)
goto end;
ret = exynos_dsi_of_read_u32(ep, "samsung,esc-clock-frequency",
&dsi->esc_clk_rate);
if (ret < 0)
goto end;
of_node_put(ep);
ep = of_graph_get_next_endpoint(node, NULL);
if (!ep) {
ret = -ENXIO;
goto end;
}
dsi->bridge_node = of_graph_get_remote_port_parent(ep);
if (!dsi->bridge_node) {
ret = -ENXIO;
goto end;
}
end:
of_node_put(ep);
return ret;
}
static int exynos_dsi_bind(struct device *dev, struct device *master,
void *data)
{
struct drm_encoder *encoder = dev_get_drvdata(dev);
struct exynos_dsi *dsi = encoder_to_dsi(encoder);
struct drm_device *drm_dev = data;
struct drm_bridge *bridge;
int ret;
ret = exynos_drm_crtc_get_pipe_from_type(drm_dev,
EXYNOS_DISPLAY_TYPE_LCD);
if (ret < 0)
return ret;
encoder->possible_crtcs = 1 << ret;
DRM_DEBUG_KMS("possible_crtcs = 0x%x\n", encoder->possible_crtcs);
drm_encoder_init(drm_dev, encoder, &exynos_dsi_encoder_funcs,
DRM_MODE_ENCODER_TMDS);
drm_encoder_helper_add(encoder, &exynos_dsi_encoder_helper_funcs);
ret = exynos_dsi_create_connector(encoder);
if (ret) {
DRM_ERROR("failed to create connector ret = %d\n", ret);
drm_encoder_cleanup(encoder);
return ret;
}
bridge = of_drm_find_bridge(dsi->bridge_node);
if (bridge) {
drm_bridge_attach(drm_dev, bridge);
}
return mipi_dsi_host_register(&dsi->dsi_host);
}
static void exynos_dsi_unbind(struct device *dev, struct device *master,
void *data)
{
struct drm_encoder *encoder = dev_get_drvdata(dev);
struct exynos_dsi *dsi = encoder_to_dsi(encoder);
exynos_dsi_disable(encoder);
mipi_dsi_host_unregister(&dsi->dsi_host);
}
static const struct component_ops exynos_dsi_component_ops = {
.bind = exynos_dsi_bind,
.unbind = exynos_dsi_unbind,
};
static int exynos_dsi_probe(struct platform_device *pdev)
{
struct device *dev = &pdev->dev;
struct resource *res;
struct exynos_dsi *dsi;
int ret, i;
dsi = devm_kzalloc(dev, sizeof(*dsi), GFP_KERNEL);
if (!dsi)
return -ENOMEM;
/* Initialize to an invalid GPIO so gpio_is_valid() checks fail until it is set */
dsi->te_gpio = -ENOENT;
init_completion(&dsi->completed);
spin_lock_init(&dsi->transfer_lock);
INIT_LIST_HEAD(&dsi->transfer_list);
dsi->dsi_host.ops = &exynos_dsi_ops;
dsi->dsi_host.dev = dev;
dsi->dev = dev;
dsi->driver_data = exynos_dsi_get_driver_data(pdev);
ret = exynos_dsi_parse_dt(dsi);
if (ret)
return ret;
dsi->supplies[0].supply = "vddcore";
dsi->supplies[1].supply = "vddio";
ret = devm_regulator_bulk_get(dev, ARRAY_SIZE(dsi->supplies),
dsi->supplies);
if (ret) {
dev_info(dev, "failed to get regulators: %d\n", ret);
return -EPROBE_DEFER;
}
dsi->clks = devm_kzalloc(dev,
sizeof(*dsi->clks) * dsi->driver_data->num_clks,
GFP_KERNEL);
if (!dsi->clks)
return -ENOMEM;
for (i = 0; i < dsi->driver_data->num_clks; i++) {
dsi->clks[i] = devm_clk_get(dev, clk_names[i]);
if (IS_ERR(dsi->clks[i])) {
if (strcmp(clk_names[i], "sclk_mipi") == 0) {
strcpy(clk_names[i], OLD_SCLK_MIPI_CLK_NAME);
i--;
continue;
}
dev_info(dev, "failed to get the clock: %s\n",
clk_names[i]);
return PTR_ERR(dsi->clks[i]);
}
}
res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
dsi->reg_base = devm_ioremap_resource(dev, res);
if (IS_ERR(dsi->reg_base)) {
dev_err(dev, "failed to remap io region\n");
return PTR_ERR(dsi->reg_base);
}
dsi->phy = devm_phy_get(dev, "dsim");
if (IS_ERR(dsi->phy)) {
dev_info(dev, "failed to get dsim phy\n");
return PTR_ERR(dsi->phy);
}
dsi->irq = platform_get_irq(pdev, 0);
if (dsi->irq < 0) {
dev_err(dev, "failed to request dsi irq resource\n");
return dsi->irq;
}
irq_set_status_flags(dsi->irq, IRQ_NOAUTOEN);
ret = devm_request_threaded_irq(dev, dsi->irq, NULL,
exynos_dsi_irq, IRQF_ONESHOT,
dev_name(dev), dsi);
if (ret) {
dev_err(dev, "failed to request dsi irq\n");
return ret;
}
platform_set_drvdata(pdev, &dsi->encoder);
return component_add(dev, &exynos_dsi_component_ops);
}
static int exynos_dsi_remove(struct platform_device *pdev)
{
component_del(&pdev->dev, &exynos_dsi_component_ops);
return 0;
}
struct platform_driver dsi_driver = {
.probe = exynos_dsi_probe,
.remove = exynos_dsi_remove,
.driver = {
.name = "exynos-dsi",
.owner = THIS_MODULE,
.of_match_table = exynos_dsi_of_match,
},
};
MODULE_AUTHOR("Tomasz Figa <t.figa@samsung.com>");
MODULE_AUTHOR("Andrzej Hajda <a.hajda@samsung.com>");
MODULE_DESCRIPTION("Samsung SoC MIPI DSI Master");
MODULE_LICENSE("GPL v2");