Commit 959ed538 authored by Dave Airlie's avatar Dave Airlie

Merge tag 'drm-xilinx-dpsub-20200718' of git://linuxtv.org/pinchartl/media into drm-next

Xilinx ZynqMP DisplayPort Subsystem driver
Signed-off-by: default avatarDave Airlie <airlied@redhat.com>

From: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
Link: https://patchwork.freedesktop.org/patch/msgid/20200718001755.GA5962@pendragon.ideasonboard.com
parents 2646699f d76271d2
# SPDX-License-Identifier: (GPL-2.0-only OR BSD-2-Clause)
%YAML 1.2
---
$id: http://devicetree.org/schemas/display/xlnx/xlnx,zynqmp-dpsub.yaml#
$schema: http://devicetree.org/meta-schemas/core.yaml#
title: Xilinx ZynqMP DisplayPort Subsystem
description: |
The DisplayPort subsystem of Xilinx ZynqMP (Zynq UltraScale+ MPSoC)
implements the display and audio pipelines based on the DisplayPort v1.2
standard. The subsystem includes multiple functional blocks as below:
+------------------------------------------------------------+
+--------+ | +----------------+ +-----------+ |
| DPDMA | --->| | --> | Video | Video +-------------+ |
| 4x vid | | | | | Rendering | -+--> | | | +------+
| 2x aud | | | Audio/Video | --> | Pipeline | | | DisplayPort |---> | PHY0 |
+--------+ | | Buffer Manager | +-----------+ | | Source | | +------+
| | and STC | +-----------+ | | Controller | | +------+
Live Video --->| | --> | Audio | Audio | |---> | PHY1 |
| | | | Mixer | --+-> | | | +------+
Live Audio --->| | --> | | || +-------------+ |
| +----------------+ +-----------+ || |
+---------------------------------------||-------------------+
vv
Blended Video and
Mixed Audio to PL
The Buffer Manager interacts with external interfaces such as DMA engines or
live audio/video streams from the programmable logic. The Video Rendering
Pipeline blends the video and graphics layers and performs colorspace
conversion. The Audio Mixer mixes the incoming audio streams. The DisplayPort
Source Controller handles the DisplayPort protocol and connects to external
PHYs.
The subsystem supports 2 video and 2 audio streams, and various pixel formats
and depths up to 4K@30 resolution.
Please refer to "Zynq UltraScale+ Device Technical Reference Manual"
(https://www.xilinx.com/support/documentation/user_guides/ug1085-zynq-ultrascale-trm.pdf)
for more details.
maintainers:
- Laurent Pinchart <laurent.pinchart@ideasonboard.com>
properties:
compatible:
const: xlnx,zynqmp-dpsub-1.7
reg:
maxItems: 4
reg-names:
items:
- const: dp
- const: blend
- const: av_buf
- const: aud
interrupts:
maxItems: 1
clocks:
description:
The APB clock and at least one video clock are mandatory, the audio clock
is optional.
minItems: 2
maxItems: 4
items:
- description: dp_apb_clk is the APB clock
- description: dp_aud_clk is the Audio clock
- description:
dp_vtc_pixel_clk_in is the non-live video clock (from Processing
System)
- description:
dp_live_video_in_clk is the live video clock (from Programmable
Logic)
clock-names:
oneOf:
- minItems: 2
maxItems: 3
items:
- const: dp_apb_clk
- enum: [ dp_vtc_pixel_clk_in, dp_live_video_in_clk ]
- enum: [ dp_vtc_pixel_clk_in, dp_live_video_in_clk ]
- minItems: 3
maxItems: 4
items:
- const: dp_apb_clk
- const: dp_aud_clk
- enum: [ dp_vtc_pixel_clk_in, dp_live_video_in_clk ]
- enum: [ dp_vtc_pixel_clk_in, dp_live_video_in_clk ]
power-domains:
maxItems: 1
resets:
maxItems: 1
dmas:
maxItems: 4
items:
- description: Video layer, plane 0 (RGB or luma)
- description: Video layer, plane 1 (U/V or U)
- description: Video layer, plane 2 (V)
- description: Graphics layer
dma-names:
items:
- const: vid0
- const: vid1
- const: vid2
- const: gfx0
phys:
description: PHYs for the DP data lanes
minItems: 1
maxItems: 2
phy-names:
minItems: 1
maxItems: 2
items:
- const: dp-phy0
- const: dp-phy1
required:
- compatible
- reg
- reg-names
- interrupts
- clocks
- clock-names
- power-domains
- resets
- dmas
- dma-names
- phys
- phy-names
additionalProperties: false
examples:
- |
#include <dt-bindings/phy/phy.h>
#include <dt-bindings/reset/xlnx-zynqmp-resets.h>
display@fd4a0000 {
compatible = "xlnx,zynqmp-dpsub-1.7";
reg = <0x0 0xfd4a0000 0x0 0x1000>,
<0x0 0xfd4aa000 0x0 0x1000>,
<0x0 0xfd4ab000 0x0 0x1000>,
<0x0 0xfd4ac000 0x0 0x1000>;
reg-names = "dp", "blend", "av_buf", "aud";
interrupts = <0 119 4>;
interrupt-parent = <&gic>;
clock-names = "dp_apb_clk", "dp_aud_clk", "dp_live_video_in_clk";
clocks = <&dp_aclk>, <&clkc 17>, <&si570_1>;
power-domains = <&pd_dp>;
resets = <&reset ZYNQMP_RESET_DP>;
dma-names = "vid0", "vid1", "vid2", "gfx0";
dmas = <&xlnx_dpdma 0>,
<&xlnx_dpdma 1>,
<&xlnx_dpdma 2>,
<&xlnx_dpdma 3>;
phys = <&psgtr 1 PHY_TYPE_DP 0 3 27000000>,
<&psgtr 0 PHY_TYPE_DP 1 3 27000000>;
phy-names = "dp-phy0", "dp-phy1";
};
...
# SPDX-License-Identifier: (GPL-2.0-only OR BSD-2-Clause)
%YAML 1.2
---
$id: http://devicetree.org/schemas/dma/xilinx/xlnx,zynqmp-dpdma.yaml#
$schema: http://devicetree.org/meta-schemas/core.yaml#
title: Xilinx ZynqMP DisplayPort DMA Controller Device Tree Bindings
description: |
These bindings describe the DMA engine included in the Xilinx ZynqMP
DisplayPort Subsystem. The DMA engine supports up to 6 DMA channels (3
channels for a video stream, 1 channel for a graphics stream, and 2 channels
for an audio stream).
maintainers:
- Laurent Pinchart <laurent.pinchart@ideasonboard.com>
allOf:
- $ref: "../dma-controller.yaml#"
properties:
"#dma-cells":
const: 1
description: |
The cell is the DMA channel ID (see dt-bindings/dma/xlnx-zynqmp-dpdma.h
for a list of channel IDs).
compatible:
const: xlnx,zynqmp-dpdma
reg:
maxItems: 1
interrupts:
maxItems: 1
clocks:
description: The AXI clock
maxItems: 1
clock-names:
const: axi_clk
required:
- "#dma-cells"
- compatible
- reg
- interrupts
- clocks
- clock-names
additionalProperties: false
examples:
- |
#include <dt-bindings/interrupt-controller/arm-gic.h>
dma: dma-controller@fd4c0000 {
compatible = "xlnx,zynqmp-dpdma";
reg = <0x0 0xfd4c0000 0x0 0x1000>;
interrupts = <GIC_SPI 122 IRQ_TYPE_LEVEL_HIGH>;
interrupt-parent = <&gic>;
clocks = <&dpdma_clk>;
clock-names = "axi_clk";
#dma-cells = <1>;
};
...
......@@ -86,7 +86,9 @@ The details of these operations are:
- interleaved_dma: This is common to Slave as well as M2M clients. For slave
address of devices' fifo could be already known to the driver.
Various types of operations could be expressed by setting
appropriate values to the 'dma_interleaved_template' members.
appropriate values to the 'dma_interleaved_template' members. Cyclic
interleaved DMA transfers are also possible if supported by the channel by
setting the DMA_PREP_REPEAT transfer flag.
A non-NULL return of this transfer API represents a "descriptor" for
the given transaction.
......
......@@ -239,6 +239,27 @@ Currently, the types available are:
want to transfer a portion of uncompressed data directly to the
display to print it
- DMA_REPEAT
- The device supports repeated transfers. A repeated transfer, indicated by
the DMA_PREP_REPEAT transfer flag, is similar to a cyclic transfer in that
it gets automatically repeated when it ends, but can additionally be
replaced by the client.
- This feature is limited to interleaved transfers, this flag should thus not
be set if the DMA_INTERLEAVE flag isn't set. This limitation is based on
the current needs of DMA clients, support for additional transfer types
should be added in the future if and when the need arises.
- DMA_LOAD_EOT
- The device supports replacing repeated transfers at end of transfer (EOT)
by queuing a new transfer with the DMA_PREP_LOAD_EOT flag set.
- Support for replacing a currently running transfer at another point (such
as end of burst instead of end of transfer) will be added in the future
based on DMA clients' needs, if and when the need arises.
These various types will also affect how the source and destination
addresses change over time.
......@@ -531,6 +552,34 @@ DMA_CTRL_REUSE
writes for which the descriptor should be in different format from
normal data descriptors.
- DMA_PREP_REPEAT
- If set, the transfer will be automatically repeated when it ends until a
new transfer is queued on the same channel with the DMA_PREP_LOAD_EOT flag.
If the next transfer to be queued on the channel does not have the
DMA_PREP_LOAD_EOT flag set, the current transfer will be repeated until the
client terminates all transfers.
- This flag is only supported if the channel reports the DMA_REPEAT
capability.
- DMA_PREP_LOAD_EOT
- If set, the transfer will replace the transfer currently being executed at
the end of the transfer.
- This is the default behaviour for non-repeated transfers, specifying
DMA_PREP_LOAD_EOT for non-repeated transfers will thus make no difference.
- When using repeated transfers, DMA clients will usually need to set the
DMA_PREP_LOAD_EOT flag on all transfers, otherwise the channel will keep
repeating the last repeated transfer and ignore the new transfers being
queued. Failure to set DMA_PREP_LOAD_EOT will appear as if the channel was
stuck on the previous transfer.
- This flag is only supported if the channel reports the DMA_LOAD_EOT
capability.
General Design Notes
====================
......
......@@ -5839,6 +5839,15 @@ T: git git://anongit.freedesktop.org/drm/drm-misc
F: Documentation/gpu/xen-front.rst
F: drivers/gpu/drm/xen/
DRM DRIVERS FOR XILINX
M: Hyun Kwon <hyun.kwon@xilinx.com>
M: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
L: dri-devel@lists.freedesktop.org
S: Maintained
T: git git://anongit.freedesktop.org/drm/drm-misc
F: Documentation/devicetree/bindings/display/xlnx/
F: drivers/gpu/drm/xlnx/
DRM DRIVERS FOR ZTE ZX
M: Shawn Guo <shawnguo@kernel.org>
L: dri-devel@lists.freedesktop.org
......@@ -18853,6 +18862,15 @@ F: Documentation/devicetree/bindings/media/xilinx/
F: drivers/media/platform/xilinx/
F: include/uapi/linux/xilinx-v4l2-controls.h
XILINX ZYNQMP DPDMA DRIVER
M: Hyun Kwon <hyun.kwon@xilinx.com>
M: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
L: dmaengine@vger.kernel.org
S: Supported
F: Documentation/devicetree/bindings/dma/xilinx/xlnx,zynqmp-dpdma.yaml
F: drivers/dma/xilinx/xilinx_dpdma.c
F: include/dt-bindings/dma/xlnx-zynqmp-dpdma.h
XILLYBUS DRIVER
M: Eli Billauer <eli.billauer@gmail.com>
L: linux-kernel@vger.kernel.org
......
......@@ -707,6 +707,16 @@ config XILINX_ZYNQMP_DMA
help
Enable support for Xilinx ZynqMP DMA controller.
config XILINX_ZYNQMP_DPDMA
tristate "Xilinx DPDMA Engine"
select DMA_ENGINE
select DMA_VIRTUAL_CHANNELS
help
Enable support for Xilinx ZynqMP DisplayPort DMA. Choose this option
if you have a Xilinx ZynqMP SoC with a DisplayPort subsystem. The
driver provides the dmaengine required by the DisplayPort subsystem
display driver.
config ZX_DMA
tristate "ZTE ZX DMA support"
depends on ARCH_ZX || COMPILE_TEST
......
# SPDX-License-Identifier: GPL-2.0-only
obj-$(CONFIG_XILINX_DMA) += xilinx_dma.o
obj-$(CONFIG_XILINX_ZYNQMP_DMA) += zynqmp_dma.o
obj-$(CONFIG_XILINX_ZYNQMP_DPDMA) += xilinx_dpdma.o
// SPDX-License-Identifier: GPL-2.0
/*
* Xilinx ZynqMP DPDMA Engine driver
*
* Copyright (C) 2015 - 2020 Xilinx, Inc.
*
* Author: Hyun Woo Kwon <hyun.kwon@xilinx.com>
*/
#include <linux/bitfield.h>
#include <linux/bits.h>
#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/dmaengine.h>
#include <linux/dmapool.h>
#include <linux/interrupt.h>
#include <linux/module.h>
#include <linux/of.h>
#include <linux/of_dma.h>
#include <linux/platform_device.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/spinlock.h>
#include <linux/wait.h>
#include <dt-bindings/dma/xlnx-zynqmp-dpdma.h>
#include "../dmaengine.h"
#include "../virt-dma.h"
/* DPDMA registers */
#define XILINX_DPDMA_ERR_CTRL				0x000
#define XILINX_DPDMA_ISR				0x004
#define XILINX_DPDMA_IMR				0x008
#define XILINX_DPDMA_IEN				0x00c
#define XILINX_DPDMA_IDS				0x010
/* Per-channel interrupt bits: parameter n is the channel ID (0..5). */
#define XILINX_DPDMA_INTR_DESC_DONE(n)			BIT((n) + 0)
#define XILINX_DPDMA_INTR_DESC_DONE_MASK		GENMASK(5, 0)
#define XILINX_DPDMA_INTR_NO_OSTAND(n)			BIT((n) + 6)
#define XILINX_DPDMA_INTR_NO_OSTAND_MASK		GENMASK(11, 6)
#define XILINX_DPDMA_INTR_AXI_ERR(n)			BIT((n) + 12)
#define XILINX_DPDMA_INTR_AXI_ERR_MASK			GENMASK(17, 12)
/*
 * NOTE(review): the per-channel bit base (16) is inconsistent with the
 * companion mask GENMASK(23, 18) below; channel 0 would land inside the
 * AXI_ERR range. This looks like it should be BIT((n) + 18) — confirm
 * against the ZynqMP TRM before relying on this macro.
 */
#define XILINX_DPDMA_INTR_DESC_ERR(n)			BIT((n) + 16)
#define XILINX_DPDMA_INTR_DESC_ERR_MASK			GENMASK(23, 18)
#define XILINX_DPDMA_INTR_WR_CMD_FIFO_FULL		BIT(24)
#define XILINX_DPDMA_INTR_WR_DATA_FIFO_FULL		BIT(25)
#define XILINX_DPDMA_INTR_AXI_4K_CROSS			BIT(26)
#define XILINX_DPDMA_INTR_VSYNC				BIT(27)
#define XILINX_DPDMA_INTR_CHAN_ERR_MASK			0x00041000
#define XILINX_DPDMA_INTR_CHAN_ERR			0x00fff000
#define XILINX_DPDMA_INTR_GLOBAL_ERR			0x07000000
#define XILINX_DPDMA_INTR_ERR_ALL			0x07fff000
#define XILINX_DPDMA_INTR_CHAN_MASK			0x00041041
#define XILINX_DPDMA_INTR_GLOBAL_MASK			0x0f000000
#define XILINX_DPDMA_INTR_ALL				0x0fffffff
#define XILINX_DPDMA_EISR				0x014
#define XILINX_DPDMA_EIMR				0x018
#define XILINX_DPDMA_EIEN				0x01c
#define XILINX_DPDMA_EIDS				0x020
/* Per-channel error interrupt bits: parameter n is the channel ID (0..5). */
#define XILINX_DPDMA_EINTR_INV_APB			BIT(0)
#define XILINX_DPDMA_EINTR_RD_AXI_ERR(n)		BIT((n) + 1)
#define XILINX_DPDMA_EINTR_RD_AXI_ERR_MASK		GENMASK(6, 1)
#define XILINX_DPDMA_EINTR_PRE_ERR(n)			BIT((n) + 7)
#define XILINX_DPDMA_EINTR_PRE_ERR_MASK			GENMASK(12, 7)
#define XILINX_DPDMA_EINTR_CRC_ERR(n)			BIT((n) + 13)
#define XILINX_DPDMA_EINTR_CRC_ERR_MASK			GENMASK(18, 13)
#define XILINX_DPDMA_EINTR_WR_AXI_ERR(n)		BIT((n) + 19)
#define XILINX_DPDMA_EINTR_WR_AXI_ERR_MASK		GENMASK(24, 19)
#define XILINX_DPDMA_EINTR_DESC_DONE_ERR(n)		BIT((n) + 25)
#define XILINX_DPDMA_EINTR_DESC_DONE_ERR_MASK		GENMASK(30, 25)
/*
 * Fixed from BIT(32): the EISR is a 32-bit register, so bit 32 does not
 * exist. The global error mask below (0x80000001) covers bits 31 and 0,
 * and DESC_DONE_ERR ends at bit 30, which places the read command FIFO
 * full flag in bit 31.
 */
#define XILINX_DPDMA_EINTR_RD_CMD_FIFO_FULL		BIT(31)
#define XILINX_DPDMA_EINTR_CHAN_ERR_MASK		0x02082082
#define XILINX_DPDMA_EINTR_CHAN_ERR			0x7ffffffe
#define XILINX_DPDMA_EINTR_GLOBAL_ERR			0x80000001
#define XILINX_DPDMA_EINTR_ALL				0xffffffff
#define XILINX_DPDMA_CNTL				0x100
#define XILINX_DPDMA_GBL				0x104
/* Trigger/retrigger take a bitmask of channel IDs, not a single channel. */
#define XILINX_DPDMA_GBL_TRIG_MASK(n)			((n) << 0)
#define XILINX_DPDMA_GBL_RETRIG_MASK(n)			((n) << 6)
#define XILINX_DPDMA_ALC0_CNTL				0x108
#define XILINX_DPDMA_ALC0_STATUS			0x10c
#define XILINX_DPDMA_ALC0_MAX				0x110
#define XILINX_DPDMA_ALC0_MIN				0x114
#define XILINX_DPDMA_ALC0_ACC				0x118
#define XILINX_DPDMA_ALC0_ACC_TRAN			0x11c
#define XILINX_DPDMA_ALC1_CNTL				0x120
#define XILINX_DPDMA_ALC1_STATUS			0x124
#define XILINX_DPDMA_ALC1_MAX				0x128
#define XILINX_DPDMA_ALC1_MIN				0x12c
#define XILINX_DPDMA_ALC1_ACC				0x130
#define XILINX_DPDMA_ALC1_ACC_TRAN			0x134
/* Channel register */
#define XILINX_DPDMA_CH_BASE				0x200
#define XILINX_DPDMA_CH_OFFSET				0x100
#define XILINX_DPDMA_CH_DESC_START_ADDRE		0x000
#define XILINX_DPDMA_CH_DESC_START_ADDRE_MASK		GENMASK(15, 0)
#define XILINX_DPDMA_CH_DESC_START_ADDR			0x004
#define XILINX_DPDMA_CH_DESC_NEXT_ADDRE			0x008
#define XILINX_DPDMA_CH_DESC_NEXT_ADDR			0x00c
#define XILINX_DPDMA_CH_PYLD_CUR_ADDRE			0x010
#define XILINX_DPDMA_CH_PYLD_CUR_ADDR			0x014
#define XILINX_DPDMA_CH_CNTL				0x018
#define XILINX_DPDMA_CH_CNTL_ENABLE			BIT(0)
#define XILINX_DPDMA_CH_CNTL_PAUSE			BIT(1)
#define XILINX_DPDMA_CH_CNTL_QOS_DSCR_WR_MASK		GENMASK(5, 2)
#define XILINX_DPDMA_CH_CNTL_QOS_DSCR_RD_MASK		GENMASK(9, 6)
#define XILINX_DPDMA_CH_CNTL_QOS_DATA_RD_MASK		GENMASK(13, 10)
#define XILINX_DPDMA_CH_CNTL_QOS_VID_CLASS		11
#define XILINX_DPDMA_CH_STATUS				0x01c
#define XILINX_DPDMA_CH_STATUS_OTRAN_CNT_MASK		GENMASK(24, 21)
#define XILINX_DPDMA_CH_VDO				0x020
#define XILINX_DPDMA_CH_PYLD_SZ				0x024
#define XILINX_DPDMA_CH_DESC_ID				0x028
/* DPDMA descriptor fields */
/*
 * NOTE(review): the 'PREEMBLE' spelling is kept as-is — presumably it
 * mirrors the hardware documentation's field name; verify against the TRM
 * before renaming.
 */
#define XILINX_DPDMA_DESC_CONTROL_PREEMBLE		0xa5
#define XILINX_DPDMA_DESC_CONTROL_COMPLETE_INTR		BIT(8)
#define XILINX_DPDMA_DESC_CONTROL_DESC_UPDATE		BIT(9)
#define XILINX_DPDMA_DESC_CONTROL_IGNORE_DONE		BIT(10)
#define XILINX_DPDMA_DESC_CONTROL_FRAG_MODE		BIT(18)
#define XILINX_DPDMA_DESC_CONTROL_LAST			BIT(19)
#define XILINX_DPDMA_DESC_CONTROL_ENABLE_CRC		BIT(20)
#define XILINX_DPDMA_DESC_CONTROL_LAST_OF_FRAME		BIT(21)
#define XILINX_DPDMA_DESC_ID_MASK			GENMASK(15, 0)
#define XILINX_DPDMA_DESC_HSIZE_STRIDE_HSIZE_MASK	GENMASK(17, 0)
#define XILINX_DPDMA_DESC_HSIZE_STRIDE_STRIDE_MASK	GENMASK(31, 18)
#define XILINX_DPDMA_DESC_ADDR_EXT_NEXT_ADDR_MASK	GENMASK(15, 0)
#define XILINX_DPDMA_DESC_ADDR_EXT_SRC_ADDR_MASK	GENMASK(31, 16)
#define XILINX_DPDMA_ALIGN_BYTES			256
#define XILINX_DPDMA_LINESIZE_ALIGN_BITS		128
#define XILINX_DPDMA_NUM_CHAN				6
struct xilinx_dpdma_chan;
/**
 * struct xilinx_dpdma_hw_desc - DPDMA hardware descriptor
 * @control: control configuration field
 * @desc_id: descriptor ID
 * @xfer_size: transfer size
 * @hsize_stride: horizontal size and stride
 * @timestamp_lsb: LSB of time stamp
 * @timestamp_msb: MSB of time stamp
 * @addr_ext: upper 16 bit of 48 bit address (next_desc and src_addr)
 * @next_desc: next descriptor 32 bit address
 * @src_addr: payload source address (1st page, 32 LSB)
 * @addr_ext_23: payload source address (2nd and 3rd pages, 16 LSBs)
 * @addr_ext_45: payload source address (4th and 5th pages, 16 LSBs)
 * @src_addr2: payload source address (2nd page, 32 LSB)
 * @src_addr3: payload source address (3rd page, 32 LSB)
 * @src_addr4: payload source address (4th page, 32 LSB)
 * @src_addr5: payload source address (5th page, 32 LSB)
 * @crc: descriptor CRC
 *
 * This layout is consumed directly by the hardware: do not reorder fields.
 * src_addr2..src_addr5 must stay contiguous, as
 * xilinx_dpdma_sw_desc_set_dma_addrs() indexes them as an array.
 */
struct xilinx_dpdma_hw_desc {
	u32 control;
	u32 desc_id;
	u32 xfer_size;
	u32 hsize_stride;
	u32 timestamp_lsb;
	u32 timestamp_msb;
	u32 addr_ext;
	u32 next_desc;
	u32 src_addr;
	u32 addr_ext_23;
	u32 addr_ext_45;
	u32 src_addr2;
	u32 src_addr3;
	u32 src_addr4;
	u32 src_addr5;
	u32 crc;
} __aligned(XILINX_DPDMA_ALIGN_BYTES);
/**
 * struct xilinx_dpdma_sw_desc - DPDMA software descriptor
 * @hw: DPDMA hardware descriptor (kept first so that @dma_addr, the pool
 *      address of this structure, is also the address of the hw descriptor)
 * @node: list node for software descriptors
 * @dma_addr: DMA address of the software descriptor
 */
struct xilinx_dpdma_sw_desc {
	struct xilinx_dpdma_hw_desc hw;
	struct list_head node;
	dma_addr_t dma_addr;
};
/**
 * struct xilinx_dpdma_tx_desc - DPDMA transaction descriptor
 * @vdesc: virtual DMA descriptor
 * @chan: DMA channel
 * @descriptors: list of software descriptors (struct xilinx_dpdma_sw_desc)
 * @error: an error has been detected with this descriptor
 */
struct xilinx_dpdma_tx_desc {
	struct virt_dma_desc vdesc;
	struct xilinx_dpdma_chan *chan;
	struct list_head descriptors;
	bool error;
};

/* Convert an embedded virt_dma_desc back to its enclosing tx descriptor. */
#define to_dpdma_tx_desc(_desc) \
	container_of(_desc, struct xilinx_dpdma_tx_desc, vdesc)
/**
 * struct xilinx_dpdma_chan - DPDMA channel
 * @vchan: virtual DMA channel
 * @reg: register base address
 * @id: channel ID
 * @wait_to_stop: queue to wait for outstanding transactions before stopping
 * @running: true if the channel is running
 * @first_frame: flag for the first frame of stream
 * @video_group: flag if multi-channel operation is needed for video channels
 * @lock: lock to access struct xilinx_dpdma_chan
 * @desc_pool: descriptor allocation pool
 * @err_task: error IRQ bottom half handler
 * @desc.pending: Descriptor scheduled to the hardware, pending execution
 * @desc.active: Descriptor being executed by the hardware
 * @xdev: DPDMA device
 */
struct xilinx_dpdma_chan {
	struct virt_dma_chan vchan;
	void __iomem *reg;
	unsigned int id;
	wait_queue_head_t wait_to_stop;
	bool running;
	bool first_frame;
	bool video_group;
	spinlock_t lock; /* lock to access struct xilinx_dpdma_chan */
	struct dma_pool *desc_pool;
	struct tasklet_struct err_task;
	struct {
		struct xilinx_dpdma_tx_desc *pending;
		struct xilinx_dpdma_tx_desc *active;
	} desc;
	struct xilinx_dpdma_device *xdev;
};

/* Convert an embedded virt_dma_chan back to its enclosing DPDMA channel. */
#define to_xilinx_chan(_chan) \
	container_of(_chan, struct xilinx_dpdma_chan, vchan.chan)
/**
 * struct xilinx_dpdma_device - DPDMA device
 * @common: generic dma device structure
 * @reg: register base address
 * @dev: generic device structure
 * @irq: the interrupt number
 * @axi_clk: axi clock
 * @chan: DPDMA channels, indexed by channel ID (XILINX_DPDMA_NUM_CHAN total)
 * @ext_addr: flag for 64 bit system (48 bit addressing)
 */
struct xilinx_dpdma_device {
	struct dma_device common;
	void __iomem *reg;
	struct device *dev;
	int irq;
	struct clk *axi_clk;
	struct xilinx_dpdma_chan *chan[XILINX_DPDMA_NUM_CHAN];
	bool ext_addr;
};
/* -----------------------------------------------------------------------------
* I/O Accessors
*/
/* Read the 32-bit register at @offset bytes from @base. */
static inline u32 dpdma_read(void __iomem *base, u32 offset)
{
	void __iomem *addr = base + offset;

	return ioread32(addr);
}
/* Write @val to the 32-bit register at @offset bytes from @base. */
static inline void dpdma_write(void __iomem *base, u32 offset, u32 val)
{
	void __iomem *addr = base + offset;

	iowrite32(val, addr);
}
/* Clear the @clr bits in the register at @offset (read-modify-write). */
static inline void dpdma_clr(void __iomem *base, u32 offset, u32 clr)
{
	u32 val = dpdma_read(base, offset);

	val &= ~clr;
	dpdma_write(base, offset, val);
}
/* Set the @set bits in the register at @offset (read-modify-write). */
static inline void dpdma_set(void __iomem *base, u32 offset, u32 set)
{
	u32 val = dpdma_read(base, offset);

	val |= set;
	dpdma_write(base, offset, val);
}
/* -----------------------------------------------------------------------------
* Descriptor Operations
*/
/**
 * xilinx_dpdma_sw_desc_set_dma_addrs - Set DMA addresses in the descriptor
 * @xdev: DPDMA device, used for the 48-bit extended addressing flag
 * @sw_desc: The software descriptor in which to set DMA addresses
 * @prev: The previous descriptor
 * @dma_addr: array of dma addresses
 * @num_src_addr: number of addresses in @dma_addr
 *
 * Set all the DMA addresses in the hardware descriptor corresponding to
 * @sw_desc from @dma_addr. If a previous descriptor is specified in @prev,
 * its next descriptor DMA address is set to the DMA address of @sw_desc.
 * @prev may be identical to @sw_desc for cyclic transfers.
 */
static void xilinx_dpdma_sw_desc_set_dma_addrs(struct xilinx_dpdma_device *xdev,
					       struct xilinx_dpdma_sw_desc *sw_desc,
					       struct xilinx_dpdma_sw_desc *prev,
					       dma_addr_t dma_addr[],
					       unsigned int num_src_addr)
{
	struct xilinx_dpdma_hw_desc *hw_desc = &sw_desc->hw;
	unsigned int i;

	/* First address: 32 LSBs in src_addr, bits 47:32 in addr_ext. */
	hw_desc->src_addr = lower_32_bits(dma_addr[0]);
	if (xdev->ext_addr)
		hw_desc->addr_ext |=
			FIELD_PREP(XILINX_DPDMA_DESC_ADDR_EXT_SRC_ADDR_MASK,
				   upper_32_bits(dma_addr[0]));

	/*
	 * Remaining addresses: src_addr2..src_addr5 are contiguous in the
	 * hardware descriptor, so they can be indexed as an array. Their
	 * 16-bit address extensions are packed in pairs into addr_ext_23
	 * and addr_ext_45 (low half for even pages, high half for odd).
	 */
	for (i = 1; i < num_src_addr; i++) {
		u32 *addr = &hw_desc->src_addr2;

		addr[i-1] = lower_32_bits(dma_addr[i]);

		if (xdev->ext_addr) {
			u32 *addr_ext = &hw_desc->addr_ext_23;
			u32 addr_msb;

			addr_msb = upper_32_bits(dma_addr[i]) & GENMASK(15, 0);
			addr_msb <<= 16 * ((i - 1) % 2);
			addr_ext[(i - 1) / 2] |= addr_msb;
		}
	}

	if (!prev)
		return;

	/* Chain @prev to this descriptor (self-chaining when prev == sw_desc). */
	prev->hw.next_desc = lower_32_bits(sw_desc->dma_addr);
	if (xdev->ext_addr)
		prev->hw.addr_ext |=
			FIELD_PREP(XILINX_DPDMA_DESC_ADDR_EXT_NEXT_ADDR_MASK,
				   upper_32_bits(sw_desc->dma_addr));
}
/**
 * xilinx_dpdma_chan_alloc_sw_desc - Allocate a software descriptor
 * @chan: DPDMA channel
 *
 * Allocate a zeroed software descriptor from the channel's descriptor pool
 * and record its DMA address inside it.
 *
 * Return: a software descriptor or NULL.
 */
static struct xilinx_dpdma_sw_desc *
xilinx_dpdma_chan_alloc_sw_desc(struct xilinx_dpdma_chan *chan)
{
	dma_addr_t dma_addr;
	struct xilinx_dpdma_sw_desc *sw_desc;

	sw_desc = dma_pool_zalloc(chan->desc_pool, GFP_ATOMIC, &dma_addr);
	if (sw_desc)
		sw_desc->dma_addr = dma_addr;

	return sw_desc;
}
/**
 * xilinx_dpdma_chan_free_sw_desc - Free a software descriptor
 * @chan: DPDMA channel
 * @sw_desc: software descriptor to free
 *
 * Free a software descriptor from the channel's descriptor pool. @sw_desc
 * must not be accessed after this call returns.
 */
static void
xilinx_dpdma_chan_free_sw_desc(struct xilinx_dpdma_chan *chan,
			       struct xilinx_dpdma_sw_desc *sw_desc)
{
	dma_pool_free(chan->desc_pool, sw_desc, sw_desc->dma_addr);
}
/**
 * xilinx_dpdma_chan_dump_tx_desc - Dump a tx descriptor
 * @chan: DPDMA channel
 * @tx_desc: tx descriptor to dump
 *
 * Dump contents of a tx descriptor. Debug helper only: all output goes
 * through dev_dbg(), so it is a no-op unless debug logging is enabled.
 */
static void xilinx_dpdma_chan_dump_tx_desc(struct xilinx_dpdma_chan *chan,
					   struct xilinx_dpdma_tx_desc *tx_desc)
{
	struct xilinx_dpdma_sw_desc *sw_desc;
	struct device *dev = chan->xdev->dev;
	unsigned int i = 0;

	dev_dbg(dev, "------- TX descriptor dump start -------\n");
	dev_dbg(dev, "------- channel ID = %d -------\n", chan->id);

	/* Walk the chain and print every hardware descriptor field. */
	list_for_each_entry(sw_desc, &tx_desc->descriptors, node) {
		struct xilinx_dpdma_hw_desc *hw_desc = &sw_desc->hw;

		dev_dbg(dev, "------- HW descriptor %d -------\n", i++);
		dev_dbg(dev, "descriptor DMA addr: %pad\n", &sw_desc->dma_addr);
		dev_dbg(dev, "control: 0x%08x\n", hw_desc->control);
		dev_dbg(dev, "desc_id: 0x%08x\n", hw_desc->desc_id);
		dev_dbg(dev, "xfer_size: 0x%08x\n", hw_desc->xfer_size);
		dev_dbg(dev, "hsize_stride: 0x%08x\n", hw_desc->hsize_stride);
		dev_dbg(dev, "timestamp_lsb: 0x%08x\n", hw_desc->timestamp_lsb);
		dev_dbg(dev, "timestamp_msb: 0x%08x\n", hw_desc->timestamp_msb);
		dev_dbg(dev, "addr_ext: 0x%08x\n", hw_desc->addr_ext);
		dev_dbg(dev, "next_desc: 0x%08x\n", hw_desc->next_desc);
		dev_dbg(dev, "src_addr: 0x%08x\n", hw_desc->src_addr);
		dev_dbg(dev, "addr_ext_23: 0x%08x\n", hw_desc->addr_ext_23);
		dev_dbg(dev, "addr_ext_45: 0x%08x\n", hw_desc->addr_ext_45);
		dev_dbg(dev, "src_addr2: 0x%08x\n", hw_desc->src_addr2);
		dev_dbg(dev, "src_addr3: 0x%08x\n", hw_desc->src_addr3);
		dev_dbg(dev, "src_addr4: 0x%08x\n", hw_desc->src_addr4);
		dev_dbg(dev, "src_addr5: 0x%08x\n", hw_desc->src_addr5);
		dev_dbg(dev, "crc: 0x%08x\n", hw_desc->crc);
	}

	dev_dbg(dev, "------- TX descriptor dump end -------\n");
}
/**
* xilinx_dpdma_chan_alloc_tx_desc - Allocate a transaction descriptor
* @chan: DPDMA channel
*
* Allocate a tx descriptor.
*
* Return: a tx descriptor or NULL.
*/
static struct xilinx_dpdma_tx_desc *
xilinx_dpdma_chan_alloc_tx_desc(struct xilinx_dpdma_chan *chan)
{
struct xilinx_dpdma_tx_desc *tx_desc;
tx_desc = kzalloc(sizeof(*tx_desc), GFP_NOWAIT);
if (!tx_desc)
return NULL;
INIT_LIST_HEAD(&tx_desc->descriptors);
tx_desc->chan = chan;
tx_desc->error = false;
return tx_desc;
}
/**
 * xilinx_dpdma_chan_free_tx_desc - Free a virtual DMA descriptor
 * @vdesc: virtual DMA descriptor
 *
 * Free the virtual DMA descriptor @vdesc, returning every software
 * descriptor it owns to the channel's pool. A NULL @vdesc is a no-op.
 */
static void xilinx_dpdma_chan_free_tx_desc(struct virt_dma_desc *vdesc)
{
	struct xilinx_dpdma_tx_desc *tx_desc;
	struct xilinx_dpdma_sw_desc *sw_desc, *tmp;

	if (!vdesc)
		return;

	tx_desc = to_dpdma_tx_desc(vdesc);

	list_for_each_entry_safe(sw_desc, tmp, &tx_desc->descriptors, node) {
		list_del(&sw_desc->node);
		xilinx_dpdma_chan_free_sw_desc(tx_desc->chan, sw_desc);
	}

	kfree(tx_desc);
}
/**
* xilinx_dpdma_chan_prep_interleaved_dma - Prepare an interleaved dma
* descriptor
* @chan: DPDMA channel
* @xt: dma interleaved template
*
* Prepare a tx descriptor including internal software/hardware descriptors
* based on @xt.
*
* Return: A DPDMA TX descriptor on success, or NULL.
*/
static struct xilinx_dpdma_tx_desc *
xilinx_dpdma_chan_prep_interleaved_dma(struct xilinx_dpdma_chan *chan,
struct dma_interleaved_template *xt)
{
struct xilinx_dpdma_tx_desc *tx_desc;
struct xilinx_dpdma_sw_desc *sw_desc;
struct xilinx_dpdma_hw_desc *hw_desc;
size_t hsize = xt->sgl[0].size;
size_t stride = hsize + xt->sgl[0].icg;
if (!IS_ALIGNED(xt->src_start, XILINX_DPDMA_ALIGN_BYTES)) {
dev_err(chan->xdev->dev, "buffer should be aligned at %d B\n",
XILINX_DPDMA_ALIGN_BYTES);
return NULL;
}
tx_desc = xilinx_dpdma_chan_alloc_tx_desc(chan);
if (!tx_desc)
return NULL;
sw_desc = xilinx_dpdma_chan_alloc_sw_desc(chan);
if (!sw_desc) {
xilinx_dpdma_chan_free_tx_desc(&tx_desc->vdesc);
return NULL;
}
xilinx_dpdma_sw_desc_set_dma_addrs(chan->xdev, sw_desc, sw_desc,
&xt->src_start, 1);
hw_desc = &sw_desc->hw;
hsize = ALIGN(hsize, XILINX_DPDMA_LINESIZE_ALIGN_BITS / 8);
hw_desc->xfer_size = hsize * xt->numf;
hw_desc->hsize_stride =
FIELD_PREP(XILINX_DPDMA_DESC_HSIZE_STRIDE_HSIZE_MASK, hsize) |
FIELD_PREP(XILINX_DPDMA_DESC_HSIZE_STRIDE_STRIDE_MASK,
stride / 16);
hw_desc->control |= XILINX_DPDMA_DESC_CONTROL_PREEMBLE;
hw_desc->control |= XILINX_DPDMA_DESC_CONTROL_COMPLETE_INTR;
hw_desc->control |= XILINX_DPDMA_DESC_CONTROL_IGNORE_DONE;
hw_desc->control |= XILINX_DPDMA_DESC_CONTROL_LAST_OF_FRAME;
list_add_tail(&sw_desc->node, &tx_desc->descriptors);
return tx_desc;
}
/* -----------------------------------------------------------------------------
* DPDMA Channel Operations
*/
/**
 * xilinx_dpdma_chan_enable - Enable the channel
 * @chan: DPDMA channel
 *
 * Enable the channel and its interrupts. Set the QoS values for video class.
 */
static void xilinx_dpdma_chan_enable(struct xilinx_dpdma_chan *chan)
{
	u32 reg;

	/* Unmask this channel's interrupts plus the global (VSYNC) bits. */
	reg = (XILINX_DPDMA_INTR_CHAN_MASK << chan->id)
	      | XILINX_DPDMA_INTR_GLOBAL_MASK;
	dpdma_write(chan->xdev->reg, XILINX_DPDMA_IEN, reg);
	/*
	 * Unmask this channel's error interrupts.
	 * NOTE(review): the global term uses XILINX_DPDMA_INTR_GLOBAL_ERR
	 * (an ISR-space constant) in an EIEN write — presumably intentional,
	 * but XILINX_DPDMA_EINTR_GLOBAL_ERR would match the register; verify
	 * against the TRM.
	 */
	reg = (XILINX_DPDMA_EINTR_CHAN_ERR_MASK << chan->id)
	      | XILINX_DPDMA_INTR_GLOBAL_ERR;
	dpdma_write(chan->xdev->reg, XILINX_DPDMA_EIEN, reg);

	/* Enable the channel with video-class QoS on all its AXI traffic. */
	reg = XILINX_DPDMA_CH_CNTL_ENABLE
	      | FIELD_PREP(XILINX_DPDMA_CH_CNTL_QOS_DSCR_WR_MASK,
			   XILINX_DPDMA_CH_CNTL_QOS_VID_CLASS)
	      | FIELD_PREP(XILINX_DPDMA_CH_CNTL_QOS_DSCR_RD_MASK,
			   XILINX_DPDMA_CH_CNTL_QOS_VID_CLASS)
	      | FIELD_PREP(XILINX_DPDMA_CH_CNTL_QOS_DATA_RD_MASK,
			   XILINX_DPDMA_CH_CNTL_QOS_VID_CLASS);
	dpdma_set(chan->reg, XILINX_DPDMA_CH_CNTL, reg);
}
/**
* xilinx_dpdma_chan_disable - Disable the channel
* @chan: DPDMA channel
*
* Disable the channel and its interrupts.
*/
static void xilinx_dpdma_chan_disable(struct xilinx_dpdma_chan *chan)
{
u32 reg;
reg = XILINX_DPDMA_INTR_CHAN_MASK << chan->id;
dpdma_write(chan->xdev->reg, XILINX_DPDMA_IEN, reg);
reg = XILINX_DPDMA_EINTR_CHAN_ERR_MASK << chan->id;
dpdma_write(chan->xdev->reg, XILINX_DPDMA_EIEN, reg);
dpdma_clr(chan->reg, XILINX_DPDMA_CH_CNTL, XILINX_DPDMA_CH_CNTL_ENABLE);
}
/**
 * xilinx_dpdma_chan_pause - Pause the channel
 * @chan: DPDMA channel
 *
 * Pause the channel.
 */
static void xilinx_dpdma_chan_pause(struct xilinx_dpdma_chan *chan)
{
	/* Setting the PAUSE bit holds the channel without disabling it. */
	dpdma_set(chan->reg, XILINX_DPDMA_CH_CNTL, XILINX_DPDMA_CH_CNTL_PAUSE);
}
/**
 * xilinx_dpdma_chan_unpause - Unpause the channel
 * @chan: DPDMA channel
 *
 * Unpause the channel.
 */
static void xilinx_dpdma_chan_unpause(struct xilinx_dpdma_chan *chan)
{
	/* Clearing the PAUSE bit resumes a paused channel. */
	dpdma_clr(chan->reg, XILINX_DPDMA_CH_CNTL, XILINX_DPDMA_CH_CNTL_PAUSE);
}
/*
 * Return the bitmask of video-group channels to trigger, or 0 if any
 * member of the group is not running yet (the group must start together).
 */
static u32 xilinx_dpdma_chan_video_group_ready(struct xilinx_dpdma_chan *chan)
{
	struct xilinx_dpdma_device *xdev = chan->xdev;
	unsigned int id;
	u32 ready = 0;

	for (id = ZYNQMP_DPDMA_VIDEO0; id <= ZYNQMP_DPDMA_VIDEO2; id++) {
		struct xilinx_dpdma_chan *video_chan = xdev->chan[id];

		if (!video_chan->video_group)
			continue;

		if (!video_chan->running)
			return 0;

		ready |= BIT(id);
	}

	return ready;
}
/**
 * xilinx_dpdma_chan_queue_transfer - Queue the next transfer
 * @chan: DPDMA channel
 *
 * Queue the next descriptor, if any, to the hardware. If the channel is
 * stopped, start it first. Otherwise retrigger it with the next descriptor.
 * Must be called with @chan->lock held.
 */
static void xilinx_dpdma_chan_queue_transfer(struct xilinx_dpdma_chan *chan)
{
	struct xilinx_dpdma_device *xdev = chan->xdev;
	struct xilinx_dpdma_sw_desc *sw_desc;
	struct xilinx_dpdma_tx_desc *desc;
	struct virt_dma_desc *vdesc;
	u32 reg, channels;

	lockdep_assert_held(&chan->lock);

	/* A descriptor is already queued to the hardware; nothing to do. */
	if (chan->desc.pending)
		return;

	if (!chan->running) {
		xilinx_dpdma_chan_unpause(chan);
		xilinx_dpdma_chan_enable(chan);
		chan->first_frame = true;
		chan->running = true;
	}

	/*
	 * Video-group channels must be triggered together; bail out until
	 * every member of the group is running.
	 */
	if (chan->video_group)
		channels = xilinx_dpdma_chan_video_group_ready(chan);
	else
		channels = BIT(chan->id);
	if (!channels)
		return;

	vdesc = vchan_next_desc(&chan->vchan);
	if (!vdesc)
		return;

	desc = to_dpdma_tx_desc(vdesc);
	chan->desc.pending = desc;
	list_del(&desc->vdesc.node);

	/*
	 * Assign the cookie to descriptors in this transaction. Only 16 bit
	 * will be used, but it should be enough.
	 */
	list_for_each_entry(sw_desc, &desc->descriptors, node)
		sw_desc->hw.desc_id = desc->vdesc.tx.cookie;

	/* Program the start address of the descriptor chain (48-bit split). */
	sw_desc = list_first_entry(&desc->descriptors,
				   struct xilinx_dpdma_sw_desc, node);
	dpdma_write(chan->reg, XILINX_DPDMA_CH_DESC_START_ADDR,
		    lower_32_bits(sw_desc->dma_addr));
	if (xdev->ext_addr)
		dpdma_write(chan->reg, XILINX_DPDMA_CH_DESC_START_ADDRE,
			    FIELD_PREP(XILINX_DPDMA_CH_DESC_START_ADDRE_MASK,
				       upper_32_bits(sw_desc->dma_addr)));

	/* First frame uses TRIG; subsequent descriptors use RETRIG. */
	if (chan->first_frame)
		reg = XILINX_DPDMA_GBL_TRIG_MASK(channels);
	else
		reg = XILINX_DPDMA_GBL_RETRIG_MASK(channels);
	chan->first_frame = false;

	dpdma_write(xdev->reg, XILINX_DPDMA_GBL, reg);
}
/**
 * xilinx_dpdma_chan_ostand - Number of outstanding transactions
 * @chan: DPDMA channel
 *
 * Read and return the number of outstanding transactions from register.
 *
 * Return: Number of outstanding transactions from the status register.
 */
static u32 xilinx_dpdma_chan_ostand(struct xilinx_dpdma_chan *chan)
{
	/* The count lives in bits 24:21 of the channel status register. */
	return FIELD_GET(XILINX_DPDMA_CH_STATUS_OTRAN_CNT_MASK,
			 dpdma_read(chan->reg, XILINX_DPDMA_CH_STATUS));
}
/**
 * xilinx_dpdma_chan_notify_no_ostand - Notify no outstanding transaction event
 * @chan: DPDMA channel
 *
 * Wake up waiters once the channel has no outstanding transactions, so they
 * can stop the channel safely. Called when the 'no outstanding' interrupt
 * fires; that interrupt is masked here and must be re-enabled by the waiter.
 * If the status register still reports outstanding transactions, the
 * interrupt stays enabled and nothing is done.
 *
 * Return: 0 on success. On failure, -EWOULDBLOCK if there's still outstanding
 * transaction(s).
 */
static int xilinx_dpdma_chan_notify_no_ostand(struct xilinx_dpdma_chan *chan)
{
	u32 pending = xilinx_dpdma_chan_ostand(chan);

	if (pending) {
		dev_dbg(chan->xdev->dev, "%d outstanding transactions\n",
			pending);
		return -EWOULDBLOCK;
	}

	/* Disable 'no outstanding' interrupt */
	dpdma_write(chan->xdev->reg, XILINX_DPDMA_IDS,
		    XILINX_DPDMA_INTR_NO_OSTAND(chan->id));
	wake_up(&chan->wait_to_stop);

	return 0;
}
/**
 * xilinx_dpdma_chan_wait_no_ostand - Wait for the no outstanding irq
 * @chan: DPDMA channel
 *
 * Wait for the no outstanding transaction interrupt. This function can sleep
 * for up to 50ms.
 *
 * Return: 0 on success. On failure, -ETIMEDOUT on timeout, or the error code
 * from wait_event_interruptible_timeout().
 */
static int xilinx_dpdma_chan_wait_no_ostand(struct xilinx_dpdma_chan *chan)
{
	int ret;

	/* Wait for a no outstanding transaction interrupt up to 50msec. */
	ret = wait_event_interruptible_timeout(chan->wait_to_stop,
					       !xilinx_dpdma_chan_ostand(chan),
					       msecs_to_jiffies(50));
	if (ret > 0) {
		/*
		 * Success: re-enable the 'no outstanding' interrupt that the
		 * IRQ handler masked in xilinx_dpdma_chan_notify_no_ostand().
		 */
		dpdma_write(chan->xdev->reg, XILINX_DPDMA_IEN,
			    XILINX_DPDMA_INTR_NO_OSTAND(chan->id));
		return 0;
	}

	dev_err(chan->xdev->dev, "not ready to stop: %d trans\n",
		xilinx_dpdma_chan_ostand(chan));

	/* ret == 0 means the wait timed out; negative means interrupted. */
	if (ret == 0)
		return -ETIMEDOUT;

	return ret;
}
/**
 * xilinx_dpdma_chan_poll_no_ostand - Poll the outstanding transaction status
 * @chan: DPDMA channel
 *
 * Poll the outstanding transaction status, and return when there's no
 * outstanding transaction. This function can be used in the interrupt context
 * or where atomicity is required. Calling thread may wait more than 50ms.
 *
 * Return: 0 on success, or -ETIMEDOUT.
 */
static int xilinx_dpdma_chan_poll_no_ostand(struct xilinx_dpdma_chan *chan)
{
	u32 cnt, loop = 50000;

	/* Poll at least for 50ms (20 fps). */
	do {
		cnt = xilinx_dpdma_chan_ostand(chan);
		udelay(1);
	} while (--loop && cnt);

	/*
	 * Test the transaction count, not the loop counter: the previous
	 * `loop-- > 0` post-decrement wrapped the u32 counter to 0xFFFFFFFF on
	 * timeout, so `if (loop)` reported success exactly when polling had
	 * timed out (and reported a spurious timeout when the count reached
	 * zero on the last iteration).
	 */
	if (!cnt) {
		/* Re-enable the 'no outstanding' interrupt. */
		dpdma_write(chan->xdev->reg, XILINX_DPDMA_IEN,
			    XILINX_DPDMA_INTR_NO_OSTAND(chan->id));
		return 0;
	}

	dev_err(chan->xdev->dev, "not ready to stop: %d trans\n",
		xilinx_dpdma_chan_ostand(chan));

	return -ETIMEDOUT;
}
/**
 * xilinx_dpdma_chan_stop - Stop the channel
 * @chan: DPDMA channel
 *
 * Stop a previously paused channel by first waiting for completion of all
 * outstanding transaction and then disabling the channel.
 *
 * Return: 0 on success, or -ETIMEDOUT if the channel failed to stop.
 */
static int xilinx_dpdma_chan_stop(struct xilinx_dpdma_chan *chan)
{
	unsigned long flags;
	int ret;

	/* Sleepable wait: the channel must drain before it can be disabled. */
	ret = xilinx_dpdma_chan_wait_no_ostand(chan);
	if (ret)
		return ret;

	/* Disable and mark stopped atomically with respect to the IRQ path. */
	spin_lock_irqsave(&chan->lock, flags);
	xilinx_dpdma_chan_disable(chan);
	chan->running = false;
	spin_unlock_irqrestore(&chan->lock, flags);

	return 0;
}
/**
 * xilinx_dpdma_chan_done_irq - Handle hardware descriptor completion
 * @chan: DPDMA channel
 *
 * Handle completion of the currently active descriptor (@chan->desc.active). As
 * we currently support cyclic transfers only, this just invokes the cyclic
 * callback. The descriptor will be completed at the VSYNC interrupt when a new
 * descriptor replaces it.
 */
static void xilinx_dpdma_chan_done_irq(struct xilinx_dpdma_chan *chan)
{
	struct xilinx_dpdma_tx_desc *active;
	unsigned long flags;

	spin_lock_irqsave(&chan->lock, flags);

	/*
	 * Sample the active descriptor under the channel lock: the VSYNC IRQ
	 * handler updates chan->desc.active with the lock held, so reading it
	 * before acquiring the lock (as the code previously did) races with
	 * that update.
	 */
	active = chan->desc.active;
	if (active)
		vchan_cyclic_callback(&active->vdesc);
	else
		dev_warn(chan->xdev->dev,
			 "DONE IRQ with no active descriptor!\n");

	spin_unlock_irqrestore(&chan->lock, flags);
}
/**
 * xilinx_dpdma_chan_vsync_irq - Handle hardware descriptor scheduling
 * @chan: DPDMA channel
 *
 * At VSYNC the active descriptor may have been replaced by the pending
 * descriptor. Detect this through the DESC_ID and perform appropriate
 * bookkeeping.
 */
static void xilinx_dpdma_chan_vsync_irq(struct xilinx_dpdma_chan *chan)
{
	struct xilinx_dpdma_tx_desc *pending;
	struct xilinx_dpdma_sw_desc *sw_desc;
	unsigned long flags;
	u32 desc_id;

	spin_lock_irqsave(&chan->lock, flags);

	/* Nothing to do if the channel is stopped or has no pending desc. */
	pending = chan->desc.pending;
	if (!chan->running || !pending)
		goto out;

	/* DESC_ID reflects the descriptor the hardware is now fetching. */
	desc_id = dpdma_read(chan->reg, XILINX_DPDMA_CH_DESC_ID);

	/* If the retrigger raced with vsync, retry at the next frame. */
	sw_desc = list_first_entry(&pending->descriptors,
				   struct xilinx_dpdma_sw_desc, node);
	if (sw_desc->hw.desc_id != desc_id)
		goto out;

	/*
	 * Complete the active descriptor, if any, promote the pending
	 * descriptor to active, and queue the next transfer, if any.
	 */
	if (chan->desc.active)
		vchan_cookie_complete(&chan->desc.active->vdesc);
	chan->desc.active = pending;
	chan->desc.pending = NULL;

	xilinx_dpdma_chan_queue_transfer(chan);

out:
	spin_unlock_irqrestore(&chan->lock, flags);
}
/**
 * xilinx_dpdma_chan_err - Detect any channel error
 * @chan: DPDMA channel
 * @isr: masked Interrupt Status Register
 * @eisr: Error Interrupt Status Register
 *
 * A NULL or stopped channel never reports an error.
 *
 * Return: true if any channel error occurs, or false otherwise.
 */
static bool
xilinx_dpdma_chan_err(struct xilinx_dpdma_chan *chan, u32 isr, u32 eisr)
{
	u32 isr_mask, eisr_mask;

	if (!chan || !chan->running)
		return false;

	/* Per-channel error bits are the generic masks shifted by channel ID. */
	isr_mask = XILINX_DPDMA_INTR_CHAN_ERR_MASK << chan->id;
	eisr_mask = XILINX_DPDMA_EINTR_CHAN_ERR_MASK << chan->id;

	return (isr & isr_mask) || (eisr & eisr_mask);
}
/**
 * xilinx_dpdma_chan_handle_err - DPDMA channel error handling
 * @chan: DPDMA channel
 *
 * This function is called when any channel error or any global error occurs.
 * The function disables the paused channel by errors and determines
 * if the current active descriptor can be rescheduled depending on
 * the descriptor status.
 */
static void xilinx_dpdma_chan_handle_err(struct xilinx_dpdma_chan *chan)
{
	struct xilinx_dpdma_device *xdev = chan->xdev;
	struct xilinx_dpdma_tx_desc *active;
	unsigned long flags;

	spin_lock_irqsave(&chan->lock, flags);

	/* Log the hardware's view of the current descriptor and payload. */
	dev_dbg(xdev->dev, "cur desc addr = 0x%04x%08x\n",
		dpdma_read(chan->reg, XILINX_DPDMA_CH_DESC_START_ADDRE),
		dpdma_read(chan->reg, XILINX_DPDMA_CH_DESC_START_ADDR));
	dev_dbg(xdev->dev, "cur payload addr = 0x%04x%08x\n",
		dpdma_read(chan->reg, XILINX_DPDMA_CH_PYLD_CUR_ADDRE),
		dpdma_read(chan->reg, XILINX_DPDMA_CH_PYLD_CUR_ADDR));

	xilinx_dpdma_chan_disable(chan);
	chan->running = false;

	if (!chan->desc.active)
		goto out_unlock;

	/* Take ownership of the active descriptor for error recovery. */
	active = chan->desc.active;
	chan->desc.active = NULL;

	xilinx_dpdma_chan_dump_tx_desc(chan, active);

	/* error was set on a previous pass; the descriptor keeps failing. */
	if (active->error)
		dev_dbg(xdev->dev, "repeated error on desc\n");

	/* Reschedule if there's no new descriptor */
	if (!chan->desc.pending &&
	    list_empty(&chan->vchan.desc_issued)) {
		active->error = true;
		list_add_tail(&active->vdesc.node,
			      &chan->vchan.desc_issued);
	} else {
		/* A newer descriptor supersedes it; drop the failed one. */
		xilinx_dpdma_chan_free_tx_desc(&active->vdesc);
	}

out_unlock:
	spin_unlock_irqrestore(&chan->lock, flags);
}
/* -----------------------------------------------------------------------------
* DMA Engine Operations
*/
/**
 * xilinx_dpdma_prep_interleaved_dma - Prepare an interleaved DMA transfer
 * @dchan: DMA channel
 * @xt: Interleaved transfer template
 * @flags: Transfer flags
 *
 * Prepare a transaction descriptor for an interleaved transfer. Only
 * mem-to-device transfers with a non-zero frame count and chunk size are
 * accepted, and the caller must request a repeated (cyclic) transfer with
 * load-EOT semantics, as those are the only modes the driver supports.
 *
 * Return: The prepared descriptor, or NULL on invalid parameters or
 * allocation failure.
 */
static struct dma_async_tx_descriptor *
xilinx_dpdma_prep_interleaved_dma(struct dma_chan *dchan,
				  struct dma_interleaved_template *xt,
				  unsigned long flags)
{
	struct xilinx_dpdma_chan *chan = to_xilinx_chan(dchan);
	struct xilinx_dpdma_tx_desc *desc;

	if (xt->dir != DMA_MEM_TO_DEV)
		return NULL;

	if (!xt->numf || !xt->sgl[0].size)
		return NULL;

	/* Only cyclic (repeated, load-EOT) transfers are supported. */
	if (!(flags & DMA_PREP_REPEAT) || !(flags & DMA_PREP_LOAD_EOT))
		return NULL;

	desc = xilinx_dpdma_chan_prep_interleaved_dma(chan, xt);
	if (!desc)
		return NULL;

	vchan_tx_prep(&chan->vchan, &desc->vdesc, flags | DMA_CTRL_ACK);

	return &desc->vdesc.tx;
}
/**
 * xilinx_dpdma_alloc_chan_resources - Allocate resources for the channel
 * @dchan: DMA channel
 *
 * Create the DMA pool from which the channel's software descriptors will be
 * allocated.
 *
 * Return: 0 on success, or -ENOMEM if failed to allocate a pool.
 */
static int xilinx_dpdma_alloc_chan_resources(struct dma_chan *dchan)
{
	struct xilinx_dpdma_chan *chan = to_xilinx_chan(dchan);
	struct dma_pool *pool;

	pool = dma_pool_create(dev_name(chan->xdev->dev), chan->xdev->dev,
			       sizeof(struct xilinx_dpdma_sw_desc),
			       __alignof__(struct xilinx_dpdma_sw_desc), 0);
	if (!pool) {
		dev_err(chan->xdev->dev,
			"failed to allocate a descriptor pool\n");
		return -ENOMEM;
	}

	chan->desc_pool = pool;

	return 0;
}
/**
 * xilinx_dpdma_free_chan_resources - Free all resources for the channel
 * @dchan: DMA channel
 *
 * Free resources associated with the virtual DMA channel, and destroy the
 * descriptor pool.
 */
static void xilinx_dpdma_free_chan_resources(struct dma_chan *dchan)
{
	struct xilinx_dpdma_chan *chan = to_xilinx_chan(dchan);

	vchan_free_chan_resources(&chan->vchan);

	/* NULL the pointer so a later alloc/free cycle starts clean. */
	dma_pool_destroy(chan->desc_pool);
	chan->desc_pool = NULL;
}
/**
 * xilinx_dpdma_issue_pending - Issue pending transactions to the hardware
 * @dchan: DMA channel
 *
 * Move all issued descriptors to the hardware queue and start the transfer if
 * the channel has work pending.
 */
static void xilinx_dpdma_issue_pending(struct dma_chan *dchan)
{
	struct xilinx_dpdma_chan *chan = to_xilinx_chan(dchan);
	unsigned long flags;

	spin_lock_irqsave(&chan->vchan.lock, flags);

	if (vchan_issue_pending(&chan->vchan))
		xilinx_dpdma_chan_queue_transfer(chan);

	spin_unlock_irqrestore(&chan->vchan.lock, flags);
}
/**
 * xilinx_dpdma_config - Configure the channel
 * @dchan: DMA channel
 * @config: Slave configuration
 *
 * Return: Always 0.
 */
static int xilinx_dpdma_config(struct dma_chan *dchan,
			       struct dma_slave_config *config)
{
	struct xilinx_dpdma_chan *chan = to_xilinx_chan(dchan);
	unsigned long flags;

	/*
	 * The destination address doesn't need to be specified as the DPDMA is
	 * hardwired to the destination (the DP controller). The transfer
	 * width, burst size and port window size are thus meaningless, they're
	 * fixed both on the DPDMA side and on the DP controller side.
	 */

	spin_lock_irqsave(&chan->lock, flags);

	/*
	 * Abuse the slave_id to indicate that the channel is part of a video
	 * group. Only the three video channels can belong to the group.
	 */
	if (chan->id >= ZYNQMP_DPDMA_VIDEO0 && chan->id <= ZYNQMP_DPDMA_VIDEO2)
		chan->video_group = config->slave_id != 0;

	spin_unlock_irqrestore(&chan->lock, flags);

	return 0;
}
/* Pause the channel. Always succeeds. */
static int xilinx_dpdma_pause(struct dma_chan *dchan)
{
	xilinx_dpdma_chan_pause(to_xilinx_chan(dchan));

	return 0;
}
/* Resume a previously paused channel. Always succeeds. */
static int xilinx_dpdma_resume(struct dma_chan *dchan)
{
	xilinx_dpdma_chan_unpause(to_xilinx_chan(dchan));

	return 0;
}
/**
 * xilinx_dpdma_terminate_all - Terminate the channel and descriptors
 * @dchan: DMA channel
 *
 * Pause the channel without waiting for ongoing transfers to complete. Waiting
 * for completion is performed by xilinx_dpdma_synchronize() that will disable
 * the channel to complete the stop.
 *
 * All the descriptors associated with the channel that are guaranteed not to
 * be touched by the hardware are freed. The pending and active descriptors are
 * not touched, and will be freed either upon completion, or by
 * xilinx_dpdma_synchronize().
 *
 * Return: 0 on success, or -ETIMEDOUT if the channel failed to stop.
 */
static int xilinx_dpdma_terminate_all(struct dma_chan *dchan)
{
	struct xilinx_dpdma_chan *chan = to_xilinx_chan(dchan);
	struct xilinx_dpdma_device *xdev = chan->xdev;
	LIST_HEAD(descriptors);
	unsigned long flags;
	unsigned int i;

	/* Pause the channel (including the whole video group if applicable). */
	if (chan->video_group) {
		for (i = ZYNQMP_DPDMA_VIDEO0; i <= ZYNQMP_DPDMA_VIDEO2; i++) {
			if (xdev->chan[i]->video_group &&
			    xdev->chan[i]->running) {
				xilinx_dpdma_chan_pause(xdev->chan[i]);
				xdev->chan[i]->video_group = false;
			}
		}
	} else {
		xilinx_dpdma_chan_pause(chan);
	}

	/* Gather all the descriptors we can free and free them. */
	spin_lock_irqsave(&chan->vchan.lock, flags);
	vchan_get_all_descriptors(&chan->vchan, &descriptors);
	spin_unlock_irqrestore(&chan->vchan.lock, flags);

	vchan_dma_desc_free_list(&chan->vchan, &descriptors);

	return 0;
}
/**
 * xilinx_dpdma_synchronize - Synchronize callback execution
 * @dchan: DMA channel
 *
 * Synchronizing callback execution ensures that all previously issued
 * transfers have completed and all associated callbacks have been called and
 * have returned.
 *
 * This function waits for the DMA channel to stop. It assumes it has been
 * paused by a previous call to dmaengine_terminate_async(), and that no new
 * pending descriptors have been issued with dma_async_issue_pending(). The
 * behaviour is undefined otherwise.
 */
static void xilinx_dpdma_synchronize(struct dma_chan *dchan)
{
	struct xilinx_dpdma_chan *chan = to_xilinx_chan(dchan);
	unsigned long flags;

	/* Wait for the hardware to drain, then disable the channel. */
	xilinx_dpdma_chan_stop(chan);

	/*
	 * With the channel stopped, the pending and active descriptors can no
	 * longer complete through the IRQ path; terminate them explicitly.
	 */
	spin_lock_irqsave(&chan->vchan.lock, flags);
	if (chan->desc.pending) {
		vchan_terminate_vdesc(&chan->desc.pending->vdesc);
		chan->desc.pending = NULL;
	}
	if (chan->desc.active) {
		vchan_terminate_vdesc(&chan->desc.active->vdesc);
		chan->desc.active = NULL;
	}
	spin_unlock_irqrestore(&chan->vchan.lock, flags);

	vchan_synchronize(&chan->vchan);
}
/* -----------------------------------------------------------------------------
* Interrupt and Tasklet Handling
*/
/**
 * xilinx_dpdma_err - Detect any global error
 * @isr: Interrupt Status Register
 * @eisr: Error Interrupt Status Register
 *
 * Return: True if any global error occurs, or false otherwise.
 */
static bool xilinx_dpdma_err(u32 isr, u32 eisr)
{
	return (isr & XILINX_DPDMA_INTR_GLOBAL_ERR) ||
	       (eisr & XILINX_DPDMA_EINTR_GLOBAL_ERR);
}
/**
 * xilinx_dpdma_handle_err_irq - Handle DPDMA error interrupt
 * @xdev: DPDMA device
 * @isr: masked Interrupt Status Register
 * @eisr: Error Interrupt Status Register
 *
 * Handle if any error occurs based on @isr and @eisr. This function disables
 * corresponding error interrupts, and those should be re-enabled once handling
 * is done.
 */
static void xilinx_dpdma_handle_err_irq(struct xilinx_dpdma_device *xdev,
					u32 isr, u32 eisr)
{
	bool err = xilinx_dpdma_err(isr, eisr);
	unsigned int i;

	dev_dbg_ratelimited(xdev->dev,
			    "error irq: isr = 0x%08x, eisr = 0x%08x\n",
			    isr, eisr);

	/* Disable channel error interrupts until errors are handled. */
	dpdma_write(xdev->reg, XILINX_DPDMA_IDS,
		    isr & ~XILINX_DPDMA_INTR_GLOBAL_ERR);
	dpdma_write(xdev->reg, XILINX_DPDMA_EIDS,
		    eisr & ~XILINX_DPDMA_EINTR_GLOBAL_ERR);

	/*
	 * A global error affects every channel; a channel error schedules
	 * only that channel's error tasklet.
	 */
	for (i = 0; i < ARRAY_SIZE(xdev->chan); i++)
		if (err || xilinx_dpdma_chan_err(xdev->chan[i], isr, eisr))
			tasklet_schedule(&xdev->chan[i]->err_task);
}
/**
 * xilinx_dpdma_enable_irq - Enable interrupts
 * @xdev: DPDMA device
 *
 * Enable all normal and error interrupts.
 */
static void xilinx_dpdma_enable_irq(struct xilinx_dpdma_device *xdev)
{
	dpdma_write(xdev->reg, XILINX_DPDMA_IEN, XILINX_DPDMA_INTR_ALL);
	dpdma_write(xdev->reg, XILINX_DPDMA_EIEN, XILINX_DPDMA_EINTR_ALL);
}
/**
 * xilinx_dpdma_disable_irq - Disable interrupts
 * @xdev: DPDMA device
 *
 * Disable interrupts.
 */
static void xilinx_dpdma_disable_irq(struct xilinx_dpdma_device *xdev)
{
	/*
	 * NOTE(review): this masks only the error interrupts
	 * (XILINX_DPDMA_INTR_ERR_ALL) while xilinx_dpdma_enable_irq() enables
	 * XILINX_DPDMA_INTR_ALL — confirm whether the non-error interrupts
	 * should also be disabled here.
	 */
	dpdma_write(xdev->reg, XILINX_DPDMA_IDS, XILINX_DPDMA_INTR_ERR_ALL);
	dpdma_write(xdev->reg, XILINX_DPDMA_EIDS, XILINX_DPDMA_EINTR_ALL);
}
/**
 * xilinx_dpdma_chan_err_task - Per channel tasklet for error handling
 * @data: tasklet data to be casted to DPDMA channel structure
 *
 * Per channel error handling tasklet. This function waits for the outstanding
 * transaction to complete and triggers error handling. After error handling,
 * re-enable channel error interrupts, and restart the channel if needed.
 */
static void xilinx_dpdma_chan_err_task(unsigned long data)
{
	/* Tasklet data carries the channel pointer (set in chan_init). */
	struct xilinx_dpdma_chan *chan = (struct xilinx_dpdma_chan *)data;
	struct xilinx_dpdma_device *xdev = chan->xdev;
	unsigned long flags;

	/* Proceed error handling even when polling fails. */
	xilinx_dpdma_chan_poll_no_ostand(chan);

	xilinx_dpdma_chan_handle_err(chan);

	/* Re-enable the error interrupts masked by the error IRQ handler. */
	dpdma_write(xdev->reg, XILINX_DPDMA_IEN,
		    XILINX_DPDMA_INTR_CHAN_ERR_MASK << chan->id);
	dpdma_write(xdev->reg, XILINX_DPDMA_EIEN,
		    XILINX_DPDMA_EINTR_CHAN_ERR_MASK << chan->id);

	/* Restart the channel if a descriptor was requeued by error handling. */
	spin_lock_irqsave(&chan->lock, flags);
	xilinx_dpdma_chan_queue_transfer(chan);
	spin_unlock_irqrestore(&chan->lock, flags);
}
/**
 * xilinx_dpdma_irq_handler - DPDMA interrupt handler
 * @irq: IRQ number
 * @data: Pointer to the DPDMA device
 *
 * Acknowledge all pending status and error interrupts, then dispatch VSYNC,
 * descriptor-done, no-outstanding and error events to the per-channel
 * handlers.
 *
 * Return: IRQ_HANDLED if any interrupt was pending, IRQ_NONE otherwise (the
 * IRQ line is shared).
 */
static irqreturn_t xilinx_dpdma_irq_handler(int irq, void *data)
{
	struct xilinx_dpdma_device *xdev = data;
	unsigned long mask;
	unsigned int i;
	u32 status;
	u32 error;

	status = dpdma_read(xdev->reg, XILINX_DPDMA_ISR);
	error = dpdma_read(xdev->reg, XILINX_DPDMA_EISR);
	if (!status && !error)
		return IRQ_NONE;

	/* Acknowledge (clear) everything we are about to handle. */
	dpdma_write(xdev->reg, XILINX_DPDMA_ISR, status);
	dpdma_write(xdev->reg, XILINX_DPDMA_EISR, error);

	if (status & XILINX_DPDMA_INTR_VSYNC) {
		/*
		 * There's a single VSYNC interrupt that needs to be processed
		 * by each running channel to update the active descriptor.
		 */
		for (i = 0; i < ARRAY_SIZE(xdev->chan); i++) {
			struct xilinx_dpdma_chan *chan = xdev->chan[i];

			if (chan)
				xilinx_dpdma_chan_vsync_irq(chan);
		}
	}

	/* One DONE bit per channel; handle each set bit. */
	mask = FIELD_GET(XILINX_DPDMA_INTR_DESC_DONE_MASK, status);
	if (mask) {
		for_each_set_bit(i, &mask, ARRAY_SIZE(xdev->chan))
			xilinx_dpdma_chan_done_irq(xdev->chan[i]);
	}

	/* One 'no outstanding transaction' bit per channel. */
	mask = FIELD_GET(XILINX_DPDMA_INTR_NO_OSTAND_MASK, status);
	if (mask) {
		for_each_set_bit(i, &mask, ARRAY_SIZE(xdev->chan))
			xilinx_dpdma_chan_notify_no_ostand(xdev->chan[i]);
	}

	mask = status & XILINX_DPDMA_INTR_ERR_ALL;
	if (mask || error)
		xilinx_dpdma_handle_err_irq(xdev, mask, error);

	return IRQ_HANDLED;
}
/* -----------------------------------------------------------------------------
* Initialization & Cleanup
*/
/**
 * xilinx_dpdma_chan_init - Initialize a DPDMA channel
 * @xdev: DPDMA device
 * @chan_id: Channel index (also used as the register bank offset multiplier)
 *
 * Allocate and initialize the channel structure, its lock, wait queue, error
 * tasklet and virtual DMA channel, and record it in @xdev->chan.
 *
 * Return: 0 on success, or -ENOMEM on allocation failure.
 */
static int xilinx_dpdma_chan_init(struct xilinx_dpdma_device *xdev,
				  unsigned int chan_id)
{
	struct xilinx_dpdma_chan *chan;

	chan = devm_kzalloc(xdev->dev, sizeof(*chan), GFP_KERNEL);
	if (!chan)
		return -ENOMEM;

	chan->id = chan_id;
	/* Each channel has its own register bank at a fixed stride. */
	chan->reg = xdev->reg + XILINX_DPDMA_CH_BASE
		  + XILINX_DPDMA_CH_OFFSET * chan->id;
	chan->running = false;
	chan->xdev = xdev;

	spin_lock_init(&chan->lock);
	init_waitqueue_head(&chan->wait_to_stop);

	/* The channel pointer is recovered from the tasklet data argument. */
	tasklet_init(&chan->err_task, xilinx_dpdma_chan_err_task,
		     (unsigned long)chan);

	chan->vchan.desc_free = xilinx_dpdma_chan_free_tx_desc;
	vchan_init(&chan->vchan, &xdev->common);

	xdev->chan[chan->id] = chan;

	return 0;
}
/*
 * Tear down a channel created by xilinx_dpdma_chan_init(). Tolerates a NULL
 * @chan so the probe error path can iterate over partially-initialized
 * channel arrays.
 */
static void xilinx_dpdma_chan_remove(struct xilinx_dpdma_chan *chan)
{
	if (!chan)
		return;

	tasklet_kill(&chan->err_task);
	list_del(&chan->vchan.chan.device_node);
}
/*
 * Translate a DT DMA specifier (single cell: channel index) to the
 * corresponding DMA channel, or NULL when the index is out of range or the
 * channel was not initialized.
 */
static struct dma_chan *of_dma_xilinx_xlate(struct of_phandle_args *dma_spec,
					    struct of_dma *ofdma)
{
	struct xilinx_dpdma_device *xdev = ofdma->of_dma_data;
	uint32_t chan_id = dma_spec->args[0];

	if (chan_id >= ARRAY_SIZE(xdev->chan) || !xdev->chan[chan_id])
		return NULL;

	return dma_get_slave_channel(&xdev->chan[chan_id]->vchan.chan);
}
/**
 * xilinx_dpdma_probe - Probe the DPDMA platform device
 * @pdev: Platform device
 *
 * Map registers, request the (shared) IRQ, initialize all channels, enable
 * the AXI clock, and register the DMA device with the dmaengine core and the
 * OF DMA helpers. On failure the error labels unwind in reverse order of
 * initialization.
 *
 * Return: 0 on success, or a negative error code.
 */
static int xilinx_dpdma_probe(struct platform_device *pdev)
{
	struct xilinx_dpdma_device *xdev;
	struct dma_device *ddev;
	unsigned int i;
	int ret;

	xdev = devm_kzalloc(&pdev->dev, sizeof(*xdev), GFP_KERNEL);
	if (!xdev)
		return -ENOMEM;

	xdev->dev = &pdev->dev;
	/* Use extended (high) address registers only on 64-bit DMA setups. */
	xdev->ext_addr = sizeof(dma_addr_t) > 4;

	INIT_LIST_HEAD(&xdev->common.channels);

	platform_set_drvdata(pdev, xdev);

	xdev->axi_clk = devm_clk_get(xdev->dev, "axi_clk");
	if (IS_ERR(xdev->axi_clk))
		return PTR_ERR(xdev->axi_clk);

	xdev->reg = devm_platform_ioremap_resource(pdev, 0);
	if (IS_ERR(xdev->reg))
		return PTR_ERR(xdev->reg);

	xdev->irq = platform_get_irq(pdev, 0);
	if (xdev->irq < 0) {
		dev_err(xdev->dev, "failed to get platform irq\n");
		return xdev->irq;
	}

	ret = request_irq(xdev->irq, xilinx_dpdma_irq_handler, IRQF_SHARED,
			  dev_name(xdev->dev), xdev);
	if (ret) {
		dev_err(xdev->dev, "failed to request IRQ\n");
		return ret;
	}

	/* Describe the engine's capabilities to the dmaengine core. */
	ddev = &xdev->common;
	ddev->dev = &pdev->dev;

	dma_cap_set(DMA_SLAVE, ddev->cap_mask);
	dma_cap_set(DMA_PRIVATE, ddev->cap_mask);
	dma_cap_set(DMA_INTERLEAVE, ddev->cap_mask);
	dma_cap_set(DMA_REPEAT, ddev->cap_mask);
	dma_cap_set(DMA_LOAD_EOT, ddev->cap_mask);
	ddev->copy_align = fls(XILINX_DPDMA_ALIGN_BYTES - 1);

	ddev->device_alloc_chan_resources = xilinx_dpdma_alloc_chan_resources;
	ddev->device_free_chan_resources = xilinx_dpdma_free_chan_resources;
	ddev->device_prep_interleaved_dma = xilinx_dpdma_prep_interleaved_dma;
	/* TODO: Can we achieve better granularity ? */
	ddev->device_tx_status = dma_cookie_status;
	ddev->device_issue_pending = xilinx_dpdma_issue_pending;
	ddev->device_config = xilinx_dpdma_config;
	ddev->device_pause = xilinx_dpdma_pause;
	ddev->device_resume = xilinx_dpdma_resume;
	ddev->device_terminate_all = xilinx_dpdma_terminate_all;
	ddev->device_synchronize = xilinx_dpdma_synchronize;
	ddev->src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_UNDEFINED);
	ddev->directions = BIT(DMA_MEM_TO_DEV);
	ddev->residue_granularity = DMA_RESIDUE_GRANULARITY_DESCRIPTOR;

	for (i = 0; i < ARRAY_SIZE(xdev->chan); ++i) {
		ret = xilinx_dpdma_chan_init(xdev, i);
		if (ret < 0) {
			dev_err(xdev->dev, "failed to initialize channel %u\n",
				i);
			goto error;
		}
	}

	ret = clk_prepare_enable(xdev->axi_clk);
	if (ret) {
		dev_err(xdev->dev, "failed to enable the axi clock\n");
		goto error;
	}

	ret = dma_async_device_register(ddev);
	if (ret) {
		dev_err(xdev->dev, "failed to register the dma device\n");
		goto error_dma_async;
	}

	ret = of_dma_controller_register(xdev->dev->of_node,
					 of_dma_xilinx_xlate, ddev);
	if (ret) {
		dev_err(xdev->dev, "failed to register DMA to DT DMA helper\n");
		goto error_of_dma;
	}

	xilinx_dpdma_enable_irq(xdev);

	dev_info(&pdev->dev, "Xilinx DPDMA engine is probed\n");

	return 0;

error_of_dma:
	dma_async_device_unregister(ddev);
error_dma_async:
	clk_disable_unprepare(xdev->axi_clk);
error:
	/* xilinx_dpdma_chan_remove() is NULL-safe for uninitialized slots. */
	for (i = 0; i < ARRAY_SIZE(xdev->chan); i++)
		xilinx_dpdma_chan_remove(xdev->chan[i]);

	free_irq(xdev->irq, xdev);

	return ret;
}
/**
 * xilinx_dpdma_remove - Remove the DPDMA platform device
 * @pdev: Platform device
 *
 * Unwind everything set up by xilinx_dpdma_probe().
 *
 * Return: Always 0.
 */
static int xilinx_dpdma_remove(struct platform_device *pdev)
{
	struct xilinx_dpdma_device *xdev = platform_get_drvdata(pdev);
	unsigned int i;

	/* Start by disabling the IRQ to avoid races during cleanup. */
	free_irq(xdev->irq, xdev);

	xilinx_dpdma_disable_irq(xdev);
	of_dma_controller_free(pdev->dev.of_node);
	dma_async_device_unregister(&xdev->common);
	clk_disable_unprepare(xdev->axi_clk);

	for (i = 0; i < ARRAY_SIZE(xdev->chan); i++)
		xilinx_dpdma_chan_remove(xdev->chan[i]);

	return 0;
}
/* Device tree match table: binds to the "xlnx,zynqmp-dpdma" compatible. */
static const struct of_device_id xilinx_dpdma_of_match[] = {
	{ .compatible = "xlnx,zynqmp-dpdma",},
	{ /* end of table */ },
};
MODULE_DEVICE_TABLE(of, xilinx_dpdma_of_match);

static struct platform_driver xilinx_dpdma_driver = {
	.probe			= xilinx_dpdma_probe,
	.remove			= xilinx_dpdma_remove,
	.driver			= {
		.name		= "xilinx-zynqmp-dpdma",
		.of_match_table	= xilinx_dpdma_of_match,
	},
};

module_platform_driver(xilinx_dpdma_driver);

MODULE_AUTHOR("Xilinx, Inc.");
MODULE_DESCRIPTION("Xilinx ZynqMP DPDMA driver");
MODULE_LICENSE("GPL v2");
......@@ -386,6 +386,8 @@ source "drivers/gpu/drm/mcde/Kconfig"
source "drivers/gpu/drm/tidss/Kconfig"
source "drivers/gpu/drm/xlnx/Kconfig"
# Keep legacy drivers last
menuconfig DRM_LEGACY
......
......@@ -123,3 +123,4 @@ obj-$(CONFIG_DRM_PANFROST) += panfrost/
obj-$(CONFIG_DRM_ASPEED_GFX) += aspeed/
obj-$(CONFIG_DRM_MCDE) += mcde/
obj-$(CONFIG_DRM_TIDSS) += tidss/
obj-y += xlnx/
config DRM_ZYNQMP_DPSUB
tristate "ZynqMP DisplayPort Controller Driver"
depends on ARCH_ZYNQMP || COMPILE_TEST
depends on COMMON_CLK && DRM && OF
select DMA_ENGINE
select DRM_GEM_CMA_HELPER
select DRM_KMS_CMA_HELPER
select DRM_KMS_HELPER
select GENERIC_PHY
help
This is a DRM/KMS driver for ZynqMP DisplayPort controller. Choose
this option if you have a Xilinx ZynqMP SoC with DisplayPort
subsystem.
zynqmp-dpsub-y := zynqmp_disp.o zynqmp_dpsub.o zynqmp_dp.o
obj-$(CONFIG_DRM_ZYNQMP_DPSUB) += zynqmp-dpsub.o
// SPDX-License-Identifier: GPL-2.0
/*
* ZynqMP Display Controller Driver
*
* Copyright (C) 2017 - 2020 Xilinx, Inc.
*
* Authors:
* - Hyun Woo Kwon <hyun.kwon@xilinx.com>
* - Laurent Pinchart <laurent.pinchart@ideasonboard.com>
*/
#include <drm/drm_atomic.h>
#include <drm/drm_atomic_helper.h>
#include <drm/drm_atomic_uapi.h>
#include <drm/drm_crtc.h>
#include <drm/drm_device.h>
#include <drm/drm_fb_cma_helper.h>
#include <drm/drm_fourcc.h>
#include <drm/drm_framebuffer.h>
#include <drm/drm_managed.h>
#include <drm/drm_plane.h>
#include <drm/drm_plane_helper.h>
#include <drm/drm_vblank.h>
#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/dma-mapping.h>
#include <linux/dmaengine.h>
#include <linux/module.h>
#include <linux/of.h>
#include <linux/of_dma.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/spinlock.h>
#include "zynqmp_disp.h"
#include "zynqmp_disp_regs.h"
#include "zynqmp_dp.h"
#include "zynqmp_dpsub.h"
/*
* Overview
* --------
*
* The display controller part of ZynqMP DP subsystem, made of the Audio/Video
* Buffer Manager, the Video Rendering Pipeline (blender) and the Audio Mixer.
*
* +------------------------------------------------------------+
* +--------+ | +----------------+ +-----------+ |
* | DPDMA | --->| | --> | Video | Video +-------------+ |
* | 4x vid | | | | | Rendering | -+--> | | | +------+
* | 2x aud | | | Audio/Video | --> | Pipeline | | | DisplayPort |---> | PHY0 |
* +--------+ | | Buffer Manager | +-----------+ | | Source | | +------+
* | | and STC | +-----------+ | | Controller | | +------+
* Live Video --->| | --> | Audio | Audio | |---> | PHY1 |
* | | | | Mixer | --+-> | | | +------+
* Live Audio --->| | --> | | || +-------------+ |
* | +----------------+ +-----------+ || |
* +---------------------------------------||-------------------+
* vv
* Blended Video and
* Mixed Audio to PL
*
* Only non-live input from the DPDMA and output to the DisplayPort Source
* Controller are currently supported. Interface with the programmable logic
* for live streams is not implemented.
*
* The display controller code creates planes for the DPDMA video and graphics
* layers, and a CRTC for the Video Rendering Pipeline.
*/
/* Number of AV buffer manager buffers carrying video/graphics data. */
#define ZYNQMP_DISP_AV_BUF_NUM_VID_GFX_BUFFERS 4
/* Total number of AV buffer manager buffers (presumably incl. audio — TODO confirm). */
#define ZYNQMP_DISP_AV_BUF_NUM_BUFFERS 6
/* Two DPDMA-backed layers: video and graphics. */
#define ZYNQMP_DISP_NUM_LAYERS 2
/* Maximum number of DMA sub-planes per layer (e.g. Y, U and V planes). */
#define ZYNQMP_DISP_MAX_NUM_SUB_PLANES 3
/**
 * struct zynqmp_disp_format - Display subsystem format information
 * @drm_fmt: DRM format (4CC)
 * @buf_fmt: AV buffer format
 * @bus_fmt: Media bus formats (live formats)
 * @swap: Flag to swap R & B for RGB formats, and U & V for YUV formats
 * @sf: Scaling factors for color components
 */
struct zynqmp_disp_format {
	u32 drm_fmt;
	u32 buf_fmt;
	u32 bus_fmt;
	bool swap;
	const u32 *sf;
};
/**
 * enum zynqmp_disp_layer_id - Layer identifier
 * @ZYNQMP_DISP_LAYER_VID: Video layer
 * @ZYNQMP_DISP_LAYER_GFX: Graphics layer
 */
enum zynqmp_disp_layer_id {
	ZYNQMP_DISP_LAYER_VID,
	ZYNQMP_DISP_LAYER_GFX
};
/**
 * enum zynqmp_disp_layer_mode - Layer mode
 * @ZYNQMP_DISP_LAYER_NONLIVE: Non-live (memory, DPDMA-fed) mode
 * @ZYNQMP_DISP_LAYER_LIVE: Live (stream, from the programmable logic) mode
 */
enum zynqmp_disp_layer_mode {
	ZYNQMP_DISP_LAYER_NONLIVE,
	ZYNQMP_DISP_LAYER_LIVE
};
/**
 * struct zynqmp_disp_layer_dma - DMA channel for one data plane of a layer
 * @chan: DMA channel
 * @xt: Interleaved DMA descriptor template
 * @sgl: Data chunk for dma_interleaved_template
 */
struct zynqmp_disp_layer_dma {
	struct dma_chan *chan;
	struct dma_interleaved_template xt;
	struct data_chunk sgl;
};
/**
 * struct zynqmp_disp_layer_info - Static layer information
 * @formats: Array of supported formats
 * @num_formats: Number of formats in @formats array
 * @num_channels: Number of DMA channels used by the layer
 */
struct zynqmp_disp_layer_info {
	const struct zynqmp_disp_format *formats;
	unsigned int num_formats;
	unsigned int num_channels;
};
/**
 * struct zynqmp_disp_layer - Display layer (DRM plane)
 * @plane: DRM plane
 * @id: Layer ID
 * @disp: Back pointer to struct zynqmp_disp
 * @info: Static layer information
 * @dmas: DMA channels, one per data sub-plane
 * @disp_fmt: Current format information
 * @drm_fmt: Current DRM format information
 * @mode: Current operation mode (live or non-live)
 */
struct zynqmp_disp_layer {
	struct drm_plane plane;
	enum zynqmp_disp_layer_id id;
	struct zynqmp_disp *disp;
	const struct zynqmp_disp_layer_info *info;
	struct zynqmp_disp_layer_dma dmas[ZYNQMP_DISP_MAX_NUM_SUB_PLANES];
	const struct zynqmp_disp_format *disp_fmt;
	const struct drm_format_info *drm_fmt;
	enum zynqmp_disp_layer_mode mode;
};
/**
 * struct zynqmp_disp_blend - Blender
 * @base: Registers I/O base address
 */
struct zynqmp_disp_blend {
	void __iomem *base;
};
/**
 * struct zynqmp_disp_avbuf - Audio/video buffer manager
 * @base: Registers I/O base address
 */
struct zynqmp_disp_avbuf {
	void __iomem *base;
};
/**
 * struct zynqmp_disp_audio - Audio mixer
 * @base: Registers I/O base address
 * @clk: Audio clock
 * @clk_from_ps: True if the audio clock comes from PS, false from PL
 */
struct zynqmp_disp_audio {
	void __iomem *base;
	struct clk *clk;
	bool clk_from_ps;
};
/**
 * struct zynqmp_disp - Display controller
 * @dev: Device structure
 * @drm: DRM core
 * @dpsub: Display subsystem
 * @crtc: DRM CRTC
 * @blend: Blender (video rendering pipeline)
 * @avbuf: Audio/video buffer manager
 * @audio: Audio mixer
 * @layers: Layers (planes)
 * @event: Pending vblank event request
 * @pclk: Pixel clock
 * @pclk_from_ps: True if the video clock comes from PS, false from PL
 */
struct zynqmp_disp {
	struct device *dev;
	struct drm_device *drm;
	struct zynqmp_dpsub *dpsub;
	struct drm_crtc crtc;
	struct zynqmp_disp_blend blend;
	struct zynqmp_disp_avbuf avbuf;
	struct zynqmp_disp_audio audio;
	struct zynqmp_disp_layer layers[ZYNQMP_DISP_NUM_LAYERS];
	struct drm_pending_vblank_event *event;
	struct clk *pclk;
	bool pclk_from_ps;
};
/* -----------------------------------------------------------------------------
 * Audio/Video Buffer Manager
 */

/*
 * Per-component scaling factor tables, one entry per color component,
 * selected according to the component bit depth of each pixel format.
 */
static const u32 scaling_factors_444[] = {
	ZYNQMP_DISP_AV_BUF_4BIT_SF,
	ZYNQMP_DISP_AV_BUF_4BIT_SF,
	ZYNQMP_DISP_AV_BUF_4BIT_SF,
};
static const u32 scaling_factors_555[] = {
	ZYNQMP_DISP_AV_BUF_5BIT_SF,
	ZYNQMP_DISP_AV_BUF_5BIT_SF,
	ZYNQMP_DISP_AV_BUF_5BIT_SF,
};
static const u32 scaling_factors_565[] = {
	ZYNQMP_DISP_AV_BUF_5BIT_SF,
	ZYNQMP_DISP_AV_BUF_6BIT_SF,
	ZYNQMP_DISP_AV_BUF_5BIT_SF,
};
static const u32 scaling_factors_666[] = {
	ZYNQMP_DISP_AV_BUF_6BIT_SF,
	ZYNQMP_DISP_AV_BUF_6BIT_SF,
	ZYNQMP_DISP_AV_BUF_6BIT_SF,
};
static const u32 scaling_factors_888[] = {
	ZYNQMP_DISP_AV_BUF_8BIT_SF,
	ZYNQMP_DISP_AV_BUF_8BIT_SF,
	ZYNQMP_DISP_AV_BUF_8BIT_SF,
};
static const u32 scaling_factors_101010[] = {
	ZYNQMP_DISP_AV_BUF_10BIT_SF,
	ZYNQMP_DISP_AV_BUF_10BIT_SF,
	ZYNQMP_DISP_AV_BUF_10BIT_SF,
};
/*
 * List of video layer formats: maps each supported DRM 4CC to the
 * corresponding AV buffer manager non-live video format, the component swap
 * flag (R/B or U/V, see struct zynqmp_disp_format) and the per-component
 * scaling factors.
 */
static const struct zynqmp_disp_format avbuf_vid_fmts[] = {
	{
		.drm_fmt = DRM_FORMAT_VYUY,
		.buf_fmt = ZYNQMP_DISP_AV_BUF_FMT_NL_VID_VYUY,
		.swap = true,
		.sf = scaling_factors_888,
	}, {
		.drm_fmt = DRM_FORMAT_UYVY,
		.buf_fmt = ZYNQMP_DISP_AV_BUF_FMT_NL_VID_VYUY,
		.swap = false,
		.sf = scaling_factors_888,
	}, {
		.drm_fmt = DRM_FORMAT_YUYV,
		.buf_fmt = ZYNQMP_DISP_AV_BUF_FMT_NL_VID_YUYV,
		.swap = false,
		.sf = scaling_factors_888,
	}, {
		.drm_fmt = DRM_FORMAT_YVYU,
		.buf_fmt = ZYNQMP_DISP_AV_BUF_FMT_NL_VID_YUYV,
		.swap = true,
		.sf = scaling_factors_888,
	}, {
		.drm_fmt = DRM_FORMAT_YUV422,
		.buf_fmt = ZYNQMP_DISP_AV_BUF_FMT_NL_VID_YV16,
		.swap = false,
		.sf = scaling_factors_888,
	}, {
		.drm_fmt = DRM_FORMAT_YVU422,
		.buf_fmt = ZYNQMP_DISP_AV_BUF_FMT_NL_VID_YV16,
		.swap = true,
		.sf = scaling_factors_888,
	}, {
		.drm_fmt = DRM_FORMAT_YUV444,
		.buf_fmt = ZYNQMP_DISP_AV_BUF_FMT_NL_VID_YV24,
		.swap = false,
		.sf = scaling_factors_888,
	}, {
		.drm_fmt = DRM_FORMAT_YVU444,
		.buf_fmt = ZYNQMP_DISP_AV_BUF_FMT_NL_VID_YV24,
		.swap = true,
		.sf = scaling_factors_888,
	}, {
		.drm_fmt = DRM_FORMAT_NV16,
		.buf_fmt = ZYNQMP_DISP_AV_BUF_FMT_NL_VID_YV16CI,
		.swap = false,
		.sf = scaling_factors_888,
	}, {
		.drm_fmt = DRM_FORMAT_NV61,
		.buf_fmt = ZYNQMP_DISP_AV_BUF_FMT_NL_VID_YV16CI,
		.swap = true,
		.sf = scaling_factors_888,
	}, {
		.drm_fmt = DRM_FORMAT_BGR888,
		.buf_fmt = ZYNQMP_DISP_AV_BUF_FMT_NL_VID_RGB888,
		.swap = false,
		.sf = scaling_factors_888,
	}, {
		.drm_fmt = DRM_FORMAT_RGB888,
		.buf_fmt = ZYNQMP_DISP_AV_BUF_FMT_NL_VID_RGB888,
		.swap = true,
		.sf = scaling_factors_888,
	}, {
		.drm_fmt = DRM_FORMAT_XBGR8888,
		.buf_fmt = ZYNQMP_DISP_AV_BUF_FMT_NL_VID_RGBA8880,
		.swap = false,
		.sf = scaling_factors_888,
	}, {
		.drm_fmt = DRM_FORMAT_XRGB8888,
		.buf_fmt = ZYNQMP_DISP_AV_BUF_FMT_NL_VID_RGBA8880,
		.swap = true,
		.sf = scaling_factors_888,
	}, {
		.drm_fmt = DRM_FORMAT_XBGR2101010,
		.buf_fmt = ZYNQMP_DISP_AV_BUF_FMT_NL_VID_RGB888_10,
		.swap = false,
		.sf = scaling_factors_101010,
	}, {
		.drm_fmt = DRM_FORMAT_XRGB2101010,
		.buf_fmt = ZYNQMP_DISP_AV_BUF_FMT_NL_VID_RGB888_10,
		.swap = true,
		.sf = scaling_factors_101010,
	}, {
		.drm_fmt = DRM_FORMAT_YUV420,
		.buf_fmt = ZYNQMP_DISP_AV_BUF_FMT_NL_VID_YV16_420,
		.swap = false,
		.sf = scaling_factors_888,
	}, {
		.drm_fmt = DRM_FORMAT_YVU420,
		.buf_fmt = ZYNQMP_DISP_AV_BUF_FMT_NL_VID_YV16_420,
		.swap = true,
		.sf = scaling_factors_888,
	}, {
		.drm_fmt = DRM_FORMAT_NV12,
		.buf_fmt = ZYNQMP_DISP_AV_BUF_FMT_NL_VID_YV16CI_420,
		.swap = false,
		.sf = scaling_factors_888,
	}, {
		.drm_fmt = DRM_FORMAT_NV21,
		.buf_fmt = ZYNQMP_DISP_AV_BUF_FMT_NL_VID_YV16CI_420,
		.swap = true,
		.sf = scaling_factors_888,
	},
};
/*
 * List of graphics layer formats: maps each supported DRM 4CC to the
 * corresponding AV buffer manager non-live graphics format, the component
 * swap flag and the per-component scaling factors.
 */
static const struct zynqmp_disp_format avbuf_gfx_fmts[] = {
	{
		.drm_fmt = DRM_FORMAT_ABGR8888,
		.buf_fmt = ZYNQMP_DISP_AV_BUF_FMT_NL_GFX_RGBA8888,
		.swap = false,
		.sf = scaling_factors_888,
	}, {
		.drm_fmt = DRM_FORMAT_ARGB8888,
		.buf_fmt = ZYNQMP_DISP_AV_BUF_FMT_NL_GFX_RGBA8888,
		.swap = true,
		.sf = scaling_factors_888,
	}, {
		.drm_fmt = DRM_FORMAT_RGBA8888,
		.buf_fmt = ZYNQMP_DISP_AV_BUF_FMT_NL_GFX_ABGR8888,
		.swap = false,
		.sf = scaling_factors_888,
	}, {
		.drm_fmt = DRM_FORMAT_BGRA8888,
		.buf_fmt = ZYNQMP_DISP_AV_BUF_FMT_NL_GFX_ABGR8888,
		.swap = true,
		.sf = scaling_factors_888,
	}, {
		.drm_fmt = DRM_FORMAT_BGR888,
		.buf_fmt = ZYNQMP_DISP_AV_BUF_FMT_NL_GFX_RGB888,
		.swap = false,
		.sf = scaling_factors_888,
	}, {
		.drm_fmt = DRM_FORMAT_RGB888,
		.buf_fmt = ZYNQMP_DISP_AV_BUF_FMT_NL_GFX_BGR888,
		.swap = false,
		.sf = scaling_factors_888,
	}, {
		.drm_fmt = DRM_FORMAT_RGBA5551,
		.buf_fmt = ZYNQMP_DISP_AV_BUF_FMT_NL_GFX_RGBA5551,
		.swap = false,
		.sf = scaling_factors_555,
	}, {
		.drm_fmt = DRM_FORMAT_BGRA5551,
		.buf_fmt = ZYNQMP_DISP_AV_BUF_FMT_NL_GFX_RGBA5551,
		.swap = true,
		.sf = scaling_factors_555,
	}, {
		.drm_fmt = DRM_FORMAT_RGBA4444,
		.buf_fmt = ZYNQMP_DISP_AV_BUF_FMT_NL_GFX_RGBA4444,
		.swap = false,
		.sf = scaling_factors_444,
	}, {
		.drm_fmt = DRM_FORMAT_BGRA4444,
		.buf_fmt = ZYNQMP_DISP_AV_BUF_FMT_NL_GFX_RGBA4444,
		.swap = true,
		.sf = scaling_factors_444,
	}, {
		.drm_fmt = DRM_FORMAT_RGB565,
		.buf_fmt = ZYNQMP_DISP_AV_BUF_FMT_NL_GFX_RGB565,
		.swap = false,
		.sf = scaling_factors_565,
	}, {
		.drm_fmt = DRM_FORMAT_BGR565,
		.buf_fmt = ZYNQMP_DISP_AV_BUF_FMT_NL_GFX_RGB565,
		.swap = true,
		.sf = scaling_factors_565,
	},
};
/* Read a register of the audio/video buffer manager. */
static u32 zynqmp_disp_avbuf_read(struct zynqmp_disp_avbuf *avbuf, int reg)
{
	return readl(avbuf->base + reg);
}
/* Write a register of the audio/video buffer manager. */
static void zynqmp_disp_avbuf_write(struct zynqmp_disp_avbuf *avbuf,
				    int reg, u32 val)
{
	writel(val, avbuf->base + reg);
}
/**
 * zynqmp_disp_avbuf_set_format - Set the input format for a layer
 * @avbuf: Audio/video buffer manager
 * @layer: The layer ID
 * @fmt: The format information
 *
 * Set the video buffer manager format for @layer to @fmt.
 */
static void zynqmp_disp_avbuf_set_format(struct zynqmp_disp_avbuf *avbuf,
					 enum zynqmp_disp_layer_id layer,
					 const struct zynqmp_disp_format *fmt)
{
	unsigned int i;
	u32 val;

	/*
	 * The format register is shared by both layers, update only the
	 * bits that belong to @layer.
	 */
	val = zynqmp_disp_avbuf_read(avbuf, ZYNQMP_DISP_AV_BUF_FMT);
	val &= layer == ZYNQMP_DISP_LAYER_VID
	    ? ~ZYNQMP_DISP_AV_BUF_FMT_NL_VID_MASK
	    : ~ZYNQMP_DISP_AV_BUF_FMT_NL_GFX_MASK;
	val |= fmt->buf_fmt;
	zynqmp_disp_avbuf_write(avbuf, ZYNQMP_DISP_AV_BUF_FMT, val);

	/* Program the per-component scaling factors for the format. */
	for (i = 0; i < ZYNQMP_DISP_AV_BUF_NUM_SF; i++) {
		unsigned int reg = layer == ZYNQMP_DISP_LAYER_VID
				 ? ZYNQMP_DISP_AV_BUF_VID_COMP_SF(i)
				 : ZYNQMP_DISP_AV_BUF_GFX_COMP_SF(i);

		zynqmp_disp_avbuf_write(avbuf, reg, fmt->sf[i]);
	}
}
/**
 * zynqmp_disp_avbuf_set_clocks_sources - Set the clocks sources
 * @avbuf: Audio/video buffer manager
 * @video_from_ps: True if the video clock originates from the PS
 * @audio_from_ps: True if the audio clock originates from the PS
 * @timings_internal: True if video timings are generated internally
 *
 * Set the source for the video and audio clocks, as well as for the video
 * timings. Clocks can originate from the PS or PL, and timings can be
 * generated internally or externally.
 */
static void
zynqmp_disp_avbuf_set_clocks_sources(struct zynqmp_disp_avbuf *avbuf,
				     bool video_from_ps, bool audio_from_ps,
				     bool timings_internal)
{
	u32 val;

	/* Build the clock source selection in one expression. */
	val = (video_from_ps ? ZYNQMP_DISP_AV_BUF_CLK_SRC_VID_FROM_PS : 0) |
	      (audio_from_ps ? ZYNQMP_DISP_AV_BUF_CLK_SRC_AUD_FROM_PS : 0) |
	      (timings_internal ?
	       ZYNQMP_DISP_AV_BUF_CLK_SRC_VID_INTERNAL_TIMING : 0);

	zynqmp_disp_avbuf_write(avbuf, ZYNQMP_DISP_AV_BUF_CLK_SRC, val);
}
/**
 * zynqmp_disp_avbuf_enable_channels - Enable buffer channels
 * @avbuf: Audio/video buffer manager
 *
 * Enable all (video and audio) buffer channels.
 */
static void zynqmp_disp_avbuf_enable_channels(struct zynqmp_disp_avbuf *avbuf)
{
	unsigned int i;
	u32 val;

	/* Video and graphics channels use the video burst length. */
	val = ZYNQMP_DISP_AV_BUF_CHBUF_EN |
	      (ZYNQMP_DISP_AV_BUF_CHBUF_BURST_LEN_MAX <<
	       ZYNQMP_DISP_AV_BUF_CHBUF_BURST_LEN_SHIFT);

	for (i = 0; i < ZYNQMP_DISP_AV_BUF_NUM_VID_GFX_BUFFERS; i++)
		zynqmp_disp_avbuf_write(avbuf, ZYNQMP_DISP_AV_BUF_CHBUF(i),
					val);

	/* The remaining channels are audio and use the audio burst length. */
	val = ZYNQMP_DISP_AV_BUF_CHBUF_EN |
	      (ZYNQMP_DISP_AV_BUF_CHBUF_BURST_LEN_AUD_MAX <<
	       ZYNQMP_DISP_AV_BUF_CHBUF_BURST_LEN_SHIFT);

	for (; i < ZYNQMP_DISP_AV_BUF_NUM_BUFFERS; i++)
		zynqmp_disp_avbuf_write(avbuf, ZYNQMP_DISP_AV_BUF_CHBUF(i),
					val);
}
/**
 * zynqmp_disp_avbuf_disable_channels - Disable buffer channels
 * @avbuf: Audio/video buffer manager
 *
 * Disable all (video and audio) buffer channels.
 */
static void zynqmp_disp_avbuf_disable_channels(struct zynqmp_disp_avbuf *avbuf)
{
	unsigned int i;

	/* Writing the flush bit (without the enable bit) stops the channel. */
	for (i = 0; i < ZYNQMP_DISP_AV_BUF_NUM_BUFFERS; i++)
		zynqmp_disp_avbuf_write(avbuf, ZYNQMP_DISP_AV_BUF_CHBUF(i),
					ZYNQMP_DISP_AV_BUF_CHBUF_FLUSH);
}
/**
 * zynqmp_disp_avbuf_enable_audio - Enable audio
 * @avbuf: Audio/video buffer manager
 *
 * Enable all audio buffers with a non-live (memory) source.
 */
static void zynqmp_disp_avbuf_enable_audio(struct zynqmp_disp_avbuf *avbuf)
{
	u32 val;

	val = zynqmp_disp_avbuf_read(avbuf, ZYNQMP_DISP_AV_BUF_OUTPUT);
	/* Select the memory source for audio stream 1 and enable stream 2. */
	val &= ~ZYNQMP_DISP_AV_BUF_OUTPUT_AUD1_MASK;
	val |= ZYNQMP_DISP_AV_BUF_OUTPUT_AUD1_MEM;
	val |= ZYNQMP_DISP_AV_BUF_OUTPUT_AUD2_EN;
	zynqmp_disp_avbuf_write(avbuf, ZYNQMP_DISP_AV_BUF_OUTPUT, val);
}
/**
 * zynqmp_disp_avbuf_disable_audio - Disable audio
 * @avbuf: Audio/video buffer manager
 *
 * Disable all audio buffers.
 */
static void zynqmp_disp_avbuf_disable_audio(struct zynqmp_disp_avbuf *avbuf)
{
	u32 val;

	val = zynqmp_disp_avbuf_read(avbuf, ZYNQMP_DISP_AV_BUF_OUTPUT);
	/* Disable audio stream 1 and clear the stream 2 enable bit. */
	val &= ~ZYNQMP_DISP_AV_BUF_OUTPUT_AUD1_MASK;
	val |= ZYNQMP_DISP_AV_BUF_OUTPUT_AUD1_DISABLE;
	val &= ~ZYNQMP_DISP_AV_BUF_OUTPUT_AUD2_EN;
	zynqmp_disp_avbuf_write(avbuf, ZYNQMP_DISP_AV_BUF_OUTPUT, val);
}
/**
 * zynqmp_disp_avbuf_enable_video - Enable a video layer
 * @avbuf: Audio/video buffer manager
 * @layer: The layer ID
 * @mode: Operating mode of layer
 *
 * Enable the video/graphics buffer for @layer, selecting the memory or live
 * input source based on @mode.
 */
static void zynqmp_disp_avbuf_enable_video(struct zynqmp_disp_avbuf *avbuf,
					   enum zynqmp_disp_layer_id layer,
					   enum zynqmp_disp_layer_mode mode)
{
	u32 val;

	val = zynqmp_disp_avbuf_read(avbuf, ZYNQMP_DISP_AV_BUF_OUTPUT);

	if (layer == ZYNQMP_DISP_LAYER_VID) {
		val &= ~ZYNQMP_DISP_AV_BUF_OUTPUT_VID1_MASK;
		if (mode == ZYNQMP_DISP_LAYER_NONLIVE)
			val |= ZYNQMP_DISP_AV_BUF_OUTPUT_VID1_MEM;
		else
			val |= ZYNQMP_DISP_AV_BUF_OUTPUT_VID1_LIVE;
	} else {
		val &= ~ZYNQMP_DISP_AV_BUF_OUTPUT_VID2_MASK;
		/*
		 * Select exactly one source: setting the MEM bit
		 * unconditionally here would leave both the MEM and LIVE
		 * selection bits set when the layer is in live mode.
		 */
		if (mode == ZYNQMP_DISP_LAYER_NONLIVE)
			val |= ZYNQMP_DISP_AV_BUF_OUTPUT_VID2_MEM;
		else
			val |= ZYNQMP_DISP_AV_BUF_OUTPUT_VID2_LIVE;
	}

	zynqmp_disp_avbuf_write(avbuf, ZYNQMP_DISP_AV_BUF_OUTPUT, val);
}
/**
 * zynqmp_disp_avbuf_disable_video - Disable a video layer
 * @avbuf: Audio/video buffer manager
 * @layer: The layer ID
 *
 * Disable the video/graphics buffer for @layer.
 */
static void zynqmp_disp_avbuf_disable_video(struct zynqmp_disp_avbuf *avbuf,
					    enum zynqmp_disp_layer_id layer)
{
	u32 mask, disable;
	u32 val;

	/* Each layer has its own field and disable value in the register. */
	if (layer == ZYNQMP_DISP_LAYER_VID) {
		mask = ZYNQMP_DISP_AV_BUF_OUTPUT_VID1_MASK;
		disable = ZYNQMP_DISP_AV_BUF_OUTPUT_VID1_NONE;
	} else {
		mask = ZYNQMP_DISP_AV_BUF_OUTPUT_VID2_MASK;
		disable = ZYNQMP_DISP_AV_BUF_OUTPUT_VID2_DISABLE;
	}

	val = zynqmp_disp_avbuf_read(avbuf, ZYNQMP_DISP_AV_BUF_OUTPUT);
	val = (val & ~mask) | disable;
	zynqmp_disp_avbuf_write(avbuf, ZYNQMP_DISP_AV_BUF_OUTPUT, val);
}
/**
 * zynqmp_disp_avbuf_enable - Enable the video pipe
 * @avbuf: Audio/video buffer manager
 *
 * De-assert the video pipe reset.
 */
static void zynqmp_disp_avbuf_enable(struct zynqmp_disp_avbuf *avbuf)
{
	/* Writing 0 clears the reset bit, releasing the video pipe. */
	zynqmp_disp_avbuf_write(avbuf, ZYNQMP_DISP_AV_BUF_SRST_REG, 0);
}
/**
 * zynqmp_disp_avbuf_disable - Disable the video pipe
 * @avbuf: Audio/video buffer manager
 *
 * Assert the video pipe reset.
 */
static void zynqmp_disp_avbuf_disable(struct zynqmp_disp_avbuf *avbuf)
{
	zynqmp_disp_avbuf_write(avbuf, ZYNQMP_DISP_AV_BUF_SRST_REG,
				ZYNQMP_DISP_AV_BUF_SRST_REG_VID_RST);
}
/* -----------------------------------------------------------------------------
 * Blender (Video Pipeline)
 */

/* Write a register of the blender. */
static void zynqmp_disp_blend_write(struct zynqmp_disp_blend *blend,
				    int reg, u32 val)
{
	writel(val, blend->base + reg);
}
/*
 * Colorspace conversion matrices.
 *
 * Hardcode RGB <-> YUV conversion to full-range SDTV for now.
 *
 * Matrix coefficients appear to use a fixed-point encoding where 0x1000
 * represents 1.0 (e.g. the first row of csc_rgb_to_sdtv_matrix matches the
 * BT.601 luma weights 0.299/0.587/0.114) — TODO confirm the sign encoding of
 * values above 0x1000 against the hardware documentation.
 */

/* Zero matrix: used to black out a disabled layer. */
static const u16 csc_zero_matrix[] = {
	0x0,    0x0,    0x0,
	0x0,    0x0,    0x0,
	0x0,    0x0,    0x0
};

/* Identity matrix: pass-through, no colorspace conversion. */
static const u16 csc_identity_matrix[] = {
	0x1000, 0x0,    0x0,
	0x0,    0x1000, 0x0,
	0x0,    0x0,    0x1000
};

static const u32 csc_zero_offsets[] = {
	0, 0, 0
};

static const u16 csc_rgb_to_sdtv_matrix[] = {
	0x4c9,  0x864,  0x1d3,
	0x7d4d, 0x7ab3, 0x800,
	0x800,  0x794d, 0x7eb3
};

static const u32 csc_rgb_to_sdtv_offsets[] = {
	0x0, 0x8000000, 0x8000000
};

static const u16 csc_sdtv_to_rgb_matrix[] = {
	0x1000, 0x166f, 0x0,
	0x1000, 0x7483, 0x7a7f,
	0x1000, 0x0,    0x1c5a
};

static const u32 csc_sdtv_to_rgb_offsets[] = {
	0x0, 0x1800, 0x1800
};
/**
 * zynqmp_disp_blend_set_output_format - Set the output format of the blender
 * @blend: Blender object
 * @format: Output format
 *
 * Set the output format of the blender to @format.
 */
static void zynqmp_disp_blend_set_output_format(struct zynqmp_disp_blend *blend,
						enum zynqmp_dpsub_format format)
{
	static const unsigned int blend_output_fmts[] = {
		[ZYNQMP_DPSUB_FORMAT_RGB] = ZYNQMP_DISP_V_BLEND_OUTPUT_VID_FMT_RGB,
		[ZYNQMP_DPSUB_FORMAT_YCRCB444] = ZYNQMP_DISP_V_BLEND_OUTPUT_VID_FMT_YCBCR444,
		[ZYNQMP_DPSUB_FORMAT_YCRCB422] = ZYNQMP_DISP_V_BLEND_OUTPUT_VID_FMT_YCBCR422
					       | ZYNQMP_DISP_V_BLEND_OUTPUT_VID_FMT_EN_DOWNSAMPLE,
		[ZYNQMP_DPSUB_FORMAT_YONLY] = ZYNQMP_DISP_V_BLEND_OUTPUT_VID_FMT_YONLY,
	};
	u32 fmt = blend_output_fmts[format];
	const u16 *coeffs;
	const u32 *offsets;
	unsigned int i;

	zynqmp_disp_blend_write(blend, ZYNQMP_DISP_V_BLEND_OUTPUT_VID_FMT, fmt);

	/*
	 * The blender operates in RGB internally: a YCbCr output needs the
	 * RGB to SDTV conversion, an RGB output passes through unchanged.
	 */
	if (fmt == ZYNQMP_DISP_V_BLEND_OUTPUT_VID_FMT_RGB) {
		coeffs = csc_identity_matrix;
		offsets = csc_zero_offsets;
	} else {
		coeffs = csc_rgb_to_sdtv_matrix;
		offsets = csc_rgb_to_sdtv_offsets;
	}

	for (i = 0; i < ZYNQMP_DISP_V_BLEND_NUM_COEFF; i++)
		zynqmp_disp_blend_write(blend,
					ZYNQMP_DISP_V_BLEND_RGB2YCBCR_COEFF(i),
					coeffs[i]);

	for (i = 0; i < ZYNQMP_DISP_V_BLEND_NUM_OFFSET; i++)
		zynqmp_disp_blend_write(blend,
					ZYNQMP_DISP_V_BLEND_OUTCSC_OFFSET(i),
					offsets[i]);
}
/**
 * zynqmp_disp_blend_set_bg_color - Set the background color
 * @blend: Blender object
 * @rcr: Red/Cr color component
 * @gy: Green/Y color component
 * @bcb: Blue/Cb color component
 *
 * Set the background color to (@rcr, @gy, @bcb), corresponding to the R, G and
 * B or Cr, Y and Cb components respectively depending on the selected output
 * format.
 */
static void zynqmp_disp_blend_set_bg_color(struct zynqmp_disp_blend *blend,
					   u32 rcr, u32 gy, u32 bcb)
{
	/* One register per color component. */
	zynqmp_disp_blend_write(blend, ZYNQMP_DISP_V_BLEND_BG_CLR_0, rcr);
	zynqmp_disp_blend_write(blend, ZYNQMP_DISP_V_BLEND_BG_CLR_1, gy);
	zynqmp_disp_blend_write(blend, ZYNQMP_DISP_V_BLEND_BG_CLR_2, bcb);
}
/**
 * zynqmp_disp_blend_set_global_alpha - Configure global alpha blending
 * @blend: Blender object
 * @enable: True to enable global alpha blending
 * @alpha: Global alpha value (ignored if @enable is false)
 */
static void zynqmp_disp_blend_set_global_alpha(struct zynqmp_disp_blend *blend,
					       bool enable, u32 alpha)
{
	u32 val = ZYNQMP_DISP_V_BLEND_SET_GLOBAL_ALPHA_VALUE(alpha);

	if (enable)
		val |= ZYNQMP_DISP_V_BLEND_SET_GLOBAL_ALPHA_EN;

	zynqmp_disp_blend_write(blend, ZYNQMP_DISP_V_BLEND_SET_GLOBAL_ALPHA,
				val);
}
/**
 * zynqmp_disp_blend_layer_set_csc - Configure colorspace conversion for layer
 * @blend: Blender object
 * @layer: The layer
 * @coeffs: Colorspace conversion matrix
 * @offsets: Colorspace conversion offsets
 *
 * Configure the input colorspace conversion matrix and offsets for the @layer.
 * Columns of the matrix are automatically swapped based on the input format to
 * handle RGB and YCrCb components permutations.
 */
static void zynqmp_disp_blend_layer_set_csc(struct zynqmp_disp_blend *blend,
					    struct zynqmp_disp_layer *layer,
					    const u16 *coeffs,
					    const u32 *offsets)
{
	/* Column permutation applied to each matrix row, identity default. */
	unsigned int swap[3] = { 0, 1, 2 };
	unsigned int reg;
	unsigned int i;

	if (layer->disp_fmt->swap) {
		if (layer->drm_fmt->is_yuv) {
			/* Swap U and V. */
			swap[1] = 2;
			swap[2] = 1;
		} else {
			/* Swap R and B. */
			swap[0] = 2;
			swap[2] = 0;
		}
	}

	/* Each layer has its own CSC coefficient register bank. */
	if (layer->id == ZYNQMP_DISP_LAYER_VID)
		reg = ZYNQMP_DISP_V_BLEND_IN1CSC_COEFF(0);
	else
		reg = ZYNQMP_DISP_V_BLEND_IN2CSC_COEFF(0);

	/* Write one 3-coefficient row per iteration, 4 bytes per register. */
	for (i = 0; i < ZYNQMP_DISP_V_BLEND_NUM_COEFF; i += 3, reg += 12) {
		zynqmp_disp_blend_write(blend, reg + 0, coeffs[i + swap[0]]);
		zynqmp_disp_blend_write(blend, reg + 4, coeffs[i + swap[1]]);
		zynqmp_disp_blend_write(blend, reg + 8, coeffs[i + swap[2]]);
	}

	if (layer->id == ZYNQMP_DISP_LAYER_VID)
		reg = ZYNQMP_DISP_V_BLEND_IN1CSC_OFFSET(0);
	else
		reg = ZYNQMP_DISP_V_BLEND_IN2CSC_OFFSET(0);

	for (i = 0; i < ZYNQMP_DISP_V_BLEND_NUM_OFFSET; i++)
		zynqmp_disp_blend_write(blend, reg + i * 4, offsets[i]);
}
/**
 * zynqmp_disp_blend_layer_enable - Enable a layer
 * @blend: Blender object
 * @layer: The layer
 *
 * Configure the layer control register (RGB flag, chroma upsampling) and the
 * input colorspace conversion for @layer based on its current format.
 */
static void zynqmp_disp_blend_layer_enable(struct zynqmp_disp_blend *blend,
					   struct zynqmp_disp_layer *layer)
{
	const u16 *coeffs;
	const u32 *offsets;
	u32 val;

	/* Enable chroma upsampling for horizontally subsampled formats. */
	val = (layer->drm_fmt->is_yuv ?
	       0 : ZYNQMP_DISP_V_BLEND_LAYER_CONTROL_RGB) |
	      (layer->drm_fmt->hsub > 1 ?
	       ZYNQMP_DISP_V_BLEND_LAYER_CONTROL_EN_US : 0);

	zynqmp_disp_blend_write(blend,
				ZYNQMP_DISP_V_BLEND_LAYER_CONTROL(layer->id),
				val);

	/* YUV inputs are converted to RGB, RGB inputs pass through. */
	if (layer->drm_fmt->is_yuv) {
		coeffs = csc_sdtv_to_rgb_matrix;
		offsets = csc_sdtv_to_rgb_offsets;
	} else {
		coeffs = csc_identity_matrix;
		offsets = csc_zero_offsets;
	}

	zynqmp_disp_blend_layer_set_csc(blend, layer, coeffs, offsets);
}
/**
 * zynqmp_disp_blend_layer_disable - Disable a layer
 * @blend: Blender object
 * @layer: The layer
 *
 * Clear the layer control register and zero the layer's colorspace
 * conversion so the disabled layer contributes nothing to the output.
 */
static void zynqmp_disp_blend_layer_disable(struct zynqmp_disp_blend *blend,
					    struct zynqmp_disp_layer *layer)
{
	zynqmp_disp_blend_write(blend,
				ZYNQMP_DISP_V_BLEND_LAYER_CONTROL(layer->id),
				0);

	zynqmp_disp_blend_layer_set_csc(blend, layer, csc_zero_matrix,
					csc_zero_offsets);
}
/* -----------------------------------------------------------------------------
 * Audio Mixer
 */

/* Write a register of the audio mixer. */
static void zynqmp_disp_audio_write(struct zynqmp_disp_audio *audio,
				    int reg, u32 val)
{
	writel(val, audio->base + reg);
}
/**
 * zynqmp_disp_audio_enable - Enable the audio mixer
 * @audio: Audio mixer
 *
 * Enable the audio mixer by de-asserting the soft reset. The audio state is
 * set to default values by the reset, set the default mixer volume explicitly.
 */
static void zynqmp_disp_audio_enable(struct zynqmp_disp_audio *audio)
{
	/* Clear the audio soft reset register as it's a non-reset flop. */
	zynqmp_disp_audio_write(audio, ZYNQMP_DISP_AUD_SOFT_RESET, 0);
	zynqmp_disp_audio_write(audio, ZYNQMP_DISP_AUD_MIXER_VOLUME,
				ZYNQMP_DISP_AUD_MIXER_VOLUME_NO_SCALE);
}
/**
 * zynqmp_disp_audio_disable - Disable the audio mixer
 * @audio: Audio mixer
 *
 * Disable the audio mixer by asserting its soft reset.
 */
static void zynqmp_disp_audio_disable(struct zynqmp_disp_audio *audio)
{
	zynqmp_disp_audio_write(audio, ZYNQMP_DISP_AUD_SOFT_RESET,
				ZYNQMP_DISP_AUD_SOFT_RESET_AUD_SRST);
}
static void zynqmp_disp_audio_init(struct device *dev,
struct zynqmp_disp_audio *audio)
{
/* Try the live PL audio clock. */
audio->clk = devm_clk_get(dev, "dp_live_audio_aclk");
if (!IS_ERR(audio->clk)) {
audio->clk_from_ps = false;
return;
}
/* If the live PL audio clock is not valid, fall back to PS clock. */
audio->clk = devm_clk_get(dev, "dp_aud_clk");
if (!IS_ERR(audio->clk)) {
audio->clk_from_ps = true;
return;
}
dev_err(dev, "audio disabled due to missing clock\n");
}
/* -----------------------------------------------------------------------------
 * ZynqMP Display external functions for zynqmp_dp
 */

/**
 * zynqmp_disp_handle_vblank - Handle the vblank event
 * @disp: Display controller
 *
 * This function handles the vblank interrupt, and sends an event to
 * CRTC object. This will be called by the DP vblank interrupt handler.
 */
void zynqmp_disp_handle_vblank(struct zynqmp_disp *disp)
{
	drm_crtc_handle_vblank(&disp->crtc);
}
/**
* zynqmp_disp_audio_enabled - If the audio is enabled
* @disp: Display controller
*
* Return if the audio is enabled depending on the audio clock.
*
* Return: true if audio is enabled, or false.
*/
bool zynqmp_disp_audio_enabled(struct zynqmp_disp *disp)
{
return !!disp->audio.clk;
}
/**
 * zynqmp_disp_get_audio_clk_rate - Get the current audio clock rate
 * @disp: Display controller
 *
 * Return: the current audio clock rate in Hz, or 0 if audio is disabled.
 */
unsigned int zynqmp_disp_get_audio_clk_rate(struct zynqmp_disp *disp)
{
	/*
	 * The original condition was inverted: it returned 0 exactly when
	 * audio was enabled. Query the rate only when a valid clock is
	 * present. Guard with IS_ERR_OR_NULL as audio.clk may hold an
	 * ERR_PTR if clock acquisition failed in zynqmp_disp_audio_init().
	 */
	if (IS_ERR_OR_NULL(disp->audio.clk))
		return 0;

	return clk_get_rate(disp->audio.clk);
}
/**
 * zynqmp_disp_get_crtc_mask - Return the CRTC bit mask
 * @disp: Display controller
 *
 * Return: the crtc mask of the zyqnmp_disp CRTC.
 */
uint32_t zynqmp_disp_get_crtc_mask(struct zynqmp_disp *disp)
{
	return drm_crtc_mask(&disp->crtc);
}
/* -----------------------------------------------------------------------------
 * ZynqMP Display Layer & DRM Plane
 */

/**
 * zynqmp_disp_layer_find_format - Find format information for a DRM format
 * @layer: The layer
 * @drm_fmt: DRM format to search
 *
 * Search display subsystem format information corresponding to the given DRM
 * format @drm_fmt for the @layer, and return a pointer to the format
 * descriptor.
 *
 * Return: A pointer to the format descriptor if found, NULL otherwise
 */
static const struct zynqmp_disp_format *
zynqmp_disp_layer_find_format(struct zynqmp_disp_layer *layer,
			      u32 drm_fmt)
{
	const struct zynqmp_disp_format *fmt = layer->info->formats;
	const struct zynqmp_disp_format *end = fmt + layer->info->num_formats;

	/* Linear scan of the layer's format table. */
	for (; fmt < end; fmt++) {
		if (fmt->drm_fmt == drm_fmt)
			return fmt;
	}

	return NULL;
}
/**
 * zynqmp_disp_layer_enable - Enable a layer
 * @layer: The layer
 *
 * Enable the @layer in the audio/video buffer manager and the blender. DMA
 * channels are started separately by zynqmp_disp_layer_update().
 */
static void zynqmp_disp_layer_enable(struct zynqmp_disp_layer *layer)
{
	zynqmp_disp_avbuf_enable_video(&layer->disp->avbuf, layer->id,
				       ZYNQMP_DISP_LAYER_NONLIVE);
	zynqmp_disp_blend_layer_enable(&layer->disp->blend, layer);

	layer->mode = ZYNQMP_DISP_LAYER_NONLIVE;
}
/**
 * zynqmp_disp_layer_disable - Disable the layer
 * @layer: The layer
 *
 * Disable the layer by stopping its DMA channels and disabling it in the
 * audio/video buffer manager and the blender.
 */
static void zynqmp_disp_layer_disable(struct zynqmp_disp_layer *layer)
{
	unsigned int i;

	/* Stop one DMA channel per plane of the current format. */
	for (i = 0; i < layer->drm_fmt->num_planes; i++)
		dmaengine_terminate_sync(layer->dmas[i].chan);

	zynqmp_disp_avbuf_disable_video(&layer->disp->avbuf, layer->id);
	zynqmp_disp_blend_layer_disable(&layer->disp->blend, layer);
}
/**
 * zynqmp_disp_layer_set_format - Set the layer format
 * @layer: The layer
 * @state: The plane state
 *
 * Set the format for @layer based on @state->fb->format. The layer must be
 * disabled.
 */
static void zynqmp_disp_layer_set_format(struct zynqmp_disp_layer *layer,
					 struct drm_plane_state *state)
{
	const struct drm_format_info *info = state->fb->format;
	unsigned int i;

	/*
	 * NOTE(review): the lookup result is used without a NULL check;
	 * presumably the atomic check path guarantees only supported formats
	 * reach this point — confirm against the plane format list.
	 */
	layer->disp_fmt = zynqmp_disp_layer_find_format(layer, info->format);
	layer->drm_fmt = info;

	zynqmp_disp_avbuf_set_format(&layer->disp->avbuf, layer->id,
				     layer->disp_fmt);

	/*
	 * Set slave_id for each DMA channel to indicate they're part of a
	 * video group.
	 */
	for (i = 0; i < info->num_planes; i++) {
		struct zynqmp_disp_layer_dma *dma = &layer->dmas[i];
		struct dma_slave_config config = {
			.direction = DMA_MEM_TO_DEV,
			.slave_id = 1,
		};

		dmaengine_slave_config(dma->chan, &config);
	}
}
/**
 * zynqmp_disp_layer_update - Update the layer framebuffer
 * @layer: The layer
 * @state: The plane state
 *
 * Update the framebuffer for the layer by issuing a new DMA engine transaction
 * for the new framebuffer.
 *
 * Return: 0 on success, or the DMA descriptor failure error otherwise
 */
static int zynqmp_disp_layer_update(struct zynqmp_disp_layer *layer,
				    struct drm_plane_state *state)
{
	const struct drm_format_info *info = layer->drm_fmt;
	unsigned int i;

	for (i = 0; i < layer->drm_fmt->num_planes; i++) {
		/* Chroma planes (i > 0) are subsampled by hsub/vsub. */
		unsigned int width = state->crtc_w / (i ? info->hsub : 1);
		unsigned int height = state->crtc_h / (i ? info->vsub : 1);
		struct zynqmp_disp_layer_dma *dma = &layer->dmas[i];
		struct dma_async_tx_descriptor *desc;
		dma_addr_t paddr;

		paddr = drm_fb_cma_get_gem_addr(state->fb, state, i);

		/*
		 * Describe the frame as an interleaved transfer: one chunk
		 * per line, with the inter-chunk gap covering the pitch
		 * padding.
		 */
		dma->xt.numf = height;
		dma->sgl.size = width * info->cpp[i];
		dma->sgl.icg = state->fb->pitches[i] - dma->sgl.size;
		dma->xt.src_start = paddr;
		dma->xt.frame_size = 1;
		dma->xt.dir = DMA_MEM_TO_DEV;
		dma->xt.src_sgl = true;
		dma->xt.dst_sgl = false;

		/* Repeat the frame until the next descriptor is loaded. */
		desc = dmaengine_prep_interleaved_dma(dma->chan, &dma->xt,
						      DMA_CTRL_ACK |
						      DMA_PREP_REPEAT |
						      DMA_PREP_LOAD_EOT);
		if (!desc) {
			dev_err(layer->disp->dev,
				"failed to prepare DMA descriptor\n");
			return -ENOMEM;
		}

		dmaengine_submit(desc);
		dma_async_issue_pending(dma->chan);
	}

	return 0;
}
/* Convert a DRM plane to its embedding display layer. */
static inline struct zynqmp_disp_layer *plane_to_layer(struct drm_plane *plane)
{
	return container_of(plane, struct zynqmp_disp_layer, plane);
}
/*
 * Validate the plane state against the CRTC it is attached to. Scaling is
 * not supported, so both scaling factors are pinned to NO_SCALING.
 */
static int
zynqmp_disp_plane_atomic_check(struct drm_plane *plane,
			       struct drm_plane_state *state)
{
	struct drm_crtc_state *crtc_state;

	/* Nothing to check when the plane is not attached to a CRTC. */
	if (!state->crtc)
		return 0;

	crtc_state = drm_atomic_get_crtc_state(state->state, state->crtc);
	if (IS_ERR(crtc_state))
		return PTR_ERR(crtc_state);

	return drm_atomic_helper_check_plane_state(state, crtc_state,
						   DRM_PLANE_HELPER_NO_SCALING,
						   DRM_PLANE_HELPER_NO_SCALING,
						   false, false);
}
/* Disable the layer backing the plane, if it was previously enabled. */
static void
zynqmp_disp_plane_atomic_disable(struct drm_plane *plane,
				 struct drm_plane_state *old_state)
{
	/* No framebuffer in the old state means the plane was not enabled. */
	if (!old_state->fb)
		return;

	zynqmp_disp_layer_disable(plane_to_layer(plane));
}
/* Reconfigure the layer format if needed and queue the new framebuffer. */
static void
zynqmp_disp_plane_atomic_update(struct drm_plane *plane,
				struct drm_plane_state *old_state)
{
	struct zynqmp_disp_layer *layer = plane_to_layer(plane);
	bool format_changed = false;

	if (!old_state->fb ||
	    old_state->fb->format->format != plane->state->fb->format->format)
		format_changed = true;

	/*
	 * If the format has changed (including going from a previously
	 * disabled state to any format), reconfigure the format. Disable the
	 * plane first if needed.
	 */
	if (format_changed) {
		if (old_state->fb)
			zynqmp_disp_layer_disable(layer);

		zynqmp_disp_layer_set_format(layer, plane->state);
	}

	zynqmp_disp_layer_update(layer, plane->state);

	/* Enable or re-enable the plane if the format has changed. */
	if (format_changed)
		zynqmp_disp_layer_enable(layer);
}
/* Atomic helper operations for the display layer planes. */
static const struct drm_plane_helper_funcs zynqmp_disp_plane_helper_funcs = {
	.atomic_check		= zynqmp_disp_plane_atomic_check,
	.atomic_update		= zynqmp_disp_plane_atomic_update,
	.atomic_disable		= zynqmp_disp_plane_atomic_disable,
};

/* Plane operations, all backed by the standard atomic helpers. */
static const struct drm_plane_funcs zynqmp_disp_plane_funcs = {
	.update_plane		= drm_atomic_helper_update_plane,
	.disable_plane		= drm_atomic_helper_disable_plane,
	.destroy		= drm_plane_cleanup,
	.reset			= drm_atomic_helper_plane_reset,
	.atomic_duplicate_state	= drm_atomic_helper_plane_duplicate_state,
	.atomic_destroy_state	= drm_atomic_helper_plane_destroy_state,
};
/**
 * zynqmp_disp_create_planes - Create and initialize the DRM planes
 * @disp: Display controller
 *
 * Create one DRM plane per display layer, exposing the layer's format list.
 * The graphics layer becomes the primary plane and the video layer an
 * overlay plane.
 *
 * Return: 0 on success, or a negative error code otherwise.
 */
static int zynqmp_disp_create_planes(struct zynqmp_disp *disp)
{
	unsigned int i, j;
	int ret;

	for (i = 0; i < ZYNQMP_DISP_NUM_LAYERS; i++) {
		struct zynqmp_disp_layer *layer = &disp->layers[i];
		enum drm_plane_type type;
		u32 *drm_formats;

		/*
		 * drmm_kcalloc() follows the kcalloc() convention: element
		 * count first, element size second.
		 */
		drm_formats = drmm_kcalloc(disp->drm,
					   layer->info->num_formats,
					   sizeof(*drm_formats),
					   GFP_KERNEL);
		if (!drm_formats)
			return -ENOMEM;

		for (j = 0; j < layer->info->num_formats; ++j)
			drm_formats[j] = layer->info->formats[j].drm_fmt;

		/* Graphics layer is primary, and video layer is overlay. */
		type = i == ZYNQMP_DISP_LAYER_GFX
		     ? DRM_PLANE_TYPE_PRIMARY : DRM_PLANE_TYPE_OVERLAY;
		ret = drm_universal_plane_init(disp->drm, &layer->plane, 0,
					       &zynqmp_disp_plane_funcs,
					       drm_formats,
					       layer->info->num_formats,
					       NULL, type, NULL);
		if (ret)
			return ret;

		drm_plane_helper_add(&layer->plane,
				     &zynqmp_disp_plane_helper_funcs);
	}

	return 0;
}
/**
 * zynqmp_disp_layer_release_dma - Release DMA channels for a layer
 * @disp: Display controller
 * @layer: The layer
 *
 * Release the DMA channels associated with @layer.
 */
static void zynqmp_disp_layer_release_dma(struct zynqmp_disp *disp,
					  struct zynqmp_disp_layer *layer)
{
	unsigned int i;

	/* The layer may not have been initialized if probe failed early. */
	if (!layer->info)
		return;

	for (i = 0; i < layer->info->num_channels; i++) {
		struct zynqmp_disp_layer_dma *dma = &layer->dmas[i];

		if (!dma->chan)
			continue;

		/* Make sure the channel is terminated before release. */
		dmaengine_terminate_sync(dma->chan);
		dma_release_channel(dma->chan);
	}
}
/**
 * zynqmp_disp_destroy_layers - Destroy all layers
 * @disp: Display controller
 *
 * Release the DMA channels of every display layer.
 */
static void zynqmp_disp_destroy_layers(struct zynqmp_disp *disp)
{
	unsigned int index;

	for (index = 0; index < ZYNQMP_DISP_NUM_LAYERS; index++) {
		struct zynqmp_disp_layer *layer = &disp->layers[index];

		zynqmp_disp_layer_release_dma(disp, layer);
	}
}
/**
 * zynqmp_disp_layer_request_dma - Request DMA channels for a layer
 * @disp: Display controller
 * @layer: The layer
 *
 * Request all DMA engine channels needed by @layer.
 *
 * Return: 0 on success, or the DMA channel request error otherwise
 */
static int zynqmp_disp_layer_request_dma(struct zynqmp_disp *disp,
					 struct zynqmp_disp_layer *layer)
{
	static const char * const dma_names[] = { "vid", "gfx" };
	unsigned int i;
	int ret;

	for (i = 0; i < layer->info->num_channels; i++) {
		struct zynqmp_disp_layer_dma *dma = &layer->dmas[i];
		char dma_channel_name[16];

		/* Channels are named "vid0".."vid2" and "gfx0" in DT. */
		snprintf(dma_channel_name, sizeof(dma_channel_name),
			 "%s%u", dma_names[layer->id], i);
		dma->chan = of_dma_request_slave_channel(disp->dev->of_node,
							 dma_channel_name);
		if (IS_ERR(dma->chan)) {
			dev_err(disp->dev, "failed to request dma channel\n");
			ret = PTR_ERR(dma->chan);
			/* Clear the ERR_PTR so cleanup can skip the slot. */
			dma->chan = NULL;
			return ret;
		}
	}

	return 0;
}
/**
 * zynqmp_disp_create_layers - Create and initialize all layers
 * @disp: Display controller
 *
 * Return: 0 on success, or the DMA channel request error otherwise
 */
static int zynqmp_disp_create_layers(struct zynqmp_disp *disp)
{
	/* Static per-layer description: format table and DMA channel count. */
	static const struct zynqmp_disp_layer_info layer_info[] = {
		[ZYNQMP_DISP_LAYER_VID] = {
			.formats = avbuf_vid_fmts,
			.num_formats = ARRAY_SIZE(avbuf_vid_fmts),
			.num_channels = 3,
		},
		[ZYNQMP_DISP_LAYER_GFX] = {
			.formats = avbuf_gfx_fmts,
			.num_formats = ARRAY_SIZE(avbuf_gfx_fmts),
			.num_channels = 1,
		},
	};

	unsigned int i;
	int ret;

	for (i = 0; i < ZYNQMP_DISP_NUM_LAYERS; i++) {
		struct zynqmp_disp_layer *layer = &disp->layers[i];

		layer->id = i;
		layer->disp = disp;
		layer->info = &layer_info[i];

		ret = zynqmp_disp_layer_request_dma(disp, layer);
		if (ret)
			goto err;
	}

	return 0;

err:
	/* Release the channels acquired for the layers that succeeded. */
	zynqmp_disp_destroy_layers(disp);
	return ret;
}
/* -----------------------------------------------------------------------------
 * ZynqMP Display & DRM CRTC
 */

/**
 * zynqmp_disp_enable - Enable the display controller
 * @disp: Display controller
 */
static void zynqmp_disp_enable(struct zynqmp_disp *disp)
{
	/* Release the video pipe reset before configuring it. */
	zynqmp_disp_avbuf_enable(&disp->avbuf);
	/* Choose clock source based on the DT clock handle. */
	zynqmp_disp_avbuf_set_clocks_sources(&disp->avbuf, disp->pclk_from_ps,
					     disp->audio.clk_from_ps, true);
	zynqmp_disp_avbuf_enable_channels(&disp->avbuf);
	zynqmp_disp_avbuf_enable_audio(&disp->avbuf);

	zynqmp_disp_audio_enable(&disp->audio);
}
/**
 * zynqmp_disp_disable - Disable the display controller
 * @disp: Display controller
 */
static void zynqmp_disp_disable(struct zynqmp_disp *disp)
{
	struct drm_crtc *crtc = &disp->crtc;

	zynqmp_disp_audio_disable(&disp->audio);

	zynqmp_disp_avbuf_disable_audio(&disp->avbuf);
	zynqmp_disp_avbuf_disable_channels(&disp->avbuf);
	zynqmp_disp_avbuf_disable(&disp->avbuf);

	/*
	 * Mark the flip is done as crtc is disabled anyway: no further vblank
	 * will arrive to complete the pending event.
	 */
	if (crtc->state->event) {
		complete_all(crtc->state->event->base.completion);
		crtc->state->event = NULL;
	}
}
/* Convert a DRM CRTC to its embedding display controller. */
static inline struct zynqmp_disp *crtc_to_disp(struct drm_crtc *crtc)
{
	return container_of(crtc, struct zynqmp_disp, crtc);
}
/**
 * zynqmp_disp_crtc_setup_clock - Configure the pixel clock for a mode
 * @crtc: The CRTC
 * @adjusted_mode: The mode whose pixel clock (in kHz) is requested
 *
 * Set the pixel clock to the mode's clock rate and report how far the
 * achieved rate deviates from the request (logged as info above 5%
 * deviation, as debug otherwise).
 *
 * Return: 0 on success, or the clk_set_rate() error otherwise.
 */
static int zynqmp_disp_crtc_setup_clock(struct drm_crtc *crtc,
					struct drm_display_mode *adjusted_mode)
{
	struct zynqmp_disp *disp = crtc_to_disp(crtc);
	/* DRM mode clocks are in kHz, the clk API works in Hz. */
	unsigned long mode_clock = adjusted_mode->clock * 1000;
	unsigned long rate;
	long diff;
	int ret;

	ret = clk_set_rate(disp->pclk, mode_clock);
	if (ret) {
		dev_err(disp->dev, "failed to set a pixel clock\n");
		return ret;
	}

	rate = clk_get_rate(disp->pclk);
	diff = rate - mode_clock;
	if (abs(diff) > mode_clock / 20)
		dev_info(disp->dev,
			 "requested pixel rate: %lu actual rate: %lu\n",
			 mode_clock, rate);
	else
		dev_dbg(disp->dev,
			"requested pixel rate: %lu actual rate: %lu\n",
			mode_clock, rate);

	return 0;
}
/* Power up and start the display pipeline for the new CRTC state. */
static void
zynqmp_disp_crtc_atomic_enable(struct drm_crtc *crtc,
			       struct drm_crtc_state *old_crtc_state)
{
	struct zynqmp_disp *disp = crtc_to_disp(crtc);
	struct drm_display_mode *adjusted_mode = &crtc->state->adjusted_mode;
	int ret, vrefresh;

	zynqmp_disp_crtc_setup_clock(crtc, adjusted_mode);

	pm_runtime_get_sync(disp->dev);
	ret = clk_prepare_enable(disp->pclk);
	if (ret) {
		dev_err(disp->dev, "failed to enable a pixel clock\n");
		/* Balance the runtime PM reference taken above. */
		pm_runtime_put_sync(disp->dev);
		return;
	}

	zynqmp_disp_blend_set_output_format(&disp->blend,
					    ZYNQMP_DPSUB_FORMAT_RGB);
	zynqmp_disp_blend_set_bg_color(&disp->blend, 0, 0, 0);
	zynqmp_disp_blend_set_global_alpha(&disp->blend, false, 0);

	zynqmp_disp_enable(disp);

	/* Delay of 3 vblank intervals for timing gen to be stable */
	vrefresh = (adjusted_mode->clock * 1000) /
		   (adjusted_mode->vtotal * adjusted_mode->htotal);
	msleep(3 * 1000 / vrefresh);
}
/* Stop the display pipeline and release the clock and PM references. */
static void
zynqmp_disp_crtc_atomic_disable(struct drm_crtc *crtc,
				struct drm_crtc_state *old_crtc_state)
{
	struct zynqmp_disp *disp = crtc_to_disp(crtc);
	struct drm_plane_state *old_plane_state;

	/*
	 * Disable the plane if active. The old plane state can be NULL in the
	 * .shutdown() path if the plane is already disabled, skip
	 * zynqmp_disp_plane_atomic_disable() in that case.
	 */
	old_plane_state = drm_atomic_get_old_plane_state(old_crtc_state->state,
							 crtc->primary);
	if (old_plane_state)
		zynqmp_disp_plane_atomic_disable(crtc->primary, old_plane_state);

	zynqmp_disp_disable(disp);

	drm_crtc_vblank_off(&disp->crtc);

	clk_disable_unprepare(disp->pclk);
	pm_runtime_put_sync(disp->dev);
}
/* Pull all planes on this CRTC into the atomic state for validation. */
static int zynqmp_disp_crtc_atomic_check(struct drm_crtc *crtc,
					 struct drm_crtc_state *state)
{
	return drm_atomic_add_affected_planes(state->state, crtc);
}
/* Re-enable vblank reporting before the planes are committed. */
static void
zynqmp_disp_crtc_atomic_begin(struct drm_crtc *crtc,
			      struct drm_crtc_state *old_crtc_state)
{
	drm_crtc_vblank_on(crtc);
}
/* Arm the pending page-flip event to fire on the next vblank. */
static void
zynqmp_disp_crtc_atomic_flush(struct drm_crtc *crtc,
			      struct drm_crtc_state *old_crtc_state)
{
	if (crtc->state->event) {
		struct drm_pending_vblank_event *event;

		/* Consume the flip_done event from atomic helper. */
		event = crtc->state->event;
		crtc->state->event = NULL;

		event->pipe = drm_crtc_index(crtc);

		/* Hold a vblank reference until the event is delivered. */
		WARN_ON(drm_crtc_vblank_get(crtc) != 0);

		spin_lock_irq(&crtc->dev->event_lock);
		drm_crtc_arm_vblank_event(crtc, event);
		spin_unlock_irq(&crtc->dev->event_lock);
	}
}
/* Atomic helper operations for the display CRTC. */
static const struct drm_crtc_helper_funcs zynqmp_disp_crtc_helper_funcs = {
	.atomic_enable	= zynqmp_disp_crtc_atomic_enable,
	.atomic_disable	= zynqmp_disp_crtc_atomic_disable,
	.atomic_check	= zynqmp_disp_crtc_atomic_check,
	.atomic_begin	= zynqmp_disp_crtc_atomic_begin,
	.atomic_flush	= zynqmp_disp_crtc_atomic_flush,
};
static int zynqmp_disp_crtc_enable_vblank(struct drm_crtc *crtc)
{
struct zynqmp_disp *disp = crtc_to_disp(crtc);
zynqmp_dp_enable_vblank(disp->dpsub->dp);
return 0;
}
static void zynqmp_disp_crtc_disable_vblank(struct drm_crtc *crtc)
{
struct zynqmp_disp *disp = crtc_to_disp(crtc);
zynqmp_dp_disable_vblank(disp->dpsub->dp);
}
/* CRTC operations, mostly backed by the standard atomic helpers. */
static const struct drm_crtc_funcs zynqmp_disp_crtc_funcs = {
	.destroy		= drm_crtc_cleanup,
	.set_config		= drm_atomic_helper_set_config,
	.page_flip		= drm_atomic_helper_page_flip,
	.reset			= drm_atomic_helper_crtc_reset,
	.atomic_duplicate_state	= drm_atomic_helper_crtc_duplicate_state,
	.atomic_destroy_state	= drm_atomic_helper_crtc_destroy_state,
	.enable_vblank		= zynqmp_disp_crtc_enable_vblank,
	.disable_vblank		= zynqmp_disp_crtc_disable_vblank,
};
/*
 * Create the CRTC using the graphics layer's plane as the primary plane.
 * Returns 0 on success, or a negative error code otherwise.
 */
static int zynqmp_disp_create_crtc(struct zynqmp_disp *disp)
{
	struct drm_plane *plane = &disp->layers[ZYNQMP_DISP_LAYER_GFX].plane;
	int ret;

	ret = drm_crtc_init_with_planes(disp->drm, &disp->crtc, plane,
					NULL, &zynqmp_disp_crtc_funcs, NULL);
	if (ret < 0)
		return ret;

	drm_crtc_helper_add(&disp->crtc, &zynqmp_disp_crtc_helper_funcs);

	/* Start with vertical blanking interrupt reporting disabled. */
	drm_crtc_vblank_off(&disp->crtc);

	return 0;
}
/* Bind every layer's plane to the single CRTC of the subsystem. */
static void zynqmp_disp_map_crtc_to_plane(struct zynqmp_disp *disp)
{
	u32 crtc_mask = drm_crtc_mask(&disp->crtc);
	unsigned int layer;

	for (layer = 0; layer < ZYNQMP_DISP_NUM_LAYERS; layer++)
		disp->layers[layer].plane.possible_crtcs = crtc_mask;
}
/* -----------------------------------------------------------------------------
* Initialization & Cleanup
*/
/**
 * zynqmp_disp_drm_init - Initialize the DRM objects of the display controller
 * @dpsub: Display subsystem
 *
 * Create the planes and the CRTC, and map the planes to the CRTC.
 *
 * Return: 0 on success, or a negative error code otherwise.
 */
int zynqmp_disp_drm_init(struct zynqmp_dpsub *dpsub)
{
	struct zynqmp_disp *disp = dpsub->disp;
	int ret;

	ret = zynqmp_disp_create_planes(disp);
	if (ret)
		return ret;

	/*
	 * zynqmp_disp_create_crtc() returns 0 or a negative error code, so
	 * use the same error-check style as above for consistency.
	 */
	ret = zynqmp_disp_create_crtc(disp);
	if (ret)
		return ret;

	zynqmp_disp_map_crtc_to_plane(disp);

	return 0;
}
/*
 * Probe the display controller: allocate state, map the register banks,
 * acquire the pixel clock, and create the layers.
 */
int zynqmp_disp_probe(struct zynqmp_dpsub *dpsub, struct drm_device *drm)
{
	struct platform_device *pdev = to_platform_device(dpsub->dev);
	struct zynqmp_disp *disp;
	struct zynqmp_disp_layer *layer;
	struct resource *res;
	int ret;

	/* DRM-managed allocation, freed automatically with the drm_device. */
	disp = drmm_kzalloc(drm, sizeof(*disp), GFP_KERNEL);
	if (!disp)
		return -ENOMEM;

	disp->dev = &pdev->dev;
	disp->dpsub = dpsub;
	disp->drm = drm;
	dpsub->disp = disp;

	/* Map the three register banks: blender, A/V buffer manager, audio. */
	res = platform_get_resource_byname(pdev, IORESOURCE_MEM, "blend");
	disp->blend.base = devm_ioremap_resource(disp->dev, res);
	if (IS_ERR(disp->blend.base))
		return PTR_ERR(disp->blend.base);

	res = platform_get_resource_byname(pdev, IORESOURCE_MEM, "av_buf");
	disp->avbuf.base = devm_ioremap_resource(disp->dev, res);
	if (IS_ERR(disp->avbuf.base))
		return PTR_ERR(disp->avbuf.base);

	res = platform_get_resource_byname(pdev, IORESOURCE_MEM, "aud");
	disp->audio.base = devm_ioremap_resource(disp->dev, res);
	if (IS_ERR(disp->audio.base))
		return PTR_ERR(disp->audio.base);

	/* Try the live PL video clock */
	disp->pclk = devm_clk_get(disp->dev, "dp_live_video_in_clk");
	if (!IS_ERR(disp->pclk))
		disp->pclk_from_ps = false;
	else if (PTR_ERR(disp->pclk) == -EPROBE_DEFER)
		/* Propagate deferral so probing is retried later. */
		return PTR_ERR(disp->pclk);

	/* If the live PL video clock is not valid, fall back to PS clock */
	if (IS_ERR_OR_NULL(disp->pclk)) {
		disp->pclk = devm_clk_get(disp->dev, "dp_vtc_pixel_clk_in");
		if (IS_ERR(disp->pclk)) {
			dev_err(disp->dev, "failed to init any video clock\n");
			return PTR_ERR(disp->pclk);
		}
		disp->pclk_from_ps = true;
	}

	zynqmp_disp_audio_init(disp->dev, &disp->audio);

	ret = zynqmp_disp_create_layers(disp);
	if (ret)
		return ret;

	/*
	 * Derive the subsystem's DMA buffer alignment from the video layer's
	 * first DMA channel capability.
	 */
	layer = &disp->layers[ZYNQMP_DISP_LAYER_VID];
	dpsub->dma_align = 1 << layer->dmas[0].chan->device->copy_align;

	return 0;
}
/* Tear down the layers created by zynqmp_disp_probe(). */
void zynqmp_disp_remove(struct zynqmp_dpsub *dpsub)
{
	zynqmp_disp_destroy_layers(dpsub->disp);
}
/* SPDX-License-Identifier: GPL-2.0 */
/*
* ZynqMP Display Driver
*
* Copyright (C) 2017 - 2020 Xilinx, Inc.
*
* Authors:
* - Hyun Woo Kwon <hyun.kwon@xilinx.com>
* - Laurent Pinchart <laurent.pinchart@ideasonboard.com>
*/
#ifndef _ZYNQMP_DISP_H_
#define _ZYNQMP_DISP_H_
#include <linux/types.h>
/*
 * 3840x2160 is advertised as the maximum resolution, but almost any
 * resolution under a 300 MHz pixel rate would work. Pick 4096x4096.
 */
#define ZYNQMP_DISP_MAX_WIDTH 4096
#define ZYNQMP_DISP_MAX_HEIGHT 4096
/* The DPDMA is limited to 44 bit addressing. */
#define ZYNQMP_DISP_MAX_DMA_BIT 44
struct device;
struct drm_device;
struct platform_device;
struct zynqmp_disp;
struct zynqmp_dpsub;
void zynqmp_disp_handle_vblank(struct zynqmp_disp *disp);
bool zynqmp_disp_audio_enabled(struct zynqmp_disp *disp);
unsigned int zynqmp_disp_get_audio_clk_rate(struct zynqmp_disp *disp);
uint32_t zynqmp_disp_get_crtc_mask(struct zynqmp_disp *disp);
int zynqmp_disp_drm_init(struct zynqmp_dpsub *dpsub);
int zynqmp_disp_probe(struct zynqmp_dpsub *dpsub, struct drm_device *drm);
void zynqmp_disp_remove(struct zynqmp_dpsub *dpsub);
#endif /* _ZYNQMP_DISP_H_ */
/* SPDX-License-Identifier: GPL-2.0 */
/*
* ZynqMP Display Controller Driver - Register Definitions
*
* Copyright (C) 2017 - 2020 Xilinx, Inc.
*
* Authors:
* - Hyun Woo Kwon <hyun.kwon@xilinx.com>
* - Laurent Pinchart <laurent.pinchart@ideasonboard.com>
*/
#ifndef _ZYNQMP_DISP_REGS_H_
#define _ZYNQMP_DISP_REGS_H_
#include <linux/bits.h>
/* Blender registers */
#define ZYNQMP_DISP_V_BLEND_BG_CLR_0 0x0
#define ZYNQMP_DISP_V_BLEND_BG_CLR_1 0x4
#define ZYNQMP_DISP_V_BLEND_BG_CLR_2 0x8
#define ZYNQMP_DISP_V_BLEND_BG_MAX 0xfff
#define ZYNQMP_DISP_V_BLEND_SET_GLOBAL_ALPHA 0xc
#define ZYNQMP_DISP_V_BLEND_SET_GLOBAL_ALPHA_VALUE(n) ((n) << 1)
#define ZYNQMP_DISP_V_BLEND_SET_GLOBAL_ALPHA_EN BIT(0)
#define ZYNQMP_DISP_V_BLEND_OUTPUT_VID_FMT 0x14
#define ZYNQMP_DISP_V_BLEND_OUTPUT_VID_FMT_RGB 0x0
#define ZYNQMP_DISP_V_BLEND_OUTPUT_VID_FMT_YCBCR444 0x1
#define ZYNQMP_DISP_V_BLEND_OUTPUT_VID_FMT_YCBCR422 0x2
#define ZYNQMP_DISP_V_BLEND_OUTPUT_VID_FMT_YONLY 0x3
#define ZYNQMP_DISP_V_BLEND_OUTPUT_VID_FMT_XVYCC 0x4
#define ZYNQMP_DISP_V_BLEND_OUTPUT_VID_FMT_EN_DOWNSAMPLE BIT(4)
#define ZYNQMP_DISP_V_BLEND_LAYER_CONTROL(n) (0x18 + ((n) * 4))
#define ZYNQMP_DISP_V_BLEND_LAYER_CONTROL_EN_US BIT(0)
#define ZYNQMP_DISP_V_BLEND_LAYER_CONTROL_RGB BIT(1)
#define ZYNQMP_DISP_V_BLEND_LAYER_CONTROL_BYPASS BIT(8)
#define ZYNQMP_DISP_V_BLEND_NUM_COEFF 9
#define ZYNQMP_DISP_V_BLEND_NUM_OFFSET 3
#define ZYNQMP_DISP_V_BLEND_RGB2YCBCR_COEFF(n) (0x20 + ((n) * 4))
#define ZYNQMP_DISP_V_BLEND_IN1CSC_COEFF(n) (0x44 + ((n) * 4))
#define ZYNQMP_DISP_V_BLEND_IN1CSC_OFFSET(n) (0x68 + ((n) * 4))
#define ZYNQMP_DISP_V_BLEND_OUTCSC_OFFSET(n) (0x74 + ((n) * 4))
#define ZYNQMP_DISP_V_BLEND_IN2CSC_COEFF(n) (0x80 + ((n) * 4))
#define ZYNQMP_DISP_V_BLEND_IN2CSC_OFFSET(n) (0xa4 + ((n) * 4))
#define ZYNQMP_DISP_V_BLEND_CHROMA_KEY_ENABLE 0x1d0
#define ZYNQMP_DISP_V_BLEND_CHROMA_KEY_COMP1 0x1d4
#define ZYNQMP_DISP_V_BLEND_CHROMA_KEY_COMP2 0x1d8
#define ZYNQMP_DISP_V_BLEND_CHROMA_KEY_COMP3 0x1dc
/* AV buffer manager registers */
#define ZYNQMP_DISP_AV_BUF_FMT 0x0
#define ZYNQMP_DISP_AV_BUF_FMT_NL_VID_SHIFT 0
#define ZYNQMP_DISP_AV_BUF_FMT_NL_VID_MASK (0x1f << 0)
#define ZYNQMP_DISP_AV_BUF_FMT_NL_VID_UYVY (0 << 0)
#define ZYNQMP_DISP_AV_BUF_FMT_NL_VID_VYUY (1 << 0)
#define ZYNQMP_DISP_AV_BUF_FMT_NL_VID_YVYU (2 << 0)
#define ZYNQMP_DISP_AV_BUF_FMT_NL_VID_YUYV (3 << 0)
#define ZYNQMP_DISP_AV_BUF_FMT_NL_VID_YV16 (4 << 0)
#define ZYNQMP_DISP_AV_BUF_FMT_NL_VID_YV24 (5 << 0)
#define ZYNQMP_DISP_AV_BUF_FMT_NL_VID_YV16CI (6 << 0)
#define ZYNQMP_DISP_AV_BUF_FMT_NL_VID_MONO (7 << 0)
#define ZYNQMP_DISP_AV_BUF_FMT_NL_VID_YV16CI2 (8 << 0)
#define ZYNQMP_DISP_AV_BUF_FMT_NL_VID_YUV444 (9 << 0)
#define ZYNQMP_DISP_AV_BUF_FMT_NL_VID_RGB888 (10 << 0)
#define ZYNQMP_DISP_AV_BUF_FMT_NL_VID_RGBA8880 (11 << 0)
#define ZYNQMP_DISP_AV_BUF_FMT_NL_VID_RGB888_10 (12 << 0)
#define ZYNQMP_DISP_AV_BUF_FMT_NL_VID_YUV444_10 (13 << 0)
#define ZYNQMP_DISP_AV_BUF_FMT_NL_VID_YV16CI2_10 (14 << 0)
#define ZYNQMP_DISP_AV_BUF_FMT_NL_VID_YV16CI_10 (15 << 0)
#define ZYNQMP_DISP_AV_BUF_FMT_NL_VID_YV16_10 (16 << 0)
#define ZYNQMP_DISP_AV_BUF_FMT_NL_VID_YV24_10 (17 << 0)
#define ZYNQMP_DISP_AV_BUF_FMT_NL_VID_YONLY_10 (18 << 0)
#define ZYNQMP_DISP_AV_BUF_FMT_NL_VID_YV16_420 (19 << 0)
#define ZYNQMP_DISP_AV_BUF_FMT_NL_VID_YV16CI_420 (20 << 0)
#define ZYNQMP_DISP_AV_BUF_FMT_NL_VID_YV16CI2_420 (21 << 0)
#define ZYNQMP_DISP_AV_BUF_FMT_NL_VID_YV16_420_10 (22 << 0)
#define ZYNQMP_DISP_AV_BUF_FMT_NL_VID_YV16CI_420_10 (23 << 0)
#define ZYNQMP_DISP_AV_BUF_FMT_NL_VID_YV16CI2_420_10 (24 << 0)
#define ZYNQMP_DISP_AV_BUF_FMT_NL_GFX_SHIFT 8
#define ZYNQMP_DISP_AV_BUF_FMT_NL_GFX_MASK (0xf << 8)
#define ZYNQMP_DISP_AV_BUF_FMT_NL_GFX_RGBA8888 (0 << 8)
#define ZYNQMP_DISP_AV_BUF_FMT_NL_GFX_ABGR8888 (1 << 8)
#define ZYNQMP_DISP_AV_BUF_FMT_NL_GFX_RGB888 (2 << 8)
#define ZYNQMP_DISP_AV_BUF_FMT_NL_GFX_BGR888 (3 << 8)
#define ZYNQMP_DISP_AV_BUF_FMT_NL_GFX_RGBA5551 (4 << 8)
#define ZYNQMP_DISP_AV_BUF_FMT_NL_GFX_RGBA4444 (5 << 8)
#define ZYNQMP_DISP_AV_BUF_FMT_NL_GFX_RGB565 (6 << 8)
#define ZYNQMP_DISP_AV_BUF_FMT_NL_GFX_8BPP (7 << 8)
#define ZYNQMP_DISP_AV_BUF_FMT_NL_GFX_4BPP (8 << 8)
#define ZYNQMP_DISP_AV_BUF_FMT_NL_GFX_2BPP (9 << 8)
#define ZYNQMP_DISP_AV_BUF_FMT_NL_GFX_1BPP (10 << 8)
#define ZYNQMP_DISP_AV_BUF_NON_LIVE_LATENCY 0x8
#define ZYNQMP_DISP_AV_BUF_CHBUF(n) (0x10 + ((n) * 4))
#define ZYNQMP_DISP_AV_BUF_CHBUF_EN BIT(0)
#define ZYNQMP_DISP_AV_BUF_CHBUF_FLUSH BIT(1)
#define ZYNQMP_DISP_AV_BUF_CHBUF_BURST_LEN_SHIFT 2
#define ZYNQMP_DISP_AV_BUF_CHBUF_BURST_LEN_MASK (0xf << 2)
#define ZYNQMP_DISP_AV_BUF_CHBUF_BURST_LEN_MAX 0xf
#define ZYNQMP_DISP_AV_BUF_CHBUF_BURST_LEN_AUD_MAX 0x3
#define ZYNQMP_DISP_AV_BUF_STATUS 0x28
#define ZYNQMP_DISP_AV_BUF_STC_CTRL 0x2c
#define ZYNQMP_DISP_AV_BUF_STC_CTRL_EN BIT(0)
#define ZYNQMP_DISP_AV_BUF_STC_CTRL_EVENT_SHIFT 1
#define ZYNQMP_DISP_AV_BUF_STC_CTRL_EVENT_EX_VSYNC 0
#define ZYNQMP_DISP_AV_BUF_STC_CTRL_EVENT_EX_VID 1
#define ZYNQMP_DISP_AV_BUF_STC_CTRL_EVENT_EX_AUD 2
#define ZYNQMP_DISP_AV_BUF_STC_CTRL_EVENT_INT_VSYNC 3
#define ZYNQMP_DISP_AV_BUF_STC_INIT_VALUE0 0x30
#define ZYNQMP_DISP_AV_BUF_STC_INIT_VALUE1 0x34
#define ZYNQMP_DISP_AV_BUF_STC_ADJ 0x38
#define ZYNQMP_DISP_AV_BUF_STC_VID_VSYNC_TS0 0x3c
#define ZYNQMP_DISP_AV_BUF_STC_VID_VSYNC_TS1 0x40
#define ZYNQMP_DISP_AV_BUF_STC_EXT_VSYNC_TS0 0x44
#define ZYNQMP_DISP_AV_BUF_STC_EXT_VSYNC_TS1 0x48
#define ZYNQMP_DISP_AV_BUF_STC_CUSTOM_EVENT_TS0 0x4c
#define ZYNQMP_DISP_AV_BUF_STC_CUSTOM_EVENT_TS1 0x50
#define ZYNQMP_DISP_AV_BUF_STC_CUSTOM_EVENT2_TS0 0x54
#define ZYNQMP_DISP_AV_BUF_STC_CUSTOM_EVENT2_TS1 0x58
#define ZYNQMP_DISP_AV_BUF_STC_SNAPSHOT0 0x60
#define ZYNQMP_DISP_AV_BUF_STC_SNAPSHOT1 0x64
#define ZYNQMP_DISP_AV_BUF_OUTPUT 0x70
#define ZYNQMP_DISP_AV_BUF_OUTPUT_VID1_SHIFT 0
#define ZYNQMP_DISP_AV_BUF_OUTPUT_VID1_MASK (0x3 << 0)
#define ZYNQMP_DISP_AV_BUF_OUTPUT_VID1_LIVE (0 << 0)
#define ZYNQMP_DISP_AV_BUF_OUTPUT_VID1_MEM (1 << 0)
#define ZYNQMP_DISP_AV_BUF_OUTPUT_VID1_PATTERN (2 << 0)
#define ZYNQMP_DISP_AV_BUF_OUTPUT_VID1_NONE (3 << 0)
#define ZYNQMP_DISP_AV_BUF_OUTPUT_VID2_SHIFT 2
#define ZYNQMP_DISP_AV_BUF_OUTPUT_VID2_MASK (0x3 << 2)
#define ZYNQMP_DISP_AV_BUF_OUTPUT_VID2_DISABLE (0 << 2)
#define ZYNQMP_DISP_AV_BUF_OUTPUT_VID2_MEM (1 << 2)
#define ZYNQMP_DISP_AV_BUF_OUTPUT_VID2_LIVE (2 << 2)
#define ZYNQMP_DISP_AV_BUF_OUTPUT_VID2_NONE (3 << 2)
#define ZYNQMP_DISP_AV_BUF_OUTPUT_AUD1_SHIFT 4
#define ZYNQMP_DISP_AV_BUF_OUTPUT_AUD1_MASK (0x3 << 4)
#define ZYNQMP_DISP_AV_BUF_OUTPUT_AUD1_PL (0 << 4)
#define ZYNQMP_DISP_AV_BUF_OUTPUT_AUD1_MEM (1 << 4)
#define ZYNQMP_DISP_AV_BUF_OUTPUT_AUD1_PATTERN (2 << 4)
#define ZYNQMP_DISP_AV_BUF_OUTPUT_AUD1_DISABLE (3 << 4)
#define ZYNQMP_DISP_AV_BUF_OUTPUT_AUD2_EN BIT(6)
#define ZYNQMP_DISP_AV_BUF_HCOUNT_VCOUNT_INT0 0x74
#define ZYNQMP_DISP_AV_BUF_HCOUNT_VCOUNT_INT1 0x78
#define ZYNQMP_DISP_AV_BUF_PATTERN_GEN_SELECT 0x100
#define ZYNQMP_DISP_AV_BUF_CLK_SRC 0x120
#define ZYNQMP_DISP_AV_BUF_CLK_SRC_VID_FROM_PS BIT(0)
#define ZYNQMP_DISP_AV_BUF_CLK_SRC_AUD_FROM_PS BIT(1)
#define ZYNQMP_DISP_AV_BUF_CLK_SRC_VID_INTERNAL_TIMING BIT(2)
#define ZYNQMP_DISP_AV_BUF_SRST_REG 0x124
#define ZYNQMP_DISP_AV_BUF_SRST_REG_VID_RST BIT(1)
#define ZYNQMP_DISP_AV_BUF_AUDIO_CH_CONFIG 0x12c
#define ZYNQMP_DISP_AV_BUF_GFX_COMP_SF(n) (0x200 + ((n) * 4))
#define ZYNQMP_DISP_AV_BUF_VID_COMP_SF(n) (0x20c + ((n) * 4))
#define ZYNQMP_DISP_AV_BUF_LIVD_VID_COMP_SF(n) (0x218 + ((n) * 4))
#define ZYNQMP_DISP_AV_BUF_LIVE_VID_CONFIG 0x224
#define ZYNQMP_DISP_AV_BUF_LIVD_GFX_COMP_SF(n) (0x228 + ((n) * 4))
#define ZYNQMP_DISP_AV_BUF_LIVE_GFX_CONFIG 0x234
#define ZYNQMP_DISP_AV_BUF_4BIT_SF 0x11111
#define ZYNQMP_DISP_AV_BUF_5BIT_SF 0x10842
#define ZYNQMP_DISP_AV_BUF_6BIT_SF 0x10410
#define ZYNQMP_DISP_AV_BUF_8BIT_SF 0x10101
#define ZYNQMP_DISP_AV_BUF_10BIT_SF 0x10040
#define ZYNQMP_DISP_AV_BUF_NULL_SF 0
#define ZYNQMP_DISP_AV_BUF_NUM_SF 3
#define ZYNQMP_DISP_AV_BUF_LIVE_CONFIG_BPC_6 0x0
#define ZYNQMP_DISP_AV_BUF_LIVE_CONFIG_BPC_8 0x1
#define ZYNQMP_DISP_AV_BUF_LIVE_CONFIG_BPC_10 0x2
#define ZYNQMP_DISP_AV_BUF_LIVE_CONFIG_BPC_12 0x3
#define ZYNQMP_DISP_AV_BUF_LIVE_CONFIG_BPC_MASK GENMASK(2, 0)
#define ZYNQMP_DISP_AV_BUF_LIVE_CONFIG_FMT_RGB 0x0
#define ZYNQMP_DISP_AV_BUF_LIVE_CONFIG_FMT_YUV444 0x1
#define ZYNQMP_DISP_AV_BUF_LIVE_CONFIG_FMT_YUV422 0x2
#define ZYNQMP_DISP_AV_BUF_LIVE_CONFIG_FMT_YONLY 0x3
#define ZYNQMP_DISP_AV_BUF_LIVE_CONFIG_FMT_MASK GENMASK(5, 4)
#define ZYNQMP_DISP_AV_BUF_LIVE_CONFIG_CB_FIRST BIT(8)
#define ZYNQMP_DISP_AV_BUF_PALETTE_MEMORY 0x400
/* Audio registers */
#define ZYNQMP_DISP_AUD_MIXER_VOLUME 0x0
#define ZYNQMP_DISP_AUD_MIXER_VOLUME_NO_SCALE 0x20002000
#define ZYNQMP_DISP_AUD_MIXER_META_DATA 0x4
#define ZYNQMP_DISP_AUD_CH_STATUS0 0x8
#define ZYNQMP_DISP_AUD_CH_STATUS1 0xc
#define ZYNQMP_DISP_AUD_CH_STATUS2 0x10
#define ZYNQMP_DISP_AUD_CH_STATUS3 0x14
#define ZYNQMP_DISP_AUD_CH_STATUS4 0x18
#define ZYNQMP_DISP_AUD_CH_STATUS5 0x1c
#define ZYNQMP_DISP_AUD_CH_A_DATA0 0x20
#define ZYNQMP_DISP_AUD_CH_A_DATA1 0x24
#define ZYNQMP_DISP_AUD_CH_A_DATA2 0x28
#define ZYNQMP_DISP_AUD_CH_A_DATA3 0x2c
#define ZYNQMP_DISP_AUD_CH_A_DATA4 0x30
#define ZYNQMP_DISP_AUD_CH_A_DATA5 0x34
#define ZYNQMP_DISP_AUD_CH_B_DATA0 0x38
#define ZYNQMP_DISP_AUD_CH_B_DATA1 0x3c
#define ZYNQMP_DISP_AUD_CH_B_DATA2 0x40
#define ZYNQMP_DISP_AUD_CH_B_DATA3 0x44
#define ZYNQMP_DISP_AUD_CH_B_DATA4 0x48
#define ZYNQMP_DISP_AUD_CH_B_DATA5 0x4c
#define ZYNQMP_DISP_AUD_SOFT_RESET 0xc00
#define ZYNQMP_DISP_AUD_SOFT_RESET_AUD_SRST BIT(0)
#endif /* _ZYNQMP_DISP_REGS_H_ */
// SPDX-License-Identifier: GPL-2.0
/*
* ZynqMP DisplayPort Driver
*
* Copyright (C) 2017 - 2020 Xilinx, Inc.
*
* Authors:
* - Hyun Woo Kwon <hyun.kwon@xilinx.com>
* - Laurent Pinchart <laurent.pinchart@ideasonboard.com>
*/
#include <drm/drm_atomic_helper.h>
#include <drm/drm_connector.h>
#include <drm/drm_crtc.h>
#include <drm/drm_device.h>
#include <drm/drm_dp_helper.h>
#include <drm/drm_edid.h>
#include <drm/drm_encoder.h>
#include <drm/drm_managed.h>
#include <drm/drm_modes.h>
#include <drm/drm_of.h>
#include <drm/drm_probe_helper.h>
#include <drm/drm_simple_kms_helper.h>
#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/device.h>
#include <linux/module.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/phy/phy.h>
#include <linux/reset.h>
#include "zynqmp_disp.h"
#include "zynqmp_dp.h"
#include "zynqmp_dpsub.h"
/* AUX transaction timeout, tunable at module load time. */
static uint zynqmp_dp_aux_timeout_ms = 50;
module_param_named(aux_timeout_ms, zynqmp_dp_aux_timeout_ms, uint, 0444);
MODULE_PARM_DESC(aux_timeout_ms, "DP aux timeout value in msec (default: 50)");

/*
 * Some sinks require a delay after a power on request.
 */
static uint zynqmp_dp_power_on_delay_ms = 4;
module_param_named(power_on_delay_ms, zynqmp_dp_power_on_delay_ms, uint, 0444);
/*
 * Fix: this description was erroneously attached to aux_timeout_ms,
 * overwriting its help text and leaving power_on_delay_ms undocumented.
 */
MODULE_PARM_DESC(power_on_delay_ms, "DP power on delay in msec (default: 4)");
/* Link configuration registers */
#define ZYNQMP_DP_LINK_BW_SET 0x0
#define ZYNQMP_DP_LANE_COUNT_SET 0x4
#define ZYNQMP_DP_ENHANCED_FRAME_EN 0x8
#define ZYNQMP_DP_TRAINING_PATTERN_SET 0xc
#define ZYNQMP_DP_SCRAMBLING_DISABLE 0x14
#define ZYNQMP_DP_DOWNSPREAD_CTL 0x18
#define ZYNQMP_DP_SOFTWARE_RESET 0x1c
#define ZYNQMP_DP_SOFTWARE_RESET_STREAM1 BIT(0)
#define ZYNQMP_DP_SOFTWARE_RESET_STREAM2 BIT(1)
#define ZYNQMP_DP_SOFTWARE_RESET_STREAM3 BIT(2)
#define ZYNQMP_DP_SOFTWARE_RESET_STREAM4 BIT(3)
#define ZYNQMP_DP_SOFTWARE_RESET_AUX BIT(7)
#define ZYNQMP_DP_SOFTWARE_RESET_ALL (ZYNQMP_DP_SOFTWARE_RESET_STREAM1 | \
ZYNQMP_DP_SOFTWARE_RESET_STREAM2 | \
ZYNQMP_DP_SOFTWARE_RESET_STREAM3 | \
ZYNQMP_DP_SOFTWARE_RESET_STREAM4 | \
ZYNQMP_DP_SOFTWARE_RESET_AUX)
/* Core enable registers */
#define ZYNQMP_DP_TRANSMITTER_ENABLE 0x80
#define ZYNQMP_DP_MAIN_STREAM_ENABLE 0x84
#define ZYNQMP_DP_FORCE_SCRAMBLER_RESET 0xc0
#define ZYNQMP_DP_VERSION 0xf8
#define ZYNQMP_DP_VERSION_MAJOR_MASK GENMASK(31, 24)
#define ZYNQMP_DP_VERSION_MAJOR_SHIFT 24
#define ZYNQMP_DP_VERSION_MINOR_MASK GENMASK(23, 16)
#define ZYNQMP_DP_VERSION_MINOR_SHIFT 16
#define ZYNQMP_DP_VERSION_REVISION_MASK GENMASK(15, 12)
#define ZYNQMP_DP_VERSION_REVISION_SHIFT 12
#define ZYNQMP_DP_VERSION_PATCH_MASK GENMASK(11, 8)
#define ZYNQMP_DP_VERSION_PATCH_SHIFT 8
#define ZYNQMP_DP_VERSION_INTERNAL_MASK GENMASK(7, 0)
#define ZYNQMP_DP_VERSION_INTERNAL_SHIFT 0
/* Core ID registers */
#define ZYNQMP_DP_CORE_ID 0xfc
#define ZYNQMP_DP_CORE_ID_MAJOR_MASK GENMASK(31, 24)
#define ZYNQMP_DP_CORE_ID_MAJOR_SHIFT 24
#define ZYNQMP_DP_CORE_ID_MINOR_MASK GENMASK(23, 16)
#define ZYNQMP_DP_CORE_ID_MINOR_SHIFT 16
#define ZYNQMP_DP_CORE_ID_REVISION_MASK GENMASK(15, 8)
#define ZYNQMP_DP_CORE_ID_REVISION_SHIFT 8
#define ZYNQMP_DP_CORE_ID_DIRECTION GENMASK(1)
/* AUX channel interface registers */
#define ZYNQMP_DP_AUX_COMMAND 0x100
#define ZYNQMP_DP_AUX_COMMAND_CMD_SHIFT 8
#define ZYNQMP_DP_AUX_COMMAND_ADDRESS_ONLY BIT(12)
#define ZYNQMP_DP_AUX_COMMAND_BYTES_SHIFT 0
#define ZYNQMP_DP_AUX_WRITE_FIFO 0x104
#define ZYNQMP_DP_AUX_ADDRESS 0x108
#define ZYNQMP_DP_AUX_CLK_DIVIDER 0x10c
#define ZYNQMP_DP_AUX_CLK_DIVIDER_AUX_FILTER_SHIFT 8
#define ZYNQMP_DP_INTERRUPT_SIGNAL_STATE 0x130
#define ZYNQMP_DP_INTERRUPT_SIGNAL_STATE_HPD BIT(0)
#define ZYNQMP_DP_INTERRUPT_SIGNAL_STATE_REQUEST BIT(1)
#define ZYNQMP_DP_INTERRUPT_SIGNAL_STATE_REPLY BIT(2)
#define ZYNQMP_DP_INTERRUPT_SIGNAL_STATE_REPLY_TIMEOUT BIT(3)
#define ZYNQMP_DP_AUX_REPLY_DATA 0x134
#define ZYNQMP_DP_AUX_REPLY_CODE 0x138
#define ZYNQMP_DP_AUX_REPLY_CODE_AUX_ACK (0)
#define ZYNQMP_DP_AUX_REPLY_CODE_AUX_NACK BIT(0)
#define ZYNQMP_DP_AUX_REPLY_CODE_AUX_DEFER BIT(1)
#define ZYNQMP_DP_AUX_REPLY_CODE_I2C_ACK (0)
#define ZYNQMP_DP_AUX_REPLY_CODE_I2C_NACK BIT(2)
#define ZYNQMP_DP_AUX_REPLY_CODE_I2C_DEFER BIT(3)
#define ZYNQMP_DP_AUX_REPLY_COUNT 0x13c
#define ZYNQMP_DP_REPLY_DATA_COUNT 0x148
#define ZYNQMP_DP_REPLY_DATA_COUNT_MASK 0xff
#define ZYNQMP_DP_INT_STATUS 0x3a0
#define ZYNQMP_DP_INT_MASK 0x3a4
#define ZYNQMP_DP_INT_EN 0x3a8
#define ZYNQMP_DP_INT_DS 0x3ac
#define ZYNQMP_DP_INT_HPD_IRQ BIT(0)
#define ZYNQMP_DP_INT_HPD_EVENT BIT(1)
#define ZYNQMP_DP_INT_REPLY_RECEIVED BIT(2)
#define ZYNQMP_DP_INT_REPLY_TIMEOUT BIT(3)
#define ZYNQMP_DP_INT_HPD_PULSE_DET BIT(4)
#define ZYNQMP_DP_INT_EXT_PKT_TXD BIT(5)
#define ZYNQMP_DP_INT_LIV_ABUF_UNDRFLW BIT(12)
#define ZYNQMP_DP_INT_VBLANK_START BIT(13)
#define ZYNQMP_DP_INT_PIXEL1_MATCH BIT(14)
#define ZYNQMP_DP_INT_PIXEL0_MATCH BIT(15)
#define ZYNQMP_DP_INT_CHBUF_UNDERFLW_MASK 0x3f0000
#define ZYNQMP_DP_INT_CHBUF_OVERFLW_MASK 0xfc00000
#define ZYNQMP_DP_INT_CUST_TS_2 BIT(28)
#define ZYNQMP_DP_INT_CUST_TS BIT(29)
#define ZYNQMP_DP_INT_EXT_VSYNC_TS BIT(30)
#define ZYNQMP_DP_INT_VSYNC_TS BIT(31)
#define ZYNQMP_DP_INT_ALL (ZYNQMP_DP_INT_HPD_IRQ | \
ZYNQMP_DP_INT_HPD_EVENT | \
ZYNQMP_DP_INT_CHBUF_UNDERFLW_MASK | \
ZYNQMP_DP_INT_CHBUF_OVERFLW_MASK)
/* Main stream attribute registers */
#define ZYNQMP_DP_MAIN_STREAM_HTOTAL 0x180
#define ZYNQMP_DP_MAIN_STREAM_VTOTAL 0x184
#define ZYNQMP_DP_MAIN_STREAM_POLARITY 0x188
#define ZYNQMP_DP_MAIN_STREAM_POLARITY_HSYNC_SHIFT 0
#define ZYNQMP_DP_MAIN_STREAM_POLARITY_VSYNC_SHIFT 1
#define ZYNQMP_DP_MAIN_STREAM_HSWIDTH 0x18c
#define ZYNQMP_DP_MAIN_STREAM_VSWIDTH 0x190
#define ZYNQMP_DP_MAIN_STREAM_HRES 0x194
#define ZYNQMP_DP_MAIN_STREAM_VRES 0x198
#define ZYNQMP_DP_MAIN_STREAM_HSTART 0x19c
#define ZYNQMP_DP_MAIN_STREAM_VSTART 0x1a0
#define ZYNQMP_DP_MAIN_STREAM_MISC0 0x1a4
#define ZYNQMP_DP_MAIN_STREAM_MISC0_SYNC_LOCK BIT(0)
#define ZYNQMP_DP_MAIN_STREAM_MISC0_COMP_FORMAT_RGB (0 << 1)
#define ZYNQMP_DP_MAIN_STREAM_MISC0_COMP_FORMAT_YCRCB_422 (5 << 1)
#define ZYNQMP_DP_MAIN_STREAM_MISC0_COMP_FORMAT_YCRCB_444 (6 << 1)
#define ZYNQMP_DP_MAIN_STREAM_MISC0_COMP_FORMAT_MASK (7 << 1)
#define ZYNQMP_DP_MAIN_STREAM_MISC0_DYNAMIC_RANGE BIT(3)
#define ZYNQMP_DP_MAIN_STREAM_MISC0_YCBCR_COLR BIT(4)
#define ZYNQMP_DP_MAIN_STREAM_MISC0_BPC_6 (0 << 5)
#define ZYNQMP_DP_MAIN_STREAM_MISC0_BPC_8 (1 << 5)
#define ZYNQMP_DP_MAIN_STREAM_MISC0_BPC_10 (2 << 5)
#define ZYNQMP_DP_MAIN_STREAM_MISC0_BPC_12 (3 << 5)
#define ZYNQMP_DP_MAIN_STREAM_MISC0_BPC_16 (4 << 5)
#define ZYNQMP_DP_MAIN_STREAM_MISC0_BPC_MASK (7 << 5)
#define ZYNQMP_DP_MAIN_STREAM_MISC1 0x1a8
#define ZYNQMP_DP_MAIN_STREAM_MISC1_Y_ONLY_EN BIT(7)
#define ZYNQMP_DP_MAIN_STREAM_M_VID 0x1ac
#define ZYNQMP_DP_MSA_TRANSFER_UNIT_SIZE 0x1b0
#define ZYNQMP_DP_MSA_TRANSFER_UNIT_SIZE_TU_SIZE_DEF 64
#define ZYNQMP_DP_MAIN_STREAM_N_VID 0x1b4
#define ZYNQMP_DP_USER_PIX_WIDTH 0x1b8
#define ZYNQMP_DP_USER_DATA_COUNT_PER_LANE 0x1bc
#define ZYNQMP_DP_MIN_BYTES_PER_TU 0x1c4
#define ZYNQMP_DP_FRAC_BYTES_PER_TU 0x1c8
#define ZYNQMP_DP_INIT_WAIT 0x1cc
/* PHY configuration and status registers */
#define ZYNQMP_DP_PHY_RESET 0x200
#define ZYNQMP_DP_PHY_RESET_PHY_RESET BIT(0)
#define ZYNQMP_DP_PHY_RESET_GTTX_RESET BIT(1)
#define ZYNQMP_DP_PHY_RESET_PHY_PMA_RESET BIT(8)
#define ZYNQMP_DP_PHY_RESET_PHY_PCS_RESET BIT(9)
#define ZYNQMP_DP_PHY_RESET_ALL_RESET (ZYNQMP_DP_PHY_RESET_PHY_RESET | \
ZYNQMP_DP_PHY_RESET_GTTX_RESET | \
ZYNQMP_DP_PHY_RESET_PHY_PMA_RESET | \
ZYNQMP_DP_PHY_RESET_PHY_PCS_RESET)
#define ZYNQMP_DP_PHY_PREEMPHASIS_LANE_0 0x210
#define ZYNQMP_DP_PHY_PREEMPHASIS_LANE_1 0x214
#define ZYNQMP_DP_PHY_PREEMPHASIS_LANE_2 0x218
#define ZYNQMP_DP_PHY_PREEMPHASIS_LANE_3 0x21c
#define ZYNQMP_DP_PHY_VOLTAGE_DIFF_LANE_0 0x220
#define ZYNQMP_DP_PHY_VOLTAGE_DIFF_LANE_1 0x224
#define ZYNQMP_DP_PHY_VOLTAGE_DIFF_LANE_2 0x228
#define ZYNQMP_DP_PHY_VOLTAGE_DIFF_LANE_3 0x22c
#define ZYNQMP_DP_PHY_CLOCK_SELECT 0x234
#define ZYNQMP_DP_PHY_CLOCK_SELECT_1_62G 0x1
#define ZYNQMP_DP_PHY_CLOCK_SELECT_2_70G 0x3
#define ZYNQMP_DP_PHY_CLOCK_SELECT_5_40G 0x5
#define ZYNQMP_DP_TX_PHY_POWER_DOWN 0x238
#define ZYNQMP_DP_TX_PHY_POWER_DOWN_LANE_0 BIT(0)
#define ZYNQMP_DP_TX_PHY_POWER_DOWN_LANE_1 BIT(1)
#define ZYNQMP_DP_TX_PHY_POWER_DOWN_LANE_2 BIT(2)
#define ZYNQMP_DP_TX_PHY_POWER_DOWN_LANE_3 BIT(3)
#define ZYNQMP_DP_TX_PHY_POWER_DOWN_ALL 0xf
#define ZYNQMP_DP_PHY_PRECURSOR_LANE_0 0x23c
#define ZYNQMP_DP_PHY_PRECURSOR_LANE_1 0x240
#define ZYNQMP_DP_PHY_PRECURSOR_LANE_2 0x244
#define ZYNQMP_DP_PHY_PRECURSOR_LANE_3 0x248
#define ZYNQMP_DP_PHY_POSTCURSOR_LANE_0 0x24c
#define ZYNQMP_DP_PHY_POSTCURSOR_LANE_1 0x250
#define ZYNQMP_DP_PHY_POSTCURSOR_LANE_2 0x254
#define ZYNQMP_DP_PHY_POSTCURSOR_LANE_3 0x258
#define ZYNQMP_DP_SUB_TX_PHY_PRECURSOR_LANE_0 0x24c
#define ZYNQMP_DP_SUB_TX_PHY_PRECURSOR_LANE_1 0x250
#define ZYNQMP_DP_PHY_STATUS 0x280
#define ZYNQMP_DP_PHY_STATUS_PLL_LOCKED_SHIFT 4
#define ZYNQMP_DP_PHY_STATUS_FPGA_PLL_LOCKED BIT(6)
/* Audio registers */
#define ZYNQMP_DP_TX_AUDIO_CONTROL 0x300
#define ZYNQMP_DP_TX_AUDIO_CHANNELS 0x304
#define ZYNQMP_DP_TX_AUDIO_INFO_DATA 0x308
#define ZYNQMP_DP_TX_M_AUD 0x328
#define ZYNQMP_DP_TX_N_AUD 0x32c
#define ZYNQMP_DP_TX_AUDIO_EXT_DATA 0x330
#define ZYNQMP_DP_MAX_LANES 2
#define ZYNQMP_MAX_FREQ 3000000
#define DP_REDUCED_BIT_RATE 162000
#define DP_HIGH_BIT_RATE 270000
#define DP_HIGH_BIT_RATE2 540000
#define DP_MAX_TRAINING_TRIES 5
#define DP_V1_2 0x12
/**
* struct zynqmp_dp_link_config - Common link config between source and sink
* @max_rate: maximum link rate
* @max_lanes: maximum number of lanes
*/
struct zynqmp_dp_link_config {
int max_rate;
u8 max_lanes;
};
/**
* struct zynqmp_dp_mode - Configured mode of DisplayPort
* @bw_code: code for bandwidth(link rate)
* @lane_cnt: number of lanes
* @pclock: pixel clock frequency of current mode
* @fmt: format identifier string
*/
struct zynqmp_dp_mode {
u8 bw_code;
u8 lane_cnt;
int pclock;
const char *fmt;
};
/**
* struct zynqmp_dp_config - Configuration of DisplayPort from DTS
* @misc0: misc0 configuration (per DP v1.2 spec)
* @misc1: misc1 configuration (per DP v1.2 spec)
* @bpp: bits per pixel
*/
struct zynqmp_dp_config {
u8 misc0;
u8 misc1;
u8 bpp;
};
/**
* struct zynqmp_dp - Xilinx DisplayPort core
* @encoder: the drm encoder structure
* @connector: the drm connector structure
* @dev: device structure
* @dpsub: Display subsystem
* @drm: DRM core
* @iomem: device I/O memory for register access
* @reset: reset controller
* @irq: irq
* @config: IP core configuration from DTS
* @aux: aux channel
* @phy: PHY handles for DP lanes
* @num_lanes: number of enabled phy lanes
* @hpd_work: hot plug detection worker
* @status: connection status
* @enabled: flag to indicate if the device is enabled
* @dpcd: DP configuration data from currently connected sink device
* @link_config: common link configuration between IP core and sink device
* @mode: current mode between IP core and sink device
* @train_set: set of training data
*/
struct zynqmp_dp {
struct drm_encoder encoder;
struct drm_connector connector;
struct device *dev;
struct zynqmp_dpsub *dpsub;
struct drm_device *drm;
void __iomem *iomem;
struct reset_control *reset;
int irq;
struct zynqmp_dp_config config;
struct drm_dp_aux aux;
struct phy *phy[ZYNQMP_DP_MAX_LANES];
u8 num_lanes;
struct delayed_work hpd_work;
enum drm_connector_status status;
bool enabled;
u8 dpcd[DP_RECEIVER_CAP_SIZE];
struct zynqmp_dp_link_config link_config;
struct zynqmp_dp_mode mode;
u8 train_set[ZYNQMP_DP_MAX_LANES];
};
/* Map a drm_encoder embedded in struct zynqmp_dp back to its zynqmp_dp. */
static inline struct zynqmp_dp *encoder_to_dp(struct drm_encoder *encoder)
{
	return container_of(encoder, struct zynqmp_dp, encoder);
}
/* Map a drm_connector embedded in struct zynqmp_dp back to its zynqmp_dp. */
static inline struct zynqmp_dp *connector_to_dp(struct drm_connector *connector)
{
	return container_of(connector, struct zynqmp_dp, connector);
}
/* Write a 32-bit value to a DP core register. */
static void zynqmp_dp_write(struct zynqmp_dp *dp, int offset, u32 val)
{
	writel(val, dp->iomem + offset);
}
/* Read a 32-bit value from a DP core register. */
static u32 zynqmp_dp_read(struct zynqmp_dp *dp, int offset)
{
	return readl(dp->iomem + offset);
}
/* Clear the @clr bits in a DP core register (read-modify-write). */
static void zynqmp_dp_clr(struct zynqmp_dp *dp, int offset, u32 clr)
{
	zynqmp_dp_write(dp, offset, zynqmp_dp_read(dp, offset) & ~clr);
}
/* Set the @set bits in a DP core register (read-modify-write). */
static void zynqmp_dp_set(struct zynqmp_dp *dp, int offset, u32 set)
{
	zynqmp_dp_write(dp, offset, zynqmp_dp_read(dp, offset) | set);
}
/* -----------------------------------------------------------------------------
* PHY Handling
*/
#define RST_TIMEOUT_MS			1000

/*
 * Assert or deassert the DP reset and busy-wait (with cpu_relax()) until
 * the reset controller reports the requested state, for at most
 * RST_TIMEOUT_MS.
 *
 * Return: 0 on success, -ETIMEDOUT if the state is not reached in time.
 */
static int zynqmp_dp_reset(struct zynqmp_dp *dp, bool assert)
{
	unsigned long timeout;

	if (assert)
		reset_control_assert(dp->reset);
	else
		reset_control_deassert(dp->reset);

	/* Wait for the (de)assert to complete. */
	timeout = jiffies + msecs_to_jiffies(RST_TIMEOUT_MS);
	while (!time_after_eq(jiffies, timeout)) {
		/* reset_control_status() > 0 means the line is asserted. */
		bool status = !!reset_control_status(dp->reset);

		if (assert == status)
			return 0;

		cpu_relax();
	}

	dev_err(dp->dev, "reset %s timeout\n", assert ? "assert" : "deassert");
	return -ETIMEDOUT;
}
/**
 * zynqmp_dp_phy_init - Initialize the phy
 * @dp: DisplayPort IP core structure
 *
 * Initialize the phy.
 *
 * Return: 0 if the phy instances are initialized correctly, or the error code
 * returned from the callee functions.
 */
static int zynqmp_dp_phy_init(struct zynqmp_dp *dp)
{
	int ret;
	int i;

	for (i = 0; i < dp->num_lanes; i++) {
		ret = phy_init(dp->phy[i]);
		if (ret) {
			/*
			 * NOTE(review): lanes initialized before this failure
			 * are not phy_exit()ed here — presumably the caller's
			 * error path takes care of that; confirm.
			 */
			dev_err(dp->dev, "failed to init phy lane %d\n", i);
			return ret;
		}
	}

	/* Deassert the subsystem reset before touching the PHY reset bits. */
	ret = zynqmp_dp_reset(dp, false);
	if (ret < 0)
		return ret;

	zynqmp_dp_clr(dp, ZYNQMP_DP_PHY_RESET, ZYNQMP_DP_PHY_RESET_ALL_RESET);

	/*
	 * Power on lanes in reverse order as only lane 0 waits for the PLL to
	 * lock.
	 */
	for (i = dp->num_lanes - 1; i >= 0; i--) {
		ret = phy_power_on(dp->phy[i]);
		if (ret) {
			dev_err(dp->dev, "failed to power on phy lane %d\n", i);
			return ret;
		}
	}

	return 0;
}
/**
 * zynqmp_dp_phy_exit - Exit the phy
 * @dp: DisplayPort IP core structure
 *
 * Power off and exit the PHY instances for all lanes, asserting the DP reset
 * in between. Failures are logged but do not abort the teardown.
 */
static void zynqmp_dp_phy_exit(struct zynqmp_dp *dp)
{
	unsigned int i;
	int ret;

	for (i = 0; i < dp->num_lanes; i++) {
		ret = phy_power_off(dp->phy[i]);
		if (ret)
			/* %u: i is unsigned (was %d, a format mismatch). */
			dev_err(dp->dev, "failed to power off phy(%u) %d\n", i,
				ret);
	}

	zynqmp_dp_reset(dp, true);

	for (i = 0; i < dp->num_lanes; i++) {
		ret = phy_exit(dp->phy[i]);
		if (ret)
			dev_err(dp->dev, "failed to exit phy(%u) %d\n", i, ret);
	}
}
/**
 * zynqmp_dp_phy_probe - Probe the PHYs
 * @dp: DisplayPort IP core structure
 *
 * Probe PHYs for all lanes. Less PHYs may be available than the number of
 * lanes, which is not considered an error as long as at least one PHY is
 * found. The caller can check dp->num_lanes to check how many PHYs were found.
 *
 * Return:
 * * 0 - Success
 * * -ENXIO - No PHY found
 * * -EPROBE_DEFER - Probe deferral requested
 * * Other negative value - PHY retrieval failure
 */
static int zynqmp_dp_phy_probe(struct zynqmp_dp *dp)
{
	unsigned int i;

	for (i = 0; i < ZYNQMP_DP_MAX_LANES; i++) {
		char phy_name[16];
		struct phy *phy;

		/* %u matches the unsigned loop counter (was %d). */
		snprintf(phy_name, sizeof(phy_name), "dp-phy%u", i);
		phy = devm_phy_get(dp->dev, phy_name);
		if (IS_ERR(phy)) {
			switch (PTR_ERR(phy)) {
			case -ENODEV:
				/* A missing lane is fine once one PHY exists. */
				if (dp->num_lanes)
					return 0;

				dev_err(dp->dev, "no PHY found\n");
				return -ENXIO;

			case -EPROBE_DEFER:
				return -EPROBE_DEFER;

			default:
				dev_err(dp->dev, "failed to get PHY lane %u\n",
					i);
				return PTR_ERR(phy);
			}
		}

		dp->phy[i] = phy;
		dp->num_lanes++;
	}

	return 0;
}
/**
 * zynqmp_dp_phy_ready - Check if PHY is ready
 * @dp: DisplayPort IP core structure
 *
 * Check if PHY is ready. If PHY is not ready, wait 1ms to check for 100 times.
 * This amount of delay was suggested by IP designer.
 *
 * Return: 0 if PHY is ready, or -ENODEV if PHY is not ready.
 */
static int zynqmp_dp_phy_ready(struct zynqmp_dp *dp)
{
	u32 i, reg, ready;

	/* One ready bit per enabled lane in the PHY status register. */
	ready = (1 << dp->num_lanes) - 1;

	/* Wait for 100 * 1ms. This should be enough time for PHY to be ready */
	for (i = 0; ; i++) {
		reg = zynqmp_dp_read(dp, ZYNQMP_DP_PHY_STATUS);
		if ((reg & ready) == ready)
			return 0;

		if (i == 100)
			break;

		usleep_range(1000, 1100);
	}

	/*
	 * Fix: the original had an unreachable "return 0;" after an infinite
	 * loop; the failure path now falls through here instead.
	 */
	dev_err(dp->dev, "PHY isn't ready\n");
	return -ENODEV;
}
/* -----------------------------------------------------------------------------
* DisplayPort Link Training
*/
/**
 * zynqmp_dp_max_rate - Calculate and return available max pixel clock
 * @link_rate: link rate (Kilo-bytes / sec)
 * @lane_num: number of lanes
 * @bpp: bits per pixel
 *
 * Return: max pixel clock (KHz) supported by current link config.
 */
static inline int zynqmp_dp_max_rate(int link_rate, u8 lane_num, u8 bpp)
{
	/* Aggregate byte rate over all lanes, converted to pixels of @bpp bits. */
	int total_rate = link_rate * lane_num;

	return total_rate * 8 / bpp;
}
/**
 * zynqmp_dp_mode_configure - Configure the link values
 * @dp: DisplayPort IP core structure
 * @pclock: pixel clock for requested display mode
 * @current_bw: current link rate
 *
 * Find the link configuration values, rate and lane count for requested pixel
 * clock @pclock. The @pclock is stored in the mode to be used in other
 * functions later. The returned rate is downshifted from the current rate
 * @current_bw.
 *
 * Return: Current link rate code, or -EINVAL.
 */
static int zynqmp_dp_mode_configure(struct zynqmp_dp *dp, int pclock,
				    u8 current_bw)
{
	int max_rate = dp->link_config.max_rate;
	u8 bws[3] = { DP_LINK_BW_1_62, DP_LINK_BW_2_7, DP_LINK_BW_5_4 };
	u8 max_lanes = dp->link_config.max_lanes;
	u8 max_link_rate_code = drm_dp_link_rate_to_bw_code(max_rate);
	u8 bpp = dp->config.bpp;
	u8 lane_cnt;
	s8 i;

	/* 1.62 Gb/s is the lowest defined rate: nothing to downshift to. */
	if (current_bw == DP_LINK_BW_1_62) {
		dev_err(dp->dev, "can't downshift. already lowest link rate\n");
		return -EINVAL;
	}

	/*
	 * Pick the highest rate that is both strictly below @current_bw (when
	 * downshifting) and within the common source/sink maximum. The loop
	 * exits with a valid index provided max_link_rate_code >= bws[0]
	 * (DP_LINK_BW_1_62), since the check above rejected that rate as
	 * @current_bw; otherwise i would underflow to -1 — hence the signed s8.
	 */
	for (i = ARRAY_SIZE(bws) - 1; i >= 0; i--) {
		if (current_bw && bws[i] >= current_bw)
			continue;

		if (bws[i] <= max_link_rate_code)
			break;
	}

	/* Use the smallest lane count that can carry the requested pclock. */
	for (lane_cnt = 1; lane_cnt <= max_lanes; lane_cnt <<= 1) {
		int bw;
		u32 rate;

		bw = drm_dp_bw_code_to_link_rate(bws[i]);
		rate = zynqmp_dp_max_rate(bw, lane_cnt, bpp);
		if (pclock <= rate) {
			dp->mode.bw_code = bws[i];
			dp->mode.lane_cnt = lane_cnt;
			dp->mode.pclock = pclock;
			return dp->mode.bw_code;
		}
	}

	dev_err(dp->dev, "failed to configure link values\n");
	return -EINVAL;
}
/**
 * zynqmp_dp_adjust_train - Adjust train values
 * @dp: DisplayPort IP core structure
 * @link_status: link status from sink which contains requested training values
 */
static void zynqmp_dp_adjust_train(struct zynqmp_dp *dp,
				   u8 link_status[DP_LINK_STATUS_SIZE])
{
	u8 vs_max = 0;
	u8 pe_max = 0;
	u8 lane;

	/* Track the highest voltage swing / pre-emphasis any lane requests. */
	for (lane = 0; lane < dp->mode.lane_cnt; lane++) {
		u8 vs = drm_dp_get_adjust_request_voltage(link_status, lane);
		u8 pe = drm_dp_get_adjust_request_pre_emphasis(link_status,
							       lane);

		if (vs > vs_max)
			vs_max = vs;
		if (pe > pe_max)
			pe_max = pe;
	}

	/* Flag saturation so the sink stops asking for higher levels. */
	if (vs_max >= DP_TRAIN_VOLTAGE_SWING_LEVEL_3)
		vs_max |= DP_TRAIN_MAX_SWING_REACHED;
	if (pe_max >= DP_TRAIN_PRE_EMPH_LEVEL_2)
		pe_max |= DP_TRAIN_MAX_PRE_EMPHASIS_REACHED;

	/* Program every lane with the same, highest requested settings. */
	for (lane = 0; lane < dp->mode.lane_cnt; lane++)
		dp->train_set[lane] = vs_max | pe_max;
}
/**
 * zynqmp_dp_update_vs_emph - Update the training values
 * @dp: DisplayPort IP core structure
 *
 * Update the training values based on the request from sink. The mapped values
 * are predefined, and values(vs, pe, pc) are from the device manual.
 *
 * Return: 0 if vs and emph are updated successfully, or the error code returned
 * by drm_dp_dpcd_write().
 */
static int zynqmp_dp_update_vs_emph(struct zynqmp_dp *dp)
{
	unsigned int i;
	int ret;

	/* Tell the sink what levels we are about to drive on each lane. */
	ret = drm_dp_dpcd_write(&dp->aux, DP_TRAINING_LANE0_SET, dp->train_set,
				dp->mode.lane_cnt);
	if (ret < 0)
		return ret;

	for (i = 0; i < dp->mode.lane_cnt; i++) {
		u32 reg = ZYNQMP_DP_SUB_TX_PHY_PRECURSOR_LANE_0 + i * 4;
		union phy_configure_opts opts = { 0 };
		u8 train = dp->train_set[i];

		/* Extract this lane's voltage swing and pre-emphasis levels. */
		opts.dp.voltage[0] = (train & DP_TRAIN_VOLTAGE_SWING_MASK)
				     >> DP_TRAIN_VOLTAGE_SWING_SHIFT;
		opts.dp.pre[0] = (train & DP_TRAIN_PRE_EMPHASIS_MASK)
				 >> DP_TRAIN_PRE_EMPHASIS_SHIFT;

		phy_configure(dp->phy[i], &opts);

		/*
		 * NOTE(review): 0x2 looks like a fixed precursor value from
		 * the device manual — confirm.
		 */
		zynqmp_dp_write(dp, reg, 0x2);
	}

	return 0;
}
/**
 * zynqmp_dp_link_train_cr - Train clock recovery
 * @dp: DisplayPort IP core structure
 *
 * Run the clock recovery phase of link training: select training pattern 1 on
 * both source and sink, then repeatedly push the current drive settings and
 * poll the sink's link status, adjusting the settings as requested until
 * clock recovery is locked on all lanes or no further adjustment is possible.
 *
 * Return: 0 if clock recovery train is done successfully, or corresponding
 * error code.
 */
static int zynqmp_dp_link_train_cr(struct zynqmp_dp *dp)
{
	u8 link_status[DP_LINK_STATUS_SIZE];
	u8 lane_cnt = dp->mode.lane_cnt;
	u8 vs = 0, tries = 0;
	u16 max_tries, i;
	bool cr_done;
	int ret;

	/* Transmit training pattern 1 and tell the sink to expect it. */
	zynqmp_dp_write(dp, ZYNQMP_DP_TRAINING_PATTERN_SET,
			DP_TRAINING_PATTERN_1);
	ret = drm_dp_dpcd_writeb(&dp->aux, DP_TRAINING_PATTERN_SET,
				 DP_TRAINING_PATTERN_1 |
				 DP_LINK_SCRAMBLING_DISABLE);
	if (ret < 0)
		return ret;

	/*
	 * 256 loops should be maximum iterations for 4 lanes and 4 values.
	 * So, This loop should exit before 512 iterations
	 */
	for (max_tries = 0; max_tries < 512; max_tries++) {
		ret = zynqmp_dp_update_vs_emph(dp);
		if (ret)
			return ret;

		/* Spec-mandated wait before sampling the link status. */
		drm_dp_link_train_clock_recovery_delay(dp->dpcd);
		ret = drm_dp_dpcd_read_link_status(&dp->aux, link_status);
		if (ret < 0)
			return ret;

		cr_done = drm_dp_clock_recovery_ok(link_status, lane_cnt);
		if (cr_done)
			break;

		/* Give up once every lane already drives maximum swing. */
		for (i = 0; i < lane_cnt; i++)
			if (!(dp->train_set[i] & DP_TRAIN_MAX_SWING_REACHED))
				break;
		if (i == lane_cnt)
			break;

		/*
		 * Give up after DP_MAX_TRAINING_TRIES consecutive iterations
		 * with the same voltage-swing request (no progress).
		 */
		if ((dp->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK) == vs)
			tries++;
		else
			tries = 0;
		if (tries == DP_MAX_TRAINING_TRIES)
			break;

		vs = dp->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK;
		zynqmp_dp_adjust_train(dp, link_status);
	}

	if (!cr_done)
		return -EIO;

	return 0;
}
/**
 * zynqmp_dp_link_train_ce - Train channel equalization
 * @dp: DisplayPort IP core structure
 *
 * Run the channel equalization phase: transmit training pattern 3 when the
 * sink is DP 1.2+ with TPS3 support, pattern 2 otherwise, and iterate up to
 * DP_MAX_TRAINING_TRIES times adjusting drive settings per the sink's
 * requests.
 *
 * Return: 0 if channel equalization train is done successfully, or
 * corresponding error code.
 */
static int zynqmp_dp_link_train_ce(struct zynqmp_dp *dp)
{
	u8 link_status[DP_LINK_STATUS_SIZE];
	u8 lane_cnt = dp->mode.lane_cnt;
	u32 pat, tries;
	int ret;
	bool ce_done;

	/* Prefer TPS3 when the sink advertises support for it. */
	if (dp->dpcd[DP_DPCD_REV] >= DP_V1_2 &&
	    dp->dpcd[DP_MAX_LANE_COUNT] & DP_TPS3_SUPPORTED)
		pat = DP_TRAINING_PATTERN_3;
	else
		pat = DP_TRAINING_PATTERN_2;

	/* Select the pattern on both source and sink. */
	zynqmp_dp_write(dp, ZYNQMP_DP_TRAINING_PATTERN_SET, pat);
	ret = drm_dp_dpcd_writeb(&dp->aux, DP_TRAINING_PATTERN_SET,
				 pat | DP_LINK_SCRAMBLING_DISABLE);
	if (ret < 0)
		return ret;

	for (tries = 0; tries < DP_MAX_TRAINING_TRIES; tries++) {
		ret = zynqmp_dp_update_vs_emph(dp);
		if (ret)
			return ret;

		/* Spec-mandated wait before sampling the link status. */
		drm_dp_link_train_channel_eq_delay(dp->dpcd);
		ret = drm_dp_dpcd_read_link_status(&dp->aux, link_status);
		if (ret < 0)
			return ret;

		ce_done = drm_dp_channel_eq_ok(link_status, lane_cnt);
		if (ce_done)
			break;

		zynqmp_dp_adjust_train(dp, link_status);
	}

	if (!ce_done)
		return -EIO;

	return 0;
}
/**
* zynqmp_dp_link_train - Train the link
* @dp: DisplayPort IP core structure
*
* Return: 0 if all trains are done successfully, or corresponding error code.
*/
static int zynqmp_dp_train(struct zynqmp_dp *dp)
{
u32 reg;
u8 bw_code = dp->mode.bw_code;
u8 lane_cnt = dp->mode.lane_cnt;
u8 aux_lane_cnt = lane_cnt;
bool enhanced;
int ret;
zynqmp_dp_write(dp, ZYNQMP_DP_LANE_COUNT_SET, lane_cnt);
enhanced = drm_dp_enhanced_frame_cap(dp->dpcd);
if (enhanced) {
zynqmp_dp_write(dp, ZYNQMP_DP_ENHANCED_FRAME_EN, 1);
aux_lane_cnt |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
}
if (dp->dpcd[3] & 0x1) {
zynqmp_dp_write(dp, ZYNQMP_DP_DOWNSPREAD_CTL, 1);
drm_dp_dpcd_writeb(&dp->aux, DP_DOWNSPREAD_CTRL,
DP_SPREAD_AMP_0_5);
} else {
zynqmp_dp_write(dp, ZYNQMP_DP_DOWNSPREAD_CTL, 0);
drm_dp_dpcd_writeb(&dp->aux, DP_DOWNSPREAD_CTRL, 0);
}
ret = drm_dp_dpcd_writeb(&dp->aux, DP_LANE_COUNT_SET, aux_lane_cnt);
if (ret < 0) {
dev_err(dp->dev, "failed to set lane count\n");
return ret;
}
ret = drm_dp_dpcd_writeb(&dp->aux, DP_MAIN_LINK_CHANNEL_CODING_SET,
DP_SET_ANSI_8B10B);
if (ret < 0) {
dev_err(dp->dev, "failed to set ANSI 8B/10B encoding\n");
return ret;
}
ret = drm_dp_dpcd_writeb(&dp->aux, DP_LINK_BW_SET, bw_code);
if (ret < 0) {
dev_err(dp->dev, "failed to set DP bandwidth\n");
return ret;
}
zynqmp_dp_write(dp, ZYNQMP_DP_LINK_BW_SET, bw_code);
switch (bw_code) {
case DP_LINK_BW_1_62:
reg = ZYNQMP_DP_PHY_CLOCK_SELECT_1_62G;
break;
case DP_LINK_BW_2_7:
reg = ZYNQMP_DP_PHY_CLOCK_SELECT_2_70G;
break;
case DP_LINK_BW_5_4:
default:
reg = ZYNQMP_DP_PHY_CLOCK_SELECT_5_40G;
break;
}
zynqmp_dp_write(dp, ZYNQMP_DP_PHY_CLOCK_SELECT, reg);
ret = zynqmp_dp_phy_ready(dp);
if (ret < 0)
return ret;
zynqmp_dp_write(dp, ZYNQMP_DP_SCRAMBLING_DISABLE, 1);
memset(dp->train_set, 0, 4);
ret = zynqmp_dp_link_train_cr(dp);
if (ret)
return ret;
ret = zynqmp_dp_link_train_ce(dp);
if (ret)
return ret;
ret = drm_dp_dpcd_writeb(&dp->aux, DP_TRAINING_PATTERN_SET,
DP_TRAINING_PATTERN_DISABLE);
if (ret < 0) {
dev_err(dp->dev, "failed to disable training pattern\n");
return ret;
}
zynqmp_dp_write(dp, ZYNQMP_DP_TRAINING_PATTERN_SET,
DP_TRAINING_PATTERN_DISABLE);
zynqmp_dp_write(dp, ZYNQMP_DP_SCRAMBLING_DISABLE, 0);
return 0;
}
/**
 * zynqmp_dp_train_loop - Downshift the link rate during training
 * @dp: DisplayPort IP core structure
 *
 * Train the link by downshifting the link rate if training is not successful.
 */
static void zynqmp_dp_train_loop(struct zynqmp_dp *dp)
{
	struct zynqmp_dp_mode *mode = &dp->mode;
	u8 bw_code = mode->bw_code;
	int next_bw;

	for (;;) {
		/* Stop silently when the sink vanished or output is off. */
		if (dp->status == connector_status_disconnected ||
		    !dp->enabled)
			return;

		if (!zynqmp_dp_train(dp))
			return;

		/* Training failed: try the next lower link rate. */
		next_bw = zynqmp_dp_mode_configure(dp, mode->pclock, bw_code);
		if (next_bw < 0)
			break;

		bw_code = next_bw;
		if (bw_code < DP_LINK_BW_1_62)
			break;
	}

	dev_err(dp->dev, "failed to train the DP link\n");
}
/* -----------------------------------------------------------------------------
* DisplayPort AUX
*/
#define AUX_READ_BIT 0x1
/**
 * zynqmp_dp_aux_cmd_submit - Submit aux command
 * @dp: DisplayPort IP core structure
 * @cmd: aux command
 * @addr: aux address
 * @buf: buffer for command data
 * @bytes: number of bytes for @buf
 * @reply: reply code to be returned
 *
 * Submit an aux command. All aux related commands, native or i2c aux
 * read/write, are submitted through this function. The function is mapped to
 * the transfer function of struct drm_dp_aux. This function involves in
 * multiple register reads/writes, thus synchronization is needed, and it is
 * done by drm_dp_helper using @hw_mutex. The calling thread goes into sleep
 * if there's no immediate reply to the command submission. The reply code is
 * returned at @reply if @reply != NULL.
 *
 * Return: 0 if the command is submitted properly, or corresponding error code:
 * -EBUSY when there is any request already being processed
 * -ETIMEDOUT when receiving reply is timed out
 * -EIO when received bytes are less than requested
 */
static int zynqmp_dp_aux_cmd_submit(struct zynqmp_dp *dp, u32 cmd, u16 addr,
				    u8 *buf, u8 bytes, u8 *reply)
{
	bool is_read = (cmd & AUX_READ_BIT) ? true : false;
	u32 reg, i;

	/* Reject a new request while a previous one is still in flight. */
	reg = zynqmp_dp_read(dp, ZYNQMP_DP_INTERRUPT_SIGNAL_STATE);
	if (reg & ZYNQMP_DP_INTERRUPT_SIGNAL_STATE_REQUEST)
		return -EBUSY;

	zynqmp_dp_write(dp, ZYNQMP_DP_AUX_ADDRESS, addr);

	/* For writes, preload the payload into the write FIFO. */
	if (!is_read)
		for (i = 0; i < bytes; i++)
			zynqmp_dp_write(dp, ZYNQMP_DP_AUX_WRITE_FIFO,
					buf[i]);

	/* A NULL/empty buffer becomes an address-only transaction. */
	reg = cmd << ZYNQMP_DP_AUX_COMMAND_CMD_SHIFT;
	if (!buf || !bytes)
		reg |= ZYNQMP_DP_AUX_COMMAND_ADDRESS_ONLY;
	else
		reg |= (bytes - 1) << ZYNQMP_DP_AUX_COMMAND_BYTES_SHIFT;
	zynqmp_dp_write(dp, ZYNQMP_DP_AUX_COMMAND, reg);

	/* Wait for reply to be delivered upto 2ms */
	for (i = 0; ; i++) {
		reg = zynqmp_dp_read(dp, ZYNQMP_DP_INTERRUPT_SIGNAL_STATE);
		if (reg & ZYNQMP_DP_INTERRUPT_SIGNAL_STATE_REPLY)
			break;

		if (reg & ZYNQMP_DP_INTERRUPT_SIGNAL_STATE_REPLY_TIMEOUT ||
		    i == 2)
			return -ETIMEDOUT;

		usleep_range(1000, 1100);
	}

	reg = zynqmp_dp_read(dp, ZYNQMP_DP_AUX_REPLY_CODE);
	if (reply)
		*reply = reg;

	/* On an ACKed read, pull the data out of the reply FIFO. */
	if (is_read &&
	    (reg == ZYNQMP_DP_AUX_REPLY_CODE_AUX_ACK ||
	     reg == ZYNQMP_DP_AUX_REPLY_CODE_I2C_ACK)) {
		reg = zynqmp_dp_read(dp, ZYNQMP_DP_REPLY_DATA_COUNT);
		if ((reg & ZYNQMP_DP_REPLY_DATA_COUNT_MASK) != bytes)
			return -EIO;

		for (i = 0; i < bytes; i++)
			buf[i] = zynqmp_dp_read(dp, ZYNQMP_DP_AUX_REPLY_DATA);
	}

	return 0;
}
/*
 * AUX transfer callback for struct drm_dp_aux: retry the transaction until it
 * succeeds, the overall timeout expires, or the sink is disconnected.
 */
static ssize_t
zynqmp_dp_aux_transfer(struct drm_dp_aux *aux, struct drm_dp_aux_msg *msg)
{
	struct zynqmp_dp *dp = container_of(aux, struct zynqmp_dp, aux);
	unsigned int attempt, max_attempts;
	int ret;

	/* Number of loops = timeout in msec / aux delay (400 usec) */
	max_attempts = zynqmp_dp_aux_timeout_ms * 1000 / 400;
	if (!max_attempts)
		max_attempts = 1;

	for (attempt = 0; attempt < max_attempts; attempt++) {
		ret = zynqmp_dp_aux_cmd_submit(dp, msg->request, msg->address,
					       msg->buffer, msg->size,
					       &msg->reply);
		if (!ret) {
			dev_dbg(dp->dev, "aux %d retries\n", attempt);
			return msg->size;
		}

		if (dp->status == connector_status_disconnected) {
			dev_dbg(dp->dev, "no connected aux device\n");
			return -ENODEV;
		}

		usleep_range(400, 500);
	}

	dev_dbg(dp->dev, "failed to do aux transfer (%d)\n", ret);

	return ret;
}
/**
 * zynqmp_dp_aux_init - Initialize and register the DP AUX
 * @dp: DisplayPort IP core structure
 *
 * Program the AUX clock divider and filter and register the DP AUX adapter.
 *
 * Return: 0 on success, error value otherwise
 */
static int zynqmp_dp_aux_init(struct zynqmp_dp *dp)
{
	unsigned long apb_rate;
	unsigned int filter_width;

	/*
	 * The AUX_SIGNAL_WIDTH_FILTER is the number of APB clock cycles
	 * corresponding to the AUX pulse. Allowable values are 8, 16, 24, 32,
	 * 40 and 48. The AUX pulse width must be between 0.4µs and 0.6µs,
	 * compute the w / 8 value corresponding to 0.4µs rounded up, and make
	 * sure it stays below 0.6µs and within the allowable values.
	 */
	apb_rate = clk_get_rate(dp->dpsub->apb_clk);
	filter_width = DIV_ROUND_UP(4 * apb_rate, 1000 * 1000 * 10 * 8) * 8;
	if (filter_width > 6 * apb_rate / (1000 * 1000 * 10) ||
	    filter_width > 48) {
		dev_err(dp->dev, "aclk frequency too high\n");
		return -EINVAL;
	}

	zynqmp_dp_write(dp, ZYNQMP_DP_AUX_CLK_DIVIDER,
			(filter_width <<
			 ZYNQMP_DP_AUX_CLK_DIVIDER_AUX_FILTER_SHIFT) |
			(apb_rate / (1000 * 1000)));

	dp->aux.name = "ZynqMP DP AUX";
	dp->aux.dev = dp->dev;
	dp->aux.transfer = zynqmp_dp_aux_transfer;

	return drm_dp_aux_register(&dp->aux);
}
/**
 * zynqmp_dp_aux_cleanup - Cleanup the DP AUX
 * @dp: DisplayPort IP core structure
 *
 * Unregister the DP AUX adapter. Counterpart of zynqmp_dp_aux_init().
 */
static void zynqmp_dp_aux_cleanup(struct zynqmp_dp *dp)
{
	drm_dp_aux_unregister(&dp->aux);
}
/* -----------------------------------------------------------------------------
* DisplayPort Generic Support
*/
/**
 * zynqmp_dp_update_misc - Write the misc registers
 * @dp: DisplayPort IP core structure
 *
 * The misc register values are stored in the structure, and this
 * function applies the values into the registers. The cached values are
 * maintained by zynqmp_dp_set_format().
 */
static void zynqmp_dp_update_misc(struct zynqmp_dp *dp)
{
	zynqmp_dp_write(dp, ZYNQMP_DP_MAIN_STREAM_MISC0, dp->config.misc0);
	zynqmp_dp_write(dp, ZYNQMP_DP_MAIN_STREAM_MISC1, dp->config.misc1);
}
/**
 * zynqmp_dp_set_format - Set the input format
 * @dp: DisplayPort IP core structure
 * @format: input format
 * @bpc: bits per component
 *
 * Update misc register values based on input @format and @bpc. The requested
 * @bpc is clamped to the connected display's reported capability when known,
 * and falls back to 8bpc when unsupported. The cached bpp is updated from the
 * final bpc and the format's color count.
 *
 * Return: 0 on success, or -EINVAL.
 */
static int zynqmp_dp_set_format(struct zynqmp_dp *dp,
				enum zynqmp_dpsub_format format,
				unsigned int bpc)
{
	/* Was erroneously declared 'static': a per-call local pointer. */
	const struct drm_display_info *display;
	struct zynqmp_dp_config *config = &dp->config;
	unsigned int num_colors;

	config->misc0 &= ~ZYNQMP_DP_MAIN_STREAM_MISC0_COMP_FORMAT_MASK;
	config->misc1 &= ~ZYNQMP_DP_MAIN_STREAM_MISC1_Y_ONLY_EN;

	switch (format) {
	case ZYNQMP_DPSUB_FORMAT_RGB:
		config->misc0 |= ZYNQMP_DP_MAIN_STREAM_MISC0_COMP_FORMAT_RGB;
		num_colors = 3;
		break;

	case ZYNQMP_DPSUB_FORMAT_YCRCB444:
		config->misc0 |= ZYNQMP_DP_MAIN_STREAM_MISC0_COMP_FORMAT_YCRCB_444;
		num_colors = 3;
		break;

	case ZYNQMP_DPSUB_FORMAT_YCRCB422:
		config->misc0 |= ZYNQMP_DP_MAIN_STREAM_MISC0_COMP_FORMAT_YCRCB_422;
		num_colors = 2;
		break;

	case ZYNQMP_DPSUB_FORMAT_YONLY:
		config->misc1 |= ZYNQMP_DP_MAIN_STREAM_MISC1_Y_ONLY_EN;
		num_colors = 1;
		break;

	default:
		dev_err(dp->dev, "Invalid colormetry in DT\n");
		return -EINVAL;
	}

	/* Clamp bpc to what the connected display reports, if anything. */
	display = &dp->connector.display_info;
	if (display->bpc && bpc > display->bpc) {
		dev_warn(dp->dev,
			 "downgrading requested %ubpc to display limit %ubpc\n",
			 bpc, display->bpc);
		bpc = display->bpc;
	}

	config->misc0 &= ~ZYNQMP_DP_MAIN_STREAM_MISC0_BPC_MASK;

	switch (bpc) {
	case 6:
		config->misc0 |= ZYNQMP_DP_MAIN_STREAM_MISC0_BPC_6;
		break;
	case 8:
		config->misc0 |= ZYNQMP_DP_MAIN_STREAM_MISC0_BPC_8;
		break;
	case 10:
		config->misc0 |= ZYNQMP_DP_MAIN_STREAM_MISC0_BPC_10;
		break;
	case 12:
		config->misc0 |= ZYNQMP_DP_MAIN_STREAM_MISC0_BPC_12;
		break;
	case 16:
		config->misc0 |= ZYNQMP_DP_MAIN_STREAM_MISC0_BPC_16;
		break;
	default:
		dev_warn(dp->dev, "Not supported bpc (%u). fall back to 8bpc\n",
			 bpc);
		config->misc0 |= ZYNQMP_DP_MAIN_STREAM_MISC0_BPC_8;
		bpc = 8;
		break;
	}

	/* Update the current bpp based on the format. */
	config->bpp = bpc * num_colors;

	return 0;
}
/**
 * zynqmp_dp_encoder_mode_set_transfer_unit - Set the transfer unit values
 * @dp: DisplayPort IP core structure
 * @mode: requested display mode
 *
 * Set the transfer unit, and calculate all transfer unit size related values.
 * Calculation is based on DP and IP core specification.
 */
static void
zynqmp_dp_encoder_mode_set_transfer_unit(struct zynqmp_dp *dp,
					 struct drm_display_mode *mode)
{
	u32 tu = ZYNQMP_DP_MSA_TRANSFER_UNIT_SIZE_TU_SIZE_DEF;
	u32 bw, vid_kbytes, avg_bytes_per_tu, init_wait;

	/* Use the max transfer unit size (default) */
	zynqmp_dp_write(dp, ZYNQMP_DP_MSA_TRANSFER_UNIT_SIZE, tu);

	/*
	 * avg_bytes_per_tu is in units of 1/1000 byte: the integer part goes
	 * to MIN_BYTES_PER_TU and the remainder to FRAC_BYTES_PER_TU.
	 */
	vid_kbytes = mode->clock * (dp->config.bpp / 8);
	bw = drm_dp_bw_code_to_link_rate(dp->mode.bw_code);
	avg_bytes_per_tu = vid_kbytes * tu / (dp->mode.lane_cnt * bw / 1000);
	zynqmp_dp_write(dp, ZYNQMP_DP_MIN_BYTES_PER_TU,
			avg_bytes_per_tu / 1000);
	zynqmp_dp_write(dp, ZYNQMP_DP_FRAC_BYTES_PER_TU,
			avg_bytes_per_tu % 1000);

	/* Configure the initial wait cycle based on transfer unit size */
	if (tu < (avg_bytes_per_tu / 1000))
		init_wait = 0;
	else if ((avg_bytes_per_tu / 1000) <= 4)
		init_wait = tu;
	else
		init_wait = tu - avg_bytes_per_tu / 1000;

	zynqmp_dp_write(dp, ZYNQMP_DP_INIT_WAIT, init_wait);
}
/**
 * zynqmp_dp_encoder_mode_set_stream - Configure the main stream
 * @dp: DisplayPort IP core structure
 * @mode: requested display mode
 *
 * Configure the main stream based on the requested mode @mode. Calculation is
 * based on IP core specification.
 */
static void zynqmp_dp_encoder_mode_set_stream(struct zynqmp_dp *dp,
					      const struct drm_display_mode *mode)
{
	u8 lane_cnt = dp->mode.lane_cnt;
	u32 reg, wpl;
	unsigned int rate;

	/* Program the raster timings into the main stream attributes. */
	zynqmp_dp_write(dp, ZYNQMP_DP_MAIN_STREAM_HTOTAL, mode->htotal);
	zynqmp_dp_write(dp, ZYNQMP_DP_MAIN_STREAM_VTOTAL, mode->vtotal);
	zynqmp_dp_write(dp, ZYNQMP_DP_MAIN_STREAM_POLARITY,
			(!!(mode->flags & DRM_MODE_FLAG_PVSYNC) <<
			 ZYNQMP_DP_MAIN_STREAM_POLARITY_VSYNC_SHIFT) |
			(!!(mode->flags & DRM_MODE_FLAG_PHSYNC) <<
			 ZYNQMP_DP_MAIN_STREAM_POLARITY_HSYNC_SHIFT));
	zynqmp_dp_write(dp, ZYNQMP_DP_MAIN_STREAM_HSWIDTH,
			mode->hsync_end - mode->hsync_start);
	zynqmp_dp_write(dp, ZYNQMP_DP_MAIN_STREAM_VSWIDTH,
			mode->vsync_end - mode->vsync_start);
	zynqmp_dp_write(dp, ZYNQMP_DP_MAIN_STREAM_HRES, mode->hdisplay);
	zynqmp_dp_write(dp, ZYNQMP_DP_MAIN_STREAM_VRES, mode->vdisplay);
	zynqmp_dp_write(dp, ZYNQMP_DP_MAIN_STREAM_HSTART,
			mode->htotal - mode->hsync_start);
	zynqmp_dp_write(dp, ZYNQMP_DP_MAIN_STREAM_VSTART,
			mode->vtotal - mode->vsync_start);

	/* In synchronous mode, set the dividers */
	if (dp->config.misc0 & ZYNQMP_DP_MAIN_STREAM_MISC0_SYNC_LOCK) {
		reg = drm_dp_bw_code_to_link_rate(dp->mode.bw_code);
		zynqmp_dp_write(dp, ZYNQMP_DP_MAIN_STREAM_N_VID, reg);
		zynqmp_dp_write(dp, ZYNQMP_DP_MAIN_STREAM_M_VID, mode->clock);
		rate = zynqmp_disp_get_audio_clk_rate(dp->dpsub->disp);
		if (rate) {
			dev_dbg(dp->dev, "Audio rate: %d\n", rate / 512);
			/*
			 * NOTE(review): TX_N_AUD is written with 'reg' (the
			 * link rate used for N_VID) — confirm this is the
			 * intended audio N value.
			 */
			zynqmp_dp_write(dp, ZYNQMP_DP_TX_N_AUD, reg);
			zynqmp_dp_write(dp, ZYNQMP_DP_TX_M_AUD, rate / 1000);
		}
	}

	/* Only 2 channel audio is supported now */
	if (zynqmp_disp_audio_enabled(dp->dpsub->disp))
		zynqmp_dp_write(dp, ZYNQMP_DP_TX_AUDIO_CHANNELS, 1);

	zynqmp_dp_write(dp, ZYNQMP_DP_USER_PIX_WIDTH, 1);

	/* Translate to the native 16 bit datapath based on IP core spec */
	wpl = (mode->hdisplay * dp->config.bpp + 15) / 16;
	reg = wpl + wpl % lane_cnt - lane_cnt;
	zynqmp_dp_write(dp, ZYNQMP_DP_USER_DATA_COUNT_PER_LANE, reg);
}
/* -----------------------------------------------------------------------------
* DRM Connector
*/
/*
 * Detect the sink via the HPD signal and, when present, read its DPCD to
 * cache the link capabilities (maximum link rate and lane count).
 */
static enum drm_connector_status
zynqmp_dp_connector_detect(struct drm_connector *connector, bool force)
{
	struct zynqmp_dp *dp = connector_to_dp(connector);
	struct zynqmp_dp_link_config *link_config = &dp->link_config;
	u32 state, i;
	int ret;

	/*
	 * This is from heuristic. It takes some delay (ex, 100 ~ 500 msec) to
	 * get the HPD signal with some monitors.
	 */
	for (i = 0; i < 10; i++) {
		state = zynqmp_dp_read(dp, ZYNQMP_DP_INTERRUPT_SIGNAL_STATE);
		if (state & ZYNQMP_DP_INTERRUPT_SIGNAL_STATE_HPD)
			break;
		msleep(100);
	}

	if (state & ZYNQMP_DP_INTERRUPT_SIGNAL_STATE_HPD) {
		ret = drm_dp_dpcd_read(&dp->aux, 0x0, dp->dpcd,
				       sizeof(dp->dpcd));
		if (ret < 0) {
			/* Fixed typo and missing newline in the message. */
			dev_dbg(dp->dev, "DPCD read failed\n");
			goto disconnected;
		}

		/* Cap capabilities to what the source supports. */
		link_config->max_rate = min_t(int,
					      drm_dp_max_link_rate(dp->dpcd),
					      DP_HIGH_BIT_RATE2);
		link_config->max_lanes = min_t(u8,
					       drm_dp_max_lane_count(dp->dpcd),
					       dp->num_lanes);

		dp->status = connector_status_connected;
		return connector_status_connected;
	}

disconnected:
	dp->status = connector_status_disconnected;
	return connector_status_disconnected;
}
/* Fetch the sink's EDID over AUX and populate the connector's mode list. */
static int zynqmp_dp_connector_get_modes(struct drm_connector *connector)
{
	struct zynqmp_dp *dp = connector_to_dp(connector);
	struct edid *edid;
	int num_modes = 0;

	edid = drm_get_edid(connector, &dp->aux.ddc);
	if (edid) {
		drm_connector_update_edid_property(connector, edid);
		num_modes = drm_add_edid_modes(connector, edid);
		kfree(edid);
	}

	return num_modes;
}
/* Return the single encoder backing this connector. */
static struct drm_encoder *
zynqmp_dp_connector_best_encoder(struct drm_connector *connector)
{
	struct zynqmp_dp *dp = connector_to_dp(connector);

	return &dp->encoder;
}
/*
 * Filter modes whose pixel clock exceeds either the IP core's absolute
 * maximum or the bandwidth of the currently detected link configuration.
 */
static int zynqmp_dp_connector_mode_valid(struct drm_connector *connector,
					  struct drm_display_mode *mode)
{
	struct zynqmp_dp *dp = connector_to_dp(connector);
	int link_rate;

	if (mode->clock > ZYNQMP_MAX_FREQ) {
		dev_dbg(dp->dev, "filtered the mode, %s,for high pixel rate\n",
			mode->name);
		drm_mode_debug_printmodeline(mode);
		return MODE_CLOCK_HIGH;
	}

	/* Check with link rate and lane count */
	link_rate = zynqmp_dp_max_rate(dp->link_config.max_rate,
				       dp->link_config.max_lanes,
				       dp->config.bpp);
	if (mode->clock > link_rate) {
		dev_dbg(dp->dev, "filtered the mode, %s,for high pixel rate\n",
			mode->name);
		drm_mode_debug_printmodeline(mode);
		return MODE_CLOCK_HIGH;
	}

	return MODE_OK;
}
/* Connector operations; atomic state handling uses the generic helpers. */
static const struct drm_connector_funcs zynqmp_dp_connector_funcs = {
	.detect			= zynqmp_dp_connector_detect,
	.fill_modes		= drm_helper_probe_single_connector_modes,
	.destroy		= drm_connector_cleanup,
	.atomic_duplicate_state	= drm_atomic_helper_connector_duplicate_state,
	.atomic_destroy_state	= drm_atomic_helper_connector_destroy_state,
	.reset			= drm_atomic_helper_connector_reset,
};
/* Probe helpers: EDID-based mode list, fixed encoder, bandwidth filtering. */
static const struct drm_connector_helper_funcs
zynqmp_dp_connector_helper_funcs = {
	.get_modes	= zynqmp_dp_connector_get_modes,
	.best_encoder	= zynqmp_dp_connector_best_encoder,
	.mode_valid	= zynqmp_dp_connector_mode_valid,
};
/* -----------------------------------------------------------------------------
* DRM Encoder
*/
/*
 * Power up the DP output: resume the device, program the misc registers,
 * wake the sink (DP_SET_POWER_D0) and train the link, then reset and enable
 * the main stream.
 */
static void zynqmp_dp_encoder_enable(struct drm_encoder *encoder)
{
	struct zynqmp_dp *dp = encoder_to_dp(encoder);
	unsigned int i;
	int ret = 0;

	pm_runtime_get_sync(dp->dev);
	dp->enabled = true;
	zynqmp_dp_update_misc(dp);
	if (zynqmp_disp_audio_enabled(dp->dpsub->disp))
		zynqmp_dp_write(dp, ZYNQMP_DP_TX_AUDIO_CONTROL, 1);
	zynqmp_dp_write(dp, ZYNQMP_DP_TX_PHY_POWER_DOWN, 0);
	if (dp->status == connector_status_connected) {
		/*
		 * Retry waking the sink: drm_dp_dpcd_writeb() returns 1 (one
		 * byte written) on success.
		 */
		for (i = 0; i < 3; i++) {
			ret = drm_dp_dpcd_writeb(&dp->aux, DP_SET_POWER,
						 DP_SET_POWER_D0);
			if (ret == 1)
				break;
			usleep_range(300, 500);
		}
		/* Some monitors take time to wake up properly */
		msleep(zynqmp_dp_power_on_delay_ms);
	}
	/*
	 * NOTE(review): when no sink is connected ret stays 0, so the
	 * "DP aux failed" path is taken and link training is skipped.
	 */
	if (ret != 1)
		dev_dbg(dp->dev, "DP aux failed\n");
	else
		zynqmp_dp_train_loop(dp);
	zynqmp_dp_write(dp, ZYNQMP_DP_SOFTWARE_RESET,
			ZYNQMP_DP_SOFTWARE_RESET_ALL);
	zynqmp_dp_write(dp, ZYNQMP_DP_MAIN_STREAM_ENABLE, 1);
}
/*
 * Power down the DP output: stop the main stream, put the sink into D3,
 * power down the TX PHY lanes and release the runtime PM reference.
 */
static void zynqmp_dp_encoder_disable(struct drm_encoder *encoder)
{
	struct zynqmp_dp *dp = encoder_to_dp(encoder);

	dp->enabled = false;
	cancel_delayed_work(&dp->hpd_work);
	zynqmp_dp_write(dp, ZYNQMP_DP_MAIN_STREAM_ENABLE, 0);
	/* Put the sink into power-save mode. */
	drm_dp_dpcd_writeb(&dp->aux, DP_SET_POWER, DP_SET_POWER_D3);
	zynqmp_dp_write(dp, ZYNQMP_DP_TX_PHY_POWER_DOWN,
			ZYNQMP_DP_TX_PHY_POWER_DOWN_ALL);
	if (zynqmp_disp_audio_enabled(dp->dpsub->disp))
		zynqmp_dp_write(dp, ZYNQMP_DP_TX_AUDIO_CONTROL, 0);
	pm_runtime_put_sync(dp->dev);
}
/*
 * Apply the new mode: select the input format, validate the pixel rate
 * against the link bandwidth, pick a link configuration and program the
 * transfer unit and main stream registers.
 */
static void
zynqmp_dp_encoder_atomic_mode_set(struct drm_encoder *encoder,
				  struct drm_crtc_state *crtc_state,
				  struct drm_connector_state *connector_state)
{
	struct zynqmp_dp *dp = encoder_to_dp(encoder);
	struct drm_display_mode *mode = &crtc_state->mode;
	struct drm_display_mode *adjusted_mode = &crtc_state->adjusted_mode;
	u8 max_lanes = dp->link_config.max_lanes;
	u8 bpp = dp->config.bpp;
	int rate, max_rate = dp->link_config.max_rate;
	int ret;

	zynqmp_dp_set_format(dp, ZYNQMP_DPSUB_FORMAT_RGB, 8);

	/* Check again as bpp or format might have been changed */
	rate = zynqmp_dp_max_rate(max_rate, max_lanes, bpp);
	if (mode->clock > rate) {
		dev_err(dp->dev, "the mode, %s,has too high pixel rate\n",
			mode->name);
		drm_mode_debug_printmodeline(mode);
	}

	ret = zynqmp_dp_mode_configure(dp, adjusted_mode->clock, 0);
	if (ret < 0)
		return;

	zynqmp_dp_encoder_mode_set_transfer_unit(dp, adjusted_mode);
	zynqmp_dp_encoder_mode_set_stream(dp, adjusted_mode);
}
#define ZYNQMP_DP_MIN_H_BACKPORCH 20
/*
 * Enforce the IP core's minimum horizontal backporch by widening htotal and
 * scaling the pixel clock so the refresh rate is preserved.
 */
static int
zynqmp_dp_encoder_atomic_check(struct drm_encoder *encoder,
			       struct drm_crtc_state *crtc_state,
			       struct drm_connector_state *conn_state)
{
	struct drm_display_mode *mode = &crtc_state->mode;
	struct drm_display_mode *adjusted_mode = &crtc_state->adjusted_mode;
	int diff = mode->htotal - mode->hsync_end;

	/*
	 * ZynqMP DP requires horizontal backporch to be greater than 12.
	 * This limitation may not be compatible with the sink device.
	 */
	if (diff < ZYNQMP_DP_MIN_H_BACKPORCH) {
		int vrefresh = (adjusted_mode->clock * 1000) /
			       (adjusted_mode->vtotal * adjusted_mode->htotal);

		/*
		 * NOTE(review): the second argument is the padding being
		 * added, not the resulting backporch — confirm the intended
		 * wording of this message.
		 */
		dev_dbg(encoder->dev->dev, "hbackporch adjusted: %d to %d",
			diff, ZYNQMP_DP_MIN_H_BACKPORCH - diff);
		diff = ZYNQMP_DP_MIN_H_BACKPORCH - diff;
		adjusted_mode->htotal += diff;
		adjusted_mode->clock = adjusted_mode->vtotal *
				       adjusted_mode->htotal * vrefresh / 1000;
	}

	return 0;
}
/* Encoder helpers: power sequencing, mode programming and mode fixup. */
static const struct drm_encoder_helper_funcs zynqmp_dp_encoder_helper_funcs = {
	.enable			= zynqmp_dp_encoder_enable,
	.disable		= zynqmp_dp_encoder_disable,
	.atomic_mode_set	= zynqmp_dp_encoder_atomic_mode_set,
	.atomic_check		= zynqmp_dp_encoder_atomic_check,
};
/* -----------------------------------------------------------------------------
* Interrupt Handling
*/
/**
 * zynqmp_dp_enable_vblank - Enable vblank
 * @dp: DisplayPort IP core structure
 *
 * Enable vblank interrupt by unmasking the vblank-start bit in INT_EN.
 */
void zynqmp_dp_enable_vblank(struct zynqmp_dp *dp)
{
	zynqmp_dp_write(dp, ZYNQMP_DP_INT_EN, ZYNQMP_DP_INT_VBLANK_START);
}
/**
 * zynqmp_dp_disable_vblank - Disable vblank
 * @dp: DisplayPort IP core structure
 *
 * Disable vblank interrupt by masking the vblank-start bit via INT_DS.
 */
void zynqmp_dp_disable_vblank(struct zynqmp_dp *dp)
{
	zynqmp_dp_write(dp, ZYNQMP_DP_INT_DS, ZYNQMP_DP_INT_VBLANK_START);
}
/* Deferred HPD handler: forward the hotplug event to the DRM core. */
static void zynqmp_dp_hpd_work_func(struct work_struct *work)
{
	struct zynqmp_dp *dp =
		container_of(work, struct zynqmp_dp, hpd_work.work);

	/* Only notify once a DRM device has been bound. */
	if (dp->drm)
		drm_helper_hpd_irq_event(dp->drm);
}
/*
 * Threaded IRQ handler: acknowledge the asserted interrupt sources, forward
 * vblank to the display controller, defer HPD events to the work queue, and
 * retrain the link on an HPD IRQ that reports a degraded link.
 */
static irqreturn_t zynqmp_dp_irq_handler(int irq, void *data)
{
	struct zynqmp_dp *dp = (struct zynqmp_dp *)data;
	u32 status, mask;

	status = zynqmp_dp_read(dp, ZYNQMP_DP_INT_STATUS);
	mask = zynqmp_dp_read(dp, ZYNQMP_DP_INT_MASK);
	if (!(status & ~mask))
		return IRQ_NONE;

	/* dbg for diagnostic, but not much that the driver can do */
	if (status & ZYNQMP_DP_INT_CHBUF_UNDERFLW_MASK)
		dev_dbg_ratelimited(dp->dev, "underflow interrupt\n");
	if (status & ZYNQMP_DP_INT_CHBUF_OVERFLW_MASK)
		dev_dbg_ratelimited(dp->dev, "overflow interrupt\n");

	zynqmp_dp_write(dp, ZYNQMP_DP_INT_STATUS, status);

	if (status & ZYNQMP_DP_INT_VBLANK_START)
		zynqmp_disp_handle_vblank(dp->dpsub->disp);

	if (status & ZYNQMP_DP_INT_HPD_EVENT)
		schedule_delayed_work(&dp->hpd_work, 0);

	if (status & ZYNQMP_DP_INT_HPD_IRQ) {
		int ret;
		/* Renamed from 'status' to stop shadowing the IRQ status. */
		u8 sink_status[DP_LINK_STATUS_SIZE + 2];

		ret = drm_dp_dpcd_read(&dp->aux, DP_SINK_COUNT, sink_status,
				       DP_LINK_STATUS_SIZE + 2);
		if (ret < 0)
			goto handled;

		/* Retrain when the sink reports a link status change. */
		if (sink_status[4] & DP_LINK_STATUS_UPDATED ||
		    !drm_dp_clock_recovery_ok(&sink_status[2],
					      dp->mode.lane_cnt) ||
		    !drm_dp_channel_eq_ok(&sink_status[2],
					  dp->mode.lane_cnt)) {
			zynqmp_dp_train_loop(dp);
		}
	}

handled:
	return IRQ_HANDLED;
}
/* -----------------------------------------------------------------------------
* Initialization & Cleanup
*/
/**
 * zynqmp_dp_drm_init - Initialize the DRM objects for the DP core
 * @dpsub: DisplayPort subsystem
 *
 * Create the DRM encoder and connector, register the DP AUX adapter, and
 * enable the DP interrupts. Must be called after the CRTC has been created,
 * as the encoder needs the CRTC mask.
 *
 * Return: 0 on success, or a negative error code.
 */
int zynqmp_dp_drm_init(struct zynqmp_dpsub *dpsub)
{
	struct zynqmp_dp *dp = dpsub->dp;
	struct drm_encoder *encoder = &dp->encoder;
	struct drm_connector *connector = &dp->connector;
	int ret;

	dp->config.misc0 &= ~ZYNQMP_DP_MAIN_STREAM_MISC0_SYNC_LOCK;
	zynqmp_dp_set_format(dp, ZYNQMP_DPSUB_FORMAT_RGB, 8);

	/* Create the DRM encoder and connector. */
	encoder->possible_crtcs |= zynqmp_disp_get_crtc_mask(dpsub->disp);
	/* Check the return value, which was previously ignored. */
	ret = drm_simple_encoder_init(dp->drm, encoder, DRM_MODE_ENCODER_TMDS);
	if (ret) {
		dev_err(dp->dev, "failed to create the DRM encoder\n");
		return ret;
	}
	drm_encoder_helper_add(encoder, &zynqmp_dp_encoder_helper_funcs);

	connector->polled = DRM_CONNECTOR_POLL_HPD;
	ret = drm_connector_init(encoder->dev, connector,
				 &zynqmp_dp_connector_funcs,
				 DRM_MODE_CONNECTOR_DisplayPort);
	if (ret) {
		dev_err(dp->dev, "failed to create the DRM connector\n");
		return ret;
	}

	drm_connector_helper_add(connector, &zynqmp_dp_connector_helper_funcs);
	drm_connector_register(connector);
	drm_connector_attach_encoder(connector, encoder);

	/* Initialize and register the AUX adapter. */
	ret = zynqmp_dp_aux_init(dp);
	if (ret) {
		dev_err(dp->dev, "failed to initialize DP aux\n");
		return ret;
	}

	/* Now that initialisation is complete, enable interrupts. */
	zynqmp_dp_write(dp, ZYNQMP_DP_INT_EN, ZYNQMP_DP_INT_ALL);

	return 0;
}
/*
 * Allocate the DP core state, acquire its resources (IOMEM, IRQ, reset and
 * PHYs), initialize the hardware into a quiescent state and request the IRQ.
 */
int zynqmp_dp_probe(struct zynqmp_dpsub *dpsub, struct drm_device *drm)
{
	struct platform_device *pdev = to_platform_device(dpsub->dev);
	struct zynqmp_dp *dp;
	struct resource *res;
	int ret;

	/* DRM-managed allocation, freed with the DRM device. */
	dp = drmm_kzalloc(drm, sizeof(*dp), GFP_KERNEL);
	if (!dp)
		return -ENOMEM;

	dp->dev = &pdev->dev;
	dp->dpsub = dpsub;
	dp->status = connector_status_disconnected;
	dp->drm = drm;

	INIT_DELAYED_WORK(&dp->hpd_work, zynqmp_dp_hpd_work_func);

	dpsub->dp = dp;

	/* Acquire all resources (IOMEM, IRQ and PHYs). */
	res = platform_get_resource_byname(pdev, IORESOURCE_MEM, "dp");
	dp->iomem = devm_ioremap_resource(dp->dev, res);
	if (IS_ERR(dp->iomem))
		return PTR_ERR(dp->iomem);

	dp->irq = platform_get_irq(pdev, 0);
	if (dp->irq < 0)
		return dp->irq;

	dp->reset = devm_reset_control_get(dp->dev, NULL);
	if (IS_ERR(dp->reset)) {
		/* Stay quiet on probe deferral; it will be retried. */
		if (PTR_ERR(dp->reset) != -EPROBE_DEFER)
			dev_err(dp->dev, "failed to get reset: %ld\n",
				PTR_ERR(dp->reset));
		return PTR_ERR(dp->reset);
	}

	ret = zynqmp_dp_phy_probe(dp);
	if (ret)
		return ret;

	/* Initialize the hardware. */
	zynqmp_dp_write(dp, ZYNQMP_DP_TX_PHY_POWER_DOWN,
			ZYNQMP_DP_TX_PHY_POWER_DOWN_ALL);
	zynqmp_dp_set(dp, ZYNQMP_DP_PHY_RESET, ZYNQMP_DP_PHY_RESET_ALL_RESET);
	zynqmp_dp_write(dp, ZYNQMP_DP_FORCE_SCRAMBLER_RESET, 1);
	zynqmp_dp_write(dp, ZYNQMP_DP_TRANSMITTER_ENABLE, 0);
	zynqmp_dp_write(dp, ZYNQMP_DP_INT_DS, 0xffffffff);

	ret = zynqmp_dp_phy_init(dp);
	if (ret)
		return ret;

	zynqmp_dp_write(dp, ZYNQMP_DP_TRANSMITTER_ENABLE, 1);

	/*
	 * Now that the hardware is initialized and won't generate spurious
	 * interrupts, request the IRQ.
	 */
	ret = devm_request_threaded_irq(dp->dev, dp->irq, NULL,
					zynqmp_dp_irq_handler, IRQF_ONESHOT,
					dev_name(dp->dev), dp);
	if (ret < 0)
		goto error;

	dev_dbg(dp->dev, "ZynqMP DisplayPort Tx probed with %u lanes\n",
		dp->num_lanes);

	return 0;

error:
	zynqmp_dp_phy_exit(dp);
	return ret;
}
/*
 * Tear down the DP core: mask interrupts, stop the HPD work, unregister the
 * AUX adapter, disable the transmitter and release the PHYs.
 */
void zynqmp_dp_remove(struct zynqmp_dpsub *dpsub)
{
	struct zynqmp_dp *dp = dpsub->dp;

	/* Mask interrupts before tearing anything down. */
	zynqmp_dp_write(dp, ZYNQMP_DP_INT_DS, ZYNQMP_DP_INT_ALL);
	disable_irq(dp->irq);

	cancel_delayed_work_sync(&dp->hpd_work);
	zynqmp_dp_aux_cleanup(dp);

	zynqmp_dp_write(dp, ZYNQMP_DP_TRANSMITTER_ENABLE, 0);
	/* Disable all remaining interrupt sources. */
	zynqmp_dp_write(dp, ZYNQMP_DP_INT_DS, 0xffffffff);
	zynqmp_dp_phy_exit(dp);
}
/* SPDX-License-Identifier: GPL-2.0 */
/*
* ZynqMP DisplayPort Driver
*
* Copyright (C) 2017 - 2020 Xilinx, Inc.
*
* Authors:
* - Hyun Woo Kwon <hyun.kwon@xilinx.com>
* - Laurent Pinchart <laurent.pinchart@ideasonboard.com>
*/
#ifndef _ZYNQMP_DP_H_
#define _ZYNQMP_DP_H_

struct drm_device;
struct platform_device;
struct zynqmp_dp;
struct zynqmp_dpsub;

/* Enable/disable the vblank-start interrupt of the DP core. */
void zynqmp_dp_enable_vblank(struct zynqmp_dp *dp);
void zynqmp_dp_disable_vblank(struct zynqmp_dp *dp);

/* Create the DRM encoder, connector and AUX adapter for the DP core. */
int zynqmp_dp_drm_init(struct zynqmp_dpsub *dpsub);

/* Probe/remove the DP core of the DisplayPort subsystem. */
int zynqmp_dp_probe(struct zynqmp_dpsub *dpsub, struct drm_device *drm);
void zynqmp_dp_remove(struct zynqmp_dpsub *dpsub);

#endif /* _ZYNQMP_DP_H_ */
// SPDX-License-Identifier: GPL-2.0
/*
* ZynqMP DisplayPort Subsystem Driver
*
* Copyright (C) 2017 - 2020 Xilinx, Inc.
*
* Authors:
* - Hyun Woo Kwon <hyun.kwon@xilinx.com>
* - Laurent Pinchart <laurent.pinchart@ideasonboard.com>
*/
#include <linux/clk.h>
#include <linux/dma-mapping.h>
#include <linux/module.h>
#include <linux/of_reserved_mem.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <drm/drm_atomic_helper.h>
#include <drm/drm_device.h>
#include <drm/drm_drv.h>
#include <drm/drm_fb_helper.h>
#include <drm/drm_fourcc.h>
#include <drm/drm_gem_cma_helper.h>
#include <drm/drm_gem_framebuffer_helper.h>
#include <drm/drm_managed.h>
#include <drm/drm_mode_config.h>
#include <drm/drm_probe_helper.h>
#include <drm/drm_vblank.h>
#include "zynqmp_disp.h"
#include "zynqmp_dp.h"
#include "zynqmp_dpsub.h"
/* -----------------------------------------------------------------------------
* Dumb Buffer & Framebuffer Allocation
*/
/* Create a dumb buffer with a pitch padded to the DMA engine's alignment. */
static int zynqmp_dpsub_dumb_create(struct drm_file *file_priv,
				    struct drm_device *drm,
				    struct drm_mode_create_dumb *args)
{
	struct zynqmp_dpsub *dpsub = to_zynqmp_dpsub(drm);
	unsigned int pitch;

	/* Enforce the alignment constraints of the DMA engine. */
	pitch = DIV_ROUND_UP(args->width * args->bpp, 8);
	args->pitch = ALIGN(pitch, dpsub->dma_align);

	return drm_gem_cma_dumb_create_internal(file_priv, drm, args);
}
static struct drm_framebuffer *
zynqmp_dpsub_fb_create(struct drm_device *drm, struct drm_file *file_priv,
const struct drm_mode_fb_cmd2 *mode_cmd)
{
struct zynqmp_dpsub *dpsub = to_zynqmp_dpsub(drm);
struct drm_mode_fb_cmd2 cmd = *mode_cmd;
unsigned int i;
/* Enforce the alignment constraints of the DMA engine. */
for (i = 0; i < ARRAY_SIZE(cmd.pitches); ++i)
cmd.pitches[i] = ALIGN(cmd.pitches[i], dpsub->dma_align);
return drm_gem_fb_create(drm, file_priv, &cmd);
}
/* Mode config: custom fb_create for DMA alignment, generic atomic helpers. */
static const struct drm_mode_config_funcs zynqmp_dpsub_mode_config_funcs = {
	.fb_create		= zynqmp_dpsub_fb_create,
	.atomic_check		= drm_atomic_helper_check,
	.atomic_commit		= drm_atomic_helper_commit,
};
/* -----------------------------------------------------------------------------
* DRM/KMS Driver
*/
/* File operations backed by the CMA GEM helpers. */
DEFINE_DRM_GEM_CMA_FOPS(zynqmp_dpsub_drm_fops);

/* Atomic modesetting driver with CMA-backed GEM objects and PRIME support. */
static struct drm_driver zynqmp_dpsub_drm_driver = {
	.driver_features		= DRIVER_MODESET | DRIVER_GEM |
					  DRIVER_ATOMIC,

	.prime_handle_to_fd		= drm_gem_prime_handle_to_fd,
	.prime_fd_to_handle		= drm_gem_prime_fd_to_handle,
	.gem_prime_export		= drm_gem_prime_export,
	.gem_prime_import		= drm_gem_prime_import,
	.gem_prime_get_sg_table		= drm_gem_cma_prime_get_sg_table,
	.gem_prime_import_sg_table	= drm_gem_cma_prime_import_sg_table,
	.gem_prime_vmap			= drm_gem_cma_prime_vmap,
	.gem_prime_vunmap		= drm_gem_cma_prime_vunmap,
	.gem_prime_mmap			= drm_gem_cma_prime_mmap,
	.gem_free_object_unlocked	= drm_gem_cma_free_object,
	.gem_vm_ops			= &drm_gem_cma_vm_ops,
	.dumb_create			= zynqmp_dpsub_dumb_create,
	.dumb_destroy			= drm_gem_dumb_destroy,

	.fops				= &zynqmp_dpsub_drm_fops,

	.name				= "zynqmp-dpsub",
	.desc				= "Xilinx DisplayPort Subsystem Driver",
	.date				= "20130509",
	.major				= 1,
	.minor				= 0,
};
/*
 * Initialize the DRM/KMS side of the subsystem: mode configuration, vblank,
 * the KMS poll helper, the DISP and DP components, then register the DRM
 * device and set up generic fbdev emulation.
 *
 * Returns 0 on success or a negative error code on failure.
 */
static int zynqmp_dpsub_drm_init(struct zynqmp_dpsub *dpsub)
{
	struct drm_device *drm = &dpsub->drm;
	int ret;

	/* Initialize mode config, vblank and the KMS poll helper. */
	ret = drmm_mode_config_init(drm);
	if (ret < 0)
		goto err_dev_put;

	drm->mode_config.funcs = &zynqmp_dpsub_mode_config_funcs;
	drm->mode_config.min_width = 0;
	drm->mode_config.min_height = 0;
	drm->mode_config.max_width = ZYNQMP_DISP_MAX_WIDTH;
	drm->mode_config.max_height = ZYNQMP_DISP_MAX_HEIGHT;

	/* A single CRTC, hence a single vblank counter. */
	ret = drm_vblank_init(drm, 1);
	if (ret)
		goto err_dev_put;

	drm->irq_enabled = 1;

	drm_kms_helper_poll_init(drm);

	/*
	 * Initialize the DISP and DP components. This creates planes,
	 * CRTC, encoder and connector. The DISP should be initialized first as
	 * the DP encoder needs the CRTC.
	 */
	ret = zynqmp_disp_drm_init(dpsub);
	if (ret)
		goto err_poll_fini;

	ret = zynqmp_dp_drm_init(dpsub);
	if (ret)
		goto err_poll_fini;

	/* Reset all components and register the DRM device. */
	drm_mode_config_reset(drm);

	ret = drm_dev_register(drm, 0);
	if (ret < 0)
		goto err_poll_fini;

	/* Initialize fbdev generic emulation. */
	drm_fbdev_generic_setup(drm, 24);

	return 0;

err_poll_fini:
	drm_kms_helper_poll_fini(drm);
err_dev_put:
	/*
	 * NOTE(review): this drops the reference taken by drm_dev_init() in
	 * probe, which together with drmm_add_final_kfree() may free dpsub;
	 * probe's error path then still calls zynqmp_disp_remove() and
	 * zynqmp_dp_remove() on it -- verify the teardown ordering.
	 */
	drm_dev_put(drm);
	return ret;
}
/* -----------------------------------------------------------------------------
 * Power Management
 */

/* System sleep entry point: suspend the KMS state through the DRM helper. */
static int __maybe_unused zynqmp_dpsub_suspend(struct device *dev)
{
	struct zynqmp_dpsub *dpsub = dev_get_drvdata(dev);

	return drm_mode_config_helper_suspend(&dpsub->drm);
}
/* System resume entry point: restore the KMS state through the DRM helper. */
static int __maybe_unused zynqmp_dpsub_resume(struct device *dev)
{
	struct zynqmp_dpsub *dpsub = dev_get_drvdata(dev);

	return drm_mode_config_helper_resume(&dpsub->drm);
}
/* PM callbacks for system-wide suspend/resume transitions. */
static const struct dev_pm_ops zynqmp_dpsub_pm_ops = {
	SET_SYSTEM_SLEEP_PM_OPS(zynqmp_dpsub_suspend, zynqmp_dpsub_resume)
};
/* -----------------------------------------------------------------------------
* Probe & Remove
*/
/*
 * Acquire the APB clock (devm-managed, released automatically on driver
 * detach) and enable it. Returns 0 on success or a negative error code.
 */
static int zynqmp_dpsub_init_clocks(struct zynqmp_dpsub *dpsub)
{
	int ret;

	dpsub->apb_clk = devm_clk_get(dpsub->dev, "dp_apb_clk");
	if (IS_ERR(dpsub->apb_clk))
		return PTR_ERR(dpsub->apb_clk);

	ret = clk_prepare_enable(dpsub->apb_clk);
	if (ret)
		dev_err(dpsub->dev, "failed to enable the APB clock\n");

	return ret;
}
/*
 * Probe the DisplayPort subsystem: allocate the driver data, initialize the
 * DRM device, reserved memory, clocks and runtime PM, then bring up the DP
 * and DISP components and the DRM/KMS layer.
 *
 * Returns 0 on success or a negative error code on failure.
 */
static int zynqmp_dpsub_probe(struct platform_device *pdev)
{
	struct zynqmp_dpsub *dpsub;
	int ret;

	/* Allocate private data. */
	dpsub = kzalloc(sizeof(*dpsub), GFP_KERNEL);
	if (!dpsub)
		return -ENOMEM;

	dpsub->dev = &pdev->dev;
	platform_set_drvdata(pdev, dpsub);

	/* NOTE(review): dma_set_mask() can fail; its return value is ignored. */
	dma_set_mask(dpsub->dev, DMA_BIT_MASK(ZYNQMP_DISP_MAX_DMA_BIT));

	/*
	 * Initialize the DRM device early, as the DRM core mandates usage of
	 * the managed memory helpers tied to the DRM device.
	 */
	ret = drm_dev_init(&dpsub->drm, &zynqmp_dpsub_drm_driver, &pdev->dev);
	if (ret < 0) {
		kfree(dpsub);
		return ret;
	}

	/* From here on, dpsub is freed by the DRM core with the last ref. */
	drmm_add_final_kfree(&dpsub->drm, dpsub);

	/* Try the reserved memory. Proceed if there's none. */
	of_reserved_mem_device_init(&pdev->dev);

	ret = zynqmp_dpsub_init_clocks(dpsub);
	if (ret < 0)
		goto err_mem;

	pm_runtime_enable(&pdev->dev);

	/*
	 * DP should be probed first so that the zynqmp_disp can set the output
	 * format accordingly.
	 */
	ret = zynqmp_dp_probe(dpsub, &dpsub->drm);
	if (ret)
		goto err_pm;

	ret = zynqmp_disp_probe(dpsub, &dpsub->drm);
	if (ret)
		goto err_dp;

	ret = zynqmp_dpsub_drm_init(dpsub);
	if (ret)
		goto err_disp;

	dev_info(&pdev->dev, "ZynqMP DisplayPort Subsystem driver probed");

	return 0;

err_disp:
	/*
	 * NOTE(review): when zynqmp_dpsub_drm_init() fails it has already
	 * called drm_dev_put(), which via drmm_add_final_kfree() may have
	 * freed dpsub -- verify these teardown calls cannot touch freed
	 * memory.
	 */
	zynqmp_disp_remove(dpsub);
err_dp:
	zynqmp_dp_remove(dpsub);
err_pm:
	pm_runtime_disable(&pdev->dev);
	clk_disable_unprepare(dpsub->apb_clk);
err_mem:
	of_reserved_mem_device_release(&pdev->dev);
	return ret;
}
/*
 * Remove the device: unregister the DRM device, shut down the display
 * pipeline, tear down the DISP and DP components, and release runtime PM,
 * clocks, reserved memory and the DRM device reference, in reverse probe
 * order.
 */
static int zynqmp_dpsub_remove(struct platform_device *pdev)
{
	struct zynqmp_dpsub *dpsub = platform_get_drvdata(pdev);
	struct drm_device *drm = &dpsub->drm;

	/* Unregister first so userspace can no longer reach the device. */
	drm_dev_unregister(drm);
	drm_atomic_helper_shutdown(drm);
	drm_kms_helper_poll_fini(drm);

	zynqmp_disp_remove(dpsub);
	zynqmp_dp_remove(dpsub);

	pm_runtime_disable(&pdev->dev);
	clk_disable_unprepare(dpsub->apb_clk);
	of_reserved_mem_device_release(&pdev->dev);

	/* Drop our reference; may free dpsub via drmm_add_final_kfree(). */
	drm_dev_put(drm);

	return 0;
}
/* Quiesce the display pipeline on system shutdown/reboot. */
static void zynqmp_dpsub_shutdown(struct platform_device *pdev)
{
	struct zynqmp_dpsub *dpsub = platform_get_drvdata(pdev);

	drm_atomic_helper_shutdown(&dpsub->drm);
}
/* Device tree match table; compatible string as defined in the DT binding. */
static const struct of_device_id zynqmp_dpsub_of_match[] = {
	{ .compatible = "xlnx,zynqmp-dpsub-1.7", },
	{ /* end of table */ },
};
MODULE_DEVICE_TABLE(of, zynqmp_dpsub_of_match);
/* Platform driver glue: probe/remove/shutdown plus PM and OF matching. */
static struct platform_driver zynqmp_dpsub_driver = {
	.probe			= zynqmp_dpsub_probe,
	.remove			= zynqmp_dpsub_remove,
	.shutdown		= zynqmp_dpsub_shutdown,
	.driver			= {
		.name		= "zynqmp-dpsub",
		.pm		= &zynqmp_dpsub_pm_ops,
		.of_match_table	= zynqmp_dpsub_of_match,
	},
};

module_platform_driver(zynqmp_dpsub_driver);

MODULE_AUTHOR("Xilinx, Inc.");
MODULE_DESCRIPTION("ZynqMP DP Subsystem Driver");
MODULE_LICENSE("GPL v2");
/* SPDX-License-Identifier: GPL-2.0 */
/*
* ZynqMP DPSUB Subsystem Driver
*
* Copyright (C) 2017 - 2020 Xilinx, Inc.
*
* Authors:
* - Hyun Woo Kwon <hyun.kwon@xilinx.com>
* - Laurent Pinchart <laurent.pinchart@ideasonboard.com>
*/
#ifndef _ZYNQMP_DPSUB_H_
#define _ZYNQMP_DPSUB_H_
struct clk;
struct device;
struct drm_device;
struct zynqmp_disp;
struct zynqmp_dp;
/**
 * enum zynqmp_dpsub_format - Output video format of the subsystem
 * @ZYNQMP_DPSUB_FORMAT_RGB: RGB
 * @ZYNQMP_DPSUB_FORMAT_YCRCB444: YCbCr 4:4:4
 * @ZYNQMP_DPSUB_FORMAT_YCRCB422: YCbCr 4:2:2
 * @ZYNQMP_DPSUB_FORMAT_YONLY: Luma (Y) only
 */
enum zynqmp_dpsub_format {
	ZYNQMP_DPSUB_FORMAT_RGB,
	ZYNQMP_DPSUB_FORMAT_YCRCB444,
	ZYNQMP_DPSUB_FORMAT_YCRCB422,
	ZYNQMP_DPSUB_FORMAT_YONLY,
};
/**
 * struct zynqmp_dpsub - ZynqMP DisplayPort Subsystem
 * @drm: The DRM/KMS device, embedded so the subsystem and DRM device share
 *	a single allocation (freed through drmm_add_final_kfree())
 * @dev: The physical device
 * @apb_clk: The APB clock
 * @disp: The display controller
 * @dp: The DisplayPort controller
 * @dma_align: DMA alignment constraint (must be a power of 2)
 */
struct zynqmp_dpsub {
	struct drm_device drm;
	struct device *dev;
	struct clk *apb_clk;
	struct zynqmp_disp *disp;
	struct zynqmp_dp *dp;
	unsigned int dma_align;
};
/* Upcast from the embedded drm_device to its zynqmp_dpsub container. */
static inline struct zynqmp_dpsub *to_zynqmp_dpsub(struct drm_device *drm)
{
	return container_of(drm, struct zynqmp_dpsub, drm);
}
#endif /* _ZYNQMP_DPSUB_H_ */
/* SPDX-License-Identifier: (GPL-2.0 OR MIT) */
/*
* Copyright 2019 Laurent Pinchart <laurent.pinchart@ideasonboard.com>
*/
#ifndef __DT_BINDINGS_DMA_XLNX_ZYNQMP_DPDMA_H__
#define __DT_BINDINGS_DMA_XLNX_ZYNQMP_DPDMA_H__

/*
 * DPDMA channel identifiers for use in device tree "dmas" specifiers
 * (presumably one per video/graphics/audio channel of the DPDMA -- see the
 * DT binding for the authoritative mapping).
 */
#define ZYNQMP_DPDMA_VIDEO0 0
#define ZYNQMP_DPDMA_VIDEO1 1
#define ZYNQMP_DPDMA_VIDEO2 2
#define ZYNQMP_DPDMA_GRAPHICS 3
#define ZYNQMP_DPDMA_AUDIO0 4
#define ZYNQMP_DPDMA_AUDIO1 5

#endif /* __DT_BINDINGS_DMA_XLNX_ZYNQMP_DPDMA_H__ */
......@@ -61,6 +61,8 @@ enum dma_transaction_type {
DMA_SLAVE,
DMA_CYCLIC,
DMA_INTERLEAVE,
DMA_REPEAT,
DMA_LOAD_EOT,
/* last transaction type for creation of the capabilities mask */
DMA_TX_TYPE_END,
};
......@@ -176,6 +178,16 @@ struct dma_interleaved_template {
* @DMA_PREP_CMD: tell the driver that the data passed to DMA API is command
* data and the descriptor should be in different format from normal
* data descriptors.
* @DMA_PREP_REPEAT: tell the driver that the transaction shall be automatically
* repeated when it ends until a transaction is issued on the same channel
* with the DMA_PREP_LOAD_EOT flag set. This flag is only applicable to
* interleaved transactions and is ignored for all other transaction types.
* @DMA_PREP_LOAD_EOT: tell the driver that the transaction shall replace any
* active repeated (as indicated by DMA_PREP_REPEAT) transaction when the
* repeated transaction ends. Not setting this flag when the previously queued
* transaction is marked with DMA_PREP_REPEAT will cause the new transaction
* to never be processed and stay in the issued queue forever. The flag is
* ignored if the previous transaction is not a repeated transaction.
*/
enum dma_ctrl_flags {
DMA_PREP_INTERRUPT = (1 << 0),
......@@ -186,6 +198,8 @@ enum dma_ctrl_flags {
DMA_PREP_FENCE = (1 << 5),
DMA_CTRL_REUSE = (1 << 6),
DMA_PREP_CMD = (1 << 7),
DMA_PREP_REPEAT = (1 << 8),
DMA_PREP_LOAD_EOT = (1 << 9),
};
/**
......@@ -980,6 +994,9 @@ static inline struct dma_async_tx_descriptor *dmaengine_prep_interleaved_dma(
{
if (!chan || !chan->device || !chan->device->device_prep_interleaved_dma)
return NULL;
if (flags & DMA_PREP_REPEAT &&
!test_bit(DMA_REPEAT, chan->device->cap_mask.bits))
return NULL;
return chan->device->device_prep_interleaved_dma(chan, xt, flags);
}
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment