/*
 * Copyright (C) Fuzhou Rockchip Electronics Co.Ltd
 * Author: Chris Zhong <zyw@rock-chips.com>
 *
 * This software is licensed under the terms of the GNU General Public
 * License version 2, as published by the Free Software Foundation, and
 * may be copied, distributed, and modified under those terms.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 */
16 #include <drm/drm_atomic_helper.h>
17 #include <drm/drm_crtc_helper.h>
18 #include <drm/drm_dp_helper.h>
19 #include <drm/drm_edid.h>
20 #include <drm/drm_of.h>
22 #include <linux/clk.h>
23 #include <linux/component.h>
24 #include <linux/extcon.h>
25 #include <linux/firmware.h>
26 #include <linux/regmap.h>
27 #include <linux/reset.h>
28 #include <linux/mfd/syscon.h>
29 #include <linux/phy/phy.h>
31 #include <sound/hdmi-codec.h>
33 #include "cdn-dp-core.h"
34 #include "cdn-dp-reg.h"
35 #include "rockchip_drm_vop.h"
37 #define connector_to_dp(c) \
38 container_of(c, struct cdn_dp_device, connector)
40 #define encoder_to_dp(c) \
41 container_of(c, struct cdn_dp_device, encoder)
43 #define GRF_SOC_CON9 0x6224
44 #define DP_SEL_VOP_LIT BIT(12)
45 #define GRF_SOC_CON26 0x6268
46 #define DPTX_HPD_SEL (3 << 12)
47 #define DPTX_HPD_DEL (2 << 12)
48 #define DPTX_HPD_SEL_MASK (3 << 28)
50 #define CDN_FW_TIMEOUT_MS (64 * 1000)
51 #define CDN_DPCD_TIMEOUT_MS 5000
52 #define CDN_DP_FIRMWARE "/*(DEBLOBBED)*/"
/* rk3399 SoC-specific data, matched through the OF table below. */
58 struct cdn_dp_data rk3399_cdn_dp = {
/* Device-tree match table: binds this driver to "rockchip,rk3399-cdn-dp". */
62 static const struct of_device_id cdn_dp_dt_ids[] = {
63 { .compatible = "rockchip,rk3399-cdn-dp",
64 .data = (void *)&rk3399_cdn_dp },
68 MODULE_DEVICE_TABLE(of, cdn_dp_dt_ids);
/*
 * Write @val to GRF register @reg.  The GRF clock must be running for the
 * regmap access, so it is prepared/enabled first and released again on
 * both the error path and the normal path.
 */
70 static int cdn_dp_grf_write(struct cdn_dp_device *dp,
71 unsigned int reg, unsigned int val)
75 ret = clk_prepare_enable(dp->grf_clk);
77 DRM_DEV_ERROR(dp->dev, "Failed to prepare_enable grf clock\n");
81 ret = regmap_write(dp->grf, reg, val);
83 DRM_DEV_ERROR(dp->dev, "Could not write to GRF: %d\n", ret);
/* error path: balance the clk_prepare_enable() above */
84 clk_disable_unprepare(dp->grf_clk);
88 clk_disable_unprepare(dp->grf_clk);
/*
 * Power up the controller: pclk and core clock on, runtime-PM reference
 * taken, then pulse the core/dptx/apb resets so the IP starts from a
 * clean state.  The measured core-clock rate is handed to the firmware
 * (cdn_dp_set_fw_clk()) before the final clock reset.  Error labels
 * unwind in reverse order of acquisition.
 */
93 static int cdn_dp_clk_enable(struct cdn_dp_device *dp)
98 ret = clk_prepare_enable(dp->pclk);
100 DRM_DEV_ERROR(dp->dev, "cannot enable dp pclk %d\n", ret);
104 ret = clk_prepare_enable(dp->core_clk);
106 DRM_DEV_ERROR(dp->dev, "cannot enable core_clk %d\n", ret);
110 ret = pm_runtime_get_sync(dp->dev);
112 DRM_DEV_ERROR(dp->dev, "cannot get pm runtime %d\n", ret);
113 goto err_pm_runtime_get;
/* Assert then deassert all three resets to re-initialize the block. */
116 reset_control_assert(dp->core_rst);
117 reset_control_assert(dp->dptx_rst);
118 reset_control_assert(dp->apb_rst);
119 reset_control_deassert(dp->core_rst);
120 reset_control_deassert(dp->dptx_rst);
121 reset_control_deassert(dp->apb_rst);
123 rate = clk_get_rate(dp->core_clk);
125 DRM_DEV_ERROR(dp->dev, "get clk rate failed\n");
/* Tell the firmware its reference clock rate, then reset its clocks. */
130 cdn_dp_set_fw_clk(dp, rate);
131 cdn_dp_clock_reset(dp);
/* Error unwind: drop runtime PM, core clock and pclk, in that order. */
136 pm_runtime_put(dp->dev);
138 clk_disable_unprepare(dp->core_clk);
140 clk_disable_unprepare(dp->pclk);
/* Inverse of cdn_dp_clk_enable(): drop runtime PM and both clocks. */
145 static void cdn_dp_clk_disable(struct cdn_dp_device *dp)
147 pm_runtime_put_sync(dp->dev);
148 clk_disable_unprepare(dp->pclk);
149 clk_disable_unprepare(dp->core_clk);
/*
 * Work out how many DP lanes @port can use from its extcon state: the
 * EXTCON_PROP_USB_SS property reports whether SuperSpeed USB shares the
 * Type-C pins (presumably selecting 2 vs 4 lanes -- the derivation lines
 * are not visible in this excerpt; confirm against the extcon property).
 */
152 static int cdn_dp_get_port_lanes(struct cdn_dp_port *port)
154 struct extcon_dev *edev = port->extcon;
155 union extcon_property_value property;
159 dptx = extcon_get_state(edev, EXTCON_DISP_DP);
161 extcon_get_property(edev, EXTCON_DISP_DP,
162 EXTCON_PROP_USB_SS, &property);
/* Read DP_SINK_COUNT over DPCD and decode it into *sink_count. */
174 static int cdn_dp_get_sink_count(struct cdn_dp_device *dp, u8 *sink_count)
180 ret = cdn_dp_dpcd_read(dp, DP_SINK_COUNT, &value, 1);
184 *sink_count = DP_GET_SINK_COUNT(value);
/* Return the first port that currently reports usable lanes, if any. */
188 static struct cdn_dp_port *cdn_dp_connected_port(struct cdn_dp_device *dp)
190 struct cdn_dp_port *port;
193 for (i = 0; i < dp->ports; i++) {
195 lanes = cdn_dp_get_port_lanes(port);
/*
 * Poll the active port's sink count for up to CDN_DPCD_TIMEOUT_MS and
 * report whether a sink is really present behind the connector/dongle.
 */
202 static bool cdn_dp_check_sink_connection(struct cdn_dp_device *dp)
204 unsigned long timeout = jiffies + msecs_to_jiffies(CDN_DPCD_TIMEOUT_MS);
205 struct cdn_dp_port *port;
208 if (dp->active_port < 0 || dp->active_port >= dp->ports) {
209 DRM_DEV_ERROR(dp->dev, "active_port is wrong!\n");
213 port = dp->port[dp->active_port];
216 * Attempt to read sink count, retry in case the sink may not be ready.
218 * Sinks are *supposed* to come up within 1ms from an off state, but
219 * some docks need more time to power up.
221 while (time_before(jiffies, timeout)) {
/* Cable gone mid-poll: give up immediately. */
222 if (!extcon_get_state(port->extcon, EXTCON_DISP_DP))
225 if (!cdn_dp_get_sink_count(dp, &sink_count))
226 return sink_count ? true : false;
228 usleep_range(5000, 10000);
231 DRM_DEV_ERROR(dp->dev, "Get sink capability timed out\n");
/* drm_connector .detect: report the state tracked under dp->lock. */
235 static enum drm_connector_status
236 cdn_dp_connector_detect(struct drm_connector *connector, bool force)
238 struct cdn_dp_device *dp = connector_to_dp(connector);
239 enum drm_connector_status status = connector_status_disconnected;
241 mutex_lock(&dp->lock);
243 status = connector_status_connected;
244 mutex_unlock(&dp->lock);
/* drm_connector .destroy: unregister then free the connector. */
249 static void cdn_dp_connector_destroy(struct drm_connector *connector)
251 drm_connector_unregister(connector);
252 drm_connector_cleanup(connector);
255 static const struct drm_connector_funcs cdn_dp_atomic_connector_funcs = {
256 .detect = cdn_dp_connector_detect,
257 .destroy = cdn_dp_connector_destroy,
258 .fill_modes = drm_helper_probe_single_connector_modes,
259 .reset = drm_atomic_helper_connector_reset,
260 .atomic_duplicate_state = drm_atomic_helper_connector_duplicate_state,
261 .atomic_destroy_state = drm_atomic_helper_connector_destroy_state,
/*
 * drm_connector .get_modes: publish the modes from the cached EDID and
 * remember whether the sink advertises audio (consumed by the hdmi-codec
 * glue further down).  Runs under dp->lock.
 */
264 static int cdn_dp_connector_get_modes(struct drm_connector *connector)
266 struct cdn_dp_device *dp = connector_to_dp(connector);
270 mutex_lock(&dp->lock);
273 DRM_DEV_DEBUG_KMS(dp->dev, "got edid: width[%d] x height[%d]\n",
274 edid->width_cm, edid->height_cm);
276 dp->sink_has_audio = drm_detect_monitor_audio(edid);
277 ret = drm_add_edid_modes(connector, edid);
279 drm_connector_update_edid_property(connector,
282 mutex_unlock(&dp->lock);
/*
 * drm_connector .mode_valid: reject modes whose payload bandwidth
 * (pixel clock * bpc * 3 color components, scaled by /1000) exceeds the
 * usable link capacity.  Capacity is min(source, sink) for both lane
 * count and link rate, derated to ~80% for protocol overhead.
 */
287 static enum drm_mode_status
288 cdn_dp_connector_mode_valid(struct drm_connector *connector,
289 struct drm_display_mode *mode)
291 struct cdn_dp_device *dp = connector_to_dp(connector);
292 struct drm_display_info *display_info = &dp->connector.display_info;
293 u32 requested, actual, rate, sink_max, source_max = 0;
296 /* If DP is disconnected, every mode is invalid */
300 switch (display_info->bpc) {
312 requested = mode->clock * bpc * 3 / 1000;
314 source_max = dp->lanes;
315 sink_max = drm_dp_max_lane_count(dp->dpcd);
316 lanes = min(source_max, sink_max);
318 source_max = drm_dp_bw_code_to_link_rate(CDN_DP_MAX_LINK_RATE);
319 sink_max = drm_dp_max_link_rate(dp->dpcd);
320 rate = min(source_max, sink_max);
322 actual = rate * lanes / 100;
324 /* efficiency is about 0.8 */
325 actual = actual * 8 / 10;
327 if (requested > actual) {
328 DRM_DEV_DEBUG_KMS(dp->dev,
329 "requested=%d, actual=%d, clock=%d\n",
330 requested, actual, mode->clock);
331 return MODE_CLOCK_HIGH;
337 static struct drm_connector_helper_funcs cdn_dp_connector_helper_funcs = {
338 .get_modes = cdn_dp_connector_get_modes,
339 .mode_valid = cdn_dp_connector_mode_valid,
/*
 * Parse the previously requested firmware image (header, IRAM and DRAM
 * sections), upload it, start the embedded microcontroller and enable
 * the event interface.  The header's size field is validated against the
 * actual blob size before anything is loaded.
 */
342 static int cdn_dp_firmware_init(struct cdn_dp_device *dp)
345 const u32 *iram_data, *dram_data;
346 const struct firmware *fw = dp->fw;
347 const struct cdn_firmware_header *hdr;
349 hdr = (struct cdn_firmware_header *)fw->data;
350 if (fw->size != le32_to_cpu(hdr->size_bytes)) {
351 DRM_DEV_ERROR(dp->dev, "firmware is invalid\n");
/* IRAM section follows the header; DRAM section follows IRAM. */
355 iram_data = (const u32 *)(fw->data + hdr->header_size);
356 dram_data = (const u32 *)(fw->data + hdr->header_size + hdr->iram_size);
358 ret = cdn_dp_load_firmware(dp, iram_data, hdr->iram_size,
359 dram_data, hdr->dram_size);
363 ret = cdn_dp_set_firmware_active(dp, true);
365 DRM_DEV_ERROR(dp->dev, "active ucpu failed: %d\n", ret);
369 return cdn_dp_event_config(dp);
/*
 * Verify a sink is actually present, then cache its DPCD receiver
 * capabilities and its EDID (read via cdn_dp_get_edid_block).
 */
372 static int cdn_dp_get_sink_capability(struct cdn_dp_device *dp)
376 if (!cdn_dp_check_sink_connection(dp))
379 ret = cdn_dp_dpcd_read(dp, DP_DPCD_REV, dp->dpcd,
380 DP_RECEIVER_CAP_SIZE);
382 DRM_DEV_ERROR(dp->dev, "Failed to get caps %d\n", ret);
387 dp->edid = drm_do_get_edid(&dp->connector,
388 cdn_dp_get_edid_block, dp);
/*
 * Bring up @port: power on its Type-C PHY, route HPD to the DP
 * controller through GRF_SOC_CON26, wait for HPD, then program the host
 * capabilities (lane count plus Type-C polarity from extcon).  On
 * success the port becomes dp->active_port; on failure the PHY is
 * powered back off and HPD routing is restored to DPTX_HPD_DEL.
 */
392 static int cdn_dp_enable_phy(struct cdn_dp_device *dp, struct cdn_dp_port *port)
394 union extcon_property_value property;
397 if (!port->phy_enabled) {
398 ret = phy_power_on(port->phy);
400 DRM_DEV_ERROR(dp->dev, "phy power on failed: %d\n",
404 port->phy_enabled = true;
/* Route the DP HPD signal to this controller. */
407 ret = cdn_dp_grf_write(dp, GRF_SOC_CON26,
408 DPTX_HPD_SEL_MASK | DPTX_HPD_SEL);
410 DRM_DEV_ERROR(dp->dev, "Failed to write HPD_SEL %d\n", ret);
414 ret = cdn_dp_get_hpd_status(dp);
417 DRM_DEV_ERROR(dp->dev, "hpd does not exist\n");
421 ret = extcon_get_property(port->extcon, EXTCON_DISP_DP,
422 EXTCON_PROP_USB_TYPEC_POLARITY, &property);
424 DRM_DEV_ERROR(dp->dev, "get property failed\n");
428 port->lanes = cdn_dp_get_port_lanes(port);
429 ret = cdn_dp_set_host_cap(dp, port->lanes, property.intval);
431 DRM_DEV_ERROR(dp->dev, "set host capabilities failed: %d\n",
436 dp->active_port = port->id;
/* Error unwind: power the PHY back off ... */
440 if (phy_power_off(port->phy))
441 DRM_DEV_ERROR(dp->dev, "phy power off failed: %d", ret);
443 port->phy_enabled = false;
/* ... and hand the HPD routing back. */
446 cdn_dp_grf_write(dp, GRF_SOC_CON26,
447 DPTX_HPD_SEL_MASK | DPTX_HPD_DEL);
/* Power off @port's PHY (if on) and mark no port active. */
451 static int cdn_dp_disable_phy(struct cdn_dp_device *dp,
452 struct cdn_dp_port *port)
456 if (port->phy_enabled) {
457 ret = phy_power_off(port->phy);
459 DRM_DEV_ERROR(dp->dev, "phy power off failed: %d", ret);
464 port->phy_enabled = false;
466 dp->active_port = -1;
/*
 * Full controller shutdown: all port PHYs off, HPD routing released,
 * firmware deactivated, clocks dropped and cached link state cleared.
 */
470 static int cdn_dp_disable(struct cdn_dp_device *dp)
477 for (i = 0; i < dp->ports; i++)
478 cdn_dp_disable_phy(dp, dp->port[i]);
480 ret = cdn_dp_grf_write(dp, GRF_SOC_CON26,
481 DPTX_HPD_SEL_MASK | DPTX_HPD_DEL);
483 DRM_DEV_ERROR(dp->dev, "Failed to clear hpd sel %d\n",
488 cdn_dp_set_firmware_active(dp, false);
489 cdn_dp_clk_disable(dp);
492 dp->link.num_lanes = 0;
493 if (!dp->connected) {
/*
 * Bring the controller up for the currently connected port: clocks on,
 * firmware initialized, then enable a PHY (starting at the connected
 * port's id) and read the sink's capabilities.  On total failure the
 * clocks are dropped again.
 */
501 static int cdn_dp_enable(struct cdn_dp_device *dp)
504 struct cdn_dp_port *port;
506 port = cdn_dp_connected_port(dp);
508 DRM_DEV_ERROR(dp->dev,
509 "Can't enable without connection\n");
516 ret = cdn_dp_clk_enable(dp);
520 ret = cdn_dp_firmware_init(dp);
522 DRM_DEV_ERROR(dp->dev, "firmware init failed: %d", ret);
523 goto err_clk_disable;
526 /* only enable the port that connected with downstream device */
527 for (i = port->id; i < dp->ports; i++) {
529 lanes = cdn_dp_get_port_lanes(port);
531 ret = cdn_dp_enable_phy(dp, port);
535 ret = cdn_dp_get_sink_capability(dp);
/* Sink unusable on this port: release its PHY and keep looking. */
537 cdn_dp_disable_phy(dp, port);
540 dp->lanes = port->lanes;
547 cdn_dp_clk_disable(dp);
/*
 * drm_encoder .mode_set: translate the connector bpc into the firmware's
 * color depth, fix the output format to RGB, record the sync polarities
 * and keep a copy of the adjusted mode for later (re)configuration.
 */
551 static void cdn_dp_encoder_mode_set(struct drm_encoder *encoder,
552 struct drm_display_mode *mode,
553 struct drm_display_mode *adjusted)
555 struct cdn_dp_device *dp = encoder_to_dp(encoder);
556 struct drm_display_info *display_info = &dp->connector.display_info;
557 struct video_info *video = &dp->video_info;
559 switch (display_info->bpc) {
561 video->color_depth = 10;
564 video->color_depth = 6;
567 video->color_depth = 8;
571 video->color_fmt = PXL_RGB;
572 video->v_sync_polarity = !!(mode->flags & DRM_MODE_FLAG_NVSYNC);
573 video->h_sync_polarity = !!(mode->flags & DRM_MODE_FLAG_NHSYNC);
575 memcpy(&dp->mode, adjusted, sizeof(*mode));
/*
 * Return true when the trained link is still good: a link must exist
 * (non-zero rate and lane count, port connected) and the DPCD lane
 * status must show channel EQ on min(port lanes, sink lanes).
 */
578 static bool cdn_dp_check_link_status(struct cdn_dp_device *dp)
580 u8 link_status[DP_LINK_STATUS_SIZE];
581 struct cdn_dp_port *port = cdn_dp_connected_port(dp);
582 u8 sink_lanes = drm_dp_max_lane_count(dp->dpcd);
584 if (!port || !dp->link.rate || !dp->link.num_lanes)
587 if (cdn_dp_dpcd_read(dp, DP_LANE0_1_STATUS, link_status,
588 DP_LINK_STATUS_SIZE)) {
589 DRM_ERROR("Failed to get link status\n");
593 /* if link training is requested we should perform it always */
594 return drm_dp_channel_eq_ok(link_status, min(port->lanes, sink_lanes));
/*
 * drm_encoder .enable: select which VOP (LIT or BIG) feeds this encoder
 * via GRF_SOC_CON9, then, under dp->lock: power everything up, retrain
 * the link if its status is bad, and sequence the video through
 * IDLE -> configure -> VALID.
 */
597 static void cdn_dp_encoder_enable(struct drm_encoder *encoder)
599 struct cdn_dp_device *dp = encoder_to_dp(encoder);
602 ret = drm_of_encoder_active_endpoint_id(dp->dev->of_node, encoder);
604 DRM_DEV_ERROR(dp->dev, "Could not get vop id, %d", ret);
608 DRM_DEV_DEBUG_KMS(dp->dev, "vop %s output to cdn-dp\n",
609 (ret) ? "LIT" : "BIG");
/* GRF write format: the high half-word is the write-enable mask. */
611 val = DP_SEL_VOP_LIT | (DP_SEL_VOP_LIT << 16);
613 val = DP_SEL_VOP_LIT << 16;
615 ret = cdn_dp_grf_write(dp, GRF_SOC_CON9, val);
619 mutex_lock(&dp->lock);
621 ret = cdn_dp_enable(dp);
623 DRM_DEV_ERROR(dp->dev, "Failed to enable encoder %d\n",
627 if (!cdn_dp_check_link_status(dp)) {
628 ret = cdn_dp_train_link(dp);
630 DRM_DEV_ERROR(dp->dev, "Failed link train %d\n", ret);
635 ret = cdn_dp_set_video_status(dp, CONTROL_VIDEO_IDLE);
637 DRM_DEV_ERROR(dp->dev, "Failed to idle video %d\n", ret);
641 ret = cdn_dp_config_video(dp);
643 DRM_DEV_ERROR(dp->dev, "Failed to config video %d\n", ret);
647 ret = cdn_dp_set_video_status(dp, CONTROL_VIDEO_VALID);
649 DRM_DEV_ERROR(dp->dev, "Failed to valid video %d\n", ret);
653 mutex_unlock(&dp->lock);
/* drm_encoder .disable: shut the controller down under dp->lock. */
656 static void cdn_dp_encoder_disable(struct drm_encoder *encoder)
658 struct cdn_dp_device *dp = encoder_to_dp(encoder);
661 mutex_lock(&dp->lock);
663 ret = cdn_dp_disable(dp);
665 DRM_DEV_ERROR(dp->dev, "Failed to disable encoder %d\n",
669 mutex_unlock(&dp->lock);
672 * In the following 2 cases, we need to run the event_work to re-enable
674 * 1. If there is not just one port device is connected, and remove one
675 * device from a port, the DP will be disabled here, at this case,
676 * run the event_work to re-open DP for the other port.
677 * 2. If re-training or re-config failed, the DP will be disabled here.
678 * run the event_work to re-connect it.
680 if (!dp->connected && cdn_dp_connected_port(dp))
681 schedule_work(&dp->event_work);
/* drm_encoder .atomic_check: fix Rockchip VOP output mode/type for DP. */
684 static int cdn_dp_encoder_atomic_check(struct drm_encoder *encoder,
685 struct drm_crtc_state *crtc_state,
686 struct drm_connector_state *conn_state)
688 struct rockchip_crtc_state *s = to_rockchip_crtc_state(crtc_state);
690 s->output_mode = ROCKCHIP_OUT_MODE_AAAA;
691 s->output_type = DRM_MODE_CONNECTOR_DisplayPort;
696 static const struct drm_encoder_helper_funcs cdn_dp_encoder_helper_funcs = {
697 .mode_set = cdn_dp_encoder_mode_set,
698 .enable = cdn_dp_encoder_enable,
699 .disable = cdn_dp_encoder_disable,
700 .atomic_check = cdn_dp_encoder_atomic_check,
703 static const struct drm_encoder_funcs cdn_dp_encoder_funcs = {
704 .destroy = drm_encoder_cleanup,
/*
 * Fetch every resource this device needs from DT/platform data: the GRF
 * syscon, the MMIO register block, four clocks (core, pclk, spdif, grf)
 * and four reset lines (spdif, dptx, core, apb).  All lookups use devm_*
 * helpers, so nothing needs explicit cleanup on failure.
 */
707 static int cdn_dp_parse_dt(struct cdn_dp_device *dp)
709 struct device *dev = dp->dev;
710 struct device_node *np = dev->of_node;
711 struct platform_device *pdev = to_platform_device(dev);
712 struct resource *res;
714 dp->grf = syscon_regmap_lookup_by_phandle(np, "rockchip,grf");
715 if (IS_ERR(dp->grf)) {
716 DRM_DEV_ERROR(dev, "cdn-dp needs rockchip,grf property\n");
717 return PTR_ERR(dp->grf);
720 res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
721 dp->regs = devm_ioremap_resource(dev, res);
722 if (IS_ERR(dp->regs)) {
723 DRM_DEV_ERROR(dev, "ioremap reg failed\n");
724 return PTR_ERR(dp->regs);
727 dp->core_clk = devm_clk_get(dev, "core-clk");
728 if (IS_ERR(dp->core_clk)) {
729 DRM_DEV_ERROR(dev, "cannot get core_clk_dp\n");
730 return PTR_ERR(dp->core_clk);
733 dp->pclk = devm_clk_get(dev, "pclk");
734 if (IS_ERR(dp->pclk)) {
735 DRM_DEV_ERROR(dev, "cannot get pclk\n");
736 return PTR_ERR(dp->pclk);
739 dp->spdif_clk = devm_clk_get(dev, "spdif");
740 if (IS_ERR(dp->spdif_clk)) {
741 DRM_DEV_ERROR(dev, "cannot get spdif_clk\n");
742 return PTR_ERR(dp->spdif_clk);
745 dp->grf_clk = devm_clk_get(dev, "grf");
746 if (IS_ERR(dp->grf_clk)) {
747 DRM_DEV_ERROR(dev, "cannot get grf clk\n");
748 return PTR_ERR(dp->grf_clk);
751 dp->spdif_rst = devm_reset_control_get(dev, "spdif");
752 if (IS_ERR(dp->spdif_rst)) {
753 DRM_DEV_ERROR(dev, "no spdif reset control found\n");
754 return PTR_ERR(dp->spdif_rst);
757 dp->dptx_rst = devm_reset_control_get(dev, "dptx");
758 if (IS_ERR(dp->dptx_rst)) {
759 DRM_DEV_ERROR(dev, "no uphy reset control found\n");
760 return PTR_ERR(dp->dptx_rst);
763 dp->core_rst = devm_reset_control_get(dev, "core");
764 if (IS_ERR(dp->core_rst)) {
765 DRM_DEV_ERROR(dev, "no core reset control found\n");
766 return PTR_ERR(dp->core_rst);
769 dp->apb_rst = devm_reset_control_get(dev, "apb");
770 if (IS_ERR(dp->apb_rst)) {
771 DRM_DEV_ERROR(dev, "no apb reset control found\n");
772 return PTR_ERR(dp->apb_rst);
/*
 * hdmi-codec .hw_params: translate the codec parameters into the
 * firmware's audio_info, mapping the DAI format to AFMT_I2S/AFMT_SPDIF,
 * and push the configuration.  The accepted config is cached in
 * dp->audio_info.  Runs under dp->lock.
 */
778 static int cdn_dp_audio_hw_params(struct device *dev, void *data,
779 struct hdmi_codec_daifmt *daifmt,
780 struct hdmi_codec_params *params)
782 struct cdn_dp_device *dp = dev_get_drvdata(dev);
783 struct audio_info audio = {
784 .sample_width = params->sample_width,
785 .sample_rate = params->sample_rate,
786 .channels = params->channels,
790 mutex_lock(&dp->lock);
796 switch (daifmt->fmt) {
798 audio.format = AFMT_I2S;
801 audio.format = AFMT_SPDIF;
804 DRM_DEV_ERROR(dev, "Invalid format %d\n", daifmt->fmt);
809 ret = cdn_dp_audio_config(dp, &audio);
811 dp->audio_info = audio;
814 mutex_unlock(&dp->lock);
/* hdmi-codec .audio_shutdown: stop streaming and invalidate the cache. */
818 static void cdn_dp_audio_shutdown(struct device *dev, void *data)
820 struct cdn_dp_device *dp = dev_get_drvdata(dev);
823 mutex_lock(&dp->lock);
827 ret = cdn_dp_audio_stop(dp, &dp->audio_info);
829 dp->audio_info.format = AFMT_UNUSED;
831 mutex_unlock(&dp->lock);
/* hdmi-codec .digital_mute: forward the mute request to the firmware. */
834 static int cdn_dp_audio_digital_mute(struct device *dev, void *data,
837 struct cdn_dp_device *dp = dev_get_drvdata(dev);
840 mutex_lock(&dp->lock);
846 ret = cdn_dp_audio_mute(dp, enable);
849 mutex_unlock(&dp->lock);
/* hdmi-codec .get_eld: copy the connector's ELD, clamped to @len. */
853 static int cdn_dp_audio_get_eld(struct device *dev, void *data,
856 struct cdn_dp_device *dp = dev_get_drvdata(dev);
858 memcpy(buf, dp->connector.eld, min(sizeof(dp->connector.eld), len));
863 static const struct hdmi_codec_ops audio_codec_ops = {
864 .hw_params = cdn_dp_audio_hw_params,
865 .audio_shutdown = cdn_dp_audio_shutdown,
866 .digital_mute = cdn_dp_audio_digital_mute,
867 .get_eld = cdn_dp_audio_get_eld,
/*
 * Register the hdmi-codec platform device that exposes the ops above to
 * ALSA (up to 8 I2S channels).  The created pdev is kept in
 * dp->audio_pdev for unregistration in remove().
 */
870 static int cdn_dp_audio_codec_init(struct cdn_dp_device *dp,
873 struct hdmi_codec_pdata codec_data = {
876 .ops = &audio_codec_ops,
877 .max_i2s_channels = 8,
880 dp->audio_pdev = platform_device_register_data(
881 dev, HDMI_CODEC_DRV_NAME, PLATFORM_DEVID_AUTO,
882 &codec_data, sizeof(codec_data));
884 return PTR_ERR_OR_ZERO(dp->audio_pdev);
/*
 * Load the controller firmware, retrying for up to CDN_FW_TIMEOUT_MS
 * while the firmware file may not yet be available (-ENOENT is retried,
 * e.g. before the rootfs is mounted; other errors abort).  dp->lock must
 * be held on entry; it is dropped around the blocking firmware request
 * and re-taken before returning.
 */
887 static int cdn_dp_request_firmware(struct cdn_dp_device *dp)
890 unsigned long timeout = jiffies + msecs_to_jiffies(CDN_FW_TIMEOUT_MS);
891 unsigned long sleep = 1000;
893 WARN_ON(!mutex_is_locked(&dp->lock));
898 /* Drop the lock before getting the firmware to avoid blocking boot */
899 mutex_unlock(&dp->lock);
901 while (time_before(jiffies, timeout)) {
902 ret = reject_firmware(&dp->fw, CDN_DP_FIRMWARE, dp->dev);
903 if (ret == -ENOENT) {
908 DRM_DEV_ERROR(dp->dev,
909 "failed to request firmware: %d\n", ret);
913 dp->fw_loaded = true;
918 DRM_DEV_ERROR(dp->dev, "Timed out trying to load firmware\n");
921 mutex_lock(&dp->lock);
/*
 * Deferred hotplug/state worker: (re)loads firmware if needed, then
 * walks the connect/enable/sink/link-status state machine under
 * dp->lock, and finally re-runs .detect, emitting a hotplug event if
 * the connector status changed.
 */
925 static void cdn_dp_pd_event_work(struct work_struct *work)
927 struct cdn_dp_device *dp = container_of(work, struct cdn_dp_device,
929 struct drm_connector *connector = &dp->connector;
930 enum drm_connector_status old_status;
934 mutex_lock(&dp->lock);
939 ret = cdn_dp_request_firmware(dp);
943 dp->connected = true;
945 /* Not connected, notify userspace to disable the block */
946 if (!cdn_dp_connected_port(dp)) {
947 DRM_DEV_INFO(dp->dev, "Not connected. Disabling cdn\n");
948 dp->connected = false;
950 /* Connected but not enabled, enable the block */
951 } else if (!dp->active) {
952 DRM_DEV_INFO(dp->dev, "Connected, not enabled. Enabling cdn\n");
953 ret = cdn_dp_enable(dp);
955 DRM_DEV_ERROR(dp->dev, "Enable dp failed %d\n", ret);
956 dp->connected = false;
959 /* Enabled and connected to a dongle without a sink, notify userspace */
960 } else if (!cdn_dp_check_sink_connection(dp)) {
961 DRM_DEV_INFO(dp->dev, "Connected without sink. Assert hpd\n");
962 dp->connected = false;
964 /* Enabled and connected with a sink, re-train if requested */
965 } else if (!cdn_dp_check_link_status(dp)) {
966 unsigned int rate = dp->link.rate;
967 unsigned int lanes = dp->link.num_lanes;
968 struct drm_display_mode *mode = &dp->mode;
970 DRM_DEV_INFO(dp->dev, "Connected with sink. Re-train link\n");
971 ret = cdn_dp_train_link(dp);
973 dp->connected = false;
974 DRM_DEV_ERROR(dp->dev, "Train link failed %d\n", ret);
978 /* If training result is changed, update the video config */
980 (rate != dp->link.rate || lanes != dp->link.num_lanes)) {
981 ret = cdn_dp_config_video(dp);
983 dp->connected = false;
984 DRM_DEV_ERROR(dp->dev,
985 "Failed to config video %d\n",
992 mutex_unlock(&dp->lock);
/* Re-run detect outside the lock and notify userspace on a change. */
994 old_status = connector->status;
995 connector->status = connector->funcs->detect(connector, false);
996 if (old_status != connector->status)
997 drm_kms_helper_hotplug_event(dp->drm_dev);
/* Extcon notifier callback: defer all real work to the worker above. */
1000 static int cdn_dp_pd_event(struct notifier_block *nb,
1001 unsigned long event, void *priv)
1003 struct cdn_dp_port *port = container_of(nb, struct cdn_dp_port,
1005 struct cdn_dp_device *dp = port->dp;
1008 * It would be nice to be able to just do the work inline right here.
1009 * However, we need to make a bunch of calls that might sleep in order
1010 * to turn on the block/phy, so use a worker instead.
1012 schedule_work(&dp->event_work);
/*
 * Component .bind: parse DT resources, register the encoder and
 * connector with the DRM device, hook each port's extcon notifier,
 * enable runtime PM and kick the event worker once to evaluate the
 * initial cable state.  Error labels tear down connector/encoder in
 * reverse order.
 */
1017 static int cdn_dp_bind(struct device *dev, struct device *master, void *data)
1019 struct cdn_dp_device *dp = dev_get_drvdata(dev);
1020 struct drm_encoder *encoder;
1021 struct drm_connector *connector;
1022 struct cdn_dp_port *port;
1023 struct drm_device *drm_dev = data;
1026 ret = cdn_dp_parse_dt(dp);
1030 dp->drm_dev = drm_dev;
1031 dp->connected = false;
1033 dp->active_port = -1;
1034 dp->fw_loaded = false;
1036 INIT_WORK(&dp->event_work, cdn_dp_pd_event_work);
1038 encoder = &dp->encoder;
1040 encoder->possible_crtcs = drm_of_find_possible_crtcs(drm_dev,
1042 DRM_DEBUG_KMS("possible_crtcs = 0x%x\n", encoder->possible_crtcs);
1044 ret = drm_encoder_init(drm_dev, encoder, &cdn_dp_encoder_funcs,
1045 DRM_MODE_ENCODER_TMDS, NULL);
1047 DRM_ERROR("failed to initialize encoder with drm\n");
1051 drm_encoder_helper_add(encoder, &cdn_dp_encoder_helper_funcs);
1053 connector = &dp->connector;
1054 connector->polled = DRM_CONNECTOR_POLL_HPD;
1055 connector->dpms = DRM_MODE_DPMS_OFF;
1057 ret = drm_connector_init(drm_dev, connector,
1058 &cdn_dp_atomic_connector_funcs,
1059 DRM_MODE_CONNECTOR_DisplayPort);
1061 DRM_ERROR("failed to initialize connector with drm\n");
1062 goto err_free_encoder;
1065 drm_connector_helper_add(connector, &cdn_dp_connector_helper_funcs);
1067 ret = drm_connector_attach_encoder(connector, encoder);
1069 DRM_ERROR("failed to attach connector and encoder\n");
1070 goto err_free_connector;
/* One extcon notifier per port, so any cable event wakes the worker. */
1073 for (i = 0; i < dp->ports; i++) {
1076 port->event_nb.notifier_call = cdn_dp_pd_event;
1077 ret = devm_extcon_register_notifier(dp->dev, port->extcon,
1082 "register EXTCON_DISP_DP notifier err\n");
1083 goto err_free_connector;
1087 pm_runtime_enable(dev);
1089 schedule_work(&dp->event_work);
1094 drm_connector_cleanup(connector);
1096 drm_encoder_cleanup(encoder);
/*
 * Component .unbind: stop the worker, force the encoder off, destroy
 * the DRM objects, disable runtime PM and release the firmware blob.
 */
1100 static void cdn_dp_unbind(struct device *dev, struct device *master, void *data)
1102 struct cdn_dp_device *dp = dev_get_drvdata(dev);
1103 struct drm_encoder *encoder = &dp->encoder;
1104 struct drm_connector *connector = &dp->connector;
1106 cancel_work_sync(&dp->event_work);
1107 cdn_dp_encoder_disable(encoder);
1108 encoder->funcs->destroy(encoder);
1109 connector->funcs->destroy(connector);
1111 pm_runtime_disable(dev);
1113 release_firmware(dp->fw);
1118 static const struct component_ops cdn_dp_component_ops = {
1119 .bind = cdn_dp_bind,
1120 .unbind = cdn_dp_unbind,
/* System sleep: shut the controller down and mark it suspended. */
1123 int cdn_dp_suspend(struct device *dev)
1125 struct cdn_dp_device *dp = dev_get_drvdata(dev);
1128 mutex_lock(&dp->lock);
1130 ret = cdn_dp_disable(dp);
1131 dp->suspended = true;
1132 mutex_unlock(&dp->lock);
/* Resume: clear the suspended flag and let the worker re-evaluate state. */
1137 int cdn_dp_resume(struct device *dev)
1139 struct cdn_dp_device *dp = dev_get_drvdata(dev);
1141 mutex_lock(&dp->lock);
1142 dp->suspended = false;
1144 schedule_work(&dp->event_work);
1145 mutex_unlock(&dp->lock);
/*
 * Platform .probe: allocate the device state, look up the SoC match
 * data, then pair each possible PHY index with its extcon device.  A
 * probe deferral from either lookup defers the whole probe; ports that
 * fail for other reasons are skipped.  Finally register the audio codec
 * and join the Rockchip DRM component framework.
 */
1150 static int cdn_dp_probe(struct platform_device *pdev)
1152 struct device *dev = &pdev->dev;
1153 const struct of_device_id *match;
1154 struct cdn_dp_data *dp_data;
1155 struct cdn_dp_port *port;
1156 struct cdn_dp_device *dp;
1157 struct extcon_dev *extcon;
1161 dp = devm_kzalloc(dev, sizeof(*dp), GFP_KERNEL);
1166 match = of_match_node(cdn_dp_dt_ids, pdev->dev.of_node);
1167 dp_data = (struct cdn_dp_data *)match->data;
1169 for (i = 0; i < dp_data->max_phy; i++) {
1170 extcon = extcon_get_edev_by_phandle(dev, i);
1171 phy = devm_of_phy_get_by_index(dev, dev->of_node, i);
/* Either resource still coming up: retry the whole probe later. */
1173 if (PTR_ERR(extcon) == -EPROBE_DEFER ||
1174 PTR_ERR(phy) == -EPROBE_DEFER)
1175 return -EPROBE_DEFER;
1177 if (IS_ERR(extcon) || IS_ERR(phy))
1180 port = devm_kzalloc(dev, sizeof(*port), GFP_KERNEL);
1184 port->extcon = extcon;
1188 dp->port[dp->ports++] = port;
1192 DRM_DEV_ERROR(dev, "missing extcon or phy\n");
1196 mutex_init(&dp->lock);
1197 dev_set_drvdata(dev, dp);
1199 cdn_dp_audio_codec_init(dp, dev);
1201 return component_add(dev, &cdn_dp_component_ops);
/* Platform .remove: unregister audio, power down, leave the component. */
1204 static int cdn_dp_remove(struct platform_device *pdev)
1206 struct cdn_dp_device *dp = platform_get_drvdata(pdev);
1208 platform_device_unregister(dp->audio_pdev);
1209 cdn_dp_suspend(dp->dev);
1210 component_del(&pdev->dev, &cdn_dp_component_ops);
/* Platform .shutdown: same power-down path as suspend. */
1215 static void cdn_dp_shutdown(struct platform_device *pdev)
1217 struct cdn_dp_device *dp = platform_get_drvdata(pdev);
1219 cdn_dp_suspend(dp->dev);
/* System-sleep PM hooks wiring up cdn_dp_suspend()/cdn_dp_resume(). */
1222 static const struct dev_pm_ops cdn_dp_pm_ops = {
1223 SET_SYSTEM_SLEEP_PM_OPS(cdn_dp_suspend,
1227 struct platform_driver cdn_dp_driver = {
1228 .probe = cdn_dp_probe,
1229 .remove = cdn_dp_remove,
1230 .shutdown = cdn_dp_shutdown,
1233 .owner = THIS_MODULE,
1234 .of_match_table = of_match_ptr(cdn_dp_dt_ids),
1235 .pm = &cdn_dp_pm_ops,