Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

drm/msm/dp: Add basic PSR support for eDP

Add support for basic panel self refresh (PSR) feature for eDP.
Add a new interface to set PSR state in the sink from DPU.
Program the eDP controller to issue PSR enter and exit SDP to
the sink.

Signed-off-by: Sankeerth Billakanti <quic_sbillaka@quicinc.com>
Signed-off-by: Vinod Polimera <quic_vpolimer@quicinc.com>
Reviewed-by: Dmitry Baryshkov <dmitry.baryshkov@linaro.org>
Patchwork: https://patchwork.freedesktop.org/patch/524734/
Link: https://lore.kernel.org/r/1677774797-31063-10-git-send-email-quic_vpolimer@quicinc.com
Signed-off-by: Dmitry Baryshkov <dmitry.baryshkov@linaro.org>

Authored by Vinod Polimera; committed by Dmitry Baryshkov.
cd779808 cdfd0e62

+411 -1
+80
drivers/gpu/drm/msm/dp/dp_catalog.c
··· 47 47 #define DP_INTERRUPT_STATUS2_MASK \ 48 48 (DP_INTERRUPT_STATUS2 << DP_INTERRUPT_STATUS_MASK_SHIFT) 49 49 50 + #define DP_INTERRUPT_STATUS4 \ 51 + (PSR_UPDATE_INT | PSR_CAPTURE_INT | PSR_EXIT_INT | \ 52 + PSR_UPDATE_ERROR_INT | PSR_WAKE_ERROR_INT) 53 + 54 + #define DP_INTERRUPT_MASK4 \ 55 + (PSR_UPDATE_MASK | PSR_CAPTURE_MASK | PSR_EXIT_MASK | \ 56 + PSR_UPDATE_ERROR_MASK | PSR_WAKE_ERROR_MASK) 57 + 50 58 struct dp_catalog_private { 51 59 struct device *dev; 52 60 struct drm_device *drm_dev; ··· 367 359 ln_mapping); 368 360 } 369 361 362 + void dp_catalog_ctrl_psr_mainlink_enable(struct dp_catalog *dp_catalog, 363 + bool enable) 364 + { 365 + u32 val; 366 + struct dp_catalog_private *catalog = container_of(dp_catalog, 367 + struct dp_catalog_private, dp_catalog); 368 + 369 + val = dp_read_link(catalog, REG_DP_MAINLINK_CTRL); 370 + 371 + if (enable) 372 + val |= DP_MAINLINK_CTRL_ENABLE; 373 + else 374 + val &= ~DP_MAINLINK_CTRL_ENABLE; 375 + 376 + dp_write_link(catalog, REG_DP_MAINLINK_CTRL, val); 377 + } 378 + 370 379 void dp_catalog_ctrl_mainlink_ctrl(struct dp_catalog *dp_catalog, 371 380 bool enable) 372 381 { ··· 635 610 dp_write_aux(catalog, REG_DP_DP_HPD_CTRL, DP_DP_HPD_CTRL_HPD_EN); 636 611 } 637 612 613 + static void dp_catalog_enable_sdp(struct dp_catalog_private *catalog) 614 + { 615 + /* trigger sdp */ 616 + dp_write_link(catalog, MMSS_DP_SDP_CFG3, UPDATE_SDP); 617 + dp_write_link(catalog, MMSS_DP_SDP_CFG3, 0x0); 618 + } 619 + 620 + void dp_catalog_ctrl_config_psr(struct dp_catalog *dp_catalog) 621 + { 622 + struct dp_catalog_private *catalog = container_of(dp_catalog, 623 + struct dp_catalog_private, dp_catalog); 624 + u32 config; 625 + 626 + /* enable PSR1 function */ 627 + config = dp_read_link(catalog, REG_PSR_CONFIG); 628 + config |= PSR1_SUPPORTED; 629 + dp_write_link(catalog, REG_PSR_CONFIG, config); 630 + 631 + dp_write_ahb(catalog, REG_DP_INTR_MASK4, DP_INTERRUPT_MASK4); 632 + dp_catalog_enable_sdp(catalog); 633 + } 634 + 635 + void 
dp_catalog_ctrl_set_psr(struct dp_catalog *dp_catalog, bool enter) 636 + { 637 + struct dp_catalog_private *catalog = container_of(dp_catalog, 638 + struct dp_catalog_private, dp_catalog); 639 + u32 cmd; 640 + 641 + cmd = dp_read_link(catalog, REG_PSR_CMD); 642 + 643 + cmd &= ~(PSR_ENTER | PSR_EXIT); 644 + 645 + if (enter) 646 + cmd |= PSR_ENTER; 647 + else 648 + cmd |= PSR_EXIT; 649 + 650 + dp_catalog_enable_sdp(catalog); 651 + dp_write_link(catalog, REG_PSR_CMD, cmd); 652 + } 653 + 638 654 u32 dp_catalog_link_is_connected(struct dp_catalog *dp_catalog) 639 655 { 640 656 struct dp_catalog_private *catalog = container_of(dp_catalog, ··· 709 643 * are pending. 710 644 */ 711 645 return isr & (mask | ~DP_DP_HPD_INT_MASK); 646 + } 647 + 648 + u32 dp_catalog_ctrl_read_psr_interrupt_status(struct dp_catalog *dp_catalog) 649 + { 650 + struct dp_catalog_private *catalog = container_of(dp_catalog, 651 + struct dp_catalog_private, dp_catalog); 652 + u32 intr, intr_ack; 653 + 654 + intr = dp_read_ahb(catalog, REG_DP_INTR_STATUS4); 655 + intr_ack = (intr & DP_INTERRUPT_STATUS4) 656 + << DP_INTERRUPT_STATUS_ACK_SHIFT; 657 + dp_write_ahb(catalog, REG_DP_INTR_STATUS4, intr_ack); 658 + 659 + return intr; 712 660 } 713 661 714 662 int dp_catalog_ctrl_get_interrupt(struct dp_catalog *dp_catalog)
+4
drivers/gpu/drm/msm/dp/dp_catalog.h
··· 93 93 void dp_catalog_ctrl_config_ctrl(struct dp_catalog *dp_catalog, u32 config); 94 94 void dp_catalog_ctrl_lane_mapping(struct dp_catalog *dp_catalog); 95 95 void dp_catalog_ctrl_mainlink_ctrl(struct dp_catalog *dp_catalog, bool enable); 96 + void dp_catalog_ctrl_psr_mainlink_enable(struct dp_catalog *dp_catalog, bool enable); 96 97 void dp_catalog_ctrl_config_misc(struct dp_catalog *dp_catalog, u32 cc, u32 tb); 97 98 void dp_catalog_ctrl_config_msa(struct dp_catalog *dp_catalog, u32 rate, 98 99 u32 stream_rate_khz, bool fixed_nvid); ··· 105 104 void dp_catalog_hpd_config_intr(struct dp_catalog *dp_catalog, 106 105 u32 intr_mask, bool en); 107 106 void dp_catalog_ctrl_hpd_config(struct dp_catalog *dp_catalog); 107 + void dp_catalog_ctrl_config_psr(struct dp_catalog *dp_catalog); 108 + void dp_catalog_ctrl_set_psr(struct dp_catalog *dp_catalog, bool enter); 108 109 u32 dp_catalog_link_is_connected(struct dp_catalog *dp_catalog); 109 110 u32 dp_catalog_hpd_get_intr_status(struct dp_catalog *dp_catalog); 110 111 void dp_catalog_ctrl_phy_reset(struct dp_catalog *dp_catalog); 111 112 int dp_catalog_ctrl_update_vx_px(struct dp_catalog *dp_catalog, u8 v_level, 112 113 u8 p_level); 113 114 int dp_catalog_ctrl_get_interrupt(struct dp_catalog *dp_catalog); 115 + u32 dp_catalog_ctrl_read_psr_interrupt_status(struct dp_catalog *dp_catalog); 114 116 void dp_catalog_ctrl_update_transfer_unit(struct dp_catalog *dp_catalog, 115 117 u32 dp_tu, u32 valid_boundary, 116 118 u32 valid_boundary2);
+80
drivers/gpu/drm/msm/dp/dp_ctrl.c
··· 22 22 23 23 #define DP_KHZ_TO_HZ 1000 24 24 #define IDLE_PATTERN_COMPLETION_TIMEOUT_JIFFIES (30 * HZ / 1000) /* 30 ms */ 25 + #define PSR_OPERATION_COMPLETION_TIMEOUT_JIFFIES (300 * HZ / 1000) /* 300 ms */ 25 26 #define WAIT_FOR_VIDEO_READY_TIMEOUT_JIFFIES (HZ / 2) 26 27 27 28 #define DP_CTRL_INTR_READY_FOR_VIDEO BIT(0) ··· 81 80 struct dp_catalog *catalog; 82 81 83 82 struct completion idle_comp; 83 + struct completion psr_op_comp; 84 84 struct completion video_comp; 85 85 }; 86 86 ··· 154 152 /* sync clock & static Mvid */ 155 153 config |= DP_CONFIGURATION_CTRL_STATIC_DYNAMIC_CN; 156 154 config |= DP_CONFIGURATION_CTRL_SYNC_ASYNC_CLK; 155 + 156 + if (ctrl->panel->psr_cap.version) 157 + config |= DP_CONFIGURATION_CTRL_SEND_VSC; 157 158 158 159 dp_catalog_ctrl_config_ctrl(ctrl->catalog, config); 159 160 } ··· 1380 1375 dp_catalog_ctrl_enable_irq(ctrl->catalog, enable); 1381 1376 } 1382 1377 1378 + void dp_ctrl_config_psr(struct dp_ctrl *dp_ctrl) 1379 + { 1380 + u8 cfg; 1381 + struct dp_ctrl_private *ctrl = container_of(dp_ctrl, 1382 + struct dp_ctrl_private, dp_ctrl); 1383 + 1384 + if (!ctrl->panel->psr_cap.version) 1385 + return; 1386 + 1387 + dp_catalog_ctrl_config_psr(ctrl->catalog); 1388 + 1389 + cfg = DP_PSR_ENABLE; 1390 + drm_dp_dpcd_write(ctrl->aux, DP_PSR_EN_CFG, &cfg, 1); 1391 + } 1392 + 1393 + void dp_ctrl_set_psr(struct dp_ctrl *dp_ctrl, bool enter) 1394 + { 1395 + struct dp_ctrl_private *ctrl = container_of(dp_ctrl, 1396 + struct dp_ctrl_private, dp_ctrl); 1397 + 1398 + if (!ctrl->panel->psr_cap.version) 1399 + return; 1400 + 1401 + /* 1402 + * When entering PSR, 1403 + * 1. Send PSR enter SDP and wait for the PSR_UPDATE_INT 1404 + * 2. Turn off video 1405 + * 3. Disable the mainlink 1406 + * 1407 + * When exiting PSR, 1408 + * 1. Enable the mainlink 1409 + * 2. 
Send the PSR exit SDP 1410 + */ 1411 + if (enter) { 1412 + reinit_completion(&ctrl->psr_op_comp); 1413 + dp_catalog_ctrl_set_psr(ctrl->catalog, true); 1414 + 1415 + if (!wait_for_completion_timeout(&ctrl->psr_op_comp, 1416 + PSR_OPERATION_COMPLETION_TIMEOUT_JIFFIES)) { 1417 + DRM_ERROR("PSR_ENTRY timedout\n"); 1418 + dp_catalog_ctrl_set_psr(ctrl->catalog, false); 1419 + return; 1420 + } 1421 + 1422 + dp_ctrl_push_idle(dp_ctrl); 1423 + dp_catalog_ctrl_state_ctrl(ctrl->catalog, 0); 1424 + 1425 + dp_catalog_ctrl_psr_mainlink_enable(ctrl->catalog, false); 1426 + } else { 1427 + dp_catalog_ctrl_psr_mainlink_enable(ctrl->catalog, true); 1428 + 1429 + dp_catalog_ctrl_set_psr(ctrl->catalog, false); 1430 + dp_catalog_ctrl_state_ctrl(ctrl->catalog, DP_STATE_CTRL_SEND_VIDEO); 1431 + dp_ctrl_wait4video_ready(ctrl); 1432 + dp_catalog_ctrl_state_ctrl(ctrl->catalog, 0); 1433 + } 1434 + } 1435 + 1383 1436 void dp_ctrl_phy_init(struct dp_ctrl *dp_ctrl) 1384 1437 { 1385 1438 struct dp_ctrl_private *ctrl; ··· 2052 1989 2053 1990 ctrl = container_of(dp_ctrl, struct dp_ctrl_private, dp_ctrl); 2054 1991 1992 + if (ctrl->panel->psr_cap.version) { 1993 + isr = dp_catalog_ctrl_read_psr_interrupt_status(ctrl->catalog); 1994 + 1995 + if (isr) 1996 + complete(&ctrl->psr_op_comp); 1997 + 1998 + if (isr & PSR_EXIT_INT) 1999 + drm_dbg_dp(ctrl->drm_dev, "PSR exit done\n"); 2000 + 2001 + if (isr & PSR_UPDATE_INT) 2002 + drm_dbg_dp(ctrl->drm_dev, "PSR frame update done\n"); 2003 + 2004 + if (isr & PSR_CAPTURE_INT) 2005 + drm_dbg_dp(ctrl->drm_dev, "PSR frame capture done\n"); 2006 + } 2007 + 2055 2008 isr = dp_catalog_ctrl_get_interrupt(ctrl->catalog); 2056 2009 2057 2010 if (isr & DP_CTRL_INTR_READY_FOR_VIDEO) { ··· 2114 2035 dev_err(dev, "failed to add DP OPP table\n"); 2115 2036 2116 2037 init_completion(&ctrl->idle_comp); 2038 + init_completion(&ctrl->psr_op_comp); 2117 2039 init_completion(&ctrl->video_comp); 2118 2040 2119 2041 /* in parameters */
+3
drivers/gpu/drm/msm/dp/dp_ctrl.h
··· 37 37 void dp_ctrl_phy_exit(struct dp_ctrl *dp_ctrl); 38 38 void dp_ctrl_irq_phy_exit(struct dp_ctrl *dp_ctrl); 39 39 40 + void dp_ctrl_set_psr(struct dp_ctrl *dp_ctrl, bool enable); 41 + void dp_ctrl_config_psr(struct dp_ctrl *dp_ctrl); 42 + 40 43 #endif /* _DP_CTRL_H_ */
+19
drivers/gpu/drm/msm/dp/dp_display.c
··· 406 406 407 407 edid = dp->panel->edid; 408 408 409 + dp->dp_display.psr_supported = dp->panel->psr_cap.version; 410 + 409 411 dp->audio_supported = drm_detect_monitor_audio(edid); 410 412 dp_panel_handle_sink_request(dp->panel); 411 413 ··· 912 910 913 911 /* signal the connect event late to synchronize video and display */ 914 912 dp_display_handle_plugged_change(dp_display, true); 913 + 914 + if (dp_display->psr_supported) 915 + dp_ctrl_config_psr(dp->ctrl); 916 + 915 917 return 0; 916 918 } 917 919 ··· 1108 1102 * and never disable interrupt 1109 1103 */ 1110 1104 enable_irq(dp->irq); 1105 + } 1106 + 1107 + void dp_display_set_psr(struct msm_dp *dp_display, bool enter) 1108 + { 1109 + struct dp_display_private *dp; 1110 + 1111 + if (!dp_display) { 1112 + DRM_ERROR("invalid params\n"); 1113 + return; 1114 + } 1115 + 1116 + dp = container_of(dp_display, struct dp_display_private, dp_display); 1117 + dp_ctrl_set_psr(dp->ctrl, enter); 1111 1118 } 1112 1119 1113 1120 static int hpd_event_thread(void *data)
+2
drivers/gpu/drm/msm/dp/dp_display.h
··· 29 29 30 30 u32 max_dp_lanes; 31 31 struct dp_audio *dp_audio; 32 + bool psr_supported; 32 33 }; 33 34 34 35 int dp_display_set_plugged_cb(struct msm_dp *dp_display, ··· 40 39 int dp_display_get_test_bpp(struct msm_dp *dp_display); 41 40 void dp_display_signal_audio_start(struct msm_dp *dp_display); 42 41 void dp_display_signal_audio_complete(struct msm_dp *dp_display); 42 + void dp_display_set_psr(struct msm_dp *dp, bool enter); 43 43 44 44 #endif /* _DP_DISPLAY_H_ */
+132 -1
drivers/gpu/drm/msm/dp/dp_drm.c
··· 107 107 .hpd_notify = dp_bridge_hpd_notify, 108 108 }; 109 109 110 + static int edp_bridge_atomic_check(struct drm_bridge *drm_bridge, 111 + struct drm_bridge_state *bridge_state, 112 + struct drm_crtc_state *crtc_state, 113 + struct drm_connector_state *conn_state) 114 + { 115 + struct msm_dp *dp = to_dp_bridge(drm_bridge)->dp_display; 116 + 117 + if (WARN_ON(!conn_state)) 118 + return -ENODEV; 119 + 120 + if (!conn_state->crtc || !crtc_state) 121 + return 0; 122 + 123 + if (crtc_state->self_refresh_active && !dp->psr_supported) 124 + return -EINVAL; 125 + 126 + return 0; 127 + } 128 + 129 + static void edp_bridge_atomic_enable(struct drm_bridge *drm_bridge, 130 + struct drm_bridge_state *old_bridge_state) 131 + { 132 + struct drm_atomic_state *atomic_state = old_bridge_state->base.state; 133 + struct drm_crtc *crtc; 134 + struct drm_crtc_state *old_crtc_state; 135 + struct msm_dp_bridge *dp_bridge = to_dp_bridge(drm_bridge); 136 + struct msm_dp *dp = dp_bridge->dp_display; 137 + 138 + /* 139 + * Check the old state of the crtc to determine if the panel 140 + * was put into psr state previously by the edp_bridge_atomic_disable. 141 + * If the panel is in psr, just exit psr state and skip the full 142 + * bridge enable sequence. 
143 + */ 144 + crtc = drm_atomic_get_new_crtc_for_encoder(atomic_state, 145 + drm_bridge->encoder); 146 + if (!crtc) 147 + return; 148 + 149 + old_crtc_state = drm_atomic_get_old_crtc_state(atomic_state, crtc); 150 + 151 + if (old_crtc_state && old_crtc_state->self_refresh_active) { 152 + dp_display_set_psr(dp, false); 153 + return; 154 + } 155 + 156 + dp_bridge_atomic_enable(drm_bridge, old_bridge_state); 157 + } 158 + 159 + static void edp_bridge_atomic_disable(struct drm_bridge *drm_bridge, 160 + struct drm_bridge_state *old_bridge_state) 161 + { 162 + struct drm_atomic_state *atomic_state = old_bridge_state->base.state; 163 + struct drm_crtc *crtc; 164 + struct drm_crtc_state *new_crtc_state = NULL, *old_crtc_state = NULL; 165 + struct msm_dp_bridge *dp_bridge = to_dp_bridge(drm_bridge); 166 + struct msm_dp *dp = dp_bridge->dp_display; 167 + 168 + crtc = drm_atomic_get_old_crtc_for_encoder(atomic_state, 169 + drm_bridge->encoder); 170 + if (!crtc) 171 + goto out; 172 + 173 + new_crtc_state = drm_atomic_get_new_crtc_state(atomic_state, crtc); 174 + if (!new_crtc_state) 175 + goto out; 176 + 177 + old_crtc_state = drm_atomic_get_old_crtc_state(atomic_state, crtc); 178 + if (!old_crtc_state) 179 + goto out; 180 + 181 + /* 182 + * Set self refresh mode if current crtc state is active. 183 + * 184 + * If old crtc state is active, then this is a display disable 185 + * call while the sink is in psr state. So, exit psr here. 186 + * The eDP controller will be disabled in the 187 + * edp_bridge_atomic_post_disable function. 188 + * 189 + * We observed sink is stuck in self refresh if psr exit is skipped 190 + * when display disable occurs while the sink is in psr state. 
191 + */ 192 + if (new_crtc_state->self_refresh_active) { 193 + dp_display_set_psr(dp, true); 194 + return; 195 + } else if (old_crtc_state->self_refresh_active) { 196 + dp_display_set_psr(dp, false); 197 + return; 198 + } 199 + 200 + out: 201 + dp_bridge_atomic_disable(drm_bridge, old_bridge_state); 202 + } 203 + 204 + static void edp_bridge_atomic_post_disable(struct drm_bridge *drm_bridge, 205 + struct drm_bridge_state *old_bridge_state) 206 + { 207 + struct drm_atomic_state *atomic_state = old_bridge_state->base.state; 208 + struct drm_crtc *crtc; 209 + struct drm_crtc_state *new_crtc_state = NULL; 210 + 211 + crtc = drm_atomic_get_old_crtc_for_encoder(atomic_state, 212 + drm_bridge->encoder); 213 + if (!crtc) 214 + return; 215 + 216 + new_crtc_state = drm_atomic_get_new_crtc_state(atomic_state, crtc); 217 + if (!new_crtc_state) 218 + return; 219 + 220 + /* 221 + * Self refresh mode is already set in edp_bridge_atomic_disable. 222 + */ 223 + if (new_crtc_state->self_refresh_active) 224 + return; 225 + 226 + dp_bridge_atomic_post_disable(drm_bridge, old_bridge_state); 227 + } 228 + 229 + static const struct drm_bridge_funcs edp_bridge_ops = { 230 + .atomic_enable = edp_bridge_atomic_enable, 231 + .atomic_disable = edp_bridge_atomic_disable, 232 + .atomic_post_disable = edp_bridge_atomic_post_disable, 233 + .mode_set = dp_bridge_mode_set, 234 + .mode_valid = dp_bridge_mode_valid, 235 + .atomic_reset = drm_atomic_helper_bridge_reset, 236 + .atomic_duplicate_state = drm_atomic_helper_bridge_duplicate_state, 237 + .atomic_destroy_state = drm_atomic_helper_bridge_destroy_state, 238 + .atomic_check = edp_bridge_atomic_check, 239 + }; 240 + 110 241 struct drm_bridge *dp_bridge_init(struct msm_dp *dp_display, struct drm_device *dev, 111 242 struct drm_encoder *encoder) 112 243 { ··· 252 121 dp_bridge->dp_display = dp_display; 253 122 254 123 bridge = &dp_bridge->bridge; 255 - bridge->funcs = &dp_bridge_ops; 124 + bridge->funcs = dp_display->is_edp ? 
&edp_bridge_ops : &dp_bridge_ops; 256 125 bridge->type = dp_display->connector_type; 257 126 258 127 /*
+36
drivers/gpu/drm/msm/dp/dp_link.c
··· 937 937 return 0; 938 938 } 939 939 940 + static bool dp_link_read_psr_error_status(struct dp_link_private *link) 941 + { 942 + u8 status; 943 + 944 + drm_dp_dpcd_read(link->aux, DP_PSR_ERROR_STATUS, &status, 1); 945 + 946 + if (status & DP_PSR_LINK_CRC_ERROR) 947 + DRM_ERROR("PSR LINK CRC ERROR\n"); 948 + else if (status & DP_PSR_RFB_STORAGE_ERROR) 949 + DRM_ERROR("PSR RFB STORAGE ERROR\n"); 950 + else if (status & DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR) 951 + DRM_ERROR("PSR VSC SDP UNCORRECTABLE ERROR\n"); 952 + else 953 + return false; 954 + 955 + return true; 956 + } 957 + 958 + static bool dp_link_psr_capability_changed(struct dp_link_private *link) 959 + { 960 + u8 status; 961 + 962 + drm_dp_dpcd_read(link->aux, DP_PSR_ESI, &status, 1); 963 + 964 + if (status & DP_PSR_CAPS_CHANGE) { 965 + drm_dbg_dp(link->drm_dev, "PSR Capability Change\n"); 966 + return true; 967 + } 968 + 969 + return false; 970 + } 971 + 940 972 static u8 get_link_status(const u8 link_status[DP_LINK_STATUS_SIZE], int r) 941 973 { 942 974 return link_status[r - DP_LANE0_1_STATUS]; ··· 1087 1055 dp_link->sink_request |= DP_TEST_LINK_TRAINING; 1088 1056 } else if (!dp_link_process_phy_test_pattern_request(link)) { 1089 1057 dp_link->sink_request |= DP_TEST_LINK_PHY_TEST_PATTERN; 1058 + } else if (dp_link_read_psr_error_status(link)) { 1059 + DRM_ERROR("PSR IRQ_HPD received\n"); 1060 + } else if (dp_link_psr_capability_changed(link)) { 1061 + drm_dbg_dp(link->drm_dev, "PSR Capabiity changed"); 1090 1062 } else { 1091 1063 ret = dp_link_process_link_status_update(link); 1092 1064 if (!ret) {
+22
drivers/gpu/drm/msm/dp/dp_panel.c
··· 20 20 bool aux_cfg_update_done; 21 21 }; 22 22 23 + static void dp_panel_read_psr_cap(struct dp_panel_private *panel) 24 + { 25 + ssize_t rlen; 26 + struct dp_panel *dp_panel; 27 + 28 + dp_panel = &panel->dp_panel; 29 + 30 + /* edp sink */ 31 + if (dp_panel->dpcd[DP_EDP_CONFIGURATION_CAP]) { 32 + rlen = drm_dp_dpcd_read(panel->aux, DP_PSR_SUPPORT, 33 + &dp_panel->psr_cap, sizeof(dp_panel->psr_cap)); 34 + if (rlen == sizeof(dp_panel->psr_cap)) { 35 + drm_dbg_dp(panel->drm_dev, 36 + "psr version: 0x%x, psr_cap: 0x%x\n", 37 + dp_panel->psr_cap.version, 38 + dp_panel->psr_cap.capabilities); 39 + } else 40 + DRM_ERROR("failed to read psr info, rlen=%zd\n", rlen); 41 + } 42 + } 43 + 23 44 static int dp_panel_read_dpcd(struct dp_panel *dp_panel) 24 45 { 25 46 int rc = 0; ··· 128 107 } 129 108 } 130 109 110 + dp_panel_read_psr_cap(panel); 131 111 end: 132 112 return rc; 133 113 }
+6
drivers/gpu/drm/msm/dp/dp_panel.h
··· 34 34 struct dp_catalog *catalog; 35 35 }; 36 36 37 + struct dp_panel_psr { 38 + u8 version; 39 + u8 capabilities; 40 + }; 41 + 37 42 struct dp_panel { 38 43 /* dpcd raw data */ 39 44 u8 dpcd[DP_RECEIVER_CAP_SIZE + 1]; ··· 51 46 struct edid *edid; 52 47 struct drm_connector *connector; 53 48 struct dp_display_mode dp_mode; 49 + struct dp_panel_psr psr_cap; 54 50 bool video_test; 55 51 56 52 u32 vic;
+27
drivers/gpu/drm/msm/dp/dp_reg.h
··· 22 22 #define REG_DP_INTR_STATUS2 (0x00000024) 23 23 #define REG_DP_INTR_STATUS3 (0x00000028) 24 24 25 + #define REG_DP_INTR_STATUS4 (0x0000002C) 26 + #define PSR_UPDATE_INT (0x00000001) 27 + #define PSR_CAPTURE_INT (0x00000004) 28 + #define PSR_EXIT_INT (0x00000010) 29 + #define PSR_UPDATE_ERROR_INT (0x00000040) 30 + #define PSR_WAKE_ERROR_INT (0x00000100) 31 + 32 + #define REG_DP_INTR_MASK4 (0x00000030) 33 + #define PSR_UPDATE_MASK (0x00000001) 34 + #define PSR_CAPTURE_MASK (0x00000002) 35 + #define PSR_EXIT_MASK (0x00000004) 36 + #define PSR_UPDATE_ERROR_MASK (0x00000008) 37 + #define PSR_WAKE_ERROR_MASK (0x00000010) 38 + 25 39 #define REG_DP_DP_HPD_CTRL (0x00000000) 26 40 #define DP_DP_HPD_CTRL_HPD_EN (0x00000001) 27 41 ··· 178 164 #define MMSS_DP_AUDIO_TIMING_RBR_48 (0x00000094) 179 165 #define MMSS_DP_AUDIO_TIMING_HBR_48 (0x00000098) 180 166 167 + #define REG_PSR_CONFIG (0x00000100) 168 + #define DISABLE_PSR (0x00000000) 169 + #define PSR1_SUPPORTED (0x00000001) 170 + #define PSR2_WITHOUT_FRAMESYNC (0x00000002) 171 + #define PSR2_WITH_FRAMESYNC (0x00000003) 172 + 173 + #define REG_PSR_CMD (0x00000110) 174 + #define PSR_ENTER (0x00000001) 175 + #define PSR_EXIT (0x00000002) 176 + 181 177 #define MMSS_DP_PSR_CRC_RG (0x00000154) 182 178 #define MMSS_DP_PSR_CRC_B (0x00000158) 183 179 ··· 207 183 208 184 #define MMSS_DP_AUDIO_STREAM_0 (0x00000240) 209 185 #define MMSS_DP_AUDIO_STREAM_1 (0x00000244) 186 + 187 + #define MMSS_DP_SDP_CFG3 (0x0000024c) 188 + #define UPDATE_SDP (0x00000001) 210 189 211 190 #define MMSS_DP_EXTENSION_0 (0x00000250) 212 191 #define MMSS_DP_EXTENSION_1 (0x00000254)