Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

drm/msm/dp: return correct connection status after suspend

During suspend, the dp host controller and hpd block are disabled because
both the ahb and aux clocks are disabled. Therefore hpd plug/unplug
interrupts will not be generated. At dp_pm_resume(), reinitialize both the
dp host controller and the hpd block so that hpd plug/unplug interrupts
are generated and handled by the driver, keeping the hpd connection state
updated correctly. This patch fixes flaky link training issues.

Changes in v2:
-- use container_of to cast correct dp_display_private pointer
at both dp_pm_suspend() and dp_pm_resume().

Changes in v3:
-- replace hpd_state atomic_t with u32

Changes in v4:
-- call dp_display_host_deinit() at dp_pm_suspend()
-- call dp_display_host_init() at msm_dp_display_enable()
-- fix phy->init_count imbalance which causes link training failures

Changes in v5:
-- add Fixes tag

Fixes: 8ede2ecc3e5e ("drm/msm/dp: Add DP compliance tests on Snapdragon Chipsets")
Tested-by: Stephen Boyd <swboyd@chromium.org>
Signed-off-by: Kuogee Hsieh <khsieh@codeaurora.org>
Signed-off-by: Rob Clark <robdclark@chromium.org>

authored by

Kuogee Hsieh and committed by
Rob Clark
19e52bcb 5771de5d

+97 -68
+13
drivers/gpu/drm/msm/dp/dp_catalog.c
··· 572 572 dp_write_aux(catalog, REG_DP_DP_HPD_CTRL, DP_DP_HPD_CTRL_HPD_EN); 573 573 } 574 574 575 + u32 dp_catalog_hpd_get_state_status(struct dp_catalog *dp_catalog) 576 + { 577 + struct dp_catalog_private *catalog = container_of(dp_catalog, 578 + struct dp_catalog_private, dp_catalog); 579 + u32 status; 580 + 581 + status = dp_read_aux(catalog, REG_DP_DP_HPD_INT_STATUS); 582 + status >>= DP_DP_HPD_STATE_STATUS_BITS_SHIFT; 583 + status &= DP_DP_HPD_STATE_STATUS_BITS_MASK; 584 + 585 + return status; 586 + } 587 + 575 588 u32 dp_catalog_hpd_get_intr_status(struct dp_catalog *dp_catalog) 576 589 { 577 590 struct dp_catalog_private *catalog = container_of(dp_catalog,
+1
drivers/gpu/drm/msm/dp/dp_catalog.h
··· 97 97 void dp_catalog_hpd_config_intr(struct dp_catalog *dp_catalog, 98 98 u32 intr_mask, bool en); 99 99 void dp_catalog_ctrl_hpd_config(struct dp_catalog *dp_catalog); 100 + u32 dp_catalog_hpd_get_state_status(struct dp_catalog *dp_catalog); 100 101 u32 dp_catalog_hpd_get_intr_status(struct dp_catalog *dp_catalog); 101 102 void dp_catalog_ctrl_phy_reset(struct dp_catalog *dp_catalog); 102 103 int dp_catalog_ctrl_update_vx_px(struct dp_catalog *dp_catalog, u8 v_level,
+5
drivers/gpu/drm/msm/dp/dp_ctrl.c
··· 1403 1403 void dp_ctrl_host_deinit(struct dp_ctrl *dp_ctrl) 1404 1404 { 1405 1405 struct dp_ctrl_private *ctrl; 1406 + struct dp_io *dp_io; 1407 + struct phy *phy; 1406 1408 1407 1409 if (!dp_ctrl) { 1408 1410 DRM_ERROR("Invalid input data\n"); ··· 1412 1410 } 1413 1411 1414 1412 ctrl = container_of(dp_ctrl, struct dp_ctrl_private, dp_ctrl); 1413 + dp_io = &ctrl->parser->io; 1414 + phy = dp_io->phy; 1415 1415 1416 1416 dp_catalog_ctrl_enable_irq(ctrl->catalog, false); 1417 + phy_exit(phy); 1417 1418 1418 1419 DRM_DEBUG_DP("Host deinitialized successfully\n"); 1419 1420 }
+76 -68
drivers/gpu/drm/msm/dp/dp_display.c
··· 108 108 /* event related only access by event thread */ 109 109 struct mutex event_mutex; 110 110 wait_queue_head_t event_q; 111 - atomic_t hpd_state; 111 + u32 hpd_state; 112 112 u32 event_pndx; 113 113 u32 event_gndx; 114 114 struct dp_event event_list[DP_EVENT_Q_MAX]; 115 115 spinlock_t event_lock; 116 - 117 - struct completion resume_comp; 118 116 119 117 struct dp_audio *audio; 120 118 }; ··· 365 367 dp->core_initialized = true; 366 368 } 367 369 370 + static void dp_display_host_deinit(struct dp_display_private *dp) 371 + { 372 + if (!dp->core_initialized) { 373 + DRM_DEBUG_DP("DP core not initialized\n"); 374 + return; 375 + } 376 + 377 + dp_ctrl_host_deinit(dp->ctrl); 378 + dp_aux_deinit(dp->aux); 379 + dp_power_deinit(dp->power); 380 + 381 + dp->core_initialized = false; 382 + } 383 + 368 384 static int dp_display_usbpd_configure_cb(struct device *dev) 369 385 { 370 386 int rc = 0; ··· 503 491 504 492 mutex_lock(&dp->event_mutex); 505 493 506 - state = atomic_read(&dp->hpd_state); 494 + state = dp->hpd_state; 507 495 if (state == ST_SUSPEND_PENDING) { 508 496 mutex_unlock(&dp->event_mutex); 509 497 return 0; ··· 521 509 return 0; 522 510 } 523 511 524 - if (state == ST_SUSPENDED) 525 - tout = DP_TIMEOUT_NONE; 526 - 527 - atomic_set(&dp->hpd_state, ST_CONNECT_PENDING); 512 + dp->hpd_state = ST_CONNECT_PENDING; 528 513 529 514 hpd->hpd_high = 1; 530 515 531 516 ret = dp_display_usbpd_configure_cb(&dp->pdev->dev); 532 517 if (ret) { /* failed */ 533 518 hpd->hpd_high = 0; 534 - atomic_set(&dp->hpd_state, ST_DISCONNECTED); 519 + dp->hpd_state = ST_DISCONNECTED; 535 520 } 536 521 537 522 /* start sanity checking */ ··· 549 540 550 541 mutex_lock(&dp->event_mutex); 551 542 552 - state = atomic_read(&dp->hpd_state); 543 + state = dp->hpd_state; 553 544 if (state == ST_CONNECT_PENDING) { 554 545 dp_display_enable(dp, 0); 555 - atomic_set(&dp->hpd_state, ST_CONNECTED); 546 + dp->hpd_state = ST_CONNECTED; 556 547 } 557 548 558 549 mutex_unlock(&dp->event_mutex); 
··· 577 568 578 569 mutex_lock(&dp->event_mutex); 579 570 580 - state = atomic_read(&dp->hpd_state); 571 + state = dp->hpd_state; 581 572 if (state == ST_SUSPEND_PENDING) { 582 573 mutex_unlock(&dp->event_mutex); 583 574 return 0; ··· 595 586 return 0; 596 587 } 597 588 598 - atomic_set(&dp->hpd_state, ST_DISCONNECT_PENDING); 589 + dp->hpd_state = ST_DISCONNECT_PENDING; 599 590 600 591 /* disable HPD plug interrupt until disconnect is done */ 601 592 dp_catalog_hpd_config_intr(dp->catalog, DP_DP_HPD_PLUG_INT_MASK ··· 630 621 631 622 mutex_lock(&dp->event_mutex); 632 623 633 - state = atomic_read(&dp->hpd_state); 624 + state = dp->hpd_state; 634 625 if (state == ST_DISCONNECT_PENDING) { 635 626 dp_display_disable(dp, 0); 636 - atomic_set(&dp->hpd_state, ST_DISCONNECTED); 627 + dp->hpd_state = ST_DISCONNECTED; 637 628 } 638 629 639 630 mutex_unlock(&dp->event_mutex); ··· 648 639 mutex_lock(&dp->event_mutex); 649 640 650 641 /* irq_hpd can happen at either connected or disconnected state */ 651 - state = atomic_read(&dp->hpd_state); 642 + state = dp->hpd_state; 652 643 if (state == ST_SUSPEND_PENDING) { 653 644 mutex_unlock(&dp->event_mutex); 654 645 return 0; ··· 799 790 800 791 dp_display = g_dp_display; 801 792 802 - if (dp_display->power_on) { 803 - DRM_DEBUG_DP("Link already setup, return\n"); 804 - return 0; 805 - } 806 - 807 793 rc = dp_ctrl_on_stream(dp->ctrl); 808 794 if (!rc) 809 795 dp_display->power_on = true; 810 796 811 - /* complete resume_comp regardless it is armed or not */ 812 - complete(&dp->resume_comp); 813 797 return rc; 814 798 } 815 799 ··· 830 828 struct msm_dp *dp_display; 831 829 832 830 dp_display = g_dp_display; 833 - 834 - if (!dp_display->power_on) 835 - return -EINVAL; 836 831 837 832 /* wait only if audio was enabled */ 838 833 if (dp_display->audio_enabled) { ··· 1151 1152 } 1152 1153 1153 1154 mutex_init(&dp->event_mutex); 1154 - 1155 - init_completion(&dp->resume_comp); 1156 - 1157 1155 g_dp_display = &dp->dp_display; 1158 1156 
1159 1157 /* Store DP audio handle inside DP display */ ··· 1186 1190 1187 1191 static int dp_pm_resume(struct device *dev) 1188 1192 { 1193 + struct platform_device *pdev = to_platform_device(dev); 1194 + struct msm_dp *dp_display = platform_get_drvdata(pdev); 1195 + struct dp_display_private *dp; 1196 + u32 status; 1197 + 1198 + dp = container_of(dp_display, struct dp_display_private, dp_display); 1199 + 1200 + mutex_lock(&dp->event_mutex); 1201 + 1202 + /* start from disconnected state */ 1203 + dp->hpd_state = ST_DISCONNECTED; 1204 + 1205 + /* turn on dp ctrl/phy */ 1206 + dp_display_host_init(dp); 1207 + 1208 + dp_catalog_ctrl_hpd_config(dp->catalog); 1209 + 1210 + status = dp_catalog_hpd_get_state_status(dp->catalog); 1211 + 1212 + if (status) { 1213 + dp->dp_display.is_connected = true; 1214 + } else { 1215 + dp->dp_display.is_connected = false; 1216 + /* make sure next resume host_init be called */ 1217 + dp->core_initialized = false; 1218 + } 1219 + 1220 + mutex_unlock(&dp->event_mutex); 1221 + 1189 1222 return 0; 1190 1223 } 1191 1224 1192 1225 static int dp_pm_suspend(struct device *dev) 1193 1226 { 1194 1227 struct platform_device *pdev = to_platform_device(dev); 1195 - struct dp_display_private *dp = platform_get_drvdata(pdev); 1228 + struct msm_dp *dp_display = platform_get_drvdata(pdev); 1229 + struct dp_display_private *dp; 1196 1230 1197 - if (!dp) { 1198 - DRM_ERROR("DP driver bind failed. 
Invalid driver data\n"); 1199 - return -EINVAL; 1200 - } 1231 + dp = container_of(dp_display, struct dp_display_private, dp_display); 1201 1232 1202 - atomic_set(&dp->hpd_state, ST_SUSPENDED); 1233 + mutex_lock(&dp->event_mutex); 1234 + 1235 + if (dp->core_initialized == true) 1236 + dp_display_host_deinit(dp); 1237 + 1238 + dp->hpd_state = ST_SUSPENDED; 1239 + 1240 + mutex_unlock(&dp->event_mutex); 1203 1241 1204 1242 return 0; 1205 1243 } ··· 1348 1318 return 0; 1349 1319 } 1350 1320 1351 - static int dp_display_wait4resume_done(struct dp_display_private *dp) 1352 - { 1353 - int ret = 0; 1354 - 1355 - reinit_completion(&dp->resume_comp); 1356 - if (!wait_for_completion_timeout(&dp->resume_comp, 1357 - WAIT_FOR_RESUME_TIMEOUT_JIFFIES)) { 1358 - DRM_ERROR("wait4resume_done timedout\n"); 1359 - ret = -ETIMEDOUT; 1360 - } 1361 - return ret; 1362 - } 1363 - 1364 1321 int msm_dp_display_enable(struct msm_dp *dp, struct drm_encoder *encoder) 1365 1322 { 1366 1323 int rc = 0; ··· 1361 1344 } 1362 1345 1363 1346 mutex_lock(&dp_display->event_mutex); 1347 + 1348 + dp_del_event(dp_display, EV_CONNECT_PENDING_TIMEOUT); 1364 1349 1365 1350 rc = dp_display_set_mode(dp, &dp_display->dp_mode); 1366 1351 if (rc) { ··· 1378 1359 return rc; 1379 1360 } 1380 1361 1381 - state = atomic_read(&dp_display->hpd_state); 1382 - if (state == ST_SUSPENDED) { 1383 - /* start link training */ 1384 - dp_add_event(dp_display, EV_HPD_PLUG_INT, 0, 0); 1385 - mutex_unlock(&dp_display->event_mutex); 1362 + state = dp_display->hpd_state; 1386 1363 1387 - /* wait until dp interface is up */ 1388 - goto resume_done; 1389 - } 1364 + if (state == ST_SUSPEND_PENDING) 1365 + dp_display_host_init(dp_display); 1390 1366 1391 1367 dp_display_enable(dp_display, 0); 1392 1368 ··· 1392 1378 dp_display_unprepare(dp); 1393 1379 } 1394 1380 1395 - dp_del_event(dp_display, EV_CONNECT_PENDING_TIMEOUT); 1396 - 1397 1381 if (state == ST_SUSPEND_PENDING) 1398 1382 dp_add_event(dp_display, EV_IRQ_HPD_INT, 0, 0); 1399 
1383 1400 1384 /* completed connection */ 1401 - atomic_set(&dp_display->hpd_state, ST_CONNECTED); 1385 + dp_display->hpd_state = ST_CONNECTED; 1402 1386 1403 1387 mutex_unlock(&dp_display->event_mutex); 1404 1388 1405 - return rc; 1406 - 1407 - resume_done: 1408 - dp_display_wait4resume_done(dp_display); 1409 1389 return rc; 1410 1390 } 1411 1391 ··· 1424 1416 1425 1417 mutex_lock(&dp_display->event_mutex); 1426 1418 1419 + dp_del_event(dp_display, EV_DISCONNECT_PENDING_TIMEOUT); 1420 + 1427 1421 dp_display_disable(dp_display, 0); 1428 1422 1429 1423 rc = dp_display_unprepare(dp); 1430 1424 if (rc) 1431 1425 DRM_ERROR("DP display unprepare failed, rc=%d\n", rc); 1432 1426 1433 - dp_del_event(dp_display, EV_DISCONNECT_PENDING_TIMEOUT); 1434 - 1435 - state = atomic_read(&dp_display->hpd_state); 1427 + state = dp_display->hpd_state; 1436 1428 if (state == ST_DISCONNECT_PENDING) { 1437 1429 /* completed disconnection */ 1438 - atomic_set(&dp_display->hpd_state, ST_DISCONNECTED); 1430 + dp_display->hpd_state = ST_DISCONNECTED; 1439 1431 } else { 1440 - atomic_set(&dp_display->hpd_state, ST_SUSPEND_PENDING); 1432 + dp_display->hpd_state = ST_SUSPEND_PENDING; 1441 1433 } 1442 1434 1443 1435 mutex_unlock(&dp_display->event_mutex);
+2
drivers/gpu/drm/msm/dp/dp_reg.h
··· 32 32 #define DP_DP_IRQ_HPD_INT_ACK (0x00000002) 33 33 #define DP_DP_HPD_REPLUG_INT_ACK (0x00000004) 34 34 #define DP_DP_HPD_UNPLUG_INT_ACK (0x00000008) 35 + #define DP_DP_HPD_STATE_STATUS_BITS_MASK (0x0000000F) 36 + #define DP_DP_HPD_STATE_STATUS_BITS_SHIFT (0x1C) 35 37 36 38 #define REG_DP_DP_HPD_INT_MASK (0x0000000C) 37 39 #define DP_DP_HPD_PLUG_INT_MASK (0x00000001)