Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

drm/msm/dp: add DisplayPort driver support

Add the needed DisplayPort files to enable the DP driver
on msm target.

"dp_display" module is the main module that calls into
other sub-modules. "dp_drm" file represents the interface
between DRM framework and DP driver.

Changes in v12:

-- Add support of pm ops in display port driver
-- Clear bpp depth bits before writing to MISC register
-- Fix EDID read

Previous Change log:
https://lkml.kernel.org/lkml/20200818051137.21478-3-tanmay@codeaurora.org/

Signed-off-by: Chandan Uddaraju <chandanu@codeaurora.org>
Signed-off-by: Vara Reddy <varar@codeaurora.org>
Signed-off-by: Tanmay Shah <tanmay@codeaurora.org>
Co-developed-by: Abhinav Kumar <abhinavk@codeaurora.org>
Signed-off-by: Abhinav Kumar <abhinavk@codeaurora.org>
Co-developed-by: Kuogee Hsieh <khsieh@codeaurora.org>
Signed-off-by: Kuogee Hsieh <khsieh@codeaurora.org>
Co-developed-by: Guenter Roeck <groeck@chromium.org>
Signed-off-by: Guenter Roeck <groeck@chromium.org>
Co-developed-by: Stephen Boyd <swboyd@chromium.org>
Signed-off-by: Stephen Boyd <swboyd@chromium.org>
Signed-off-by: Rob Clark <robdclark@chromium.org>

authored by

Chandan Uddaraju and committed by
Rob Clark
c943b494 b22960b8

+8061 -3
+8
drivers/gpu/drm/msm/Kconfig
··· 58 58 help 59 59 Choose this option to enable HDCP state machine 60 60 61 + config DRM_MSM_DP 62 + bool "Enable DisplayPort support in MSM DRM driver" 63 + depends on DRM_MSM 64 + help 65 + Compile in support for DP driver in MSM DRM driver. DP external 66 + display support is enabled through this config option. It can 67 + be primary or secondary display on device. 68 + 61 69 config DRM_MSM_DSI 62 70 bool "Enable DSI support in MSM DRM driver" 63 71 depends on DRM_MSM
+12
drivers/gpu/drm/msm/Makefile
··· 2 2 ccflags-y := -I $(srctree)/$(src) 3 3 ccflags-y += -I $(srctree)/$(src)/disp/dpu1 4 4 ccflags-$(CONFIG_DRM_MSM_DSI) += -I $(srctree)/$(src)/dsi 5 + ccflags-$(CONFIG_DRM_MSM_DP) += -I $(srctree)/$(src)/dp 5 6 6 7 msm-y := \ 7 8 adreno/adreno_device.o \ ··· 99 98 msm-$(CONFIG_DEBUG_FS) += adreno/a5xx_debugfs.o 100 99 101 100 msm-$(CONFIG_DRM_MSM_GPU_STATE) += adreno/a6xx_gpu_state.o 101 + 102 + msm-$(CONFIG_DRM_MSM_DP)+= dp/dp_aux.o \ 103 + dp/dp_catalog.o \ 104 + dp/dp_ctrl.o \ 105 + dp/dp_display.o \ 106 + dp/dp_drm.o \ 107 + dp/dp_hpd.o \ 108 + dp/dp_link.o \ 109 + dp/dp_panel.o \ 110 + dp/dp_parser.o \ 111 + dp/dp_power.o 102 112 103 113 msm-$(CONFIG_DRM_FBDEV_EMULATION) += msm_fbdev.o 104 114 msm-$(CONFIG_COMMON_CLK) += disp/mdp4/mdp4_lvds_pll.o
+19
drivers/gpu/drm/msm/disp/dpu1/dpu_encoder.c
··· 1001 1001 1002 1002 trace_dpu_enc_mode_set(DRMID(drm_enc)); 1003 1003 1004 + if (drm_enc->encoder_type == DRM_MODE_ENCODER_TMDS && priv->dp) 1005 + msm_dp_display_mode_set(priv->dp, drm_enc, mode, adj_mode); 1006 + 1004 1007 list_for_each_entry(conn_iter, connector_list, head) 1005 1008 if (conn_iter->encoder == drm_enc) 1006 1009 conn = conn_iter; ··· 1149 1146 { 1150 1147 struct dpu_encoder_virt *dpu_enc = NULL; 1151 1148 int ret = 0; 1149 + struct msm_drm_private *priv; 1152 1150 struct drm_display_mode *cur_mode = NULL; 1153 1151 1154 1152 if (!drm_enc) { ··· 1160 1156 1161 1157 mutex_lock(&dpu_enc->enc_lock); 1162 1158 cur_mode = &dpu_enc->base.crtc->state->adjusted_mode; 1159 + priv = drm_enc->dev->dev_private; 1163 1160 1164 1161 trace_dpu_enc_enable(DRMID(drm_enc), cur_mode->hdisplay, 1165 1162 cur_mode->vdisplay); ··· 1181 1176 1182 1177 _dpu_encoder_virt_enable_helper(drm_enc); 1183 1178 1179 + if (drm_enc->encoder_type == DRM_MODE_ENCODER_TMDS && priv->dp) { 1180 + ret = msm_dp_display_enable(priv->dp, 1181 + drm_enc); 1182 + if (ret) { 1183 + DPU_ERROR_ENC(dpu_enc, "dp display enable failed: %d\n", 1184 + ret); 1185 + goto out; 1186 + } 1187 + } 1184 1188 dpu_enc->enabled = true; 1185 1189 1186 1190 out: ··· 1247 1233 } 1248 1234 1249 1235 DPU_DEBUG_ENC(dpu_enc, "encoder disabled\n"); 1236 + 1237 + if (drm_enc->encoder_type == DRM_MODE_ENCODER_TMDS && priv->dp) { 1238 + if (msm_dp_display_disable(priv->dp, drm_enc)) 1239 + DPU_ERROR_ENC(dpu_enc, "dp display disable failed\n"); 1240 + } 1250 1241 1251 1242 mutex_unlock(&dpu_enc->enc_lock); 1252 1243 }
+8
drivers/gpu/drm/msm/disp/dpu1/dpu_encoder_phys_vid.c
··· 100 100 * display_v_end -= mode->hsync_start - mode->hdisplay; 101 101 * } 102 102 */ 103 + /* for DP/EDP, Shift timings to align it to bottom right */ 104 + if ((phys_enc->hw_intf->cap->type == INTF_DP) || 105 + (phys_enc->hw_intf->cap->type == INTF_EDP)) { 106 + timing->h_back_porch += timing->h_front_porch; 107 + timing->h_front_porch = 0; 108 + timing->v_back_porch += timing->v_front_porch; 109 + timing->v_front_porch = 0; 110 + } 103 111 } 104 112 105 113 static u32 get_horizontal_total(const struct intf_timing_params *timing)
+535
drivers/gpu/drm/msm/dp/dp_aux.c
··· 1 + // SPDX-License-Identifier: GPL-2.0-only 2 + /* 3 + * Copyright (c) 2012-2020, The Linux Foundation. All rights reserved. 4 + */ 5 + 6 + #include <linux/delay.h> 7 + 8 + #include "dp_reg.h" 9 + #include "dp_aux.h" 10 + 11 + #define DP_AUX_ENUM_STR(x) #x 12 + 13 + struct dp_aux_private { 14 + struct device *dev; 15 + struct dp_catalog *catalog; 16 + 17 + struct mutex mutex; 18 + struct completion comp; 19 + 20 + u32 aux_error_num; 21 + u32 retry_cnt; 22 + bool cmd_busy; 23 + bool native; 24 + bool read; 25 + bool no_send_addr; 26 + bool no_send_stop; 27 + u32 offset; 28 + u32 segment; 29 + u32 isr; 30 + 31 + struct drm_dp_aux dp_aux; 32 + }; 33 + 34 + static const char *dp_aux_get_error(u32 aux_error) 35 + { 36 + switch (aux_error) { 37 + case DP_AUX_ERR_NONE: 38 + return DP_AUX_ENUM_STR(DP_AUX_ERR_NONE); 39 + case DP_AUX_ERR_ADDR: 40 + return DP_AUX_ENUM_STR(DP_AUX_ERR_ADDR); 41 + case DP_AUX_ERR_TOUT: 42 + return DP_AUX_ENUM_STR(DP_AUX_ERR_TOUT); 43 + case DP_AUX_ERR_NACK: 44 + return DP_AUX_ENUM_STR(DP_AUX_ERR_NACK); 45 + case DP_AUX_ERR_DEFER: 46 + return DP_AUX_ENUM_STR(DP_AUX_ERR_DEFER); 47 + case DP_AUX_ERR_NACK_DEFER: 48 + return DP_AUX_ENUM_STR(DP_AUX_ERR_NACK_DEFER); 49 + default: 50 + return "unknown"; 51 + } 52 + } 53 + 54 + static u32 dp_aux_write(struct dp_aux_private *aux, 55 + struct drm_dp_aux_msg *msg) 56 + { 57 + u32 data[4], reg, len; 58 + u8 *msgdata = msg->buffer; 59 + int const AUX_CMD_FIFO_LEN = 128; 60 + int i = 0; 61 + 62 + if (aux->read) 63 + len = 4; 64 + else 65 + len = msg->size + 4; 66 + 67 + /* 68 + * cmd fifo only has depth of 144 bytes 69 + * limit buf length to 128 bytes here 70 + */ 71 + if (len > AUX_CMD_FIFO_LEN) { 72 + DRM_ERROR("buf size greater than allowed size of 128 bytes\n"); 73 + return 0; 74 + } 75 + 76 + /* Pack cmd and write to HW */ 77 + data[0] = (msg->address >> 16) & 0xf; /* addr[19:16] */ 78 + if (aux->read) 79 + data[0] |= BIT(4); /* R/W */ 80 + 81 + data[1] = (msg->address >> 8) & 0xff; /* addr[15:8] */ 
82 + data[2] = msg->address & 0xff; /* addr[7:0] */ 83 + data[3] = (msg->size - 1) & 0xff; /* len[7:0] */ 84 + 85 + for (i = 0; i < len; i++) { 86 + reg = (i < 4) ? data[i] : msgdata[i - 4]; 87 + /* index = 0, write */ 88 + reg = (((reg) << DP_AUX_DATA_OFFSET) 89 + & DP_AUX_DATA_MASK) | DP_AUX_DATA_WRITE; 90 + if (i == 0) 91 + reg |= DP_AUX_DATA_INDEX_WRITE; 92 + aux->catalog->aux_data = reg; 93 + dp_catalog_aux_write_data(aux->catalog); 94 + } 95 + 96 + dp_catalog_aux_clear_trans(aux->catalog, false); 97 + dp_catalog_aux_clear_hw_interrupts(aux->catalog); 98 + 99 + reg = 0; /* Transaction number == 1 */ 100 + if (!aux->native) { /* i2c */ 101 + reg |= DP_AUX_TRANS_CTRL_I2C; 102 + 103 + if (aux->no_send_addr) 104 + reg |= DP_AUX_TRANS_CTRL_NO_SEND_ADDR; 105 + 106 + if (aux->no_send_stop) 107 + reg |= DP_AUX_TRANS_CTRL_NO_SEND_STOP; 108 + } 109 + 110 + reg |= DP_AUX_TRANS_CTRL_GO; 111 + aux->catalog->aux_data = reg; 112 + dp_catalog_aux_write_trans(aux->catalog); 113 + 114 + return len; 115 + } 116 + 117 + static int dp_aux_cmd_fifo_tx(struct dp_aux_private *aux, 118 + struct drm_dp_aux_msg *msg) 119 + { 120 + u32 ret, len, timeout; 121 + int aux_timeout_ms = HZ/4; 122 + 123 + reinit_completion(&aux->comp); 124 + 125 + len = dp_aux_write(aux, msg); 126 + if (len == 0) { 127 + DRM_ERROR("DP AUX write failed\n"); 128 + return -EINVAL; 129 + } 130 + 131 + timeout = wait_for_completion_timeout(&aux->comp, aux_timeout_ms); 132 + if (!timeout) { 133 + DRM_ERROR("aux %s timeout\n", (aux->read ? 
"read" : "write")); 134 + return -ETIMEDOUT; 135 + } 136 + 137 + if (aux->aux_error_num == DP_AUX_ERR_NONE) { 138 + ret = len; 139 + } else { 140 + DRM_ERROR_RATELIMITED("aux err: %s\n", 141 + dp_aux_get_error(aux->aux_error_num)); 142 + 143 + ret = -EINVAL; 144 + } 145 + 146 + return ret; 147 + } 148 + 149 + static void dp_aux_cmd_fifo_rx(struct dp_aux_private *aux, 150 + struct drm_dp_aux_msg *msg) 151 + { 152 + u32 data; 153 + u8 *dp; 154 + u32 i, actual_i; 155 + u32 len = msg->size; 156 + 157 + dp_catalog_aux_clear_trans(aux->catalog, true); 158 + 159 + data = DP_AUX_DATA_INDEX_WRITE; /* INDEX_WRITE */ 160 + data |= DP_AUX_DATA_READ; /* read */ 161 + 162 + aux->catalog->aux_data = data; 163 + dp_catalog_aux_write_data(aux->catalog); 164 + 165 + dp = msg->buffer; 166 + 167 + /* discard first byte */ 168 + data = dp_catalog_aux_read_data(aux->catalog); 169 + 170 + for (i = 0; i < len; i++) { 171 + data = dp_catalog_aux_read_data(aux->catalog); 172 + *dp++ = (u8)((data >> DP_AUX_DATA_OFFSET) & 0xff); 173 + 174 + actual_i = (data >> DP_AUX_DATA_INDEX_OFFSET) & 0xFF; 175 + if (i != actual_i) 176 + DRM_ERROR("Index mismatch: expected %d, found %d\n", 177 + i, actual_i); 178 + } 179 + } 180 + 181 + static void dp_aux_native_handler(struct dp_aux_private *aux) 182 + { 183 + u32 isr = aux->isr; 184 + 185 + if (isr & DP_INTR_AUX_I2C_DONE) 186 + aux->aux_error_num = DP_AUX_ERR_NONE; 187 + else if (isr & DP_INTR_WRONG_ADDR) 188 + aux->aux_error_num = DP_AUX_ERR_ADDR; 189 + else if (isr & DP_INTR_TIMEOUT) 190 + aux->aux_error_num = DP_AUX_ERR_TOUT; 191 + if (isr & DP_INTR_NACK_DEFER) 192 + aux->aux_error_num = DP_AUX_ERR_NACK; 193 + if (isr & DP_INTR_AUX_ERROR) { 194 + aux->aux_error_num = DP_AUX_ERR_PHY; 195 + dp_catalog_aux_clear_hw_interrupts(aux->catalog); 196 + } 197 + 198 + complete(&aux->comp); 199 + } 200 + 201 + static void dp_aux_i2c_handler(struct dp_aux_private *aux) 202 + { 203 + u32 isr = aux->isr; 204 + 205 + if (isr & DP_INTR_AUX_I2C_DONE) { 206 + if (isr & 
(DP_INTR_I2C_NACK | DP_INTR_I2C_DEFER)) 207 + aux->aux_error_num = DP_AUX_ERR_NACK; 208 + else 209 + aux->aux_error_num = DP_AUX_ERR_NONE; 210 + } else { 211 + if (isr & DP_INTR_WRONG_ADDR) 212 + aux->aux_error_num = DP_AUX_ERR_ADDR; 213 + else if (isr & DP_INTR_TIMEOUT) 214 + aux->aux_error_num = DP_AUX_ERR_TOUT; 215 + if (isr & DP_INTR_NACK_DEFER) 216 + aux->aux_error_num = DP_AUX_ERR_NACK_DEFER; 217 + if (isr & DP_INTR_I2C_NACK) 218 + aux->aux_error_num = DP_AUX_ERR_NACK; 219 + if (isr & DP_INTR_I2C_DEFER) 220 + aux->aux_error_num = DP_AUX_ERR_DEFER; 221 + if (isr & DP_INTR_AUX_ERROR) { 222 + aux->aux_error_num = DP_AUX_ERR_PHY; 223 + dp_catalog_aux_clear_hw_interrupts(aux->catalog); 224 + } 225 + } 226 + 227 + complete(&aux->comp); 228 + } 229 + 230 + static void dp_aux_update_offset_and_segment(struct dp_aux_private *aux, 231 + struct drm_dp_aux_msg *input_msg) 232 + { 233 + u32 edid_address = 0x50; 234 + u32 segment_address = 0x30; 235 + bool i2c_read = input_msg->request & 236 + (DP_AUX_I2C_READ & DP_AUX_NATIVE_READ); 237 + u8 *data; 238 + 239 + if (aux->native || i2c_read || ((input_msg->address != edid_address) && 240 + (input_msg->address != segment_address))) 241 + return; 242 + 243 + 244 + data = input_msg->buffer; 245 + if (input_msg->address == segment_address) 246 + aux->segment = *data; 247 + else 248 + aux->offset = *data; 249 + } 250 + 251 + /** 252 + * dp_aux_transfer_helper() - helper function for EDID read transactions 253 + * 254 + * @aux: DP AUX private structure 255 + * @input_msg: input message from DRM upstream APIs 256 + * @send_seg: send the segment to sink 257 + * 258 + * return: void 259 + * 260 + * This helper function is used to fix EDID reads for non-compliant 261 + * sinks that do not handle the i2c middle-of-transaction flag correctly. 
262 + */ 263 + static void dp_aux_transfer_helper(struct dp_aux_private *aux, 264 + struct drm_dp_aux_msg *input_msg, 265 + bool send_seg) 266 + { 267 + struct drm_dp_aux_msg helper_msg; 268 + u32 message_size = 0x10; 269 + u32 segment_address = 0x30; 270 + u32 const edid_block_length = 0x80; 271 + bool i2c_mot = input_msg->request & DP_AUX_I2C_MOT; 272 + bool i2c_read = input_msg->request & 273 + (DP_AUX_I2C_READ & DP_AUX_NATIVE_READ); 274 + 275 + if (!i2c_mot || !i2c_read || (input_msg->size == 0)) 276 + return; 277 + 278 + /* 279 + * Sending the segment value and EDID offset will be performed 280 + * from the DRM upstream EDID driver for each block. Avoid 281 + * duplicate AUX transactions related to this while reading the 282 + * first 16 bytes of each block. 283 + */ 284 + if (!(aux->offset % edid_block_length) || !send_seg) 285 + goto end; 286 + 287 + aux->read = false; 288 + aux->cmd_busy = true; 289 + aux->no_send_addr = true; 290 + aux->no_send_stop = true; 291 + 292 + /* 293 + * Send the segment address for every i2c read in which the 294 + * middle-of-tranaction flag is set. This is required to support EDID 295 + * reads of more than 2 blocks as the segment address is reset to 0 296 + * since we are overriding the middle-of-transaction flag for read 297 + * transactions. 298 + */ 299 + 300 + if (aux->segment) { 301 + memset(&helper_msg, 0, sizeof(helper_msg)); 302 + helper_msg.address = segment_address; 303 + helper_msg.buffer = &aux->segment; 304 + helper_msg.size = 1; 305 + dp_aux_cmd_fifo_tx(aux, &helper_msg); 306 + } 307 + 308 + /* 309 + * Send the offset address for every i2c read in which the 310 + * middle-of-transaction flag is set. This will ensure that the sink 311 + * will update its read pointer and return the correct portion of the 312 + * EDID buffer in the subsequent i2c read trasntion triggered in the 313 + * native AUX transfer function. 
314 + */ 315 + memset(&helper_msg, 0, sizeof(helper_msg)); 316 + helper_msg.address = input_msg->address; 317 + helper_msg.buffer = &aux->offset; 318 + helper_msg.size = 1; 319 + dp_aux_cmd_fifo_tx(aux, &helper_msg); 320 + 321 + end: 322 + aux->offset += message_size; 323 + if (aux->offset == 0x80 || aux->offset == 0x100) 324 + aux->segment = 0x0; /* reset segment at end of block */ 325 + } 326 + 327 + /* 328 + * This function does the real job to process an AUX transaction. 329 + * It will call aux_reset() function to reset the AUX channel, 330 + * if the waiting is timeout. 331 + */ 332 + static ssize_t dp_aux_transfer(struct drm_dp_aux *dp_aux, 333 + struct drm_dp_aux_msg *msg) 334 + { 335 + ssize_t ret; 336 + int const aux_cmd_native_max = 16; 337 + int const aux_cmd_i2c_max = 128; 338 + int const retry_count = 5; 339 + struct dp_aux_private *aux = container_of(dp_aux, 340 + struct dp_aux_private, dp_aux); 341 + 342 + mutex_lock(&aux->mutex); 343 + 344 + aux->native = msg->request & (DP_AUX_NATIVE_WRITE & DP_AUX_NATIVE_READ); 345 + 346 + /* Ignore address only message */ 347 + if ((msg->size == 0) || (msg->buffer == NULL)) { 348 + msg->reply = aux->native ? 
349 + DP_AUX_NATIVE_REPLY_ACK : DP_AUX_I2C_REPLY_ACK; 350 + ret = msg->size; 351 + goto unlock_exit; 352 + } 353 + 354 + /* msg sanity check */ 355 + if ((aux->native && (msg->size > aux_cmd_native_max)) || 356 + (msg->size > aux_cmd_i2c_max)) { 357 + DRM_ERROR("%s: invalid msg: size(%zu), request(%x)\n", 358 + __func__, msg->size, msg->request); 359 + ret = -EINVAL; 360 + goto unlock_exit; 361 + } 362 + 363 + dp_aux_update_offset_and_segment(aux, msg); 364 + dp_aux_transfer_helper(aux, msg, true); 365 + 366 + aux->read = msg->request & (DP_AUX_I2C_READ & DP_AUX_NATIVE_READ); 367 + aux->cmd_busy = true; 368 + 369 + if (aux->read) { 370 + aux->no_send_addr = true; 371 + aux->no_send_stop = false; 372 + } else { 373 + aux->no_send_addr = true; 374 + aux->no_send_stop = true; 375 + } 376 + 377 + ret = dp_aux_cmd_fifo_tx(aux, msg); 378 + 379 + if (ret < 0) { 380 + if (aux->native) { 381 + aux->retry_cnt++; 382 + if (!(aux->retry_cnt % retry_count)) 383 + dp_catalog_aux_update_cfg(aux->catalog, 384 + PHY_AUX_CFG1); 385 + dp_catalog_aux_reset(aux->catalog); 386 + } 387 + goto unlock_exit; 388 + } 389 + 390 + if (aux->aux_error_num == DP_AUX_ERR_NONE) { 391 + if (aux->read) 392 + dp_aux_cmd_fifo_rx(aux, msg); 393 + 394 + msg->reply = aux->native ? 395 + DP_AUX_NATIVE_REPLY_ACK : DP_AUX_I2C_REPLY_ACK; 396 + } else { 397 + /* Reply defer to retry */ 398 + msg->reply = aux->native ? 
399 + DP_AUX_NATIVE_REPLY_DEFER : DP_AUX_I2C_REPLY_DEFER; 400 + } 401 + 402 + /* Return requested size for success or retry */ 403 + ret = msg->size; 404 + aux->retry_cnt = 0; 405 + 406 + unlock_exit: 407 + aux->cmd_busy = false; 408 + mutex_unlock(&aux->mutex); 409 + return ret; 410 + } 411 + 412 + void dp_aux_isr(struct drm_dp_aux *dp_aux) 413 + { 414 + struct dp_aux_private *aux; 415 + 416 + if (!dp_aux) { 417 + DRM_ERROR("invalid input\n"); 418 + return; 419 + } 420 + 421 + aux = container_of(dp_aux, struct dp_aux_private, dp_aux); 422 + 423 + aux->isr = dp_catalog_aux_get_irq(aux->catalog); 424 + 425 + if (!aux->cmd_busy) 426 + return; 427 + 428 + if (aux->native) 429 + dp_aux_native_handler(aux); 430 + else 431 + dp_aux_i2c_handler(aux); 432 + } 433 + 434 + void dp_aux_reconfig(struct drm_dp_aux *dp_aux) 435 + { 436 + struct dp_aux_private *aux; 437 + 438 + aux = container_of(dp_aux, struct dp_aux_private, dp_aux); 439 + 440 + dp_catalog_aux_update_cfg(aux->catalog, PHY_AUX_CFG1); 441 + dp_catalog_aux_reset(aux->catalog); 442 + } 443 + 444 + void dp_aux_init(struct drm_dp_aux *dp_aux) 445 + { 446 + struct dp_aux_private *aux; 447 + 448 + if (!dp_aux) { 449 + DRM_ERROR("invalid input\n"); 450 + return; 451 + } 452 + 453 + aux = container_of(dp_aux, struct dp_aux_private, dp_aux); 454 + 455 + dp_catalog_aux_setup(aux->catalog); 456 + dp_catalog_aux_enable(aux->catalog, true); 457 + aux->retry_cnt = 0; 458 + } 459 + 460 + void dp_aux_deinit(struct drm_dp_aux *dp_aux) 461 + { 462 + struct dp_aux_private *aux; 463 + 464 + aux = container_of(dp_aux, struct dp_aux_private, dp_aux); 465 + 466 + dp_catalog_aux_enable(aux->catalog, false); 467 + } 468 + 469 + int dp_aux_register(struct drm_dp_aux *dp_aux) 470 + { 471 + struct dp_aux_private *aux; 472 + int ret; 473 + 474 + if (!dp_aux) { 475 + DRM_ERROR("invalid input\n"); 476 + return -EINVAL; 477 + } 478 + 479 + aux = container_of(dp_aux, struct dp_aux_private, dp_aux); 480 + 481 + aux->dp_aux.name = "dpu_dp_aux"; 
482 + aux->dp_aux.dev = aux->dev; 483 + aux->dp_aux.transfer = dp_aux_transfer; 484 + ret = drm_dp_aux_register(&aux->dp_aux); 485 + if (ret) { 486 + DRM_ERROR("%s: failed to register drm aux: %d\n", __func__, 487 + ret); 488 + return ret; 489 + } 490 + 491 + return 0; 492 + } 493 + 494 + void dp_aux_unregister(struct drm_dp_aux *dp_aux) 495 + { 496 + drm_dp_aux_unregister(dp_aux); 497 + } 498 + 499 + struct drm_dp_aux *dp_aux_get(struct device *dev, struct dp_catalog *catalog) 500 + { 501 + struct dp_aux_private *aux; 502 + 503 + if (!catalog) { 504 + DRM_ERROR("invalid input\n"); 505 + return ERR_PTR(-ENODEV); 506 + } 507 + 508 + aux = devm_kzalloc(dev, sizeof(*aux), GFP_KERNEL); 509 + if (!aux) 510 + return ERR_PTR(-ENOMEM); 511 + 512 + init_completion(&aux->comp); 513 + aux->cmd_busy = false; 514 + mutex_init(&aux->mutex); 515 + 516 + aux->dev = dev; 517 + aux->catalog = catalog; 518 + aux->retry_cnt = 0; 519 + 520 + return &aux->dp_aux; 521 + } 522 + 523 + void dp_aux_put(struct drm_dp_aux *dp_aux) 524 + { 525 + struct dp_aux_private *aux; 526 + 527 + if (!dp_aux) 528 + return; 529 + 530 + aux = container_of(dp_aux, struct dp_aux_private, dp_aux); 531 + 532 + mutex_destroy(&aux->mutex); 533 + 534 + devm_kfree(aux->dev, aux); 535 + }
+30
drivers/gpu/drm/msm/dp/dp_aux.h
··· 1 + /* SPDX-License-Identifier: GPL-2.0-only */ 2 + /* 3 + * Copyright (c) 2012-2020, The Linux Foundation. All rights reserved. 4 + */ 5 + 6 + #ifndef _DP_AUX_H_ 7 + #define _DP_AUX_H_ 8 + 9 + #include "dp_catalog.h" 10 + #include <drm/drm_dp_helper.h> 11 + 12 + #define DP_AUX_ERR_NONE 0 13 + #define DP_AUX_ERR_ADDR -1 14 + #define DP_AUX_ERR_TOUT -2 15 + #define DP_AUX_ERR_NACK -3 16 + #define DP_AUX_ERR_DEFER -4 17 + #define DP_AUX_ERR_NACK_DEFER -5 18 + #define DP_AUX_ERR_PHY -6 19 + 20 + int dp_aux_register(struct drm_dp_aux *dp_aux); 21 + void dp_aux_unregister(struct drm_dp_aux *dp_aux); 22 + void dp_aux_isr(struct drm_dp_aux *dp_aux); 23 + void dp_aux_init(struct drm_dp_aux *dp_aux); 24 + void dp_aux_deinit(struct drm_dp_aux *dp_aux); 25 + void dp_aux_reconfig(struct drm_dp_aux *dp_aux); 26 + 27 + struct drm_dp_aux *dp_aux_get(struct device *dev, struct dp_catalog *catalog); 28 + void dp_aux_put(struct drm_dp_aux *aux); 29 + 30 + #endif /*__DP_AUX_H_*/
+1019
drivers/gpu/drm/msm/dp/dp_catalog.c
··· 1 + // SPDX-License-Identifier: GPL-2.0-only 2 + /* 3 + * Copyright (c) 2017-2020, The Linux Foundation. All rights reserved. 4 + */ 5 + 6 + #define pr_fmt(fmt) "[drm-dp] %s: " fmt, __func__ 7 + 8 + #include <linux/delay.h> 9 + #include <linux/iopoll.h> 10 + #include <linux/rational.h> 11 + #include <drm/drm_dp_helper.h> 12 + 13 + #include "dp_catalog.h" 14 + #include "dp_reg.h" 15 + 16 + #define POLLING_SLEEP_US 1000 17 + #define POLLING_TIMEOUT_US 10000 18 + 19 + #define REFTIMER_DEFAULT_VALUE 0x20000 20 + #define SCRAMBLER_RESET_COUNT_VALUE 0xFC 21 + 22 + #define DP_INTERRUPT_STATUS_ACK_SHIFT 1 23 + #define DP_INTERRUPT_STATUS_MASK_SHIFT 2 24 + 25 + #define MSM_DP_CONTROLLER_AHB_OFFSET 0x0000 26 + #define MSM_DP_CONTROLLER_AHB_SIZE 0x0200 27 + #define MSM_DP_CONTROLLER_AUX_OFFSET 0x0200 28 + #define MSM_DP_CONTROLLER_AUX_SIZE 0x0200 29 + #define MSM_DP_CONTROLLER_LINK_OFFSET 0x0400 30 + #define MSM_DP_CONTROLLER_LINK_SIZE 0x0C00 31 + #define MSM_DP_CONTROLLER_P0_OFFSET 0x1000 32 + #define MSM_DP_CONTROLLER_P0_SIZE 0x0400 33 + 34 + #define DP_INTERRUPT_STATUS1 \ 35 + (DP_INTR_AUX_I2C_DONE| \ 36 + DP_INTR_WRONG_ADDR | DP_INTR_TIMEOUT | \ 37 + DP_INTR_NACK_DEFER | DP_INTR_WRONG_DATA_CNT | \ 38 + DP_INTR_I2C_NACK | DP_INTR_I2C_DEFER | \ 39 + DP_INTR_PLL_UNLOCKED | DP_INTR_AUX_ERROR) 40 + 41 + #define DP_INTERRUPT_STATUS1_ACK \ 42 + (DP_INTERRUPT_STATUS1 << DP_INTERRUPT_STATUS_ACK_SHIFT) 43 + #define DP_INTERRUPT_STATUS1_MASK \ 44 + (DP_INTERRUPT_STATUS1 << DP_INTERRUPT_STATUS_MASK_SHIFT) 45 + 46 + #define DP_INTERRUPT_STATUS2 \ 47 + (DP_INTR_READY_FOR_VIDEO | DP_INTR_IDLE_PATTERN_SENT | \ 48 + DP_INTR_FRAME_END | DP_INTR_CRC_UPDATED) 49 + 50 + #define DP_INTERRUPT_STATUS2_ACK \ 51 + (DP_INTERRUPT_STATUS2 << DP_INTERRUPT_STATUS_ACK_SHIFT) 52 + #define DP_INTERRUPT_STATUS2_MASK \ 53 + (DP_INTERRUPT_STATUS2 << DP_INTERRUPT_STATUS_MASK_SHIFT) 54 + 55 + static u8 const vm_pre_emphasis_hbr_rbr[4][4] = { 56 + {0x00, 0x0C, 0x14, 0x19}, 57 + {0x00, 0x0B, 0x12, 0xFF}, 58 
+ {0x00, 0x0B, 0xFF, 0xFF}, 59 + {0x04, 0xFF, 0xFF, 0xFF} 60 + }; 61 + 62 + static u8 const vm_voltage_swing_hbr_rbr[4][4] = { 63 + {0x08, 0x0F, 0x16, 0x1F}, 64 + {0x11, 0x1E, 0x1F, 0xFF}, 65 + {0x19, 0x1F, 0xFF, 0xFF}, 66 + {0x1F, 0xFF, 0xFF, 0xFF} 67 + }; 68 + 69 + /* AUX look-up-table configurations 70 + * Pair of offset and config values for each LUT 71 + */ 72 + static u8 const aux_lut_offset[] = { 73 + 0x20, 0x24, 0x28, 0x2C, 0x30, 0x34, 0x38, 0x3C, 0x40, 0x44 74 + }; 75 + 76 + static u8 const 77 + aux_lut_value[PHY_AUX_CFG_MAX][DP_AUX_CFG_MAX_VALUE_CNT] = { 78 + { 0x00, 0x00, 0x00, }, 79 + { 0x13, 0x23, 0x1d, }, 80 + { 0x24, 0x00, 0x00, }, 81 + { 0x00, 0x00, 0x00, }, 82 + { 0x0A, 0x00, 0x00, }, 83 + { 0x26, 0x00, 0x00, }, 84 + { 0x0A, 0x00, 0x00, }, 85 + { 0x03, 0x00, 0x00, }, 86 + { 0xBB, 0x00, 0x00, }, 87 + { 0x03, 0x00, 0x00, } 88 + }; 89 + 90 + struct dp_catalog_private { 91 + struct device *dev; 92 + struct dp_io *io; 93 + struct dp_catalog dp_catalog; 94 + u8 aux_lut_cfg_index[PHY_AUX_CFG_MAX]; 95 + }; 96 + 97 + static inline u32 dp_read_aux(struct dp_catalog_private *catalog, u32 offset) 98 + { 99 + offset += MSM_DP_CONTROLLER_AUX_OFFSET; 100 + return readl_relaxed(catalog->io->dp_controller.base + offset); 101 + } 102 + 103 + static inline void dp_write_aux(struct dp_catalog_private *catalog, 104 + u32 offset, u32 data) 105 + { 106 + offset += MSM_DP_CONTROLLER_AUX_OFFSET; 107 + /* 108 + * To make sure aux reg writes happens before any other operation, 109 + * this function uses writel() instread of writel_relaxed() 110 + */ 111 + writel(data, catalog->io->dp_controller.base + offset); 112 + } 113 + 114 + static inline u32 dp_read_ahb(struct dp_catalog_private *catalog, u32 offset) 115 + { 116 + offset += MSM_DP_CONTROLLER_AHB_OFFSET; 117 + return readl_relaxed(catalog->io->dp_controller.base + offset); 118 + } 119 + 120 + static inline void dp_write_ahb(struct dp_catalog_private *catalog, 121 + u32 offset, u32 data) 122 + { 123 + offset += 
MSM_DP_CONTROLLER_AHB_OFFSET; 124 + /* 125 + * To make sure phy reg writes happens before any other operation, 126 + * this function uses writel() instread of writel_relaxed() 127 + */ 128 + writel(data, catalog->io->dp_controller.base + offset); 129 + } 130 + 131 + static inline void dp_write_phy(struct dp_catalog_private *catalog, 132 + u32 offset, u32 data) 133 + { 134 + /* 135 + * To make sure phy reg writes happens before any other operation, 136 + * this function uses writel() instread of writel_relaxed() 137 + */ 138 + writel(data, catalog->io->phy_io.base + offset); 139 + } 140 + 141 + static inline u32 dp_read_phy(struct dp_catalog_private *catalog, 142 + u32 offset) 143 + { 144 + /* 145 + * To make sure phy reg writes happens before any other operation, 146 + * this function uses writel() instread of writel_relaxed() 147 + */ 148 + return readl_relaxed(catalog->io->phy_io.base + offset); 149 + } 150 + 151 + static inline void dp_write_pll(struct dp_catalog_private *catalog, 152 + u32 offset, u32 data) 153 + { 154 + writel_relaxed(data, catalog->io->dp_pll_io.base + offset); 155 + } 156 + 157 + static inline void dp_write_ln_tx0(struct dp_catalog_private *catalog, 158 + u32 offset, u32 data) 159 + { 160 + writel_relaxed(data, catalog->io->ln_tx0_io.base + offset); 161 + } 162 + 163 + static inline void dp_write_ln_tx1(struct dp_catalog_private *catalog, 164 + u32 offset, u32 data) 165 + { 166 + writel_relaxed(data, catalog->io->ln_tx1_io.base + offset); 167 + } 168 + 169 + static inline u32 dp_read_ln_tx0(struct dp_catalog_private *catalog, 170 + u32 offset) 171 + { 172 + return readl_relaxed(catalog->io->ln_tx0_io.base + offset); 173 + } 174 + 175 + static inline u32 dp_read_ln_tx1(struct dp_catalog_private *catalog, 176 + u32 offset) 177 + { 178 + return readl_relaxed(catalog->io->ln_tx1_io.base + offset); 179 + } 180 + 181 + static inline void dp_write_usb_cm(struct dp_catalog_private *catalog, 182 + u32 offset, u32 data) 183 + { 184 + /* 185 + * To 
make sure usb reg writes happens before any other operation, 186 + * this function uses writel() instread of writel_relaxed() 187 + */ 188 + writel(data, catalog->io->usb3_dp_com.base + offset); 189 + } 190 + 191 + static inline u32 dp_read_usb_cm(struct dp_catalog_private *catalog, 192 + u32 offset) 193 + { 194 + /* 195 + * To make sure usb reg writes happens before any other operation, 196 + * this function uses writel() instread of writel_relaxed() 197 + */ 198 + return readl_relaxed(catalog->io->usb3_dp_com.base + offset); 199 + } 200 + 201 + static inline void dp_write_p0(struct dp_catalog_private *catalog, 202 + u32 offset, u32 data) 203 + { 204 + offset += MSM_DP_CONTROLLER_P0_OFFSET; 205 + /* 206 + * To make sure interface reg writes happens before any other operation, 207 + * this function uses writel() instread of writel_relaxed() 208 + */ 209 + writel(data, catalog->io->dp_controller.base + offset); 210 + } 211 + 212 + static inline u32 dp_read_p0(struct dp_catalog_private *catalog, 213 + u32 offset) 214 + { 215 + offset += MSM_DP_CONTROLLER_P0_OFFSET; 216 + /* 217 + * To make sure interface reg writes happens before any other operation, 218 + * this function uses writel() instread of writel_relaxed() 219 + */ 220 + return readl_relaxed(catalog->io->dp_controller.base + offset); 221 + } 222 + 223 + static inline u32 dp_read_link(struct dp_catalog_private *catalog, u32 offset) 224 + { 225 + offset += MSM_DP_CONTROLLER_LINK_OFFSET; 226 + return readl_relaxed(catalog->io->dp_controller.base + offset); 227 + } 228 + 229 + static inline void dp_write_link(struct dp_catalog_private *catalog, 230 + u32 offset, u32 data) 231 + { 232 + offset += MSM_DP_CONTROLLER_LINK_OFFSET; 233 + /* 234 + * To make sure link reg writes happens before any other operation, 235 + * this function uses writel() instread of writel_relaxed() 236 + */ 237 + writel(data, catalog->io->dp_controller.base + offset); 238 + } 239 + 240 + /* aux related catalog functions */ 241 + u32 
dp_catalog_aux_read_data(struct dp_catalog *dp_catalog) 242 + { 243 + struct dp_catalog_private *catalog = container_of(dp_catalog, 244 + struct dp_catalog_private, dp_catalog); 245 + 246 + return dp_read_aux(catalog, REG_DP_AUX_DATA); 247 + } 248 + 249 + int dp_catalog_aux_write_data(struct dp_catalog *dp_catalog) 250 + { 251 + struct dp_catalog_private *catalog = container_of(dp_catalog, 252 + struct dp_catalog_private, dp_catalog); 253 + 254 + dp_write_aux(catalog, REG_DP_AUX_DATA, dp_catalog->aux_data); 255 + return 0; 256 + } 257 + 258 + int dp_catalog_aux_write_trans(struct dp_catalog *dp_catalog) 259 + { 260 + struct dp_catalog_private *catalog = container_of(dp_catalog, 261 + struct dp_catalog_private, dp_catalog); 262 + 263 + dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, dp_catalog->aux_data); 264 + return 0; 265 + } 266 + 267 + int dp_catalog_aux_clear_trans(struct dp_catalog *dp_catalog, bool read) 268 + { 269 + u32 data; 270 + struct dp_catalog_private *catalog = container_of(dp_catalog, 271 + struct dp_catalog_private, dp_catalog); 272 + 273 + if (read) { 274 + data = dp_read_aux(catalog, REG_DP_AUX_TRANS_CTRL); 275 + data &= ~DP_AUX_TRANS_CTRL_GO; 276 + dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, data); 277 + } else { 278 + dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, 0); 279 + } 280 + return 0; 281 + } 282 + 283 + int dp_catalog_aux_clear_hw_interrupts(struct dp_catalog *dp_catalog) 284 + { 285 + struct dp_catalog_private *catalog = container_of(dp_catalog, 286 + struct dp_catalog_private, dp_catalog); 287 + 288 + dp_read_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_STATUS); 289 + dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0x1f); 290 + dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0x9f); 291 + dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0); 292 + return 0; 293 + } 294 + 295 + void dp_catalog_aux_reset(struct dp_catalog *dp_catalog) 296 + { 297 + u32 aux_ctrl; 298 + struct dp_catalog_private *catalog = container_of(dp_catalog, 
/*
 * ...tail of dp_catalog_aux_reset(): the function's signature and the start
 * of its container_of() expression lie above this view.  It pulses the RESET
 * bit in REG_DP_AUX_CTRL with the h/w mandated settle delay in between.
 */
		struct dp_catalog_private, dp_catalog);

	aux_ctrl = dp_read_aux(catalog, REG_DP_AUX_CTRL);

	aux_ctrl |= DP_AUX_CTRL_RESET;
	dp_write_aux(catalog, REG_DP_AUX_CTRL, aux_ctrl);
	usleep_range(1000, 1100); /* h/w recommended delay */

	aux_ctrl &= ~DP_AUX_CTRL_RESET;
	dp_write_aux(catalog, REG_DP_AUX_CTRL, aux_ctrl);
}

/*
 * Enable or disable the AUX controller.  On enable, the transaction timeout
 * count and AUX limits are first programmed to their maximum (0xffff).
 */
void dp_catalog_aux_enable(struct dp_catalog *dp_catalog, bool enable)
{
	u32 aux_ctrl;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	aux_ctrl = dp_read_aux(catalog, REG_DP_AUX_CTRL);

	if (enable) {
		dp_write_aux(catalog, REG_DP_TIMEOUT_COUNT, 0xffff);
		dp_write_aux(catalog, REG_DP_AUX_LIMITS, 0xffff);
		aux_ctrl |= DP_AUX_CTRL_ENABLE;
	} else {
		aux_ctrl &= ~DP_AUX_CTRL_ENABLE;
	}

	dp_write_aux(catalog, REG_DP_AUX_CTRL, aux_ctrl);
}

/*
 * Rotate the PHY AUX tuning value for @type to the next entry of its
 * per-config LUT (modulo DP_AUX_CFG_MAX_VALUE_CNT) and write it to the PHY.
 * Used to retune the AUX channel after transfer errors.
 */
void dp_catalog_aux_update_cfg(struct dp_catalog *dp_catalog,
		enum dp_phy_aux_config_type type)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	u32 new_index = 0, current_index = 0;

	if (type >= PHY_AUX_CFG_MAX) {
		DRM_ERROR("invalid input\n");
		return;
	}

	current_index = catalog->aux_lut_cfg_index[type];
	new_index = (current_index + 1) % DP_AUX_CFG_MAX_VALUE_CNT;
	DRM_DEBUG_DP("Updating PHY_AUX_CFG%d from 0x%08x to 0x%08x\n",
			type, aux_lut_value[type][current_index],
			aux_lut_value[type][new_index]);

	dp_write_phy(catalog, aux_lut_offset[type],
			aux_lut_value[type][new_index]);
	catalog->aux_lut_cfg_index[type] = new_index;
}

/*
 * Dump @len bytes of register space at @base, 16 bytes (four words) per
 * pr_info() line.  Debug helper for dp_catalog_dump_regs() only.
 */
static void dump_regs(void __iomem *base, int len)
{
	int i;
	u32 x0, x4, x8, xc;
	u32 addr_off = 0;

	len = DIV_ROUND_UP(len, 16);
	for (i = 0; i < len; i++) {
		x0 = readl_relaxed(base + addr_off);
		x4 = readl_relaxed(base + addr_off + 0x04);
		x8 = readl_relaxed(base + addr_off + 0x08);
		xc = readl_relaxed(base + addr_off + 0x0c);

		pr_info("%08x: %08x %08x %08x %08x", addr_off, x0, x4, x8, xc);
		addr_off += 16;
	}
}

/* Dump every DP controller/PHY register block to the kernel log. */
void dp_catalog_dump_regs(struct dp_catalog *dp_catalog)
{
	u32 offset, len;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
		struct dp_catalog_private, dp_catalog);

	pr_info("AHB regs\n");
	offset = MSM_DP_CONTROLLER_AHB_OFFSET;
	len = MSM_DP_CONTROLLER_AHB_SIZE;
	dump_regs(catalog->io->dp_controller.base + offset, len);

	pr_info("AUXCLK regs\n");
	offset = MSM_DP_CONTROLLER_AUX_OFFSET;
	len = MSM_DP_CONTROLLER_AUX_SIZE;
	dump_regs(catalog->io->dp_controller.base + offset, len);

	pr_info("LCLK regs\n");
	offset = MSM_DP_CONTROLLER_LINK_OFFSET;
	len = MSM_DP_CONTROLLER_LINK_SIZE;
	dump_regs(catalog->io->dp_controller.base + offset, len);

	pr_info("P0CLK regs\n");
	offset = MSM_DP_CONTROLLER_P0_OFFSET;
	len = MSM_DP_CONTROLLER_P0_SIZE;
	dump_regs(catalog->io->dp_controller.base + offset, len);

	pr_info("USB3 DP COM regs\n");
	dump_regs(catalog->io->usb3_dp_com.base, catalog->io->usb3_dp_com.len);

	pr_info("LN TX0 regs\n");
	dump_regs(catalog->io->ln_tx0_io.base, catalog->io->ln_tx0_io.len);

	pr_info("LN TX1 regs\n");
	dump_regs(catalog->io->ln_tx1_io.base, catalog->io->ln_tx1_io.len);

	pr_info("DP PHY regs\n");
	dump_regs(catalog->io->phy_io.base, catalog->io->phy_io.len);
}

/*
 * Bring up the PHY far enough for AUX transactions: sequence the PD_CTL
 * power-down bits, enable PLL bias, program the ten PHY_AUX_CFG tuning
 * registers from the LUTs, and unmask the PHY AUX error interrupts.
 * The write ordering below is the h/w bring-up sequence — do not reorder.
 */
void dp_catalog_aux_setup(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
			struct dp_catalog_private, dp_catalog);
	int i = 0;

	dp_write_phy(catalog, REG_DP_PHY_PD_CTL, DP_PHY_PD_CTL_PWRDN |
		DP_PHY_PD_CTL_AUX_PWRDN | DP_PHY_PD_CTL_PLL_PWRDN |
		DP_PHY_PD_CTL_DP_CLAMP_EN);

	/* Turn on BIAS current for PHY/PLL */
	dp_write_pll(catalog,
		QSERDES_COM_BIAS_EN_CLKBUFLR_EN, QSERDES_COM_BIAS_EN |
		QSERDES_COM_BIAS_EN_MUX | QSERDES_COM_CLKBUF_L_EN |
		QSERDES_COM_EN_SYSCLK_TX_SEL);

	dp_write_phy(catalog, REG_DP_PHY_PD_CTL, DP_PHY_PD_CTL_PSR_PWRDN);

	dp_write_phy(catalog, REG_DP_PHY_PD_CTL, DP_PHY_PD_CTL_PWRDN |
		DP_PHY_PD_CTL_AUX_PWRDN | DP_PHY_PD_CTL_LANE_0_1_PWRDN
		| DP_PHY_PD_CTL_LANE_2_3_PWRDN | DP_PHY_PD_CTL_PLL_PWRDN
		| DP_PHY_PD_CTL_DP_CLAMP_EN);

	dp_write_pll(catalog,
		QSERDES_COM_BIAS_EN_CLKBUFLR_EN, QSERDES_COM_BIAS_EN |
		QSERDES_COM_BIAS_EN_MUX | QSERDES_COM_CLKBUF_R_EN |
		QSERDES_COM_CLKBUF_L_EN | QSERDES_COM_EN_SYSCLK_TX_SEL |
		QSERDES_COM_CLKBUF_RX_DRIVE_L);

	/* DP AUX CFG register programming */
	for (i = 0; i < PHY_AUX_CFG_MAX; i++) {
		/* NOTE(review): "%ds" prints e.g. "PHY_AUX_CFG3s" — likely a
		 * typo for "%d:"; debug-only, cosmetic.
		 */
		DRM_DEBUG_DP("PHY_AUX_CFG%ds: offset=0x%08x, value=0x%08x\n",
			i, aux_lut_offset[i], aux_lut_value[i][0]);
		dp_write_phy(catalog, aux_lut_offset[i],
			aux_lut_value[i][0]);
	}

	dp_write_phy(catalog, REG_DP_PHY_AUX_INTERRUPT_MASK,
		PHY_AUX_STOP_ERR_MASK | PHY_AUX_DEC_ERR_MASK |
		PHY_AUX_SYNC_ERR_MASK | PHY_AUX_ALIGN_ERR_MASK |
		PHY_AUX_REQ_ERR_MASK);
}

/*
 * Read, acknowledge and return the pending AUX (STATUS1) interrupt bits.
 * The mask-enable bits read back from the register are stripped so only
 * raw status bits are returned; the ack write re-asserts the enable mask.
 * NOTE(review): correctness relies on DP_INTERRUPT_STATUS1_MASK covering
 * exactly the enable bits — confirm against dp_reg.h.
 */
int dp_catalog_aux_get_irq(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	u32 intr, intr_ack;

	intr = dp_read_ahb(catalog, REG_DP_INTR_STATUS);
	intr &= ~DP_INTERRUPT_STATUS1_MASK;
	intr_ack = (intr & DP_INTERRUPT_STATUS1)
			<< DP_INTERRUPT_STATUS_ACK_SHIFT;
	dp_write_ahb(catalog, REG_DP_INTR_STATUS, intr_ack |
			DP_INTERRUPT_STATUS1_MASK);

	return intr;

}

/* controller related catalog functions */

/* Program the transfer-unit size and the two valid-boundary registers. */
void dp_catalog_ctrl_update_transfer_unit(struct dp_catalog *dp_catalog,
				u32 dp_tu, u32 valid_boundary,
				u32 valid_boundary2)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	dp_write_link(catalog, REG_DP_VALID_BOUNDARY, valid_boundary);
	dp_write_link(catalog, REG_DP_TU, dp_tu);
	dp_write_link(catalog, REG_DP_VALID_BOUNDARY_2, valid_boundary2);
}

/* Write @state (e.g. push-idle, training pattern) to REG_DP_STATE_CTRL. */
void dp_catalog_ctrl_state_ctrl(struct dp_catalog *dp_catalog, u32 state)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	dp_write_link(catalog, REG_DP_STATE_CTRL, state);
}

/* Write the assembled configuration word to REG_DP_CONFIGURATION_CTRL. */
void dp_catalog_ctrl_config_ctrl(struct dp_catalog *dp_catalog, u32 cfg)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	DRM_DEBUG_DP("DP_CONFIGURATION_CTRL=0x%x\n", cfg);

	dp_write_link(catalog, REG_DP_CONFIGURATION_CTRL, cfg);
}

/* Program an identity logical-to-physical lane mapping (0->0 .. 3->3). */
void dp_catalog_ctrl_lane_mapping(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	u32 ln_0 = 0, ln_1 = 1, ln_2 = 2, ln_3 = 3; /* One-to-One mapping */
	u32 ln_mapping;

	ln_mapping = ln_0 << LANE0_MAPPING_SHIFT;
	ln_mapping |= ln_1 << LANE1_MAPPING_SHIFT;
	ln_mapping |= ln_2 << LANE2_MAPPING_SHIFT;
	ln_mapping |= ln_3 << LANE3_MAPPING_SHIFT;

	dp_write_link(catalog, REG_DP_LOGICAL2PHYSICAL_LANE_MAPPING,
			ln_mapping);
}

/*
 * Enable the mainlink via a reset pulse (set FB_BOUNDARY_SEL, assert and
 * deassert RESET, then set ENABLE), or clear only the ENABLE bit on disable.
 */
void dp_catalog_ctrl_mainlink_ctrl(struct dp_catalog *dp_catalog,
						bool enable)
{
	u32 mainlink_ctrl;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	if (enable) {
		/*
		 * To make sure link reg writes happens before other operation,
		 * dp_write_link() function uses writel()
		 */
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL,
				DP_MAINLINK_FB_BOUNDARY_SEL);
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL,
				DP_MAINLINK_FB_BOUNDARY_SEL |
				DP_MAINLINK_CTRL_RESET);
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL,
				DP_MAINLINK_FB_BOUNDARY_SEL);
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL,
				DP_MAINLINK_FB_BOUNDARY_SEL |
				DP_MAINLINK_CTRL_ENABLE);
	} else {
		mainlink_ctrl = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
		mainlink_ctrl &= ~DP_MAINLINK_CTRL_ENABLE;
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
	}
}

/*
 * Update MISC0/MISC1: clear then set the 3-bit test-bits-depth field,
 * OR in the colorimetry config, and force synchronous clock mode.
 */
void dp_catalog_ctrl_config_misc(struct dp_catalog *dp_catalog,
					u32 colorimetry_cfg,
					u32 test_bits_depth)
{
	u32 misc_val;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	misc_val = dp_read_link(catalog, REG_DP_MISC1_MISC0);

	/* clear bpp bits */
	misc_val &= ~(0x07 << DP_MISC0_TEST_BITS_DEPTH_SHIFT);
	misc_val |= colorimetry_cfg << DP_MISC0_COLORIMETRY_CFG_SHIFT;
	misc_val |= test_bits_depth << DP_MISC0_TEST_BITS_DEPTH_SHIFT;
	/* Configure clock to synchronous mode */
	misc_val |= DP_MISC0_SYNCHRONOUS_CLK;

	DRM_DEBUG_DP("misc settings = 0x%x\n", misc_val);
	dp_write_link(catalog, REG_DP_MISC1_MISC0, misc_val);
}

/*
 * Compute and program the MSA software MVID/NVID pair from the link rate
 * and pixel stream rate, using the PHY VCO divider and a rational
 * approximation of the rates.  NVID is scaled to at least
 * DP_LINK_CONSTANT_N_VALUE and doubled/tripled for HBR2/HBR3.
 * NOTE(review): the @fixed_nvid parameter is unused in this implementation.
 */
void dp_catalog_ctrl_config_msa(struct dp_catalog *dp_catalog,
					u32 rate, u32 stream_rate_khz,
					bool fixed_nvid)
{
	u32 pixel_m, pixel_n;
	u32 mvid, nvid, div, pixel_div = 0, dispcc_input_rate;
	u32 const nvid_fixed = DP_LINK_CONSTANT_N_VALUE;
	u32 const link_rate_hbr2 = 540000;
	u32 const link_rate_hbr3 = 810000;
	unsigned long den, num;

	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	div = dp_read_phy(catalog, REG_DP_PHY_VCO_DIV);
	div &= 0x03;

	if (div == 0)
		pixel_div = 6;
	else if (div == 1)
		pixel_div = 2;
	else if (div == 2)
		pixel_div = 4;
	else
		DRM_ERROR("Invalid pixel mux divider\n");

	dispcc_input_rate = (rate * 10) / pixel_div;

	rational_best_approximation(dispcc_input_rate, stream_rate_khz,
			(unsigned long)(1 << 16) - 1,
			(unsigned long)(1 << 16) - 1, &den, &num);

	den = ~(den - num);
	den = den & 0xFFFF;
	pixel_m = num;
	pixel_n = den;

	mvid = (pixel_m & 0xFFFF) * 5;
	nvid = (0xFFFF & (~pixel_n)) + (pixel_m & 0xFFFF);

	if (nvid < nvid_fixed) {
		u32 temp;

		temp = (nvid_fixed / nvid) * nvid;
		mvid = (nvid_fixed / nvid) * mvid;
		nvid = temp;
	}

	if (link_rate_hbr2 == rate)
		nvid *= 2;

	if (link_rate_hbr3 == rate)
		nvid *= 3;

	DRM_DEBUG_DP("mvid=0x%x, nvid=0x%x\n", mvid, nvid);
	dp_write_link(catalog, REG_DP_SOFTWARE_MVID, mvid);
	dp_write_link(catalog, REG_DP_SOFTWARE_NVID, nvid);
	dp_write_p0(catalog, MMSS_DP_DSC_DTO, 0x0);
}

/*
 * Request link training pattern @pattern (1-based) via STATE_CTRL and poll
 * REG_DP_MAINLINK_READY for the matching ready bit.
 * Returns 0 on success or the (negative) poll-timeout error.
 */
int dp_catalog_ctrl_set_pattern(struct dp_catalog *dp_catalog,
					u32 pattern)
{
	int bit, ret;
	u32 data;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	bit = BIT(pattern - 1);
	DRM_DEBUG_DP("hw: bit=%d train=%d\n", bit, pattern);
	dp_write_link(catalog, REG_DP_STATE_CTRL, bit);

	bit = BIT(pattern - 1) << DP_MAINLINK_READY_LINK_TRAINING_SHIFT;

	/* Poll for mainlink ready status */
	ret = readx_poll_timeout(readl, catalog->io->dp_controller.base +
					MSM_DP_CONTROLLER_LINK_OFFSET +
					REG_DP_MAINLINK_READY,
					data, data & bit,
					POLLING_SLEEP_US, POLLING_TIMEOUT_US);
	if (ret < 0) {
		DRM_ERROR("set pattern for link_train=%d failed\n", pattern);
		return ret;
	}
	return 0;
}

/*
 * Switch the combo USB3/DP PHY into DP mode: override both PHY resets,
 * select DP mode, pulse the COM software reset, program the Type-C port
 * select from @flip (CC1 default, CC2 when flipped), then power up and
 * release the reset overrides.  Sequence is h/w mandated — do not reorder.
 */
void dp_catalog_ctrl_usb_reset(struct dp_catalog *dp_catalog, bool flip)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	u32 typec_ctrl;

	dp_write_usb_cm(catalog, REG_USB3_DP_COM_RESET_OVRD_CTRL,
			USB3_DP_COM_OVRD_CTRL_SW_DPPHY_RESET_MUX |
			USB3_DP_COM_OVRD_CTRL_SW_USB3PHY_RESET_MUX);
	dp_write_usb_cm(catalog, REG_USB3_DP_COM_PHY_MODE_CTRL,
			USB3_DP_COM_PHY_MODE_DP);
	dp_write_usb_cm(catalog, REG_USB3_DP_COM_SW_RESET,
			USB3_DP_COM_SW_RESET_SET);

	/* Default configuration i.e CC1 */
	typec_ctrl = USB3_DP_COM_TYPEC_CTRL_PORTSEL_MUX;
	if (flip)
		typec_ctrl |= USB3_DP_COM_TYPEC_CTRL_PORTSEL;

	dp_write_usb_cm(catalog, REG_USB3_DP_COM_TYPEC_CTRL, typec_ctrl);

	dp_write_usb_cm(catalog, REG_USB3_DP_COM_SWI_CTRL, 0x00);
	dp_write_usb_cm(catalog, REG_USB3_DP_COM_SW_RESET, 0x00);

	dp_write_usb_cm(catalog, REG_USB3_DP_COM_POWER_DOWN_CTRL,
			USB3_DP_COM_POWER_DOWN_CTRL_SW_PWRDN);
	dp_write_usb_cm(catalog, REG_USB3_DP_COM_RESET_OVRD_CTRL, 0x00);

}

/* Pulse the controller software reset with the h/w settle delay. */
void dp_catalog_ctrl_reset(struct dp_catalog *dp_catalog)
{
	u32 sw_reset;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
			struct dp_catalog_private, dp_catalog);

	sw_reset = dp_read_ahb(catalog, REG_DP_SW_RESET);

	sw_reset |= DP_SW_RESET;
	dp_write_ahb(catalog, REG_DP_SW_RESET, sw_reset);
	usleep_range(1000, 1100); /* h/w recommended delay */

	sw_reset &= ~DP_SW_RESET;
	dp_write_ahb(catalog, REG_DP_SW_RESET, sw_reset);
}

/*
 * Poll REG_DP_MAINLINK_READY for the ready-for-video bit.
 * Returns true when the mainlink is ready, false on poll timeout.
 */
bool dp_catalog_ctrl_mainlink_ready(struct dp_catalog *dp_catalog)
{
	u32 data;
	int ret;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	/* Poll for mainlink ready status */
	ret = readl_poll_timeout(catalog->io->dp_controller.base +
					MSM_DP_CONTROLLER_LINK_OFFSET +
					REG_DP_MAINLINK_READY,
					data, data & DP_MAINLINK_READY_FOR_VIDEO,
					POLLING_SLEEP_US, POLLING_TIMEOUT_US);
	if (ret < 0) {
		DRM_ERROR("mainlink not ready\n");
		return false;
	}

	return true;
}

/* Unmask (enable) or mask (disable) both controller interrupt groups. */
void dp_catalog_ctrl_enable_irq(struct dp_catalog *dp_catalog,
						bool enable)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	if (enable) {
		dp_write_ahb(catalog, REG_DP_INTR_STATUS,
				DP_INTERRUPT_STATUS1_MASK);
		dp_write_ahb(catalog, REG_DP_INTR_STATUS2,
				DP_INTERRUPT_STATUS2_MASK);
	} else {
		dp_write_ahb(catalog, REG_DP_INTR_STATUS, 0x00);
		dp_write_ahb(catalog, REG_DP_INTR_STATUS2, 0x00);
	}
}

/*
 * Enable HPD detection: ack any stale plug/unplug/replug/irq events,
 * unmask them, program the reference timer and set the HPD enable bit.
 * On disable, only the HPD enable bit is cleared (masks are left as-is).
 */
void dp_catalog_ctrl_hpd_config(struct dp_catalog *dp_catalog, bool en)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	if (en) {
		u32 reftimer = dp_read_aux(catalog, REG_DP_DP_HPD_REFTIMER);

		dp_write_aux(catalog, REG_DP_DP_HPD_INT_ACK,
				DP_DP_HPD_PLUG_INT_ACK |
				DP_DP_IRQ_HPD_INT_ACK |
				DP_DP_HPD_REPLUG_INT_ACK |
				DP_DP_HPD_UNPLUG_INT_ACK);
		dp_write_aux(catalog, REG_DP_DP_HPD_INT_MASK,
				DP_DP_HPD_PLUG_INT_MASK |
				DP_DP_IRQ_HPD_INT_MASK |
				DP_DP_HPD_REPLUG_INT_MASK |
				DP_DP_HPD_UNPLUG_INT_MASK);

		/* Configure REFTIMER */
		reftimer |= REFTIMER_DEFAULT_VALUE;
		dp_write_aux(catalog, REG_DP_DP_HPD_REFTIMER, reftimer);
		/* Enable HPD */
		dp_write_aux(catalog, REG_DP_DP_HPD_CTRL,
				DP_DP_HPD_CTRL_HPD_EN);
	} else {
		/* Disable HPD */
		dp_write_aux(catalog, REG_DP_DP_HPD_CTRL, 0x0);
	}
}

/*
 * Read, acknowledge and return the pending STATUS2 interrupt bits,
 * mirroring dp_catalog_aux_get_irq() for the second interrupt group.
 */
int dp_catalog_ctrl_get_interrupt(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	u32 intr, intr_ack;

	intr = dp_read_ahb(catalog, REG_DP_INTR_STATUS2);
	intr &= ~DP_INTERRUPT_STATUS2_MASK;
	intr_ack = (intr & DP_INTERRUPT_STATUS2)
			<< DP_INTERRUPT_STATUS_ACK_SHIFT;
	dp_write_ahb(catalog, REG_DP_INTR_STATUS2,
			intr_ack | DP_INTERRUPT_STATUS2_MASK);

	return intr;
}

/* Pulse the PHY and PLL software resets with the h/w settle delay. */
void dp_catalog_ctrl_phy_reset(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
			struct dp_catalog_private, dp_catalog);

	dp_write_ahb(catalog, REG_DP_PHY_CTRL,
			DP_PHY_CTRL_SW_RESET_PLL | DP_PHY_CTRL_SW_RESET);
	usleep_range(1000, 1100); /* h/w recommended delay */
	dp_write_ahb(catalog, REG_DP_PHY_CTRL, 0x0);
}

/*
 * Publish lane count and connector orientation to the PHY scratch register
 * (SPARE0): low nibble = lane count, high nibble = BIT(0)/BIT(1) for
 * normal/flipped orientation.
 */
void dp_catalog_ctrl_phy_lane_cfg(struct dp_catalog *dp_catalog,
		bool flipped, u8 ln_cnt)
{
	u32 info;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
			struct dp_catalog_private, dp_catalog);
	u8 orientation = BIT(!!flipped);

	info = ln_cnt & DP_PHY_SPARE0_MASK;
	info |= (orientation & DP_PHY_SPARE0_MASK)
			<< DP_PHY_SPARE0_ORIENTATION_INFO_SHIFT;
	DRM_DEBUG_DP("Shared Info = 0x%x\n", info);

	dp_write_phy(catalog, REG_DP_PHY_SPARE0, info);
}

/*
 * Program voltage-swing and pre-emphasis for both TX lanes from the
 * HBR/RBR lookup tables.  0xFF in both tables marks an unsupported
 * combination and returns -EINVAL.
 * NOTE(review): the DRM_ERROR format string is missing its closing ')'.
 */
int dp_catalog_ctrl_update_vx_px(struct dp_catalog *dp_catalog,
		u8 v_level, u8 p_level)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
			struct dp_catalog_private, dp_catalog);
	u8 voltage_swing_cfg, pre_emphasis_cfg;

	DRM_DEBUG_DP("hw: v=%d p=%d\n", v_level, p_level);

	voltage_swing_cfg = vm_voltage_swing_hbr_rbr[v_level][p_level];
	pre_emphasis_cfg = vm_pre_emphasis_hbr_rbr[v_level][p_level];

	if (voltage_swing_cfg == 0xFF && pre_emphasis_cfg == 0xFF) {
		DRM_ERROR("invalid vx (0x%x=0x%x), px (0x%x=0x%x\n",
			v_level, voltage_swing_cfg, p_level, pre_emphasis_cfg);
		return -EINVAL;
	}

	/* Enable MUX to use Cursor values from these registers */
	voltage_swing_cfg |= DP_PHY_TXn_TX_DRV_LVL_MUX_EN;
	pre_emphasis_cfg |= DP_PHY_TXn_TX_EMP_POST1_LVL_MUX_EN;

	/* Configure host and panel only if both values are allowed */
	dp_write_ln_tx0(catalog, REG_DP_PHY_TXn_TX_DRV_LVL, voltage_swing_cfg);
	dp_write_ln_tx1(catalog, REG_DP_PHY_TXn_TX_DRV_LVL, voltage_swing_cfg);
	dp_write_ln_tx0(catalog, REG_DP_PHY_TXn_TX_EMP_POST1_LVL,
			pre_emphasis_cfg);
	dp_write_ln_tx1(catalog, REG_DP_PHY_TXn_TX_EMP_POST1_LVL,
			pre_emphasis_cfg);
	DRM_DEBUG_DP("hw: vx_value=0x%x px_value=0x%x\n",
			voltage_swing_cfg, pre_emphasis_cfg);

	return 0;
}

/*
 * Start the requested PHY compliance test pattern (DP CTS), clearing any
 * pattern currently running first.  Unknown patterns are only logged.
 */
void dp_catalog_ctrl_send_phy_pattern(struct dp_catalog *dp_catalog,
			u32 pattern)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	u32 value = 0x0;

	/* Make sure to clear the current pattern before starting a new one */
	dp_write_link(catalog, REG_DP_STATE_CTRL, 0x0);

	switch (pattern) {
	case DP_LINK_QUAL_PATTERN_D10_2:
		dp_write_link(catalog, REG_DP_STATE_CTRL,
				DP_STATE_CTRL_LINK_TRAINING_PATTERN1);
		return;
	case DP_LINK_QUAL_PATTERN_PRBS7:
		dp_write_link(catalog, REG_DP_STATE_CTRL,
				DP_STATE_CTRL_LINK_PRBS7);
		return;
	case DP_LINK_QUAL_PATTERN_80BIT_CUSTOM:
		dp_write_link(catalog, REG_DP_STATE_CTRL,
				DP_STATE_CTRL_LINK_TEST_CUSTOM_PATTERN);
		/* 00111110000011111000001111100000 */
		dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG0,
				0x3E0F83E0);
		/* 00001111100000111110000011111000 */
		dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG1,
				0x0F83E0F8);
		/* 1111100000111110 */
		dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG2,
				0x0000F83E);
		return;
	case DP_LINK_QUAL_PATTERN_HBR2_EYE:
	case DP_LINK_QUAL_PATTERN_ERROR_RATE:
		/* NOTE(review): value is already 0 here, so this clear is a
		 * no-op; kept for symmetry with the HBR2_EYE assignment.
		 */
		value &= ~DP_HBR2_ERM_PATTERN;
		if (pattern == DP_LINK_QUAL_PATTERN_HBR2_EYE)
			value = DP_HBR2_ERM_PATTERN;
		dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
				value);
		value |= SCRAMBLER_RESET_COUNT_VALUE;
		dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
				value);
		dp_write_link(catalog, REG_DP_MAINLINK_LEVELS,
				DP_MAINLINK_SAFE_TO_EXIT_LEVEL_2);
		dp_write_link(catalog, REG_DP_STATE_CTRL,
				DP_STATE_CTRL_LINK_SYMBOL_ERR_MEASURE);
		return;
	default:
		DRM_DEBUG_DP("No valid test pattern requested:0x%x\n", pattern);
		return;
	}
}

/* Return the raw REG_DP_MAINLINK_READY value (pattern status readback). */
u32 dp_catalog_ctrl_read_phy_pattern(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	return dp_read_link(catalog, REG_DP_MAINLINK_READY);
}

/* panel related catalog functions */

/*
 * Program the pre-packed timing words stored in the public struct
 * dp_catalog (total/sync_start/width_blanking/dp_active) into the link
 * timing registers.  Always returns 0.
 */
int dp_catalog_panel_timing_cfg(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	dp_write_link(catalog, REG_DP_TOTAL_HOR_VER,
				dp_catalog->total);
	dp_write_link(catalog, REG_DP_START_HOR_VER_FROM_SYNC,
				dp_catalog->sync_start);
	dp_write_link(catalog, REG_DP_HSYNC_VSYNC_WIDTH_POLARITY,
				dp_catalog->width_blanking);
	dp_write_link(catalog, REG_DP_ACTIVE_HOR_VER, dp_catalog->dp_active);
	return 0;
}

/*
 * Derive INTF timing values from @drm_mode, program the P0 timing engine
 * and start the built-in test pattern generator (checkered rectangle,
 * 8 bpp RGB).
 */
void dp_catalog_panel_tpg_enable(struct dp_catalog *dp_catalog,
				struct drm_display_mode *drm_mode)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	u32 hsync_period, vsync_period;
	u32 display_v_start, display_v_end;
	u32 hsync_start_x, hsync_end_x;
	u32 v_sync_width;
	u32 hsync_ctl;
	u32 display_hctl;

	/* TPG config parameters*/
	hsync_period = drm_mode->htotal;
	vsync_period = drm_mode->vtotal;

	display_v_start = ((drm_mode->vtotal - drm_mode->vsync_start) *
					hsync_period);
	display_v_end = ((vsync_period - (drm_mode->vsync_start -
					drm_mode->vdisplay))
					* hsync_period) - 1;

	display_v_start += drm_mode->htotal - drm_mode->hsync_start;
	display_v_end -= (drm_mode->hsync_start - drm_mode->hdisplay);

	hsync_start_x = drm_mode->htotal - drm_mode->hsync_start;
	hsync_end_x = hsync_period - (drm_mode->hsync_start -
					drm_mode->hdisplay) - 1;

	v_sync_width = drm_mode->vsync_end - drm_mode->vsync_start;

	hsync_ctl = (hsync_period << 16) |
			(drm_mode->hsync_end - drm_mode->hsync_start);
	display_hctl = (hsync_end_x << 16) | hsync_start_x;


	dp_write_p0(catalog, MMSS_DP_INTF_CONFIG, 0x0);
	dp_write_p0(catalog, MMSS_DP_INTF_HSYNC_CTL, hsync_ctl);
	dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PERIOD_F0, vsync_period *
			hsync_period);
	dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PULSE_WIDTH_F0, v_sync_width *
			hsync_period);
	dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PERIOD_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PULSE_WIDTH_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_HCTL, display_hctl);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_HCTL, 0);
	dp_write_p0(catalog, MMSS_INTF_DISPLAY_V_START_F0, display_v_start);
	dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_V_END_F0, display_v_end);
	dp_write_p0(catalog, MMSS_INTF_DISPLAY_V_START_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_V_END_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_START_F0, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_END_F0, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_START_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_END_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_POLARITY_CTL, 0);

	dp_write_p0(catalog, MMSS_DP_TPG_MAIN_CONTROL,
				DP_TPG_CHECKERED_RECT_PATTERN);
	dp_write_p0(catalog, MMSS_DP_TPG_VIDEO_CONFIG,
				DP_TPG_VIDEO_CONFIG_BPP_8BIT |
				DP_TPG_VIDEO_CONFIG_RGB);
	dp_write_p0(catalog, MMSS_DP_BIST_ENABLE,
				DP_BIST_ENABLE_DPBIST_EN);
	dp_write_p0(catalog, MMSS_DP_TIMING_ENGINE_EN,
				DP_TIMING_ENGINE_EN_EN);
	DRM_DEBUG_DP("%s: enabled tpg\n", __func__);
}

/* Stop the test pattern generator and the P0 timing engine. */
void dp_catalog_panel_tpg_disable(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	dp_write_p0(catalog, MMSS_DP_TPG_MAIN_CONTROL, 0x0);
	dp_write_p0(catalog, MMSS_DP_BIST_ENABLE, 0x0);
	dp_write_p0(catalog, MMSS_DP_TIMING_ENGINE_EN, 0x0);
}

/*
 * Allocate (devm-managed) the private catalog wrapper around @io and
 * return the embedded public struct dp_catalog, or an ERR_PTR on
 * invalid input / allocation failure.
 */
struct dp_catalog *dp_catalog_get(struct device *dev, struct dp_io *io)
{
	struct dp_catalog_private *catalog;

	if (!io) {
		DRM_ERROR("invalid input\n");
		return ERR_PTR(-EINVAL);
	}

	catalog = devm_kzalloc(dev, sizeof(*catalog), GFP_KERNEL);
	if (!catalog)
		return ERR_PTR(-ENOMEM);

	catalog->dev = dev;
	catalog->io = io;

	return &catalog->dp_catalog;
}
+102
drivers/gpu/drm/msm/dp/dp_catalog.h
/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (c) 2017-2020, The Linux Foundation. All rights reserved.
 */

#ifndef _DP_CATALOG_H_
#define _DP_CATALOG_H_

#include "dp_parser.h"

/* interrupts */
#define DP_INTR_HPD		BIT(0)
#define DP_INTR_AUX_I2C_DONE	BIT(3)
#define DP_INTR_WRONG_ADDR	BIT(6)
#define DP_INTR_TIMEOUT		BIT(9)
#define DP_INTR_NACK_DEFER	BIT(12)
#define DP_INTR_WRONG_DATA_CNT	BIT(15)
#define DP_INTR_I2C_NACK	BIT(18)
#define DP_INTR_I2C_DEFER	BIT(21)
#define DP_INTR_PLL_UNLOCKED	BIT(24)
#define DP_INTR_AUX_ERROR	BIT(27)

#define DP_INTR_READY_FOR_VIDEO		BIT(0)
#define DP_INTR_IDLE_PATTERN_SENT	BIT(3)
#define DP_INTR_FRAME_END		BIT(6)
#define DP_INTR_CRC_UPDATED		BIT(9)

/* number of tuning values per PHY_AUX_CFG LUT entry */
#define DP_AUX_CFG_MAX_VALUE_CNT 3

/* PHY AUX config registers */
enum dp_phy_aux_config_type {
	PHY_AUX_CFG0,
	PHY_AUX_CFG1,
	PHY_AUX_CFG2,
	PHY_AUX_CFG3,
	PHY_AUX_CFG4,
	PHY_AUX_CFG5,
	PHY_AUX_CFG6,
	PHY_AUX_CFG7,
	PHY_AUX_CFG8,
	PHY_AUX_CFG9,
	PHY_AUX_CFG_MAX,
};

/*
 * Public h/w catalog handle.  Callers fill in the packed timing words
 * below before dp_catalog_panel_timing_cfg() writes them to the link
 * timing registers; aux_data carries the AUX data word for the AUX
 * read/write APIs.
 */
struct dp_catalog {
	u32 aux_data;		/* AUX data word for read/write transactions */
	u32 total;		/* written to REG_DP_TOTAL_HOR_VER */
	u32 sync_start;		/* written to REG_DP_START_HOR_VER_FROM_SYNC */
	u32 width_blanking;	/* written to REG_DP_HSYNC_VSYNC_WIDTH_POLARITY */
	u32 dp_active;		/* written to REG_DP_ACTIVE_HOR_VER */
};

/* AUX APIs */
u32 dp_catalog_aux_read_data(struct dp_catalog *dp_catalog);
int dp_catalog_aux_write_data(struct dp_catalog *dp_catalog);
int dp_catalog_aux_write_trans(struct dp_catalog *dp_catalog);
int dp_catalog_aux_clear_trans(struct dp_catalog *dp_catalog, bool read);
int dp_catalog_aux_clear_hw_interrupts(struct dp_catalog *dp_catalog);
void dp_catalog_aux_reset(struct dp_catalog *dp_catalog);
void dp_catalog_aux_enable(struct dp_catalog *dp_catalog, bool enable);
void dp_catalog_aux_update_cfg(struct dp_catalog *dp_catalog,
		enum dp_phy_aux_config_type type);
void dp_catalog_aux_setup(struct dp_catalog *dp_catalog);
int dp_catalog_aux_get_irq(struct dp_catalog *dp_catalog);

/* DP Controller APIs */
void dp_catalog_ctrl_state_ctrl(struct dp_catalog *dp_catalog, u32 state);
void dp_catalog_ctrl_config_ctrl(struct dp_catalog *dp_catalog, u32 config);
void dp_catalog_ctrl_lane_mapping(struct dp_catalog *dp_catalog);
void dp_catalog_ctrl_mainlink_ctrl(struct dp_catalog *dp_catalog, bool enable);
void dp_catalog_ctrl_config_misc(struct dp_catalog *dp_catalog, u32 cc, u32 tb);
void dp_catalog_ctrl_config_msa(struct dp_catalog *dp_catalog, u32 rate,
		u32 stream_rate_khz, bool fixed_nvid);
int dp_catalog_ctrl_set_pattern(struct dp_catalog *dp_catalog, u32 pattern);
void dp_catalog_ctrl_reset(struct dp_catalog *dp_catalog);
void dp_catalog_ctrl_usb_reset(struct dp_catalog *dp_catalog, bool flip);
bool dp_catalog_ctrl_mainlink_ready(struct dp_catalog *dp_catalog);
void dp_catalog_ctrl_enable_irq(struct dp_catalog *dp_catalog, bool enable);
void dp_catalog_ctrl_hpd_config(struct dp_catalog *dp_catalog, bool enable);
void dp_catalog_ctrl_phy_reset(struct dp_catalog *dp_catalog);
void dp_catalog_ctrl_phy_lane_cfg(struct dp_catalog *dp_catalog, bool flipped,
		u8 lane_cnt);
int dp_catalog_ctrl_update_vx_px(struct dp_catalog *dp_catalog, u8 v_level,
		u8 p_level);
int dp_catalog_ctrl_get_interrupt(struct dp_catalog *dp_catalog);
void dp_catalog_ctrl_update_transfer_unit(struct dp_catalog *dp_catalog,
		u32 dp_tu, u32 valid_boundary,
		u32 valid_boundary2);
void dp_catalog_ctrl_send_phy_pattern(struct dp_catalog *dp_catalog,
		u32 pattern);
u32 dp_catalog_ctrl_read_phy_pattern(struct dp_catalog *dp_catalog);

/* DP Panel APIs */
int dp_catalog_panel_timing_cfg(struct dp_catalog *dp_catalog);
void dp_catalog_dump_regs(struct dp_catalog *dp_catalog);
void dp_catalog_panel_tpg_enable(struct dp_catalog *dp_catalog,
		struct drm_display_mode *drm_mode);
void dp_catalog_panel_tpg_disable(struct dp_catalog *dp_catalog);

struct dp_catalog *dp_catalog_get(struct device *dev, struct dp_io *io);

#endif /* _DP_CATALOG_H_ */
+1694
drivers/gpu/drm/msm/dp/dp_ctrl.c
··· 1 + // SPDX-License-Identifier: GPL-2.0-only 2 + /* 3 + * Copyright (c) 2012-2020, The Linux Foundation. All rights reserved. 4 + */ 5 + 6 + #define pr_fmt(fmt) "[drm-dp] %s: " fmt, __func__ 7 + 8 + #include <linux/types.h> 9 + #include <linux/completion.h> 10 + #include <linux/delay.h> 11 + #include <drm/drm_fixed.h> 12 + #include <drm/drm_dp_helper.h> 13 + 14 + #include "dp_reg.h" 15 + #include "dp_ctrl.h" 16 + #include "dp_link.h" 17 + 18 + #define DP_KHZ_TO_HZ 1000 19 + #define IDLE_PATTERN_COMPLETION_TIMEOUT_JIFFIES (30 * HZ / 1000) /* 30 ms */ 20 + #define WAIT_FOR_VIDEO_READY_TIMEOUT_JIFFIES (HZ / 2) 21 + 22 + #define DP_CTRL_INTR_READY_FOR_VIDEO BIT(0) 23 + #define DP_CTRL_INTR_IDLE_PATTERN_SENT BIT(3) 24 + 25 + #define MR_LINK_TRAINING1 0x8 26 + #define MR_LINK_SYMBOL_ERM 0x80 27 + #define MR_LINK_PRBS7 0x100 28 + #define MR_LINK_CUSTOM80 0x200 29 + 30 + struct dp_tu_calc_input { 31 + u64 lclk; /* 162, 270, 540 and 810 */ 32 + u64 pclk_khz; /* in KHz */ 33 + u64 hactive; /* active h-width */ 34 + u64 hporch; /* bp + fp + pulse */ 35 + int nlanes; /* no.of.lanes */ 36 + int bpp; /* bits */ 37 + int pixel_enc; /* 444, 420, 422 */ 38 + int dsc_en; /* dsc on/off */ 39 + int async_en; /* async mode */ 40 + int fec_en; /* fec */ 41 + int compress_ratio; /* 2:1 = 200, 3:1 = 300, 3.75:1 = 375 */ 42 + int num_of_dsc_slices; /* number of slices per line */ 43 + }; 44 + 45 + struct dp_vc_tu_mapping_table { 46 + u32 vic; 47 + u8 lanes; 48 + u8 lrate; /* DP_LINK_RATE -> 162(6), 270(10), 540(20), 810 (30) */ 49 + u8 bpp; 50 + u8 valid_boundary_link; 51 + u16 delay_start_link; 52 + bool boundary_moderation_en; 53 + u8 valid_lower_boundary_link; 54 + u8 upper_boundary_count; 55 + u8 lower_boundary_count; 56 + u8 tu_size_minus1; 57 + }; 58 + 59 + struct dp_ctrl_private { 60 + struct dp_ctrl dp_ctrl; 61 + 62 + struct device *dev; 63 + struct drm_dp_aux *aux; 64 + struct dp_panel *panel; 65 + struct dp_link *link; 66 + struct dp_power *power; 67 + struct dp_parser 
*parser; 68 + struct dp_catalog *catalog; 69 + 70 + struct completion idle_comp; 71 + struct mutex push_idle_mutex; 72 + struct completion video_comp; 73 + }; 74 + 75 + static int dp_aux_link_configure(struct drm_dp_aux *aux, 76 + struct dp_link_info *link) 77 + { 78 + u8 values[2]; 79 + int err; 80 + 81 + values[0] = drm_dp_link_rate_to_bw_code(link->rate); 82 + values[1] = link->num_lanes; 83 + 84 + if (link->capabilities & DP_LINK_CAP_ENHANCED_FRAMING) 85 + values[1] |= DP_LANE_COUNT_ENHANCED_FRAME_EN; 86 + 87 + err = drm_dp_dpcd_write(aux, DP_LINK_BW_SET, values, sizeof(values)); 88 + if (err < 0) 89 + return err; 90 + 91 + return 0; 92 + } 93 + 94 + void dp_ctrl_push_idle(struct dp_ctrl *dp_ctrl) 95 + { 96 + struct dp_ctrl_private *ctrl; 97 + 98 + ctrl = container_of(dp_ctrl, struct dp_ctrl_private, dp_ctrl); 99 + 100 + mutex_lock(&ctrl->push_idle_mutex); 101 + 102 + reinit_completion(&ctrl->idle_comp); 103 + dp_catalog_ctrl_state_ctrl(ctrl->catalog, DP_STATE_CTRL_PUSH_IDLE); 104 + 105 + if (!wait_for_completion_timeout(&ctrl->idle_comp, 106 + IDLE_PATTERN_COMPLETION_TIMEOUT_JIFFIES)) 107 + pr_warn("PUSH_IDLE pattern timedout\n"); 108 + 109 + mutex_unlock(&ctrl->push_idle_mutex); 110 + pr_debug("mainlink off done\n"); 111 + } 112 + 113 + static void dp_ctrl_config_ctrl(struct dp_ctrl_private *ctrl) 114 + { 115 + u32 config = 0, tbd; 116 + u8 *dpcd = ctrl->panel->dpcd; 117 + 118 + /* Default-> LSCLK DIV: 1/4 LCLK */ 119 + config |= (2 << DP_CONFIGURATION_CTRL_LSCLK_DIV_SHIFT); 120 + 121 + /* Scrambler reset enable */ 122 + if (dpcd[DP_EDP_CONFIGURATION_CAP] & DP_ALTERNATE_SCRAMBLER_RESET_CAP) 123 + config |= DP_CONFIGURATION_CTRL_ASSR; 124 + 125 + tbd = dp_link_get_test_bits_depth(ctrl->link, 126 + ctrl->panel->dp_mode.bpp); 127 + 128 + if (tbd == DP_TEST_BIT_DEPTH_UNKNOWN) { 129 + pr_debug("BIT_DEPTH not set. 
Configure default\n"); 130 + tbd = DP_TEST_BIT_DEPTH_8; 131 + } 132 + 133 + config |= tbd << DP_CONFIGURATION_CTRL_BPC_SHIFT; 134 + 135 + /* Num of Lanes */ 136 + config |= ((ctrl->link->link_params.num_lanes - 1) 137 + << DP_CONFIGURATION_CTRL_NUM_OF_LANES_SHIFT); 138 + 139 + if (drm_dp_enhanced_frame_cap(dpcd)) 140 + config |= DP_CONFIGURATION_CTRL_ENHANCED_FRAMING; 141 + 142 + config |= DP_CONFIGURATION_CTRL_P_INTERLACED; /* progressive video */ 143 + 144 + /* sync clock & static Mvid */ 145 + config |= DP_CONFIGURATION_CTRL_STATIC_DYNAMIC_CN; 146 + config |= DP_CONFIGURATION_CTRL_SYNC_ASYNC_CLK; 147 + 148 + dp_catalog_ctrl_config_ctrl(ctrl->catalog, config); 149 + } 150 + 151 + static void dp_ctrl_configure_source_params(struct dp_ctrl_private *ctrl) 152 + { 153 + u32 cc, tb; 154 + 155 + dp_catalog_ctrl_lane_mapping(ctrl->catalog); 156 + dp_catalog_ctrl_mainlink_ctrl(ctrl->catalog, true); 157 + 158 + dp_ctrl_config_ctrl(ctrl); 159 + 160 + tb = dp_link_get_test_bits_depth(ctrl->link, 161 + ctrl->panel->dp_mode.bpp); 162 + cc = dp_link_get_colorimetry_config(ctrl->link); 163 + dp_catalog_ctrl_config_misc(ctrl->catalog, cc, tb); 164 + dp_panel_timing_cfg(ctrl->panel); 165 + } 166 + 167 + /* 168 + * The structure and few functions present below are IP/Hardware 169 + * specific implementation. 
Most of the implementation will not
 * have coding comments
 */

/*
 * Scratch state for the transfer-unit (TU) computation below.  Members
 * with an *_fp suffix are 32.32 fixed-point values (drm_fixed.h); the
 * rest are plain integers/flags used by the search in _dp_ctrl_calc_tu()
 * and _tu_valid_boundary_calc().
 */
struct tu_algo_data {
	/* link clock, pixel clock and horizontal timings (inputs) */
	s64 lclk_fp;
	s64 pclk_fp;
	s64 lwidth;
	s64 lwidth_fp;
	s64 hbp_relative_to_pclk;
	s64 hbp_relative_to_pclk_fp;
	int nlanes;
	int bpp;
	int pixelEnc;
	int dsc_en;
	int async_en;
	int bpc;

	uint delay_start_link_extra_pixclk;
	int extra_buffer_margin;
	/* link-utilisation ratio and its value before any rescaling */
	s64 ratio_fp;
	s64 original_ratio_fp;

	/* error terms tracked while searching for the best TU size */
	s64 err_fp;
	s64 n_err_fp;
	s64 n_n_err_fp;
	int tu_size;
	int tu_size_desired;
	int tu_size_minus1;

	/* best candidate found so far (outputs) */
	int valid_boundary_link;
	s64 resulting_valid_fp;
	s64 total_valid_fp;
	s64 effective_valid_fp;
	s64 effective_valid_recorded_fp;
	int n_tus;
	int n_tus_per_lane;
	int paired_tus;
	int remainder_tus;
	int remainder_tus_upper;
	int remainder_tus_lower;
	int extra_bytes;
	int filler_size;
	int delay_start_link;

	int extra_pclk_cycles;
	int extra_pclk_cycles_in_link_clk;
	s64 ratio_by_tu_fp;
	s64 average_valid2_fp;
	int new_valid_boundary_link;
	int remainder_symbols_exist;
	int n_symbols;
	s64 n_remainder_symbols_per_lane_fp;
	s64 last_partial_tu_fp;
	s64 TU_ratio_err_fp;

	/* per-candidate temporaries for the brute-force search */
	int n_tus_incl_last_incomplete_tu;
	int extra_pclk_cycles_tmp;
	int extra_pclk_cycles_in_link_clk_tmp;
	int extra_required_bytes_new_tmp;
	int filler_size_tmp;
	int lower_filler_size_tmp;
	int delay_start_link_tmp;

	/* boundary-moderation bookkeeping */
	bool boundary_moderation_en;
	int boundary_mod_lower_err;
	int upper_boundary_count;
	int lower_boundary_count;
	int i_upper_boundary_count;
	int i_lower_boundary_count;
	int valid_lower_boundary_link;
	int even_distribution_BF;
	int even_distribution_legacy;
	int even_distribution;
	int min_hblank_violated;
	s64 delay_start_time_fp;
	s64 hbp_time_fp;
	s64 hactive_time_fp;
	s64 diff_abs_fp;

	s64 ratio;
};

/*
 * Three-way compare of two 32.32 fixed-point values.
 * Returns 0 when a == b, 1 when a > b, 2 when a < b.
 * The sign is taken from bit 63, examined via the upper word.
 */
static int _tu_param_compare(s64 a, s64 b)
{
	u32 a_sign;
	u32 b_sign;
	s64 a_temp, b_temp, minus_1;

	if (a == b)
		return 0;

	minus_1 = drm_fixp_from_fraction(-1, 1);

	a_sign = (a >> 32) & 0x80000000 ? 1 : 0;

	b_sign = (b >> 32) & 0x80000000 ? 1 : 0;

	if (a_sign > b_sign)
		return 2;
	else if (b_sign > a_sign)
		return 1;

	if (!a_sign && !b_sign) { /* positive */
		if (a > b)
			return 1;
		else
			return 2;
	} else { /* negative */
		/* compare magnitudes; the more negative value is smaller */
		a_temp = drm_fixp_mul(a, minus_1);
		b_temp = drm_fixp_mul(b, minus_1);

		if (a_temp > b_temp)
			return 2;
		else
			return 1;
	}
}

/*
 * Seed the TU algorithm state from the caller-provided timings and,
 * when DSC/FEC are enabled, rescale pixel clock, active width and back
 * porch to their compressed-stream equivalents.
 */
static void dp_panel_update_tu_timings(struct dp_tu_calc_input *in,
					struct tu_algo_data *tu)
{
	int nlanes = in->nlanes;
	int dsc_num_slices = in->num_of_dsc_slices;
	int dsc_num_bytes = 0;
	int numerator;
	s64 pclk_dsc_fp;
	s64 dwidth_dsc_fp;
	s64 hbp_dsc_fp;

	int tot_num_eoc_symbols = 0;
	int tot_num_hor_bytes = 0;
	int tot_num_dummy_bytes = 0;
	int dwidth_dsc_bytes = 0;
	int eoc_bytes = 0;

	s64 temp1_fp, temp2_fp, temp3_fp;

	tu->lclk_fp = drm_fixp_from_fraction(in->lclk, 1);
	tu->pclk_fp = drm_fixp_from_fraction(in->pclk_khz, 1000);
	tu->lwidth = in->hactive;
	tu->hbp_relative_to_pclk = in->hporch;
	tu->nlanes = in->nlanes;
	tu->bpp = in->bpp;
	tu->pixelEnc = in->pixel_enc;
	tu->dsc_en = in->dsc_en;
	tu->async_en = in->async_en;
	tu->lwidth_fp = drm_fixp_from_fraction(in->hactive, 1);
	tu->hbp_relative_to_pclk_fp = drm_fixp_from_fraction(in->hporch, 1);

	if (tu->pixelEnc == 420) {
		/* 4:2:0 carries half the pixel rate/width on the link */
		temp1_fp = drm_fixp_from_fraction(2, 1);
		tu->pclk_fp = drm_fixp_div(tu->pclk_fp, temp1_fp);
		tu->lwidth_fp = drm_fixp_div(tu->lwidth_fp, temp1_fp);
		/*
		 * NOTE(review): divides by the plain integer 2 rather than
		 * the fixed-point temp1_fp used just above — confirm this is
		 * intended.  The only visible caller passes pixel_enc = 444,
		 * so this path is not exercised from this file chunk.
		 */
		tu->hbp_relative_to_pclk_fp =
				drm_fixp_div(tu->hbp_relative_to_pclk_fp, 2);
	}

	if (tu->pixelEnc == 422) {
		/* map the RGB bpp value to its 4:2:2 equivalent (2/3) */
		switch (tu->bpp) {
		case 24:
			tu->bpp = 16;
			tu->bpc = 8;
			break;
		case 30:
			tu->bpp = 20;
			tu->bpc = 10;
			break;
		default:
			tu->bpp = 16;
			tu->bpc = 8;
			break;
		}
	} else {
		tu->bpc = tu->bpp/3;
	}

	if (!in->dsc_en)
		goto fec_check;

	/* compressed line size in bytes: hactive * bpp / compress_ratio / 8 */
	temp1_fp = drm_fixp_from_fraction(in->compress_ratio, 100);
	temp2_fp = drm_fixp_from_fraction(in->bpp, 1);
	temp3_fp = drm_fixp_div(temp2_fp, temp1_fp);
	temp2_fp = drm_fixp_mul(tu->lwidth_fp, temp3_fp);

	temp1_fp = drm_fixp_from_fraction(8, 1);
	temp3_fp = drm_fixp_div(temp2_fp, temp1_fp);

	numerator = drm_fixp2int(temp3_fp);

	dsc_num_bytes = numerator / dsc_num_slices;
	eoc_bytes = dsc_num_bytes % nlanes;
	tot_num_eoc_symbols = nlanes * dsc_num_slices;
	tot_num_hor_bytes = dsc_num_bytes * dsc_num_slices;
	tot_num_dummy_bytes = (nlanes - eoc_bytes) * dsc_num_slices;

	if (dsc_num_bytes == 0)
		pr_info("incorrect no of bytes per slice=%d\n", dsc_num_bytes);

	dwidth_dsc_bytes = (tot_num_hor_bytes +
				tot_num_eoc_symbols +
				(eoc_bytes == 0 ? 0 : tot_num_dummy_bytes));

	dwidth_dsc_fp = drm_fixp_from_fraction(dwidth_dsc_bytes, 3);

	/* effective pixel clock scaled by compressed/uncompressed width */
	temp2_fp = drm_fixp_mul(tu->pclk_fp, dwidth_dsc_fp);
	temp1_fp = drm_fixp_div(temp2_fp, tu->lwidth_fp);
	pclk_dsc_fp = temp1_fp;

	temp1_fp = drm_fixp_div(pclk_dsc_fp, tu->pclk_fp);
	temp2_fp = drm_fixp_mul(tu->hbp_relative_to_pclk_fp, temp1_fp);
	hbp_dsc_fp = temp2_fp;

	/* output */
	tu->pclk_fp = pclk_dsc_fp;
	tu->lwidth_fp = dwidth_dsc_fp;
	tu->hbp_relative_to_pclk_fp = hbp_dsc_fp;

fec_check:
	if (in->fec_en) {
		/* FEC overhead: effective link clock is 0.976 of nominal */
		temp1_fp = drm_fixp_from_fraction(976, 1000); /* 0.976 */
		tu->lclk_fp = drm_fixp_mul(tu->lclk_fp, temp1_fp);
	}
}

/*
 * One step of the brute-force search: for the candidate tu_size and
 * i_upper/i_lower boundary counts held in *tu, compute the resulting
 * valid-boundary value, the per-lane TU distribution and the error
 * terms, then latch the candidate into the "best so far" fields when it
 * satisfies the distribution/hblank/delay constraints with a smaller
 * error than the previous best.
 */
static void _tu_valid_boundary_calc(struct tu_algo_data *tu)
{
	s64 temp1_fp, temp2_fp, temp, temp1, temp2;
	int compare_result_1, compare_result_2, compare_result_3;

	temp1_fp = drm_fixp_from_fraction(tu->tu_size, 1);
	temp2_fp = drm_fixp_mul(tu->ratio_fp, temp1_fp);

	tu->new_valid_boundary_link = drm_fixp2int_ceil(temp2_fp);

	/* weighted average of upper/lower boundary values */
	temp = (tu->i_upper_boundary_count *
				tu->new_valid_boundary_link +
				tu->i_lower_boundary_count *
				(tu->new_valid_boundary_link-1));
	tu->average_valid2_fp = drm_fixp_from_fraction(temp,
					(tu->i_upper_boundary_count +
					tu->i_lower_boundary_count));

	temp1_fp = drm_fixp_from_fraction(tu->bpp, 8);
	temp2_fp = tu->lwidth_fp;
	temp1_fp = drm_fixp_mul(temp2_fp, temp1_fp);
	temp2_fp = drm_fixp_div(temp1_fp, tu->average_valid2_fp);
	tu->n_tus = drm_fixp2int(temp2_fp);
	/* round up when the fraction is within 2^-12 of the next integer */
	if ((temp2_fp & 0xFFFFFFFF) > 0xFFFFF000)
		tu->n_tus += 1;

	temp1_fp = drm_fixp_from_fraction(tu->n_tus, 1);
	temp2_fp = drm_fixp_mul(temp1_fp, tu->average_valid2_fp);
	temp1_fp = drm_fixp_from_fraction(tu->n_symbols, 1);
	temp2_fp = temp1_fp - temp2_fp;
	temp1_fp = drm_fixp_from_fraction(tu->nlanes, 1);
	temp2_fp = drm_fixp_div(temp2_fp, temp1_fp);
	tu->n_remainder_symbols_per_lane_fp = temp2_fp;

	temp1_fp = drm_fixp_from_fraction(tu->tu_size, 1);
	tu->last_partial_tu_fp =
			drm_fixp_div(tu->n_remainder_symbols_per_lane_fp,
					temp1_fp);

	if (tu->n_remainder_symbols_per_lane_fp != 0)
		tu->remainder_symbols_exist = 1;
	else
		tu->remainder_symbols_exist = 0;

	temp1_fp = drm_fixp_from_fraction(tu->n_tus, tu->nlanes);
	tu->n_tus_per_lane = drm_fixp2int(temp1_fp);

	/* split TUs into upper/lower boundary pairs plus a remainder */
	tu->paired_tus = (int)((tu->n_tus_per_lane) /
				(tu->i_upper_boundary_count +
				 tu->i_lower_boundary_count));

	tu->remainder_tus = tu->n_tus_per_lane - tu->paired_tus *
				(tu->i_upper_boundary_count +
				tu->i_lower_boundary_count);

	if ((tu->remainder_tus - tu->i_upper_boundary_count) > 0) {
		tu->remainder_tus_upper = tu->i_upper_boundary_count;
		tu->remainder_tus_lower = tu->remainder_tus -
					tu->i_upper_boundary_count;
	} else {
		tu->remainder_tus_upper = tu->remainder_tus;
		tu->remainder_tus_lower = 0;
	}

	temp = tu->paired_tus * (tu->i_upper_boundary_count *
				tu->new_valid_boundary_link +
				tu->i_lower_boundary_count *
				(tu->new_valid_boundary_link - 1)) +
				(tu->remainder_tus_upper *
				 tu->new_valid_boundary_link) +
				(tu->remainder_tus_lower *
				(tu->new_valid_boundary_link - 1));
	tu->total_valid_fp = drm_fixp_from_fraction(temp, 1);

	/* effective valid symbols per TU, including any partial last TU */
	if (tu->remainder_symbols_exist) {
		temp1_fp = tu->total_valid_fp +
				tu->n_remainder_symbols_per_lane_fp;
		temp2_fp = drm_fixp_from_fraction(tu->n_tus_per_lane, 1);
		temp2_fp = temp2_fp + tu->last_partial_tu_fp;
		temp1_fp = drm_fixp_div(temp1_fp, temp2_fp);
	} else {
		temp2_fp = drm_fixp_from_fraction(tu->n_tus_per_lane, 1);
		temp1_fp = drm_fixp_div(tu->total_valid_fp, temp2_fp);
	}
	tu->effective_valid_fp = temp1_fp;

	/* error of the effective and the average valid counts vs. the ideal */
	temp1_fp = drm_fixp_from_fraction(tu->tu_size, 1);
	temp2_fp = drm_fixp_mul(tu->ratio_fp, temp1_fp);
	tu->n_n_err_fp = tu->effective_valid_fp - temp2_fp;

	temp1_fp = drm_fixp_from_fraction(tu->tu_size, 1);
	temp2_fp = drm_fixp_mul(tu->ratio_fp, temp1_fp);
	tu->n_err_fp = tu->average_valid2_fp - temp2_fp;

	tu->even_distribution = tu->n_tus % tu->nlanes == 0 ? 1 : 0;

	temp1_fp = drm_fixp_from_fraction(tu->bpp, 8);
	temp2_fp = tu->lwidth_fp;
	temp1_fp = drm_fixp_mul(temp2_fp, temp1_fp);
	temp2_fp = drm_fixp_div(temp1_fp, tu->average_valid2_fp);

	if (temp2_fp)
		tu->n_tus_incl_last_incomplete_tu = drm_fixp2int_ceil(temp2_fp);
	else
		tu->n_tus_incl_last_incomplete_tu = 0;

	/* extra bytes needed to absorb the accumulated per-TU error */
	temp1 = 0;
	temp1_fp = drm_fixp_from_fraction(tu->tu_size, 1);
	temp2_fp = drm_fixp_mul(tu->original_ratio_fp, temp1_fp);
	temp1_fp = tu->average_valid2_fp - temp2_fp;
	temp2_fp = drm_fixp_from_fraction(tu->n_tus_incl_last_incomplete_tu, 1);
	temp1_fp = drm_fixp_mul(temp2_fp, temp1_fp);

	if (temp1_fp)
		temp1 = drm_fixp2int_ceil(temp1_fp);

	temp = tu->i_upper_boundary_count * tu->nlanes;
	temp1_fp = drm_fixp_from_fraction(tu->tu_size, 1);
	temp2_fp = drm_fixp_mul(tu->original_ratio_fp, temp1_fp);
	temp1_fp = drm_fixp_from_fraction(tu->new_valid_boundary_link, 1);
	temp2_fp = temp1_fp - temp2_fp;
	temp1_fp = drm_fixp_from_fraction(temp, 1);
	temp2_fp = drm_fixp_mul(temp1_fp, temp2_fp);

	if (temp2_fp)
		temp2 = drm_fixp2int_ceil(temp2_fp);
	else
		temp2 = 0;
	tu->extra_required_bytes_new_tmp = (int)(temp1 + temp2);

	/* translate extra bytes into pixel-clock then link-clock cycles */
	temp1_fp = drm_fixp_from_fraction(8, tu->bpp);
	temp2_fp = drm_fixp_from_fraction(
			tu->extra_required_bytes_new_tmp, 1);
	temp1_fp = drm_fixp_mul(temp2_fp, temp1_fp);

	if (temp1_fp)
		tu->extra_pclk_cycles_tmp = drm_fixp2int_ceil(temp1_fp);
	else
		tu->extra_pclk_cycles_tmp = 0;

	temp1_fp = drm_fixp_from_fraction(tu->extra_pclk_cycles_tmp, 1);
	temp2_fp = drm_fixp_div(tu->lclk_fp, tu->pclk_fp);
	temp1_fp = drm_fixp_mul(temp1_fp, temp2_fp);

	if (temp1_fp)
		tu->extra_pclk_cycles_in_link_clk_tmp =
						drm_fixp2int_ceil(temp1_fp);
	else
		tu->extra_pclk_cycles_in_link_clk_tmp = 0;

	tu->filler_size_tmp = tu->tu_size - tu->new_valid_boundary_link;

	tu->lower_filler_size_tmp = tu->filler_size_tmp + 1;

	tu->delay_start_link_tmp = tu->extra_pclk_cycles_in_link_clk_tmp +
					tu->lower_filler_size_tmp +
					tu->extra_buffer_margin;

	temp1_fp = drm_fixp_from_fraction(tu->delay_start_link_tmp, 1);
	tu->delay_start_time_fp = drm_fixp_div(temp1_fp, tu->lclk_fp);

	/* 1 when n_n_err < diff_abs (candidate error beats the target) */
	compare_result_1 = _tu_param_compare(tu->n_n_err_fp, tu->diff_abs_fp);
	if (compare_result_1 == 2)
		compare_result_1 = 1;
	else
		compare_result_1 = 0;

	/* 1 when n_n_err < err (beats the best error so far) */
	compare_result_2 = _tu_param_compare(tu->n_n_err_fp, tu->err_fp);
	if (compare_result_2 == 2)
		compare_result_2 = 1;
	else
		compare_result_2 = 0;

	/* 1 when the start delay fits inside the available hbp time */
	compare_result_3 = _tu_param_compare(tu->hbp_time_fp,
					tu->delay_start_time_fp);
	if (compare_result_3 == 2)
		compare_result_3 = 0;
	else
		compare_result_3 = 1;

	if (((tu->even_distribution == 1) ||
			((tu->even_distribution_BF == 0) &&
			(tu->even_distribution_legacy == 0))) &&
			tu->n_err_fp >= 0 && tu->n_n_err_fp >= 0 &&
			compare_result_2 &&
			(compare_result_1 || (tu->min_hblank_violated == 1)) &&
			(tu->new_valid_boundary_link - 1) > 0 &&
			compare_result_3 &&
			(tu->delay_start_link_tmp <= 1023)) {
		/* candidate accepted: record it as the new best */
		tu->upper_boundary_count = tu->i_upper_boundary_count;
		tu->lower_boundary_count = tu->i_lower_boundary_count;
		tu->err_fp = tu->n_n_err_fp;
		tu->boundary_moderation_en = true;
		tu->tu_size_desired = tu->tu_size;
		tu->valid_boundary_link = tu->new_valid_boundary_link;
		tu->effective_valid_recorded_fp = tu->effective_valid_fp;
		tu->even_distribution_BF = 1;
		tu->delay_start_link = tu->delay_start_link_tmp;
	} else if (tu->boundary_mod_lower_err == 0) {
		compare_result_1 = _tu_param_compare(tu->n_n_err_fp,
							tu->diff_abs_fp);
		if (compare_result_1 == 2)
			tu->boundary_mod_lower_err = 1;
	}
}

/*
 * Compute the DP transfer-unit (TU) parameters for the given link and
 * stream configuration.  A first pass picks the TU size (32..64) that
 * minimises the per-TU symbol rounding error; if the resulting solution
 * violates the hblank/ratio constraints (or brute force is enabled), a
 * search over (tu_size, upper-count, lower-count) boundary-moderation
 * candidates is run via _tu_valid_boundary_calc().  Results are written
 * into *tu_table.
 */
static void _dp_ctrl_calc_tu(struct dp_tu_calc_input *in,
				   struct dp_vc_tu_mapping_table *tu_table)
{
	struct tu_algo_data tu;
	int compare_result_1, compare_result_2;
	u64 temp = 0;
	s64 temp_fp = 0, temp1_fp = 0, temp2_fp = 0;

	s64 LCLK_FAST_SKEW_fp = drm_fixp_from_fraction(6, 10000); /* 0.0006 */
	s64 const_p49_fp = drm_fixp_from_fraction(49, 100); /* 0.49 */
	s64 const_p56_fp = drm_fixp_from_fraction(56, 100); /* 0.56 */
	s64 RATIO_SCALE_fp = drm_fixp_from_fraction(1001, 1000);

	u8 DP_BRUTE_FORCE = 1;
	s64 BRUTE_FORCE_THRESHOLD_fp = drm_fixp_from_fraction(1, 10); /* 0.1 */
	uint EXTRA_PIXCLK_CYCLE_DELAY = 4;
	uint HBLANK_MARGIN = 4;

	memset(&tu, 0, sizeof(tu));

	dp_panel_update_tu_timings(in, &tu);

	tu.err_fp = drm_fixp_from_fraction(1000, 1); /* 1000 */

	/* extra_buffer_margin = ceil(4 * lclk / pclk) */
	temp1_fp = drm_fixp_from_fraction(4, 1);
	temp2_fp = drm_fixp_mul(temp1_fp, tu.lclk_fp);
	temp_fp = drm_fixp_div(temp2_fp, tu.pclk_fp);
	tu.extra_buffer_margin = drm_fixp2int_ceil(temp_fp);

	/* link utilisation ratio = (pclk * bpp/8) / (nlanes * lclk) */
	temp1_fp = drm_fixp_from_fraction(tu.bpp, 8);
	temp2_fp = drm_fixp_mul(tu.pclk_fp, temp1_fp);
	temp1_fp = drm_fixp_from_fraction(tu.nlanes, 1);
	temp2_fp = drm_fixp_div(temp2_fp, temp1_fp);
	tu.ratio_fp = drm_fixp_div(temp2_fp, tu.lclk_fp);

	tu.original_ratio_fp = tu.ratio_fp;
	tu.boundary_moderation_en = false;
	tu.upper_boundary_count = 0;
	tu.lower_boundary_count = 0;
	tu.i_upper_boundary_count = 0;
	tu.i_lower_boundary_count = 0;
	tu.valid_lower_boundary_link = 0;
	tu.even_distribution_BF = 0;
	tu.even_distribution_legacy = 0;
	tu.even_distribution = 0;
	tu.delay_start_time_fp = 0;

	tu.err_fp = drm_fixp_from_fraction(1000, 1);
	tu.n_err_fp = 0;
	tu.n_n_err_fp = 0;

	/*
	 * if hactive is not a multiple of nlanes and the ratio is below 1,
	 * nudge the ratio up by 0.1% (clamped to 1.0)
	 */
	tu.ratio = drm_fixp2int(tu.ratio_fp);
	temp1_fp = drm_fixp_from_fraction(tu.nlanes, 1);
	div64_u64_rem(tu.lwidth_fp, temp1_fp, &temp2_fp);
	if (temp2_fp != 0 &&
			!tu.ratio && tu.dsc_en == 0) {
		tu.ratio_fp = drm_fixp_mul(tu.ratio_fp, RATIO_SCALE_fp);
		tu.ratio = drm_fixp2int(tu.ratio_fp);
		if (tu.ratio)
			tu.ratio_fp = drm_fixp_from_fraction(1, 1);
	}

	if (tu.ratio > 1)
		tu.ratio = 1;

	if (tu.ratio == 1)
		goto tu_size_calc;

	/* 1 when ratio >= 0.49 */
	compare_result_1 = _tu_param_compare(tu.ratio_fp, const_p49_fp);
	if (!compare_result_1 || compare_result_1 == 1)
		compare_result_1 = 1;
	else
		compare_result_1 = 0;

	/* 1 when ratio <= 0.56 */
	compare_result_2 = _tu_param_compare(tu.ratio_fp, const_p56_fp);
	if (!compare_result_2 || compare_result_2 == 2)
		compare_result_2 = 1;
	else
		compare_result_2 = 0;

	if (tu.dsc_en && compare_result_1 && compare_result_2) {
		HBLANK_MARGIN += 4;
		DRM_DEBUG_DP("Info: increase HBLANK_MARGIN to %d\n",
				HBLANK_MARGIN);
	}

tu_size_calc:
	/* pick the TU size with the smallest ceil() rounding error */
	for (tu.tu_size = 32; tu.tu_size <= 64; tu.tu_size++) {
		temp1_fp = drm_fixp_from_fraction(tu.tu_size, 1);
		temp2_fp = drm_fixp_mul(tu.ratio_fp, temp1_fp);
		temp = drm_fixp2int_ceil(temp2_fp);
		temp1_fp = drm_fixp_from_fraction(temp, 1);
		tu.n_err_fp = temp1_fp - temp2_fp;

		if (tu.n_err_fp < tu.err_fp) {
			tu.err_fp = tu.n_err_fp;
			tu.tu_size_desired = tu.tu_size;
		}
	}

	tu.tu_size_minus1 = tu.tu_size_desired - 1;

	temp1_fp = drm_fixp_from_fraction(tu.tu_size_desired, 1);
	temp2_fp = drm_fixp_mul(tu.ratio_fp, temp1_fp);
	tu.valid_boundary_link = drm_fixp2int_ceil(temp2_fp);

	temp1_fp = drm_fixp_from_fraction(tu.bpp, 8);
	temp2_fp = tu.lwidth_fp;
	temp2_fp = drm_fixp_mul(temp2_fp, temp1_fp);

	temp1_fp = drm_fixp_from_fraction(tu.valid_boundary_link, 1);
	temp2_fp = drm_fixp_div(temp2_fp, temp1_fp);
	tu.n_tus = drm_fixp2int(temp2_fp);
	/* round up when the fraction is within 2^-12 of the next integer */
	if ((temp2_fp & 0xFFFFFFFF) > 0xFFFFF000)
		tu.n_tus += 1;

	tu.even_distribution_legacy = tu.n_tus % tu.nlanes == 0 ? 1 : 0;
	DRM_DEBUG_DP("Info: n_sym = %d, num_of_tus = %d\n",
		tu.valid_boundary_link, tu.n_tus);

	/* accumulated rounding error over all TUs, in bytes */
	temp1_fp = drm_fixp_from_fraction(tu.tu_size_desired, 1);
	temp2_fp = drm_fixp_mul(tu.original_ratio_fp, temp1_fp);
	temp1_fp = drm_fixp_from_fraction(tu.valid_boundary_link, 1);
	temp2_fp = temp1_fp - temp2_fp;
	temp1_fp = drm_fixp_from_fraction(tu.n_tus + 1, 1);
	temp2_fp = drm_fixp_mul(temp1_fp, temp2_fp);

	temp = drm_fixp2int(temp2_fp);
	if (temp && temp2_fp)
		tu.extra_bytes = drm_fixp2int_ceil(temp2_fp);
	else
		tu.extra_bytes = 0;

	/* convert extra bytes to pixel-clock then link-clock cycles */
	temp1_fp = drm_fixp_from_fraction(tu.extra_bytes, 1);
	temp2_fp = drm_fixp_from_fraction(8, tu.bpp);
	temp1_fp = drm_fixp_mul(temp1_fp, temp2_fp);

	if (temp && temp1_fp)
		tu.extra_pclk_cycles = drm_fixp2int_ceil(temp1_fp);
	else
		tu.extra_pclk_cycles = drm_fixp2int(temp1_fp);

	temp1_fp = drm_fixp_div(tu.lclk_fp, tu.pclk_fp);
	temp2_fp = drm_fixp_from_fraction(tu.extra_pclk_cycles, 1);
	temp1_fp = drm_fixp_mul(temp2_fp, temp1_fp);

	if (temp1_fp)
		tu.extra_pclk_cycles_in_link_clk = drm_fixp2int_ceil(temp1_fp);
	else
		tu.extra_pclk_cycles_in_link_clk = drm_fixp2int(temp1_fp);

	tu.filler_size = tu.tu_size_desired - tu.valid_boundary_link;

	temp1_fp = drm_fixp_from_fraction(tu.tu_size_desired, 1);
	tu.ratio_by_tu_fp = drm_fixp_mul(tu.ratio_fp, temp1_fp);

	tu.delay_start_link = tu.extra_pclk_cycles_in_link_clk +
				tu.filler_size + tu.extra_buffer_margin;

	tu.resulting_valid_fp =
			drm_fixp_from_fraction(tu.valid_boundary_link, 1);

	temp1_fp = drm_fixp_from_fraction(tu.tu_size_desired, 1);
	temp2_fp = drm_fixp_div(tu.resulting_valid_fp, temp1_fp);
	tu.TU_ratio_err_fp = temp2_fp - tu.original_ratio_fp;

	/* time available in the back porch, minus a safety margin */
	temp1_fp = drm_fixp_from_fraction(HBLANK_MARGIN, 1);
	temp1_fp = tu.hbp_relative_to_pclk_fp - temp1_fp;
	tu.hbp_time_fp = drm_fixp_div(temp1_fp, tu.pclk_fp);

	temp1_fp = drm_fixp_from_fraction(tu.delay_start_link, 1);
	tu.delay_start_time_fp = drm_fixp_div(temp1_fp, tu.lclk_fp);

	compare_result_1 = _tu_param_compare(tu.hbp_time_fp,
					tu.delay_start_time_fp);
	if (compare_result_1 == 2) /* if (hbp_time_fp < delay_start_time_fp) */
		tu.min_hblank_violated = 1;

	tu.hactive_time_fp = drm_fixp_div(tu.lwidth_fp, tu.pclk_fp);

	compare_result_2 = _tu_param_compare(tu.hactive_time_fp,
					tu.delay_start_time_fp);
	if (compare_result_2 == 2)
		tu.min_hblank_violated = 1;

	tu.delay_start_time_fp = 0;

	/* brute force */

	tu.delay_start_link_extra_pixclk = EXTRA_PIXCLK_CYCLE_DELAY;
	tu.diff_abs_fp = tu.resulting_valid_fp - tu.ratio_by_tu_fp;

	temp = drm_fixp2int(tu.diff_abs_fp);
	if (!temp && tu.diff_abs_fp <= 0xffff)
		tu.diff_abs_fp = 0;

	/*
	 * if(diff_abs < 0) diff_abs *= -1
	 * NOTE(review): drm_fixp_mul() is given the plain integer -1 rather
	 * than a fixed-point -1.0 (cf. _tu_param_compare, which builds
	 * minus_1 via drm_fixp_from_fraction) — confirm intended.
	 */
	if (tu.diff_abs_fp < 0)
		tu.diff_abs_fp = drm_fixp_mul(tu.diff_abs_fp, -1);

	tu.boundary_mod_lower_err = 0;
	if ((tu.diff_abs_fp != 0 &&
			((tu.diff_abs_fp > BRUTE_FORCE_THRESHOLD_fp) ||
			 (tu.even_distribution_legacy == 0) ||
			 (DP_BRUTE_FORCE == 1))) ||
			(tu.min_hblank_violated == 1)) {
		do {
			tu.err_fp = drm_fixp_from_fraction(1000, 1);

			temp1_fp = drm_fixp_div(tu.lclk_fp, tu.pclk_fp);
			temp2_fp = drm_fixp_from_fraction(
					tu.delay_start_link_extra_pixclk, 1);
			temp1_fp = drm_fixp_mul(temp2_fp, temp1_fp);

			if (temp1_fp)
				tu.extra_buffer_margin =
					drm_fixp2int_ceil(temp1_fp);
			else
				tu.extra_buffer_margin = 0;

			/* total symbols per line = hactive * bpp / 8 */
			temp1_fp = drm_fixp_from_fraction(tu.bpp, 8);
			temp1_fp = drm_fixp_mul(tu.lwidth_fp, temp1_fp);

			if (temp1_fp)
				tu.n_symbols = drm_fixp2int_ceil(temp1_fp);
			else
				tu.n_symbols = 0;

			/* exhaustive scan over all candidate combinations */
			for (tu.tu_size = 32; tu.tu_size <= 64; tu.tu_size++) {
				for (tu.i_upper_boundary_count = 1;
					tu.i_upper_boundary_count <= 15;
					tu.i_upper_boundary_count++) {
					for (tu.i_lower_boundary_count = 1;
						tu.i_lower_boundary_count <= 15;
						tu.i_lower_boundary_count++) {
						_tu_valid_boundary_calc(&tu);
					}
				}
			}
			tu.delay_start_link_extra_pixclk--;
		} while (tu.boundary_moderation_en != true &&
			tu.boundary_mod_lower_err == 1 &&
			tu.delay_start_link_extra_pixclk != 0);

		if (tu.boundary_moderation_en == true) {
			/* recompute the outputs from the winning candidate */
			temp1_fp = drm_fixp_from_fraction(
					(tu.upper_boundary_count *
					tu.valid_boundary_link +
					tu.lower_boundary_count *
					(tu.valid_boundary_link - 1)), 1);
			temp2_fp = drm_fixp_from_fraction(
					(tu.upper_boundary_count +
					tu.lower_boundary_count), 1);
			tu.resulting_valid_fp =
					drm_fixp_div(temp1_fp, temp2_fp);

			temp1_fp = drm_fixp_from_fraction(
					tu.tu_size_desired, 1);
			tu.ratio_by_tu_fp =
				drm_fixp_mul(tu.original_ratio_fp, temp1_fp);

			tu.valid_lower_boundary_link =
				tu.valid_boundary_link - 1;

			temp1_fp = drm_fixp_from_fraction(tu.bpp, 8);
			temp1_fp = drm_fixp_mul(tu.lwidth_fp, temp1_fp);
			temp2_fp = drm_fixp_div(temp1_fp,
					tu.resulting_valid_fp);
			tu.n_tus = drm_fixp2int(temp2_fp);

			tu.tu_size_minus1 = tu.tu_size_desired - 1;
			tu.even_distribution_BF = 1;

			temp1_fp =
				drm_fixp_from_fraction(tu.tu_size_desired, 1);
			temp2_fp =
				drm_fixp_div(tu.resulting_valid_fp, temp1_fp);
			tu.TU_ratio_err_fp = temp2_fp - tu.original_ratio_fp;
		}
	}

	/* async-clock skew compensation added to the start delay */
	temp2_fp = drm_fixp_mul(LCLK_FAST_SKEW_fp, tu.lwidth_fp);

	if (temp2_fp)
		temp = drm_fixp2int_ceil(temp2_fp);
	else
		temp = 0;

	temp1_fp = drm_fixp_from_fraction(tu.nlanes, 1);
	temp2_fp = drm_fixp_mul(tu.original_ratio_fp, temp1_fp);
	temp1_fp = drm_fixp_from_fraction(tu.bpp, 8);
	temp2_fp = drm_fixp_div(temp1_fp, temp2_fp);
	temp1_fp = drm_fixp_from_fraction(temp, 1);
	temp2_fp = drm_fixp_mul(temp1_fp, temp2_fp);
	temp = drm_fixp2int(temp2_fp);

	if (tu.async_en)
		tu.delay_start_link += (int)temp;

	temp1_fp = drm_fixp_from_fraction(tu.delay_start_link, 1);
	tu.delay_start_time_fp = drm_fixp_div(temp1_fp, tu.lclk_fp);

	/* OUTPUTS */
	tu_table->valid_boundary_link = tu.valid_boundary_link;
	tu_table->delay_start_link = tu.delay_start_link;
	tu_table->boundary_moderation_en = tu.boundary_moderation_en;
	tu_table->valid_lower_boundary_link = tu.valid_lower_boundary_link;
	tu_table->upper_boundary_count = tu.upper_boundary_count;
	tu_table->lower_boundary_count = tu.lower_boundary_count;
	tu_table->tu_size_minus1 = tu.tu_size_minus1;

	DRM_DEBUG_DP("TU: valid_boundary_link: %d\n",
				tu_table->valid_boundary_link);
	DRM_DEBUG_DP("TU: delay_start_link: %d\n",
				tu_table->delay_start_link);
	DRM_DEBUG_DP("TU: boundary_moderation_en: %d\n",
			tu_table->boundary_moderation_en);
	DRM_DEBUG_DP("TU: valid_lower_boundary_link: %d\n",
			tu_table->valid_lower_boundary_link);
	DRM_DEBUG_DP("TU: upper_boundary_count: %d\n",
			tu_table->upper_boundary_count);
	DRM_DEBUG_DP("TU: lower_boundary_count: %d\n",
			tu_table->lower_boundary_count);
	DRM_DEBUG_DP("TU: tu_size_minus1: %d\n", tu_table->tu_size_minus1);
}

/*
 * Populate the TU-calculation inputs from the current link parameters
 * and panel mode (always RGB 4:4:4, no DSC/FEC, synchronous clock) and
 * run the calculator into *tu_table.
 */
static void dp_ctrl_calc_tu_parameters(struct dp_ctrl_private *ctrl,
		struct dp_vc_tu_mapping_table *tu_table)
{
	struct dp_tu_calc_input in;
	struct drm_display_mode *drm_mode;

	drm_mode = &ctrl->panel->dp_mode.drm_mode;

	in.lclk = ctrl->link->link_params.rate / 1000;
	in.pclk_khz = drm_mode->clock;
	in.hactive = drm_mode->hdisplay;
	in.hporch = drm_mode->htotal - drm_mode->hdisplay;
	in.nlanes = ctrl->link->link_params.num_lanes;
	in.bpp = ctrl->panel->dp_mode.bpp;
	in.pixel_enc = 444;
	in.dsc_en = 0;
	in.async_en = 0;
	in.fec_en = 0;
	in.num_of_dsc_slices = 0;
	in.compress_ratio = 100;

	_dp_ctrl_calc_tu(&in, tu_table);
}

/*
 * Pack the computed TU parameters into the controller's three
 * transfer-unit register values and program them via the catalog.
 */
static void dp_ctrl_setup_tr_unit(struct dp_ctrl_private *ctrl)
{
	u32 dp_tu = 0x0;
	u32 valid_boundary = 0x0;
	u32 valid_boundary2 = 0x0;
	struct dp_vc_tu_mapping_table tu_calc_table;

	dp_ctrl_calc_tu_parameters(ctrl, &tu_calc_table);

	dp_tu |= tu_calc_table.tu_size_minus1;
	valid_boundary |= tu_calc_table.valid_boundary_link;
	valid_boundary |= (tu_calc_table.delay_start_link << 16);

	valid_boundary2 |= (tu_calc_table.valid_lower_boundary_link << 1);
	valid_boundary2 |= (tu_calc_table.upper_boundary_count << 16);
	valid_boundary2 |= (tu_calc_table.lower_boundary_count << 20);

	if (tu_calc_table.boundary_moderation_en)
		valid_boundary2 |= BIT(0);

	pr_debug("dp_tu=0x%x, valid_boundary=0x%x, valid_boundary2=0x%x\n",
			dp_tu, valid_boundary, valid_boundary2);

	dp_catalog_ctrl_update_transfer_unit(ctrl->catalog,
				dp_tu, valid_boundary, valid_boundary2);
}

/*
 * Wait for the controller to report that video is being transmitted.
 * video_comp is completed elsewhere (presumably the DP ISR — not
 * visible in this chunk).
 * NOTE(review): the error text says "Link Train" although this waits
 * for video readiness — consider rewording upstream.
 *
 * Returns 0 on success, -ETIMEDOUT on timeout.
 */
static int dp_ctrl_wait4video_ready(struct dp_ctrl_private *ctrl)
{
	int ret = 0;

	if (!wait_for_completion_timeout(&ctrl->video_comp,
			WAIT_FOR_VIDEO_READY_TIMEOUT_JIFFIES)) {
		DRM_ERROR("Link Train timedout\n");
		ret = -ETIMEDOUT;
	}
	return ret;
}

/*
 * Apply the current voltage-swing / pre-emphasis levels to the source
 * PHY (via the catalog) and mirror them into the sink's
 * DP_TRAINING_LANE0_SET..LANEx_SET registers, setting the
 * MAX_*_REACHED flags where applicable.
 *
 * Returns 0 on success or a negative error code.
 */
static int dp_ctrl_update_vx_px(struct dp_ctrl_private *ctrl)
{
	struct dp_link *link = ctrl->link;
	int ret = 0, lane, lane_cnt;
	u8 buf[4];
	u32 max_level_reached = 0;
	u32 voltage_swing_level = link->phy_params.v_level;
	u32 pre_emphasis_level = link->phy_params.p_level;

	ret = dp_catalog_ctrl_update_vx_px(ctrl->catalog,
		voltage_swing_level, pre_emphasis_level);

	if (ret)
		return ret;

	/*
	 * NOTE(review): the swing check uses '>' while the pre-emphasis
	 * check below uses '==' — confirm the intended boundary semantics.
	 */
	if (voltage_swing_level > DP_TRAIN_VOLTAGE_SWING_MAX) {
		DRM_DEBUG_DP("max. voltage swing level reached %d\n",
				voltage_swing_level);
		max_level_reached |= DP_TRAIN_MAX_SWING_REACHED;
	}

	if (pre_emphasis_level == DP_TRAIN_PRE_EMPHASIS_MAX) {
		DRM_DEBUG_DP("max. pre-emphasis level reached %d\n",
				pre_emphasis_level);
		max_level_reached |= DP_TRAIN_MAX_PRE_EMPHASIS_REACHED;
	}

	pre_emphasis_level <<= DP_TRAIN_PRE_EMPHASIS_SHIFT;

	/* same drive setting is replicated to every active lane */
	lane_cnt = ctrl->link->link_params.num_lanes;
	for (lane = 0; lane < lane_cnt; lane++)
		buf[lane] = voltage_swing_level | pre_emphasis_level
				| max_level_reached;

	DRM_DEBUG_DP("sink: p|v=0x%x\n", voltage_swing_level
			| pre_emphasis_level);
	ret = drm_dp_dpcd_write(ctrl->aux, DP_TRAINING_LANE0_SET,
			buf, lane_cnt);
	if (ret == lane_cnt)
		ret = 0;

	return ret;
}

/*
 * Tell the sink which training pattern to expect by writing
 * DP_TRAINING_PATTERN_SET over AUX.  Returns true when the single
 * byte was written successfully.
 */
static bool dp_ctrl_train_pattern_set(struct dp_ctrl_private *ctrl,
		u8 pattern)
{
	u8 buf;
	int ret = 0;

	DRM_DEBUG_DP("sink: pattern=%x\n", pattern);

	buf = pattern;
	ret = drm_dp_dpcd_writeb(ctrl->aux,
		DP_TRAINING_PATTERN_SET, buf);
	return ret == 1;
}

/*
 * Read the sink's six link-status bytes, retrying (up to 99 attempts)
 * while the sink still flags DP_LINK_STATUS_UPDATED in
 * LANE_ALIGN_STATUS_UPDATED.
 *
 * Returns 0 on a settled read, a short/negative length from the DPCD
 * read on failure, or -ETIMEDOUT when the retries are exhausted.
 */
static int dp_ctrl_read_link_status(struct dp_ctrl_private *ctrl,
				    u8 *link_status)
{
	int len = 0;
	u32 const offset = DP_LANE_ALIGN_STATUS_UPDATED - DP_LANE0_1_STATUS;
	u32 link_status_read_max_retries = 100;

	while (--link_status_read_max_retries) {
		len = drm_dp_dpcd_read_link_status(ctrl->aux,
			link_status);
		if (len != DP_LINK_STATUS_SIZE) {
			DRM_ERROR("DP link status read failed, err: %d\n", len);
			return len;
		}

		if (!(link_status[offset] & DP_LINK_STATUS_UPDATED))
			return 0;
	}

	return -ETIMEDOUT;
}

/*
 * Link-training phase 1 (clock recovery, TPS1): transmit pattern 1 with
 * scrambling disabled and iterate up to 5 times, re-reading link status
 * and adjusting drive levels per the sink's requests.  The retry count
 * restarts whenever the sink asks for a new voltage-swing level.
 *
 * Returns 0 when clock recovery is achieved, -EAGAIN when the maximum
 * swing has been reached (caller may downshift the rate), -ETIMEDOUT
 * after too many attempts, or a propagated AUX error.
 */
static int dp_ctrl_link_train_1(struct dp_ctrl_private *ctrl)
{
	int tries, old_v_level, ret = 0;
	u8 link_status[DP_LINK_STATUS_SIZE];
	int const maximum_retries = 5;

	dp_catalog_ctrl_state_ctrl(ctrl->catalog, 0);

	ret = dp_catalog_ctrl_set_pattern(ctrl->catalog, DP_TRAINING_PATTERN_1);
	if (ret)
		return ret;
	dp_ctrl_train_pattern_set(ctrl, DP_TRAINING_PATTERN_1 |
		DP_LINK_SCRAMBLING_DISABLE);
	ret = dp_ctrl_update_vx_px(ctrl);
	if (ret)
		return ret;

	tries = 0;
	old_v_level = ctrl->link->phy_params.v_level;
	for (tries = 0; tries < maximum_retries; tries++) {
		drm_dp_link_train_clock_recovery_delay(ctrl->panel->dpcd);

		ret = dp_ctrl_read_link_status(ctrl, link_status);
		if (ret)
			return ret;

		if (drm_dp_clock_recovery_ok(link_status,
			ctrl->link->link_params.num_lanes)) {
			return ret;
		}

		if (ctrl->link->phy_params.v_level >
			DP_TRAIN_VOLTAGE_SWING_MAX) {
			DRM_ERROR_RATELIMITED("max v_level reached\n");
			return -EAGAIN;
		}

		/* sink changed its swing request: give it fresh retries */
		if (old_v_level != ctrl->link->phy_params.v_level) {
			tries = 0;
			old_v_level = ctrl->link->phy_params.v_level;
		}

		DRM_DEBUG_DP("clock recovery not done, adjusting vx px\n");

		dp_link_adjust_levels(ctrl->link, link_status);
		ret = dp_ctrl_update_vx_px(ctrl);
		if (ret)
			return ret;
	}

	DRM_ERROR("max tries reached\n");
	return -ETIMEDOUT;
}

/*
 * Fall back to the next lower standard DP link rate
 * (810000 -> 540000 -> 270000; anything else becomes 162000).
 */
static void dp_ctrl_link_rate_down_shift(struct dp_ctrl_private *ctrl)
{
	switch (ctrl->link->link_params.rate) {
	case 810000:
		ctrl->link->link_params.rate = 540000;
		break;
	case 540000:
		ctrl->link->link_params.rate = 270000;
		break;
	case 270000:
	case 162000:
	default:
		ctrl->link->link_params.rate = 162000;
		break;
	};

	DRM_DEBUG_DP("new rate=0x%x\n", ctrl->link->link_params.rate);
}

/*
 * Tell the sink to stop expecting a training pattern, then honour the
 * channel-equalisation delay before continuing.
 */
static void dp_ctrl_clear_training_pattern(struct dp_ctrl_private *ctrl)
{
	dp_ctrl_train_pattern_set(ctrl, DP_TRAINING_PATTERN_DISABLE);
	drm_dp_link_train_channel_eq_delay(ctrl->panel->dpcd);
}

/*
 * Link-training phase 2 (channel equalisation): use TPS3 when the sink
 * supports it, otherwise TPS2, and iterate adjusting drive levels until
 * the sink reports channel-eq done.
 *
 * Returns 0 on success, -ETIMEDOUT after the retries are exhausted, or
 * a propagated AUX/catalog error.
 */
static int dp_ctrl_link_training_2(struct dp_ctrl_private *ctrl)
{
	int tries = 0, ret = 0;
	char pattern;
	int const maximum_retries = 5;
	u8 link_status[DP_LINK_STATUS_SIZE];

	dp_catalog_ctrl_state_ctrl(ctrl->catalog, 0);

	if (drm_dp_tps3_supported(ctrl->panel->dpcd))
		pattern = DP_TRAINING_PATTERN_3;
	else
		pattern = DP_TRAINING_PATTERN_2;

	ret = dp_ctrl_update_vx_px(ctrl);
	if (ret)
		return ret;

	ret = dp_catalog_ctrl_set_pattern(ctrl->catalog, pattern);
	if (ret)
		return ret;

	dp_ctrl_train_pattern_set(ctrl, pattern | DP_RECOVERED_CLOCK_OUT_EN);

	for (tries = 0; tries <= maximum_retries; tries++) {
		drm_dp_link_train_channel_eq_delay(ctrl->panel->dpcd);

		ret = dp_ctrl_read_link_status(ctrl, link_status);
		if (ret)
			return ret;

		if (drm_dp_channel_eq_ok(link_status,
			ctrl->link->link_params.num_lanes))
			return ret;

		dp_link_adjust_levels(ctrl->link, link_status);
		ret = dp_ctrl_update_vx_px(ctrl);
		if (ret)
			return ret;

	}

	return -ETIMEDOUT;
}

/*
 * Full link-training sequence: reset the drive levels, program the
 * controller configuration, configure the sink's link (rate/lanes,
 * ANSI 8b/10b coding), then run the clock-recovery and channel-eq
 * phases in order.
 */
static int dp_ctrl_link_train(struct dp_ctrl_private *ctrl)
{
	int ret = 0;
	u8 encoding = DP_SET_ANSI_8B10B;
	struct dp_link_info link_info = {0};

	ctrl->link->phy_params.p_level = 0;
	ctrl->link->phy_params.v_level = 0;

	dp_ctrl_config_ctrl(ctrl);

	link_info.num_lanes = ctrl->link->link_params.num_lanes;
	link_info.rate = ctrl->link->link_params.rate;
	link_info.capabilities = DP_LINK_CAP_ENHANCED_FRAMING;

	dp_aux_link_configure(ctrl->aux, &link_info);
	drm_dp_dpcd_write(ctrl->aux, DP_MAIN_LINK_CHANNEL_CODING_SET,
				&encoding, 1);

	ret = 
dp_ctrl_link_train_1(ctrl); 1212 + if (ret) { 1213 + DRM_ERROR("link training #1 failed. ret=%d\n", ret); 1214 + goto end; 1215 + } 1216 + 1217 + /* print success info as this is a result of user initiated action */ 1218 + DRM_DEBUG_DP("link training #1 successful\n"); 1219 + 1220 + ret = dp_ctrl_link_training_2(ctrl); 1221 + if (ret) { 1222 + DRM_ERROR("link training #2 failed. ret=%d\n", ret); 1223 + goto end; 1224 + } 1225 + 1226 + /* print success info as this is a result of user initiated action */ 1227 + DRM_DEBUG_DP("link training #2 successful\n"); 1228 + 1229 + end: 1230 + dp_catalog_ctrl_state_ctrl(ctrl->catalog, 0); 1231 + 1232 + dp_ctrl_clear_training_pattern(ctrl); 1233 + return ret; 1234 + } 1235 + 1236 + static int dp_ctrl_setup_main_link(struct dp_ctrl_private *ctrl, bool train) 1237 + { 1238 + bool mainlink_ready = false; 1239 + int ret = 0; 1240 + 1241 + dp_catalog_ctrl_mainlink_ctrl(ctrl->catalog, true); 1242 + 1243 + ret = dp_link_psm_config(ctrl->link, &ctrl->panel->link_info, false); 1244 + if (ret) 1245 + return ret; 1246 + 1247 + if (ctrl->link->sink_request & DP_TEST_LINK_PHY_TEST_PATTERN) 1248 + return ret; 1249 + 1250 + if (train) { 1251 + /* 1252 + * As part of previous calls, DP controller state might have 1253 + * transitioned to PUSH_IDLE. In order to start transmitting 1254 + * a link training pattern, we have to first do soft reset. 1255 + */ 1256 + dp_catalog_ctrl_reset(ctrl->catalog); 1257 + 1258 + ret = dp_ctrl_link_train(ctrl); 1259 + if (ret) 1260 + return ret; 1261 + } 1262 + 1263 + /* 1264 + * Set up transfer unit values and set controller state to send 1265 + * video. 1266 + */ 1267 + dp_ctrl_setup_tr_unit(ctrl); 1268 + dp_catalog_ctrl_state_ctrl(ctrl->catalog, DP_STATE_CTRL_SEND_VIDEO); 1269 + 1270 + ret = dp_ctrl_wait4video_ready(ctrl); 1271 + if (ret) 1272 + return ret; 1273 + 1274 + mainlink_ready = dp_catalog_ctrl_mainlink_ready(ctrl->catalog); 1275 + DRM_DEBUG_DP("mainlink %s\n", mainlink_ready ? 
"READY" : "NOT READY"); 1276 + return ret; 1277 + } 1278 + 1279 + static void dp_ctrl_set_clock_rate(struct dp_ctrl_private *ctrl, 1280 + char *name, u32 rate) 1281 + { 1282 + u32 num = ctrl->parser->mp[DP_CTRL_PM].num_clk; 1283 + struct dss_clk *cfg = ctrl->parser->mp[DP_CTRL_PM].clk_config; 1284 + 1285 + while (num && strcmp(cfg->clk_name, name)) { 1286 + num--; 1287 + cfg++; 1288 + } 1289 + 1290 + DRM_DEBUG_DP("setting rate=%d on clk=%s\n", rate, name); 1291 + 1292 + if (num) 1293 + cfg->rate = rate; 1294 + else 1295 + DRM_ERROR("%s clock doesn't exit to set rate %d\n", 1296 + name, rate); 1297 + } 1298 + 1299 + static int dp_ctrl_enable_mainlink_clocks(struct dp_ctrl_private *ctrl) 1300 + { 1301 + int ret = 0; 1302 + 1303 + dp_power_set_link_clk_parent(ctrl->power); 1304 + 1305 + dp_ctrl_set_clock_rate(ctrl, "ctrl_link", 1306 + ctrl->link->link_params.rate); 1307 + 1308 + dp_ctrl_set_clock_rate(ctrl, "stream_pixel", 1309 + ctrl->dp_ctrl.pixel_rate); 1310 + 1311 + ret = dp_power_clk_enable(ctrl->power, DP_CTRL_PM, true); 1312 + if (ret) 1313 + DRM_ERROR("Unable to start link clocks. ret=%d\n", ret); 1314 + 1315 + return ret; 1316 + } 1317 + 1318 + int dp_ctrl_host_init(struct dp_ctrl *dp_ctrl, bool flip) 1319 + { 1320 + struct dp_ctrl_private *ctrl; 1321 + 1322 + if (!dp_ctrl) { 1323 + DRM_ERROR("Invalid input data\n"); 1324 + return -EINVAL; 1325 + } 1326 + 1327 + ctrl = container_of(dp_ctrl, struct dp_ctrl_private, dp_ctrl); 1328 + 1329 + ctrl->dp_ctrl.orientation = flip; 1330 + 1331 + dp_catalog_ctrl_usb_reset(ctrl->catalog, flip); 1332 + dp_catalog_ctrl_phy_reset(ctrl->catalog); 1333 + dp_catalog_ctrl_enable_irq(ctrl->catalog, true); 1334 + 1335 + return 0; 1336 + } 1337 + 1338 + /** 1339 + * dp_ctrl_host_deinit() - Uninitialize DP controller 1340 + * @dp_ctrl: Display Port Driver data 1341 + * 1342 + * Perform required steps to uninitialize DP controller 1343 + * and its resources. 
1344 + */ 1345 + void dp_ctrl_host_deinit(struct dp_ctrl *dp_ctrl) 1346 + { 1347 + struct dp_ctrl_private *ctrl; 1348 + 1349 + if (!dp_ctrl) { 1350 + DRM_ERROR("Invalid input data\n"); 1351 + return; 1352 + } 1353 + 1354 + ctrl = container_of(dp_ctrl, struct dp_ctrl_private, dp_ctrl); 1355 + 1356 + dp_catalog_ctrl_enable_irq(ctrl->catalog, false); 1357 + 1358 + DRM_DEBUG_DP("Host deinitialized successfully\n"); 1359 + } 1360 + 1361 + static bool dp_ctrl_use_fixed_nvid(struct dp_ctrl_private *ctrl) 1362 + { 1363 + u8 *dpcd = ctrl->panel->dpcd; 1364 + u32 edid_quirks = 0; 1365 + 1366 + edid_quirks = drm_dp_get_edid_quirks(ctrl->panel->edid); 1367 + /* 1368 + * For better interop experience, used a fixed NVID=0x8000 1369 + * whenever connected to a VGA dongle downstream. 1370 + */ 1371 + if (drm_dp_is_branch(dpcd)) 1372 + return (drm_dp_has_quirk(&ctrl->panel->desc, edid_quirks, 1373 + DP_DPCD_QUIRK_CONSTANT_N)); 1374 + 1375 + return false; 1376 + } 1377 + 1378 + static int dp_ctrl_reinitialize_mainlink(struct dp_ctrl_private *ctrl) 1379 + { 1380 + int ret = 0; 1381 + 1382 + dp_catalog_ctrl_mainlink_ctrl(ctrl->catalog, false); 1383 + dp_catalog_ctrl_phy_lane_cfg(ctrl->catalog, 1384 + ctrl->dp_ctrl.orientation, ctrl->link->link_params.num_lanes); 1385 + /* 1386 + * Disable and re-enable the mainlink clock since the 1387 + * link clock might have been adjusted as part of the 1388 + * link maintenance. 1389 + */ 1390 + ret = dp_power_clk_enable(ctrl->power, DP_CTRL_PM, false); 1391 + if (ret) { 1392 + DRM_ERROR("Failed to disable clocks. ret=%d\n", ret); 1393 + return ret; 1394 + } 1395 + /* hw recommended delay before re-enabling clocks */ 1396 + msleep(20); 1397 + 1398 + ret = dp_ctrl_enable_mainlink_clocks(ctrl); 1399 + if (ret) { 1400 + DRM_ERROR("Failed to enable mainlink clks. 
ret=%d\n", ret); 1401 + return ret; 1402 + } 1403 + 1404 + dp_ctrl_configure_source_params(ctrl); 1405 + dp_catalog_ctrl_config_msa(ctrl->catalog, 1406 + ctrl->link->link_params.rate, 1407 + ctrl->dp_ctrl.pixel_rate, dp_ctrl_use_fixed_nvid(ctrl)); 1408 + reinit_completion(&ctrl->idle_comp); 1409 + 1410 + return ret; 1411 + } 1412 + 1413 + static int dp_ctrl_link_maintenance(struct dp_ctrl_private *ctrl) 1414 + { 1415 + int ret = 0; 1416 + int tries; 1417 + 1418 + dp_ctrl_push_idle(&ctrl->dp_ctrl); 1419 + dp_catalog_ctrl_reset(ctrl->catalog); 1420 + 1421 + ctrl->dp_ctrl.pixel_rate = ctrl->panel->dp_mode.drm_mode.clock; 1422 + 1423 + for (tries = 0; tries < 10; tries++) { 1424 + ret = dp_ctrl_reinitialize_mainlink(ctrl); 1425 + if (ret) { 1426 + DRM_ERROR("Failed to reinitialize mainlink. ret=%d\n", 1427 + ret); 1428 + break; 1429 + } 1430 + 1431 + ret = dp_ctrl_setup_main_link(ctrl, true); 1432 + if (ret == -EAGAIN) /* try with lower link rate */ 1433 + dp_ctrl_link_rate_down_shift(ctrl); 1434 + } 1435 + return ret; 1436 + } 1437 + 1438 + static int dp_ctrl_process_phy_test_request(struct dp_ctrl_private *ctrl) 1439 + { 1440 + int ret = 0; 1441 + 1442 + if (!ctrl->link->phy_params.phy_test_pattern_sel) { 1443 + DRM_DEBUG_DP("no test pattern selected by sink\n"); 1444 + return ret; 1445 + } 1446 + 1447 + dp_ctrl_push_idle(&ctrl->dp_ctrl); 1448 + /* 1449 + * The global reset will need DP link related clocks to be 1450 + * running. Add the global reset just before disabling the 1451 + * link clocks and core clocks. 
1452 + */ 1453 + dp_catalog_ctrl_reset(ctrl->catalog); 1454 + ret = dp_ctrl_off(&ctrl->dp_ctrl); 1455 + if (ret) { 1456 + DRM_ERROR("failed to disable DP controller\n"); 1457 + return ret; 1458 + } 1459 + 1460 + ret = dp_ctrl_on(&ctrl->dp_ctrl); 1461 + if (ret) 1462 + DRM_ERROR("failed to enable DP controller\n"); 1463 + 1464 + return ret; 1465 + } 1466 + 1467 + static bool dp_ctrl_send_phy_test_pattern(struct dp_ctrl_private *ctrl) 1468 + { 1469 + bool success = false; 1470 + u32 pattern_sent = 0x0; 1471 + u32 pattern_requested = ctrl->link->phy_params.phy_test_pattern_sel; 1472 + 1473 + DRM_DEBUG_DP("request: 0x%x\n", pattern_requested); 1474 + 1475 + if (dp_catalog_ctrl_update_vx_px(ctrl->catalog, 1476 + ctrl->link->phy_params.v_level, 1477 + ctrl->link->phy_params.p_level)) { 1478 + DRM_ERROR("Failed to set v/p levels\n"); 1479 + return false; 1480 + } 1481 + dp_catalog_ctrl_send_phy_pattern(ctrl->catalog, pattern_requested); 1482 + dp_link_send_test_response(ctrl->link); 1483 + 1484 + pattern_sent = dp_catalog_ctrl_read_phy_pattern(ctrl->catalog); 1485 + 1486 + switch (pattern_sent) { 1487 + case MR_LINK_TRAINING1: 1488 + success = pattern_requested == 1489 + DP_LINK_QUAL_PATTERN_D10_2; 1490 + break; 1491 + case MR_LINK_SYMBOL_ERM: 1492 + success = (pattern_requested == 1493 + DP_LINK_QUAL_PATTERN_ERROR_RATE) 1494 + || (pattern_requested == 1495 + DP_LINK_QUAL_PATTERN_HBR2_EYE); 1496 + break; 1497 + case MR_LINK_PRBS7: 1498 + success = pattern_requested == DP_LINK_QUAL_PATTERN_PRBS7; 1499 + break; 1500 + case MR_LINK_CUSTOM80: 1501 + success = pattern_requested == 1502 + DP_LINK_QUAL_PATTERN_80BIT_CUSTOM; 1503 + break; 1504 + default: 1505 + success = false; 1506 + } 1507 + 1508 + DRM_DEBUG_DP("%s: test->0x%x\n", success ? 
"success" : "failed", 1509 + pattern_requested); 1510 + return success; 1511 + } 1512 + 1513 + void dp_ctrl_handle_sink_request(struct dp_ctrl *dp_ctrl) 1514 + { 1515 + struct dp_ctrl_private *ctrl; 1516 + u32 sink_request = 0x0; 1517 + 1518 + if (!dp_ctrl) { 1519 + DRM_ERROR("invalid input\n"); 1520 + return; 1521 + } 1522 + 1523 + ctrl = container_of(dp_ctrl, struct dp_ctrl_private, dp_ctrl); 1524 + sink_request = ctrl->link->sink_request; 1525 + 1526 + if (sink_request & DP_TEST_LINK_PHY_TEST_PATTERN) { 1527 + DRM_DEBUG_DP("PHY_TEST_PATTERN request\n"); 1528 + if (dp_ctrl_process_phy_test_request(ctrl)) { 1529 + DRM_ERROR("process phy_test_req failed\n"); 1530 + return; 1531 + } 1532 + } 1533 + 1534 + if (sink_request & DP_LINK_STATUS_UPDATED) 1535 + if (dp_ctrl_link_maintenance(ctrl)) { 1536 + DRM_ERROR("LM failed: STATUS_UPDATED\n"); 1537 + return; 1538 + } 1539 + 1540 + 1541 + if (sink_request & DP_TEST_LINK_TRAINING) { 1542 + dp_link_send_test_response(ctrl->link); 1543 + if (dp_ctrl_link_maintenance(ctrl)) { 1544 + DRM_ERROR("LM failed: TEST_LINK_TRAINING\n"); 1545 + return; 1546 + } 1547 + } 1548 + } 1549 + 1550 + int dp_ctrl_on(struct dp_ctrl *dp_ctrl) 1551 + { 1552 + int rc = 0; 1553 + struct dp_ctrl_private *ctrl; 1554 + u32 rate = 0; 1555 + u32 link_train_max_retries = 10; 1556 + u32 const phy_cts_pixel_clk_khz = 148500; 1557 + 1558 + if (!dp_ctrl) 1559 + return -EINVAL; 1560 + 1561 + ctrl = container_of(dp_ctrl, struct dp_ctrl_private, dp_ctrl); 1562 + 1563 + rate = ctrl->panel->link_info.rate; 1564 + 1565 + dp_power_clk_enable(ctrl->power, DP_CORE_PM, true); 1566 + dp_catalog_ctrl_hpd_config(ctrl->catalog, true); 1567 + 1568 + if (ctrl->link->sink_request & DP_TEST_LINK_PHY_TEST_PATTERN) { 1569 + DRM_DEBUG_DP("using phy test link parameters\n"); 1570 + if (!ctrl->panel->dp_mode.drm_mode.clock) 1571 + ctrl->dp_ctrl.pixel_rate = phy_cts_pixel_clk_khz; 1572 + } else { 1573 + ctrl->link->link_params.rate = rate; 1574 + ctrl->link->link_params.num_lanes = 
1575 + ctrl->panel->link_info.num_lanes; 1576 + ctrl->dp_ctrl.pixel_rate = ctrl->panel->dp_mode.drm_mode.clock; 1577 + } 1578 + 1579 + DRM_DEBUG_DP("rate=%d, num_lanes=%d, pixel_rate=%d\n", 1580 + ctrl->link->link_params.rate, 1581 + ctrl->link->link_params.num_lanes, ctrl->dp_ctrl.pixel_rate); 1582 + 1583 + dp_catalog_ctrl_phy_lane_cfg(ctrl->catalog, 1584 + ctrl->dp_ctrl.orientation, 1585 + ctrl->link->link_params.num_lanes); 1586 + 1587 + rc = dp_ctrl_enable_mainlink_clocks(ctrl); 1588 + if (rc) 1589 + return rc; 1590 + 1591 + while (--link_train_max_retries && 1592 + !atomic_read(&ctrl->dp_ctrl.aborted)) { 1593 + rc = dp_ctrl_reinitialize_mainlink(ctrl); 1594 + if (rc) { 1595 + DRM_ERROR("Failed to reinitialize mainlink. rc=%d\n", 1596 + rc); 1597 + break; 1598 + } 1599 + rc = dp_ctrl_setup_main_link(ctrl, true); 1600 + if (!rc) 1601 + break; 1602 + /* try with lower link rate */ 1603 + dp_ctrl_link_rate_down_shift(ctrl); 1604 + } 1605 + 1606 + if (ctrl->link->sink_request & DP_TEST_LINK_PHY_TEST_PATTERN) 1607 + dp_ctrl_send_phy_test_pattern(ctrl); 1608 + 1609 + return rc; 1610 + } 1611 + 1612 + int dp_ctrl_off(struct dp_ctrl *dp_ctrl) 1613 + { 1614 + struct dp_ctrl_private *ctrl; 1615 + int ret = 0; 1616 + 1617 + if (!dp_ctrl) 1618 + return -EINVAL; 1619 + 1620 + ctrl = container_of(dp_ctrl, struct dp_ctrl_private, dp_ctrl); 1621 + 1622 + dp_catalog_ctrl_mainlink_ctrl(ctrl->catalog, false); 1623 + dp_catalog_ctrl_reset(ctrl->catalog); 1624 + ret = dp_power_clk_enable(ctrl->power, DP_CTRL_PM, false); 1625 + if (ret) { 1626 + DRM_ERROR("Failed to disable clocks. 
ret=%d\n", ret); 1627 + return ret; 1628 + } 1629 + 1630 + DRM_DEBUG_DP("DP off done\n"); 1631 + return ret; 1632 + } 1633 + 1634 + void dp_ctrl_isr(struct dp_ctrl *dp_ctrl) 1635 + { 1636 + struct dp_ctrl_private *ctrl; 1637 + u32 isr; 1638 + 1639 + if (!dp_ctrl) 1640 + return; 1641 + 1642 + ctrl = container_of(dp_ctrl, struct dp_ctrl_private, dp_ctrl); 1643 + 1644 + isr = dp_catalog_ctrl_get_interrupt(ctrl->catalog); 1645 + 1646 + if (isr & DP_CTRL_INTR_READY_FOR_VIDEO) { 1647 + DRM_DEBUG_DP("dp_video_ready\n"); 1648 + complete(&ctrl->video_comp); 1649 + } 1650 + 1651 + if (isr & DP_CTRL_INTR_IDLE_PATTERN_SENT) { 1652 + DRM_DEBUG_DP("idle_patterns_sent\n"); 1653 + complete(&ctrl->idle_comp); 1654 + } 1655 + } 1656 + 1657 + struct dp_ctrl *dp_ctrl_get(struct device *dev, struct dp_link *link, 1658 + struct dp_panel *panel, struct drm_dp_aux *aux, 1659 + struct dp_power *power, struct dp_catalog *catalog, 1660 + struct dp_parser *parser) 1661 + { 1662 + struct dp_ctrl_private *ctrl; 1663 + 1664 + if (!dev || !panel || !aux || 1665 + !link || !catalog) { 1666 + DRM_ERROR("invalid input\n"); 1667 + return ERR_PTR(-EINVAL); 1668 + } 1669 + 1670 + ctrl = devm_kzalloc(dev, sizeof(*ctrl), GFP_KERNEL); 1671 + if (!ctrl) { 1672 + DRM_ERROR("Mem allocation failure\n"); 1673 + return ERR_PTR(-ENOMEM); 1674 + } 1675 + 1676 + init_completion(&ctrl->idle_comp); 1677 + init_completion(&ctrl->video_comp); 1678 + mutex_init(&ctrl->push_idle_mutex); 1679 + 1680 + /* in parameters */ 1681 + ctrl->parser = parser; 1682 + ctrl->panel = panel; 1683 + ctrl->power = power; 1684 + ctrl->aux = aux; 1685 + ctrl->link = link; 1686 + ctrl->catalog = catalog; 1687 + ctrl->dev = dev; 1688 + 1689 + return &ctrl->dp_ctrl; 1690 + } 1691 + 1692 + void dp_ctrl_put(struct dp_ctrl *dp_ctrl) 1693 + { 1694 + }
+35
drivers/gpu/drm/msm/dp/dp_ctrl.h
/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (c) 2012-2020, The Linux Foundation. All rights reserved.
 */

#ifndef _DP_CTRL_H_
#define _DP_CTRL_H_

#include "dp_aux.h"
#include "dp_panel.h"
#include "dp_link.h"
#include "dp_parser.h"
#include "dp_power.h"
#include "dp_catalog.h"

/**
 * struct dp_ctrl - public state of the DP link controller
 * @orientation: plug orientation; true when flipped (Type-C CC2)
 * @aborted: when set, dp_ctrl_on() stops its link-training retry loop
 * @pixel_rate: pixel clock of the current mode, in kHz
 */
struct dp_ctrl {
	bool orientation;
	atomic_t aborted;
	u32 pixel_rate;
};

/* Reset the PHY/USB blocks and enable controller interrupts. */
int dp_ctrl_host_init(struct dp_ctrl *dp_ctrl, bool flip);
/* Mask controller interrupts; counterpart of dp_ctrl_host_init(). */
void dp_ctrl_host_deinit(struct dp_ctrl *dp_ctrl);
/* Power up the controller, train the link and start sending video. */
int dp_ctrl_on(struct dp_ctrl *dp_ctrl);
/* Shut down the mainlink and disable controller clocks. */
int dp_ctrl_off(struct dp_ctrl *dp_ctrl);
/* Switch the controller to transmitting the idle pattern. */
void dp_ctrl_push_idle(struct dp_ctrl *dp_ctrl);
/* Controller interrupt service routine. */
void dp_ctrl_isr(struct dp_ctrl *dp_ctrl);
/* Service pending sink requests (test patterns, link maintenance). */
void dp_ctrl_handle_sink_request(struct dp_ctrl *dp_ctrl);
/* Allocate a controller instance bound to the given sub-modules. */
struct dp_ctrl *dp_ctrl_get(struct device *dev, struct dp_link *link,
			struct dp_panel *panel, struct drm_dp_aux *aux,
			struct dp_power *power, struct dp_catalog *catalog,
			struct dp_parser *parser);
/* Release a controller instance obtained from dp_ctrl_get(). */
void dp_ctrl_put(struct dp_ctrl *dp_ctrl);

#endif /* _DP_CTRL_H_ */
+936
drivers/gpu/drm/msm/dp/dp_display.c
··· 1 + // SPDX-License-Identifier: GPL-2.0-only 2 + /* 3 + * Copyright (c) 2017-2020, The Linux Foundation. All rights reserved. 4 + */ 5 + 6 + #include <linux/module.h> 7 + #include <linux/slab.h> 8 + #include <linux/uaccess.h> 9 + #include <linux/debugfs.h> 10 + #include <linux/component.h> 11 + #include <linux/of_irq.h> 12 + 13 + #include "msm_drv.h" 14 + #include "msm_kms.h" 15 + #include "dp_hpd.h" 16 + #include "dp_parser.h" 17 + #include "dp_power.h" 18 + #include "dp_catalog.h" 19 + #include "dp_aux.h" 20 + #include "dp_link.h" 21 + #include "dp_panel.h" 22 + #include "dp_ctrl.h" 23 + #include "dp_display.h" 24 + #include "dp_drm.h" 25 + 26 + static struct msm_dp *g_dp_display; 27 + #define HPD_STRING_SIZE 30 28 + 29 + struct dp_display_private { 30 + char *name; 31 + int irq; 32 + 33 + /* state variables */ 34 + bool core_initialized; 35 + bool power_on; 36 + bool hpd_irq_on; 37 + bool audio_supported; 38 + 39 + struct platform_device *pdev; 40 + struct dentry *root; 41 + struct completion notification_comp; 42 + 43 + struct dp_usbpd *usbpd; 44 + struct dp_parser *parser; 45 + struct dp_power *power; 46 + struct dp_catalog *catalog; 47 + struct drm_dp_aux *aux; 48 + struct dp_link *link; 49 + struct dp_panel *panel; 50 + struct dp_ctrl *ctrl; 51 + 52 + struct dp_usbpd_cb usbpd_cb; 53 + struct dp_display_mode dp_mode; 54 + struct msm_dp dp_display; 55 + }; 56 + 57 + static const struct of_device_id dp_dt_match[] = { 58 + {.compatible = "qcom,sc7180-dp"}, 59 + {} 60 + }; 61 + 62 + static irqreturn_t dp_display_irq(int irq, void *dev_id) 63 + { 64 + struct dp_display_private *dp = dev_id; 65 + 66 + /* DP controller isr */ 67 + dp_ctrl_isr(dp->ctrl); 68 + 69 + /* DP aux isr */ 70 + dp_aux_isr(dp->aux); 71 + 72 + return IRQ_HANDLED; 73 + } 74 + 75 + static int dp_display_bind(struct device *dev, struct device *master, 76 + void *data) 77 + { 78 + int rc = 0; 79 + struct dp_display_private *dp; 80 + struct drm_device *drm; 81 + struct msm_drm_private *priv; 82 
+ struct platform_device *pdev = to_platform_device(dev); 83 + 84 + drm = dev_get_drvdata(master); 85 + 86 + dp = platform_get_drvdata(pdev); 87 + if (!dp) { 88 + DRM_ERROR("DP driver bind failed. Invalid driver data\n"); 89 + return -EINVAL; 90 + } 91 + 92 + dp->dp_display.drm_dev = drm; 93 + priv = drm->dev_private; 94 + priv->dp = &(dp->dp_display); 95 + 96 + rc = dp->parser->parse(dp->parser); 97 + if (rc) { 98 + DRM_ERROR("device tree parsing failed\n"); 99 + goto end; 100 + } 101 + 102 + rc = dp_aux_register(dp->aux); 103 + if (rc) { 104 + DRM_ERROR("DRM DP AUX register failed\n"); 105 + goto end; 106 + } 107 + 108 + rc = dp_power_client_init(dp->power); 109 + if (rc) { 110 + DRM_ERROR("Power client create failed\n"); 111 + goto end; 112 + } 113 + 114 + end: 115 + return rc; 116 + } 117 + 118 + static void dp_display_unbind(struct device *dev, struct device *master, 119 + void *data) 120 + { 121 + struct dp_display_private *dp; 122 + struct platform_device *pdev = to_platform_device(dev); 123 + struct drm_device *drm = dev_get_drvdata(master); 124 + struct msm_drm_private *priv = drm->dev_private; 125 + 126 + dp = platform_get_drvdata(pdev); 127 + if (!dp) { 128 + DRM_ERROR("Invalid DP driver data\n"); 129 + return; 130 + } 131 + 132 + dp_power_client_deinit(dp->power); 133 + dp_aux_unregister(dp->aux); 134 + priv->dp = NULL; 135 + } 136 + 137 + static const struct component_ops dp_display_comp_ops = { 138 + .bind = dp_display_bind, 139 + .unbind = dp_display_unbind, 140 + }; 141 + 142 + static bool dp_display_is_ds_bridge(struct dp_panel *panel) 143 + { 144 + return (panel->dpcd[DP_DOWNSTREAMPORT_PRESENT] & 145 + DP_DWN_STRM_PORT_PRESENT); 146 + } 147 + 148 + static bool dp_display_is_sink_count_zero(struct dp_display_private *dp) 149 + { 150 + return dp_display_is_ds_bridge(dp->panel) && 151 + (dp->link->sink_count == 0); 152 + } 153 + 154 + static void dp_display_send_hpd_event(struct msm_dp *dp_display) 155 + { 156 + struct dp_display_private *dp; 157 + 
struct drm_connector *connector; 158 + 159 + dp = container_of(dp_display, struct dp_display_private, dp_display); 160 + 161 + connector = dp->dp_display.connector; 162 + drm_helper_hpd_irq_event(connector->dev); 163 + } 164 + 165 + static int dp_display_send_hpd_notification(struct dp_display_private *dp, 166 + bool hpd) 167 + { 168 + static bool encoder_mode_set; 169 + struct msm_drm_private *priv = dp->dp_display.drm_dev->dev_private; 170 + struct msm_kms *kms = priv->kms; 171 + 172 + mutex_lock(&dp->dp_display.connect_mutex); 173 + if ((hpd && dp->dp_display.is_connected) || 174 + (!hpd && !dp->dp_display.is_connected)) { 175 + DRM_DEBUG_DP("HPD already %s\n", (hpd ? "on" : "off")); 176 + mutex_unlock(&dp->dp_display.connect_mutex); 177 + return 0; 178 + } 179 + 180 + /* reset video pattern flag on disconnect */ 181 + if (!hpd) 182 + dp->panel->video_test = false; 183 + 184 + dp->dp_display.is_connected = hpd; 185 + reinit_completion(&dp->notification_comp); 186 + 187 + if (dp->dp_display.is_connected && dp->dp_display.encoder 188 + && !encoder_mode_set 189 + && kms->funcs->set_encoder_mode) { 190 + kms->funcs->set_encoder_mode(kms, 191 + dp->dp_display.encoder, false); 192 + DRM_DEBUG_DP("set_encoder_mode() Completed\n"); 193 + encoder_mode_set = true; 194 + } 195 + 196 + dp_display_send_hpd_event(&dp->dp_display); 197 + 198 + if (!wait_for_completion_timeout(&dp->notification_comp, HZ * 2)) { 199 + pr_warn("%s timeout\n", hpd ? 
"connect" : "disconnect"); 200 + mutex_unlock(&dp->dp_display.connect_mutex); 201 + return -EINVAL; 202 + } 203 + 204 + mutex_unlock(&dp->dp_display.connect_mutex); 205 + return 0; 206 + } 207 + 208 + static int dp_display_process_hpd_high(struct dp_display_private *dp) 209 + { 210 + int rc = 0; 211 + struct edid *edid; 212 + 213 + dp_aux_init(dp->aux); 214 + 215 + if (dp->link->psm_enabled) 216 + goto notify; 217 + 218 + dp->panel->max_dp_lanes = dp->parser->max_dp_lanes; 219 + 220 + rc = dp_panel_read_sink_caps(dp->panel, dp->dp_display.connector); 221 + if (rc) 222 + goto notify; 223 + 224 + dp_link_process_request(dp->link); 225 + 226 + if (dp_display_is_sink_count_zero(dp)) { 227 + DRM_DEBUG_DP("no downstream devices connected\n"); 228 + rc = -EINVAL; 229 + goto end; 230 + } 231 + 232 + edid = dp->panel->edid; 233 + 234 + dp->audio_supported = drm_detect_monitor_audio(edid); 235 + 236 + dp_panel_handle_sink_request(dp->panel); 237 + 238 + dp->dp_display.max_pclk_khz = DP_MAX_PIXEL_CLK_KHZ; 239 + dp->dp_display.max_dp_lanes = dp->parser->max_dp_lanes; 240 + notify: 241 + dp_display_send_hpd_notification(dp, true); 242 + 243 + end: 244 + return rc; 245 + } 246 + 247 + static void dp_display_host_init(struct dp_display_private *dp) 248 + { 249 + bool flip = false; 250 + 251 + if (dp->core_initialized) { 252 + DRM_DEBUG_DP("DP core already initialized\n"); 253 + return; 254 + } 255 + 256 + if (dp->usbpd->orientation == ORIENTATION_CC2) 257 + flip = true; 258 + 259 + dp_power_init(dp->power, flip); 260 + dp_ctrl_host_init(dp->ctrl, flip); 261 + dp_aux_init(dp->aux); 262 + dp->core_initialized = true; 263 + } 264 + 265 + static void dp_display_host_deinit(struct dp_display_private *dp) 266 + { 267 + if (!dp->core_initialized) { 268 + DRM_DEBUG_DP("DP core already off\n"); 269 + return; 270 + } 271 + 272 + dp_ctrl_host_deinit(dp->ctrl); 273 + dp_aux_deinit(dp->aux); 274 + dp_power_deinit(dp->power); 275 + disable_irq(dp->irq); 276 + dp->core_initialized = false; 277 
+ } 278 + 279 + static void dp_display_process_hpd_low(struct dp_display_private *dp) 280 + { 281 + dp_display_send_hpd_notification(dp, false); 282 + 283 + dp_aux_deinit(dp->aux); 284 + } 285 + 286 + static int dp_display_usbpd_configure_cb(struct device *dev) 287 + { 288 + int rc = 0; 289 + struct dp_display_private *dp; 290 + 291 + if (!dev) { 292 + DRM_ERROR("invalid dev\n"); 293 + rc = -EINVAL; 294 + goto end; 295 + } 296 + 297 + dp = dev_get_drvdata(dev); 298 + if (!dp) { 299 + DRM_ERROR("no driver data found\n"); 300 + rc = -ENODEV; 301 + goto end; 302 + } 303 + 304 + dp_display_host_init(dp); 305 + 306 + if (dp->usbpd->hpd_high) 307 + dp_display_process_hpd_high(dp); 308 + end: 309 + return rc; 310 + } 311 + 312 + static void dp_display_clean(struct dp_display_private *dp) 313 + { 314 + dp_ctrl_push_idle(dp->ctrl); 315 + dp_ctrl_off(dp->ctrl); 316 + } 317 + 318 + static int dp_display_usbpd_disconnect_cb(struct device *dev) 319 + { 320 + int rc = 0; 321 + struct dp_display_private *dp; 322 + 323 + dp = dev_get_drvdata(dev); 324 + 325 + rc = dp_display_send_hpd_notification(dp, false); 326 + 327 + /* if cable is disconnected, reset psm_enabled flag */ 328 + if (!dp->usbpd->alt_mode_cfg_done) 329 + dp->link->psm_enabled = false; 330 + 331 + if ((rc < 0) && dp->power_on) 332 + dp_display_clean(dp); 333 + 334 + dp_display_host_deinit(dp); 335 + return rc; 336 + } 337 + 338 + static void dp_display_handle_video_request(struct dp_display_private *dp) 339 + { 340 + if (dp->link->sink_request & DP_TEST_LINK_VIDEO_PATTERN) { 341 + /* force disconnect followed by connect */ 342 + dp->usbpd->connect(dp->usbpd, false); 343 + dp->panel->video_test = true; 344 + dp->usbpd->connect(dp->usbpd, true); 345 + dp_link_send_test_response(dp->link); 346 + } 347 + } 348 + 349 + static int dp_display_handle_hpd_irq(struct dp_display_private *dp) 350 + { 351 + if (dp->link->sink_request & DS_PORT_STATUS_CHANGED) { 352 + dp_display_send_hpd_notification(dp, false); 353 + 354 + if 
(dp_display_is_sink_count_zero(dp)) { 355 + DRM_DEBUG_DP("sink count is zero, nothing to do\n"); 356 + return 0; 357 + } 358 + 359 + return dp_display_process_hpd_high(dp); 360 + } 361 + 362 + dp_ctrl_handle_sink_request(dp->ctrl); 363 + 364 + dp_display_handle_video_request(dp); 365 + 366 + return 0; 367 + } 368 + 369 + static int dp_display_usbpd_attention_cb(struct device *dev) 370 + { 371 + int rc = 0; 372 + struct dp_display_private *dp; 373 + 374 + if (!dev) { 375 + DRM_ERROR("invalid dev\n"); 376 + return -EINVAL; 377 + } 378 + 379 + dp = dev_get_drvdata(dev); 380 + if (!dp) { 381 + DRM_ERROR("no driver data found\n"); 382 + return -ENODEV; 383 + } 384 + 385 + if (dp->usbpd->hpd_irq) { 386 + dp->hpd_irq_on = true; 387 + 388 + rc = dp_link_process_request(dp->link); 389 + /* check for any test request issued by sink */ 390 + if (!rc) 391 + dp_display_handle_hpd_irq(dp); 392 + 393 + dp->hpd_irq_on = false; 394 + goto end; 395 + } 396 + 397 + if (!dp->usbpd->hpd_high) { 398 + dp_display_process_hpd_low(dp); 399 + goto end; 400 + } 401 + 402 + if (dp->usbpd->alt_mode_cfg_done) 403 + dp_display_process_hpd_high(dp); 404 + end: 405 + return rc; 406 + } 407 + 408 + static void dp_display_deinit_sub_modules(struct dp_display_private *dp) 409 + { 410 + dp_ctrl_put(dp->ctrl); 411 + dp_panel_put(dp->panel); 412 + dp_aux_put(dp->aux); 413 + } 414 + 415 + static int dp_init_sub_modules(struct dp_display_private *dp) 416 + { 417 + int rc = 0; 418 + struct device *dev = &dp->pdev->dev; 419 + struct dp_usbpd_cb *cb = &dp->usbpd_cb; 420 + struct dp_panel_in panel_in = { 421 + .dev = dev, 422 + }; 423 + 424 + /* Callback APIs used for cable status change event */ 425 + cb->configure = dp_display_usbpd_configure_cb; 426 + cb->disconnect = dp_display_usbpd_disconnect_cb; 427 + cb->attention = dp_display_usbpd_attention_cb; 428 + 429 + dp->usbpd = dp_hpd_get(dev, cb); 430 + if (IS_ERR(dp->usbpd)) { 431 + rc = PTR_ERR(dp->usbpd); 432 + DRM_ERROR("failed to initialize hpd, rc = 
%d\n", rc); 433 + dp->usbpd = NULL; 434 + goto error; 435 + } 436 + 437 + dp->parser = dp_parser_get(dp->pdev); 438 + if (IS_ERR(dp->parser)) { 439 + rc = PTR_ERR(dp->parser); 440 + DRM_ERROR("failed to initialize parser, rc = %d\n", rc); 441 + dp->parser = NULL; 442 + goto error; 443 + } 444 + 445 + dp->catalog = dp_catalog_get(dev, &dp->parser->io); 446 + if (IS_ERR(dp->catalog)) { 447 + rc = PTR_ERR(dp->catalog); 448 + DRM_ERROR("failed to initialize catalog, rc = %d\n", rc); 449 + dp->catalog = NULL; 450 + goto error; 451 + } 452 + 453 + dp->power = dp_power_get(dp->parser); 454 + if (IS_ERR(dp->power)) { 455 + rc = PTR_ERR(dp->power); 456 + DRM_ERROR("failed to initialize power, rc = %d\n", rc); 457 + dp->power = NULL; 458 + goto error; 459 + } 460 + 461 + dp->aux = dp_aux_get(dev, dp->catalog); 462 + if (IS_ERR(dp->aux)) { 463 + rc = PTR_ERR(dp->aux); 464 + DRM_ERROR("failed to initialize aux, rc = %d\n", rc); 465 + dp->aux = NULL; 466 + goto error; 467 + } 468 + 469 + dp->link = dp_link_get(dev, dp->aux); 470 + if (IS_ERR(dp->link)) { 471 + rc = PTR_ERR(dp->link); 472 + DRM_ERROR("failed to initialize link, rc = %d\n", rc); 473 + dp->link = NULL; 474 + goto error_link; 475 + } 476 + 477 + panel_in.aux = dp->aux; 478 + panel_in.catalog = dp->catalog; 479 + panel_in.link = dp->link; 480 + 481 + dp->panel = dp_panel_get(&panel_in); 482 + if (IS_ERR(dp->panel)) { 483 + rc = PTR_ERR(dp->panel); 484 + DRM_ERROR("failed to initialize panel, rc = %d\n", rc); 485 + dp->panel = NULL; 486 + goto error_link; 487 + } 488 + 489 + dp->ctrl = dp_ctrl_get(dev, dp->link, dp->panel, dp->aux, 490 + dp->power, dp->catalog, dp->parser); 491 + if (IS_ERR(dp->ctrl)) { 492 + rc = PTR_ERR(dp->ctrl); 493 + DRM_ERROR("failed to initialize ctrl, rc = %d\n", rc); 494 + dp->ctrl = NULL; 495 + goto error_ctrl; 496 + } 497 + 498 + return rc; 499 + error_ctrl: 500 + dp_panel_put(dp->panel); 501 + error_link: 502 + dp_aux_put(dp->aux); 503 + error: 504 + return rc; 505 + } 506 + 507 + static 
int dp_display_set_mode(struct msm_dp *dp_display, 508 + struct dp_display_mode *mode) 509 + { 510 + struct dp_display_private *dp; 511 + 512 + dp = container_of(dp_display, struct dp_display_private, dp_display); 513 + 514 + dp->panel->dp_mode.drm_mode = mode->drm_mode; 515 + dp->panel->dp_mode.bpp = mode->bpp; 516 + dp->panel->dp_mode.capabilities = mode->capabilities; 517 + dp_panel_init_panel_info(dp->panel); 518 + return 0; 519 + } 520 + 521 + static int dp_display_prepare(struct msm_dp *dp) 522 + { 523 + return 0; 524 + } 525 + 526 + static void dp_display_dump(struct msm_dp *dp_display) 527 + { 528 + struct dp_display_private *dp; 529 + 530 + dp = container_of(dp_display, struct dp_display_private, dp_display); 531 + 532 + dp_panel_dump_regs(dp->panel); 533 + } 534 + 535 + static int dp_display_enable(struct msm_dp *dp_display) 536 + { 537 + int rc = 0; 538 + struct dp_display_private *dp; 539 + bool dump_dp = false; 540 + 541 + dp = container_of(dp_display, struct dp_display_private, dp_display); 542 + 543 + if (dp->power_on) { 544 + DRM_DEBUG_DP("Link already setup, return\n"); 545 + return 0; 546 + } 547 + 548 + rc = dp_ctrl_on(dp->ctrl); 549 + if (!rc) 550 + dp->power_on = true; 551 + 552 + if (dump_dp != false) 553 + dp_display_dump(dp_display); 554 + 555 + return rc; 556 + } 557 + 558 + static int dp_display_post_enable(struct msm_dp *dp_display) 559 + { 560 + struct dp_display_private *dp; 561 + 562 + dp = container_of(dp_display, struct dp_display_private, dp_display); 563 + 564 + complete_all(&dp->notification_comp); 565 + return 0; 566 + } 567 + 568 + static int dp_display_pre_disable(struct msm_dp *dp_display) 569 + { 570 + struct dp_display_private *dp; 571 + 572 + dp = container_of(dp_display, struct dp_display_private, dp_display); 573 + 574 + if (dp->usbpd->alt_mode_cfg_done) 575 + dp_link_psm_config(dp->link, &dp->panel->link_info, true); 576 + 577 + dp_ctrl_push_idle(dp->ctrl); 578 + return 0; 579 + } 580 + 581 + static int 
dp_display_disable(struct msm_dp *dp_display) 582 + { 583 + struct dp_display_private *dp; 584 + 585 + dp = container_of(dp_display, struct dp_display_private, dp_display); 586 + 587 + if (!dp->power_on || !dp->core_initialized) 588 + return -EINVAL; 589 + 590 + dp_ctrl_off(dp->ctrl); 591 + 592 + dp->power_on = false; 593 + 594 + complete_all(&dp->notification_comp); 595 + return 0; 596 + } 597 + 598 + int dp_display_request_irq(struct msm_dp *dp_display) 599 + { 600 + int rc = 0; 601 + struct dp_display_private *dp; 602 + 603 + if (!dp_display) { 604 + DRM_ERROR("invalid input\n"); 605 + return -EINVAL; 606 + } 607 + 608 + dp = container_of(dp_display, struct dp_display_private, dp_display); 609 + 610 + dp->irq = irq_of_parse_and_map(dp->pdev->dev.of_node, 0); 611 + if (dp->irq < 0) { 612 + rc = dp->irq; 613 + DRM_ERROR("failed to get irq: %d\n", rc); 614 + return rc; 615 + } 616 + 617 + rc = devm_request_irq(&dp->pdev->dev, dp->irq, dp_display_irq, 618 + IRQF_TRIGGER_HIGH, "dp_display_isr", dp); 619 + if (rc < 0) { 620 + DRM_ERROR("failed to request IRQ%u: %d\n", 621 + dp->irq, rc); 622 + return rc; 623 + } 624 + disable_irq(dp->irq); 625 + 626 + return 0; 627 + } 628 + 629 + static int dp_display_unprepare(struct msm_dp *dp) 630 + { 631 + return 0; 632 + } 633 + 634 + int dp_display_validate_mode(struct msm_dp *dp, u32 mode_pclk_khz) 635 + { 636 + const u32 num_components = 3, default_bpp = 24; 637 + struct dp_display_private *dp_display; 638 + struct dp_link_info *link_info; 639 + u32 mode_rate_khz = 0, supported_rate_khz = 0, mode_bpp = 0; 640 + 641 + if (!dp || !mode_pclk_khz || !dp->connector) { 642 + DRM_ERROR("invalid params\n"); 643 + return -EINVAL; 644 + } 645 + 646 + dp_display = container_of(dp, struct dp_display_private, dp_display); 647 + link_info = &dp_display->panel->link_info; 648 + 649 + mode_bpp = dp->connector->display_info.bpc * num_components; 650 + if (!mode_bpp) 651 + mode_bpp = default_bpp; 652 + 653 + mode_bpp = 
dp_panel_get_mode_bpp(dp_display->panel, 654 + mode_bpp, mode_pclk_khz); 655 + 656 + mode_rate_khz = mode_pclk_khz * mode_bpp; 657 + supported_rate_khz = link_info->num_lanes * link_info->rate * 8; 658 + 659 + if (mode_rate_khz > supported_rate_khz) 660 + return MODE_BAD; 661 + 662 + return MODE_OK; 663 + } 664 + 665 + int dp_display_get_modes(struct msm_dp *dp, 666 + struct dp_display_mode *dp_mode) 667 + { 668 + struct dp_display_private *dp_display; 669 + int ret = 0; 670 + 671 + if (!dp) { 672 + DRM_ERROR("invalid params\n"); 673 + return 0; 674 + } 675 + 676 + dp_display = container_of(dp, struct dp_display_private, dp_display); 677 + 678 + ret = dp_panel_get_modes(dp_display->panel, 679 + dp->connector, dp_mode); 680 + if (dp_mode->drm_mode.clock) 681 + dp->max_pclk_khz = dp_mode->drm_mode.clock; 682 + return ret; 683 + } 684 + 685 + bool dp_display_check_video_test(struct msm_dp *dp) 686 + { 687 + struct dp_display_private *dp_display; 688 + 689 + dp_display = container_of(dp, struct dp_display_private, dp_display); 690 + 691 + return dp_display->panel->video_test; 692 + } 693 + 694 + int dp_display_get_test_bpp(struct msm_dp *dp) 695 + { 696 + struct dp_display_private *dp_display; 697 + 698 + if (!dp) { 699 + DRM_ERROR("invalid params\n"); 700 + return 0; 701 + } 702 + 703 + dp_display = container_of(dp, struct dp_display_private, dp_display); 704 + 705 + return dp_link_bit_depth_to_bpp( 706 + dp_display->link->test_video.test_bit_depth); 707 + } 708 + 709 + static int dp_display_probe(struct platform_device *pdev) 710 + { 711 + int rc = 0; 712 + struct dp_display_private *dp; 713 + 714 + if (!pdev || !pdev->dev.of_node) { 715 + DRM_ERROR("pdev not found\n"); 716 + return -ENODEV; 717 + } 718 + 719 + dp = devm_kzalloc(&pdev->dev, sizeof(*dp), GFP_KERNEL); 720 + if (!dp) 721 + return -ENOMEM; 722 + 723 + init_completion(&dp->notification_comp); 724 + 725 + dp->pdev = pdev; 726 + dp->name = "drm_dp"; 727 + 728 + rc = dp_init_sub_modules(dp); 729 + if (rc) { 
730 + DRM_ERROR("init sub module failed\n"); 731 + return -EPROBE_DEFER; 732 + } 733 + 734 + platform_set_drvdata(pdev, dp); 735 + 736 + mutex_init(&dp->dp_display.connect_mutex); 737 + g_dp_display = &dp->dp_display; 738 + 739 + rc = component_add(&pdev->dev, &dp_display_comp_ops); 740 + if (rc) { 741 + DRM_ERROR("component add failed, rc=%d\n", rc); 742 + dp_display_deinit_sub_modules(dp); 743 + } 744 + 745 + return rc; 746 + } 747 + 748 + static int dp_display_remove(struct platform_device *pdev) 749 + { 750 + struct dp_display_private *dp; 751 + 752 + dp = platform_get_drvdata(pdev); 753 + 754 + dp_display_deinit_sub_modules(dp); 755 + 756 + component_del(&pdev->dev, &dp_display_comp_ops); 757 + platform_set_drvdata(pdev, NULL); 758 + 759 + return 0; 760 + } 761 + 762 + static int dp_pm_resume(struct device *dev) 763 + { 764 + return 0; 765 + } 766 + 767 + static int dp_pm_suspend(struct device *dev) 768 + { 769 + return 0; 770 + } 771 + 772 + static int dp_pm_prepare(struct device *dev) 773 + { 774 + return 0; 775 + } 776 + 777 + static void dp_pm_complete(struct device *dev) 778 + { 779 + 780 + } 781 + 782 + static const struct dev_pm_ops dp_pm_ops = { 783 + .suspend = dp_pm_suspend, 784 + .resume = dp_pm_resume, 785 + .prepare = dp_pm_prepare, 786 + .complete = dp_pm_complete, 787 + }; 788 + 789 + static struct platform_driver dp_display_driver = { 790 + .probe = dp_display_probe, 791 + .remove = dp_display_remove, 792 + .driver = { 793 + .name = "msm-dp-display", 794 + .of_match_table = dp_dt_match, 795 + .suppress_bind_attrs = true, 796 + .pm = &dp_pm_ops, 797 + }, 798 + }; 799 + 800 + int __init msm_dp_register(void) 801 + { 802 + int ret; 803 + 804 + ret = platform_driver_register(&dp_display_driver); 805 + if (ret) 806 + DRM_ERROR("Dp display driver register failed"); 807 + 808 + return ret; 809 + } 810 + 811 + void __exit msm_dp_unregister(void) 812 + { 813 + platform_driver_unregister(&dp_display_driver); 814 + } 815 + 816 + int 
msm_dp_modeset_init(struct msm_dp *dp_display, struct drm_device *dev, 817 + struct drm_encoder *encoder) 818 + { 819 + struct msm_drm_private *priv; 820 + int ret; 821 + 822 + if (WARN_ON(!encoder) || WARN_ON(!dp_display) || WARN_ON(!dev)) 823 + return -EINVAL; 824 + 825 + priv = dev->dev_private; 826 + dp_display->drm_dev = dev; 827 + 828 + ret = dp_display_request_irq(dp_display); 829 + if (ret) { 830 + DRM_ERROR("request_irq failed, ret=%d\n", ret); 831 + return ret; 832 + } 833 + 834 + dp_display->encoder = encoder; 835 + 836 + dp_display->connector = dp_drm_connector_init(dp_display); 837 + if (IS_ERR(dp_display->connector)) { 838 + ret = PTR_ERR(dp_display->connector); 839 + DRM_DEV_ERROR(dev->dev, 840 + "failed to create dp connector: %d\n", ret); 841 + dp_display->connector = NULL; 842 + return ret; 843 + } 844 + 845 + priv->connectors[priv->num_connectors++] = dp_display->connector; 846 + return 0; 847 + } 848 + 849 + int msm_dp_display_enable(struct msm_dp *dp, struct drm_encoder *encoder) 850 + { 851 + int rc = 0; 852 + struct dp_display_private *dp_display; 853 + 854 + dp_display = container_of(dp, struct dp_display_private, dp_display); 855 + if (!dp_display->dp_mode.drm_mode.clock) { 856 + DRM_ERROR("invalid params\n"); 857 + return -EINVAL; 858 + } 859 + 860 + rc = dp_display_set_mode(dp, &dp_display->dp_mode); 861 + if (rc) { 862 + DRM_ERROR("Failed to perform a mode set, rc=%d\n", rc); 863 + return rc; 864 + } 865 + 866 + rc = dp_display_prepare(dp); 867 + if (rc) { 868 + DRM_ERROR("DP display prepare failed, rc=%d\n", rc); 869 + return rc; 870 + } 871 + 872 + rc = dp_display_enable(dp); 873 + if (rc) { 874 + DRM_ERROR("DP display enable failed, rc=%d\n", rc); 875 + dp_display_unprepare(dp); 876 + return rc; 877 + } 878 + 879 + rc = dp_display_post_enable(dp); 880 + if (rc) { 881 + DRM_ERROR("DP display post enable failed, rc=%d\n", rc); 882 + dp_display_disable(dp); 883 + dp_display_unprepare(dp); 884 + } 885 + return rc; 886 + } 887 + 888 + int 
msm_dp_display_disable(struct msm_dp *dp, struct drm_encoder *encoder) 889 + { 890 + int rc = 0; 891 + 892 + rc = dp_display_pre_disable(dp); 893 + if (rc) { 894 + DRM_ERROR("DP display pre disable failed, rc=%d\n", rc); 895 + return rc; 896 + } 897 + 898 + rc = dp_display_disable(dp); 899 + if (rc) { 900 + DRM_ERROR("DP display disable failed, rc=%d\n", rc); 901 + return rc; 902 + } 903 + 904 + rc = dp_display_unprepare(dp); 905 + if (rc) 906 + DRM_ERROR("DP display unprepare failed, rc=%d\n", rc); 907 + 908 + return rc; 909 + } 910 + 911 + void msm_dp_display_mode_set(struct msm_dp *dp, struct drm_encoder *encoder, 912 + struct drm_display_mode *mode, 913 + struct drm_display_mode *adjusted_mode) 914 + { 915 + struct dp_display_private *dp_display; 916 + 917 + dp_display = container_of(dp, struct dp_display_private, dp_display); 918 + 919 + memset(&dp_display->dp_mode, 0x0, sizeof(struct dp_display_mode)); 920 + 921 + if (dp_display_check_video_test(dp)) 922 + dp_display->dp_mode.bpp = dp_display_get_test_bpp(dp); 923 + else /* Default num_components per pixel = 3 */ 924 + dp_display->dp_mode.bpp = dp->connector->display_info.bpc * 3; 925 + 926 + if (!dp_display->dp_mode.bpp) 927 + dp_display->dp_mode.bpp = 24; /* Default bpp */ 928 + 929 + drm_mode_copy(&dp_display->dp_mode.drm_mode, adjusted_mode); 930 + 931 + dp_display->dp_mode.v_active_low = 932 + !!(dp_display->dp_mode.drm_mode.flags & DRM_MODE_FLAG_NVSYNC); 933 + 934 + dp_display->dp_mode.h_active_low = 935 + !!(dp_display->dp_mode.drm_mode.flags & DRM_MODE_FLAG_NHSYNC); 936 + }
+28
drivers/gpu/drm/msm/dp/dp_display.h
··· 1 + /* SPDX-License-Identifier: GPL-2.0-only */ 2 + /* 3 + * Copyright (c) 2017-2020, The Linux Foundation. All rights reserved. 4 + */ 5 + 6 + #ifndef _DP_DISPLAY_H_ 7 + #define _DP_DISPLAY_H_ 8 + 9 + #include "dp_panel.h" 10 + 11 + struct msm_dp { 12 + struct drm_device *drm_dev; 13 + struct drm_connector *connector; 14 + struct drm_encoder *encoder; 15 + bool is_connected; 16 + struct mutex connect_mutex; 17 + u32 max_pclk_khz; 18 + u32 max_dp_lanes; 19 + }; 20 + 21 + int dp_display_validate_mode(struct msm_dp *dp_display, u32 mode_pclk_khz); 22 + int dp_display_get_modes(struct msm_dp *dp_display, 23 + struct dp_display_mode *dp_mode); 24 + int dp_display_request_irq(struct msm_dp *dp_display); 25 + bool dp_display_check_video_test(struct msm_dp *dp_display); 26 + int dp_display_get_test_bpp(struct msm_dp *dp_display); 27 + 28 + #endif /* _DP_DISPLAY_H_ */
+168
drivers/gpu/drm/msm/dp/dp_drm.c
··· 1 + // SPDX-License-Identifier: GPL-2.0-only 2 + /* 3 + * Copyright (c) 2017-2020, The Linux Foundation. All rights reserved. 4 + */ 5 + 6 + #include <drm/drm_atomic_helper.h> 7 + #include <drm/drm_atomic.h> 8 + #include <drm/drm_crtc.h> 9 + 10 + #include "msm_drv.h" 11 + #include "msm_kms.h" 12 + #include "dp_drm.h" 13 + 14 + struct dp_connector { 15 + struct drm_connector base; 16 + struct msm_dp *dp_display; 17 + }; 18 + #define to_dp_connector(x) container_of(x, struct dp_connector, base) 19 + 20 + /** 21 + * dp_connector_detect - callback to determine if connector is connected 22 + * @conn: Pointer to drm connector structure 23 + * @force: Force detect setting from drm framework 24 + * Returns: Connector 'is connected' status 25 + */ 26 + static enum drm_connector_status dp_connector_detect(struct drm_connector *conn, 27 + bool force) 28 + { 29 + struct msm_dp *dp; 30 + 31 + dp = to_dp_connector(conn)->dp_display; 32 + 33 + DRM_DEBUG_DP("is_connected = %s\n", 34 + (dp->is_connected) ? "true" : "false"); 35 + 36 + return (dp->is_connected) ? connector_status_connected : 37 + connector_status_disconnected; 38 + } 39 + 40 + /** 41 + * dp_connector_get_modes - callback to add drm modes via drm_mode_probed_add() 42 + * @connector: Pointer to drm connector structure 43 + * Returns: Number of modes added 44 + */ 45 + static int dp_connector_get_modes(struct drm_connector *connector) 46 + { 47 + int rc = 0; 48 + struct msm_dp *dp; 49 + struct dp_display_mode *dp_mode = NULL; 50 + struct drm_display_mode *m, drm_mode; 51 + 52 + if (!connector) 53 + return 0; 54 + 55 + dp = to_dp_connector(connector)->dp_display; 56 + 57 + dp_mode = kzalloc(sizeof(*dp_mode), GFP_KERNEL); 58 + if (!dp_mode) 59 + return 0; 60 + 61 + mutex_lock(&dp->connect_mutex); 62 + /* pluggable case assumes EDID is read when HPD */ 63 + if (dp->is_connected) { 64 + /* 65 + *The get_modes() function might return one mode that is stored 66 + * in dp_mode when compliance test is in progress. 
If not, the 67 + * return value is equal to the total number of modes supported 68 + * by the sink 69 + */ 70 + rc = dp_display_get_modes(dp, dp_mode); 71 + if (rc <= 0) { 72 + DRM_ERROR("failed to get DP sink modes, rc=%d\n", rc); 73 + kfree(dp_mode); 74 + mutex_unlock(&dp->connect_mutex); 75 + return rc; 76 + } 77 + if (dp_mode->drm_mode.clock) { /* valid DP mode */ 78 + memset(&drm_mode, 0x0, sizeof(drm_mode)); 79 + drm_mode_copy(&drm_mode, &dp_mode->drm_mode); 80 + m = drm_mode_duplicate(connector->dev, &drm_mode); 81 + if (!m) { 82 + DRM_ERROR("failed to add mode %ux%u\n", 83 + drm_mode.hdisplay, 84 + drm_mode.vdisplay); 85 + kfree(dp_mode); 86 + mutex_unlock(&dp->connect_mutex); 87 + return 0; 88 + } 89 + drm_mode_probed_add(connector, m); 90 + } 91 + } else { 92 + DRM_DEBUG_DP("No sink connected\n"); 93 + } 94 + mutex_unlock(&dp->connect_mutex); 95 + kfree(dp_mode); 96 + return rc; 97 + } 98 + 99 + /** 100 + * dp_connector_mode_valid - callback to determine if specified mode is valid 101 + * @connector: Pointer to drm connector structure 102 + * @mode: Pointer to drm mode structure 103 + * Returns: Validity status for specified mode 104 + */ 105 + static enum drm_mode_status dp_connector_mode_valid( 106 + struct drm_connector *connector, 107 + struct drm_display_mode *mode) 108 + { 109 + struct msm_dp *dp_disp; 110 + 111 + dp_disp = to_dp_connector(connector)->dp_display; 112 + 113 + if ((dp_disp->max_pclk_khz <= 0) || 114 + (dp_disp->max_pclk_khz > DP_MAX_PIXEL_CLK_KHZ) || 115 + (mode->clock > dp_disp->max_pclk_khz)) 116 + return MODE_BAD; 117 + 118 + return dp_display_validate_mode(dp_disp, mode->clock); 119 + } 120 + 121 + static const struct drm_connector_funcs dp_connector_funcs = { 122 + .detect = dp_connector_detect, 123 + .fill_modes = drm_helper_probe_single_connector_modes, 124 + .destroy = drm_connector_cleanup, 125 + .reset = drm_atomic_helper_connector_reset, 126 + .atomic_duplicate_state = drm_atomic_helper_connector_duplicate_state, 127 + 
.atomic_destroy_state = drm_atomic_helper_connector_destroy_state, 128 + }; 129 + 130 + static const struct drm_connector_helper_funcs dp_connector_helper_funcs = { 131 + .get_modes = dp_connector_get_modes, 132 + .mode_valid = dp_connector_mode_valid, 133 + }; 134 + 135 + /* connector initialization */ 136 + struct drm_connector *dp_drm_connector_init(struct msm_dp *dp_display) 137 + { 138 + struct drm_connector *connector = NULL; 139 + struct dp_connector *dp_connector; 140 + int ret; 141 + 142 + dp_connector = devm_kzalloc(dp_display->drm_dev->dev, 143 + sizeof(*dp_connector), 144 + GFP_KERNEL); 145 + if (!dp_connector) 146 + return ERR_PTR(-ENOMEM); 147 + 148 + dp_connector->dp_display = dp_display; 149 + 150 + connector = &dp_connector->base; 151 + 152 + ret = drm_connector_init(dp_display->drm_dev, connector, 153 + &dp_connector_funcs, 154 + DRM_MODE_CONNECTOR_DisplayPort); 155 + if (ret) 156 + return ERR_PTR(ret); 157 + 158 + drm_connector_helper_add(connector, &dp_connector_helper_funcs); 159 + 160 + /* 161 + * Enable HPD to let hpd event is handled when cable is connected. 162 + */ 163 + connector->polled = DRM_CONNECTOR_POLL_HPD; 164 + 165 + drm_connector_attach_encoder(connector, dp_display->encoder); 166 + 167 + return connector; 168 + }
+18
drivers/gpu/drm/msm/dp/dp_drm.h
··· 1 + /* SPDX-License-Identifier: GPL-2.0-only */ 2 + /* 3 + * Copyright (c) 2017-2020, The Linux Foundation. All rights reserved. 4 + */ 5 + 6 + #ifndef _DP_DRM_H_ 7 + #define _DP_DRM_H_ 8 + 9 + #include <linux/types.h> 10 + #include <drm/drm_crtc.h> 11 + #include <drm/drm_crtc_helper.h> 12 + 13 + #include "msm_drv.h" 14 + #include "dp_display.h" 15 + 16 + struct drm_connector *dp_drm_connector_init(struct msm_dp *dp_display); 17 + 18 + #endif /* _DP_DRM_H_ */
+69
drivers/gpu/drm/msm/dp/dp_hpd.c
··· 1 + // SPDX-License-Identifier: GPL-2.0-only 2 + /* 3 + * Copyright (c) 2012-2020, The Linux Foundation. All rights reserved. 4 + */ 5 + 6 + #define pr_fmt(fmt) "[drm-dp] %s: " fmt, __func__ 7 + 8 + #include <linux/slab.h> 9 + #include <linux/device.h> 10 + 11 + #include "dp_hpd.h" 12 + 13 + /* DP specific VDM commands */ 14 + #define DP_USBPD_VDM_STATUS 0x10 15 + #define DP_USBPD_VDM_CONFIGURE 0x11 16 + 17 + /* USBPD-TypeC specific Macros */ 18 + #define VDM_VERSION 0x0 19 + #define USB_C_DP_SID 0xFF01 20 + 21 + struct dp_hpd_private { 22 + struct device *dev; 23 + struct dp_usbpd_cb *dp_cb; 24 + struct dp_usbpd dp_usbpd; 25 + }; 26 + 27 + static int dp_hpd_connect(struct dp_usbpd *dp_usbpd, bool hpd) 28 + { 29 + int rc = 0; 30 + struct dp_hpd_private *hpd_priv; 31 + 32 + hpd_priv = container_of(dp_usbpd, struct dp_hpd_private, 33 + dp_usbpd); 34 + 35 + dp_usbpd->hpd_high = hpd; 36 + 37 + if (!hpd_priv->dp_cb && !hpd_priv->dp_cb->configure 38 + && !hpd_priv->dp_cb->disconnect) { 39 + pr_err("hpd dp_cb not initialized\n"); 40 + return -EINVAL; 41 + } 42 + if (hpd) 43 + hpd_priv->dp_cb->configure(hpd_priv->dev); 44 + else 45 + hpd_priv->dp_cb->disconnect(hpd_priv->dev); 46 + 47 + return rc; 48 + } 49 + 50 + struct dp_usbpd *dp_hpd_get(struct device *dev, struct dp_usbpd_cb *cb) 51 + { 52 + struct dp_hpd_private *dp_hpd; 53 + 54 + if (!cb) { 55 + pr_err("invalid cb data\n"); 56 + return ERR_PTR(-EINVAL); 57 + } 58 + 59 + dp_hpd = devm_kzalloc(dev, sizeof(*dp_hpd), GFP_KERNEL); 60 + if (!dp_hpd) 61 + return ERR_PTR(-ENOMEM); 62 + 63 + dp_hpd->dev = dev; 64 + dp_hpd->dp_cb = cb; 65 + 66 + dp_hpd->dp_usbpd.connect = dp_hpd_connect; 67 + 68 + return &dp_hpd->dp_usbpd; 69 + }
+79
drivers/gpu/drm/msm/dp/dp_hpd.h
··· 1 + /* SPDX-License-Identifier: GPL-2.0-only */ 2 + /* 3 + * Copyright (c) 2012-2020, The Linux Foundation. All rights reserved. 4 + */ 5 + 6 + #ifndef _DP_HPD_H_ 7 + #define _DP_HPD_H_ 8 + 9 + //#include <linux/usb/usbpd.h> 10 + 11 + #include <linux/types.h> 12 + #include <linux/device.h> 13 + 14 + enum plug_orientation { 15 + ORIENTATION_NONE, 16 + ORIENTATION_CC1, 17 + ORIENTATION_CC2, 18 + }; 19 + 20 + /** 21 + * struct dp_usbpd - DisplayPort status 22 + * 23 + * @orientation: plug orientation configuration 24 + * @low_pow_st: low power state 25 + * @adaptor_dp_en: adaptor functionality enabled 26 + * @multi_func: multi-function preferred 27 + * @usb_config_req: request to switch to usb 28 + * @exit_dp_mode: request exit from displayport mode 29 + * @hpd_high: Hot Plug Detect signal is high. 30 + * @hpd_irq: Change in the status since last message 31 + * @alt_mode_cfg_done: bool to specify alt mode status 32 + * @debug_en: bool to specify debug mode 33 + * @connect: simulate disconnect or connect for debug mode 34 + */ 35 + struct dp_usbpd { 36 + enum plug_orientation orientation; 37 + bool low_pow_st; 38 + bool adaptor_dp_en; 39 + bool multi_func; 40 + bool usb_config_req; 41 + bool exit_dp_mode; 42 + bool hpd_high; 43 + bool hpd_irq; 44 + bool alt_mode_cfg_done; 45 + bool debug_en; 46 + 47 + int (*connect)(struct dp_usbpd *dp_usbpd, bool hpd); 48 + }; 49 + 50 + /** 51 + * struct dp_usbpd_cb - callback functions provided by the client 52 + * 53 + * @configure: called by usbpd module when PD communication has 54 + * been completed and the usb peripheral has been configured on 55 + * dp mode. 56 + * @disconnect: notify the cable disconnect issued by usb. 57 + * @attention: notify any attention message issued by usb. 
58 + */ 59 + struct dp_usbpd_cb { 60 + int (*configure)(struct device *dev); 61 + int (*disconnect)(struct device *dev); 62 + int (*attention)(struct device *dev); 63 + }; 64 + 65 + /** 66 + * dp_hpd_get() - setup hpd module 67 + * 68 + * @dev: device instance of the caller 69 + * @cb: struct containing callback function pointers. 70 + * 71 + * This function allows the client to initialize the usbpd 72 + * module. The module will communicate with HPD module. 73 + */ 74 + struct dp_usbpd *dp_hpd_get(struct device *dev, struct dp_usbpd_cb *cb); 75 + 76 + int dp_hpd_register(struct dp_usbpd *dp_usbpd); 77 + void dp_hpd_unregister(struct dp_usbpd *dp_usbpd); 78 + 79 + #endif /* _DP_HPD_H_ */
+1214
drivers/gpu/drm/msm/dp/dp_link.c
··· 1 + // SPDX-License-Identifier: GPL-2.0-only 2 + /* 3 + * Copyright (c) 2012-2020, The Linux Foundation. All rights reserved. 4 + */ 5 + 6 + #define pr_fmt(fmt) "[drm-dp] %s: " fmt, __func__ 7 + 8 + #include "dp_link.h" 9 + #include "dp_panel.h" 10 + 11 + #define DP_TEST_REQUEST_MASK 0x7F 12 + 13 + enum audio_sample_rate { 14 + AUDIO_SAMPLE_RATE_32_KHZ = 0x00, 15 + AUDIO_SAMPLE_RATE_44_1_KHZ = 0x01, 16 + AUDIO_SAMPLE_RATE_48_KHZ = 0x02, 17 + AUDIO_SAMPLE_RATE_88_2_KHZ = 0x03, 18 + AUDIO_SAMPLE_RATE_96_KHZ = 0x04, 19 + AUDIO_SAMPLE_RATE_176_4_KHZ = 0x05, 20 + AUDIO_SAMPLE_RATE_192_KHZ = 0x06, 21 + }; 22 + 23 + enum audio_pattern_type { 24 + AUDIO_TEST_PATTERN_OPERATOR_DEFINED = 0x00, 25 + AUDIO_TEST_PATTERN_SAWTOOTH = 0x01, 26 + }; 27 + 28 + struct dp_link_request { 29 + u32 test_requested; 30 + u32 test_link_rate; 31 + u32 test_lane_count; 32 + }; 33 + 34 + struct dp_link_private { 35 + u32 prev_sink_count; 36 + struct device *dev; 37 + struct drm_dp_aux *aux; 38 + struct dp_link dp_link; 39 + 40 + struct dp_link_request request; 41 + struct mutex test_response_mutex; 42 + struct mutex psm_mutex; 43 + u8 link_status[DP_LINK_STATUS_SIZE]; 44 + }; 45 + 46 + static int dp_aux_link_power_up(struct drm_dp_aux *aux, 47 + struct dp_link_info *link) 48 + { 49 + u8 value; 50 + int err; 51 + 52 + if (link->revision < 0x11) 53 + return 0; 54 + 55 + err = drm_dp_dpcd_readb(aux, DP_SET_POWER, &value); 56 + if (err < 0) 57 + return err; 58 + 59 + value &= ~DP_SET_POWER_MASK; 60 + value |= DP_SET_POWER_D0; 61 + 62 + err = drm_dp_dpcd_writeb(aux, DP_SET_POWER, value); 63 + if (err < 0) 64 + return err; 65 + 66 + usleep_range(1000, 2000); 67 + 68 + return 0; 69 + } 70 + 71 + static int dp_aux_link_power_down(struct drm_dp_aux *aux, 72 + struct dp_link_info *link) 73 + { 74 + u8 value; 75 + int err; 76 + 77 + if (link->revision < 0x11) 78 + return 0; 79 + 80 + err = drm_dp_dpcd_readb(aux, DP_SET_POWER, &value); 81 + if (err < 0) 82 + return err; 83 + 84 + value &= 
~DP_SET_POWER_MASK; 85 + value |= DP_SET_POWER_D3; 86 + 87 + err = drm_dp_dpcd_writeb(aux, DP_SET_POWER, value); 88 + if (err < 0) 89 + return err; 90 + 91 + return 0; 92 + } 93 + 94 + static int dp_link_get_period(struct dp_link_private *link, int const addr) 95 + { 96 + int ret = 0; 97 + u8 data; 98 + u32 const max_audio_period = 0xA; 99 + 100 + /* TEST_AUDIO_PERIOD_CH_XX */ 101 + if (drm_dp_dpcd_readb(link->aux, addr, &data) < 0) { 102 + DRM_ERROR("failed to read test_audio_period (0x%x)\n", addr); 103 + ret = -EINVAL; 104 + goto exit; 105 + } 106 + 107 + /* Period - Bits 3:0 */ 108 + data = data & 0xF; 109 + if ((int)data > max_audio_period) { 110 + DRM_ERROR("invalid test_audio_period_ch_1 = 0x%x\n", data); 111 + ret = -EINVAL; 112 + goto exit; 113 + } 114 + 115 + ret = data; 116 + exit: 117 + return ret; 118 + } 119 + 120 + static int dp_link_parse_audio_channel_period(struct dp_link_private *link) 121 + { 122 + int ret = 0; 123 + struct dp_link_test_audio *req = &link->dp_link.test_audio; 124 + 125 + ret = dp_link_get_period(link, DP_TEST_AUDIO_PERIOD_CH1); 126 + if (ret == -EINVAL) 127 + goto exit; 128 + 129 + req->test_audio_period_ch_1 = ret; 130 + DRM_DEBUG_DP("test_audio_period_ch_1 = 0x%x\n", ret); 131 + 132 + ret = dp_link_get_period(link, DP_TEST_AUDIO_PERIOD_CH2); 133 + if (ret == -EINVAL) 134 + goto exit; 135 + 136 + req->test_audio_period_ch_2 = ret; 137 + DRM_DEBUG_DP("test_audio_period_ch_2 = 0x%x\n", ret); 138 + 139 + /* TEST_AUDIO_PERIOD_CH_3 (Byte 0x275) */ 140 + ret = dp_link_get_period(link, DP_TEST_AUDIO_PERIOD_CH3); 141 + if (ret == -EINVAL) 142 + goto exit; 143 + 144 + req->test_audio_period_ch_3 = ret; 145 + DRM_DEBUG_DP("test_audio_period_ch_3 = 0x%x\n", ret); 146 + 147 + ret = dp_link_get_period(link, DP_TEST_AUDIO_PERIOD_CH4); 148 + if (ret == -EINVAL) 149 + goto exit; 150 + 151 + req->test_audio_period_ch_4 = ret; 152 + DRM_DEBUG_DP("test_audio_period_ch_4 = 0x%x\n", ret); 153 + 154 + ret = dp_link_get_period(link, 
DP_TEST_AUDIO_PERIOD_CH5); 155 + if (ret == -EINVAL) 156 + goto exit; 157 + 158 + req->test_audio_period_ch_5 = ret; 159 + DRM_DEBUG_DP("test_audio_period_ch_5 = 0x%x\n", ret); 160 + 161 + ret = dp_link_get_period(link, DP_TEST_AUDIO_PERIOD_CH6); 162 + if (ret == -EINVAL) 163 + goto exit; 164 + 165 + req->test_audio_period_ch_6 = ret; 166 + DRM_DEBUG_DP("test_audio_period_ch_6 = 0x%x\n", ret); 167 + 168 + ret = dp_link_get_period(link, DP_TEST_AUDIO_PERIOD_CH7); 169 + if (ret == -EINVAL) 170 + goto exit; 171 + 172 + req->test_audio_period_ch_7 = ret; 173 + DRM_DEBUG_DP("test_audio_period_ch_7 = 0x%x\n", ret); 174 + 175 + ret = dp_link_get_period(link, DP_TEST_AUDIO_PERIOD_CH8); 176 + if (ret == -EINVAL) 177 + goto exit; 178 + 179 + req->test_audio_period_ch_8 = ret; 180 + DRM_DEBUG_DP("test_audio_period_ch_8 = 0x%x\n", ret); 181 + exit: 182 + return ret; 183 + } 184 + 185 + static int dp_link_parse_audio_pattern_type(struct dp_link_private *link) 186 + { 187 + int ret = 0; 188 + u8 data; 189 + ssize_t rlen; 190 + int const max_audio_pattern_type = 0x1; 191 + 192 + rlen = drm_dp_dpcd_readb(link->aux, 193 + DP_TEST_AUDIO_PATTERN_TYPE, &data); 194 + if (rlen < 0) { 195 + DRM_ERROR("failed to read link audio mode. 
rlen=%zd\n", rlen); 196 + return rlen; 197 + } 198 + 199 + /* Audio Pattern Type - Bits 7:0 */ 200 + if ((int)data > max_audio_pattern_type) { 201 + DRM_ERROR("invalid audio pattern type = 0x%x\n", data); 202 + ret = -EINVAL; 203 + goto exit; 204 + } 205 + 206 + link->dp_link.test_audio.test_audio_pattern_type = data; 207 + DRM_DEBUG_DP("audio pattern type = 0x%x\n", data); 208 + exit: 209 + return ret; 210 + } 211 + 212 + static int dp_link_parse_audio_mode(struct dp_link_private *link) 213 + { 214 + int ret = 0; 215 + u8 data; 216 + ssize_t rlen; 217 + int const max_audio_sampling_rate = 0x6; 218 + int const max_audio_channel_count = 0x8; 219 + int sampling_rate = 0x0; 220 + int channel_count = 0x0; 221 + 222 + rlen = drm_dp_dpcd_readb(link->aux, DP_TEST_AUDIO_MODE, &data); 223 + if (rlen < 0) { 224 + DRM_ERROR("failed to read link audio mode. rlen=%zd\n", rlen); 225 + return rlen; 226 + } 227 + 228 + /* Sampling Rate - Bits 3:0 */ 229 + sampling_rate = data & 0xF; 230 + if (sampling_rate > max_audio_sampling_rate) { 231 + DRM_ERROR("sampling rate (0x%x) greater than max (0x%x)\n", 232 + sampling_rate, max_audio_sampling_rate); 233 + ret = -EINVAL; 234 + goto exit; 235 + } 236 + 237 + /* Channel Count - Bits 7:4 */ 238 + channel_count = ((data & 0xF0) >> 4) + 1; 239 + if (channel_count > max_audio_channel_count) { 240 + DRM_ERROR("channel_count (0x%x) greater than max (0x%x)\n", 241 + channel_count, max_audio_channel_count); 242 + ret = -EINVAL; 243 + goto exit; 244 + } 245 + 246 + link->dp_link.test_audio.test_audio_sampling_rate = sampling_rate; 247 + link->dp_link.test_audio.test_audio_channel_count = channel_count; 248 + DRM_DEBUG_DP("sampling_rate = 0x%x, channel_count = 0x%x\n", 249 + sampling_rate, channel_count); 250 + exit: 251 + return ret; 252 + } 253 + 254 + static int dp_link_parse_audio_pattern_params(struct dp_link_private *link) 255 + { 256 + int ret = 0; 257 + 258 + ret = dp_link_parse_audio_mode(link); 259 + if (ret) 260 + goto exit; 261 + 262 + 
ret = dp_link_parse_audio_pattern_type(link); 263 + if (ret) 264 + goto exit; 265 + 266 + ret = dp_link_parse_audio_channel_period(link); 267 + 268 + exit: 269 + return ret; 270 + } 271 + 272 + static bool dp_link_is_video_pattern_valid(u32 pattern) 273 + { 274 + switch (pattern) { 275 + case DP_NO_TEST_PATTERN: 276 + case DP_COLOR_RAMP: 277 + case DP_BLACK_AND_WHITE_VERTICAL_LINES: 278 + case DP_COLOR_SQUARE: 279 + return true; 280 + default: 281 + return false; 282 + } 283 + } 284 + 285 + /** 286 + * dp_link_is_bit_depth_valid() - validates the bit depth requested 287 + * @tbd: bit depth requested by the sink 288 + * 289 + * Returns true if the requested bit depth is supported. 290 + */ 291 + static bool dp_link_is_bit_depth_valid(u32 tbd) 292 + { 293 + /* DP_TEST_VIDEO_PATTERN_NONE is treated as invalid */ 294 + switch (tbd) { 295 + case DP_TEST_BIT_DEPTH_6: 296 + case DP_TEST_BIT_DEPTH_8: 297 + case DP_TEST_BIT_DEPTH_10: 298 + return true; 299 + default: 300 + return false; 301 + } 302 + } 303 + 304 + static int dp_link_parse_timing_params1(struct dp_link_private *link, 305 + int addr, int len, u32 *val) 306 + { 307 + u8 bp[2]; 308 + int rlen; 309 + 310 + if (len != 2) 311 + return -EINVAL; 312 + 313 + /* Read the requested video link pattern (Byte 0x221). */ 314 + rlen = drm_dp_dpcd_read(link->aux, addr, bp, len); 315 + if (rlen < len) { 316 + DRM_ERROR("failed to read 0x%x\n", addr); 317 + return -EINVAL; 318 + } 319 + 320 + *val = bp[1] | (bp[0] << 8); 321 + 322 + return 0; 323 + } 324 + 325 + static int dp_link_parse_timing_params2(struct dp_link_private *link, 326 + int addr, int len, 327 + u32 *val1, u32 *val2) 328 + { 329 + u8 bp[2]; 330 + int rlen; 331 + 332 + if (len != 2) 333 + return -EINVAL; 334 + 335 + /* Read the requested video link pattern (Byte 0x221). 
*/ 336 + rlen = drm_dp_dpcd_read(link->aux, addr, bp, len); 337 + if (rlen < len) { 338 + DRM_ERROR("failed to read 0x%x\n", addr); 339 + return -EINVAL; 340 + } 341 + 342 + *val1 = (bp[0] & BIT(7)) >> 7; 343 + *val2 = bp[1] | ((bp[0] & 0x7F) << 8); 344 + 345 + return 0; 346 + } 347 + 348 + static int dp_link_parse_timing_params3(struct dp_link_private *link, 349 + int addr, u32 *val) 350 + { 351 + u8 bp; 352 + u32 len = 1; 353 + int rlen; 354 + 355 + rlen = drm_dp_dpcd_read(link->aux, addr, &bp, len); 356 + if (rlen < 1) { 357 + DRM_ERROR("failed to read 0x%x\n", addr); 358 + return -EINVAL; 359 + } 360 + *val = bp; 361 + 362 + return 0; 363 + } 364 + 365 + /** 366 + * dp_parse_video_pattern_params() - parses video pattern parameters from DPCD 367 + * @link: Display Port Driver data 368 + * 369 + * Returns 0 if it successfully parses the video link pattern and the link 370 + * bit depth requested by the sink and, and if the values parsed are valid. 371 + */ 372 + static int dp_link_parse_video_pattern_params(struct dp_link_private *link) 373 + { 374 + int ret = 0; 375 + ssize_t rlen; 376 + u8 bp; 377 + 378 + rlen = drm_dp_dpcd_readb(link->aux, DP_TEST_PATTERN, &bp); 379 + if (rlen < 0) { 380 + DRM_ERROR("failed to read link video pattern. rlen=%zd\n", 381 + rlen); 382 + return rlen; 383 + } 384 + 385 + if (!dp_link_is_video_pattern_valid(bp)) { 386 + DRM_ERROR("invalid link video pattern = 0x%x\n", bp); 387 + ret = -EINVAL; 388 + return ret; 389 + } 390 + 391 + link->dp_link.test_video.test_video_pattern = bp; 392 + 393 + /* Read the requested color bit depth and dynamic range (Byte 0x232) */ 394 + rlen = drm_dp_dpcd_readb(link->aux, DP_TEST_MISC0, &bp); 395 + if (rlen < 0) { 396 + DRM_ERROR("failed to read link bit depth. 
rlen=%zd\n", rlen); 397 + return rlen; 398 + } 399 + 400 + /* Dynamic Range */ 401 + link->dp_link.test_video.test_dyn_range = 402 + (bp & DP_TEST_DYNAMIC_RANGE_CEA); 403 + 404 + /* Color bit depth */ 405 + bp &= DP_TEST_BIT_DEPTH_MASK; 406 + if (!dp_link_is_bit_depth_valid(bp)) { 407 + DRM_ERROR("invalid link bit depth = 0x%x\n", bp); 408 + ret = -EINVAL; 409 + return ret; 410 + } 411 + 412 + link->dp_link.test_video.test_bit_depth = bp; 413 + 414 + /* resolution timing params */ 415 + ret = dp_link_parse_timing_params1(link, DP_TEST_H_TOTAL_HI, 2, 416 + &link->dp_link.test_video.test_h_total); 417 + if (ret) { 418 + DRM_ERROR("failed to parse test_htotal(DP_TEST_H_TOTAL_HI)\n"); 419 + return ret; 420 + } 421 + 422 + ret = dp_link_parse_timing_params1(link, DP_TEST_V_TOTAL_HI, 2, 423 + &link->dp_link.test_video.test_v_total); 424 + if (ret) { 425 + DRM_ERROR("failed to parse test_v_total(DP_TEST_V_TOTAL_HI)\n"); 426 + return ret; 427 + } 428 + 429 + ret = dp_link_parse_timing_params1(link, DP_TEST_H_START_HI, 2, 430 + &link->dp_link.test_video.test_h_start); 431 + if (ret) { 432 + DRM_ERROR("failed to parse test_h_start(DP_TEST_H_START_HI)\n"); 433 + return ret; 434 + } 435 + 436 + ret = dp_link_parse_timing_params1(link, DP_TEST_V_START_HI, 2, 437 + &link->dp_link.test_video.test_v_start); 438 + if (ret) { 439 + DRM_ERROR("failed to parse test_v_start(DP_TEST_V_START_HI)\n"); 440 + return ret; 441 + } 442 + 443 + ret = dp_link_parse_timing_params2(link, DP_TEST_HSYNC_HI, 2, 444 + &link->dp_link.test_video.test_hsync_pol, 445 + &link->dp_link.test_video.test_hsync_width); 446 + if (ret) { 447 + DRM_ERROR("failed to parse (DP_TEST_HSYNC_HI)\n"); 448 + return ret; 449 + } 450 + 451 + ret = dp_link_parse_timing_params2(link, DP_TEST_VSYNC_HI, 2, 452 + &link->dp_link.test_video.test_vsync_pol, 453 + &link->dp_link.test_video.test_vsync_width); 454 + if (ret) { 455 + DRM_ERROR("failed to parse (DP_TEST_VSYNC_HI)\n"); 456 + return ret; 457 + } 458 + 459 + ret = 
dp_link_parse_timing_params1(link, DP_TEST_H_WIDTH_HI, 2, 460 + &link->dp_link.test_video.test_h_width); 461 + if (ret) { 462 + DRM_ERROR("failed to parse test_h_width(DP_TEST_H_WIDTH_HI)\n"); 463 + return ret; 464 + } 465 + 466 + ret = dp_link_parse_timing_params1(link, DP_TEST_V_HEIGHT_HI, 2, 467 + &link->dp_link.test_video.test_v_height); 468 + if (ret) { 469 + DRM_ERROR("failed to parse test_v_height\n"); 470 + return ret; 471 + } 472 + 473 + ret = dp_link_parse_timing_params3(link, DP_TEST_MISC1, 474 + &link->dp_link.test_video.test_rr_d); 475 + link->dp_link.test_video.test_rr_d &= DP_TEST_REFRESH_DENOMINATOR; 476 + if (ret) { 477 + DRM_ERROR("failed to parse test_rr_d (DP_TEST_MISC1)\n"); 478 + return ret; 479 + } 480 + 481 + ret = dp_link_parse_timing_params3(link, DP_TEST_REFRESH_RATE_NUMERATOR, 482 + &link->dp_link.test_video.test_rr_n); 483 + if (ret) { 484 + DRM_ERROR("failed to parse test_rr_n\n"); 485 + return ret; 486 + } 487 + 488 + DRM_DEBUG_DP("link video pattern = 0x%x\n" 489 + "link dynamic range = 0x%x\n" 490 + "link bit depth = 0x%x\n" 491 + "TEST_H_TOTAL = %d, TEST_V_TOTAL = %d\n" 492 + "TEST_H_START = %d, TEST_V_START = %d\n" 493 + "TEST_HSYNC_POL = %d\n" 494 + "TEST_HSYNC_WIDTH = %d\n" 495 + "TEST_VSYNC_POL = %d\n" 496 + "TEST_VSYNC_WIDTH = %d\n" 497 + "TEST_H_WIDTH = %d\n" 498 + "TEST_V_HEIGHT = %d\n" 499 + "TEST_REFRESH_DENOMINATOR = %d\n" 500 + "TEST_REFRESH_NUMERATOR = %d\n", 501 + link->dp_link.test_video.test_video_pattern, 502 + link->dp_link.test_video.test_dyn_range, 503 + link->dp_link.test_video.test_bit_depth, 504 + link->dp_link.test_video.test_h_total, 505 + link->dp_link.test_video.test_v_total, 506 + link->dp_link.test_video.test_h_start, 507 + link->dp_link.test_video.test_v_start, 508 + link->dp_link.test_video.test_hsync_pol, 509 + link->dp_link.test_video.test_hsync_width, 510 + link->dp_link.test_video.test_vsync_pol, 511 + link->dp_link.test_video.test_vsync_width, 512 + link->dp_link.test_video.test_h_width, 513 + 
link->dp_link.test_video.test_v_height, 514 + link->dp_link.test_video.test_rr_d, 515 + link->dp_link.test_video.test_rr_n); 516 + 517 + return ret; 518 + } 519 + 520 + /** 521 + * dp_link_parse_link_training_params() - parses link training parameters from 522 + * DPCD 523 + * @link: Display Port Driver data 524 + * 525 + * Returns 0 if it successfully parses the link rate (Byte 0x219) and lane 526 + * count (Byte 0x220), and if these values parse are valid. 527 + */ 528 + static int dp_link_parse_link_training_params(struct dp_link_private *link) 529 + { 530 + u8 bp; 531 + ssize_t rlen; 532 + 533 + rlen = drm_dp_dpcd_readb(link->aux, DP_TEST_LINK_RATE, &bp); 534 + if (rlen < 0) { 535 + DRM_ERROR("failed to read link rate. rlen=%zd\n", rlen); 536 + return rlen; 537 + } 538 + 539 + if (!is_link_rate_valid(bp)) { 540 + DRM_ERROR("invalid link rate = 0x%x\n", bp); 541 + return -EINVAL; 542 + } 543 + 544 + link->request.test_link_rate = bp; 545 + DRM_DEBUG_DP("link rate = 0x%x\n", link->request.test_link_rate); 546 + 547 + rlen = drm_dp_dpcd_readb(link->aux, DP_TEST_LANE_COUNT, &bp); 548 + if (rlen < 0) { 549 + DRM_ERROR("failed to read lane count. rlen=%zd\n", rlen); 550 + return rlen; 551 + } 552 + bp &= DP_MAX_LANE_COUNT_MASK; 553 + 554 + if (!is_lane_count_valid(bp)) { 555 + DRM_ERROR("invalid lane count = 0x%x\n", bp); 556 + return -EINVAL; 557 + } 558 + 559 + link->request.test_lane_count = bp; 560 + DRM_DEBUG_DP("lane count = 0x%x\n", link->request.test_lane_count); 561 + return 0; 562 + } 563 + 564 + /** 565 + * dp_parse_phy_test_params() - parses the phy link parameters 566 + * @link: Display Port Driver data 567 + * 568 + * Parses the DPCD (Byte 0x248) for the DP PHY link pattern that is being 569 + * requested. 
570 + */ 571 + static int dp_link_parse_phy_test_params(struct dp_link_private *link) 572 + { 573 + u8 data; 574 + ssize_t rlen; 575 + 576 + rlen = drm_dp_dpcd_readb(link->aux, DP_PHY_TEST_PATTERN, 577 + &data); 578 + if (rlen < 0) { 579 + DRM_ERROR("failed to read phy link pattern. rlen=%zd\n", rlen); 580 + return rlen; 581 + } 582 + 583 + link->dp_link.phy_params.phy_test_pattern_sel = data; 584 + 585 + DRM_DEBUG_DP("phy_test_pattern_sel = 0x%x\n", data); 586 + 587 + switch (data) { 588 + case DP_LINK_QUAL_PATTERN_DISABLE: 589 + case DP_LINK_QUAL_PATTERN_D10_2: 590 + case DP_LINK_QUAL_PATTERN_ERROR_RATE: 591 + case DP_LINK_QUAL_PATTERN_PRBS7: 592 + case DP_LINK_QUAL_PATTERN_80BIT_CUSTOM: 593 + case DP_LINK_QUAL_PATTERN_HBR2_EYE: 594 + return 0; 595 + default: 596 + return -EINVAL; 597 + } 598 + } 599 + 600 + /** 601 + * dp_link_is_video_audio_test_requested() - checks for audio/video link request 602 + * @link: link requested by the sink 603 + * 604 + * Returns true if the requested link is a permitted audio/video link. 605 + */ 606 + static bool dp_link_is_video_audio_test_requested(u32 link) 607 + { 608 + u8 video_audio_test = (DP_TEST_LINK_VIDEO_PATTERN | 609 + DP_TEST_LINK_AUDIO_PATTERN | 610 + DP_TEST_LINK_AUDIO_DISABLED_VIDEO); 611 + 612 + return ((link & video_audio_test) && 613 + !(link & ~video_audio_test)); 614 + } 615 + 616 + /** 617 + * dp_link_parse_request() - parses link request parameters from sink 618 + * @link: Display Port Driver data 619 + * 620 + * Parses the DPCD to check if an automated link is requested (Byte 0x201), 621 + * and what type of link automation is being requested (Byte 0x218). 622 + */ 623 + static int dp_link_parse_request(struct dp_link_private *link) 624 + { 625 + int ret = 0; 626 + u8 data; 627 + ssize_t rlen; 628 + 629 + /** 630 + * Read the device service IRQ vector (Byte 0x201) to determine 631 + * whether an automated link has been requested by the sink. 
632 + */ 633 + rlen = drm_dp_dpcd_readb(link->aux, 634 + DP_DEVICE_SERVICE_IRQ_VECTOR, &data); 635 + if (rlen < 0) { 636 + DRM_ERROR("aux read failed. rlen=%zd\n", rlen); 637 + return rlen; 638 + } 639 + 640 + DRM_DEBUG_DP("device service irq vector = 0x%x\n", data); 641 + 642 + if (!(data & DP_AUTOMATED_TEST_REQUEST)) { 643 + DRM_DEBUG_DP("no test requested\n"); 644 + return 0; 645 + } 646 + 647 + /** 648 + * Read the link request byte (Byte 0x218) to determine what type 649 + * of automated link has been requested by the sink. 650 + */ 651 + rlen = drm_dp_dpcd_readb(link->aux, DP_TEST_REQUEST, &data); 652 + if (rlen < 0) { 653 + DRM_ERROR("aux read failed. rlen=%zd\n", rlen); 654 + return rlen; 655 + } 656 + 657 + if (!data || (data == DP_TEST_LINK_FAUX_PATTERN)) { 658 + DRM_DEBUG_DP("link 0x%x not supported\n", data); 659 + goto end; 660 + } 661 + 662 + DRM_DEBUG_DP("Test:(0x%x) requested\n", data); 663 + link->request.test_requested = data; 664 + 665 + if (link->request.test_requested == DP_TEST_LINK_PHY_TEST_PATTERN) { 666 + ret = dp_link_parse_phy_test_params(link); 667 + if (ret) 668 + goto end; 669 + ret = dp_link_parse_link_training_params(link); 670 + if (ret) 671 + goto end; 672 + } 673 + 674 + if (link->request.test_requested == DP_TEST_LINK_TRAINING) { 675 + ret = dp_link_parse_link_training_params(link); 676 + if (ret) 677 + goto end; 678 + } 679 + 680 + if (dp_link_is_video_audio_test_requested( 681 + link->request.test_requested)) { 682 + ret = dp_link_parse_video_pattern_params(link); 683 + if (ret) 684 + goto end; 685 + 686 + ret = dp_link_parse_audio_pattern_params(link); 687 + } 688 + end: 689 + /* 690 + * Send a DP_TEST_ACK if all link parameters are valid, otherwise send 691 + * a DP_TEST_NAK. 
692 + */ 693 + if (ret) { 694 + link->dp_link.test_response = DP_TEST_NAK; 695 + } else { 696 + if (link->request.test_requested != DP_TEST_LINK_EDID_READ) 697 + link->dp_link.test_response = DP_TEST_ACK; 698 + else 699 + link->dp_link.test_response = 700 + DP_TEST_EDID_CHECKSUM_WRITE; 701 + } 702 + 703 + return ret; 704 + } 705 + 706 + /** 707 + * dp_link_parse_sink_count() - parses the sink count 708 + * @dp_link: pointer to link module data 709 + * 710 + * Parses the DPCD to check if there is an update to the sink count 711 + * (Byte 0x200), and whether all the sink devices connected have Content 712 + * Protection enabled. 713 + */ 714 + static int dp_link_parse_sink_count(struct dp_link *dp_link) 715 + { 716 + ssize_t rlen; 717 + bool cp_ready; 718 + 719 + struct dp_link_private *link = container_of(dp_link, 720 + struct dp_link_private, dp_link); 721 + 722 + rlen = drm_dp_dpcd_readb(link->aux, DP_SINK_COUNT, 723 + &link->dp_link.sink_count); 724 + if (rlen < 0) { 725 + DRM_ERROR("sink count read failed. 
rlen=%zd\n", rlen); 726 + return rlen; 727 + } 728 + 729 + cp_ready = link->dp_link.sink_count & DP_SINK_CP_READY; 730 + 731 + link->dp_link.sink_count = 732 + DP_GET_SINK_COUNT(link->dp_link.sink_count); 733 + 734 + DRM_DEBUG_DP("sink_count = 0x%x, cp_ready = 0x%x\n", 735 + link->dp_link.sink_count, cp_ready); 736 + return 0; 737 + } 738 + 739 + static void dp_link_parse_sink_status_field(struct dp_link_private *link) 740 + { 741 + int len = 0; 742 + 743 + link->prev_sink_count = link->dp_link.sink_count; 744 + dp_link_parse_sink_count(&link->dp_link); 745 + 746 + len = drm_dp_dpcd_read_link_status(link->aux, 747 + link->link_status); 748 + if (len < DP_LINK_STATUS_SIZE) 749 + DRM_ERROR("DP link status read failed\n"); 750 + dp_link_parse_request(link); 751 + } 752 + 753 + /** 754 + * dp_link_process_link_training_request() - processes new training requests 755 + * @link: Display Port link data 756 + * 757 + * This function will handle new link training requests that are initiated by 758 + * the sink. In particular, it will update the requested lane count and link 759 + * rate, and then trigger the link retraining procedure. 760 + * 761 + * The function will return 0 if a link training request has been processed, 762 + * otherwise it will return -EINVAL. 
763 + */ 764 + static int dp_link_process_link_training_request(struct dp_link_private *link) 765 + { 766 + if (link->request.test_requested != DP_TEST_LINK_TRAINING) 767 + return -EINVAL; 768 + 769 + DRM_DEBUG_DP("Test:0x%x link rate = 0x%x, lane count = 0x%x\n", 770 + DP_TEST_LINK_TRAINING, 771 + link->request.test_link_rate, 772 + link->request.test_lane_count); 773 + 774 + link->dp_link.link_params.num_lanes = link->request.test_lane_count; 775 + link->dp_link.link_params.rate = link->request.test_link_rate; 776 + 777 + return 0; 778 + } 779 + 780 + bool dp_link_send_test_response(struct dp_link *dp_link) 781 + { 782 + struct dp_link_private *link = NULL; 783 + int ret = 0; 784 + 785 + if (!dp_link) { 786 + DRM_ERROR("invalid input\n"); 787 + return false; 788 + } 789 + 790 + link = container_of(dp_link, struct dp_link_private, dp_link); 791 + 792 + mutex_lock(&link->test_response_mutex); 793 + ret = drm_dp_dpcd_writeb(link->aux, DP_TEST_RESPONSE, 794 + dp_link->test_response); 795 + mutex_unlock(&link->test_response_mutex); 796 + 797 + return ret == 1; 798 + } 799 + 800 + int dp_link_psm_config(struct dp_link *dp_link, 801 + struct dp_link_info *link_info, bool enable) 802 + { 803 + struct dp_link_private *link = NULL; 804 + int ret = 0; 805 + 806 + if (!dp_link) { 807 + DRM_ERROR("invalid params\n"); 808 + return -EINVAL; 809 + } 810 + 811 + link = container_of(dp_link, struct dp_link_private, dp_link); 812 + 813 + mutex_lock(&link->psm_mutex); 814 + if (enable) 815 + ret = dp_aux_link_power_down(link->aux, link_info); 816 + else 817 + ret = dp_aux_link_power_up(link->aux, link_info); 818 + 819 + if (ret) 820 + DRM_ERROR("Failed to %s low power mode\n", enable ? 
821 + "enter" : "exit"); 822 + else 823 + dp_link->psm_enabled = enable; 824 + 825 + mutex_unlock(&link->psm_mutex); 826 + return ret; 827 + } 828 + 829 + bool dp_link_send_edid_checksum(struct dp_link *dp_link, u8 checksum) 830 + { 831 + struct dp_link_private *link = NULL; 832 + int ret = 0; 833 + 834 + if (!dp_link) { 835 + DRM_ERROR("invalid input\n"); 836 + return false; 837 + } 838 + 839 + link = container_of(dp_link, struct dp_link_private, dp_link); 840 + 841 + ret = drm_dp_dpcd_writeb(link->aux, DP_TEST_EDID_CHECKSUM, 842 + checksum); 843 + return ret == 1; 844 + } 845 + 846 + static int dp_link_parse_vx_px(struct dp_link_private *link) 847 + { 848 + int ret = 0; 849 + 850 + DRM_DEBUG_DP("vx: 0=%d, 1=%d, 2=%d, 3=%d\n", 851 + drm_dp_get_adjust_request_voltage(link->link_status, 0), 852 + drm_dp_get_adjust_request_voltage(link->link_status, 1), 853 + drm_dp_get_adjust_request_voltage(link->link_status, 2), 854 + drm_dp_get_adjust_request_voltage(link->link_status, 3)); 855 + 856 + DRM_DEBUG_DP("px: 0=%d, 1=%d, 2=%d, 3=%d\n", 857 + drm_dp_get_adjust_request_pre_emphasis(link->link_status, 0), 858 + drm_dp_get_adjust_request_pre_emphasis(link->link_status, 1), 859 + drm_dp_get_adjust_request_pre_emphasis(link->link_status, 2), 860 + drm_dp_get_adjust_request_pre_emphasis(link->link_status, 3)); 861 + 862 + /** 863 + * Update the voltage and pre-emphasis levels as per DPCD request 864 + * vector. 
865 + */ 866 + DRM_DEBUG_DP("Current: v_level = 0x%x, p_level = 0x%x\n", 867 + link->dp_link.phy_params.v_level, 868 + link->dp_link.phy_params.p_level); 869 + link->dp_link.phy_params.v_level = 870 + drm_dp_get_adjust_request_voltage(link->link_status, 0); 871 + link->dp_link.phy_params.p_level = 872 + drm_dp_get_adjust_request_pre_emphasis(link->link_status, 0); 873 + DRM_DEBUG_DP("Requested: v_level = 0x%x, p_level = 0x%x\n", 874 + link->dp_link.phy_params.v_level, 875 + link->dp_link.phy_params.p_level); 876 + 877 + return ret; 878 + } 879 + 880 + /** 881 + * dp_link_process_phy_test_pattern_request() - process new phy link requests 882 + * @link: Display Port Driver data 883 + * 884 + * This function will handle new phy link pattern requests that are initiated 885 + * by the sink. The function will return 0 if a phy link pattern has been 886 + * processed, otherwise it will return -EINVAL. 887 + */ 888 + static int dp_link_process_phy_test_pattern_request( 889 + struct dp_link_private *link) 890 + { 891 + int ret = 0; 892 + 893 + if (!(link->request.test_requested & DP_TEST_LINK_PHY_TEST_PATTERN)) { 894 + DRM_DEBUG_DP("no phy test\n"); 895 + return -EINVAL; 896 + } 897 + 898 + if (!is_link_rate_valid(link->request.test_link_rate) || 899 + !is_lane_count_valid(link->request.test_lane_count)) { 900 + DRM_ERROR("Invalid: link rate = 0x%x,lane count = 0x%x\n", 901 + link->request.test_link_rate, 902 + link->request.test_lane_count); 903 + return -EINVAL; 904 + } 905 + 906 + DRM_DEBUG_DP("Current: rate = 0x%x, lane count = 0x%x\n", 907 + link->dp_link.link_params.rate, 908 + link->dp_link.link_params.num_lanes); 909 + 910 + DRM_DEBUG_DP("Requested: rate = 0x%x, lane count = 0x%x\n", 911 + link->request.test_link_rate, 912 + link->request.test_lane_count); 913 + 914 + link->dp_link.link_params.num_lanes = link->request.test_lane_count; 915 + link->dp_link.link_params.rate = link->request.test_link_rate; 916 + 917 + ret = dp_link_parse_vx_px(link); 918 + 919 + if 
(ret) 920 + DRM_ERROR("parse_vx_px failed. ret=%d\n", ret); 921 + 922 + return ret; 923 + } 924 + 925 + static u8 get_link_status(const u8 link_status[DP_LINK_STATUS_SIZE], int r) 926 + { 927 + return link_status[r - DP_LANE0_1_STATUS]; 928 + } 929 + 930 + /** 931 + * dp_link_process_link_status_update() - processes link status updates 932 + * @link: Display Port link module data 933 + * 934 + * This function will check for changes in the link status, e.g. clock 935 + * recovery done on all lanes, and trigger link training if there is a 936 + * failure/error on the link. 937 + * 938 + * The function will return 0 if the a link status update has been processed, 939 + * otherwise it will return -EINVAL. 940 + */ 941 + static int dp_link_process_link_status_update(struct dp_link_private *link) 942 + { 943 + if (!(get_link_status(link->link_status, 944 + DP_LANE_ALIGN_STATUS_UPDATED) & 945 + DP_LINK_STATUS_UPDATED) || 946 + (drm_dp_clock_recovery_ok(link->link_status, 947 + link->dp_link.link_params.num_lanes) && 948 + drm_dp_channel_eq_ok(link->link_status, 949 + link->dp_link.link_params.num_lanes))) 950 + return -EINVAL; 951 + 952 + DRM_DEBUG_DP("channel_eq_done = %d, clock_recovery_done = %d\n", 953 + drm_dp_clock_recovery_ok(link->link_status, 954 + link->dp_link.link_params.num_lanes), 955 + drm_dp_clock_recovery_ok(link->link_status, 956 + link->dp_link.link_params.num_lanes)); 957 + 958 + return 0; 959 + } 960 + 961 + /** 962 + * dp_link_process_downstream_port_status_change() - process port status changes 963 + * @link: Display Port Driver data 964 + * 965 + * This function will handle downstream port updates that are initiated by 966 + * the sink. If the downstream port status has changed, the EDID is read via 967 + * AUX. 968 + * 969 + * The function will return 0 if a downstream port update has been 970 + * processed, otherwise it will return -EINVAL. 
971 + */ 972 + static int dp_link_process_ds_port_status_change(struct dp_link_private *link) 973 + { 974 + if (get_link_status(link->link_status, DP_LANE_ALIGN_STATUS_UPDATED) & 975 + DP_DOWNSTREAM_PORT_STATUS_CHANGED) 976 + goto reset; 977 + 978 + if (link->prev_sink_count == link->dp_link.sink_count) 979 + return -EINVAL; 980 + 981 + reset: 982 + /* reset prev_sink_count */ 983 + link->prev_sink_count = link->dp_link.sink_count; 984 + 985 + return 0; 986 + } 987 + 988 + static bool dp_link_is_video_pattern_requested(struct dp_link_private *link) 989 + { 990 + return (link->request.test_requested & DP_TEST_LINK_VIDEO_PATTERN) 991 + && !(link->request.test_requested & 992 + DP_TEST_LINK_AUDIO_DISABLED_VIDEO); 993 + } 994 + 995 + static bool dp_link_is_audio_pattern_requested(struct dp_link_private *link) 996 + { 997 + return (link->request.test_requested & DP_TEST_LINK_AUDIO_PATTERN); 998 + } 999 + 1000 + static void dp_link_reset_data(struct dp_link_private *link) 1001 + { 1002 + link->request = (const struct dp_link_request){ 0 }; 1003 + link->dp_link.test_video = (const struct dp_link_test_video){ 0 }; 1004 + link->dp_link.test_video.test_bit_depth = DP_TEST_BIT_DEPTH_UNKNOWN; 1005 + link->dp_link.test_audio = (const struct dp_link_test_audio){ 0 }; 1006 + link->dp_link.phy_params.phy_test_pattern_sel = 0; 1007 + link->dp_link.sink_request = 0; 1008 + link->dp_link.test_response = 0; 1009 + } 1010 + 1011 + /** 1012 + * dp_link_process_request() - handle HPD IRQ transition to HIGH 1013 + * @dp_link: pointer to link module data 1014 + * 1015 + * This function will handle the HPD IRQ state transitions from LOW to HIGH 1016 + * (including cases when there are back to back HPD IRQ HIGH) indicating 1017 + * the start of a new link training request or sink status update. 
1018 + */ 1019 + int dp_link_process_request(struct dp_link *dp_link) 1020 + { 1021 + int ret = 0; 1022 + struct dp_link_private *link; 1023 + 1024 + if (!dp_link) { 1025 + DRM_ERROR("invalid input\n"); 1026 + return -EINVAL; 1027 + } 1028 + 1029 + link = container_of(dp_link, struct dp_link_private, dp_link); 1030 + 1031 + mutex_lock(&link->test_response_mutex); 1032 + dp_link_reset_data(link); 1033 + 1034 + dp_link_parse_sink_status_field(link); 1035 + mutex_unlock(&link->test_response_mutex); 1036 + 1037 + if (link->request.test_requested == DP_TEST_LINK_EDID_READ) { 1038 + dp_link->sink_request |= DP_TEST_LINK_EDID_READ; 1039 + return ret; 1040 + } 1041 + 1042 + ret = dp_link_process_ds_port_status_change(link); 1043 + if (!ret) { 1044 + dp_link->sink_request |= DS_PORT_STATUS_CHANGED; 1045 + return ret; 1046 + } 1047 + 1048 + ret = dp_link_process_link_training_request(link); 1049 + if (!ret) { 1050 + dp_link->sink_request |= DP_TEST_LINK_TRAINING; 1051 + return ret; 1052 + } 1053 + 1054 + ret = dp_link_process_phy_test_pattern_request(link); 1055 + if (!ret) { 1056 + dp_link->sink_request |= DP_TEST_LINK_PHY_TEST_PATTERN; 1057 + return ret; 1058 + } 1059 + 1060 + ret = dp_link_process_link_status_update(link); 1061 + if (!ret) { 1062 + dp_link->sink_request |= DP_LINK_STATUS_UPDATED; 1063 + return ret; 1064 + } 1065 + 1066 + if (dp_link_is_video_pattern_requested(link)) { 1067 + dp_link->sink_request |= DP_TEST_LINK_VIDEO_PATTERN; 1068 + return -EINVAL; 1069 + } 1070 + 1071 + if (dp_link_is_audio_pattern_requested(link)) { 1072 + dp_link->sink_request |= DP_TEST_LINK_AUDIO_PATTERN; 1073 + return -EINVAL; 1074 + } 1075 + 1076 + return ret; 1077 + } 1078 + 1079 + int dp_link_get_colorimetry_config(struct dp_link *dp_link) 1080 + { 1081 + u32 cc; 1082 + struct dp_link_private *link; 1083 + 1084 + if (!dp_link) { 1085 + DRM_ERROR("invalid input\n"); 1086 + return -EINVAL; 1087 + } 1088 + 1089 + link = container_of(dp_link, struct dp_link_private, dp_link); 1090 + 
1091 + /* 1092 + * Unless a video pattern CTS test is ongoing, use RGB_VESA 1093 + * Only RGB_VESA and RGB_CEA supported for now 1094 + */ 1095 + if (dp_link_is_video_pattern_requested(link)) 1096 + cc = link->dp_link.test_video.test_dyn_range; 1097 + else 1098 + cc = DP_TEST_DYNAMIC_RANGE_VESA; 1099 + 1100 + return cc; 1101 + } 1102 + 1103 + int dp_link_adjust_levels(struct dp_link *dp_link, u8 *link_status) 1104 + { 1105 + int i; 1106 + int v_max = 0, p_max = 0; 1107 + 1108 + if (!dp_link) { 1109 + DRM_ERROR("invalid input\n"); 1110 + return -EINVAL; 1111 + } 1112 + 1113 + /* use the max level across lanes */ 1114 + for (i = 0; i < dp_link->link_params.num_lanes; i++) { 1115 + u8 data_v = drm_dp_get_adjust_request_voltage(link_status, i); 1116 + u8 data_p = drm_dp_get_adjust_request_pre_emphasis(link_status, 1117 + i); 1118 + DRM_DEBUG_DP("lane=%d req_vol_swing=%d req_pre_emphasis=%d\n", 1119 + i, data_v, data_p); 1120 + if (v_max < data_v) 1121 + v_max = data_v; 1122 + if (p_max < data_p) 1123 + p_max = data_p; 1124 + } 1125 + 1126 + dp_link->phy_params.v_level = v_max >> DP_TRAIN_VOLTAGE_SWING_SHIFT; 1127 + dp_link->phy_params.p_level = p_max >> DP_TRAIN_PRE_EMPHASIS_SHIFT; 1128 + 1129 + /** 1130 + * Adjust the voltage swing and pre-emphasis level combination to within 1131 + * the allowable range. 
1132 + */ 1133 + if (dp_link->phy_params.v_level > DP_TRAIN_VOLTAGE_SWING_MAX) { 1134 + DRM_DEBUG_DP("Requested vSwingLevel=%d, change to %d\n", 1135 + dp_link->phy_params.v_level, 1136 + DP_TRAIN_VOLTAGE_SWING_MAX); 1137 + dp_link->phy_params.v_level = DP_TRAIN_VOLTAGE_SWING_MAX; 1138 + } 1139 + 1140 + if (dp_link->phy_params.p_level > DP_TRAIN_PRE_EMPHASIS_MAX) { 1141 + DRM_DEBUG_DP("Requested preEmphasisLevel=%d, change to %d\n", 1142 + dp_link->phy_params.p_level, 1143 + DP_TRAIN_PRE_EMPHASIS_MAX); 1144 + dp_link->phy_params.p_level = DP_TRAIN_PRE_EMPHASIS_MAX; 1145 + } 1146 + 1147 + if ((dp_link->phy_params.p_level > DP_TRAIN_PRE_EMPHASIS_LVL_1) 1148 + && (dp_link->phy_params.v_level == 1149 + DP_TRAIN_VOLTAGE_SWING_LVL_2)) { 1150 + DRM_DEBUG_DP("Requested preEmphasisLevel=%d, change to %d\n", 1151 + dp_link->phy_params.p_level, 1152 + DP_TRAIN_PRE_EMPHASIS_LVL_1); 1153 + dp_link->phy_params.p_level = DP_TRAIN_PRE_EMPHASIS_LVL_1; 1154 + } 1155 + 1156 + DRM_DEBUG_DP("adjusted: v_level=%d, p_level=%d\n", 1157 + dp_link->phy_params.v_level, dp_link->phy_params.p_level); 1158 + 1159 + return 0; 1160 + } 1161 + 1162 + u32 dp_link_get_test_bits_depth(struct dp_link *dp_link, u32 bpp) 1163 + { 1164 + u32 tbd; 1165 + 1166 + /* 1167 + * Few simplistic rules and assumptions made here: 1168 + * 1. Test bit depth is bit depth per color component 1169 + * 2. 
Assume 3 color components 1170 + */ 1171 + switch (bpp) { 1172 + case 18: 1173 + tbd = DP_TEST_BIT_DEPTH_6; 1174 + break; 1175 + case 24: 1176 + tbd = DP_TEST_BIT_DEPTH_8; 1177 + break; 1178 + case 30: 1179 + tbd = DP_TEST_BIT_DEPTH_10; 1180 + break; 1181 + default: 1182 + tbd = DP_TEST_BIT_DEPTH_UNKNOWN; 1183 + break; 1184 + } 1185 + 1186 + if (tbd != DP_TEST_BIT_DEPTH_UNKNOWN) 1187 + tbd = (tbd >> DP_TEST_BIT_DEPTH_SHIFT); 1188 + 1189 + return tbd; 1190 + } 1191 + 1192 + struct dp_link *dp_link_get(struct device *dev, struct drm_dp_aux *aux) 1193 + { 1194 + struct dp_link_private *link; 1195 + struct dp_link *dp_link; 1196 + 1197 + if (!dev || !aux) { 1198 + DRM_ERROR("invalid input\n"); 1199 + return ERR_PTR(-EINVAL); 1200 + } 1201 + 1202 + link = devm_kzalloc(dev, sizeof(*link), GFP_KERNEL); 1203 + if (!link) 1204 + return ERR_PTR(-ENOMEM); 1205 + 1206 + link->dev = dev; 1207 + link->aux = aux; 1208 + 1209 + mutex_init(&link->test_response_mutex); 1210 + mutex_init(&link->psm_mutex); 1211 + dp_link = &link->dp_link; 1212 + 1213 + return dp_link; 1214 + }
+132
drivers/gpu/drm/msm/dp/dp_link.h
··· 1 + /* SPDX-License-Identifier: GPL-2.0-only */ 2 + /* 3 + * Copyright (c) 2012-2020, The Linux Foundation. All rights reserved. 4 + */ 5 + 6 + #ifndef _DP_LINK_H_ 7 + #define _DP_LINK_H_ 8 + 9 + #include "dp_aux.h" 10 + 11 + #define DS_PORT_STATUS_CHANGED 0x200 12 + #define DP_TEST_BIT_DEPTH_UNKNOWN 0xFFFFFFFF 13 + #define DP_LINK_CAP_ENHANCED_FRAMING (1 << 0) 14 + 15 + struct dp_link_info { 16 + unsigned char revision; 17 + unsigned int rate; 18 + unsigned int num_lanes; 19 + unsigned long capabilities; 20 + }; 21 + 22 + enum dp_link_voltage_level { 23 + DP_TRAIN_VOLTAGE_SWING_LVL_0 = 0, 24 + DP_TRAIN_VOLTAGE_SWING_LVL_1 = 1, 25 + DP_TRAIN_VOLTAGE_SWING_LVL_2 = 2, 26 + DP_TRAIN_VOLTAGE_SWING_MAX = DP_TRAIN_VOLTAGE_SWING_LVL_2, 27 + }; 28 + 29 + enum dp_link_preemaphasis_level { 30 + DP_TRAIN_PRE_EMPHASIS_LVL_0 = 0, 31 + DP_TRAIN_PRE_EMPHASIS_LVL_1 = 1, 32 + DP_TRAIN_PRE_EMPHASIS_LVL_2 = 2, 33 + DP_TRAIN_PRE_EMPHASIS_MAX = DP_TRAIN_PRE_EMPHASIS_LVL_2, 34 + }; 35 + 36 + struct dp_link_test_video { 37 + u32 test_video_pattern; 38 + u32 test_bit_depth; 39 + u32 test_dyn_range; 40 + u32 test_h_total; 41 + u32 test_v_total; 42 + u32 test_h_start; 43 + u32 test_v_start; 44 + u32 test_hsync_pol; 45 + u32 test_hsync_width; 46 + u32 test_vsync_pol; 47 + u32 test_vsync_width; 48 + u32 test_h_width; 49 + u32 test_v_height; 50 + u32 test_rr_d; 51 + u32 test_rr_n; 52 + }; 53 + 54 + struct dp_link_test_audio { 55 + u32 test_audio_sampling_rate; 56 + u32 test_audio_channel_count; 57 + u32 test_audio_pattern_type; 58 + u32 test_audio_period_ch_1; 59 + u32 test_audio_period_ch_2; 60 + u32 test_audio_period_ch_3; 61 + u32 test_audio_period_ch_4; 62 + u32 test_audio_period_ch_5; 63 + u32 test_audio_period_ch_6; 64 + u32 test_audio_period_ch_7; 65 + u32 test_audio_period_ch_8; 66 + }; 67 + 68 + struct dp_link_phy_params { 69 + u32 phy_test_pattern_sel; 70 + u8 v_level; 71 + u8 p_level; 72 + }; 73 + 74 + struct dp_link { 75 + u32 sink_request; 76 + u32 test_response; 77 + bool 
psm_enabled; 78 + 79 + u8 sink_count; 80 + struct dp_link_test_video test_video; 81 + struct dp_link_test_audio test_audio; 82 + struct dp_link_phy_params phy_params; 83 + struct dp_link_info link_params; 84 + }; 85 + 86 + /** 87 + * mdss_dp_test_bit_depth_to_bpp() - convert test bit depth to bpp 88 + * @tbd: test bit depth 89 + * 90 + * Returns the bits per pixel (bpp) to be used corresponding to the 91 + * git bit depth value. This function assumes that bit depth has 92 + * already been validated. 93 + */ 94 + static inline u32 dp_link_bit_depth_to_bpp(u32 tbd) 95 + { 96 + /* 97 + * Few simplistic rules and assumptions made here: 98 + * 1. Bit depth is per color component 99 + * 2. If bit depth is unknown return 0 100 + * 3. Assume 3 color components 101 + */ 102 + switch (tbd) { 103 + case DP_TEST_BIT_DEPTH_6: 104 + return 18; 105 + case DP_TEST_BIT_DEPTH_8: 106 + return 24; 107 + case DP_TEST_BIT_DEPTH_10: 108 + return 30; 109 + case DP_TEST_BIT_DEPTH_UNKNOWN: 110 + default: 111 + return 0; 112 + } 113 + } 114 + 115 + u32 dp_link_get_test_bits_depth(struct dp_link *dp_link, u32 bpp); 116 + int dp_link_process_request(struct dp_link *dp_link); 117 + int dp_link_get_colorimetry_config(struct dp_link *dp_link); 118 + int dp_link_adjust_levels(struct dp_link *dp_link, u8 *link_status); 119 + bool dp_link_send_test_response(struct dp_link *dp_link); 120 + int dp_link_psm_config(struct dp_link *dp_link, 121 + struct dp_link_info *link_info, bool enable); 122 + bool dp_link_send_edid_checksum(struct dp_link *dp_link, u8 checksum); 123 + 124 + /** 125 + * dp_link_get() - get the functionalities of dp test module 126 + * 127 + * 128 + * return: a pointer to dp_link struct 129 + */ 130 + struct dp_link *dp_link_get(struct device *dev, struct drm_dp_aux *aux); 131 + 132 + #endif /* _DP_LINK_H_ */
+486
drivers/gpu/drm/msm/dp/dp_panel.c
··· 1 + // SPDX-License-Identifier: GPL-2.0-only 2 + /* 3 + * Copyright (c) 2012-2020, The Linux Foundation. All rights reserved. 4 + */ 5 + 6 + #include "dp_panel.h" 7 + 8 + #include <drm/drm_connector.h> 9 + #include <drm/drm_edid.h> 10 + 11 + #define DP_MAX_DS_PORT_COUNT 1 12 + 13 + struct dp_panel_private { 14 + struct device *dev; 15 + struct dp_panel dp_panel; 16 + struct drm_dp_aux *aux; 17 + struct dp_link *link; 18 + struct dp_catalog *catalog; 19 + bool panel_on; 20 + bool aux_cfg_update_done; 21 + }; 22 + 23 + static int dp_panel_read_dpcd(struct dp_panel *dp_panel) 24 + { 25 + int rc = 0; 26 + size_t rlen; 27 + struct dp_panel_private *panel; 28 + struct dp_link_info *link_info; 29 + u8 *dpcd, major = 0, minor = 0, temp; 30 + u32 dfp_count = 0, offset = DP_DPCD_REV; 31 + 32 + dpcd = dp_panel->dpcd; 33 + 34 + panel = container_of(dp_panel, struct dp_panel_private, dp_panel); 35 + link_info = &dp_panel->link_info; 36 + 37 + rlen = drm_dp_dpcd_read(panel->aux, 38 + DP_TRAINING_AUX_RD_INTERVAL, &temp, 1); 39 + if (rlen < 0) { 40 + DRM_ERROR("err reading DP_TRAINING_AUX_RD_INTERVAL,rlen=%zd\n", 41 + rlen); 42 + rc = -EINVAL; 43 + goto end; 44 + } 45 + 46 + /* check for EXTENDED_RECEIVER_CAPABILITY_FIELD_PRESENT */ 47 + if (temp & BIT(7)) { 48 + DRM_DEBUG_DP("using EXTENDED_RECEIVER_CAPABILITY_FIELD\n"); 49 + offset = DPRX_EXTENDED_DPCD_FIELD; 50 + } 51 + 52 + rlen = drm_dp_dpcd_read(panel->aux, offset, 53 + dpcd, (DP_RECEIVER_CAP_SIZE + 1)); 54 + if (rlen < (DP_RECEIVER_CAP_SIZE + 1)) { 55 + DRM_ERROR("dpcd read failed, rlen=%zd\n", rlen); 56 + if (rlen == -ETIMEDOUT) 57 + rc = rlen; 58 + else 59 + rc = -EINVAL; 60 + 61 + goto end; 62 + } 63 + 64 + print_hex_dump(KERN_DEBUG, "[drm-dp] SINK DPCD: ", 65 + DUMP_PREFIX_NONE, 8, 1, dp_panel->dpcd, rlen, false); 66 + 67 + link_info->revision = dpcd[DP_DPCD_REV]; 68 + major = (link_info->revision >> 4) & 0x0f; 69 + minor = link_info->revision & 0x0f; 70 + 71 + link_info->rate = 
drm_dp_bw_code_to_link_rate(dpcd[DP_MAX_LINK_RATE]); 72 + link_info->num_lanes = dpcd[DP_MAX_LANE_COUNT] & DP_MAX_LANE_COUNT_MASK; 73 + 74 + if (link_info->num_lanes > dp_panel->max_dp_lanes) 75 + link_info->num_lanes = dp_panel->max_dp_lanes; 76 + 77 + /* Limit support upto HBR2 until HBR3 support is added */ 78 + if (link_info->rate >= (drm_dp_bw_code_to_link_rate(DP_LINK_BW_5_4))) 79 + link_info->rate = drm_dp_bw_code_to_link_rate(DP_LINK_BW_5_4); 80 + 81 + DRM_DEBUG_DP("version: %d.%d\n", major, minor); 82 + DRM_DEBUG_DP("link_rate=%d\n", link_info->rate); 83 + DRM_DEBUG_DP("lane_count=%d\n", link_info->num_lanes); 84 + 85 + if (drm_dp_enhanced_frame_cap(dpcd)) 86 + link_info->capabilities |= DP_LINK_CAP_ENHANCED_FRAMING; 87 + 88 + dfp_count = dpcd[DP_DOWN_STREAM_PORT_COUNT] & 89 + DP_DOWN_STREAM_PORT_COUNT; 90 + 91 + if (dfp_count > DP_MAX_DS_PORT_COUNT) { 92 + DRM_ERROR("DS port count %d greater that max (%d) supported\n", 93 + dfp_count, DP_MAX_DS_PORT_COUNT); 94 + return -EINVAL; 95 + } 96 + end: 97 + return rc; 98 + } 99 + 100 + static u32 dp_panel_get_supported_bpp(struct dp_panel *dp_panel, 101 + u32 mode_edid_bpp, u32 mode_pclk_khz) 102 + { 103 + struct dp_link_info *link_info; 104 + const u32 max_supported_bpp = 30, min_supported_bpp = 18; 105 + u32 bpp = 0, data_rate_khz = 0; 106 + 107 + bpp = min_t(u32, mode_edid_bpp, max_supported_bpp); 108 + 109 + link_info = &dp_panel->link_info; 110 + data_rate_khz = link_info->num_lanes * link_info->rate * 8; 111 + 112 + while (bpp > min_supported_bpp) { 113 + if (mode_pclk_khz * bpp <= data_rate_khz) 114 + break; 115 + bpp -= 6; 116 + } 117 + 118 + return bpp; 119 + } 120 + 121 + static void dp_panel_set_test_mode(struct dp_panel_private *panel, 122 + struct dp_display_mode *mode) 123 + { 124 + struct drm_display_mode *drm_mode = NULL; 125 + struct dp_link_test_video *test_info = NULL; 126 + 127 + drm_mode = &mode->drm_mode; 128 + test_info = &panel->link->test_video; 129 + 130 + drm_mode->hdisplay = 
test_info->test_h_width; 131 + drm_mode->hsync_start = drm_mode->hdisplay + test_info->test_h_total - 132 + (test_info->test_h_start + test_info->test_h_width); 133 + drm_mode->hsync_end = drm_mode->hsync_start + 134 + test_info->test_hsync_width; 135 + drm_mode->htotal = drm_mode->hsync_end + test_info->test_h_start - 136 + test_info->test_hsync_width; 137 + 138 + drm_mode->vdisplay = test_info->test_v_height; 139 + drm_mode->vsync_start = drm_mode->vdisplay + test_info->test_v_total - 140 + (test_info->test_v_start + test_info->test_v_height); 141 + drm_mode->vsync_end = drm_mode->vsync_start + 142 + test_info->test_vsync_width; 143 + drm_mode->vtotal = drm_mode->vsync_end + test_info->test_v_start - 144 + test_info->test_vsync_width; 145 + 146 + drm_mode->clock = test_info->test_h_total * 147 + test_info->test_v_total * test_info->test_rr_n; 148 + 149 + drm_mode->type = 0x48; 150 + drm_mode_set_name(drm_mode); 151 + 152 + if (test_info->test_rr_d == 0) 153 + drm_mode->clock /= 1000; 154 + else 155 + drm_mode->clock /= 1001; 156 + 157 + if (test_info->test_h_width == 640) 158 + drm_mode->clock = 25170; 159 + } 160 + 161 + static int dp_panel_update_modes(struct drm_connector *connector, 162 + struct edid *edid) 163 + { 164 + int rc = 0; 165 + 166 + if (edid) { 167 + rc = drm_connector_update_edid_property(connector, edid); 168 + if (rc) { 169 + DRM_ERROR("failed to update edid property %d\n", rc); 170 + return rc; 171 + } 172 + rc = drm_add_edid_modes(connector, edid); 173 + DRM_DEBUG_DP("%s -", __func__); 174 + return rc; 175 + } 176 + 177 + rc = drm_connector_update_edid_property(connector, NULL); 178 + if (rc) 179 + DRM_ERROR("failed to update edid property %d\n", rc); 180 + 181 + return rc; 182 + } 183 + 184 + int dp_panel_read_sink_caps(struct dp_panel *dp_panel, 185 + struct drm_connector *connector) 186 + { 187 + int rc = 0, bw_code; 188 + struct dp_panel_private *panel; 189 + 190 + if (!dp_panel || !connector) { 191 + DRM_ERROR("invalid input\n"); 192 + 
return -EINVAL; 193 + } 194 + 195 + panel = container_of(dp_panel, struct dp_panel_private, dp_panel); 196 + 197 + rc = dp_panel_read_dpcd(dp_panel); 198 + bw_code = drm_dp_link_rate_to_bw_code(dp_panel->link_info.rate); 199 + if (rc || !is_link_rate_valid(bw_code) || 200 + !is_lane_count_valid(dp_panel->link_info.num_lanes) || 201 + (bw_code > dp_panel->max_bw_code)) { 202 + DRM_ERROR("read dpcd failed %d\n", rc); 203 + return rc; 204 + } 205 + rc = drm_dp_read_desc(panel->aux, &dp_panel->desc, 206 + drm_dp_is_branch(dp_panel->dpcd)); 207 + if (rc) { 208 + DRM_ERROR("read sink/branch descriptor failed %d\n", rc); 209 + return rc; 210 + } 211 + 212 + kfree(dp_panel->edid); 213 + dp_panel->edid = NULL; 214 + 215 + dp_panel->edid = drm_get_edid(connector, 216 + &panel->aux->ddc); 217 + if (!dp_panel->edid) { 218 + DRM_ERROR("panel edid read failed\n"); 219 + return -EINVAL; 220 + } 221 + 222 + if (panel->aux_cfg_update_done) { 223 + DRM_DEBUG_DP("read DPCD with updated AUX config\n"); 224 + rc = dp_panel_read_dpcd(dp_panel); 225 + bw_code = drm_dp_link_rate_to_bw_code(dp_panel->link_info.rate); 226 + if (rc || !is_link_rate_valid(bw_code) || 227 + !is_lane_count_valid(dp_panel->link_info.num_lanes) 228 + || (bw_code > dp_panel->max_bw_code)) { 229 + DRM_ERROR("read dpcd failed %d\n", rc); 230 + return rc; 231 + } 232 + panel->aux_cfg_update_done = false; 233 + } 234 + 235 + return 0; 236 + } 237 + 238 + u32 dp_panel_get_mode_bpp(struct dp_panel *dp_panel, 239 + u32 mode_edid_bpp, u32 mode_pclk_khz) 240 + { 241 + struct dp_panel_private *panel; 242 + u32 bpp = mode_edid_bpp; 243 + 244 + if (!dp_panel || !mode_edid_bpp || !mode_pclk_khz) { 245 + DRM_ERROR("invalid input\n"); 246 + return 0; 247 + } 248 + 249 + panel = container_of(dp_panel, struct dp_panel_private, dp_panel); 250 + 251 + if (dp_panel->video_test) 252 + bpp = dp_link_bit_depth_to_bpp( 253 + panel->link->test_video.test_bit_depth); 254 + else 255 + bpp = dp_panel_get_supported_bpp(dp_panel, 
mode_edid_bpp, 256 + mode_pclk_khz); 257 + 258 + return bpp; 259 + } 260 + 261 + int dp_panel_get_modes(struct dp_panel *dp_panel, 262 + struct drm_connector *connector, struct dp_display_mode *mode) 263 + { 264 + struct dp_panel_private *panel; 265 + 266 + if (!dp_panel) { 267 + DRM_ERROR("invalid input\n"); 268 + return -EINVAL; 269 + } 270 + 271 + panel = container_of(dp_panel, struct dp_panel_private, dp_panel); 272 + 273 + if (dp_panel->video_test) { 274 + dp_panel_set_test_mode(panel, mode); 275 + return 1; 276 + } else if (dp_panel->edid) { 277 + return dp_panel_update_modes(connector, dp_panel->edid); 278 + } 279 + 280 + return 0; 281 + } 282 + 283 + static u8 dp_panel_get_edid_checksum(struct edid *edid) 284 + { 285 + struct edid *last_block; 286 + u8 *raw_edid; 287 + bool is_edid_corrupt; 288 + 289 + if (!edid) { 290 + DRM_ERROR("invalid edid input\n"); 291 + return 0; 292 + } 293 + 294 + raw_edid = (u8 *)edid; 295 + raw_edid += (edid->extensions * EDID_LENGTH); 296 + last_block = (struct edid *)raw_edid; 297 + 298 + /* block type extension */ 299 + drm_edid_block_valid(raw_edid, 1, false, &is_edid_corrupt); 300 + if (!is_edid_corrupt) 301 + return last_block->checksum; 302 + 303 + DRM_ERROR("Invalid block, no checksum\n"); 304 + return 0; 305 + } 306 + 307 + void dp_panel_handle_sink_request(struct dp_panel *dp_panel) 308 + { 309 + struct dp_panel_private *panel; 310 + 311 + if (!dp_panel) { 312 + DRM_ERROR("invalid input\n"); 313 + return; 314 + } 315 + 316 + panel = container_of(dp_panel, struct dp_panel_private, dp_panel); 317 + 318 + if (panel->link->sink_request & DP_TEST_LINK_EDID_READ) { 319 + u8 checksum = dp_panel_get_edid_checksum(dp_panel->edid); 320 + 321 + dp_link_send_edid_checksum(panel->link, checksum); 322 + dp_link_send_test_response(panel->link); 323 + } 324 + } 325 + 326 + void dp_panel_tpg_config(struct dp_panel *dp_panel, bool enable) 327 + { 328 + struct dp_catalog *catalog; 329 + struct dp_panel_private *panel; 330 + 331 + if 
(!dp_panel) { 332 + DRM_ERROR("invalid input\n"); 333 + return; 334 + } 335 + 336 + panel = container_of(dp_panel, struct dp_panel_private, dp_panel); 337 + catalog = panel->catalog; 338 + 339 + if (!panel->panel_on) { 340 + DRM_DEBUG_DP("DP panel not enabled, handle TPG on next on\n"); 341 + return; 342 + } 343 + 344 + if (!enable) { 345 + dp_catalog_panel_tpg_disable(catalog); 346 + return; 347 + } 348 + 349 + DRM_DEBUG_DP("%s: calling catalog tpg_enable\n", __func__); 350 + dp_catalog_panel_tpg_enable(catalog, &panel->dp_panel.dp_mode.drm_mode); 351 + } 352 + 353 + void dp_panel_dump_regs(struct dp_panel *dp_panel) 354 + { 355 + struct dp_catalog *catalog; 356 + struct dp_panel_private *panel; 357 + 358 + panel = container_of(dp_panel, struct dp_panel_private, dp_panel); 359 + catalog = panel->catalog; 360 + 361 + dp_catalog_dump_regs(catalog); 362 + } 363 + 364 + int dp_panel_timing_cfg(struct dp_panel *dp_panel) 365 + { 366 + int rc = 0; 367 + u32 data, total_ver, total_hor; 368 + struct dp_catalog *catalog; 369 + struct dp_panel_private *panel; 370 + struct drm_display_mode *drm_mode; 371 + 372 + panel = container_of(dp_panel, struct dp_panel_private, dp_panel); 373 + catalog = panel->catalog; 374 + drm_mode = &panel->dp_panel.dp_mode.drm_mode; 375 + 376 + DRM_DEBUG_DP("width=%d hporch= %d %d %d\n", 377 + drm_mode->hdisplay, drm_mode->htotal - drm_mode->hsync_end, 378 + drm_mode->hsync_start - drm_mode->hdisplay, 379 + drm_mode->hsync_end - drm_mode->hsync_start); 380 + 381 + DRM_DEBUG_DP("height=%d vporch= %d %d %d\n", 382 + drm_mode->vdisplay, drm_mode->vtotal - drm_mode->vsync_end, 383 + drm_mode->vsync_start - drm_mode->vdisplay, 384 + drm_mode->vsync_end - drm_mode->vsync_start); 385 + 386 + total_hor = drm_mode->htotal; 387 + 388 + total_ver = drm_mode->vtotal; 389 + 390 + data = total_ver; 391 + data <<= 16; 392 + data |= total_hor; 393 + 394 + catalog->total = data; 395 + 396 + data = (drm_mode->vtotal - drm_mode->vsync_start); 397 + data <<= 16; 398 
+ data |= (drm_mode->htotal - drm_mode->hsync_start); 399 + 400 + catalog->sync_start = data; 401 + 402 + data = drm_mode->vsync_end - drm_mode->vsync_start; 403 + data <<= 16; 404 + data |= (panel->dp_panel.dp_mode.v_active_low << 31); 405 + data |= drm_mode->hsync_end - drm_mode->hsync_start; 406 + data |= (panel->dp_panel.dp_mode.h_active_low << 15); 407 + 408 + catalog->width_blanking = data; 409 + 410 + data = drm_mode->vdisplay; 411 + data <<= 16; 412 + data |= drm_mode->hdisplay; 413 + 414 + catalog->dp_active = data; 415 + 416 + dp_catalog_panel_timing_cfg(catalog); 417 + panel->panel_on = true; 418 + 419 + return rc; 420 + } 421 + 422 + int dp_panel_init_panel_info(struct dp_panel *dp_panel) 423 + { 424 + int rc = 0; 425 + struct drm_display_mode *drm_mode; 426 + 427 + drm_mode = &dp_panel->dp_mode.drm_mode; 428 + 429 + /* 430 + * print resolution info as this is a result 431 + * of user initiated action of cable connection 432 + */ 433 + DRM_DEBUG_DP("SET NEW RESOLUTION:\n"); 434 + DRM_DEBUG_DP("%dx%d@%dfps\n", drm_mode->hdisplay, 435 + drm_mode->vdisplay, drm_mode_vrefresh(drm_mode)); 436 + DRM_DEBUG_DP("h_porches(back|front|width) = (%d|%d|%d)\n", 437 + drm_mode->htotal - drm_mode->hsync_end, 438 + drm_mode->hsync_start - drm_mode->hdisplay, 439 + drm_mode->hsync_end - drm_mode->hsync_start); 440 + DRM_DEBUG_DP("v_porches(back|front|width) = (%d|%d|%d)\n", 441 + drm_mode->vtotal - drm_mode->vsync_end, 442 + drm_mode->vsync_start - drm_mode->vdisplay, 443 + drm_mode->vsync_end - drm_mode->vsync_start); 444 + DRM_DEBUG_DP("pixel clock (KHz)=(%d)\n", drm_mode->clock); 445 + DRM_DEBUG_DP("bpp = %d\n", dp_panel->dp_mode.bpp); 446 + 447 + dp_panel->dp_mode.bpp = max_t(u32, 18, 448 + min_t(u32, dp_panel->dp_mode.bpp, 30)); 449 + DRM_DEBUG_DP("updated bpp = %d\n", dp_panel->dp_mode.bpp); 450 + 451 + return rc; 452 + } 453 + 454 + struct dp_panel *dp_panel_get(struct dp_panel_in *in) 455 + { 456 + struct dp_panel_private *panel; 457 + struct dp_panel *dp_panel; 
458 + 459 + if (!in->dev || !in->catalog || !in->aux || !in->link) { 460 + DRM_ERROR("invalid input\n"); 461 + return ERR_PTR(-EINVAL); 462 + } 463 + 464 + panel = devm_kzalloc(in->dev, sizeof(*panel), GFP_KERNEL); 465 + if (!panel) 466 + return ERR_PTR(-ENOMEM); 467 + 468 + panel->dev = in->dev; 469 + panel->aux = in->aux; 470 + panel->catalog = in->catalog; 471 + panel->link = in->link; 472 + 473 + dp_panel = &panel->dp_panel; 474 + dp_panel->max_bw_code = DP_LINK_BW_8_1; 475 + panel->aux_cfg_update_done = false; 476 + 477 + return dp_panel; 478 + } 479 + 480 + void dp_panel_put(struct dp_panel *dp_panel) 481 + { 482 + if (!dp_panel) 483 + return; 484 + 485 + kfree(dp_panel->edid); 486 + }
+95
drivers/gpu/drm/msm/dp/dp_panel.h
/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (c) 2012-2020, The Linux Foundation. All rights reserved.
 */

#ifndef _DP_PANEL_H_
#define _DP_PANEL_H_

#include <drm/msm_drm.h>

#include "dp_aux.h"
#include "dp_link.h"
#include "dp_hpd.h"

struct edid;

#define DP_MAX_DOWNSTREAM_PORTS		0x10
/* DPCD offset of the extended receiver capability field (DP 1.3+) */
#define DPRX_EXTENDED_DPCD_FIELD	0x2200

/**
 * struct dp_display_mode - display mode plus DP-specific attributes
 * @drm_mode: DRM timing for the mode
 * @capabilities: mode capability flags
 * @bpp: bits per pixel to drive on the link
 * @h_active_low: horizontal sync polarity (1 = active low)
 * @v_active_low: vertical sync polarity (1 = active low)
 */
struct dp_display_mode {
	struct drm_display_mode drm_mode;
	u32 capabilities;
	u32 bpp;
	u32 h_active_low;
	u32 v_active_low;
};

/* Resources required to construct a dp_panel (see dp_panel_get()) */
struct dp_panel_in {
	struct device *dev;
	struct drm_dp_aux *aux;
	struct dp_link *link;
	struct dp_catalog *catalog;
};

struct dp_panel {
	/* dpcd raw data */
	u8 dpcd[DP_RECEIVER_CAP_SIZE + 1];

	struct dp_link_info link_info;	/* negotiated rate/lanes/caps */
	struct drm_dp_desc desc;	/* sink/branch device descriptor */
	struct edid *edid;		/* cached EDID; freed by dp_panel_put() */
	struct drm_connector *connector;
	struct dp_display_mode dp_mode;	/* currently selected mode */
	bool video_test;		/* compliance test video requested */

	u32 vic;
	u32 max_pclk_khz;
	u32 max_dp_lanes;		/* host-side lane limit (from DT) */

	u32 max_bw_code;		/* highest link-bw code accepted */
};

int dp_panel_init_panel_info(struct dp_panel *dp_panel);
/*
 * NOTE(review): dp_panel_deinit() has no definition in dp_panel.c in this
 * patch — confirm an implementation lands, or drop the declaration.
 */
int dp_panel_deinit(struct dp_panel *dp_panel);
int dp_panel_timing_cfg(struct dp_panel *dp_panel);
void dp_panel_dump_regs(struct dp_panel *dp_panel);
int dp_panel_read_sink_caps(struct dp_panel *dp_panel,
		struct drm_connector *connector);
u32 dp_panel_get_mode_bpp(struct dp_panel *dp_panel, u32 mode_max_bpp,
			u32 mode_pclk_khz);
int dp_panel_get_modes(struct dp_panel *dp_panel,
		struct drm_connector *connector, struct dp_display_mode *mode);
void dp_panel_handle_sink_request(struct dp_panel *dp_panel);
void dp_panel_tpg_config(struct dp_panel *dp_panel, bool enable);

/**
 * is_link_rate_valid() - validates the link rate
 * @bw_code: link bandwidth code requested by the sink
 *
 * Returns true if the requested link rate is supported.
 */
static inline bool is_link_rate_valid(u32 bw_code)
{
	return (bw_code == DP_LINK_BW_1_62 ||
		bw_code == DP_LINK_BW_2_7 ||
		bw_code == DP_LINK_BW_5_4 ||
		bw_code == DP_LINK_BW_8_1);
}

/**
 * is_lane_count_valid() - validates the lane count
 * @lane_count: lane count requested by the sink
 *
 * Returns true if the requested lane count is supported.
 */
static inline bool is_lane_count_valid(u32 lane_count)
{
	return (lane_count == 1 ||
		lane_count == 2 ||
		lane_count == 4);
}

struct dp_panel *dp_panel_get(struct dp_panel_in *in);
void dp_panel_put(struct dp_panel *dp_panel);
#endif /* _DP_PANEL_H_ */
+265
drivers/gpu/drm/msm/dp/dp_parser.c
··· 1 + // SPDX-License-Identifier: GPL-2.0-only 2 + /* 3 + * Copyright (c) 2012-2020, The Linux Foundation. All rights reserved. 4 + */ 5 + 6 + #include <linux/of_gpio.h> 7 + 8 + #include "dp_parser.h" 9 + 10 + static const struct dp_regulator_cfg sdm845_dp_reg_cfg = { 11 + .num = 2, 12 + .regs = { 13 + {"vdda-1p2", 21800, 4 }, /* 1.2 V */ 14 + {"vdda-0p9", 36000, 32 }, /* 0.9 V */ 15 + }, 16 + }; 17 + 18 + static int msm_dss_ioremap(struct platform_device *pdev, 19 + struct dss_io_data *io_data) 20 + { 21 + struct resource *res = NULL; 22 + 23 + res = platform_get_resource(pdev, IORESOURCE_MEM, 0); 24 + if (!res) { 25 + DRM_ERROR("%pS->%s: msm_dss_get_res failed\n", 26 + __builtin_return_address(0), __func__); 27 + return -ENODEV; 28 + } 29 + 30 + io_data->len = (u32)resource_size(res); 31 + io_data->base = ioremap(res->start, io_data->len); 32 + if (!io_data->base) { 33 + DRM_ERROR("%pS->%s: ioremap failed\n", 34 + __builtin_return_address(0), __func__); 35 + return -EIO; 36 + } 37 + 38 + return 0; 39 + } 40 + 41 + static void msm_dss_iounmap(struct dss_io_data *io_data) 42 + { 43 + if (io_data->base) { 44 + iounmap(io_data->base); 45 + io_data->base = NULL; 46 + } 47 + io_data->len = 0; 48 + } 49 + 50 + static void dp_parser_unmap_io_resources(struct dp_parser *parser) 51 + { 52 + struct dp_io *io = &parser->io; 53 + 54 + msm_dss_iounmap(&io->dp_controller); 55 + msm_dss_iounmap(&io->usb3_dp_com); 56 + } 57 + 58 + static int dp_parser_ctrl_res(struct dp_parser *parser) 59 + { 60 + int rc = 0; 61 + struct platform_device *pdev = parser->pdev; 62 + struct dp_io *io = &parser->io; 63 + 64 + rc = msm_dss_ioremap(pdev, &io->dp_controller); 65 + if (rc) { 66 + DRM_ERROR("unable to remap dp io resources, rc=%d\n", rc); 67 + goto err; 68 + } 69 + 70 + return 0; 71 + err: 72 + dp_parser_unmap_io_resources(parser); 73 + return rc; 74 + } 75 + 76 + static int dp_parser_misc(struct dp_parser *parser) 77 + { 78 + struct device_node *of_node = parser->pdev->dev.of_node; 79 + 
int len = 0; 80 + const char *data_lane_property = "data-lanes"; 81 + 82 + len = of_property_count_elems_of_size(of_node, 83 + data_lane_property, sizeof(u32)); 84 + if (len < 0) { 85 + DRM_WARN("Invalid property %s, default max DP lanes = %d\n", 86 + data_lane_property, DP_MAX_NUM_DP_LANES); 87 + len = DP_MAX_NUM_DP_LANES; 88 + } 89 + 90 + parser->max_dp_lanes = len; 91 + 92 + return 0; 93 + } 94 + 95 + static inline bool dp_parser_check_prefix(const char *clk_prefix, 96 + const char *clk_name) 97 + { 98 + return !strncmp(clk_prefix, clk_name, strlen(clk_prefix)); 99 + } 100 + 101 + static int dp_parser_init_clk_data(struct dp_parser *parser) 102 + { 103 + int num_clk, i, rc; 104 + int core_clk_count = 0, ctrl_clk_count = 0; 105 + const char *clk_name; 106 + struct device *dev = &parser->pdev->dev; 107 + struct dss_module_power *core_power = &parser->mp[DP_CORE_PM]; 108 + struct dss_module_power *ctrl_power = &parser->mp[DP_CTRL_PM]; 109 + 110 + num_clk = of_property_count_strings(dev->of_node, "clock-names"); 111 + if (num_clk <= 0) { 112 + DRM_ERROR("no clocks are defined\n"); 113 + return -EINVAL; 114 + } 115 + 116 + for (i = 0; i < num_clk; i++) { 117 + rc = of_property_read_string_index(dev->of_node, 118 + "clock-names", i, &clk_name); 119 + if (rc < 0) 120 + return rc; 121 + 122 + if (dp_parser_check_prefix("core", clk_name)) 123 + core_clk_count++; 124 + 125 + if (dp_parser_check_prefix("ctrl", clk_name)) 126 + ctrl_clk_count++; 127 + 128 + if (dp_parser_check_prefix("stream", clk_name)) 129 + ctrl_clk_count++; 130 + } 131 + 132 + /* Initialize the CORE power module */ 133 + if (core_clk_count == 0) { 134 + DRM_ERROR("no core clocks are defined\n"); 135 + return -EINVAL; 136 + } 137 + 138 + core_power->num_clk = core_clk_count; 139 + core_power->clk_config = devm_kzalloc(dev, 140 + sizeof(struct dss_clk) * core_power->num_clk, 141 + GFP_KERNEL); 142 + if (!core_power->clk_config) 143 + return -EINVAL; 144 + 145 + /* Initialize the CTRL power module */ 146 + 
if (ctrl_clk_count == 0) { 147 + DRM_ERROR("no ctrl clocks are defined\n"); 148 + return -EINVAL; 149 + } 150 + 151 + ctrl_power->num_clk = ctrl_clk_count; 152 + ctrl_power->clk_config = devm_kzalloc(dev, 153 + sizeof(struct dss_clk) * ctrl_power->num_clk, 154 + GFP_KERNEL); 155 + if (!ctrl_power->clk_config) { 156 + ctrl_power->num_clk = 0; 157 + return -EINVAL; 158 + } 159 + 160 + return 0; 161 + } 162 + 163 + static int dp_parser_clock(struct dp_parser *parser) 164 + { 165 + int rc = 0, i = 0; 166 + int num_clk = 0; 167 + int core_clk_index = 0, ctrl_clk_index = 0; 168 + int core_clk_count = 0, ctrl_clk_count = 0; 169 + const char *clk_name; 170 + struct device *dev = &parser->pdev->dev; 171 + struct dss_module_power *core_power = &parser->mp[DP_CORE_PM]; 172 + struct dss_module_power *ctrl_power = &parser->mp[DP_CTRL_PM]; 173 + 174 + core_power = &parser->mp[DP_CORE_PM]; 175 + ctrl_power = &parser->mp[DP_CTRL_PM]; 176 + 177 + rc = dp_parser_init_clk_data(parser); 178 + if (rc) { 179 + DRM_ERROR("failed to initialize power data %d\n", rc); 180 + return -EINVAL; 181 + } 182 + 183 + core_clk_count = core_power->num_clk; 184 + ctrl_clk_count = ctrl_power->num_clk; 185 + 186 + num_clk = core_clk_count + ctrl_clk_count; 187 + 188 + for (i = 0; i < num_clk; i++) { 189 + rc = of_property_read_string_index(dev->of_node, "clock-names", 190 + i, &clk_name); 191 + if (rc) { 192 + DRM_ERROR("error reading clock-names %d\n", rc); 193 + return rc; 194 + } 195 + if (dp_parser_check_prefix("core", clk_name) && 196 + core_clk_index < core_clk_count) { 197 + struct dss_clk *clk = 198 + &core_power->clk_config[core_clk_index]; 199 + strlcpy(clk->clk_name, clk_name, sizeof(clk->clk_name)); 200 + clk->type = DSS_CLK_AHB; 201 + core_clk_index++; 202 + } else if ((dp_parser_check_prefix("ctrl", clk_name) || 203 + dp_parser_check_prefix("stream", clk_name)) && 204 + ctrl_clk_index < ctrl_clk_count) { 205 + struct dss_clk *clk = 206 + &ctrl_power->clk_config[ctrl_clk_index]; 207 + 
strlcpy(clk->clk_name, clk_name, sizeof(clk->clk_name)); 208 + ctrl_clk_index++; 209 + 210 + if (dp_parser_check_prefix("ctrl_link", clk_name) || 211 + dp_parser_check_prefix("stream_pixel", clk_name)) 212 + clk->type = DSS_CLK_PCLK; 213 + else 214 + clk->type = DSS_CLK_AHB; 215 + } 216 + } 217 + 218 + DRM_DEBUG_DP("clock parsing successful\n"); 219 + 220 + return 0; 221 + } 222 + 223 + static int dp_parser_parse(struct dp_parser *parser) 224 + { 225 + int rc = 0; 226 + 227 + if (!parser) { 228 + DRM_ERROR("invalid input\n"); 229 + return -EINVAL; 230 + } 231 + 232 + rc = dp_parser_ctrl_res(parser); 233 + if (rc) 234 + return rc; 235 + 236 + rc = dp_parser_misc(parser); 237 + if (rc) 238 + return rc; 239 + 240 + rc = dp_parser_clock(parser); 241 + if (rc) 242 + return rc; 243 + 244 + /* Map the corresponding regulator information according to 245 + * version. Currently, since we only have one supported platform, 246 + * mapping the regulator directly. 247 + */ 248 + parser->regulator_cfg = &sdm845_dp_reg_cfg; 249 + 250 + return 0; 251 + } 252 + 253 + struct dp_parser *dp_parser_get(struct platform_device *pdev) 254 + { 255 + struct dp_parser *parser; 256 + 257 + parser = devm_kzalloc(&pdev->dev, sizeof(*parser), GFP_KERNEL); 258 + if (!parser) 259 + return ERR_PTR(-ENOMEM); 260 + 261 + parser->parse = dp_parser_parse; 262 + parser->pdev = pdev; 263 + 264 + return parser; 265 + }
+139
drivers/gpu/drm/msm/dp/dp_parser.h
/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (c) 2012-2020, The Linux Foundation. All rights reserved.
 */

#ifndef _DP_PARSER_H_
#define _DP_PARSER_H_

#include <linux/platform_device.h>

#include "dpu_io_util.h"
#include "msm_drv.h"

#define DP_LABEL "MDSS DP DISPLAY"
#define DP_MAX_PIXEL_CLK_KHZ	675000
#define DP_MAX_NUM_DP_LANES	4

/* Power-module indices into struct dp_parser::mp[] */
enum dp_pm_type {
	DP_CORE_PM,
	DP_CTRL_PM,
	DP_PHY_PM,
	DP_MAX_PM
};

/* A mapped register region: base address and length */
struct dss_io_data {
	u32 len;
	void __iomem *base;
};

/* Human-readable name of a power module, for log messages */
static inline const char *dp_parser_pm_name(enum dp_pm_type module)
{
	switch (module) {
	case DP_CORE_PM:	return "DP_CORE_PM";
	case DP_CTRL_PM:	return "DP_CTRL_PM";
	case DP_PHY_PM:		return "DP_PHY_PM";
	default:		return "???";
	}
}

/**
 * struct dp_display_data - display related device tree data.
 *
 * @ctrl_node: reference to controller device
 * @phy_node: reference to phy device
 * @is_active: is the controller currently active
 * @name: name of the display
 * @display_type: type of the display
 */
struct dp_display_data {
	struct device_node *ctrl_node;
	struct device_node *phy_node;
	bool is_active;
	const char *name;
	const char *display_type;
};

/**
 * struct dp_io - controller's IO related data
 *
 * @dp_controller: Display Port controller mapped memory address
 * @phy_io: phy's mapped memory address
 * @ln_tx0_io: USB-DP lane TX0's mapped memory address
 * @ln_tx1_io: USB-DP lane TX1's mapped memory address
 * @dp_pll_io: DP PLL mapped memory address
 * @usb3_dp_com: USB3 DP PHY combo mapped memory address
 */
struct dp_io {
	struct dss_io_data dp_controller;
	struct dss_io_data phy_io;
	struct dss_io_data ln_tx0_io;
	struct dss_io_data ln_tx1_io;
	struct dss_io_data dp_pll_io;
	struct dss_io_data usb3_dp_com;
};

/**
 * struct dp_pinctrl - DP's pin control
 *
 * @pin: pin-controller's instance
 * @state_active: active state pin control
 * @state_hpd_active: hpd active state pin control
 * @state_suspend: suspend state pin control
 */
struct dp_pinctrl {
	struct pinctrl *pin;
	struct pinctrl_state *state_active;
	struct pinctrl_state *state_hpd_active;
	struct pinctrl_state *state_suspend;
};

#define DP_DEV_REGULATOR_MAX	4

/* Regulators for DP devices */
struct dp_reg_entry {
	char name[32];
	int enable_load;	/* load (uA) while the supply is enabled */
	int disable_load;	/* load (uA) while the supply is disabled */
};

struct dp_regulator_cfg {
	int num;
	struct dp_reg_entry regs[DP_DEV_REGULATOR_MAX];
};

/**
 * struct dp_parser - DP parser's data exposed to clients
 *
 * @pdev: platform data of the client
 * @mp: gpio, regulator and clock related data
 * @pinctrl: pin-control related data
 * @io: mapped controller register regions
 * @disp_data: controller's display related data
 * @regulator_cfg: per-platform regulator table (set by parse())
 * @max_dp_lanes: lane limit from the "data-lanes" DT property
 * @parse: function to be called by client to parse device tree.
 */
struct dp_parser {
	struct platform_device *pdev;
	struct dss_module_power mp[DP_MAX_PM];
	struct dp_pinctrl pinctrl;
	struct dp_io io;
	struct dp_display_data disp_data;
	const struct dp_regulator_cfg *regulator_cfg;
	u32 max_dp_lanes;

	int (*parse)(struct dp_parser *parser);
};

/**
 * dp_parser_get() - get the DP's device tree parser module
 *
 * @pdev: platform data of the client
 * return: pointer to dp_parser structure.
 *
 * This function provides client capability to parse the
 * device tree and populate the data structures. The data
 * related to clock, regulators, pin-control and other
 * can be parsed using this module.
 */
struct dp_parser *dp_parser_get(struct platform_device *pdev);

#endif
+363
drivers/gpu/drm/msm/dp/dp_power.c
··· 1 + // SPDX-License-Identifier: GPL-2.0-only 2 + /* 3 + * Copyright (c) 2012-2020, The Linux Foundation. All rights reserved. 4 + */ 5 + 6 + #define pr_fmt(fmt) "[drm-dp] %s: " fmt, __func__ 7 + 8 + #include <linux/clk.h> 9 + #include <linux/clk-provider.h> 10 + #include <linux/regulator/consumer.h> 11 + #include "dp_power.h" 12 + 13 + struct dp_power_private { 14 + struct dp_parser *parser; 15 + struct platform_device *pdev; 16 + struct clk *link_clk_src; 17 + struct clk *pixel_provider; 18 + struct clk *link_provider; 19 + struct regulator_bulk_data supplies[DP_DEV_REGULATOR_MAX]; 20 + 21 + struct dp_power dp_power; 22 + }; 23 + 24 + static void dp_power_regulator_disable(struct dp_power_private *power) 25 + { 26 + struct regulator_bulk_data *s = power->supplies; 27 + const struct dp_reg_entry *regs = power->parser->regulator_cfg->regs; 28 + int num = power->parser->regulator_cfg->num; 29 + int i; 30 + 31 + DBG(""); 32 + for (i = num - 1; i >= 0; i--) 33 + if (regs[i].disable_load >= 0) 34 + regulator_set_load(s[i].consumer, 35 + regs[i].disable_load); 36 + 37 + regulator_bulk_disable(num, s); 38 + } 39 + 40 + static int dp_power_regulator_enable(struct dp_power_private *power) 41 + { 42 + struct regulator_bulk_data *s = power->supplies; 43 + const struct dp_reg_entry *regs = power->parser->regulator_cfg->regs; 44 + int num = power->parser->regulator_cfg->num; 45 + int ret, i; 46 + 47 + DBG(""); 48 + for (i = 0; i < num; i++) { 49 + if (regs[i].enable_load >= 0) { 50 + ret = regulator_set_load(s[i].consumer, 51 + regs[i].enable_load); 52 + if (ret < 0) { 53 + pr_err("regulator %d set op mode failed, %d\n", 54 + i, ret); 55 + goto fail; 56 + } 57 + } 58 + } 59 + 60 + ret = regulator_bulk_enable(num, s); 61 + if (ret < 0) { 62 + pr_err("regulator enable failed, %d\n", ret); 63 + goto fail; 64 + } 65 + 66 + return 0; 67 + 68 + fail: 69 + for (i--; i >= 0; i--) 70 + regulator_set_load(s[i].consumer, regs[i].disable_load); 71 + return ret; 72 + } 73 + 74 + static 
int dp_power_regulator_init(struct dp_power_private *power) 75 + { 76 + struct regulator_bulk_data *s = power->supplies; 77 + const struct dp_reg_entry *regs = power->parser->regulator_cfg->regs; 78 + struct platform_device *pdev = power->pdev; 79 + int num = power->parser->regulator_cfg->num; 80 + int i, ret; 81 + 82 + for (i = 0; i < num; i++) 83 + s[i].supply = regs[i].name; 84 + 85 + ret = devm_regulator_bulk_get(&pdev->dev, num, s); 86 + if (ret < 0) { 87 + pr_err("%s: failed to init regulator, ret=%d\n", 88 + __func__, ret); 89 + return ret; 90 + } 91 + 92 + return 0; 93 + } 94 + 95 + static int dp_power_clk_init(struct dp_power_private *power) 96 + { 97 + int rc = 0; 98 + struct dss_module_power *core, *ctrl; 99 + struct device *dev = &power->pdev->dev; 100 + 101 + core = &power->parser->mp[DP_CORE_PM]; 102 + ctrl = &power->parser->mp[DP_CTRL_PM]; 103 + 104 + rc = msm_dss_get_clk(dev, core->clk_config, core->num_clk); 105 + if (rc) { 106 + DRM_ERROR("failed to get %s clk. err=%d\n", 107 + dp_parser_pm_name(DP_CORE_PM), rc); 108 + return rc; 109 + } 110 + 111 + rc = msm_dss_get_clk(dev, ctrl->clk_config, ctrl->num_clk); 112 + if (rc) { 113 + DRM_ERROR("failed to get %s clk. 
err=%d\n", 114 + dp_parser_pm_name(DP_CTRL_PM), rc); 115 + msm_dss_put_clk(core->clk_config, core->num_clk); 116 + return -ENODEV; 117 + } 118 + 119 + return 0; 120 + } 121 + 122 + static int dp_power_clk_deinit(struct dp_power_private *power) 123 + { 124 + struct dss_module_power *core, *ctrl; 125 + 126 + core = &power->parser->mp[DP_CORE_PM]; 127 + ctrl = &power->parser->mp[DP_CTRL_PM]; 128 + 129 + if (!core || !ctrl) 130 + return -EINVAL; 131 + 132 + msm_dss_put_clk(ctrl->clk_config, ctrl->num_clk); 133 + msm_dss_put_clk(core->clk_config, core->num_clk); 134 + return 0; 135 + } 136 + 137 + static int dp_power_clk_set_rate(struct dp_power_private *power, 138 + enum dp_pm_type module, bool enable) 139 + { 140 + int rc = 0; 141 + struct dss_module_power *mp = &power->parser->mp[module]; 142 + 143 + if (enable) { 144 + rc = msm_dss_clk_set_rate(mp->clk_config, mp->num_clk); 145 + if (rc) { 146 + DRM_ERROR("failed to set clks rate.\n"); 147 + return rc; 148 + } 149 + } 150 + 151 + rc = msm_dss_enable_clk(mp->clk_config, mp->num_clk, enable); 152 + if (rc) { 153 + DRM_ERROR("failed to %d clks, err: %d\n", enable, rc); 154 + return rc; 155 + } 156 + 157 + return 0; 158 + } 159 + 160 + int dp_power_clk_enable(struct dp_power *dp_power, 161 + enum dp_pm_type pm_type, bool enable) 162 + { 163 + int rc = 0; 164 + struct dp_power_private *power; 165 + 166 + power = container_of(dp_power, struct dp_power_private, dp_power); 167 + 168 + if (pm_type != DP_CORE_PM && pm_type != DP_CTRL_PM) { 169 + DRM_ERROR("unsupported power module: %s\n", 170 + dp_parser_pm_name(pm_type)); 171 + return -EINVAL; 172 + } 173 + 174 + if (enable) { 175 + if (pm_type == DP_CORE_PM && dp_power->core_clks_on) { 176 + DRM_DEBUG_DP("core clks already enabled\n"); 177 + return 0; 178 + } 179 + 180 + if (pm_type == DP_CTRL_PM && dp_power->link_clks_on) { 181 + DRM_DEBUG_DP("links clks already enabled\n"); 182 + return 0; 183 + } 184 + 185 + if ((pm_type == DP_CTRL_PM) && (!dp_power->core_clks_on)) { 186 
+ DRM_DEBUG_DP("Enable core clks before link clks\n"); 187 + 188 + rc = dp_power_clk_set_rate(power, DP_CORE_PM, enable); 189 + if (rc) { 190 + DRM_ERROR("fail to enable clks: %s. err=%d\n", 191 + dp_parser_pm_name(DP_CORE_PM), rc); 192 + return rc; 193 + } 194 + dp_power->core_clks_on = true; 195 + } 196 + } 197 + 198 + rc = dp_power_clk_set_rate(power, pm_type, enable); 199 + if (rc) { 200 + DRM_ERROR("failed to '%s' clks for: %s. err=%d\n", 201 + enable ? "enable" : "disable", 202 + dp_parser_pm_name(pm_type), rc); 203 + return rc; 204 + } 205 + 206 + if (pm_type == DP_CORE_PM) 207 + dp_power->core_clks_on = enable; 208 + else 209 + dp_power->link_clks_on = enable; 210 + 211 + DRM_DEBUG_DP("%s clocks for %s\n", 212 + enable ? "enable" : "disable", 213 + dp_parser_pm_name(pm_type)); 214 + DRM_DEBUG_DP("link_clks:%s core_clks:%s\n", 215 + dp_power->link_clks_on ? "on" : "off", 216 + dp_power->core_clks_on ? "on" : "off"); 217 + 218 + return 0; 219 + } 220 + 221 + int dp_power_client_init(struct dp_power *dp_power) 222 + { 223 + int rc = 0; 224 + struct dp_power_private *power; 225 + 226 + if (!dp_power) { 227 + DRM_ERROR("invalid power data\n"); 228 + return -EINVAL; 229 + } 230 + 231 + power = container_of(dp_power, struct dp_power_private, dp_power); 232 + 233 + pm_runtime_enable(&power->pdev->dev); 234 + 235 + rc = dp_power_regulator_init(power); 236 + if (rc) { 237 + DRM_ERROR("failed to init regulators %d\n", rc); 238 + goto error; 239 + } 240 + 241 + rc = dp_power_clk_init(power); 242 + if (rc) { 243 + DRM_ERROR("failed to init clocks %d\n", rc); 244 + goto error; 245 + } 246 + return 0; 247 + 248 + error: 249 + pm_runtime_disable(&power->pdev->dev); 250 + return rc; 251 + } 252 + 253 + void dp_power_client_deinit(struct dp_power *dp_power) 254 + { 255 + struct dp_power_private *power; 256 + 257 + if (!dp_power) { 258 + DRM_ERROR("invalid power data\n"); 259 + return; 260 + } 261 + 262 + power = container_of(dp_power, struct dp_power_private, dp_power); 263 
+ 264 + dp_power_clk_deinit(power); 265 + pm_runtime_disable(&power->pdev->dev); 266 + 267 + } 268 + 269 + int dp_power_set_link_clk_parent(struct dp_power *dp_power) 270 + { 271 + int rc = 0; 272 + struct dp_power_private *power; 273 + u32 num; 274 + struct dss_clk *cfg; 275 + char *name = "ctrl_link"; 276 + 277 + if (!dp_power) { 278 + DRM_ERROR("invalid power data\n"); 279 + rc = -EINVAL; 280 + goto exit; 281 + } 282 + 283 + power = container_of(dp_power, struct dp_power_private, dp_power); 284 + 285 + num = power->parser->mp[DP_CTRL_PM].num_clk; 286 + cfg = power->parser->mp[DP_CTRL_PM].clk_config; 287 + 288 + while (num && strcmp(cfg->clk_name, name)) { 289 + num--; 290 + cfg++; 291 + } 292 + 293 + exit: 294 + return rc; 295 + } 296 + 297 + int dp_power_init(struct dp_power *dp_power, bool flip) 298 + { 299 + int rc = 0; 300 + struct dp_power_private *power = NULL; 301 + 302 + if (!dp_power) { 303 + DRM_ERROR("invalid power data\n"); 304 + return -EINVAL; 305 + } 306 + 307 + power = container_of(dp_power, struct dp_power_private, dp_power); 308 + 309 + pm_runtime_get_sync(&power->pdev->dev); 310 + rc = dp_power_regulator_enable(power); 311 + if (rc) { 312 + DRM_ERROR("failed to enable regulators, %d\n", rc); 313 + goto exit; 314 + } 315 + 316 + rc = dp_power_clk_enable(dp_power, DP_CORE_PM, true); 317 + if (rc) { 318 + DRM_ERROR("failed to enable DP core clocks, %d\n", rc); 319 + goto err_clk; 320 + } 321 + 322 + return 0; 323 + 324 + err_clk: 325 + dp_power_regulator_disable(power); 326 + exit: 327 + pm_runtime_put_sync(&power->pdev->dev); 328 + return rc; 329 + } 330 + 331 + int dp_power_deinit(struct dp_power *dp_power) 332 + { 333 + struct dp_power_private *power; 334 + 335 + power = container_of(dp_power, struct dp_power_private, dp_power); 336 + 337 + dp_power_clk_enable(dp_power, DP_CORE_PM, false); 338 + dp_power_regulator_disable(power); 339 + pm_runtime_put_sync(&power->pdev->dev); 340 + return 0; 341 + } 342 + 343 + struct dp_power 
*dp_power_get(struct dp_parser *parser) 344 + { 345 + struct dp_power_private *power; 346 + struct dp_power *dp_power; 347 + 348 + if (!parser) { 349 + DRM_ERROR("invalid input\n"); 350 + return ERR_PTR(-EINVAL); 351 + } 352 + 353 + power = devm_kzalloc(&parser->pdev->dev, sizeof(*power), GFP_KERNEL); 354 + if (!power) 355 + return ERR_PTR(-ENOMEM); 356 + 357 + power->parser = parser; 358 + power->pdev = parser->pdev; 359 + 360 + dp_power = &power->dp_power; 361 + 362 + return dp_power; 363 + }
+65
drivers/gpu/drm/msm/dp/dp_power.h
/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (c) 2012-2020, The Linux Foundation. All rights reserved.
 */

#ifndef _DP_POWER_H_
#define _DP_POWER_H_

#include "dp_parser.h"

/**
 * struct dp_power - DisplayPort's power related data
 *
 * @core_clks_on: set while the DP_CORE_PM clocks are enabled
 * @link_clks_on: set while the DP_CTRL_PM (link) clocks are enabled
 */
struct dp_power {
	bool core_clks_on;
	bool link_clks_on;
};

/**
 * dp_power_init() - enable the DP regulators and core clocks
 *
 * @power: instance of power module
 * @flip: orientation hint (currently unused by the implementation)
 * return: 0 for success, error for failure.
 */
int dp_power_init(struct dp_power *power, bool flip);

/**
 * dp_power_deinit() - turn off the DP core clocks and regulators
 *
 * @power: instance of power module
 * return: 0 for success, error for failure.
 */
int dp_power_deinit(struct dp_power *power);

/**
 * dp_power_clk_enable() - enable or disable one DP clock bus
 *
 * @power: instance of power module
 * @pm_type: DP_CORE_PM or DP_CTRL_PM
 * @enable: true to enable, false to disable
 * return: 0 for success, error for failure.
 */
int dp_power_clk_enable(struct dp_power *power, enum dp_pm_type pm_type,
				bool enable);

/* Set the parent of the DP link clock (placeholder in current code). */
int dp_power_set_link_clk_parent(struct dp_power *power);

/**
 * dp_power_client_init() - initialize clock and regulator modules
 *
 * @power: instance of power module
 * return: 0 for success, error for failure.
 *
 * This API will configure the DisplayPort's clocks and regulator
 * modules.
 */
int dp_power_client_init(struct dp_power *power);

/**
 * dp_power_client_deinit() - de-initialize clock and regulator modules
 *
 * @power: instance of power module
 * return: 0 for success, error for failure.
 *
 * This API will de-initialize the DisplayPort's clocks and regulator
 * modules.
 */
void dp_power_client_deinit(struct dp_power *power);

/**
 * dp_power_get() - configure and get the DisplayPort power module data
 *
 * @parser: instance of parser module
 * return: pointer to allocated power module data
 *
 * This API will configure the DisplayPort's power module and provides
 * methods to be called by the client to configure the power related
 * modules.
 */
struct dp_power *dp_power_get(struct dp_parser *parser);

#endif /* _DP_POWER_H_ */
+490
drivers/gpu/drm/msm/dp/dp_reg.h
/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (c) 2017-2020, The Linux Foundation. All rights reserved.
 */

#ifndef _DP_REG_H_
#define _DP_REG_H_

/*
 * Register offsets and bit definitions for the MSM DisplayPort blocks:
 * controller (DP_TX / AHB / AUX / mainlink / P0), PHY, QSERDES PLL,
 * MMSS clock controller, HDCP and the USB3/DP combo PHY common block.
 * Offsets are relative to each sub-block's own base address, which is
 * why several register names repeat offset 0x00000000.
 */

/* DP_TX Registers */
#define REG_DP_HW_VERSION			(0x00000000)

#define REG_DP_SW_RESET				(0x00000010)
#define DP_SW_RESET				(0x00000001)

#define REG_DP_PHY_CTRL				(0x00000014)
#define DP_PHY_CTRL_SW_RESET_PLL		(0x00000001)
#define DP_PHY_CTRL_SW_RESET			(0x00000004)

#define REG_DP_CLK_CTRL				(0x00000018)
#define REG_DP_CLK_ACTIVE			(0x0000001C)
#define REG_DP_INTR_STATUS			(0x00000020)
#define REG_DP_INTR_STATUS2			(0x00000024)
#define REG_DP_INTR_STATUS3			(0x00000028)

#define REG_DP_DP_HPD_CTRL			(0x00000000)
#define DP_DP_HPD_CTRL_HPD_EN			(0x00000001)

#define REG_DP_DP_HPD_INT_STATUS		(0x00000004)

#define REG_DP_DP_HPD_INT_ACK			(0x00000008)
#define DP_DP_HPD_PLUG_INT_ACK			(0x00000001)
#define DP_DP_IRQ_HPD_INT_ACK			(0x00000002)
#define DP_DP_HPD_REPLUG_INT_ACK		(0x00000004)
#define DP_DP_HPD_UNPLUG_INT_ACK		(0x00000008)

#define REG_DP_DP_HPD_INT_MASK			(0x0000000C)
#define DP_DP_HPD_PLUG_INT_MASK			(0x00000001)
#define DP_DP_IRQ_HPD_INT_MASK			(0x00000002)
#define DP_DP_HPD_REPLUG_INT_MASK		(0x00000004)
#define DP_DP_HPD_UNPLUG_INT_MASK		(0x00000008)

#define REG_DP_DP_HPD_REFTIMER			(0x00000018)
#define REG_DP_DP_HPD_EVENT_TIME_0		(0x0000001C)
#define REG_DP_DP_HPD_EVENT_TIME_1		(0x00000020)

#define REG_DP_AUX_CTRL				(0x00000030)
#define DP_AUX_CTRL_ENABLE			(0x00000001)
#define DP_AUX_CTRL_RESET			(0x00000002)

#define REG_DP_AUX_DATA				(0x00000034)
#define DP_AUX_DATA_READ			(0x00000001)
#define DP_AUX_DATA_WRITE			(0x00000000)
#define DP_AUX_DATA_OFFSET			(0x00000008)
#define DP_AUX_DATA_INDEX_OFFSET		(0x00000010)
#define DP_AUX_DATA_MASK			(0x0000ff00)
#define DP_AUX_DATA_INDEX_WRITE			(0x80000000)

#define REG_DP_AUX_TRANS_CTRL			(0x00000038)
#define DP_AUX_TRANS_CTRL_I2C			(0x00000100)
#define DP_AUX_TRANS_CTRL_GO			(0x00000200)
#define DP_AUX_TRANS_CTRL_NO_SEND_ADDR		(0x00000400)
#define DP_AUX_TRANS_CTRL_NO_SEND_STOP		(0x00000800)

#define REG_DP_TIMEOUT_COUNT			(0x0000003C)
#define REG_DP_AUX_LIMITS			(0x00000040)
#define REG_DP_AUX_STATUS			(0x00000044)

/* DPCD addresses (sink side), not local register offsets */
#define DP_DPCD_CP_IRQ				(0x201)
#define DP_DPCD_RXSTATUS			(0x69493)

#define DP_INTERRUPT_TRANS_NUM			(0x000000A0)

#define REG_DP_MAINLINK_CTRL			(0x00000000)
#define DP_MAINLINK_CTRL_ENABLE			(0x00000001)
#define DP_MAINLINK_CTRL_RESET			(0x00000002)
#define DP_MAINLINK_FB_BOUNDARY_SEL		(0x02000000)

#define REG_DP_STATE_CTRL			(0x00000004)
#define DP_STATE_CTRL_LINK_TRAINING_PATTERN1	(0x00000001)
#define DP_STATE_CTRL_LINK_TRAINING_PATTERN2	(0x00000002)
#define DP_STATE_CTRL_LINK_TRAINING_PATTERN3	(0x00000004)
#define DP_STATE_CTRL_LINK_TRAINING_PATTERN4	(0x00000008)
#define DP_STATE_CTRL_LINK_SYMBOL_ERR_MEASURE	(0x00000010)
#define DP_STATE_CTRL_LINK_PRBS7		(0x00000020)
#define DP_STATE_CTRL_LINK_TEST_CUSTOM_PATTERN	(0x00000040)
#define DP_STATE_CTRL_SEND_VIDEO		(0x00000080)
#define DP_STATE_CTRL_PUSH_IDLE			(0x00000100)

#define REG_DP_CONFIGURATION_CTRL		(0x00000008)
#define DP_CONFIGURATION_CTRL_SYNC_ASYNC_CLK	(0x00000001)
#define DP_CONFIGURATION_CTRL_STATIC_DYNAMIC_CN	(0x00000002)
#define DP_CONFIGURATION_CTRL_P_INTERLACED	(0x00000004)
#define DP_CONFIGURATION_CTRL_INTERLACED_BTF	(0x00000008)
#define DP_CONFIGURATION_CTRL_NUM_OF_LANES	(0x00000010)
#define DP_CONFIGURATION_CTRL_ENHANCED_FRAMING	(0x00000040)
#define DP_CONFIGURATION_CTRL_SEND_VSC		(0x00000080)
#define DP_CONFIGURATION_CTRL_BPC		(0x00000100)
#define DP_CONFIGURATION_CTRL_ASSR		(0x00000400)
#define DP_CONFIGURATION_CTRL_RGB_YUV		(0x00000800)
#define DP_CONFIGURATION_CTRL_LSCLK_DIV		(0x00002000)
#define DP_CONFIGURATION_CTRL_NUM_OF_LANES_SHIFT	(0x04)
#define DP_CONFIGURATION_CTRL_BPC_SHIFT		(0x08)
#define DP_CONFIGURATION_CTRL_LSCLK_DIV_SHIFT	(0x0D)

#define REG_DP_SOFTWARE_MVID			(0x00000010)
#define REG_DP_SOFTWARE_NVID			(0x00000018)
#define REG_DP_TOTAL_HOR_VER			(0x0000001C)
#define REG_DP_START_HOR_VER_FROM_SYNC		(0x00000020)
#define REG_DP_HSYNC_VSYNC_WIDTH_POLARITY	(0x00000024)
#define REG_DP_ACTIVE_HOR_VER			(0x00000028)

#define REG_DP_MISC1_MISC0			(0x0000002C)
#define DP_MISC0_SYNCHRONOUS_CLK		(0x00000001)
#define DP_MISC0_COLORIMETRY_CFG_SHIFT		(0x00000001)
#define DP_MISC0_TEST_BITS_DEPTH_SHIFT		(0x00000005)

#define REG_DP_VALID_BOUNDARY			(0x00000030)
#define REG_DP_VALID_BOUNDARY_2			(0x00000034)

#define REG_DP_LOGICAL2PHYSICAL_LANE_MAPPING	(0x00000038)
#define LANE0_MAPPING_SHIFT			(0x00000000)
#define LANE1_MAPPING_SHIFT			(0x00000002)
#define LANE2_MAPPING_SHIFT			(0x00000004)
#define LANE3_MAPPING_SHIFT			(0x00000006)

#define REG_DP_MAINLINK_READY			(0x00000040)
#define DP_MAINLINK_READY_FOR_VIDEO		(0x00000001)
#define DP_MAINLINK_READY_LINK_TRAINING_SHIFT	(0x00000003)

#define REG_DP_MAINLINK_LEVELS			(0x00000044)
#define DP_MAINLINK_SAFE_TO_EXIT_LEVEL_2	(0x00000002)


#define REG_DP_TU				(0x0000004C)

#define REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET	(0x00000054)
#define DP_HBR2_ERM_PATTERN			(0x00010000)

#define REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG0	(0x000000C0)
#define REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG1	(0x000000C4)
#define REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG2	(0x000000C8)

#define MMSS_DP_MISC1_MISC0			(0x0000002C)
#define MMSS_DP_AUDIO_TIMING_GEN		(0x00000080)
#define MMSS_DP_AUDIO_TIMING_RBR_32		(0x00000084)
#define MMSS_DP_AUDIO_TIMING_HBR_32		(0x00000088)
#define MMSS_DP_AUDIO_TIMING_RBR_44		(0x0000008C)
#define MMSS_DP_AUDIO_TIMING_HBR_44		(0x00000090)
#define MMSS_DP_AUDIO_TIMING_RBR_48		(0x00000094)
#define MMSS_DP_AUDIO_TIMING_HBR_48		(0x00000098)

#define MMSS_DP_PSR_CRC_RG			(0x00000154)
#define MMSS_DP_PSR_CRC_B			(0x00000158)

#define REG_DP_COMPRESSION_MODE_CTRL		(0x00000180)

#define MMSS_DP_AUDIO_CFG			(0x00000200)
#define MMSS_DP_AUDIO_STATUS			(0x00000204)
#define MMSS_DP_AUDIO_PKT_CTRL			(0x00000208)
#define MMSS_DP_AUDIO_PKT_CTRL2			(0x0000020C)
#define MMSS_DP_AUDIO_ACR_CTRL			(0x00000210)
#define MMSS_DP_AUDIO_CTRL_RESET		(0x00000214)

#define MMSS_DP_SDP_CFG				(0x00000228)
#define MMSS_DP_SDP_CFG2			(0x0000022C)
#define MMSS_DP_AUDIO_TIMESTAMP_0		(0x00000230)
#define MMSS_DP_AUDIO_TIMESTAMP_1		(0x00000234)

#define MMSS_DP_AUDIO_STREAM_0			(0x00000240)
#define MMSS_DP_AUDIO_STREAM_1			(0x00000244)

#define MMSS_DP_EXTENSION_0			(0x00000250)
#define MMSS_DP_EXTENSION_1			(0x00000254)
#define MMSS_DP_EXTENSION_2			(0x00000258)
#define MMSS_DP_EXTENSION_3			(0x0000025C)
#define MMSS_DP_EXTENSION_4			(0x00000260)
#define MMSS_DP_EXTENSION_5			(0x00000264)
#define MMSS_DP_EXTENSION_6			(0x00000268)
#define MMSS_DP_EXTENSION_7			(0x0000026C)
#define MMSS_DP_EXTENSION_8			(0x00000270)
#define MMSS_DP_EXTENSION_9			(0x00000274)
#define MMSS_DP_AUDIO_COPYMANAGEMENT_0		(0x00000278)
#define MMSS_DP_AUDIO_COPYMANAGEMENT_1		(0x0000027C)
#define MMSS_DP_AUDIO_COPYMANAGEMENT_2		(0x00000280)
#define MMSS_DP_AUDIO_COPYMANAGEMENT_3		(0x00000284)
#define MMSS_DP_AUDIO_COPYMANAGEMENT_4		(0x00000288)
#define MMSS_DP_AUDIO_COPYMANAGEMENT_5		(0x0000028C)
#define MMSS_DP_AUDIO_ISRC_0			(0x00000290)
#define MMSS_DP_AUDIO_ISRC_1			(0x00000294)
#define MMSS_DP_AUDIO_ISRC_2			(0x00000298)
#define MMSS_DP_AUDIO_ISRC_3			(0x0000029C)
#define MMSS_DP_AUDIO_ISRC_4			(0x000002A0)
#define MMSS_DP_AUDIO_ISRC_5			(0x000002A4)
#define MMSS_DP_AUDIO_INFOFRAME_0		(0x000002A8)
#define MMSS_DP_AUDIO_INFOFRAME_1		(0x000002AC)
#define MMSS_DP_AUDIO_INFOFRAME_2		(0x000002B0)

#define MMSS_DP_GENERIC0_0			(0x00000300)
#define MMSS_DP_GENERIC0_1			(0x00000304)
#define MMSS_DP_GENERIC0_2			(0x00000308)
#define MMSS_DP_GENERIC0_3			(0x0000030C)
#define MMSS_DP_GENERIC0_4			(0x00000310)
#define MMSS_DP_GENERIC0_5			(0x00000314)
#define MMSS_DP_GENERIC0_6			(0x00000318)
#define MMSS_DP_GENERIC0_7			(0x0000031C)
#define MMSS_DP_GENERIC0_8			(0x00000320)
#define MMSS_DP_GENERIC0_9			(0x00000324)
#define MMSS_DP_GENERIC1_0			(0x00000328)
#define MMSS_DP_GENERIC1_1			(0x0000032C)
#define MMSS_DP_GENERIC1_2			(0x00000330)
#define MMSS_DP_GENERIC1_3			(0x00000334)
#define MMSS_DP_GENERIC1_4			(0x00000338)
#define MMSS_DP_GENERIC1_5			(0x0000033C)
#define MMSS_DP_GENERIC1_6			(0x00000340)
#define MMSS_DP_GENERIC1_7			(0x00000344)
#define MMSS_DP_GENERIC1_8			(0x00000348)
#define MMSS_DP_GENERIC1_9			(0x0000034C)

#define MMSS_DP_VSCEXT_0			(0x000002D0)
#define MMSS_DP_VSCEXT_1			(0x000002D4)
#define MMSS_DP_VSCEXT_2			(0x000002D8)
#define MMSS_DP_VSCEXT_3			(0x000002DC)
#define MMSS_DP_VSCEXT_4			(0x000002E0)
#define MMSS_DP_VSCEXT_5			(0x000002E4)
#define MMSS_DP_VSCEXT_6			(0x000002E8)
#define MMSS_DP_VSCEXT_7			(0x000002EC)
#define MMSS_DP_VSCEXT_8			(0x000002F0)
#define MMSS_DP_VSCEXT_9			(0x000002F4)

#define MMSS_DP_BIST_ENABLE			(0x00000000)
#define DP_BIST_ENABLE_DPBIST_EN		(0x00000001)

#define MMSS_DP_TIMING_ENGINE_EN		(0x00000010)
#define DP_TIMING_ENGINE_EN_EN			(0x00000001)

#define MMSS_DP_INTF_CONFIG			(0x00000014)
#define MMSS_DP_INTF_HSYNC_CTL			(0x00000018)
#define MMSS_DP_INTF_VSYNC_PERIOD_F0		(0x0000001C)
#define MMSS_DP_INTF_VSYNC_PERIOD_F1		(0x00000020)
#define MMSS_DP_INTF_VSYNC_PULSE_WIDTH_F0	(0x00000024)
#define MMSS_DP_INTF_VSYNC_PULSE_WIDTH_F1	(0x00000028)
#define MMSS_INTF_DISPLAY_V_START_F0		(0x0000002C)
#define MMSS_INTF_DISPLAY_V_START_F1		(0x00000030)
#define MMSS_DP_INTF_DISPLAY_V_END_F0		(0x00000034)
#define MMSS_DP_INTF_DISPLAY_V_END_F1		(0x00000038)
#define MMSS_DP_INTF_ACTIVE_V_START_F0		(0x0000003C)
#define MMSS_DP_INTF_ACTIVE_V_START_F1		(0x00000040)
#define MMSS_DP_INTF_ACTIVE_V_END_F0		(0x00000044)
#define MMSS_DP_INTF_ACTIVE_V_END_F1		(0x00000048)
#define MMSS_DP_INTF_DISPLAY_HCTL		(0x0000004C)
#define MMSS_DP_INTF_ACTIVE_HCTL		(0x00000050)
#define MMSS_DP_INTF_POLARITY_CTL		(0x00000058)

#define MMSS_DP_TPG_MAIN_CONTROL		(0x00000060)
#define MMSS_DP_DSC_DTO				(0x0000007C)
#define DP_TPG_CHECKERED_RECT_PATTERN		(0x00000100)

#define MMSS_DP_TPG_VIDEO_CONFIG		(0x00000064)
#define DP_TPG_VIDEO_CONFIG_BPP_8BIT		(0x00000001)
#define DP_TPG_VIDEO_CONFIG_RGB			(0x00000004)

#define MMSS_DP_ASYNC_FIFO_CONFIG		(0x00000088)

/* DP PHY register offsets */
#define REG_DP_PHY_REVISION_ID0			(0x00000000)
#define REG_DP_PHY_REVISION_ID1			(0x00000004)
#define REG_DP_PHY_REVISION_ID2			(0x00000008)
#define REG_DP_PHY_REVISION_ID3			(0x0000000C)

#define REG_DP_PHY_CFG				(0x00000010)

#define REG_DP_PHY_PD_CTL			(0x00000018)
#define DP_PHY_PD_CTL_PWRDN			(0x00000001)
#define DP_PHY_PD_CTL_PSR_PWRDN			(0x00000002)
#define DP_PHY_PD_CTL_AUX_PWRDN			(0x00000004)
#define DP_PHY_PD_CTL_LANE_0_1_PWRDN		(0x00000008)
#define DP_PHY_PD_CTL_LANE_2_3_PWRDN		(0x00000010)
#define DP_PHY_PD_CTL_PLL_PWRDN			(0x00000020)
#define DP_PHY_PD_CTL_DP_CLAMP_EN		(0x00000040)

#define REG_DP_PHY_MODE				(0x0000001C)

#define REG_DP_PHY_AUX_CFG0			(0x00000020)
#define REG_DP_PHY_AUX_CFG1			(0x00000024)
#define REG_DP_PHY_AUX_CFG2			(0x00000028)
#define REG_DP_PHY_AUX_CFG3			(0x0000002C)
#define REG_DP_PHY_AUX_CFG4			(0x00000030)
#define REG_DP_PHY_AUX_CFG5			(0x00000034)
#define REG_DP_PHY_AUX_CFG6			(0x00000038)
#define REG_DP_PHY_AUX_CFG7			(0x0000003C)
#define REG_DP_PHY_AUX_CFG8			(0x00000040)
#define REG_DP_PHY_AUX_CFG9			(0x00000044)

#define REG_DP_PHY_AUX_INTERRUPT_MASK		(0x00000048)
#define PHY_AUX_STOP_ERR_MASK			(0x00000001)
#define PHY_AUX_DEC_ERR_MASK			(0x00000002)
#define PHY_AUX_SYNC_ERR_MASK			(0x00000004)
#define PHY_AUX_ALIGN_ERR_MASK			(0x00000008)
#define PHY_AUX_REQ_ERR_MASK			(0x00000010)


#define REG_DP_PHY_AUX_INTERRUPT_CLEAR		(0x0000004C)
#define REG_DP_PHY_AUX_BIST_CFG			(0x00000050)
#define REG_DP_PHY_AUX_INTERRUPT_STATUS		(0x000000BC)

#define REG_DP_PHY_VCO_DIV			0x0064
#define REG_DP_PHY_TX0_TX1_LANE_CTL		0x006C
#define REG_DP_PHY_TX2_TX3_LANE_CTL		0x0088

#define REG_DP_PHY_SPARE0			(0x00AC)
#define DP_PHY_SPARE0_MASK			(0x000F)
#define DP_PHY_SPARE0_ORIENTATION_INFO_SHIFT	(0x0004)

#define REG_DP_PHY_STATUS			(0x00C0)

/* Tx registers (per-lane, offsets relative to each TXn base) */
#define REG_DP_PHY_TXn_BIST_MODE_LANENO		0x0000
#define REG_DP_PHY_TXn_CLKBUF_ENABLE		0x0008

#define REG_DP_PHY_TXn_TX_EMP_POST1_LVL		0x000C
#define DP_PHY_TXn_TX_EMP_POST1_LVL_MASK	0x001F
#define DP_PHY_TXn_TX_EMP_POST1_LVL_MUX_EN	0x0020

#define REG_DP_PHY_TXn_TX_DRV_LVL		0x001C
#define DP_PHY_TXn_TX_DRV_LVL_MASK		0x001F
#define DP_PHY_TXn_TX_DRV_LVL_MUX_EN		0x0020

#define REG_DP_PHY_TXn_RESET_TSYNC_EN		0x0024
#define REG_DP_PHY_TXn_PRE_STALL_LDO_BOOST_EN	0x0028
#define REG_DP_PHY_TXn_TX_BAND			0x002C
#define REG_DP_PHY_TXn_SLEW_CNTL		0x0030
#define REG_DP_PHY_TXn_INTERFACE_SELECT		0x0034

#define REG_DP_PHY_TXn_RES_CODE_LANE_TX		0x003C
#define REG_DP_PHY_TXn_RES_CODE_LANE_RX		0x0040
#define REG_DP_PHY_TXn_RES_CODE_LANE_OFFSET_TX	0x0044
#define REG_DP_PHY_TXn_RES_CODE_LANE_OFFSET_RX	0x0048

#define REG_DP_PHY_TXn_DEBUG_BUS_SEL		0x0058
#define REG_DP_PHY_TXn_TRANSCEIVER_BIAS_EN	0x005C
#define REG_DP_PHY_TXn_HIGHZ_DRVR_EN		0x0060
#define REG_DP_PHY_TXn_TX_POL_INV		0x0064
#define REG_DP_PHY_TXn_PARRATE_REC_DETECT_IDLE_EN	0x0068

#define REG_DP_PHY_TXn_LANE_MODE_1		0x008C

#define REG_DP_PHY_TXn_TRAN_DRVR_EMP_EN		0x00C0
#define REG_DP_PHY_TXn_TX_INTERFACE_MODE	0x00C4

#define REG_DP_PHY_TXn_VMODE_CTRL1		0x00F0

/* PLL register offset */
#define QSERDES_COM_ATB_SEL1			0x0000
#define QSERDES_COM_ATB_SEL2			0x0004
#define QSERDES_COM_FREQ_UPDATE			0x0008
#define QSERDES_COM_BG_TIMER			0x000C
#define QSERDES_COM_SSC_EN_CENTER		0x0010
#define QSERDES_COM_SSC_ADJ_PER1		0x0014
#define QSERDES_COM_SSC_ADJ_PER2		0x0018
#define QSERDES_COM_SSC_PER1			0x001C
#define QSERDES_COM_SSC_PER2			0x0020
#define QSERDES_COM_SSC_STEP_SIZE1		0x0024
#define QSERDES_COM_SSC_STEP_SIZE2		0x0028
#define QSERDES_COM_POST_DIV			0x002C
#define QSERDES_COM_POST_DIV_MUX		0x0030

#define QSERDES_COM_BIAS_EN_CLKBUFLR_EN		0x0034
#define QSERDES_COM_BIAS_EN			0x0001
#define QSERDES_COM_BIAS_EN_MUX			0x0002
#define QSERDES_COM_CLKBUF_R_EN			0x0004
#define QSERDES_COM_CLKBUF_L_EN			0x0008
#define QSERDES_COM_EN_SYSCLK_TX_SEL		0x0010
#define QSERDES_COM_CLKBUF_RX_DRIVE_L		0x0020
#define QSERDES_COM_CLKBUF_RX_DRIVE_R		0x0040

#define QSERDES_COM_CLK_ENABLE1			0x0038
#define QSERDES_COM_SYS_CLK_CTRL		0x003C
#define QSERDES_COM_SYSCLK_BUF_ENABLE		0x0040
#define QSERDES_COM_PLL_EN			0x0044
#define QSERDES_COM_PLL_IVCO			0x0048
#define QSERDES_COM_CMN_IETRIM			0x004C
#define QSERDES_COM_CMN_IPTRIM			0x0050

#define QSERDES_COM_CP_CTRL_MODE0		0x0060
#define QSERDES_COM_CP_CTRL_MODE1		0x0064
#define QSERDES_COM_PLL_RCTRL_MODE0		0x0068
#define QSERDES_COM_PLL_RCTRL_MODE1		0x006C
#define QSERDES_COM_PLL_CCTRL_MODE0		0x0070
#define QSERDES_COM_PLL_CCTRL_MODE1		0x0074
#define QSERDES_COM_PLL_CNTRL			0x0078
#define QSERDES_COM_BIAS_EN_CTRL_BY_PSM		0x007C
#define QSERDES_COM_SYSCLK_EN_SEL		0x0080
#define QSERDES_COM_CML_SYSCLK_SEL		0x0084
#define QSERDES_COM_RESETSM_CNTRL		0x0088
#define QSERDES_COM_RESETSM_CNTRL2		0x008C
#define QSERDES_COM_LOCK_CMP_EN			0x0090
#define QSERDES_COM_LOCK_CMP_CFG		0x0094
#define QSERDES_COM_LOCK_CMP1_MODE0		0x0098
#define QSERDES_COM_LOCK_CMP2_MODE0		0x009C
#define QSERDES_COM_LOCK_CMP3_MODE0		0x00A0

#define QSERDES_COM_DEC_START_MODE0		0x00B0
#define QSERDES_COM_DEC_START_MODE1		0x00B4
#define QSERDES_COM_DIV_FRAC_START1_MODE0	0x00B8
#define QSERDES_COM_DIV_FRAC_START2_MODE0	0x00BC
#define QSERDES_COM_DIV_FRAC_START3_MODE0	0x00C0
#define QSERDES_COM_DIV_FRAC_START1_MODE1	0x00C4
#define QSERDES_COM_DIV_FRAC_START2_MODE1	0x00C8
#define QSERDES_COM_DIV_FRAC_START3_MODE1	0x00CC
#define QSERDES_COM_INTEGLOOP_INITVAL		0x00D0
#define QSERDES_COM_INTEGLOOP_EN		0x00D4
#define QSERDES_COM_INTEGLOOP_GAIN0_MODE0	0x00D8
#define QSERDES_COM_INTEGLOOP_GAIN1_MODE0	0x00DC
#define QSERDES_COM_INTEGLOOP_GAIN0_MODE1	0x00E0
#define QSERDES_COM_INTEGLOOP_GAIN1_MODE1	0x00E4
#define QSERDES_COM_VCOCAL_DEADMAN_CTRL		0x00E8
#define QSERDES_COM_VCO_TUNE_CTRL		0x00EC
#define QSERDES_COM_VCO_TUNE_MAP		0x00F0

#define QSERDES_COM_CMN_STATUS			0x0124
#define QSERDES_COM_RESET_SM_STATUS		0x0128

#define QSERDES_COM_CLK_SEL			0x0138
#define QSERDES_COM_HSCLK_SEL			0x013C

#define QSERDES_COM_CORECLK_DIV_MODE0		0x0148

#define QSERDES_COM_SW_RESET			0x0150
#define QSERDES_COM_CORE_CLK_EN			0x0154
#define QSERDES_COM_C_READY_STATUS		0x0158
#define QSERDES_COM_CMN_CONFIG			0x015C

#define QSERDES_COM_SVS_MODE_CLK_SEL		0x0164

/* DP MMSS_CC registers */
#define MMSS_DP_LINK_CMD_RCGR			(0x0138)
#define MMSS_DP_LINK_CFG_RCGR			(0x013C)
#define MMSS_DP_PIXEL_M				(0x01B4)
#define MMSS_DP_PIXEL_N				(0x01B8)

/* DP HDCP 1.3 registers */
#define DP_HDCP_CTRL				(0x0A0)
#define DP_HDCP_STATUS				(0x0A4)
#define DP_HDCP_SW_UPPER_AKSV			(0x098)
#define DP_HDCP_SW_LOWER_AKSV			(0x09C)
#define DP_HDCP_ENTROPY_CTRL0			(0x350)
#define DP_HDCP_ENTROPY_CTRL1			(0x35C)
#define DP_HDCP_SHA_STATUS			(0x0C8)
#define DP_HDCP_RCVPORT_DATA2_0			(0x0B0)
#define DP_HDCP_RCVPORT_DATA3			(0x0A4)
#define DP_HDCP_RCVPORT_DATA4			(0x0A8)
#define DP_HDCP_RCVPORT_DATA5			(0x0C0)
#define DP_HDCP_RCVPORT_DATA6			(0x0C4)

#define HDCP_SEC_DP_TZ_HV_HLOS_HDCP_SHA_CTRL		(0x024)
#define HDCP_SEC_DP_TZ_HV_HLOS_HDCP_SHA_DATA		(0x028)
#define HDCP_SEC_DP_TZ_HV_HLOS_HDCP_RCVPORT_DATA0	(0x004)
#define HDCP_SEC_DP_TZ_HV_HLOS_HDCP_RCVPORT_DATA1	(0x008)
#define HDCP_SEC_DP_TZ_HV_HLOS_HDCP_RCVPORT_DATA7	(0x00C)
#define HDCP_SEC_DP_TZ_HV_HLOS_HDCP_RCVPORT_DATA8	(0x010)
#define HDCP_SEC_DP_TZ_HV_HLOS_HDCP_RCVPORT_DATA9	(0x014)
#define HDCP_SEC_DP_TZ_HV_HLOS_HDCP_RCVPORT_DATA10	(0x018)
#define HDCP_SEC_DP_TZ_HV_HLOS_HDCP_RCVPORT_DATA11	(0x01C)
#define HDCP_SEC_DP_TZ_HV_HLOS_HDCP_RCVPORT_DATA12	(0x020)

/* USB3 DP COM registers */
#define REG_USB3_DP_COM_RESET_OVRD_CTRL		(0x1C)
#define USB3_DP_COM_OVRD_CTRL_SW_DPPHY_RESET	(0x01)
#define USB3_DP_COM_OVRD_CTRL_SW_DPPHY_RESET_MUX	(0x02)
#define USB3_DP_COM_OVRD_CTRL_SW_USB3PHY_RESET	(0x04)
#define USB3_DP_COM_OVRD_CTRL_SW_USB3PHY_RESET_MUX	(0x08)

#define REG_USB3_DP_COM_PHY_MODE_CTRL		(0x00)
#define USB3_DP_COM_PHY_MODE_DP			(0x03)

#define REG_USB3_DP_COM_SW_RESET		(0x04)
#define USB3_DP_COM_SW_RESET_SET		(0x01)

#define REG_USB3_DP_COM_TYPEC_CTRL		(0x10)
#define USB3_DP_COM_TYPEC_CTRL_PORTSEL		(0x01)
#define USB3_DP_COM_TYPEC_CTRL_PORTSEL_MUX	(0x02)

#define REG_USB3_DP_COM_SWI_CTRL		(0x0c)

#define REG_USB3_DP_COM_POWER_DOWN_CTRL		(0x08)
#define USB3_DP_COM_POWER_DOWN_CTRL_SW_PWRDN	(0x01)



#endif /* _DP_REG_H_ */
+2
drivers/gpu/drm/msm/msm_drv.c
··· 1352 1352 msm_dsi_register(); 1353 1353 msm_edp_register(); 1354 1354 msm_hdmi_register(); 1355 + msm_dp_register(); 1355 1356 adreno_register(); 1356 1357 return platform_driver_register(&msm_platform_driver); 1357 1358 } ··· 1361 1360 { 1362 1361 DBG("fini"); 1363 1362 platform_driver_unregister(&msm_platform_driver); 1363 + msm_dp_unregister(); 1364 1364 msm_hdmi_unregister(); 1365 1365 adreno_unregister(); 1366 1366 msm_edp_unregister();
+50 -3
drivers/gpu/drm/msm/msm_drv.h
··· 160 160 /* DSI is shared by mdp4 and mdp5 */ 161 161 struct msm_dsi *dsi[2]; 162 162 163 + struct msm_dp *dp; 164 + 163 165 /* when we have more than one 'msm_gpu' these need to be an array: */ 164 166 struct msm_gpu *gpu; 165 167 struct msm_file_private *lastctx; ··· 385 383 } 386 384 #endif 387 385 386 + #ifdef CONFIG_DRM_MSM_DP 387 + int __init msm_dp_register(void); 388 + void __exit msm_dp_unregister(void); 389 + int msm_dp_modeset_init(struct msm_dp *dp_display, struct drm_device *dev, 390 + struct drm_encoder *encoder); 391 + int msm_dp_display_enable(struct msm_dp *dp, struct drm_encoder *encoder); 392 + int msm_dp_display_disable(struct msm_dp *dp, struct drm_encoder *encoder); 393 + void msm_dp_display_mode_set(struct msm_dp *dp, struct drm_encoder *encoder, 394 + struct drm_display_mode *mode, 395 + struct drm_display_mode *adjusted_mode); 396 + 397 + #else 398 + static inline int __init msm_dp_register(void) 399 + { 400 + return -EINVAL; 401 + } 402 + static inline void __exit msm_dp_unregister(void) 403 + { 404 + } 405 + static inline int msm_dp_modeset_init(struct msm_dp *dp_display, 406 + struct drm_device *dev, 407 + struct drm_encoder *encoder) 408 + { 409 + return -EINVAL; 410 + } 411 + static inline int msm_dp_display_enable(struct msm_dp *dp, 412 + struct drm_encoder *encoder) 413 + { 414 + return -EINVAL; 415 + } 416 + static inline int msm_dp_display_disable(struct msm_dp *dp, 417 + struct drm_encoder *encoder) 418 + { 419 + return -EINVAL; 420 + } 421 + static inline void msm_dp_display_mode_set(struct msm_dp *dp, 422 + struct drm_encoder *encoder, 423 + struct drm_display_mode *mode, 424 + struct drm_display_mode *adjusted_mode) 425 + { 426 + } 427 + #endif 428 + 388 429 void __init msm_mdp_register(void); 389 430 void __exit msm_mdp_unregister(void); 390 431 void __init msm_dpu_register(void); ··· 448 403 #else 449 404 static inline int msm_debugfs_late_init(struct drm_device *dev) { return 0; } 450 405 __printf(3, 4) 451 - static 
inline void msm_rd_dump_submit(struct msm_rd_state *rd, struct msm_gem_submit *submit, 452 - const char *fmt, ...) {} 406 + static inline void msm_rd_dump_submit(struct msm_rd_state *rd, 407 + struct msm_gem_submit *submit, 408 + const char *fmt, ...) {} 453 409 static inline void msm_rd_debugfs_cleanup(struct msm_drm_private *priv) {} 454 410 static inline void msm_perf_debugfs_cleanup(struct msm_drm_private *priv) {} 455 411 #endif ··· 470 424 int msm_submitqueue_init(struct drm_device *drm, struct msm_file_private *ctx); 471 425 struct msm_gpu_submitqueue *msm_submitqueue_get(struct msm_file_private *ctx, 472 426 u32 id); 473 - int msm_submitqueue_create(struct drm_device *drm, struct msm_file_private *ctx, 427 + int msm_submitqueue_create(struct drm_device *drm, 428 + struct msm_file_private *ctx, 474 429 u32 prio, u32 flags, u32 *id); 475 430 int msm_submitqueue_query(struct drm_device *drm, struct msm_file_private *ctx, 476 431 struct drm_msm_submitqueue_query *args);