Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

drm/amd/display: Add dependent changes for DCN32/321

[Why&How]
This patch adds the changes required in DC files outside the
DCN32/321-specific tree

v2: squash in updates (Alex)

Signed-off-by: Aurabindo Pillai <aurabindo.pillai@amd.com>
Acked-by: Alex Deucher <alexander.deucher@amd.com>
Signed-off-by: Alex Deucher <alexander.deucher@amd.com>

authored by

Aurabindo Pillai and committed by
Alex Deucher
d3dfceb5 235c6763

+1521 -308
+2
drivers/gpu/drm/amd/display/dc/Makefile
··· 38 38 DC_LIBS += dcn31 39 39 DC_LIBS += dcn315 40 40 DC_LIBS += dcn316 41 + DC_LIBS += dcn32 42 + DC_LIBS += dcn321 41 43 endif 42 44 43 45 DC_LIBS += dce120
+758 -191
drivers/gpu/drm/amd/display/dc/bios/bios_parser2.c
··· 165 165 unsigned int count = 0; 166 166 unsigned int i; 167 167 168 - for (i = 0; i < bp->object_info_tbl.v1_4->number_of_path; i++) { 169 - if (bp->object_info_tbl.v1_4->display_path[i].encoderobjid != 0) 170 - count++; 168 + switch (bp->object_info_tbl.revision.minor) { 169 + default: 170 + case 4: 171 + for (i = 0; i < bp->object_info_tbl.v1_4->number_of_path; i++) 172 + if (bp->object_info_tbl.v1_4->display_path[i].encoderobjid != 0) 173 + count++; 174 + 175 + break; 176 + 177 + case 5: 178 + for (i = 0; i < bp->object_info_tbl.v1_5->number_of_path; i++) 179 + if (bp->object_info_tbl.v1_5->display_path[i].encoderobjid != 0) 180 + count++; 181 + 182 + break; 171 183 } 172 184 return count; 173 185 } ··· 194 182 struct object_info_table *tbl = &bp->object_info_tbl; 195 183 struct display_object_info_table_v1_4 *v1_4 = tbl->v1_4; 196 184 197 - if (v1_4->number_of_path > i) { 198 - /* If display_objid is generic object id, the encoderObj 199 - * /extencoderobjId should be 0 200 - */ 201 - if (v1_4->display_path[i].encoderobjid != 0 && 202 - v1_4->display_path[i].display_objid != 0) 203 - object_id = object_id_from_bios_object_id( 204 - v1_4->display_path[i].display_objid); 205 - } 185 + struct display_object_info_table_v1_5 *v1_5 = tbl->v1_5; 206 186 187 + switch (bp->object_info_tbl.revision.minor) { 188 + default: 189 + case 4: 190 + if (v1_4->number_of_path > i) { 191 + /* If display_objid is generic object id, the encoderObj 192 + * /extencoderobjId should be 0 193 + */ 194 + if (v1_4->display_path[i].encoderobjid != 0 && 195 + v1_4->display_path[i].display_objid != 0) 196 + object_id = object_id_from_bios_object_id( 197 + v1_4->display_path[i].display_objid); 198 + } 199 + break; 200 + 201 + case 5: 202 + if (v1_5->number_of_path > i) { 203 + /* If display_objid is generic object id, the encoderObjId 204 + * should be 0 205 + */ 206 + if (v1_5->display_path[i].encoderobjid != 0 && 207 + v1_5->display_path[i].display_objid != 0) 208 + object_id = 
object_id_from_bios_object_id( 209 + v1_5->display_path[i].display_objid); 210 + } 211 + break; 212 + } 207 213 return object_id; 208 214 } 209 215 ··· 231 201 { 232 202 struct bios_parser *bp = BP_FROM_DCB(dcb); 233 203 unsigned int i; 234 - enum bp_result bp_result = BP_RESULT_BADINPUT; 235 - struct graphics_object_id obj_id = {0}; 204 + enum bp_result bp_result = BP_RESULT_BADINPUT; 205 + struct graphics_object_id obj_id = { 0 }; 236 206 struct object_info_table *tbl = &bp->object_info_tbl; 237 207 238 208 if (!src_object_id) ··· 247 217 * If found in for loop, should break. 248 218 * DAL2 implementation may be changed too 249 219 */ 250 - for (i = 0; i < tbl->v1_4->number_of_path; i++) { 251 - obj_id = object_id_from_bios_object_id( 252 - tbl->v1_4->display_path[i].encoderobjid); 253 - if (object_id.type == obj_id.type && 254 - object_id.id == obj_id.id && 255 - object_id.enum_id == 256 - obj_id.enum_id) { 257 - *src_object_id = 258 - object_id_from_bios_object_id(0x1100); 259 - /* break; */ 220 + switch (bp->object_info_tbl.revision.minor) { 221 + default: 222 + case 4: 223 + for (i = 0; i < tbl->v1_4->number_of_path; i++) { 224 + obj_id = object_id_from_bios_object_id( 225 + tbl->v1_4->display_path[i].encoderobjid); 226 + if (object_id.type == obj_id.type && 227 + object_id.id == obj_id.id && 228 + object_id.enum_id == obj_id.enum_id) { 229 + *src_object_id = 230 + object_id_from_bios_object_id( 231 + 0x1100); 232 + /* break; */ 233 + } 260 234 } 235 + bp_result = BP_RESULT_OK; 236 + break; 237 + 238 + case 5: 239 + for (i = 0; i < tbl->v1_5->number_of_path; i++) { 240 + obj_id = object_id_from_bios_object_id( 241 + tbl->v1_5->display_path[i].encoderobjid); 242 + if (object_id.type == obj_id.type && 243 + object_id.id == obj_id.id && 244 + object_id.enum_id == obj_id.enum_id) { 245 + *src_object_id = 246 + object_id_from_bios_object_id( 247 + 0x1100); 248 + /* break; */ 249 + } 250 + } 251 + bp_result = BP_RESULT_OK; 252 + break; 261 253 } 262 - bp_result = 
BP_RESULT_OK; 263 254 break; 264 255 case OBJECT_TYPE_CONNECTOR: 265 - for (i = 0; i < tbl->v1_4->number_of_path; i++) { 266 - obj_id = object_id_from_bios_object_id( 267 - tbl->v1_4->display_path[i].display_objid); 256 + switch (bp->object_info_tbl.revision.minor) { 257 + default: 258 + case 4: 259 + for (i = 0; i < tbl->v1_4->number_of_path; i++) { 260 + obj_id = object_id_from_bios_object_id( 261 + tbl->v1_4->display_path[i] 262 + .display_objid); 268 263 269 - if (object_id.type == obj_id.type && 270 - object_id.id == obj_id.id && 271 - object_id.enum_id == obj_id.enum_id) { 272 - *src_object_id = 273 - object_id_from_bios_object_id( 274 - tbl->v1_4->display_path[i].encoderobjid); 275 - /* break; */ 264 + if (object_id.type == obj_id.type && 265 + object_id.id == obj_id.id && 266 + object_id.enum_id == obj_id.enum_id) { 267 + *src_object_id = 268 + object_id_from_bios_object_id( 269 + tbl->v1_4 270 + ->display_path[i] 271 + .encoderobjid); 272 + /* break; */ 273 + } 276 274 } 275 + bp_result = BP_RESULT_OK; 276 + break; 277 277 } 278 278 bp_result = BP_RESULT_OK; 279 279 break; 280 + case 5: 281 + for (i = 0; i < tbl->v1_5->number_of_path; i++) { 282 + obj_id = object_id_from_bios_object_id( 283 + tbl->v1_5->display_path[i].display_objid); 284 + 285 + if (object_id.type == obj_id.type && 286 + object_id.id == obj_id.id && 287 + object_id.enum_id == obj_id.enum_id) { 288 + *src_object_id = object_id_from_bios_object_id( 289 + tbl->v1_5->display_path[i].encoderobjid); 290 + /* break; */ 291 + } 292 + } 293 + bp_result = BP_RESULT_OK; 294 + break; 295 + 280 296 default: 297 + bp_result = BP_RESULT_OK; 281 298 break; 282 299 } 283 300 ··· 367 290 } 368 291 } 369 292 293 + /* from graphics_object_id, find display path which includes the object_id */ 294 + static struct atom_display_object_path_v3 *get_bios_object_from_path_v3( 295 + struct bios_parser *bp, 296 + struct graphics_object_id id) 297 + { 298 + unsigned int i; 299 + struct graphics_object_id obj_id = {0}; 
300 + 301 + switch (id.type) { 302 + case OBJECT_TYPE_ENCODER: 303 + for (i = 0; i < bp->object_info_tbl.v1_5->number_of_path; i++) { 304 + obj_id = object_id_from_bios_object_id( 305 + bp->object_info_tbl.v1_5->display_path[i].encoderobjid); 306 + if (id.type == obj_id.type && id.id == obj_id.id 307 + && id.enum_id == obj_id.enum_id) 308 + return &bp->object_info_tbl.v1_5->display_path[i]; 309 + } 310 + break; 311 + 312 + case OBJECT_TYPE_CONNECTOR: 313 + case OBJECT_TYPE_GENERIC: 314 + /* Both Generic and Connector Object ID 315 + * will be stored on display_objid 316 + */ 317 + for (i = 0; i < bp->object_info_tbl.v1_5->number_of_path; i++) { 318 + obj_id = object_id_from_bios_object_id( 319 + bp->object_info_tbl.v1_5->display_path[i].display_objid); 320 + if (id.type == obj_id.type && id.id == obj_id.id 321 + && id.enum_id == obj_id.enum_id) 322 + return &bp->object_info_tbl.v1_5->display_path[i]; 323 + } 324 + break; 325 + 326 + default: 327 + return NULL; 328 + } 329 + 330 + return NULL; 331 + } 332 + 370 333 static enum bp_result bios_parser_get_i2c_info(struct dc_bios *dcb, 371 334 struct graphics_object_id id, 372 335 struct graphics_object_i2c_info *info) 373 336 { 374 337 uint32_t offset; 375 338 struct atom_display_object_path_v2 *object; 339 + 340 + struct atom_display_object_path_v3 *object_path_v3; 341 + 376 342 struct atom_common_record_header *header; 377 343 struct atom_i2c_record *record; 378 344 struct atom_i2c_record dummy_record = {0}; ··· 433 313 return BP_RESULT_NORECORD; 434 314 } 435 315 436 - object = get_bios_object(bp, id); 316 + switch (bp->object_info_tbl.revision.minor) { 317 + case 4: 318 + default: 319 + object = get_bios_object(bp, id); 437 320 438 - if (!object) 439 - return BP_RESULT_BADINPUT; 321 + if (!object) 322 + return BP_RESULT_BADINPUT; 440 323 441 - offset = object->disp_recordoffset + bp->object_info_tbl_offset; 324 + offset = object->disp_recordoffset + bp->object_info_tbl_offset; 325 + break; 326 + case 5: 327 + 
object_path_v3 = get_bios_object_from_path_v3(bp, id); 328 + 329 + if (!object_path_v3) 330 + return BP_RESULT_BADINPUT; 331 + 332 + offset = object_path_v3->disp_recordoffset + bp->object_info_tbl_offset; 333 + break; 334 + } 442 335 443 336 for (;;) { 444 337 header = GET_IMAGE(struct atom_common_record_header, offset); ··· 554 421 return BP_RESULT_OK; 555 422 } 556 423 424 + static struct atom_hpd_int_record *get_hpd_record_for_path_v3( 425 + struct bios_parser *bp, 426 + struct atom_display_object_path_v3 *object) 427 + { 428 + struct atom_common_record_header *header; 429 + uint32_t offset; 430 + 431 + if (!object) { 432 + BREAK_TO_DEBUGGER(); /* Invalid object */ 433 + return NULL; 434 + } 435 + 436 + offset = object->disp_recordoffset + bp->object_info_tbl_offset; 437 + 438 + for (;;) { 439 + header = GET_IMAGE(struct atom_common_record_header, offset); 440 + 441 + if (!header) 442 + return NULL; 443 + 444 + if (header->record_type == ATOM_RECORD_END_TYPE || 445 + !header->record_size) 446 + break; 447 + 448 + if (header->record_type == ATOM_HPD_INT_RECORD_TYPE 449 + && sizeof(struct atom_hpd_int_record) <= 450 + header->record_size) 451 + return (struct atom_hpd_int_record *) header; 452 + 453 + offset += header->record_size; 454 + } 455 + 456 + return NULL; 457 + } 458 + 557 459 static enum bp_result bios_parser_get_hpd_info( 558 460 struct dc_bios *dcb, 559 461 struct graphics_object_id id, ··· 596 428 { 597 429 struct bios_parser *bp = BP_FROM_DCB(dcb); 598 430 struct atom_display_object_path_v2 *object; 431 + struct atom_display_object_path_v3 *object_path_v3; 599 432 struct atom_hpd_int_record *record = NULL; 600 433 601 434 if (!info) 602 435 return BP_RESULT_BADINPUT; 603 436 604 - object = get_bios_object(bp, id); 437 + switch (bp->object_info_tbl.revision.minor) { 438 + case 4: 439 + default: 440 + object = get_bios_object(bp, id); 605 441 606 - if (!object) 607 - return BP_RESULT_BADINPUT; 442 + if (!object) 443 + return BP_RESULT_BADINPUT; 608 
444 609 - record = get_hpd_record(bp, object); 445 + record = get_hpd_record(bp, object); 446 + 447 + break; 448 + case 5: 449 + object_path_v3 = get_bios_object_from_path_v3(bp, id); 450 + 451 + if (!object_path_v3) 452 + return BP_RESULT_BADINPUT; 453 + 454 + record = get_hpd_record_for_path_v3(bp, object_path_v3); 455 + break; 456 + } 610 457 611 458 if (record != NULL) { 612 459 info->hpd_int_gpio_uid = record->pin_id; ··· 709 526 return BP_RESULT_UNSUPPORTED; 710 527 711 528 /* Temporary hard code gpio pin info */ 712 - #if defined(FOR_SIMNOW_BOOT) 713 - { 714 - struct atom_gpio_pin_assignment gpio_pin[8] = { 715 - {0x5db5, 0, 0, 1, 0}, 716 - {0x5db5, 8, 8, 2, 0}, 717 - {0x5db5, 0x10, 0x10, 3, 0}, 718 - {0x5db5, 0x18, 0x14, 4, 0}, 719 - {0x5db5, 0x1A, 0x18, 5, 0}, 720 - {0x5db5, 0x1C, 0x1C, 6, 0}, 721 - }; 722 - 723 - count = 6; 724 - memmove(header->gpio_pin, gpio_pin, sizeof(gpio_pin)); 725 - } 726 - #else 727 529 count = (le16_to_cpu(header->table_header.structuresize) 728 530 - sizeof(struct atom_common_table_header)) 729 531 / sizeof(struct atom_gpio_pin_assignment); 730 - #endif 731 532 for (i = 0; i < count; ++i) { 732 533 if (header->gpio_pin[i].gpio_id != gpio_id) 733 534 continue; ··· 800 633 struct bios_parser *bp = BP_FROM_DCB(dcb); 801 634 struct atom_display_object_path_v2 *object; 802 635 636 + struct atom_display_object_path_v3 *object_path_v3; 637 + 638 + 803 639 if (!info) 804 640 return BP_RESULT_BADINPUT; 805 641 806 - /* getBiosObject will return MXM object */ 807 - object = get_bios_object(bp, connector_object_id); 642 + switch (bp->object_info_tbl.revision.minor) { 643 + case 4: 644 + default: 645 + /* getBiosObject will return MXM object */ 646 + object = get_bios_object(bp, connector_object_id); 808 647 809 - if (!object) { 810 - BREAK_TO_DEBUGGER(); /* Invalid object id */ 811 - return BP_RESULT_BADINPUT; 648 + if (!object) { 649 + BREAK_TO_DEBUGGER(); /* Invalid object id */ 650 + return BP_RESULT_BADINPUT; 651 + } 652 + 653 + 
info->acpi_device = 0; /* BIOS no longer provides this */ 654 + info->dev_id = device_type_from_device_id(object->device_tag); 655 + break; 656 + case 5: 657 + object_path_v3 = get_bios_object_from_path_v3(bp, connector_object_id); 658 + 659 + if (!object_path_v3) { 660 + BREAK_TO_DEBUGGER(); /* Invalid object id */ 661 + return BP_RESULT_BADINPUT; 662 + } 663 + info->acpi_device = 0; /* BIOS no longer provides this */ 664 + info->dev_id = device_type_from_device_id(object_path_v3->device_tag); 665 + break; 812 666 } 813 - 814 - info->acpi_device = 0; /* BIOS no longer provides this */ 815 - info->dev_id = device_type_from_device_id(object->device_tag); 816 667 817 668 return BP_RESULT_OK; 818 669 } ··· 988 803 return result; 989 804 } 990 805 806 + static enum bp_result get_ss_info_v4_5( 807 + struct bios_parser *bp, 808 + uint32_t id, 809 + uint32_t index, 810 + struct spread_spectrum_info *ss_info) 811 + { 812 + enum bp_result result = BP_RESULT_OK; 813 + struct atom_display_controller_info_v4_5 *disp_cntl_tbl = NULL; 814 + 815 + if (!ss_info) 816 + return BP_RESULT_BADINPUT; 817 + 818 + if (!DATA_TABLES(dce_info)) 819 + return BP_RESULT_BADBIOSTABLE; 820 + 821 + disp_cntl_tbl = GET_IMAGE(struct atom_display_controller_info_v4_5, 822 + DATA_TABLES(dce_info)); 823 + if (!disp_cntl_tbl) 824 + return BP_RESULT_BADBIOSTABLE; 825 + 826 + ss_info->type.STEP_AND_DELAY_INFO = false; 827 + ss_info->spread_percentage_divider = 1000; 828 + /* BIOS no longer uses target clock. 
Always enable for now */ 829 + ss_info->target_clock_range = 0xffffffff; 830 + 831 + switch (id) { 832 + case AS_SIGNAL_TYPE_DVI: 833 + ss_info->spread_spectrum_percentage = 834 + disp_cntl_tbl->dvi_ss_percentage; 835 + ss_info->spread_spectrum_range = 836 + disp_cntl_tbl->dvi_ss_rate_10hz * 10; 837 + if (disp_cntl_tbl->dvi_ss_mode & ATOM_SS_CENTRE_SPREAD_MODE) 838 + ss_info->type.CENTER_MODE = true; 839 + break; 840 + case AS_SIGNAL_TYPE_HDMI: 841 + ss_info->spread_spectrum_percentage = 842 + disp_cntl_tbl->hdmi_ss_percentage; 843 + ss_info->spread_spectrum_range = 844 + disp_cntl_tbl->hdmi_ss_rate_10hz * 10; 845 + if (disp_cntl_tbl->hdmi_ss_mode & ATOM_SS_CENTRE_SPREAD_MODE) 846 + ss_info->type.CENTER_MODE = true; 847 + break; 848 + case AS_SIGNAL_TYPE_DISPLAY_PORT: 849 + ss_info->spread_spectrum_percentage = 850 + disp_cntl_tbl->dp_ss_percentage; 851 + ss_info->spread_spectrum_range = 852 + disp_cntl_tbl->dp_ss_rate_10hz * 10; 853 + if (disp_cntl_tbl->dp_ss_mode & ATOM_SS_CENTRE_SPREAD_MODE) 854 + ss_info->type.CENTER_MODE = true; 855 + break; 856 + case AS_SIGNAL_TYPE_GPU_PLL: 857 + /* atom_smu_info_v4_0 does not have fields for SS for SMU Display PLL anymore. 858 + * SMU Display PLL supposed to be without spread. 859 + * Better place for it would be in atom_display_controller_info_v4_5 table. 
860 + */ 861 + result = BP_RESULT_UNSUPPORTED; 862 + break; 863 + default: 864 + result = BP_RESULT_UNSUPPORTED; 865 + break; 866 + } 867 + 868 + return result; 869 + } 870 + 991 871 /** 992 872 * bios_parser_get_spread_spectrum_info 993 873 * Get spread spectrum information from the ASIC_InternalSS_Info(ver 2.1 or ··· 1097 847 case 3: 1098 848 case 4: 1099 849 return get_ss_info_v4_2(bp, signal, index, ss_info); 850 + case 5: 851 + return get_ss_info_v4_5(bp, signal, index, ss_info); 852 + 1100 853 default: 1101 854 ASSERT(0); 1102 855 break; ··· 1140 887 return result; 1141 888 } 1142 889 890 + static enum bp_result get_soc_bb_info_v4_5( 891 + struct bios_parser *bp, 892 + struct bp_soc_bb_info *soc_bb_info) 893 + { 894 + enum bp_result result = BP_RESULT_OK; 895 + struct atom_display_controller_info_v4_5 *disp_cntl_tbl = NULL; 896 + 897 + if (!soc_bb_info) 898 + return BP_RESULT_BADINPUT; 899 + 900 + if (!DATA_TABLES(dce_info)) 901 + return BP_RESULT_BADBIOSTABLE; 902 + 903 + disp_cntl_tbl = GET_IMAGE(struct atom_display_controller_info_v4_5, 904 + DATA_TABLES(dce_info)); 905 + if (!disp_cntl_tbl) 906 + return BP_RESULT_BADBIOSTABLE; 907 + 908 + soc_bb_info->dram_clock_change_latency_100ns = disp_cntl_tbl->max_mclk_chg_lat; 909 + soc_bb_info->dram_sr_enter_exit_latency_100ns = disp_cntl_tbl->max_sr_enter_exit_lat; 910 + soc_bb_info->dram_sr_exit_latency_100ns = disp_cntl_tbl->max_sr_exit_lat; 911 + 912 + return result; 913 + } 914 + 1143 915 static enum bp_result bios_parser_get_soc_bb_info( 1144 916 struct dc_bios *dcb, 1145 917 struct bp_soc_bb_info *soc_bb_info) ··· 1193 915 break; 1194 916 case 4: 1195 917 result = get_soc_bb_info_v4_4(bp, soc_bb_info); 918 + break; 919 + case 5: 920 + result = get_soc_bb_info_v4_5(bp, soc_bb_info); 1196 921 break; 1197 922 default: 1198 923 break; ··· 1304 1023 return result; 1305 1024 } 1306 1025 1026 + static enum bp_result get_disp_caps_v4_5( 1027 + struct bios_parser *bp, 1028 + uint8_t *dce_caps) 1029 + { 1030 + enum 
bp_result result = BP_RESULT_OK; 1031 + struct atom_display_controller_info_v4_5 *disp_cntl_tbl = NULL; 1032 + 1033 + if (!dce_caps) 1034 + return BP_RESULT_BADINPUT; 1035 + 1036 + if (!DATA_TABLES(dce_info)) 1037 + return BP_RESULT_BADBIOSTABLE; 1038 + 1039 + disp_cntl_tbl = GET_IMAGE(struct atom_display_controller_info_v4_5, 1040 + DATA_TABLES(dce_info)); 1041 + 1042 + if (!disp_cntl_tbl) 1043 + return BP_RESULT_BADBIOSTABLE; 1044 + 1045 + *dce_caps = disp_cntl_tbl->display_caps; 1046 + 1047 + return result; 1048 + } 1049 + 1307 1050 static enum bp_result bios_parser_get_lttpr_interop( 1308 1051 struct dc_bios *dcb, 1309 1052 uint8_t *dce_caps) ··· 1362 1057 result = get_disp_caps_v4_4(bp, dce_caps); 1363 1058 *dce_caps = !!(*dce_caps & DCE_INFO_CAPS_VBIOS_LTTPR_TRANSPARENT_ENABLE); 1364 1059 break; 1060 + case 5: 1061 + result = get_disp_caps_v4_5(bp, dce_caps); 1062 + *dce_caps = !!(*dce_caps & DCE_INFO_CAPS_VBIOS_LTTPR_TRANSPARENT_ENABLE); 1063 + break; 1064 + 1365 1065 default: 1366 1066 break; 1367 1067 } ··· 1412 1102 result = get_disp_caps_v4_4(bp, dce_caps); 1413 1103 *dce_caps = !!(*dce_caps & DCE_INFO_CAPS_LTTPR_SUPPORT_ENABLE); 1414 1104 break; 1105 + case 5: 1106 + result = get_disp_caps_v4_5(bp, dce_caps); 1107 + *dce_caps = !!(*dce_caps & DCE_INFO_CAPS_LTTPR_SUPPORT_ENABLE); 1108 + 1415 1109 default: 1416 1110 break; 1417 1111 } ··· 1532 1218 default: 1533 1219 break; 1534 1220 } 1535 - break; 1536 1221 default: 1537 1222 break; 1538 1223 } ··· 1587 1274 1588 1275 uint32_t mask = get_support_mask_for_device_id(id); 1589 1276 1590 - return (le16_to_cpu(bp->object_info_tbl.v1_4->supporteddevices) & 1591 - mask) != 0; 1277 + switch (bp->object_info_tbl.revision.minor) { 1278 + case 4: 1279 + default: 1280 + return (le16_to_cpu(bp->object_info_tbl.v1_4->supporteddevices) & mask) != 0; 1281 + break; 1282 + case 5: 1283 + return (le16_to_cpu(bp->object_info_tbl.v1_5->supporteddevices) & mask) != 0; 1284 + break; 1285 + } 1286 + 1287 + return false; 1592 
1288 } 1593 1289 1594 1290 static uint32_t bios_parser_get_ss_entry_number( ··· 1730 1408 bios_set_scratch_critical_state(dcb, state); 1731 1409 } 1732 1410 1411 + struct atom_dig_transmitter_info_header_v5_3 { 1412 + struct atom_common_table_header table_header; 1413 + uint16_t dpphy_hdmi_settings_offset; 1414 + uint16_t dpphy_dvi_settings_offset; 1415 + uint16_t dpphy_dp_setting_table_offset; 1416 + uint16_t uniphy_xbar_settings_v2_table_offset; 1417 + uint16_t dpphy_internal_reg_overide_offset; 1418 + }; 1419 + 1733 1420 static enum bp_result bios_parser_get_firmware_info( 1734 1421 struct dc_bios *dcb, 1735 1422 struct dc_firmware_info *info) 1736 1423 { 1737 1424 struct bios_parser *bp = BP_FROM_DCB(dcb); 1738 - enum bp_result result = BP_RESULT_BADBIOSTABLE; 1425 + static enum bp_result result = BP_RESULT_BADBIOSTABLE; 1739 1426 struct atom_common_table_header *header; 1740 1427 1741 1428 struct atom_data_revision revision; ··· 1921 1590 struct atom_data_revision revision; 1922 1591 struct atom_display_controller_info_v4_1 *dce_info_v4_1 = NULL; 1923 1592 struct atom_display_controller_info_v4_4 *dce_info_v4_4 = NULL; 1593 + 1594 + struct atom_smu_info_v3_5 *smu_info_v3_5 = NULL; 1595 + struct atom_display_controller_info_v4_5 *dce_info_v4_5 = NULL; 1596 + struct atom_smu_info_v4_0 *smu_info_v4_0 = NULL; 1597 + 1924 1598 if (!info) 1925 1599 return BP_RESULT_BADINPUT; 1926 1600 ··· 1945 1609 switch (revision.major) { 1946 1610 case 4: 1947 1611 switch (revision.minor) { 1612 + case 5: 1613 + dce_info_v4_5 = GET_IMAGE(struct atom_display_controller_info_v4_5, 1614 + DATA_TABLES(dce_info)); 1615 + 1616 + if (!dce_info_v4_5) 1617 + return BP_RESULT_BADBIOSTABLE; 1618 + 1619 + /* 100MHz expected */ 1620 + info->pll_info.crystal_frequency = dce_info_v4_5->dce_refclk_10khz * 10; 1621 + info->dp_phy_ref_clk = dce_info_v4_5->dpphy_refclk_10khz * 10; 1622 + /* 50MHz expected */ 1623 + info->i2c_engine_ref_clk = dce_info_v4_5->i2c_engine_refclk_10khz * 10; 1624 + 1625 
+ /* For DCN32/321 Display PLL VCO Frequency from dce_info_v4_5 may not be reliable */ 1626 + break; 1627 + 1948 1628 case 4: 1949 1629 dce_info_v4_4 = GET_IMAGE(struct atom_display_controller_info_v4_4, 1950 1630 DATA_TABLES(dce_info)); ··· 2002 1650 DATA_TABLES(smu_info)); 2003 1651 get_atom_data_table_revision(header, &revision); 2004 1652 1653 + switch (revision.major) { 1654 + case 3: 1655 + switch (revision.minor) { 1656 + case 5: 1657 + smu_info_v3_5 = GET_IMAGE(struct atom_smu_info_v3_5, 1658 + DATA_TABLES(smu_info)); 1659 + 1660 + if (!smu_info_v3_5) 1661 + return BP_RESULT_BADBIOSTABLE; 1662 + 1663 + info->default_engine_clk = smu_info_v3_5->bootup_dcefclk_10khz * 10; 1664 + break; 1665 + 1666 + default: 1667 + break; 1668 + } 1669 + break; 1670 + 1671 + case 4: 1672 + switch (revision.minor) { 1673 + case 0: 1674 + smu_info_v4_0 = GET_IMAGE(struct atom_smu_info_v4_0, 1675 + DATA_TABLES(smu_info)); 1676 + 1677 + if (!smu_info_v4_0) 1678 + return BP_RESULT_BADBIOSTABLE; 1679 + 1680 + /* For DCN32/321 bootup DCFCLK from smu_info_v4_0 may not be reliable */ 1681 + break; 1682 + 1683 + default: 1684 + break; 1685 + } 1686 + break; 1687 + 1688 + default: 1689 + break; 1690 + } 1691 + 2005 1692 // We need to convert from 10KHz units into KHz units. 
2006 1693 info->default_memory_clk = firmware_info->bootup_mclk_in10khz * 10; 2007 1694 ··· 2065 1674 2066 1675 if (!info) 2067 1676 return BP_RESULT_BADINPUT; 1677 + 1678 + #if defined(CONFIG_DRM_AMD_DC_DCN) 1679 + /* encoder cap record not available in v1_5 */ 1680 + if (bp->object_info_tbl.revision.minor == 5) 1681 + return BP_RESULT_NORECORD; 1682 + #endif 2068 1683 2069 1684 object = get_bios_object(bp, object_id); 2070 1685 ··· 2178 1781 return NULL; 2179 1782 } 2180 1783 1784 + static struct atom_connector_caps_record *get_connector_caps_record( 1785 + struct bios_parser *bp, 1786 + struct atom_display_object_path_v3 *object) 1787 + { 1788 + struct atom_common_record_header *header; 1789 + uint32_t offset; 1790 + 1791 + if (!object) { 1792 + BREAK_TO_DEBUGGER(); /* Invalid object */ 1793 + return NULL; 1794 + } 1795 + 1796 + offset = object->disp_recordoffset + bp->object_info_tbl_offset; 1797 + 1798 + for (;;) { 1799 + header = GET_IMAGE(struct atom_common_record_header, offset); 1800 + 1801 + if (!header) 1802 + return NULL; 1803 + 1804 + offset += header->record_size; 1805 + 1806 + if (header->record_type == ATOM_RECORD_END_TYPE || 1807 + !header->record_size) 1808 + break; 1809 + 1810 + if (header->record_type != ATOM_CONNECTOR_CAP_RECORD_TYPE) 1811 + continue; 1812 + 1813 + if (sizeof(struct atom_connector_caps_record) <= header->record_size) 1814 + return (struct atom_connector_caps_record *)header; 1815 + } 1816 + 1817 + return NULL; 1818 + } 1819 + 2181 1820 static enum bp_result bios_parser_get_disp_connector_caps_info( 2182 1821 struct dc_bios *dcb, 2183 1822 struct graphics_object_id object_id, ··· 2221 1788 { 2222 1789 struct bios_parser *bp = BP_FROM_DCB(dcb); 2223 1790 struct atom_display_object_path_v2 *object; 1791 + 1792 + struct atom_display_object_path_v3 *object_path_v3; 1793 + struct atom_connector_caps_record *record_path_v3; 1794 + 2224 1795 struct atom_disp_connector_caps_record *record = NULL; 2225 1796 2226 1797 if (!info) 2227 1798 
return BP_RESULT_BADINPUT; 2228 1799 2229 - object = get_bios_object(bp, object_id); 1800 + switch (bp->object_info_tbl.revision.minor) { 1801 + case 4: 1802 + default: 1803 + object = get_bios_object(bp, object_id); 2230 1804 2231 - if (!object) 1805 + if (!object) 1806 + return BP_RESULT_BADINPUT; 1807 + 1808 + record = get_disp_connector_caps_record(bp, object); 1809 + if (!record) 1810 + return BP_RESULT_NORECORD; 1811 + 1812 + info->INTERNAL_DISPLAY = 1813 + (record->connectcaps & ATOM_CONNECTOR_CAP_INTERNAL_DISPLAY) ? 1 : 0; 1814 + info->INTERNAL_DISPLAY_BL = 1815 + (record->connectcaps & ATOM_CONNECTOR_CAP_INTERNAL_DISPLAY_BL) ? 1 : 0; 1816 + break; 1817 + case 5: 1818 + object_path_v3 = get_bios_object_from_path_v3(bp, object_id); 1819 + 1820 + if (!object_path_v3) 1821 + return BP_RESULT_BADINPUT; 1822 + 1823 + record_path_v3 = get_connector_caps_record(bp, object_path_v3); 1824 + if (!record_path_v3) 1825 + return BP_RESULT_NORECORD; 1826 + 1827 + info->INTERNAL_DISPLAY = (record_path_v3->connector_caps & ATOM_CONNECTOR_CAP_INTERNAL_DISPLAY) 1828 + ? 1 : 0; 1829 + info->INTERNAL_DISPLAY_BL = (record_path_v3->connector_caps & ATOM_CONNECTOR_CAP_INTERNAL_DISPLAY_BL) 1830 + ? 
1 : 0; 1831 + break; 1832 + } 1833 + 1834 + return BP_RESULT_OK; 1835 + } 1836 + 1837 + static struct atom_connector_speed_record *get_connector_speed_cap_record( 1838 + struct bios_parser *bp, 1839 + struct atom_display_object_path_v3 *object) 1840 + { 1841 + struct atom_common_record_header *header; 1842 + uint32_t offset; 1843 + 1844 + if (!object) { 1845 + BREAK_TO_DEBUGGER(); /* Invalid object */ 1846 + return NULL; 1847 + } 1848 + 1849 + offset = object->disp_recordoffset + bp->object_info_tbl_offset; 1850 + 1851 + for (;;) { 1852 + header = GET_IMAGE(struct atom_common_record_header, offset); 1853 + 1854 + if (!header) 1855 + return NULL; 1856 + 1857 + offset += header->record_size; 1858 + 1859 + if (header->record_type == ATOM_RECORD_END_TYPE || 1860 + !header->record_size) 1861 + break; 1862 + 1863 + if (header->record_type != ATOM_CONNECTOR_SPEED_UPTO) 1864 + continue; 1865 + 1866 + if (sizeof(struct atom_connector_speed_record) <= header->record_size) 1867 + return (struct atom_connector_speed_record *)header; 1868 + } 1869 + 1870 + return NULL; 1871 + } 1872 + 1873 + static enum bp_result bios_parser_get_connector_speed_cap_info( 1874 + struct dc_bios *dcb, 1875 + struct graphics_object_id object_id, 1876 + struct bp_connector_speed_cap_info *info) 1877 + { 1878 + struct bios_parser *bp = BP_FROM_DCB(dcb); 1879 + struct atom_display_object_path_v3 *object_path_v3; 1880 + //struct atom_connector_speed_record *record = NULL; 1881 + struct atom_connector_speed_record *record; 1882 + 1883 + if (!info) 2232 1884 return BP_RESULT_BADINPUT; 2233 1885 2234 - record = get_disp_connector_caps_record(bp, object); 1886 + object_path_v3 = get_bios_object_from_path_v3(bp, object_id); 1887 + 1888 + if (!object_path_v3) 1889 + return BP_RESULT_BADINPUT; 1890 + 1891 + record = get_connector_speed_cap_record(bp, object_path_v3); 2235 1892 if (!record) 2236 1893 return BP_RESULT_NORECORD; 2237 1894 2238 - info->INTERNAL_DISPLAY = (record->connectcaps & 
ATOM_CONNECTOR_CAP_INTERNAL_DISPLAY) 2239 - ? 1 : 0; 2240 - info->INTERNAL_DISPLAY_BL = (record->connectcaps & ATOM_CONNECTOR_CAP_INTERNAL_DISPLAY_BL) 2241 - ? 1 : 0; 2242 - 1895 + info->DP_HBR2_EN = (record->connector_max_speed >= 5400) ? 1 : 0; 1896 + info->DP_HBR3_EN = (record->connector_max_speed >= 8100) ? 1 : 0; 1897 + info->HDMI_6GB_EN = (record->connector_max_speed >= 5940) ? 1 : 0; 1898 + info->DP_UHBR10_EN = (record->connector_max_speed >= 10000) ? 1 : 0; 1899 + info->DP_UHBR13_5_EN = (record->connector_max_speed >= 13500) ? 1 : 0; 1900 + info->DP_UHBR20_EN = (record->connector_max_speed >= 20000) ? 1 : 0; 2243 1901 return BP_RESULT_OK; 2244 1902 } 2245 1903 ··· 2339 1815 struct dc_vram_info *info) 2340 1816 { 2341 1817 struct atom_vram_info_header_v2_3 *info_v23; 2342 - enum bp_result result = BP_RESULT_OK; 1818 + static enum bp_result result = BP_RESULT_OK; 2343 1819 2344 1820 info_v23 = GET_IMAGE(struct atom_vram_info_header_v2_3, 2345 1821 DATA_TABLES(vram_info)); ··· 2358 1834 struct dc_vram_info *info) 2359 1835 { 2360 1836 struct atom_vram_info_header_v2_4 *info_v24; 2361 - enum bp_result result = BP_RESULT_OK; 1837 + static enum bp_result result = BP_RESULT_OK; 2362 1838 2363 1839 info_v24 = GET_IMAGE(struct atom_vram_info_header_v2_4, 2364 1840 DATA_TABLES(vram_info)); ··· 2377 1853 struct dc_vram_info *info) 2378 1854 { 2379 1855 struct atom_vram_info_header_v2_5 *info_v25; 2380 - enum bp_result result = BP_RESULT_OK; 1856 + static enum bp_result result = BP_RESULT_OK; 2381 1857 2382 1858 info_v25 = GET_IMAGE(struct atom_vram_info_header_v2_5, 2383 1859 DATA_TABLES(vram_info)); ··· 2402 1878 * integrated_info *info - [out] store and output integrated info 2403 1879 * 2404 1880 * @return 2405 - * enum bp_result - BP_RESULT_OK if information is available, 1881 + * static enum bp_result - BP_RESULT_OK if information is available, 2406 1882 * BP_RESULT_BADBIOSTABLE otherwise. 
2407 1883 */ 2408 1884 static enum bp_result get_integrated_info_v11( ··· 2893 2369 * integrated_info *info - [out] store and output integrated info 2894 2370 * 2895 2371 * @return 2896 - * enum bp_result - BP_RESULT_OK if information is available, 2372 + * static enum bp_result - BP_RESULT_OK if information is available, 2897 2373 * BP_RESULT_BADBIOSTABLE otherwise. 2898 2374 */ 2899 2375 static enum bp_result construct_integrated_info( 2900 2376 struct bios_parser *bp, 2901 2377 struct integrated_info *info) 2902 2378 { 2903 - enum bp_result result = BP_RESULT_BADBIOSTABLE; 2379 + static enum bp_result result = BP_RESULT_BADBIOSTABLE; 2904 2380 2905 2381 struct atom_common_table_header *header; 2906 2382 struct atom_data_revision revision; 2383 + 2384 + struct clock_voltage_caps temp = {0, 0}; 2907 2385 uint32_t i; 2908 2386 uint32_t j; 2909 2387 ··· 2953 2427 info->disp_clk_voltage[j-1].max_supported_clk 2954 2428 ) { 2955 2429 /* swap j and j - 1*/ 2956 - swap(info->disp_clk_voltage[j - 1], 2957 - info->disp_clk_voltage[j]); 2430 + temp = info->disp_clk_voltage[j-1]; 2431 + info->disp_clk_voltage[j-1] = 2432 + info->disp_clk_voltage[j]; 2433 + info->disp_clk_voltage[j] = temp; 2958 2434 } 2959 2435 } 2960 2436 } ··· 2969 2441 struct dc_vram_info *info) 2970 2442 { 2971 2443 struct bios_parser *bp = BP_FROM_DCB(dcb); 2972 - enum bp_result result = BP_RESULT_BADBIOSTABLE; 2444 + static enum bp_result result = BP_RESULT_BADBIOSTABLE; 2973 2445 struct atom_common_table_header *header; 2974 2446 struct atom_data_revision revision; 2975 2447 ··· 3035 2507 struct atom_display_object_path_v2 *object; 3036 2508 struct atom_bracket_layout_record *record; 3037 2509 struct atom_common_record_header *record_header; 3038 - enum bp_result result; 2510 + static enum bp_result result; 3039 2511 struct bios_parser *bp; 3040 2512 struct object_info_table *tbl; 3041 2513 struct display_object_info_table_v1_4 *v1_4; ··· 3141 2613 return result; 3142 2614 } 3143 2615 2616 + static 
enum bp_result update_slot_layout_info_v2( 2617 + struct dc_bios *dcb, 2618 + unsigned int i, 2619 + struct slot_layout_info *slot_layout_info) 2620 + { 2621 + unsigned int record_offset; 2622 + struct atom_display_object_path_v3 *object; 2623 + struct atom_bracket_layout_record_v2 *record; 2624 + struct atom_common_record_header *record_header; 2625 + static enum bp_result result; 2626 + struct bios_parser *bp; 2627 + struct object_info_table *tbl; 2628 + struct display_object_info_table_v1_5 *v1_5; 2629 + struct graphics_object_id connector_id; 2630 + 2631 + record = NULL; 2632 + record_header = NULL; 2633 + result = BP_RESULT_NORECORD; 2634 + 2635 + bp = BP_FROM_DCB(dcb); 2636 + tbl = &bp->object_info_tbl; 2637 + v1_5 = tbl->v1_5; 2638 + 2639 + object = &v1_5->display_path[i]; 2640 + record_offset = (unsigned int) 2641 + (object->disp_recordoffset) + 2642 + (unsigned int)(bp->object_info_tbl_offset); 2643 + 2644 + for (;;) { 2645 + 2646 + record_header = (struct atom_common_record_header *) 2647 + GET_IMAGE(struct atom_common_record_header, 2648 + record_offset); 2649 + if (record_header == NULL) { 2650 + result = BP_RESULT_BADBIOSTABLE; 2651 + break; 2652 + } 2653 + 2654 + /* the end of the list */ 2655 + if (record_header->record_type == ATOM_RECORD_END_TYPE || 2656 + record_header->record_size == 0) { 2657 + break; 2658 + } 2659 + 2660 + if (record_header->record_type == 2661 + ATOM_BRACKET_LAYOUT_V2_RECORD_TYPE && 2662 + sizeof(struct atom_bracket_layout_record_v2) 2663 + <= record_header->record_size) { 2664 + record = (struct atom_bracket_layout_record_v2 *) 2665 + (record_header); 2666 + result = BP_RESULT_OK; 2667 + break; 2668 + } 2669 + 2670 + record_offset += record_header->record_size; 2671 + } 2672 + 2673 + /* return if the record not found */ 2674 + if (result != BP_RESULT_OK) 2675 + return result; 2676 + 2677 + /* get slot sizes */ 2678 + connector_id = object_id_from_bios_object_id(object->display_objid); 2679 + 2680 + slot_layout_info->length = 
record->bracketlen; 2681 + slot_layout_info->width = record->bracketwidth; 2682 + slot_layout_info->num_of_connectors = v1_5->number_of_path; 2683 + slot_layout_info->connectors[i].position = record->conn_num; 2684 + slot_layout_info->connectors[i].connector_id = connector_id; 2685 + 2686 + switch (connector_id.id) { 2687 + case CONNECTOR_ID_SINGLE_LINK_DVID: 2688 + case CONNECTOR_ID_DUAL_LINK_DVID: 2689 + slot_layout_info->connectors[i].connector_type = CONNECTOR_LAYOUT_TYPE_DVI_D; 2690 + slot_layout_info->connectors[i].length = CONNECTOR_SIZE_DVI; 2691 + break; 2692 + 2693 + case CONNECTOR_ID_HDMI_TYPE_A: 2694 + slot_layout_info->connectors[i].connector_type = CONNECTOR_LAYOUT_TYPE_HDMI; 2695 + slot_layout_info->connectors[i].length = CONNECTOR_SIZE_HDMI; 2696 + break; 2697 + 2698 + case CONNECTOR_ID_DISPLAY_PORT: 2699 + if (record->mini_type == MINI_TYPE_NORMAL) { 2700 + slot_layout_info->connectors[i].connector_type = CONNECTOR_LAYOUT_TYPE_DP; 2701 + slot_layout_info->connectors[i].length = CONNECTOR_SIZE_DP; 2702 + } else { 2703 + slot_layout_info->connectors[i].connector_type = CONNECTOR_LAYOUT_TYPE_MINI_DP; 2704 + slot_layout_info->connectors[i].length = CONNECTOR_SIZE_MINI_DP; 2705 + } 2706 + break; 2707 + 2708 + default: 2709 + slot_layout_info->connectors[i].connector_type = CONNECTOR_LAYOUT_TYPE_UNKNOWN; 2710 + slot_layout_info->connectors[i].length = CONNECTOR_SIZE_UNKNOWN; 2711 + } 2712 + return result; 2713 + } 3144 2714 3145 2715 static enum bp_result get_bracket_layout_record( 3146 2716 struct dc_bios *dcb, ··· 3247 2621 { 3248 2622 unsigned int i; 3249 2623 struct bios_parser *bp = BP_FROM_DCB(dcb); 3250 - enum bp_result result; 2624 + static enum bp_result result; 3251 2625 struct object_info_table *tbl; 3252 2626 struct display_object_info_table_v1_4 *v1_4; 2627 + struct display_object_info_table_v1_5 *v1_5; 3253 2628 3254 2629 if (slot_layout_info == NULL) { 3255 2630 DC_LOG_DETECTION_EDID_PARSER("Invalid slot_layout_info\n"); ··· 3260 2633 v1_4 
= tbl->v1_4; 3261 2634 3262 2635 result = BP_RESULT_NORECORD; 3263 - for (i = 0; i < v1_4->number_of_path; ++i) { 3264 - 3265 - if (bracket_layout_id == 3266 - v1_4->display_path[i].display_objid) { 3267 - result = update_slot_layout_info(dcb, i, 3268 - slot_layout_info); 2636 + switch (bp->object_info_tbl.revision.minor) { 2637 + case 4: 2638 + default: 2639 + for (i = 0; i < v1_4->number_of_path; ++i) { 2640 + if (bracket_layout_id == 2641 + v1_4->display_path[i].display_objid) { 2642 + result = update_slot_layout_info(dcb, i, slot_layout_info); 2643 + break; 2644 + } 2645 + } 2646 + break; 2647 + case 5: 2648 + for (i = 0; i < v1_5->number_of_path; ++i) 2649 + result = update_slot_layout_info_v2(dcb, i, slot_layout_info); 3269 2650 break; 3270 - } 3271 2651 } 3272 2652 return result; 3273 2653 } ··· 3284 2650 struct board_layout_info *board_layout_info) 3285 2651 { 3286 2652 unsigned int i; 3287 - enum bp_result record_result; 2653 + 2654 + struct bios_parser *bp; 2655 + 2656 + static enum bp_result record_result; 3288 2657 3289 2658 const unsigned int slot_index_to_vbios_id[MAX_BOARD_SLOTS] = { 3290 2659 GENERICOBJECT_BRACKET_LAYOUT_ENUM_ID1, 3291 2660 GENERICOBJECT_BRACKET_LAYOUT_ENUM_ID2, 3292 2661 0, 0 3293 2662 }; 2663 + 2664 + 2665 + bp = BP_FROM_DCB(dcb); 3294 2666 3295 2667 if (board_layout_info == NULL) { 3296 2668 DC_LOG_DETECTION_EDID_PARSER("Invalid board_layout_info\n"); ··· 3332 2692 struct dc_bios *dcb, 3333 2693 void *dst) 3334 2694 { 3335 - #ifdef PACK_BIOS_DATA 3336 - struct bios_parser *bp = BP_FROM_DCB(dcb); 3337 - struct atom_rom_header_v2_2 *rom_header = NULL; 3338 - struct atom_rom_header_v2_2 *packed_rom_header = NULL; 3339 - struct atom_common_table_header *data_tbl_header = NULL; 3340 - struct atom_master_list_of_data_tables_v2_1 *data_tbl_list = NULL; 3341 - struct atom_master_data_table_v2_1 *packed_master_data_tbl = NULL; 3342 - struct atom_data_revision tbl_rev = {0}; 3343 - uint16_t *rom_header_offset = NULL; 3344 - const uint8_t 
*bios = bp->base.bios; 3345 - uint8_t *bios_dst = (uint8_t *)dst; 3346 - uint16_t packed_rom_header_offset; 3347 - uint16_t packed_masterdatatable_offset; 3348 - uint16_t packed_data_tbl_offset; 3349 - uint16_t data_tbl_offset; 3350 - unsigned int i; 3351 - 3352 - rom_header_offset = 3353 - GET_IMAGE(uint16_t, OFFSET_TO_ATOM_ROM_HEADER_POINTER); 3354 - 3355 - if (!rom_header_offset) 3356 - return 0; 3357 - 3358 - rom_header = GET_IMAGE(struct atom_rom_header_v2_2, *rom_header_offset); 3359 - 3360 - if (!rom_header) 3361 - return 0; 3362 - 3363 - get_atom_data_table_revision(&rom_header->table_header, &tbl_rev); 3364 - if (!(tbl_rev.major >= 2 && tbl_rev.minor >= 2)) 3365 - return 0; 3366 - 3367 - get_atom_data_table_revision(&bp->master_data_tbl->table_header, &tbl_rev); 3368 - if (!(tbl_rev.major >= 2 && tbl_rev.minor >= 1)) 3369 - return 0; 3370 - 3371 - packed_rom_header_offset = 3372 - OFFSET_TO_ATOM_ROM_HEADER_POINTER + sizeof(*rom_header_offset); 3373 - 3374 - packed_masterdatatable_offset = 3375 - packed_rom_header_offset + rom_header->table_header.structuresize; 3376 - 3377 - packed_data_tbl_offset = 3378 - packed_masterdatatable_offset + 3379 - bp->master_data_tbl->table_header.structuresize; 3380 - 3381 - packed_rom_header = 3382 - (struct atom_rom_header_v2_2 *)(bios_dst + packed_rom_header_offset); 3383 - 3384 - packed_master_data_tbl = 3385 - (struct atom_master_data_table_v2_1 *)(bios_dst + 3386 - packed_masterdatatable_offset); 3387 - 3388 - memcpy(bios_dst, bios, OFFSET_TO_ATOM_ROM_HEADER_POINTER); 3389 - 3390 - *((uint16_t *)(bios_dst + OFFSET_TO_ATOM_ROM_HEADER_POINTER)) = 3391 - packed_rom_header_offset; 3392 - 3393 - memcpy(bios_dst + packed_rom_header_offset, rom_header, 3394 - rom_header->table_header.structuresize); 3395 - 3396 - packed_rom_header->masterdatatable_offset = packed_masterdatatable_offset; 3397 - 3398 - memcpy(&packed_master_data_tbl->table_header, 3399 - &bp->master_data_tbl->table_header, 3400 - 
sizeof(bp->master_data_tbl->table_header)); 3401 - 3402 - data_tbl_list = &bp->master_data_tbl->listOfdatatables; 3403 - 3404 - /* Each data table offset in data table list is 2 bytes, 3405 - * we can use that to iterate through listOfdatatables 3406 - * without knowing the name of each member. 3407 - */ 3408 - for (i = 0; i < sizeof(*data_tbl_list)/sizeof(uint16_t); i++) { 3409 - data_tbl_offset = *((uint16_t *)data_tbl_list + i); 3410 - 3411 - if (data_tbl_offset) { 3412 - data_tbl_header = 3413 - (struct atom_common_table_header *)(bios + data_tbl_offset); 3414 - 3415 - memcpy(bios_dst + packed_data_tbl_offset, data_tbl_header, 3416 - data_tbl_header->structuresize); 3417 - 3418 - *((uint16_t *)&packed_master_data_tbl->listOfdatatables + i) = 3419 - packed_data_tbl_offset; 3420 - 3421 - packed_data_tbl_offset += data_tbl_header->structuresize; 3422 - } else { 3423 - *((uint16_t *)&packed_master_data_tbl->listOfdatatables + i) = 0; 3424 - } 3425 - } 3426 - return packed_data_tbl_offset; 3427 - #endif 3428 2695 // TODO: There is data bytes alignment issue, disable it for now. 3429 2696 return 0; 3430 2697 } ··· 3360 2813 return NULL; 3361 2814 dc_golden_offset = DATA_TABLES(dce_info) + disp_cntl_tbl_4_4->dc_golden_table_offset; 3362 2815 *dc_golden_table_ver = disp_cntl_tbl_4_4->dc_golden_table_ver; 2816 + break; 2817 + case 5: 2818 + default: 2819 + /* For atom_display_controller_info_v4_5 there is no need to get golden table from 2820 + * dc_golden_table_offset as all these fields previously in golden table used for AUX 2821 + * pre-charge settings are now available directly in atom_display_controller_info_v4_5. 
2822 + */ 3363 2823 break; 3364 2824 } 3365 2825 break; ··· 3470 2916 .bios_parser_destroy = firmware_parser_destroy, 3471 2917 3472 2918 .get_board_layout_info = bios_get_board_layout_info, 2919 + /* TODO: use this fn in hw init?*/ 3473 2920 .pack_data_tables = bios_parser_pack_data_tables, 3474 2921 3475 2922 .get_atom_dc_golden_table = bios_get_atom_dc_golden_table, ··· 3484 2929 .get_lttpr_caps = bios_parser_get_lttpr_caps, 3485 2930 3486 2931 .get_lttpr_interop = bios_parser_get_lttpr_interop, 2932 + 2933 + .get_connector_speed_cap_info = bios_parser_get_connector_speed_cap_info, 3487 2934 }; 3488 2935 3489 2936 static bool bios_parser2_construct( ··· 3559 3002 return false; 3560 3003 3561 3004 bp->object_info_tbl.v1_4 = tbl_v1_4; 3005 + } else if (bp->object_info_tbl.revision.major == 1 3006 + && bp->object_info_tbl.revision.minor == 5) { 3007 + struct display_object_info_table_v1_5 *tbl_v1_5; 3008 + 3009 + tbl_v1_5 = GET_IMAGE(struct display_object_info_table_v1_5, 3010 + bp->object_info_tbl_offset); 3011 + if (!tbl_v1_5) 3012 + return false; 3013 + 3014 + bp->object_info_tbl.v1_5 = tbl_v1_5; 3562 3015 } else { 3563 3016 ASSERT(0); 3564 3017 return false;
+1
drivers/gpu/drm/amd/display/dc/bios/bios_parser_types_internal2.h
··· 40 40 struct atom_data_revision revision; 41 41 union { 42 42 struct display_object_info_table_v1_4 *v1_4; 43 + struct display_object_info_table_v1_5 *v1_5; 43 44 }; 44 45 }; 45 46
+2
drivers/gpu/drm/amd/display/dc/bios/command_table_helper2.c
··· 77 77 case DCN_VERSION_3_1: 78 78 case DCN_VERSION_3_15: 79 79 case DCN_VERSION_3_16: 80 + case DCN_VERSION_3_2: 81 + case DCN_VERSION_3_21: 80 82 *h = dal_cmd_tbl_helper_dce112_get_table2(); 81 83 return true; 82 84
+1 -1
drivers/gpu/drm/amd/display/dc/clk_mgr/clk_mgr.c
··· 328 328 dcn32_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg); 329 329 return &clk_mgr->base; 330 330 break; 331 - #endif 332 331 } 332 + #endif 333 333 default: 334 334 ASSERT(0); /* Unknown Asic */ 335 335 break;
+9 -5
drivers/gpu/drm/amd/display/dc/core/dc.c
··· 3054 3054 3055 3055 } 3056 3056 3057 - if (should_lock_all_pipes && dc->hwss.interdependent_update_lock) { 3058 - dc->hwss.interdependent_update_lock(dc, context, false); 3059 - } else { 3060 - dc->hwss.pipe_control_lock(dc, top_pipe_to_program, false); 3061 - } 3057 + #ifdef CONFIG_DRM_AMD_DC_DCN 3058 + if (update_type != UPDATE_TYPE_FAST) 3059 + if (dc->hwss.commit_subvp_config) 3060 + dc->hwss.commit_subvp_config(dc, context); 3061 + #endif 3062 + if (should_lock_all_pipes && dc->hwss.interdependent_update_lock) 3063 + dc->hwss.interdependent_update_lock(dc, context, false); 3064 + else 3065 + dc->hwss.pipe_control_lock(dc, top_pipe_to_program, false); 3062 3066 3063 3067 if ((update_type != UPDATE_TYPE_FAST) && stream->update_flags.bits.dsc_changed) 3064 3068 if (top_pipe_to_program->stream_res.tg->funcs->lock_doublebuffer_enable) {
+19 -1
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
··· 67 67 #include "dcn31/dcn31_resource.h" 68 68 #include "dcn315/dcn315_resource.h" 69 69 #include "dcn316/dcn316_resource.h" 70 + #include "../dcn32/dcn32_resource.h" 71 + #include "../dcn321/dcn321_resource.h" 70 72 71 73 #define DC_LOGGER_INIT(logger) 72 74 ··· 164 162 if (ASICREV_IS_GC_10_3_7(asic_id.hw_internal_rev)) 165 163 dc_version = DCN_VERSION_3_16; 166 164 break; 167 - 165 + case AMDGPU_FAMILY_GC_11_0_0: 166 + dc_version = DCN_VERSION_3_2; 167 + if (ASICREV_IS_GC_11_0_2(asic_id.hw_internal_rev)) 168 + dc_version = DCN_VERSION_3_21; 169 + break; 168 170 default: 169 171 dc_version = DCE_VERSION_UNKNOWN; 170 172 break; ··· 263 257 break; 264 258 case DCN_VERSION_3_16: 265 259 res_pool = dcn316_create_resource_pool(init_data, dc); 260 + break; 261 + case DCN_VERSION_3_2: 262 + res_pool = dcn32_create_resource_pool(init_data, dc); 263 + break; 264 + case DCN_VERSION_3_21: 265 + res_pool = dcn321_create_resource_pool(init_data, dc); 266 266 break; 267 267 #endif 268 268 default: ··· 1994 1982 dc->res_pool, 1995 1983 del_pipe->stream_res.stream_enc, 1996 1984 false); 1985 + /* Release link encoder from stream in new dc_state. */ 1986 + if (dc->res_pool->funcs->link_enc_unassign) 1987 + dc->res_pool->funcs->link_enc_unassign(new_ctx, del_pipe->stream); 1988 + 1989 + #if defined(CONFIG_DRM_AMD_DC_DCN) 1997 1990 if (is_dp_128b_132b_signal(del_pipe)) { 1998 1991 update_hpo_dp_stream_engine_usage( 1999 1992 &new_ctx->res_ctx, dc->res_pool, ··· 2006 1989 false); 2007 1990 remove_hpo_dp_link_enc_from_ctx(&new_ctx->res_ctx, del_pipe, del_pipe->stream); 2008 1991 } 1992 + #endif 2009 1993 2010 1994 if (del_pipe->stream_res.audio) 2011 1995 update_audio_usage(
+19
drivers/gpu/drm/amd/display/dc/dc.h
··· 162 162 struct mpc_color_caps mpc; 163 163 }; 164 164 165 + struct dc_dmub_caps { 166 + bool psr; 167 + }; 168 + 165 169 struct dc_caps { 166 170 uint32_t max_streams; 167 171 uint32_t max_links; ··· 200 196 unsigned int cursor_cache_size; 201 197 struct dc_plane_cap planes[MAX_PLANES]; 202 198 struct dc_color_caps color; 199 + struct dc_dmub_caps dmub_caps; 203 200 bool dp_hpo; 204 201 bool hdmi_frl_pcon_support; 205 202 bool edp_dsc_support; 206 203 bool vbios_lttpr_aware; 207 204 bool vbios_lttpr_enable; 208 205 uint32_t max_otg_num; 206 + #ifdef CONFIG_DRM_AMD_DC_DCN 207 + uint32_t max_cab_allocation_bytes; 208 + uint32_t cache_line_size; 209 + uint32_t cache_num_ways; 210 + uint16_t subvp_fw_processing_delay_us; 211 + uint16_t subvp_prefetch_end_to_mall_start_us; 212 + uint16_t subvp_pstate_allow_width_us; 213 + uint16_t subvp_vertical_int_margin_us; 214 + #endif 209 215 }; 210 216 211 217 struct dc_bug_wa { ··· 441 427 */ 442 428 bool prev_p_state_change_support; 443 429 bool fclk_prev_p_state_change_support; 430 + int num_ways; 431 + int prev_num_ways; 444 432 enum dtm_pstate dtm_level; 445 433 int max_supported_dppclk_khz; 446 434 int max_supported_dispclk_khz; ··· 737 721 bool enable_z9_disable_interface; 738 722 bool enable_sw_cntl_psr; 739 723 union dpia_debug_options dpia_debug; 724 + bool force_disable_subvp; 725 + bool force_subvp_mclk_switch; 726 + bool force_usr_allow; 740 727 bool apply_vendor_specific_lttpr_wa; 741 728 bool extended_blank_optimization; 742 729 union aux_wake_wa_options aux_wake_wa;
+5
drivers/gpu/drm/amd/display/dc/dc_bios_types.h
··· 156 156 enum bp_result (*get_lttpr_interop)( 157 157 struct dc_bios *dcb, 158 158 uint8_t *dce_caps); 159 + 160 + enum bp_result (*get_connector_speed_cap_info)( 161 + struct dc_bios *bios, 162 + struct graphics_object_id object_id, 163 + struct bp_connector_speed_cap_info *info); 159 164 }; 160 165 161 166 struct bios_registers {
+1
drivers/gpu/drm/amd/display/dc/dc_hw_types.h
··· 780 780 uint32_t v_sync_width; 781 781 782 782 uint32_t pix_clk_100hz; 783 + uint32_t min_refresh_in_uhz; 783 784 784 785 uint32_t vic; 785 786 uint32_t hdmi_vic;
+21
drivers/gpu/drm/amd/display/dc/dc_stream.h
··· 145 145 unsigned int cust_pattern_size; 146 146 }; 147 147 148 + #ifdef CONFIG_DRM_AMD_DC_DCN 149 + #define SUBVP_DRR_MARGIN_US 500 // 500us for DRR margin (SubVP + DRR) 150 + 151 + enum mall_stream_type { 152 + SUBVP_NONE, // subvp not in use 153 + SUBVP_MAIN, // subvp in use, this stream is main stream 154 + SUBVP_PHANTOM, // subvp in use, this stream is a phantom stream 155 + }; 156 + 157 + struct mall_stream_config { 158 + /* MALL stream config to indicate if the stream is phantom or not. 159 + * We will use a phantom stream to indicate that the pipe is phantom. 160 + */ 161 + enum mall_stream_type type; 162 + struct dc_stream_state *paired_stream; // master / slave stream 163 + }; 164 + #endif 165 + 148 166 struct dc_stream_state { 149 167 // sink is deprecated, new code should not reference 150 168 // this pointer ··· 273 255 274 256 bool has_non_synchronizable_pclk; 275 257 bool vblank_synchronized; 258 + #ifdef CONFIG_DRM_AMD_DC_DCN 259 + struct mall_stream_config mall_stream_config; 260 + #endif 276 261 }; 277 262 278 263 #define ABM_LEVEL_IMMEDIATE_DISABLE 255
+45
drivers/gpu/drm/amd/display/dc/dce/dce_abm.h
··· 128 128 SRI(DC_ABM1_ACE_THRES_12, ABM, id), \ 129 129 NBIO_SR(BIOS_SCRATCH_2) 130 130 131 + #define ABM_DCN32_REG_LIST(id)\ 132 + SRI(DC_ABM1_HG_SAMPLE_RATE, ABM, id), \ 133 + SRI(DC_ABM1_LS_SAMPLE_RATE, ABM, id), \ 134 + SRI(BL1_PWM_BL_UPDATE_SAMPLE_RATE, ABM, id), \ 135 + SRI(DC_ABM1_HG_MISC_CTRL, ABM, id), \ 136 + SRI(DC_ABM1_IPCSC_COEFF_SEL, ABM, id), \ 137 + SRI(BL1_PWM_CURRENT_ABM_LEVEL, ABM, id), \ 138 + SRI(BL1_PWM_TARGET_ABM_LEVEL, ABM, id), \ 139 + SRI(BL1_PWM_USER_LEVEL, ABM, id), \ 140 + SRI(DC_ABM1_LS_MIN_MAX_PIXEL_VALUE_THRES, ABM, id), \ 141 + SRI(DC_ABM1_HGLS_REG_READ_PROGRESS, ABM, id), \ 142 + SRI(DC_ABM1_ACE_OFFSET_SLOPE_0, ABM, id), \ 143 + SRI(DC_ABM1_ACE_THRES_12, ABM, id), \ 144 + NBIO_SR(BIOS_SCRATCH_2) 145 + 131 146 #define ABM_SF(reg_name, field_name, post_fix)\ 132 147 .field_name = reg_name ## __ ## field_name ## post_fix 133 148 ··· 217 202 #define ABM_MASK_SH_LIST_DCN20(mask_sh) ABM_MASK_SH_LIST_DCE110(mask_sh) 218 203 219 204 #define ABM_MASK_SH_LIST_DCN30(mask_sh) ABM_MASK_SH_LIST_DCN10(mask_sh) 205 + 206 + #define ABM_MASK_SH_LIST_DCN32(mask_sh) \ 207 + ABM_SF(ABM0_DC_ABM1_HG_MISC_CTRL, \ 208 + ABM1_HG_NUM_OF_BINS_SEL, mask_sh), \ 209 + ABM_SF(ABM0_DC_ABM1_HG_MISC_CTRL, \ 210 + ABM1_HG_VMAX_SEL, mask_sh), \ 211 + ABM_SF(ABM0_DC_ABM1_HG_MISC_CTRL, \ 212 + ABM1_HG_BIN_BITWIDTH_SIZE_SEL, mask_sh), \ 213 + ABM_SF(ABM0_DC_ABM1_IPCSC_COEFF_SEL, \ 214 + ABM1_IPCSC_COEFF_SEL_R, mask_sh), \ 215 + ABM_SF(ABM0_DC_ABM1_IPCSC_COEFF_SEL, \ 216 + ABM1_IPCSC_COEFF_SEL_G, mask_sh), \ 217 + ABM_SF(ABM0_DC_ABM1_IPCSC_COEFF_SEL, \ 218 + ABM1_IPCSC_COEFF_SEL_B, mask_sh), \ 219 + ABM_SF(ABM0_BL1_PWM_CURRENT_ABM_LEVEL, \ 220 + BL1_PWM_CURRENT_ABM_LEVEL, mask_sh), \ 221 + ABM_SF(ABM0_BL1_PWM_TARGET_ABM_LEVEL, \ 222 + BL1_PWM_TARGET_ABM_LEVEL, mask_sh), \ 223 + ABM_SF(ABM0_BL1_PWM_USER_LEVEL, \ 224 + BL1_PWM_USER_LEVEL, mask_sh), \ 225 + ABM_SF(ABM0_DC_ABM1_LS_MIN_MAX_PIXEL_VALUE_THRES, \ 226 + ABM1_LS_MIN_PIXEL_VALUE_THRES, mask_sh), \ 227 + 
ABM_SF(ABM0_DC_ABM1_LS_MIN_MAX_PIXEL_VALUE_THRES, \ 228 + ABM1_LS_MAX_PIXEL_VALUE_THRES, mask_sh), \ 229 + ABM_SF(ABM0_DC_ABM1_HGLS_REG_READ_PROGRESS, \ 230 + ABM1_HG_REG_READ_MISSED_FRAME_CLEAR, mask_sh), \ 231 + ABM_SF(ABM0_DC_ABM1_HGLS_REG_READ_PROGRESS, \ 232 + ABM1_LS_REG_READ_MISSED_FRAME_CLEAR, mask_sh), \ 233 + ABM_SF(ABM0_DC_ABM1_HGLS_REG_READ_PROGRESS, \ 234 + ABM1_BL_REG_READ_MISSED_FRAME_CLEAR, mask_sh) 220 235 221 236 #define ABM_REG_FIELD_LIST(type) \ 222 237 type ABM1_HG_NUM_OF_BINS_SEL; \
+15
drivers/gpu/drm/amd/display/dc/dce/dce_clock_source.h
··· 164 164 CS_SF(PHYPLLA_PIXCLK_RESYNC_CNTL, PHYPLLA_DCCG_DEEP_COLOR_CNTL, mask_sh),\ 165 165 CS_SF(OTG0_PIXEL_RATE_CNTL, DP_DTO0_ENABLE, mask_sh) 166 166 167 + #define CS_COMMON_MASK_SH_LIST_DCN3_2(mask_sh)\ 168 + CS_COMMON_MASK_SH_LIST_DCN2_0(mask_sh),\ 169 + CS_SF(OTG0_PIXEL_RATE_CNTL, PIPE0_DTO_SRC_SEL, mask_sh) 170 + 167 171 #define CS_COMMON_REG_LIST_DCN1_0(index, pllid) \ 168 172 SRI(PIXCLK_RESYNC_CNTL, PHYPLL, pllid),\ 169 173 SRII(PHASE, DP_DTO, 0),\ ··· 201 197 type DP_DTO0_MODULO; \ 202 198 type DP_DTO0_ENABLE; 203 199 200 + #if defined(CONFIG_DRM_AMD_DC_DCN) 201 + #define CS_REG_FIELD_LIST_DCN32(type) \ 202 + type PIPE0_DTO_SRC_SEL; 203 + #endif 204 + 204 205 struct dce110_clk_src_shift { 205 206 CS_REG_FIELD_LIST(uint8_t) 207 + #if defined(CONFIG_DRM_AMD_DC_DCN) 208 + CS_REG_FIELD_LIST_DCN32(uint8_t) 209 + #endif 206 210 }; 207 211 208 212 struct dce110_clk_src_mask{ 209 213 CS_REG_FIELD_LIST(uint32_t) 214 + #if defined(CONFIG_DRM_AMD_DC_DCN) 215 + CS_REG_FIELD_LIST_DCN32(uint32_t) 216 + #endif 210 217 }; 211 218 212 219 struct dce110_clk_src_regs {
+33
drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubbub.h
··· 158 158 uint32_t DCHUBBUB_ARB_ALLOW_SR_EXIT_WATERMARK_Z8_C; 159 159 uint32_t DCHUBBUB_ARB_ALLOW_SR_ENTER_WATERMARK_Z8_D; 160 160 uint32_t DCHUBBUB_ARB_ALLOW_SR_EXIT_WATERMARK_Z8_D; 161 + uint32_t DCHUBBUB_ARB_USR_RETRAINING_CNTL; 162 + uint32_t DCHUBBUB_ARB_USR_RETRAINING_WATERMARK_A; 163 + uint32_t DCHUBBUB_ARB_USR_RETRAINING_WATERMARK_B; 164 + uint32_t DCHUBBUB_ARB_USR_RETRAINING_WATERMARK_C; 165 + uint32_t DCHUBBUB_ARB_USR_RETRAINING_WATERMARK_D; 166 + uint32_t DCHUBBUB_ARB_UCLK_PSTATE_CHANGE_WATERMARK_A; 167 + uint32_t DCHUBBUB_ARB_UCLK_PSTATE_CHANGE_WATERMARK_B; 168 + uint32_t DCHUBBUB_ARB_UCLK_PSTATE_CHANGE_WATERMARK_C; 169 + uint32_t DCHUBBUB_ARB_UCLK_PSTATE_CHANGE_WATERMARK_D; 170 + uint32_t DCHUBBUB_ARB_FCLK_PSTATE_CHANGE_WATERMARK_A; 171 + uint32_t DCHUBBUB_ARB_FCLK_PSTATE_CHANGE_WATERMARK_B; 172 + uint32_t DCHUBBUB_ARB_FCLK_PSTATE_CHANGE_WATERMARK_C; 173 + uint32_t DCHUBBUB_ARB_FCLK_PSTATE_CHANGE_WATERMARK_D; 161 174 }; 175 + 176 + #define HUBBUB_REG_FIELD_LIST_DCN32(type) \ 177 + type DCHUBBUB_ARB_ALLOW_USR_RETRAINING_FORCE_VALUE;\ 178 + type DCHUBBUB_ARB_ALLOW_USR_RETRAINING_FORCE_ENABLE;\ 179 + type DCHUBBUB_ARB_DO_NOT_FORCE_ALLOW_USR_RETRAINING_DURING_PSTATE_CHANGE_REQUEST;\ 180 + type DCHUBBUB_ARB_DO_NOT_FORCE_ALLOW_USR_RETRAINING_DURING_PRE_CSTATE;\ 181 + type DCHUBBUB_ARB_USR_RETRAINING_WATERMARK_A;\ 182 + type DCHUBBUB_ARB_USR_RETRAINING_WATERMARK_B;\ 183 + type DCHUBBUB_ARB_USR_RETRAINING_WATERMARK_C;\ 184 + type DCHUBBUB_ARB_USR_RETRAINING_WATERMARK_D;\ 185 + type DCHUBBUB_ARB_UCLK_PSTATE_CHANGE_WATERMARK_A;\ 186 + type DCHUBBUB_ARB_UCLK_PSTATE_CHANGE_WATERMARK_B;\ 187 + type DCHUBBUB_ARB_UCLK_PSTATE_CHANGE_WATERMARK_C;\ 188 + type DCHUBBUB_ARB_UCLK_PSTATE_CHANGE_WATERMARK_D;\ 189 + type DCHUBBUB_ARB_FCLK_PSTATE_CHANGE_WATERMARK_A;\ 190 + type DCHUBBUB_ARB_FCLK_PSTATE_CHANGE_WATERMARK_B;\ 191 + type DCHUBBUB_ARB_FCLK_PSTATE_CHANGE_WATERMARK_C;\ 192 + type DCHUBBUB_ARB_FCLK_PSTATE_CHANGE_WATERMARK_D 162 193 163 194 /* set field name */ 164 
195 #define HUBBUB_SF(reg_name, field_name, post_fix)\ ··· 368 337 HUBBUB_STUTTER_REG_FIELD_LIST(uint8_t); 369 338 HUBBUB_HVM_REG_FIELD_LIST(uint8_t); 370 339 HUBBUB_RET_REG_FIELD_LIST(uint8_t); 340 + HUBBUB_REG_FIELD_LIST_DCN32(uint8_t); 371 341 }; 372 342 373 343 struct dcn_hubbub_mask { ··· 376 344 HUBBUB_STUTTER_REG_FIELD_LIST(uint32_t); 377 345 HUBBUB_HVM_REG_FIELD_LIST(uint32_t); 378 346 HUBBUB_RET_REG_FIELD_LIST(uint32_t); 347 + HUBBUB_REG_FIELD_LIST_DCN32(uint32_t); 379 348 }; 380 349 381 350 struct dc;
-5
drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c
··· 1375 1375 pipe_ctx->stream_res.tg = NULL; 1376 1376 pipe_ctx->plane_res.hubp = NULL; 1377 1377 1378 - if (tg->funcs->is_tg_enabled(tg)) { 1379 - if (tg->funcs->init_odm) 1380 - tg->funcs->init_odm(tg); 1381 - } 1382 - 1383 1378 tg->funcs->tg_init(tg); 1384 1379 } 1385 1380
+2
drivers/gpu/drm/amd/display/dc/dcn10/dcn10_optc.h
··· 514 514 type DIG_UPDATE_POSITION_X;\ 515 515 type DIG_UPDATE_POSITION_Y;\ 516 516 type OTG_H_TIMING_DIV_MODE;\ 517 + type OTG_H_TIMING_DIV_MODE_MANUAL;\ 517 518 type OTG_DRR_TIMING_DBUF_UPDATE_MODE;\ 518 519 type OTG_CRC_DSC_MODE;\ 519 520 type OTG_CRC_DATA_STREAM_COMBINE_MODE;\ ··· 554 553 int vupdate_offset; 555 554 int vupdate_width; 556 555 int vready_offset; 556 + struct dc_crtc_timing orginal_patched_timing; 557 557 enum signal_type signal; 558 558 }; 559 559
+25
drivers/gpu/drm/amd/display/dc/dcn10/dcn10_stream_encoder.h
··· 73 73 SRI(HDMI_ACR_48_1, DIG, id),\ 74 74 SRI(DP_DB_CNTL, DP, id), \ 75 75 SRI(DP_MSA_MISC, DP, id), \ 76 + SRI(DP_MSA_VBID_MISC, DP, id), \ 76 77 SRI(DP_MSA_COLORIMETRY, DP, id), \ 77 78 SRI(DP_MSA_TIMING_PARAM1, DP, id), \ 78 79 SRI(DP_MSA_TIMING_PARAM2, DP, id), \ ··· 187 186 uint32_t HDMI_GENERIC_PACKET_CONTROL9; 188 187 uint32_t HDMI_GENERIC_PACKET_CONTROL10; 189 188 uint32_t DIG_CLOCK_PATTERN; 189 + uint32_t DIG_FIFO_CTRL0; 190 190 }; 191 191 192 192 ··· 339 337 SE_SF(DIG0_DIG_FE_CNTL, DIG_SOURCE_SELECT, mask_sh),\ 340 338 SE_SF(DIG0_DIG_CLOCK_PATTERN, DIG_CLOCK_PATTERN, mask_sh) 341 339 340 + #if defined(CONFIG_DRM_AMD_DC_HDCP) 341 + #define SE_COMMON_MASK_SH_LIST_SOC(mask_sh)\ 342 + SE_COMMON_MASK_SH_LIST_SOC_BASE(mask_sh),\ 343 + SE_SF(DIG0_HDMI_VBI_PACKET_CONTROL, HDMI_ACP_SEND, mask_sh) 344 + #else 342 345 #define SE_COMMON_MASK_SH_LIST_SOC(mask_sh)\ 343 346 SE_COMMON_MASK_SH_LIST_SOC_BASE(mask_sh) 347 + #endif 344 348 345 349 #define SE_COMMON_MASK_SH_LIST_DCN10(mask_sh)\ 346 350 SE_COMMON_MASK_SH_LIST_SOC(mask_sh),\ ··· 575 567 type DP_SEC_GSP11_ENABLE;\ 576 568 type DP_SEC_GSP11_LINE_NUM 577 569 570 + #define SE_REG_FIELD_LIST_DCN3_2(type) \ 571 + type DIG_SYMCLK_FE_ON;\ 572 + type DIG_FIFO_READ_START_LEVEL;\ 573 + type DIG_FIFO_ENABLE;\ 574 + type DIG_FIFO_RESET;\ 575 + type DIG_FIFO_RESET_DONE 576 + 578 577 struct dcn10_stream_encoder_shift { 579 578 SE_REG_FIELD_LIST_DCN1_0(uint8_t); 579 + #if defined(CONFIG_DRM_AMD_DC_HDCP) 580 + uint8_t HDMI_ACP_SEND; 581 + #endif 580 582 SE_REG_FIELD_LIST_DCN2_0(uint8_t); 581 583 SE_REG_FIELD_LIST_DCN3_0(uint8_t); 584 + SE_REG_FIELD_LIST_DCN3_2(uint8_t); 585 + 582 586 }; 583 587 584 588 struct dcn10_stream_encoder_mask { 585 589 SE_REG_FIELD_LIST_DCN1_0(uint32_t); 590 + #if defined(CONFIG_DRM_AMD_DC_HDCP) 591 + uint32_t HDMI_ACP_SEND; 592 + #endif 586 593 SE_REG_FIELD_LIST_DCN2_0(uint32_t); 587 594 SE_REG_FIELD_LIST_DCN3_0(uint32_t); 595 + SE_REG_FIELD_LIST_DCN3_2(uint32_t); 596 + 588 597 }; 589 598 590 599 
struct dcn10_stream_encoder {
+33 -1
drivers/gpu/drm/amd/display/dc/dcn20/dcn20_dccg.h
··· 133 133 type OTG_DROP_PIXEL[MAX_PIPES]; 134 134 135 135 #define DCCG3_REG_FIELD_LIST(type) \ 136 + type HDMICHARCLK0_EN;\ 137 + type HDMICHARCLK0_SRC_SEL;\ 136 138 type PHYASYMCLK_FORCE_EN;\ 137 139 type PHYASYMCLK_FORCE_SRC_SEL;\ 138 140 type PHYBSYMCLK_FORCE_EN;\ ··· 205 203 type PHYDSYMCLK_GATE_DISABLE; \ 206 204 type PHYESYMCLK_GATE_DISABLE; 207 205 206 + #define DCCG32_REG_FIELD_LIST(type) \ 207 + type DPSTREAMCLK0_EN;\ 208 + type DPSTREAMCLK1_EN;\ 209 + type DPSTREAMCLK2_EN;\ 210 + type DPSTREAMCLK3_EN;\ 211 + type DPSTREAMCLK0_SRC_SEL;\ 212 + type DPSTREAMCLK1_SRC_SEL;\ 213 + type DPSTREAMCLK2_SRC_SEL;\ 214 + type DPSTREAMCLK3_SRC_SEL;\ 215 + type HDMISTREAMCLK0_EN;\ 216 + type OTG0_PIXEL_RATE_DIVK1;\ 217 + type OTG0_PIXEL_RATE_DIVK2;\ 218 + type OTG1_PIXEL_RATE_DIVK1;\ 219 + type OTG1_PIXEL_RATE_DIVK2;\ 220 + type OTG2_PIXEL_RATE_DIVK1;\ 221 + type OTG2_PIXEL_RATE_DIVK2;\ 222 + type OTG3_PIXEL_RATE_DIVK1;\ 223 + type OTG3_PIXEL_RATE_DIVK2;\ 224 + type DTBCLK_P0_SRC_SEL;\ 225 + type DTBCLK_P0_EN;\ 226 + type DTBCLK_P1_SRC_SEL;\ 227 + type DTBCLK_P1_EN;\ 228 + type DTBCLK_P2_SRC_SEL;\ 229 + type DTBCLK_P2_EN;\ 230 + type DTBCLK_P3_SRC_SEL;\ 231 + type DTBCLK_P3_EN; 232 + 208 233 struct dccg_shift { 209 234 DCCG_REG_FIELD_LIST(uint8_t) 210 235 DCCG3_REG_FIELD_LIST(uint8_t) 211 236 DCCG31_REG_FIELD_LIST(uint8_t) 237 + DCCG32_REG_FIELD_LIST(uint8_t) 212 238 }; 213 239 214 240 struct dccg_mask { 215 241 DCCG_REG_FIELD_LIST(uint32_t) 216 242 DCCG3_REG_FIELD_LIST(uint32_t) 217 243 DCCG31_REG_FIELD_LIST(uint32_t) 244 + DCCG32_REG_FIELD_LIST(uint32_t) 218 245 }; 219 246 220 247 struct dccg_registers { ··· 278 247 uint32_t DCCG_GATE_DISABLE_CNTL3; 279 248 uint32_t HDMISTREAMCLK0_DTO_PARAM; 280 249 uint32_t DCCG_GATE_DISABLE_CNTL4; 281 - 250 + uint32_t OTG_PIXEL_RATE_DIV; 251 + uint32_t DTBCLK_P_CNTL; 282 252 }; 283 253 284 254 struct dcn_dccg {
+22 -3
drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hubp.h
··· 161 161 DCN21_HUBP_REG_COMMON_VARIABLE_LIST;\ 162 162 uint32_t DCN_DMDATA_VM_CNTL 163 163 164 + #define DCN32_HUBP_REG_COMMON_VARIABLE_LIST \ 165 + DCN30_HUBP_REG_COMMON_VARIABLE_LIST;\ 166 + uint32_t DCHUBP_MALL_CONFIG;\ 167 + uint32_t DCHUBP_VMPG_CONFIG;\ 168 + uint32_t UCLK_PSTATE_FORCE 169 + 164 170 #define DCN2_HUBP_REG_FIELD_VARIABLE_LIST(type) \ 165 171 DCN_HUBP_REG_FIELD_BASE_LIST(type); \ 166 172 type DMDATA_ADDRESS_HIGH;\ ··· 228 222 type CURSOR_REQ_MODE;\ 229 223 type HUBP_SOFT_RESET 230 224 225 + #define DCN32_HUBP_REG_FIELD_VARIABLE_LIST(type) \ 226 + DCN31_HUBP_REG_FIELD_VARIABLE_LIST(type);\ 227 + type USE_MALL_SEL; \ 228 + type USE_MALL_FOR_CURSOR;\ 229 + type VMPG_SIZE; \ 230 + type PTE_BUFFER_MODE; \ 231 + type BIGK_FRAGMENT_SIZE; \ 232 + type FORCE_ONE_ROW_FOR_FRAME; \ 233 + type DATA_UCLK_PSTATE_FORCE_EN; \ 234 + type DATA_UCLK_PSTATE_FORCE_VALUE; \ 235 + type CURSOR_UCLK_PSTATE_FORCE_EN; \ 236 + type CURSOR_UCLK_PSTATE_FORCE_VALUE 237 + 231 238 struct dcn_hubp2_registers { 232 - DCN30_HUBP_REG_COMMON_VARIABLE_LIST; 239 + DCN32_HUBP_REG_COMMON_VARIABLE_LIST; 233 240 }; 234 241 235 242 struct dcn_hubp2_shift { 236 - DCN31_HUBP_REG_FIELD_VARIABLE_LIST(uint8_t); 243 + DCN32_HUBP_REG_FIELD_VARIABLE_LIST(uint8_t); 237 244 }; 238 245 239 246 struct dcn_hubp2_mask { 240 - DCN31_HUBP_REG_FIELD_VARIABLE_LIST(uint32_t); 247 + DCN32_HUBP_REG_FIELD_VARIABLE_LIST(uint32_t); 241 248 }; 242 249 243 250 struct dcn20_hubp {
+28 -4
drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c
··· 768 768 /* TODO enable stream if timing changed */ 769 769 /* TODO unblank stream if DP */ 770 770 771 + if (pipe_ctx->stream && pipe_ctx->stream->mall_stream_config.type == SUBVP_PHANTOM) { 772 + if (pipe_ctx->stream_res.tg && pipe_ctx->stream_res.tg->funcs->phantom_crtc_post_enable) 773 + pipe_ctx->stream_res.tg->funcs->phantom_crtc_post_enable(pipe_ctx->stream_res.tg); 774 + } 771 775 return DC_OK; 772 776 } 773 777 ··· 1251 1247 lock, 1252 1248 &hw_locks, 1253 1249 &inst_flags); 1250 + } else if (pipe->stream && pipe->stream->mall_stream_config.type == SUBVP_MAIN) { 1251 + union dmub_inbox0_cmd_lock_hw hw_lock_cmd = { 0 }; 1252 + hw_lock_cmd.bits.command_code = DMUB_INBOX0_CMD__HW_LOCK; 1253 + hw_lock_cmd.bits.hw_lock_client = HW_LOCK_CLIENT_DRIVER; 1254 + hw_lock_cmd.bits.lock_pipe = 1; 1255 + hw_lock_cmd.bits.otg_inst = pipe->stream_res.tg->inst; 1256 + hw_lock_cmd.bits.lock = lock; 1257 + if (!lock) 1258 + hw_lock_cmd.bits.should_release = 1; 1259 + dmub_hw_lock_mgr_inbox0_cmd(dc->ctx->dmub_srv, hw_lock_cmd); 1254 1260 } else if (pipe->plane_state != NULL && pipe->plane_state->triplebuffer_flips) { 1255 1261 if (lock) 1256 1262 pipe->stream_res.tg->funcs->triplebuffer_lock(pipe->stream_res.tg); ··· 1578 1564 plane_state->update_flags.bits.addr_update) 1579 1565 hws->funcs.update_plane_addr(dc, pipe_ctx); 1580 1566 1581 - 1582 - 1583 1567 if (pipe_ctx->update_flags.bits.enable) 1584 1568 hubp->funcs->set_blank(hubp, false); 1569 + /* If the stream paired with this plane is phantom, the plane is also phantom */ 1570 + if (pipe_ctx->stream && pipe_ctx->stream->mall_stream_config.type == SUBVP_PHANTOM 1571 + && hubp->funcs->phantom_hubp_post_enable) 1572 + hubp->funcs->phantom_hubp_post_enable(hubp); 1585 1573 } 1586 1574 1587 1575 ··· 1594 1578 { 1595 1579 struct dce_hwseq *hws = dc->hwseq; 1596 1580 /* Only need to unblank on top pipe */ 1581 + 1597 1582 if ((pipe_ctx->update_flags.bits.enable || pipe_ctx->stream->update_flags.bits.abm_level) 1598 1583 && 
!pipe_ctx->top_pipe && !pipe_ctx->prev_odm_pipe) 1599 1584 hws->funcs.blank_pixel_data(dc, pipe_ctx, !pipe_ctx->plane_state->visible); ··· 1602 1585 /* Only update TG on top pipe */ 1603 1586 if (pipe_ctx->update_flags.bits.global_sync && !pipe_ctx->top_pipe 1604 1587 && !pipe_ctx->prev_odm_pipe) { 1605 - 1606 1588 pipe_ctx->stream_res.tg->funcs->program_global_sync( 1607 1589 pipe_ctx->stream_res.tg, 1608 1590 pipe_ctx->pipe_dlg_param.vready_offset, ··· 1609 1593 pipe_ctx->pipe_dlg_param.vupdate_offset, 1610 1594 pipe_ctx->pipe_dlg_param.vupdate_width); 1611 1595 1612 - pipe_ctx->stream_res.tg->funcs->wait_for_state(pipe_ctx->stream_res.tg, CRTC_STATE_VACTIVE); 1596 + if (pipe_ctx->stream->mall_stream_config.type != SUBVP_PHANTOM) { 1597 + pipe_ctx->stream_res.tg->funcs->wait_for_state( 1598 + pipe_ctx->stream_res.tg, CRTC_STATE_VBLANK); 1599 + pipe_ctx->stream_res.tg->funcs->wait_for_state( 1600 + pipe_ctx->stream_res.tg, CRTC_STATE_VACTIVE); 1601 + } 1613 1602 1614 1603 pipe_ctx->stream_res.tg->funcs->set_vtg_params( 1615 1604 pipe_ctx->stream_res.tg, &pipe_ctx->stream->timing, true); ··· 1770 1749 pipe->plane_res.hubp->funcs->hubp_wait_pipe_read_start(pipe->plane_res.hubp); 1771 1750 } 1772 1751 } 1752 + if (hws->funcs.program_mall_pipe_config) 1753 + hws->funcs.program_mall_pipe_config(dc, context); 1773 1754 } 1774 1755 1775 1756 void dcn20_post_unlock_program_front_end( ··· 2432 2409 NULL, 2433 2410 hubp->inst, 2434 2411 mpcc_id); 2412 + 2435 2413 dc->hwss.update_visual_confirm_color(dc, pipe_ctx, &blnd_cfg.black_color, mpcc_id); 2436 2414 2437 2415 ASSERT(new_mpcc != NULL);
+8 -8
drivers/gpu/drm/amd/display/dc/dcn30/dcn30_dio_stream_encoder.c
··· 195 195 } 196 196 } 197 197 198 - static void enc3_stream_encoder_update_hdmi_info_packets( 198 + void enc3_stream_encoder_update_hdmi_info_packets( 199 199 struct stream_encoder *enc, 200 200 const struct encoder_info_frame *info_frame) 201 201 { ··· 214 214 enc3_update_hdmi_info_packet(enc1, 4, &info_frame->hdrsmd); 215 215 } 216 216 217 - static void enc3_stream_encoder_stop_hdmi_info_packets( 217 + void enc3_stream_encoder_stop_hdmi_info_packets( 218 218 struct stream_encoder *enc) 219 219 { 220 220 struct dcn10_stream_encoder *enc1 = DCN10STRENC_FROM_STRENC(enc); ··· 318 318 } 319 319 320 320 321 - static void enc3_dp_set_dsc_pps_info_packet(struct stream_encoder *enc, 321 + void enc3_dp_set_dsc_pps_info_packet(struct stream_encoder *enc, 322 322 bool enable, 323 323 uint8_t *dsc_packed_pps, 324 324 bool immediate_update) ··· 404 404 } 405 405 } 406 406 407 - static void enc3_stream_encoder_update_dp_info_packets( 407 + void enc3_stream_encoder_update_dp_info_packets( 408 408 struct stream_encoder *enc, 409 409 const struct encoder_info_frame *info_frame) 410 410 { ··· 652 652 REG_UPDATE(HDMI_GC, HDMI_GC_AVMUTE, 0); 653 653 } 654 654 655 - static void enc3_audio_mute_control( 655 + void enc3_audio_mute_control( 656 656 struct stream_encoder *enc, 657 657 bool mute) 658 658 { ··· 660 660 enc->afmt->funcs->audio_mute_control(enc->afmt, mute); 661 661 } 662 662 663 - static void enc3_se_dp_audio_setup( 663 + void enc3_se_dp_audio_setup( 664 664 struct stream_encoder *enc, 665 665 unsigned int az_inst, 666 666 struct audio_info *info) ··· 691 691 enc->afmt->funcs->setup_dp_audio(enc->afmt); 692 692 } 693 693 694 - static void enc3_se_dp_audio_enable( 694 + void enc3_se_dp_audio_enable( 695 695 struct stream_encoder *enc) 696 696 { 697 697 enc1_se_enable_audio_clock(enc, true); ··· 757 757 */ 758 758 } 759 759 760 - static void enc3_se_hdmi_audio_setup( 760 + void enc3_se_hdmi_audio_setup( 761 761 struct stream_encoder *enc, 762 762 unsigned int az_inst, 763 
763 struct audio_info *info,
+35
drivers/gpu/drm/amd/display/dc/dcn30/dcn30_dio_stream_encoder.h
··· 287 287 const struct dcn10_stream_encoder_shift *se_shift, 288 288 const struct dcn10_stream_encoder_mask *se_mask); 289 289 290 + void enc3_stream_encoder_update_hdmi_info_packets( 291 + struct stream_encoder *enc, 292 + const struct encoder_info_frame *info_frame); 293 + 294 + void enc3_stream_encoder_stop_hdmi_info_packets( 295 + struct stream_encoder *enc); 296 + 297 + void enc3_stream_encoder_update_dp_info_packets( 298 + struct stream_encoder *enc, 299 + const struct encoder_info_frame *info_frame); 300 + 301 + void enc3_audio_mute_control( 302 + struct stream_encoder *enc, 303 + bool mute); 304 + 305 + void enc3_se_dp_audio_setup( 306 + struct stream_encoder *enc, 307 + unsigned int az_inst, 308 + struct audio_info *info); 309 + 310 + void enc3_se_dp_audio_enable( 311 + struct stream_encoder *enc); 312 + 313 + void enc3_se_hdmi_audio_setup( 314 + struct stream_encoder *enc, 315 + unsigned int az_inst, 316 + struct audio_info *info, 317 + struct audio_crtc_info *audio_crtc_info); 318 + 319 + void enc3_dp_set_dsc_pps_info_packet( 320 + struct stream_encoder *enc, 321 + bool enable, 322 + uint8_t *dsc_packed_pps, 323 + bool immediate_update); 324 + 290 325 #endif /* __DC_DIO_STREAM_ENCODER_DCN30_H__ */
+4 -4
drivers/gpu/drm/amd/display/dc/dcn30/dcn30_dpp.c
··· 41 41 dpp->tf_shift->field_name, dpp->tf_mask->field_name 42 42 43 43 44 - static void dpp30_read_state(struct dpp *dpp_base, struct dcn_dpp_state *s) 44 + void dpp30_read_state(struct dpp *dpp_base, struct dcn_dpp_state *s) 45 45 { 46 - struct dcn20_dpp *dpp = TO_DCN20_DPP(dpp_base); 46 + struct dcn3_dpp *dpp = TO_DCN30_DPP(dpp_base); 47 47 48 48 REG_GET(DPP_CONTROL, 49 49 DPP_CLOCK_ENABLE, &s->is_enabled); ··· 167 167 PRE_DEGAM_SELECT, degamma_lut_selection); 168 168 } 169 169 170 - static void dpp3_cnv_setup ( 170 + void dpp3_cnv_setup ( 171 171 struct dpp *dpp_base, 172 172 enum surface_pixel_format format, 173 173 enum expansion_mode mode, ··· 372 372 } 373 373 374 374 375 - static bool dpp3_get_optimal_number_of_taps( 375 + bool dpp3_get_optimal_number_of_taps( 376 376 struct dpp *dpp, 377 377 struct scaler_data *scl_data, 378 378 const struct scaling_taps *in_taps)
+16
drivers/gpu/drm/amd/display/dc/dcn30/dcn30_dpp.h
··· 588 588 struct dpp *dpp_base, 589 589 uint32_t enable, uint32_t additive_blending); 590 590 591 + void dpp30_read_state(struct dpp *dpp_base, 592 + struct dcn_dpp_state *s); 593 + 594 + bool dpp3_get_optimal_number_of_taps( 595 + struct dpp *dpp, 596 + struct scaler_data *scl_data, 597 + const struct scaling_taps *in_taps); 598 + 599 + void dpp3_cnv_setup ( 600 + struct dpp *dpp_base, 601 + enum surface_pixel_format format, 602 + enum expansion_mode mode, 603 + struct dc_csc_transform input_csc_color_matrix, 604 + enum dc_color_space input_color_space, 605 + struct cnv_alpha_2bit_lut *alpha_2bit_lut); 606 + 591 607 void dpp3_program_CM_bias( 592 608 struct dpp *dpp_base, 593 609 struct CM_bias_params *bias_params);
+7 -7
drivers/gpu/drm/amd/display/dc/dcn30/dcn30_mpc.c
··· 44 44 #define NUM_ELEMENTS(a) (sizeof(a) / sizeof((a)[0])) 45 45 46 46 47 - static bool mpc3_is_dwb_idle( 47 + bool mpc3_is_dwb_idle( 48 48 struct mpc *mpc, 49 49 int dwb_id) 50 50 { ··· 59 59 return false; 60 60 } 61 61 62 - static void mpc3_set_dwb_mux( 62 + void mpc3_set_dwb_mux( 63 63 struct mpc *mpc, 64 64 int dwb_id, 65 65 int mpcc_id) ··· 70 70 MPC_DWB0_MUX, mpcc_id); 71 71 } 72 72 73 - static void mpc3_disable_dwb_mux( 73 + void mpc3_disable_dwb_mux( 74 74 struct mpc *mpc, 75 75 int dwb_id) 76 76 { ··· 80 80 MPC_DWB0_MUX, 0xf); 81 81 } 82 82 83 - static void mpc3_set_out_rate_control( 83 + void mpc3_set_out_rate_control( 84 84 struct mpc *mpc, 85 85 int opp_id, 86 86 bool enable, ··· 99 99 MPC_OUT_FLOW_CONTROL_COUNT, flow_control->flow_ctrl_cnt1); 100 100 } 101 101 102 - static enum dc_lut_mode mpc3_get_ogam_current(struct mpc *mpc, int mpcc_id) 102 + enum dc_lut_mode mpc3_get_ogam_current(struct mpc *mpc, int mpcc_id) 103 103 { 104 104 /*Contrary to DCN2 and DCN1 wherein a single status register field holds this info; 105 105 *in DCN3/3AG, we need to read two separate fields to retrieve the same info ··· 137 137 return mode; 138 138 } 139 139 140 - static void mpc3_power_on_ogam_lut( 140 + void mpc3_power_on_ogam_lut( 141 141 struct mpc *mpc, int mpcc_id, 142 142 bool power_on) 143 143 { ··· 1035 1035 } 1036 1036 1037 1037 1038 - static void mpc3_init_mpcc(struct mpcc *mpcc, int mpcc_inst) 1038 + void mpc3_init_mpcc(struct mpcc *mpcc, int mpcc_inst) 1039 1039 { 1040 1040 mpcc->mpcc_id = mpcc_inst; 1041 1041 mpcc->dpp_id = 0xf;
+147
drivers/gpu/drm/amd/display/dc/dcn30/dcn30_mpc.h
··· 282 282 uint32_t MPCC_OGAM_RAMB_START_BASE_CNTL_R[MAX_MPCC]; \ 283 283 uint32_t MPC_OUT_CSC_COEF_FORMAT 284 284 285 + #define MPC_REG_VARIABLE_LIST_DCN32 \ 286 + uint32_t MPCC_MCM_SHAPER_CONTROL[MAX_MPCC]; \ 287 + uint32_t MPCC_MCM_SHAPER_OFFSET_R[MAX_MPCC]; \ 288 + uint32_t MPCC_MCM_SHAPER_OFFSET_G[MAX_MPCC]; \ 289 + uint32_t MPCC_MCM_SHAPER_OFFSET_B[MAX_MPCC]; \ 290 + uint32_t MPCC_MCM_SHAPER_SCALE_R[MAX_MPCC]; \ 291 + uint32_t MPCC_MCM_SHAPER_SCALE_G_B[MAX_MPCC]; \ 292 + uint32_t MPCC_MCM_SHAPER_LUT_INDEX[MAX_MPCC]; \ 293 + uint32_t MPCC_MCM_SHAPER_LUT_DATA[MAX_MPCC]; \ 294 + uint32_t MPCC_MCM_SHAPER_LUT_WRITE_EN_MASK[MAX_MPCC]; \ 295 + uint32_t MPCC_MCM_SHAPER_RAMA_START_CNTL_B[MAX_MPCC]; \ 296 + uint32_t MPCC_MCM_SHAPER_RAMA_START_CNTL_G[MAX_MPCC]; \ 297 + uint32_t MPCC_MCM_SHAPER_RAMA_START_CNTL_R[MAX_MPCC]; \ 298 + uint32_t MPCC_MCM_SHAPER_RAMA_END_CNTL_B[MAX_MPCC]; \ 299 + uint32_t MPCC_MCM_SHAPER_RAMA_END_CNTL_G[MAX_MPCC]; \ 300 + uint32_t MPCC_MCM_SHAPER_RAMA_END_CNTL_R[MAX_MPCC]; \ 301 + uint32_t MPCC_MCM_SHAPER_RAMA_REGION_0_1[MAX_MPCC]; \ 302 + uint32_t MPCC_MCM_SHAPER_RAMA_REGION_2_3[MAX_MPCC]; \ 303 + uint32_t MPCC_MCM_SHAPER_RAMA_REGION_4_5[MAX_MPCC]; \ 304 + uint32_t MPCC_MCM_SHAPER_RAMA_REGION_6_7[MAX_MPCC]; \ 305 + uint32_t MPCC_MCM_SHAPER_RAMA_REGION_8_9[MAX_MPCC]; \ 306 + uint32_t MPCC_MCM_SHAPER_RAMA_REGION_10_11[MAX_MPCC]; \ 307 + uint32_t MPCC_MCM_SHAPER_RAMA_REGION_12_13[MAX_MPCC]; \ 308 + uint32_t MPCC_MCM_SHAPER_RAMA_REGION_14_15[MAX_MPCC]; \ 309 + uint32_t MPCC_MCM_SHAPER_RAMA_REGION_16_17[MAX_MPCC]; \ 310 + uint32_t MPCC_MCM_SHAPER_RAMA_REGION_18_19[MAX_MPCC]; \ 311 + uint32_t MPCC_MCM_SHAPER_RAMA_REGION_20_21[MAX_MPCC]; \ 312 + uint32_t MPCC_MCM_SHAPER_RAMA_REGION_22_23[MAX_MPCC]; \ 313 + uint32_t MPCC_MCM_SHAPER_RAMA_REGION_24_25[MAX_MPCC]; \ 314 + uint32_t MPCC_MCM_SHAPER_RAMA_REGION_26_27[MAX_MPCC]; \ 315 + uint32_t MPCC_MCM_SHAPER_RAMA_REGION_28_29[MAX_MPCC]; \ 316 + uint32_t MPCC_MCM_SHAPER_RAMA_REGION_30_31[MAX_MPCC]; \ 317 + 
uint32_t MPCC_MCM_SHAPER_RAMA_REGION_32_33[MAX_MPCC]; \ 318 + uint32_t MPCC_MCM_SHAPER_RAMB_START_CNTL_B[MAX_MPCC]; \ 319 + uint32_t MPCC_MCM_SHAPER_RAMB_START_CNTL_G[MAX_MPCC]; \ 320 + uint32_t MPCC_MCM_SHAPER_RAMB_START_CNTL_R[MAX_MPCC]; \ 321 + uint32_t MPCC_MCM_SHAPER_RAMB_END_CNTL_B[MAX_MPCC]; \ 322 + uint32_t MPCC_MCM_SHAPER_RAMB_END_CNTL_G[MAX_MPCC]; \ 323 + uint32_t MPCC_MCM_SHAPER_RAMB_END_CNTL_R[MAX_MPCC]; \ 324 + uint32_t MPCC_MCM_SHAPER_RAMB_REGION_0_1[MAX_MPCC]; \ 325 + uint32_t MPCC_MCM_SHAPER_RAMB_REGION_2_3[MAX_MPCC]; \ 326 + uint32_t MPCC_MCM_SHAPER_RAMB_REGION_4_5[MAX_MPCC]; \ 327 + uint32_t MPCC_MCM_SHAPER_RAMB_REGION_6_7[MAX_MPCC]; \ 328 + uint32_t MPCC_MCM_SHAPER_RAMB_REGION_8_9[MAX_MPCC]; \ 329 + uint32_t MPCC_MCM_SHAPER_RAMB_REGION_10_11[MAX_MPCC]; \ 330 + uint32_t MPCC_MCM_SHAPER_RAMB_REGION_12_13[MAX_MPCC]; \ 331 + uint32_t MPCC_MCM_SHAPER_RAMB_REGION_14_15[MAX_MPCC]; \ 332 + uint32_t MPCC_MCM_SHAPER_RAMB_REGION_16_17[MAX_MPCC]; \ 333 + uint32_t MPCC_MCM_SHAPER_RAMB_REGION_18_19[MAX_MPCC]; \ 334 + uint32_t MPCC_MCM_SHAPER_RAMB_REGION_20_21[MAX_MPCC]; \ 335 + uint32_t MPCC_MCM_SHAPER_RAMB_REGION_22_23[MAX_MPCC]; \ 336 + uint32_t MPCC_MCM_SHAPER_RAMB_REGION_24_25[MAX_MPCC]; \ 337 + uint32_t MPCC_MCM_SHAPER_RAMB_REGION_26_27[MAX_MPCC]; \ 338 + uint32_t MPCC_MCM_SHAPER_RAMB_REGION_28_29[MAX_MPCC]; \ 339 + uint32_t MPCC_MCM_SHAPER_RAMB_REGION_30_31[MAX_MPCC]; \ 340 + uint32_t MPCC_MCM_SHAPER_RAMB_REGION_32_33[MAX_MPCC]; \ 341 + uint32_t MPCC_MCM_3DLUT_MODE[MAX_MPCC]; \ 342 + uint32_t MPCC_MCM_3DLUT_INDEX[MAX_MPCC]; \ 343 + uint32_t MPCC_MCM_3DLUT_DATA[MAX_MPCC]; \ 344 + uint32_t MPCC_MCM_3DLUT_DATA_30BIT[MAX_MPCC]; \ 345 + uint32_t MPCC_MCM_3DLUT_READ_WRITE_CONTROL[MAX_MPCC]; \ 346 + uint32_t MPCC_MCM_3DLUT_OUT_NORM_FACTOR[MAX_MPCC]; \ 347 + uint32_t MPCC_MCM_3DLUT_OUT_OFFSET_R[MAX_MPCC]; \ 348 + uint32_t MPCC_MCM_3DLUT_OUT_OFFSET_G[MAX_MPCC]; \ 349 + uint32_t MPCC_MCM_3DLUT_OUT_OFFSET_B[MAX_MPCC]; \ 350 + uint32_t 
MPCC_MCM_MEM_PWR_CTRL[MAX_MPCC] 351 + 285 352 #define MPC_COMMON_MASK_SH_LIST_DCN3_0(mask_sh) \ 286 353 MPC_COMMON_MASK_SH_LIST_DCN1_0(mask_sh),\ 287 354 SF(MPCC0_MPCC_CONTROL, MPCC_BG_BPC, mask_sh),\ ··· 647 580 type MPC_RMU_SHAPER_RAMA_EXP_REGION1_NUM_SEGMENTS;\ 648 581 type MPC_RMU_SHAPER_MODE_CURRENT 649 582 583 + #define MPC_REG_FIELD_LIST_DCN32(type) \ 584 + type MPCC_MCM_SHAPER_MEM_PWR_FORCE;\ 585 + type MPCC_MCM_SHAPER_MEM_PWR_DIS;\ 586 + type MPCC_MCM_SHAPER_MEM_LOW_PWR_MODE;\ 587 + type MPCC_MCM_3DLUT_MEM_PWR_FORCE;\ 588 + type MPCC_MCM_3DLUT_MEM_PWR_DIS;\ 589 + type MPCC_MCM_3DLUT_MEM_LOW_PWR_MODE;\ 590 + type MPCC_MCM_1DLUT_MEM_PWR_FORCE;\ 591 + type MPCC_MCM_1DLUT_MEM_PWR_DIS;\ 592 + type MPCC_MCM_1DLUT_MEM_LOW_PWR_MODE;\ 593 + type MPCC_MCM_SHAPER_MEM_PWR_STATE;\ 594 + type MPCC_MCM_3DLUT_MEM_PWR_STATE;\ 595 + type MPCC_MCM_1DLUT_MEM_PWR_STATE;\ 596 + type MPCC_MCM_3DLUT_MODE; \ 597 + type MPCC_MCM_3DLUT_SIZE; \ 598 + type MPCC_MCM_3DLUT_MODE_CURRENT; \ 599 + type MPCC_MCM_3DLUT_WRITE_EN_MASK;\ 600 + type MPCC_MCM_3DLUT_RAM_SEL;\ 601 + type MPCC_MCM_3DLUT_30BIT_EN;\ 602 + type MPCC_MCM_3DLUT_CONFIG_STATUS;\ 603 + type MPCC_MCM_3DLUT_READ_SEL;\ 604 + type MPCC_MCM_3DLUT_INDEX;\ 605 + type MPCC_MCM_3DLUT_DATA0;\ 606 + type MPCC_MCM_3DLUT_DATA1;\ 607 + type MPCC_MCM_3DLUT_DATA_30BIT;\ 608 + type MPCC_MCM_SHAPER_LUT_MODE;\ 609 + type MPCC_MCM_SHAPER_MODE_CURRENT;\ 610 + type MPCC_MCM_SHAPER_OFFSET_R;\ 611 + type MPCC_MCM_SHAPER_OFFSET_G;\ 612 + type MPCC_MCM_SHAPER_OFFSET_B;\ 613 + type MPCC_MCM_SHAPER_SCALE_R;\ 614 + type MPCC_MCM_SHAPER_SCALE_G;\ 615 + type MPCC_MCM_SHAPER_SCALE_B;\ 616 + type MPCC_MCM_SHAPER_LUT_INDEX;\ 617 + type MPCC_MCM_SHAPER_LUT_DATA;\ 618 + type MPCC_MCM_SHAPER_LUT_WRITE_EN_MASK;\ 619 + type MPCC_MCM_SHAPER_LUT_WRITE_SEL;\ 620 + type MPCC_MCM_SHAPER_CONFIG_STATUS;\ 621 + type MPCC_MCM_SHAPER_RAMA_EXP_REGION_START_B;\ 622 + type MPCC_MCM_SHAPER_RAMA_EXP_REGION_START_SEGMENT_B;\ 623 + type MPCC_MCM_SHAPER_RAMA_EXP_REGION_END_B;\ 
624 + type MPCC_MCM_SHAPER_RAMA_EXP_REGION_END_BASE_B;\ 625 + type MPCC_MCM_SHAPER_RAMA_EXP_REGION0_LUT_OFFSET;\ 626 + type MPCC_MCM_SHAPER_RAMA_EXP_REGION0_NUM_SEGMENTS;\ 627 + type MPCC_MCM_SHAPER_RAMA_EXP_REGION1_LUT_OFFSET;\ 628 + type MPCC_MCM_SHAPER_RAMA_EXP_REGION1_NUM_SEGMENTS 629 + 650 630 #define MPC_COMMON_MASK_SH_LIST_DCN303(mask_sh) \ 651 631 MPC_COMMON_MASK_SH_LIST_DCN1_0(mask_sh),\ 652 632 SF(MPCC0_MPCC_CONTROL, MPCC_BG_BPC, mask_sh),\ ··· 872 758 873 759 struct dcn30_mpc_registers { 874 760 MPC_REG_VARIABLE_LIST_DCN3_0; 761 + MPC_REG_VARIABLE_LIST_DCN32; 875 762 }; 876 763 877 764 struct dcn30_mpc_shift { 878 765 MPC_REG_FIELD_LIST_DCN3_0(uint8_t); 766 + MPC_REG_FIELD_LIST_DCN32(uint8_t); 879 767 }; 880 768 881 769 struct dcn30_mpc_mask { 882 770 MPC_REG_FIELD_LIST_DCN3_0(uint32_t); 771 + MPC_REG_FIELD_LIST_DCN32(uint32_t); 883 772 }; 884 773 885 774 struct dcn30_mpc { ··· 957 840 struct mpc *mpc, 958 841 int rmu_idx, 959 842 int value); 843 + 844 + void mpc3_set_dwb_mux( 845 + struct mpc *mpc, 846 + int dwb_id, 847 + int mpcc_id); 848 + 849 + void mpc3_disable_dwb_mux( 850 + struct mpc *mpc, 851 + int dwb_id); 852 + 853 + bool mpc3_is_dwb_idle( 854 + struct mpc *mpc, 855 + int dwb_id); 856 + 857 + void mpc3_set_out_rate_control( 858 + struct mpc *mpc, 859 + int opp_id, 860 + bool enable, 861 + bool rate_2x_mode, 862 + struct mpc_dwb_flow_control *flow_control); 863 + 864 + void mpc3_power_on_ogam_lut( 865 + struct mpc *mpc, int mpcc_id, 866 + bool power_on); 867 + 868 + void mpc3_init_mpcc(struct mpcc *mpcc, int mpcc_inst); 869 + 870 + enum dc_lut_mode mpc3_get_ogam_current( 871 + struct mpc *mpc, 872 + int mpcc_id); 960 873 961 874 #endif
+9
drivers/gpu/drm/amd/display/dc/dcn30/dcn30_optc.h
··· 28 28 29 29 #include "dcn20/dcn20_optc.h" 30 30 31 + #define V_TOTAL_REGS_DCN30_SRI(inst) 31 32 32 33 #define OPTC_COMMON_REG_LIST_DCN3_BASE(inst) \ 33 34 SRI(OTG_VSTARTUP_PARAM, OTG, inst),\ ··· 56 55 SRI(OTG_V_TOTAL_MAX, OTG, inst),\ 57 56 SRI(OTG_V_TOTAL_MIN, OTG, inst),\ 58 57 SRI(OTG_V_TOTAL_CONTROL, OTG, inst),\ 58 + V_TOTAL_REGS_DCN30_SRI(inst)\ 59 59 SRI(OTG_TRIGA_CNTL, OTG, inst),\ 60 60 SRI(OTG_FORCE_COUNT_NOW_CNTL, OTG, inst),\ 61 61 SRI(OTG_STATIC_SCREEN_CONTROL, OTG, inst),\ ··· 82 80 SRI(OTG_VERT_SYNC_CONTROL, OTG, inst),\ 83 81 SRI(OTG_GSL_CONTROL, OTG, inst),\ 84 82 SRI(OTG_CRC_CNTL, OTG, inst),\ 83 + SRI(OTG_CRC_CNTL2, OTG, inst),\ 85 84 SRI(OTG_CRC0_DATA_RG, OTG, inst),\ 86 85 SRI(OTG_CRC0_DATA_B, OTG, inst),\ 87 86 SRI(OTG_CRC0_WINDOWA_X_CONTROL, OTG, inst),\ ··· 111 108 SRI(OPTC_MEMORY_CONFIG, ODM, inst),\ 112 109 SR(DWB_SOURCE_SELECT) 113 110 111 + #define DCN30_VTOTAL_REGS_SF(mask_sh) 114 112 115 113 #define OPTC_COMMON_MASK_SH_LIST_DCN3_BASE(mask_sh)\ 116 114 SF(OTG0_OTG_VSTARTUP_PARAM, VSTARTUP_START, mask_sh),\ ··· 165 161 SF(OTG0_OTG_V_TOTAL_CONTROL, OTG_SET_V_TOTAL_MIN_MASK, mask_sh),\ 166 162 SF(OTG0_OTG_V_TOTAL_CONTROL, OTG_VTOTAL_MID_REPLACING_MIN_EN, mask_sh),\ 167 163 SF(OTG0_OTG_V_TOTAL_CONTROL, OTG_VTOTAL_MID_REPLACING_MAX_EN, mask_sh),\ 164 + DCN30_VTOTAL_REGS_SF(mask_sh)\ 168 165 SF(OTG0_OTG_FORCE_COUNT_NOW_CNTL, OTG_FORCE_COUNT_NOW_CLEAR, mask_sh),\ 169 166 SF(OTG0_OTG_FORCE_COUNT_NOW_CNTL, OTG_FORCE_COUNT_NOW_MODE, mask_sh),\ 170 167 SF(OTG0_OTG_FORCE_COUNT_NOW_CNTL, OTG_FORCE_COUNT_NOW_OCCURRED, mask_sh),\ ··· 224 219 SF(OTG0_OTG_CRC_CNTL, OTG_CRC_CONT_EN, mask_sh),\ 225 220 SF(OTG0_OTG_CRC_CNTL, OTG_CRC0_SELECT, mask_sh),\ 226 221 SF(OTG0_OTG_CRC_CNTL, OTG_CRC_EN, mask_sh),\ 222 + SF(OTG0_OTG_CRC_CNTL2, OTG_CRC_DSC_MODE, mask_sh),\ 223 + SF(OTG0_OTG_CRC_CNTL2, OTG_CRC_DATA_STREAM_COMBINE_MODE, mask_sh),\ 224 + SF(OTG0_OTG_CRC_CNTL2, OTG_CRC_DATA_STREAM_SPLIT_MODE, mask_sh),\ 225 + SF(OTG0_OTG_CRC_CNTL2, 
OTG_CRC_DATA_FORMAT, mask_sh),\ 227 226 SF(OTG0_OTG_CRC0_DATA_RG, CRC0_R_CR, mask_sh),\ 228 227 SF(OTG0_OTG_CRC0_DATA_RG, CRC0_G_Y, mask_sh),\ 229 228 SF(OTG0_OTG_CRC0_DATA_B, CRC0_B_CB, mask_sh),\
+24 -4
drivers/gpu/drm/amd/display/dc/dcn31/dcn31_dccg.c
··· 158 158 } 159 159 } 160 160 161 - void dccg31_set_dpstreamclk( 162 - struct dccg *dccg, 163 - enum hdmistreamclk_source src, 164 - int otg_inst) 161 + void dccg31_set_dpstreamclk(struct dccg *dccg, 162 + enum streamclk_source src, 163 + int otg_inst) 165 164 { 166 165 if (src == REFCLK) 167 166 dccg31_disable_dpstreamclk(dccg, otg_inst); ··· 661 662 } 662 663 } 663 664 665 + void dccg31_otg_add_pixel(struct dccg *dccg, 666 + uint32_t otg_inst) 667 + { 668 + struct dcn_dccg *dccg_dcn = TO_DCN_DCCG(dccg); 669 + 670 + REG_UPDATE(OTG_PIXEL_RATE_CNTL[otg_inst], 671 + OTG_ADD_PIXEL[otg_inst], 1); 672 + } 673 + 674 + void dccg31_otg_drop_pixel(struct dccg *dccg, 675 + uint32_t otg_inst) 676 + { 677 + struct dcn_dccg *dccg_dcn = TO_DCN_DCCG(dccg); 678 + 679 + REG_UPDATE(OTG_PIXEL_RATE_CNTL[otg_inst], 680 + OTG_DROP_PIXEL[otg_inst], 1); 681 + } 682 + 664 683 static const struct dccg_funcs dccg31_funcs = { 665 684 .update_dpp_dto = dccg31_update_dpp_dto, 666 685 .get_dccg_ref_freq = dccg31_get_dccg_ref_freq, ··· 691 674 .set_physymclk = dccg31_set_physymclk, 692 675 .set_dtbclk_dto = dccg31_set_dtbclk_dto, 693 676 .set_audio_dtbclk_dto = dccg31_set_audio_dtbclk_dto, 677 + .set_fifo_errdet_ovr_en = dccg2_set_fifo_errdet_ovr_en, 678 + .otg_add_pixel = dccg31_otg_add_pixel, 679 + .otg_drop_pixel = dccg31_otg_drop_pixel, 694 680 .set_dispclk_change_mode = dccg31_set_dispclk_change_mode, 695 681 .disable_dsc = dccg31_disable_dscclk, 696 682 .enable_dsc = dccg31_enable_dscclk,
+5 -9
drivers/gpu/drm/amd/display/dc/dcn31/dcn31_dccg.h
··· 28 28 29 29 #include "dcn30/dcn30_dccg.h" 30 30 31 - #define DCCG_SFII(block, reg_name, field_prefix, field_name, inst, post_fix)\ 32 - .field_prefix ## _ ## field_name[inst] = block ## inst ## _ ## reg_name ## __ ## field_prefix ## inst ## _ ## field_name ## post_fix 33 - 34 - 35 31 #define DCCG_REG_LIST_DCN31() \ 36 32 SR(DPPCLK_DTO_CTRL),\ 37 33 DCCG_SRII(DTO_PARAM, DPPCLK, 0),\ ··· 120 124 DCCG_SFII(OTG, PIXEL_RATE_CNTL, DTBCLK_DTO, DIV, 1, mask_sh),\ 121 125 DCCG_SFII(OTG, PIXEL_RATE_CNTL, DTBCLK_DTO, DIV, 2, mask_sh),\ 122 126 DCCG_SFII(OTG, PIXEL_RATE_CNTL, DTBCLK_DTO, DIV, 3, mask_sh),\ 127 + DCCG_SFII(OTG, PIXEL_RATE_CNTL, OTG, ADD_PIXEL, 0, mask_sh),\ 128 + DCCG_SFII(OTG, PIXEL_RATE_CNTL, OTG, ADD_PIXEL, 1, mask_sh),\ 129 + DCCG_SFII(OTG, PIXEL_RATE_CNTL, OTG, ADD_PIXEL, 2, mask_sh),\ 130 + DCCG_SFII(OTG, PIXEL_RATE_CNTL, OTG, ADD_PIXEL, 3, mask_sh),\ 123 131 DCCG_SF(DCCG_AUDIO_DTO_SOURCE, DCCG_AUDIO_DTO_SEL, mask_sh),\ 124 132 DCCG_SF(DCCG_AUDIO_DTO_SOURCE, DCCG_AUDIO_DTO0_SOURCE_SEL, mask_sh),\ 125 133 DCCG_SF(DENTIST_DISPCLK_CNTL, DENTIST_DISPCLK_CHG_MODE, mask_sh), \ ··· 163 163 164 164 void dccg31_set_dpstreamclk( 165 165 struct dccg *dccg, 166 - enum hdmistreamclk_source src, 166 + enum streamclk_source src, 167 167 int otg_inst); 168 168 169 169 void dccg31_enable_symclk32_se( ··· 193 193 void dccg31_set_audio_dtbclk_dto( 194 194 struct dccg *dccg, 195 195 const struct dtbclk_dto_params *params); 196 - 197 - void dccg31_set_hdmistreamclk( 198 - struct dccg *dccg, 199 - enum hdmistreamclk_source src); 200 196 201 197 #endif //__DCN31_DCCG_H__
+1 -21
drivers/gpu/drm/amd/display/dc/dcn31/dcn31_optc.c
··· 213 213 } 214 214 } 215 215 216 - void optc3_init_odm(struct timing_generator *optc) 217 - { 218 - struct optc *optc1 = DCN10TG_FROM_TG(optc); 219 - 220 - REG_SET_5(OPTC_DATA_SOURCE_SELECT, 0, 221 - OPTC_NUM_OF_INPUT_SEGMENT, 0, 222 - OPTC_SEG0_SRC_SEL, optc->inst, 223 - OPTC_SEG1_SRC_SEL, 0xf, 224 - OPTC_SEG2_SRC_SEL, 0xf, 225 - OPTC_SEG3_SRC_SEL, 0xf 226 - ); 227 - 228 - REG_SET(OTG_H_TIMING_CNTL, 0, 229 - OTG_H_TIMING_DIV_MODE, 0); 230 - 231 - REG_SET(OPTC_MEMORY_CONFIG, 0, 232 - OPTC_MEM_SEL, 0); 233 - optc1->opp_count = 1; 234 - } 235 - 236 216 static struct timing_generator_funcs dcn31_tg_funcs = { 237 217 .validate_timing = optc1_validate_timing, 238 218 .program_timing = optc1_program_timing, ··· 246 266 .lock_doublebuffer_disable = optc3_lock_doublebuffer_disable, 247 267 .enable_optc_clock = optc1_enable_optc_clock, 248 268 .set_drr = optc31_set_drr, 269 + .get_last_used_drr_vtotal = optc2_get_last_used_drr_vtotal, 249 270 .set_vtotal_min_max = optc1_set_vtotal_min_max, 250 271 .set_static_screen_control = optc1_set_static_screen_control, 251 272 .program_stereo = optc1_program_stereo, ··· 273 292 .program_manual_trigger = optc2_program_manual_trigger, 274 293 .setup_manual_trigger = optc2_setup_manual_trigger, 275 294 .get_hw_timing = optc1_get_hw_timing, 276 - .init_odm = optc3_init_odm, 277 295 }; 278 296 279 297 void dcn31_timing_generator_init(struct optc *optc1)
+4 -2
drivers/gpu/drm/amd/display/dc/dcn31/dcn31_optc.h
··· 98 98 SRI(OPTC_WIDTH_CONTROL, ODM, inst),\ 99 99 SRI(OPTC_MEMORY_CONFIG, ODM, inst),\ 100 100 SRI(OTG_CRC_CNTL2, OTG, inst),\ 101 - SR(DWB_SOURCE_SELECT) 101 + SR(DWB_SOURCE_SELECT),\ 102 + SRI(OTG_DRR_CONTROL, OTG, inst) 102 103 103 104 #define OPTC_COMMON_MASK_SH_LIST_DCN3_1(mask_sh)\ 104 105 SF(OTG0_OTG_VSTARTUP_PARAM, VSTARTUP_START, mask_sh),\ ··· 253 252 SF(OTG0_OTG_CRC_CNTL2, OTG_CRC_DSC_MODE, mask_sh),\ 254 253 SF(OTG0_OTG_CRC_CNTL2, OTG_CRC_DATA_STREAM_COMBINE_MODE, mask_sh),\ 255 254 SF(OTG0_OTG_CRC_CNTL2, OTG_CRC_DATA_STREAM_SPLIT_MODE, mask_sh),\ 256 - SF(OTG0_OTG_CRC_CNTL2, OTG_CRC_DATA_FORMAT, mask_sh) 255 + SF(OTG0_OTG_CRC_CNTL2, OTG_CRC_DATA_FORMAT, mask_sh),\ 256 + SF(OTG0_OTG_DRR_CONTROL, OTG_V_TOTAL_LAST_USED_BY_DRR, mask_sh) 257 257 258 258 void dcn31_timing_generator_init(struct optc *optc1); 259 259
+19 -26
drivers/gpu/drm/amd/display/dc/dcn32/dcn32_dccg.c
··· 141 141 /* Controls the generation of pixel valid for OTG in (OTG -> HPO case) */ 142 142 void dccg32_set_dtbclk_dto( 143 143 struct dccg *dccg, 144 - int otg_inst, 145 - int pixclk_khz, 146 - int num_odm_segments, 147 - const struct dc_crtc_timing *timing) 144 + const struct dtbclk_dto_params *params) 148 145 { 149 146 struct dcn_dccg *dccg_dcn = TO_DCN_DCCG(dccg); 150 147 /* DTO Output Rate / Pixel Rate = 1/4 */ 151 - int req_dtbclk_khz = pixclk_khz / 4; 148 + int req_dtbclk_khz = params->pixclk_khz / 4; 152 149 153 - if (dccg->ref_dtbclk_khz && req_dtbclk_khz) { 150 + if (params->ref_dtbclk_khz && req_dtbclk_khz) { 154 151 uint32_t modulo, phase; 155 152 156 153 // phase / modulo = dtbclk / dtbclk ref 157 - modulo = 0xffffffff; 158 - phase = (((unsigned long long)modulo * req_dtbclk_khz) + dccg->ref_dtbclk_khz - 1) / dccg->ref_dtbclk_khz; 154 + modulo = params->ref_dtbclk_khz * 1000; 155 + phase = req_dtbclk_khz * 1000; 159 156 160 - REG_WRITE(DTBCLK_DTO_MODULO[otg_inst], modulo); 161 - REG_WRITE(DTBCLK_DTO_PHASE[otg_inst], phase); 157 + REG_WRITE(DTBCLK_DTO_MODULO[params->otg_inst], modulo); 158 + REG_WRITE(DTBCLK_DTO_PHASE[params->otg_inst], phase); 162 159 163 - REG_UPDATE(OTG_PIXEL_RATE_CNTL[otg_inst], 164 - DTBCLK_DTO_ENABLE[otg_inst], 1); 160 + REG_UPDATE(OTG_PIXEL_RATE_CNTL[params->otg_inst], 161 + DTBCLK_DTO_ENABLE[params->otg_inst], 1); 165 162 166 - REG_WAIT(OTG_PIXEL_RATE_CNTL[otg_inst], 167 - DTBCLKDTO_ENABLE_STATUS[otg_inst], 1, 163 + REG_WAIT(OTG_PIXEL_RATE_CNTL[params->otg_inst], 164 + DTBCLKDTO_ENABLE_STATUS[params->otg_inst], 1, 168 165 1, 100); 169 166 170 167 /* program OTG_PIXEL_RATE_DIV for DIVK1 and DIVK2 fields */ 171 - dccg32_set_pixel_rate_div(dccg, otg_inst, PIXEL_RATE_DIV_BY_1, PIXEL_RATE_DIV_BY_1); 168 + dccg32_set_pixel_rate_div(dccg, params->otg_inst, PIXEL_RATE_DIV_BY_1, PIXEL_RATE_DIV_BY_1); 172 169 173 170 /* The recommended programming sequence to enable DTBCLK DTO to generate 174 171 * valid pixel HPO DPSTREAM ENCODER, 
specifies that DTO source select should 175 172 * be set only after DTO is enabled 176 173 */ 177 - REG_UPDATE(OTG_PIXEL_RATE_CNTL[otg_inst], 178 - PIPE_DTO_SRC_SEL[otg_inst], 2); 179 - 180 - dccg->dtbclk_khz[otg_inst] = req_dtbclk_khz; 174 + REG_UPDATE(OTG_PIXEL_RATE_CNTL[params->otg_inst], 175 + PIPE_DTO_SRC_SEL[params->otg_inst], 2); 181 176 } else { 182 - REG_UPDATE_2(OTG_PIXEL_RATE_CNTL[otg_inst], 183 - DTBCLK_DTO_ENABLE[otg_inst], 0, 184 - PIPE_DTO_SRC_SEL[otg_inst], 1); 177 + REG_UPDATE_2(OTG_PIXEL_RATE_CNTL[params->otg_inst], 178 + DTBCLK_DTO_ENABLE[params->otg_inst], 0, 179 + PIPE_DTO_SRC_SEL[params->otg_inst], 1); 185 180 186 - REG_WRITE(DTBCLK_DTO_MODULO[otg_inst], 0); 187 - REG_WRITE(DTBCLK_DTO_PHASE[otg_inst], 0); 188 - 189 - dccg->dtbclk_khz[otg_inst] = 0; 181 + REG_WRITE(DTBCLK_DTO_MODULO[params->otg_inst], 0); 182 + REG_WRITE(DTBCLK_DTO_PHASE[params->otg_inst], 0); 190 183 } 191 184 } 192 185
+35 -1
drivers/gpu/drm/amd/display/dc/dcn32/dcn32_dio_stream_encoder.c
··· 211 211 HDMI_GC_SEND, 1, 212 212 HDMI_NULL_SEND, 1); 213 213 214 + #if defined(CONFIG_DRM_AMD_DC_HDCP) 214 215 /* Disable Audio Content Protection packet transmission */ 215 216 REG_UPDATE(HDMI_VBI_PACKET_CONTROL, HDMI_ACP_SEND, 0); 217 + #endif 216 218 217 219 /* following belongs to audio */ 218 220 /* Enable Audio InfoFrame packet transmission. */ ··· 303 301 304 302 REG_UPDATE(DP_STEER_FIFO, DP_STEER_FIFO_RESET, 0); 305 303 304 + /* DIG Resync FIFO now needs to be explicitly enabled 305 + */ 306 + // TODO: Confirm if we need to wait for DIG_SYMCLK_FE_ON 307 + REG_WAIT(DIG_FE_CNTL, DIG_SYMCLK_FE_ON, 1, 10, 5000); 308 + 309 + REG_UPDATE(DIG_FIFO_CTRL0, DIG_FIFO_RESET, 1); 310 + 311 + REG_WAIT(DIG_FIFO_CTRL0, DIG_FIFO_RESET_DONE, 1, 10, 5000); 312 + 313 + REG_UPDATE(DIG_FIFO_CTRL0, DIG_FIFO_RESET, 0); 314 + 315 + REG_WAIT(DIG_FIFO_CTRL0, DIG_FIFO_RESET_DONE, 0, 10, 5000); 316 + 317 + REG_UPDATE(DIG_FIFO_CTRL0, DIG_FIFO_ENABLE, 1); 318 + 306 319 /* wait 100us for DIG/DP logic to prime 307 320 * (i.e. 
a few video lines) 308 321 */ ··· 371 354 } 372 355 } 373 356 357 + static void enc32_stream_encoder_reset_fifo(struct stream_encoder *enc) 358 + { 359 + struct dcn10_stream_encoder *enc1 = DCN10STRENC_FROM_STRENC(enc); 360 + uint32_t fifo_enabled; 361 + 362 + REG_GET(DIG_FIFO_CTRL0, DIG_FIFO_ENABLE, &fifo_enabled); 363 + 364 + if (fifo_enabled == 0) { 365 + /* reset DIG resync FIFO */ 366 + REG_UPDATE(DIG_FIFO_CTRL0, DIG_FIFO_RESET, 1); 367 + /* TODO: fix timeout when wait for DIG_FIFO_RESET_DONE */ 368 + //REG_WAIT(DIG_FIFO_CTRL0, DIG_FIFO_RESET_DONE, 1, 1, 100); 369 + udelay(1); 370 + REG_UPDATE(DIG_FIFO_CTRL0, DIG_FIFO_RESET, 0); 371 + REG_WAIT(DIG_FIFO_CTRL0, DIG_FIFO_RESET_DONE, 0, 1, 100); 372 + } 373 + } 374 374 375 375 static const struct stream_encoder_funcs dcn32_str_enc_funcs = { 376 376 .dp_set_odm_combine = ··· 409 375 .stop_dp_info_packets = 410 376 enc1_stream_encoder_stop_dp_info_packets, 411 377 .reset_fifo = 412 - enc1_stream_encoder_reset_fifo, 378 + enc32_stream_encoder_reset_fifo, 413 379 .dp_blank = 414 380 enc1_stream_encoder_dp_blank, 415 381 .dp_unblank =
+13 -2
drivers/gpu/drm/amd/display/dc/dcn32/dcn32_dio_stream_encoder.h
··· 89 89 SRI(DP_SEC_METADATA_TRANSMISSION, DP, id), \ 90 90 SRI(HDMI_METADATA_PACKET_CONTROL, DIG, id), \ 91 91 SRI(DIG_FE_CNTL, DIG, id), \ 92 - SRI(DIG_CLOCK_PATTERN, DIG, id) 92 + SRI(DIG_CLOCK_PATTERN, DIG, id), \ 93 + SRI(DIG_FIFO_CTRL0, DIG, id) 93 94 94 95 95 96 #define SE_COMMON_MASK_SH_LIST_DCN32_BASE(mask_sh)\ ··· 234 233 SE_SF(DIG0_HDMI_METADATA_PACKET_CONTROL, HDMI_METADATA_PACKET_LINE_REFERENCE, mask_sh),\ 235 234 SE_SF(DIG0_HDMI_METADATA_PACKET_CONTROL, HDMI_METADATA_PACKET_LINE, mask_sh),\ 236 235 SE_SF(DIG0_DIG_FE_CNTL, DOLBY_VISION_EN, mask_sh),\ 236 + SE_SF(DIG0_DIG_FE_CNTL, DIG_SYMCLK_FE_ON, mask_sh),\ 237 237 SE_SF(DP0_DP_SEC_FRAMING4, DP_SST_SDP_SPLITTING, mask_sh),\ 238 - SE_SF(DIG0_DIG_CLOCK_PATTERN, DIG_CLOCK_PATTERN, mask_sh) 238 + SE_SF(DIG0_DIG_CLOCK_PATTERN, DIG_CLOCK_PATTERN, mask_sh),\ 239 + SE_SF(DIG0_DIG_FIFO_CTRL0, DIG_FIFO_READ_START_LEVEL, mask_sh),\ 240 + SE_SF(DIG0_DIG_FIFO_CTRL0, DIG_FIFO_ENABLE, mask_sh),\ 241 + SE_SF(DIG0_DIG_FIFO_CTRL0, DIG_FIFO_RESET, mask_sh),\ 242 + SE_SF(DIG0_DIG_FIFO_CTRL0, DIG_FIFO_RESET_DONE, mask_sh) 239 243 244 + #if defined(CONFIG_DRM_AMD_DC_HDCP) 240 245 #define SE_COMMON_MASK_SH_LIST_DCN32(mask_sh)\ 241 246 SE_COMMON_MASK_SH_LIST_DCN32_BASE(mask_sh),\ 242 247 SE_SF(DIG0_HDMI_VBI_PACKET_CONTROL, HDMI_ACP_SEND, mask_sh) 248 + #else 249 + #define SE_COMMON_MASK_SH_LIST_DCN32(mask_sh)\ 250 + SE_COMMON_MASK_SH_LIST_DCN32_BASE(mask_sh) 251 + #endif 243 252 244 253 void dcn32_dio_stream_encoder_construct( 245 254 struct dcn10_stream_encoder *enc1,
+2 -2
drivers/gpu/drm/amd/display/dc/dcn32/dcn32_hwseq.c
··· 649 649 * Otherwise, if taking control is not possible, we need to power 650 650 * everything down. 651 651 */ 652 - if (dcb->funcs->is_accelerated_mode(dcb) || dc->config.power_down_display_on_boot) { 652 + if (dcb->funcs->is_accelerated_mode(dcb) || dc->config.seamless_boot_edp_requested) { 653 653 hws->funcs.init_pipes(dc, dc->current_state); 654 654 if (dc->res_pool->hubbub->funcs->allow_self_refresh_control) 655 655 dc->res_pool->hubbub->funcs->allow_self_refresh_control(dc->res_pool->hubbub, ··· 661 661 * To avoid this, power down hardware on boot 662 662 * if DIG is turned on and seamless boot not enabled 663 663 */ 664 - if (dc->config.power_down_display_on_boot) { 664 + if (dc->config.seamless_boot_edp_requested) { 665 665 struct dc_link *edp_links[MAX_NUM_EDP]; 666 666 struct dc_link *edp_link; 667 667
+34 -2
drivers/gpu/drm/amd/display/dc/dcn32/dcn32_optc.c
··· 91 91 REG_UPDATE(OPTC_WIDTH_CONTROL, 92 92 OPTC_SEGMENT_WIDTH, mpcc_hactive); 93 93 94 - REG_SET(OTG_H_TIMING_CNTL, 0, OTG_H_TIMING_DIV_MODE, opp_cnt - 1); 94 + REG_UPDATE(OTG_H_TIMING_CNTL, 95 + OTG_H_TIMING_DIV_MODE, opp_cnt - 1); 95 96 optc1->opp_count = opp_cnt; 96 97 } 97 98 99 + static void optc32_set_h_timing_div_manual_mode(struct timing_generator *optc, bool manual_mode) 100 + { 101 + struct optc *optc1 = DCN10TG_FROM_TG(optc); 102 + 103 + REG_UPDATE(OTG_H_TIMING_CNTL, 104 + OTG_H_TIMING_DIV_MODE_MANUAL, manual_mode ? 1 : 0); 105 + } 98 106 /** 99 107 * Enable CRTC 100 108 * Enable CRTC - call ASIC Control Object to enable Timing generator. ··· 165 157 REG_WAIT(OTG_CLOCK_CONTROL, OTG_BUSY, 0, 1, 100000); 166 158 } 167 159 160 + static void optc32_set_odm_bypass(struct timing_generator *optc, 161 + const struct dc_crtc_timing *dc_crtc_timing) 162 + { 163 + struct optc *optc1 = DCN10TG_FROM_TG(optc); 164 + enum h_timing_div_mode h_div = H_TIMING_NO_DIV; 165 + 166 + REG_SET_5(OPTC_DATA_SOURCE_SELECT, 0, 167 + OPTC_NUM_OF_INPUT_SEGMENT, 0, 168 + OPTC_SEG0_SRC_SEL, optc->inst, 169 + OPTC_SEG1_SRC_SEL, 0xf, 170 + OPTC_SEG2_SRC_SEL, 0xf, 171 + OPTC_SEG3_SRC_SEL, 0xf 172 + ); 173 + 174 + h_div = optc1_is_two_pixels_per_containter(dc_crtc_timing); 175 + REG_UPDATE(OTG_H_TIMING_CNTL, 176 + OTG_H_TIMING_DIV_MODE, h_div); 177 + 178 + REG_SET(OPTC_MEMORY_CONFIG, 0, 179 + OPTC_MEM_SEL, 0); 180 + optc1->opp_count = 1; 181 + } 182 + 168 183 169 184 static struct timing_generator_funcs dcn32_tg_funcs = { 170 185 .validate_timing = optc1_validate_timing, ··· 237 206 .set_dsc_config = optc3_set_dsc_config, 238 207 .get_dsc_status = optc2_get_dsc_status, 239 208 .set_dwb_source = NULL, 240 - .set_odm_bypass = optc3_set_odm_bypass, 209 + .set_odm_bypass = optc32_set_odm_bypass, 241 210 .set_odm_combine = optc32_set_odm_combine, 211 + .set_h_timing_div_manual_mode = optc32_set_h_timing_div_manual_mode, 242 212 .get_optc_source = optc2_get_optc_source, 243 213 .set_out_mux = 
optc3_set_out_mux, 244 214 .set_drr_trigger_window = optc3_set_drr_trigger_window,
+1 -1
drivers/gpu/drm/amd/display/dc/dcn32/dcn32_resource.c
··· 77 77 78 78 #include "dcn/dcn_3_2_0_offset.h" 79 79 #include "dcn/dcn_3_2_0_sh_mask.h" 80 - #include "dcn/nbio_4_3_0_offset.h" 80 + #include "nbio/nbio_4_3_0_offset.h" 81 81 82 82 #include "reg_helper.h" 83 83 #include "dce/dmub_abm.h"
+55 -4
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
··· 811 811 pipes[pipe_idx].pipe.dest.vupdate_offset = get_vupdate_offset(&context->bw_ctx.dml, pipes, pipe_cnt, pipe_idx); 812 812 pipes[pipe_idx].pipe.dest.vupdate_width = get_vupdate_width(&context->bw_ctx.dml, pipes, pipe_cnt, pipe_idx); 813 813 pipes[pipe_idx].pipe.dest.vready_offset = get_vready_offset(&context->bw_ctx.dml, pipes, pipe_cnt, pipe_idx); 814 - context->res_ctx.pipe_ctx[i].det_buffer_size_kb = context->bw_ctx.dml.ip.det_buffer_size_kbytes; 815 - context->res_ctx.pipe_ctx[i].unbounded_req = pipes[pipe_idx].pipe.src.unbounded_req_mode; 816 - 814 + if (context->res_ctx.pipe_ctx[i].stream->mall_stream_config.type == SUBVP_PHANTOM) { 815 + // Phantom pipe requires that DET_SIZE = 0 and no unbounded requests 816 + context->res_ctx.pipe_ctx[i].det_buffer_size_kb = 0; 817 + context->res_ctx.pipe_ctx[i].unbounded_req = false; 818 + } else { 819 + context->res_ctx.pipe_ctx[i].det_buffer_size_kb = context->bw_ctx.dml.ip.det_buffer_size_kbytes; 820 + context->res_ctx.pipe_ctx[i].unbounded_req = pipes[pipe_idx].pipe.src.unbounded_req_mode; 821 + } 817 822 if (context->bw_ctx.bw.dcn.clk.dppclk_khz < pipes[pipe_idx].clks_cfg.dppclk_mhz * 1000) 818 823 context->bw_ctx.bw.dcn.clk.dppclk_khz = pipes[pipe_idx].clks_cfg.dppclk_mhz * 1000; 819 824 context->res_ctx.pipe_ctx[i].plane_res.bw.dppclk_khz = ··· 1018 1013 pipes[pipe_cnt].pipe.dest.pixel_rate_mhz *= 2; 1019 1014 pipes[pipe_cnt].pipe.dest.otg_inst = res_ctx->pipe_ctx[i].stream_res.tg->inst; 1020 1015 pipes[pipe_cnt].dout.dp_lanes = 4; 1016 + if (res_ctx->pipe_ctx[i].stream->link) { 1017 + switch (res_ctx->pipe_ctx[i].stream->link->cur_link_settings.link_rate) { 1018 + case LINK_RATE_HIGH: 1019 + pipes[pipe_cnt].dout.dp_rate = dm_dp_rate_hbr; 1020 + break; 1021 + case LINK_RATE_HIGH2: 1022 + pipes[pipe_cnt].dout.dp_rate = dm_dp_rate_hbr2; 1023 + break; 1024 + case LINK_RATE_HIGH3: 1025 + pipes[pipe_cnt].dout.dp_rate = dm_dp_rate_hbr3; 1026 + break; 1027 + case LINK_RATE_UHBR10: 1028 + 
pipes[pipe_cnt].dout.dp_rate = dm_dp_rate_uhbr10; 1029 + break; 1030 + case LINK_RATE_UHBR13_5: 1031 + pipes[pipe_cnt].dout.dp_rate = dm_dp_rate_uhbr13p5; 1032 + break; 1033 + case LINK_RATE_UHBR20: 1034 + pipes[pipe_cnt].dout.dp_rate = dm_dp_rate_uhbr20; 1035 + break; 1036 + default: 1037 + pipes[pipe_cnt].dout.dp_rate = dm_dp_rate_na; 1038 + break; 1039 + } 1040 + } 1021 1041 pipes[pipe_cnt].dout.is_virtual = 0; 1022 1042 pipes[pipe_cnt].pipe.dest.vtotal_min = res_ctx->pipe_ctx[i].stream->adjust.v_total_min; 1023 1043 pipes[pipe_cnt].pipe.dest.vtotal_max = res_ctx->pipe_ctx[i].stream->adjust.v_total_max; ··· 1100 1070 pipes[pipe_cnt].dout.is_virtual = 1; 1101 1071 pipes[pipe_cnt].dout.output_type = dm_dp; 1102 1072 pipes[pipe_cnt].dout.dp_lanes = 4; 1073 + pipes[pipe_cnt].dout.dp_rate = dm_dp_rate_hbr2; 1103 1074 } 1104 1075 1105 1076 switch (res_ctx->pipe_ctx[i].stream->timing.display_color_depth) { ··· 1169 1138 * bw calculations due to cursor on/off 1170 1139 */ 1171 1140 if (res_ctx->pipe_ctx[i].plane_state && 1172 - res_ctx->pipe_ctx[i].plane_state->address.type == PLN_ADDR_TYPE_VIDEO_PROGRESSIVE) 1141 + (res_ctx->pipe_ctx[i].plane_state->address.type == PLN_ADDR_TYPE_VIDEO_PROGRESSIVE || 1142 + res_ctx->pipe_ctx[i].stream->mall_stream_config.type == SUBVP_PHANTOM)) 1173 1143 pipes[pipe_cnt].pipe.src.num_cursors = 0; 1174 1144 else 1175 1145 pipes[pipe_cnt].pipe.src.num_cursors = dc->dml.ip.number_of_cursors; ··· 1181 1149 if (!res_ctx->pipe_ctx[i].plane_state) { 1182 1150 pipes[pipe_cnt].pipe.src.is_hsplit = pipes[pipe_cnt].pipe.dest.odm_combine != dm_odm_combine_mode_disabled; 1183 1151 pipes[pipe_cnt].pipe.src.source_scan = dm_horz; 1152 + pipes[pipe_cnt].pipe.src.source_rotation = dm_rotation_0; 1184 1153 pipes[pipe_cnt].pipe.src.sw_mode = dm_sw_4kb_s; 1185 1154 pipes[pipe_cnt].pipe.src.macro_tile_size = dm_64k_tile; 1186 1155 pipes[pipe_cnt].pipe.src.viewport_width = timing->h_addressable; ··· 1234 1201 1235 1202 pipes[pipe_cnt].pipe.src.source_scan = 
pln->rotation == ROTATION_ANGLE_90 1236 1203 || pln->rotation == ROTATION_ANGLE_270 ? dm_vert : dm_horz; 1204 + switch (pln->rotation) { 1205 + case ROTATION_ANGLE_0: 1206 + pipes[pipe_cnt].pipe.src.source_rotation = dm_rotation_0; 1207 + break; 1208 + case ROTATION_ANGLE_90: 1209 + pipes[pipe_cnt].pipe.src.source_rotation = dm_rotation_90; 1210 + break; 1211 + case ROTATION_ANGLE_180: 1212 + pipes[pipe_cnt].pipe.src.source_rotation = dm_rotation_180; 1213 + break; 1214 + case ROTATION_ANGLE_270: 1215 + pipes[pipe_cnt].pipe.src.source_rotation = dm_rotation_270; 1216 + break; 1217 + default: 1218 + break; 1219 + } 1237 1220 pipes[pipe_cnt].pipe.src.viewport_y_y = scl->viewport.y; 1238 1221 pipes[pipe_cnt].pipe.src.viewport_y_c = scl->viewport_c.y; 1222 + pipes[pipe_cnt].pipe.src.viewport_x_y = scl->viewport.x; 1223 + pipes[pipe_cnt].pipe.src.viewport_x_c = scl->viewport_c.x; 1239 1224 pipes[pipe_cnt].pipe.src.viewport_width = scl->viewport.width; 1240 1225 pipes[pipe_cnt].pipe.src.viewport_width_c = scl->viewport_c.width; 1241 1226 pipes[pipe_cnt].pipe.src.viewport_height = scl->viewport.height;
+10
drivers/gpu/drm/amd/display/dc/inc/core_types.h
··· 195 195 enum dc_status (*add_dsc_to_stream_resource)( 196 196 struct dc *dc, struct dc_state *state, 197 197 struct dc_stream_state *stream); 198 + 199 + void (*add_phantom_pipes)( 200 + struct dc *dc, 201 + struct dc_state *context, 202 + display_e2e_pipe_params_st *pipes, 203 + unsigned int pipe_cnt, 204 + unsigned int index); 205 + void (*remove_phantom_pipes)( 206 + struct dc *dc, 207 + struct dc_state *context); 198 208 }; 199 209 200 210 struct audio_support{
+3 -2
drivers/gpu/drm/amd/display/dc/inc/hw/dccg.h
··· 45 45 PHYSYMCLK_FORCE_SRC_PHYD32CLK, // Select phyd32clk as the source of clock which is output to PHY through DCIO. 46 46 }; 47 47 48 - enum hdmistreamclk_source { 48 + enum streamclk_source { 49 49 REFCLK, // Selects REFCLK as source for hdmistreamclk. 50 50 DTBCLK0, // Selects DTBCLK0 as source for hdmistreamclk. 51 + DPREFCLK, // Selects DPREFCLK as source for hdmistreamclk 51 52 }; 52 53 53 54 enum dentist_dispclk_change_mode { ··· 92 91 93 92 void (*set_dpstreamclk)( 94 93 struct dccg *dccg, 95 - enum hdmistreamclk_source src, 94 + enum streamclk_source src, 96 95 int otg_inst); 97 96 98 97 void (*enable_symclk32_se)(
+3
drivers/gpu/drm/amd/display/dc/inc/hw/dchubbub.h
··· 47 47 uint32_t sr_enter; 48 48 uint32_t sr_exit; 49 49 uint32_t dram_clk_chanage; 50 + uint32_t usr_retrain; 51 + uint32_t fclk_pstate_change; 50 52 }; 51 53 52 54 struct dcn_hubbub_wm { ··· 170 168 void (*program_det_size)(struct hubbub *hubbub, int hubp_inst, unsigned det_buffer_size_in_kbyte); 171 169 void (*program_compbuf_size)(struct hubbub *hubbub, unsigned compbuf_size_kb, bool safe_to_increase); 172 170 void (*init_crb)(struct hubbub *hubbub); 171 + void (*force_usr_retraining_allow)(struct hubbub *hubbub, bool allow); 173 172 }; 174 173 175 174 struct hubbub {
+7
drivers/gpu/drm/amd/display/dc/inc/hw/hubp.h
··· 140 140 141 141 void (*set_blank)(struct hubp *hubp, bool blank); 142 142 void (*set_blank_regs)(struct hubp *hubp, bool blank); 143 + #ifdef CONFIG_DRM_AMD_DC_DCN 144 + void (*phantom_hubp_post_enable)(struct hubp *hubp); 145 + #endif 143 146 void (*set_hubp_blank_en)(struct hubp *hubp, bool blank); 144 147 145 148 void (*set_cursor_attributes)( ··· 195 192 bool enable); 196 193 bool (*hubp_in_blank)(struct hubp *hubp); 197 194 void (*hubp_soft_reset)(struct hubp *hubp, bool reset); 195 + 196 + void (*hubp_update_force_pstate_disallow)(struct hubp *hubp, bool allow); 197 + void (*hubp_update_mall_sel)(struct hubp *hubp, uint32_t mall_sel); 198 + void (*hubp_prepare_subvp_buffering)(struct hubp *hubp, bool enable); 198 199 199 200 void (*hubp_set_flip_int)(struct hubp *hubp); 200 201
+2
drivers/gpu/drm/amd/display/dc/inc/hw/mem_input.h
··· 37 37 uint32_t cstate_enter_plus_exit_z8_ns; 38 38 uint32_t cstate_enter_plus_exit_ns; 39 39 uint32_t pstate_change_ns; 40 + uint32_t fclk_pstate_change_ns; 40 41 }; 41 42 42 43 struct dcn_watermarks { ··· 47 46 uint32_t frac_urg_bw_flip; 48 47 int32_t urgent_latency_ns; 49 48 struct cstate_pstate_watermarks_st cstate_pstate; 49 + uint32_t usr_retraining_ns; 50 50 }; 51 51 52 52 struct dcn_watermark_set {
+4
drivers/gpu/drm/amd/display/dc/inc/hw/stream_encoder.h
··· 164 164 void (*stop_dp_info_packets)( 165 165 struct stream_encoder *enc); 166 166 167 + void (*reset_fifo)( 168 + struct stream_encoder *enc 169 + ); 170 + 167 171 void (*dp_blank)( 168 172 struct dc_link *link, 169 173 struct stream_encoder *enc);
+8 -2
drivers/gpu/drm/amd/display/dc/inc/hw/timing_generator.h
··· 174 174 175 175 bool (*enable_crtc)(struct timing_generator *tg); 176 176 bool (*disable_crtc)(struct timing_generator *tg); 177 + #ifdef CONFIG_DRM_AMD_DC_DCN 178 + void (*phantom_crtc_post_enable)(struct timing_generator *tg); 179 + #endif 177 180 bool (*immediate_disable_crtc)(struct timing_generator *tg); 178 181 bool (*is_counter_moving)(struct timing_generator *tg); 179 182 void (*get_position)(struct timing_generator *tg, ··· 296 293 void (*set_odm_bypass)(struct timing_generator *optc, const struct dc_crtc_timing *dc_crtc_timing); 297 294 void (*set_odm_combine)(struct timing_generator *optc, int *opp_id, int opp_cnt, 298 295 struct dc_crtc_timing *timing); 296 + void (*set_h_timing_div_manual_mode)(struct timing_generator *optc, bool manual_mode); 299 297 void (*set_gsl)(struct timing_generator *optc, const struct gsl_params *params); 300 298 void (*set_gsl_source_select)(struct timing_generator *optc, 301 299 int group_idx, ··· 314 310 uint32_t slave_pixel_clock_100Hz, 315 311 uint8_t master_clock_divider, 316 312 uint8_t slave_clock_divider); 317 - 318 - void (*init_odm)(struct timing_generator *tg); 313 + bool (*validate_vmin_vmax)(struct timing_generator *optc, 314 + int vmin, int vmax); 315 + bool (*validate_vtotal_change_limit)(struct timing_generator *optc, 316 + uint32_t vtotal_change_limit); 319 317 }; 320 318 321 319 #endif
+2
drivers/gpu/drm/amd/display/dc/inc/hw_sequencer.h
··· 244 244 struct pipe_ctx *pipe_ctx, 245 245 struct tg_color *color, 246 246 int mpcc_id); 247 + 248 + void (*commit_subvp_config)(struct dc *dc, struct dc_state *context); 247 249 }; 248 250 249 251 void color_space_to_black_color(
+5
drivers/gpu/drm/amd/display/dc/inc/hw_sequencer_private.h
··· 145 145 void (*PLAT_58856_wa)(struct dc_state *context, 146 146 struct pipe_ctx *pipe_ctx); 147 147 void (*setup_hpo_hw_control)(const struct dce_hwseq *hws, bool enable); 148 + #ifdef CONFIG_DRM_AMD_DC_DCN 149 + void (*program_mall_pipe_config)(struct dc *dc, struct dc_state *context); 150 + void (*subvp_update_force_pstate)(struct dc *dc, struct dc_state *context); 151 + void (*update_mall_sel)(struct dc *dc, struct dc_state *context); 152 + #endif 148 153 }; 149 154 150 155 struct dce_hwseq {
+7
drivers/gpu/drm/amd/display/dc/inc/resource.h
··· 205 205 struct link_resource *link_res, 206 206 struct dc_link_settings *link_settings); 207 207 208 + #if defined(CONFIG_DRM_AMD_DC_DCN) 209 + struct hpo_dp_link_encoder *resource_get_hpo_dp_link_enc_for_det_lt( 210 + const struct resource_context *res_ctx, 211 + const struct resource_pool *pool, 212 + const struct dc_link *link); 213 + #endif 214 + 208 215 void reset_syncd_pipes_from_disabled_pipes(struct dc *dc, 209 216 struct dc_state *context); 210 217
+10
drivers/gpu/drm/amd/display/include/bios_parser_types.h
··· 335 335 uint32_t dram_sr_enter_exit_latency_100ns; 336 336 }; 337 337 338 + struct bp_connector_speed_cap_info { 339 + uint32_t DP_HBR2_EN:1; 340 + uint32_t DP_HBR3_EN:1; 341 + uint32_t HDMI_6GB_EN:1; 342 + uint32_t DP_UHBR10_EN:1; 343 + uint32_t DP_UHBR13_5_EN:1; 344 + uint32_t DP_UHBR20_EN:1; 345 + uint32_t RESERVED:29; 346 + }; 347 + 338 348 #endif /*__DAL_BIOS_PARSER_TYPES_H__ */