Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

drm/amdgpu/vcn: fix vcn2.5 instance issue

Fix vcn2.5 instance issue: vcn0 and vcn1 have the same register offset

Signed-off-by: James Zhu <James.Zhu@amd.com>
Reviewed-by: Leo Liu <leo.liu@amd.com>
Signed-off-by: Alex Deucher <alexander.deucher@amd.com>

Authored by James Zhu and committed by Alex Deucher
326b523e 62884a7b

+44 -44
+44 -44
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
··· 435 435 if (adev->firmware.load_type == AMDGPU_FW_LOAD_PSP) { 436 436 if (!indirect) { 437 437 WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0( 438 - UVD, inst_idx, mmUVD_LMI_VCPU_CACHE_64BIT_BAR_LOW), 438 + UVD, 0, mmUVD_LMI_VCPU_CACHE_64BIT_BAR_LOW), 439 439 (adev->firmware.ucode[AMDGPU_UCODE_ID_VCN + inst_idx].tmr_mc_addr_lo), 0, indirect); 440 440 WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0( 441 - UVD, inst_idx, mmUVD_LMI_VCPU_CACHE_64BIT_BAR_HIGH), 441 + UVD, 0, mmUVD_LMI_VCPU_CACHE_64BIT_BAR_HIGH), 442 442 (adev->firmware.ucode[AMDGPU_UCODE_ID_VCN + inst_idx].tmr_mc_addr_hi), 0, indirect); 443 443 WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0( 444 - UVD, inst_idx, mmUVD_VCPU_CACHE_OFFSET0), 0, 0, indirect); 444 + UVD, 0, mmUVD_VCPU_CACHE_OFFSET0), 0, 0, indirect); 445 445 } else { 446 446 WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0( 447 - UVD, inst_idx, mmUVD_LMI_VCPU_CACHE_64BIT_BAR_LOW), 0, 0, indirect); 447 + UVD, 0, mmUVD_LMI_VCPU_CACHE_64BIT_BAR_LOW), 0, 0, indirect); 448 448 WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0( 449 - UVD, inst_idx, mmUVD_LMI_VCPU_CACHE_64BIT_BAR_HIGH), 0, 0, indirect); 449 + UVD, 0, mmUVD_LMI_VCPU_CACHE_64BIT_BAR_HIGH), 0, 0, indirect); 450 450 WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0( 451 - UVD, inst_idx, mmUVD_VCPU_CACHE_OFFSET0), 0, 0, indirect); 451 + UVD, 0, mmUVD_VCPU_CACHE_OFFSET0), 0, 0, indirect); 452 452 } 453 453 offset = 0; 454 454 } else { 455 455 WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0( 456 - UVD, inst_idx, mmUVD_LMI_VCPU_CACHE_64BIT_BAR_LOW), 456 + UVD, 0, mmUVD_LMI_VCPU_CACHE_64BIT_BAR_LOW), 457 457 lower_32_bits(adev->vcn.inst[inst_idx].gpu_addr), 0, indirect); 458 458 WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0( 459 - UVD, inst_idx, mmUVD_LMI_VCPU_CACHE_64BIT_BAR_HIGH), 459 + UVD, 0, mmUVD_LMI_VCPU_CACHE_64BIT_BAR_HIGH), 460 460 
upper_32_bits(adev->vcn.inst[inst_idx].gpu_addr), 0, indirect); 461 461 offset = size; 462 462 WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0( 463 - UVD, inst_idx, mmUVD_VCPU_CACHE_OFFSET0), 463 + UVD, 0, mmUVD_VCPU_CACHE_OFFSET0), 464 464 AMDGPU_UVD_FIRMWARE_OFFSET >> 3, 0, indirect); 465 465 } 466 466 467 467 if (!indirect) 468 468 WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0( 469 - UVD, inst_idx, mmUVD_VCPU_CACHE_SIZE0), size, 0, indirect); 469 + UVD, 0, mmUVD_VCPU_CACHE_SIZE0), size, 0, indirect); 470 470 else 471 471 WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0( 472 - UVD, inst_idx, mmUVD_VCPU_CACHE_SIZE0), 0, 0, indirect); 472 + UVD, 0, mmUVD_VCPU_CACHE_SIZE0), 0, 0, indirect); 473 473 474 474 /* cache window 1: stack */ 475 475 if (!indirect) { 476 476 WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0( 477 - UVD, inst_idx, mmUVD_LMI_VCPU_CACHE1_64BIT_BAR_LOW), 477 + UVD, 0, mmUVD_LMI_VCPU_CACHE1_64BIT_BAR_LOW), 478 478 lower_32_bits(adev->vcn.inst[inst_idx].gpu_addr + offset), 0, indirect); 479 479 WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0( 480 - UVD, inst_idx, mmUVD_LMI_VCPU_CACHE1_64BIT_BAR_HIGH), 480 + UVD, 0, mmUVD_LMI_VCPU_CACHE1_64BIT_BAR_HIGH), 481 481 upper_32_bits(adev->vcn.inst[inst_idx].gpu_addr + offset), 0, indirect); 482 482 WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0( 483 - UVD, inst_idx, mmUVD_VCPU_CACHE_OFFSET1), 0, 0, indirect); 483 + UVD, 0, mmUVD_VCPU_CACHE_OFFSET1), 0, 0, indirect); 484 484 } else { 485 485 WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0( 486 - UVD, inst_idx, mmUVD_LMI_VCPU_CACHE1_64BIT_BAR_LOW), 0, 0, indirect); 486 + UVD, 0, mmUVD_LMI_VCPU_CACHE1_64BIT_BAR_LOW), 0, 0, indirect); 487 487 WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0( 488 - UVD, inst_idx, mmUVD_LMI_VCPU_CACHE1_64BIT_BAR_HIGH), 0, 0, indirect); 488 + UVD, 0, mmUVD_LMI_VCPU_CACHE1_64BIT_BAR_HIGH), 0, 0, indirect); 489 489 
WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0( 490 - UVD, inst_idx, mmUVD_VCPU_CACHE_OFFSET1), 0, 0, indirect); 490 + UVD, 0, mmUVD_VCPU_CACHE_OFFSET1), 0, 0, indirect); 491 491 } 492 492 WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0( 493 - UVD, inst_idx, mmUVD_VCPU_CACHE_SIZE1), AMDGPU_VCN_STACK_SIZE, 0, indirect); 493 + UVD, 0, mmUVD_VCPU_CACHE_SIZE1), AMDGPU_VCN_STACK_SIZE, 0, indirect); 494 494 495 495 /* cache window 2: context */ 496 496 WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0( 497 - UVD, inst_idx, mmUVD_LMI_VCPU_CACHE2_64BIT_BAR_LOW), 497 + UVD, 0, mmUVD_LMI_VCPU_CACHE2_64BIT_BAR_LOW), 498 498 lower_32_bits(adev->vcn.inst[inst_idx].gpu_addr + offset + AMDGPU_VCN_STACK_SIZE), 0, indirect); 499 499 WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0( 500 - UVD, inst_idx, mmUVD_LMI_VCPU_CACHE2_64BIT_BAR_HIGH), 500 + UVD, 0, mmUVD_LMI_VCPU_CACHE2_64BIT_BAR_HIGH), 501 501 upper_32_bits(adev->vcn.inst[inst_idx].gpu_addr + offset + AMDGPU_VCN_STACK_SIZE), 0, indirect); 502 502 WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0( 503 - UVD, inst_idx, mmUVD_VCPU_CACHE_OFFSET2), 0, 0, indirect); 503 + UVD, 0, mmUVD_VCPU_CACHE_OFFSET2), 0, 0, indirect); 504 504 WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0( 505 - UVD, inst_idx, mmUVD_VCPU_CACHE_SIZE2), AMDGPU_VCN_CONTEXT_SIZE, 0, indirect); 505 + UVD, 0, mmUVD_VCPU_CACHE_SIZE2), AMDGPU_VCN_CONTEXT_SIZE, 0, indirect); 506 506 507 507 /* non-cache window */ 508 508 WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0( 509 - UVD, inst_idx, mmUVD_LMI_VCPU_NC0_64BIT_BAR_LOW), 0, 0, indirect); 509 + UVD, 0, mmUVD_LMI_VCPU_NC0_64BIT_BAR_LOW), 0, 0, indirect); 510 510 WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0( 511 - UVD, inst_idx, mmUVD_LMI_VCPU_NC0_64BIT_BAR_HIGH), 0, 0, indirect); 511 + UVD, 0, mmUVD_LMI_VCPU_NC0_64BIT_BAR_HIGH), 0, 0, indirect); 512 512 WREG32_SOC15_DPG_MODE_2_0(inst_idx, 
SOC15_DPG_MODE_OFFSET_2_0( 513 - UVD, inst_idx, mmUVD_VCPU_NONCACHE_OFFSET0), 0, 0, indirect); 513 + UVD, 0, mmUVD_VCPU_NONCACHE_OFFSET0), 0, 0, indirect); 514 514 WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0( 515 - UVD, inst_idx, mmUVD_VCPU_NONCACHE_SIZE0), 0, 0, indirect); 515 + UVD, 0, mmUVD_VCPU_NONCACHE_SIZE0), 0, 0, indirect); 516 516 517 517 /* VCN global tiling registers */ 518 518 WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0( 519 - UVD, inst_idx, mmUVD_GFX8_ADDR_CONFIG), adev->gfx.config.gb_addr_config, 0, indirect); 519 + UVD, 0, mmUVD_GFX8_ADDR_CONFIG), adev->gfx.config.gb_addr_config, 0, indirect); 520 520 } 521 521 522 522 /** ··· 670 670 UVD_CGC_CTRL__VCPU_MODE_MASK | 671 671 UVD_CGC_CTRL__MMSCH_MODE_MASK); 672 672 WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0( 673 - UVD, inst_idx, mmUVD_CGC_CTRL), reg_data, sram_sel, indirect); 673 + UVD, 0, mmUVD_CGC_CTRL), reg_data, sram_sel, indirect); 674 674 675 675 /* turn off clock gating */ 676 676 WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0( 677 - UVD, inst_idx, mmUVD_CGC_GATE), 0, sram_sel, indirect); 677 + UVD, 0, mmUVD_CGC_GATE), 0, sram_sel, indirect); 678 678 679 679 /* turn on SUVD clock gating */ 680 680 WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0( 681 - UVD, inst_idx, mmUVD_SUVD_CGC_GATE), 1, sram_sel, indirect); 681 + UVD, 0, mmUVD_SUVD_CGC_GATE), 1, sram_sel, indirect); 682 682 683 683 /* turn on sw mode in UVD_SUVD_CGC_CTRL */ 684 684 WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0( 685 - UVD, inst_idx, mmUVD_SUVD_CGC_CTRL), 0, sram_sel, indirect); 685 + UVD, 0, mmUVD_SUVD_CGC_CTRL), 0, sram_sel, indirect); 686 686 } 687 687 688 688 /** ··· 772 772 tmp |= UVD_VCPU_CNTL__CLK_EN_MASK; 773 773 tmp |= UVD_VCPU_CNTL__BLK_RST_MASK; 774 774 WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0( 775 - UVD, inst_idx, mmUVD_VCPU_CNTL), tmp, 0, indirect); 775 + UVD, 0, mmUVD_VCPU_CNTL), tmp, 0, 
indirect); 776 776 777 777 /* disable master interupt */ 778 778 WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0( 779 - UVD, inst_idx, mmUVD_MASTINT_EN), 0, 0, indirect); 779 + UVD, 0, mmUVD_MASTINT_EN), 0, 0, indirect); 780 780 781 781 /* setup mmUVD_LMI_CTRL */ 782 782 tmp = (0x8 | UVD_LMI_CTRL__WRITE_CLEAN_TIMER_EN_MASK | ··· 788 788 (8 << UVD_LMI_CTRL__WRITE_CLEAN_TIMER__SHIFT) | 789 789 0x00100000L); 790 790 WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0( 791 - UVD, inst_idx, mmUVD_LMI_CTRL), tmp, 0, indirect); 791 + UVD, 0, mmUVD_LMI_CTRL), tmp, 0, indirect); 792 792 793 793 WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0( 794 - UVD, inst_idx, mmUVD_MPC_CNTL), 794 + UVD, 0, mmUVD_MPC_CNTL), 795 795 0x2 << UVD_MPC_CNTL__REPLACEMENT_MODE__SHIFT, 0, indirect); 796 796 797 797 WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0( 798 - UVD, inst_idx, mmUVD_MPC_SET_MUXA0), 798 + UVD, 0, mmUVD_MPC_SET_MUXA0), 799 799 ((0x1 << UVD_MPC_SET_MUXA0__VARA_1__SHIFT) | 800 800 (0x2 << UVD_MPC_SET_MUXA0__VARA_2__SHIFT) | 801 801 (0x3 << UVD_MPC_SET_MUXA0__VARA_3__SHIFT) | 802 802 (0x4 << UVD_MPC_SET_MUXA0__VARA_4__SHIFT)), 0, indirect); 803 803 804 804 WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0( 805 - UVD, inst_idx, mmUVD_MPC_SET_MUXB0), 805 + UVD, 0, mmUVD_MPC_SET_MUXB0), 806 806 ((0x1 << UVD_MPC_SET_MUXB0__VARB_1__SHIFT) | 807 807 (0x2 << UVD_MPC_SET_MUXB0__VARB_2__SHIFT) | 808 808 (0x3 << UVD_MPC_SET_MUXB0__VARB_3__SHIFT) | 809 809 (0x4 << UVD_MPC_SET_MUXB0__VARB_4__SHIFT)), 0, indirect); 810 810 811 811 WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0( 812 - UVD, inst_idx, mmUVD_MPC_SET_MUX), 812 + UVD, 0, mmUVD_MPC_SET_MUX), 813 813 ((0x0 << UVD_MPC_SET_MUX__SET_0__SHIFT) | 814 814 (0x1 << UVD_MPC_SET_MUX__SET_1__SHIFT) | 815 815 (0x2 << UVD_MPC_SET_MUX__SET_2__SHIFT)), 0, indirect); ··· 817 817 vcn_v2_5_mc_resume_dpg_mode(adev, inst_idx, indirect); 818 818 819 819 
WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0( 820 - UVD, inst_idx, mmUVD_REG_XX_MASK), 0x10, 0, indirect); 820 + UVD, 0, mmUVD_REG_XX_MASK), 0x10, 0, indirect); 821 821 WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0( 822 - UVD, inst_idx, mmUVD_RBC_XX_IB_REG_CHECK), 0x3, 0, indirect); 822 + UVD, 0, mmUVD_RBC_XX_IB_REG_CHECK), 0x3, 0, indirect); 823 823 824 824 /* enable LMI MC and UMC channels */ 825 825 WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0( 826 - UVD, inst_idx, mmUVD_LMI_CTRL2), 0, 0, indirect); 826 + UVD, 0, mmUVD_LMI_CTRL2), 0, 0, indirect); 827 827 828 828 /* unblock VCPU register access */ 829 829 WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0( 830 - UVD, inst_idx, mmUVD_RB_ARB_CTRL), 0, 0, indirect); 830 + UVD, 0, mmUVD_RB_ARB_CTRL), 0, 0, indirect); 831 831 832 832 tmp = (0xFF << UVD_VCPU_CNTL__PRB_TIMEOUT_VAL__SHIFT); 833 833 tmp |= UVD_VCPU_CNTL__CLK_EN_MASK; 834 834 WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0( 835 - UVD, inst_idx, mmUVD_VCPU_CNTL), tmp, 0, indirect); 835 + UVD, 0, mmUVD_VCPU_CNTL), tmp, 0, indirect); 836 836 837 837 /* enable master interrupt */ 838 838 WREG32_SOC15_DPG_MODE_2_0(inst_idx, SOC15_DPG_MODE_OFFSET_2_0( 839 - UVD, inst_idx, mmUVD_MASTINT_EN), 839 + UVD, 0, mmUVD_MASTINT_EN), 840 840 UVD_MASTINT_EN__VCPU_EN_MASK, 0, indirect); 841 841 842 842 if (indirect)