riscv: __asm_copy_to-from_user: Fix: Typos in comments

Fix typos and grammar mistakes in the comments and use more intuitive
label names.

Signed-off-by: Akira Tsukamoto <akira.tsukamoto@gmail.com>
Fixes: ca6eaaa210de ("riscv: __asm_copy_to-from_user: Optimize unaligned memory access and pipeline stall")
Signed-off-by: Palmer Dabbelt <palmerdabbelt@google.com>


+9 -9
arch/riscv/lib/uaccess.S
···
 
 	/*
 	 * Use byte copy only if too small.
+	 * SZREG holds 4 for RV32 and 8 for RV64
 	 */
 	li	a3, 9*SZREG /* size must be larger than size in word_copy */
 	bltu	a2, a3, .Lbyte_copy_tail
 
 	/*
-	 * Copy first bytes until dst is align to word boundary.
+	 * Copy first bytes until dst is aligned to word boundary.
 	 * a0 - start of dst
 	 * t1 - start of aligned dst
 	 */
 	addi	t1, a0, SZREG-1
 	andi	t1, t1, ~(SZREG-1)
 	/* dst is already aligned, skip */
-	beq	a0, t1, .Lskip_first_bytes
+	beq	a0, t1, .Lskip_align_dst
 1:
 	/* a5 - one byte for copying data */
 	fixup lb	a5, 0(a1), 10f
···
 	addi	a0, a0, 1	/* dst */
 	bltu	a0, t1, 1b	/* t1 - start of aligned dst */
 
-.Lskip_first_bytes:
+.Lskip_align_dst:
 	/*
 	 * Now dst is aligned.
 	 * Use shift-copy if src is misaligned.
···
 	 *
 	 * a0 - start of aligned dst
 	 * a1 - start of aligned src
-	 * a3 - a1 & mask:(SZREG-1)
 	 * t0 - end of aligned dst
 	 */
 	addi	t0, t0, -(8*SZREG) /* not to over run */
···
 	 * For misaligned copy we still perform aligned word copy, but
 	 * we need to use the value fetched from the previous iteration and
 	 * do some shifts.
-	 * This is safe because reading less than a word size.
+	 * This is safe because reading is less than a word size.
 	 *
 	 * a0 - start of aligned dst
 	 * a1 - start of src
···
 	 */
 	/* calculating aligned word boundary for dst */
 	andi	t1, t0, ~(SZREG-1)
-	/* Converting unaligned src to aligned arc */
+	/* Converting unaligned src to aligned src */
 	andi	a1, a1, ~(SZREG-1)
 
 	/*
···
 	li	a5, SZREG*8
 	sub	t4, a5, t3
 
-	/* Load the first word to combine with seceond word */
+	/* Load the first word to combine with second word */
 	fixup REG_L	a5, 0(a1), 10f
 
 3:
···
 	 * a1 - start of remaining src
 	 * t0 - end of remaining dst
 	 */
-	bgeu	a0, t0, 5f
+	bgeu	a0, t0, .Lout_copy_user	/* check if end of copy */
 4:
 	fixup lb	a5, 0(a1), 10f
 	addi	a1, a1, 1	/* src */
···
 	addi	a0, a0, 1	/* dst */
 	bltu	a0, t0, 4b	/* t0 - end of dst */
 
-5:
+.Lout_copy_user:
 	/* Disable access to user memory */
 	csrc	CSR_STATUS, t6
 	li	a0, 0
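
For context, the comments touched above describe the shift-copy path: when
src is misaligned, the loop still issues only aligned word loads and builds
each destination word from the word fetched in the previous iteration plus
the next aligned word, shifted together. The following is a rough C sketch
of that idea, not the kernel code: the name shift_copy is made up for this
example, it assumes a little-endian machine with len a multiple of the word
size, and it handles aliasing loosely in the way kernel code built with
-fno-strict-aliasing does.

#include <stddef.h>
#include <stdint.h>
#include <string.h>

/*
 * Sketch only, not the kernel implementation.  Copy len bytes (assumed
 * to be a multiple of the word size) from a possibly misaligned src to
 * a word-aligned dst using aligned word loads only.
 *
 * Precondition (mirrors the assembly's assumption): the aligned word
 * containing src[len - 1] must be fully readable, since the final load
 * may read up to wordsize-1 bytes past src + len but never crosses
 * into the following word.
 */
static void shift_copy(unsigned long *dst, const unsigned char *src, size_t len)
{
	const size_t wbytes = sizeof(unsigned long);
	size_t off = (uintptr_t)src & (wbytes - 1);	/* src misalignment in bytes */
	size_t words = len / wbytes;

	if (off == 0) {			/* src already aligned: plain word copy */
		memcpy(dst, src, words * wbytes);
		return;
	}

	/* Round src down to a word boundary, like "andi a1, a1, ~(SZREG-1)". */
	const unsigned long *asrc = (const unsigned long *)(src - off);
	unsigned int shift = off * 8;			/* bits taken from the previous word */
	unsigned int rshift = wbytes * 8 - shift;	/* bits taken from the next word */
	unsigned long prev = *asrc++;	/* "load the first word to combine with second word" */

	while (words--) {
		unsigned long next = *asrc++;	/* aligned load only */
		/* Little-endian: low bytes of the result come from prev. */
		*dst++ = (prev >> shift) | (next << rshift);
		prev = next;
	}
}

The head bytes that align dst and any tail shorter than a word are not
handled in this sketch; in the assembly they go through the byte-copy
paths around .Lskip_align_dst and .Lbyte_copy_tail. The bounded over-read
at the end is exactly what the corrected comment "This is safe because
reading is less than a word size" refers to.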