Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

locking/atomic: treewide: delete arch_atomic_*() kerneldoc

Currently several architectures have kerneldoc comments for
arch_atomic_*(), which is unhelpful as these live in a shared namespace
where they clash, and the arch_atomic_*() ops are now an implementation
detail of the raw_atomic_*() ops, which no-one should use
directly.

Delete the kerneldoc comments for arch_atomic_*(), along with
pseudo-kerneldoc comments which are in the correct style but are missing
the leading '/**' necessary to be true kerneldoc comments.

There should be no functional change as a result of this patch.

Signed-off-by: Mark Rutland <mark.rutland@arm.com>
Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Reviewed-by: Kees Cook <keescook@chromium.org>
Link: https://lore.kernel.org/r/20230605070124.3741859-28-mark.rutland@arm.com

authored by

Mark Rutland and committed by
Peter Zijlstra
ef558b4b e74f4059

-351
-25
arch/alpha/include/asm/atomic.h
··· 200 200 #undef ATOMIC_OP_RETURN 201 201 #undef ATOMIC_OP 202 202 203 - /** 204 - * arch_atomic_fetch_add_unless - add unless the number is a given value 205 - * @v: pointer of type atomic_t 206 - * @a: the amount to add to v... 207 - * @u: ...unless v is equal to u. 208 - * 209 - * Atomically adds @a to @v, so long as it was not @u. 210 - * Returns the old value of @v. 211 - */ 212 203 static __inline__ int arch_atomic_fetch_add_unless(atomic_t *v, int a, int u) 213 204 { 214 205 int c, new, old; ··· 223 232 } 224 233 #define arch_atomic_fetch_add_unless arch_atomic_fetch_add_unless 225 234 226 - /** 227 - * arch_atomic64_fetch_add_unless - add unless the number is a given value 228 - * @v: pointer of type atomic64_t 229 - * @a: the amount to add to v... 230 - * @u: ...unless v is equal to u. 231 - * 232 - * Atomically adds @a to @v, so long as it was not @u. 233 - * Returns the old value of @v. 234 - */ 235 235 static __inline__ s64 arch_atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u) 236 236 { 237 237 s64 c, new, old; ··· 246 264 } 247 265 #define arch_atomic64_fetch_add_unless arch_atomic64_fetch_add_unless 248 266 249 - /* 250 - * arch_atomic64_dec_if_positive - decrement by 1 if old value positive 251 - * @v: pointer of type atomic_t 252 - * 253 - * The function returns the old value of *v minus 1, even if 254 - * the atomic variable, v, was not decremented. 255 - */ 256 267 static inline s64 arch_atomic64_dec_if_positive(atomic64_t *v) 257 268 { 258 269 s64 old, tmp;
-17
arch/arc/include/asm/atomic64-arcv2.h
··· 182 182 } 183 183 #define arch_atomic64_xchg arch_atomic64_xchg 184 184 185 - /** 186 - * arch_atomic64_dec_if_positive - decrement by 1 if old value positive 187 - * @v: pointer of type atomic64_t 188 - * 189 - * The function returns the old value of *v minus 1, even if 190 - * the atomic variable, v, was not decremented. 191 - */ 192 - 193 185 static inline s64 arch_atomic64_dec_if_positive(atomic64_t *v) 194 186 { 195 187 s64 val; ··· 206 214 } 207 215 #define arch_atomic64_dec_if_positive arch_atomic64_dec_if_positive 208 216 209 - /** 210 - * arch_atomic64_fetch_add_unless - add unless the number is a given value 211 - * @v: pointer of type atomic64_t 212 - * @a: the amount to add to v... 213 - * @u: ...unless v is equal to u. 214 - * 215 - * Atomically adds @a to @v, if it was not @u. 216 - * Returns the old value of @v 217 - */ 218 217 static inline s64 arch_atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u) 219 218 { 220 219 s64 old, temp;
-16
arch/hexagon/include/asm/atomic.h
··· 28 28 29 29 #define arch_atomic_set_release(v, i) arch_atomic_set((v), (i)) 30 30 31 - /** 32 - * arch_atomic_read - reads a word, atomically 33 - * @v: pointer to atomic value 34 - * 35 - * Assumes all word reads on our architecture are atomic. 36 - */ 37 31 #define arch_atomic_read(v) READ_ONCE((v)->counter) 38 32 39 33 #define ATOMIC_OP(op) \ ··· 105 111 #undef ATOMIC_FETCH_OP 106 112 #undef ATOMIC_OP_RETURN 107 113 #undef ATOMIC_OP 108 - 109 - /** 110 - * arch_atomic_fetch_add_unless - add unless the number is a given value 111 - * @v: pointer to value 112 - * @a: amount to add 113 - * @u: unless value is equal to u 114 - * 115 - * Returns old value. 116 - * 117 - */ 118 114 119 115 static inline int arch_atomic_fetch_add_unless(atomic_t *v, int a, int u) 120 116 {
-49
arch/loongarch/include/asm/atomic.h
··· 29 29 30 30 #define ATOMIC_INIT(i) { (i) } 31 31 32 - /* 33 - * arch_atomic_read - read atomic variable 34 - * @v: pointer of type atomic_t 35 - * 36 - * Atomically reads the value of @v. 37 - */ 38 32 #define arch_atomic_read(v) READ_ONCE((v)->counter) 39 - 40 - /* 41 - * arch_atomic_set - set atomic variable 42 - * @v: pointer of type atomic_t 43 - * @i: required value 44 - * 45 - * Atomically sets the value of @v to @i. 46 - */ 47 33 #define arch_atomic_set(v, i) WRITE_ONCE((v)->counter, (i)) 48 34 49 35 #define ATOMIC_OP(op, I, asm_op) \ ··· 125 139 } 126 140 #define arch_atomic_fetch_add_unless arch_atomic_fetch_add_unless 127 141 128 - /* 129 - * arch_atomic_sub_if_positive - conditionally subtract integer from atomic variable 130 - * @i: integer value to subtract 131 - * @v: pointer of type atomic_t 132 - * 133 - * Atomically test @v and subtract @i if @v is greater or equal than @i. 134 - * The function returns the old value of @v minus @i. 135 - */ 136 142 static inline int arch_atomic_sub_if_positive(int i, atomic_t *v) 137 143 { 138 144 int result; ··· 159 181 return result; 160 182 } 161 183 162 - /* 163 - * arch_atomic_dec_if_positive - decrement by 1 if old value positive 164 - * @v: pointer of type atomic_t 165 - */ 166 184 #define arch_atomic_dec_if_positive(v) arch_atomic_sub_if_positive(1, v) 167 185 168 186 #ifdef CONFIG_64BIT 169 187 170 188 #define ATOMIC64_INIT(i) { (i) } 171 189 172 - /* 173 - * arch_atomic64_read - read atomic variable 174 - * @v: pointer of type atomic64_t 175 - * 176 - */ 177 190 #define arch_atomic64_read(v) READ_ONCE((v)->counter) 178 - 179 - /* 180 - * arch_atomic64_set - set atomic variable 181 - * @v: pointer of type atomic64_t 182 - * @i: required value 183 - */ 184 191 #define arch_atomic64_set(v, i) WRITE_ONCE((v)->counter, (i)) 185 192 186 193 #define ATOMIC64_OP(op, I, asm_op) \ ··· 260 297 } 261 298 #define arch_atomic64_fetch_add_unless arch_atomic64_fetch_add_unless 262 299 263 - /* 264 - * 
arch_atomic64_sub_if_positive - conditionally subtract integer from atomic variable 265 - * @i: integer value to subtract 266 - * @v: pointer of type atomic64_t 267 - * 268 - * Atomically test @v and subtract @i if @v is greater or equal than @i. 269 - * The function returns the old value of @v minus @i. 270 - */ 271 300 static inline long arch_atomic64_sub_if_positive(long i, atomic64_t *v) 272 301 { 273 302 long result; ··· 294 339 return result; 295 340 } 296 341 297 - /* 298 - * arch_atomic64_dec_if_positive - decrement by 1 if old value positive 299 - * @v: pointer of type atomic64_t 300 - */ 301 342 #define arch_atomic64_dec_if_positive(v) arch_atomic64_sub_if_positive(1, v) 302 343 303 344 #endif /* CONFIG_64BIT */
-87
arch/x86/include/asm/atomic.h
··· 14 14 * resource counting etc.. 15 15 */ 16 16 17 - /** 18 - * arch_atomic_read - read atomic variable 19 - * @v: pointer of type atomic_t 20 - * 21 - * Atomically reads the value of @v. 22 - */ 23 17 static __always_inline int arch_atomic_read(const atomic_t *v) 24 18 { 25 19 /* ··· 23 29 return __READ_ONCE((v)->counter); 24 30 } 25 31 26 - /** 27 - * arch_atomic_set - set atomic variable 28 - * @v: pointer of type atomic_t 29 - * @i: required value 30 - * 31 - * Atomically sets the value of @v to @i. 32 - */ 33 32 static __always_inline void arch_atomic_set(atomic_t *v, int i) 34 33 { 35 34 __WRITE_ONCE(v->counter, i); 36 35 } 37 36 38 - /** 39 - * arch_atomic_add - add integer to atomic variable 40 - * @i: integer value to add 41 - * @v: pointer of type atomic_t 42 - * 43 - * Atomically adds @i to @v. 44 - */ 45 37 static __always_inline void arch_atomic_add(int i, atomic_t *v) 46 38 { 47 39 asm volatile(LOCK_PREFIX "addl %1,%0" ··· 35 55 : "ir" (i) : "memory"); 36 56 } 37 57 38 - /** 39 - * arch_atomic_sub - subtract integer from atomic variable 40 - * @i: integer value to subtract 41 - * @v: pointer of type atomic_t 42 - * 43 - * Atomically subtracts @i from @v. 44 - */ 45 58 static __always_inline void arch_atomic_sub(int i, atomic_t *v) 46 59 { 47 60 asm volatile(LOCK_PREFIX "subl %1,%0" ··· 42 69 : "ir" (i) : "memory"); 43 70 } 44 71 45 - /** 46 - * arch_atomic_sub_and_test - subtract value from variable and test result 47 - * @i: integer value to subtract 48 - * @v: pointer of type atomic_t 49 - * 50 - * Atomically subtracts @i from @v and returns 51 - * true if the result is zero, or false for all 52 - * other cases. 
53 - */ 54 72 static __always_inline bool arch_atomic_sub_and_test(int i, atomic_t *v) 55 73 { 56 74 return GEN_BINARY_RMWcc(LOCK_PREFIX "subl", v->counter, e, "er", i); 57 75 } 58 76 #define arch_atomic_sub_and_test arch_atomic_sub_and_test 59 77 60 - /** 61 - * arch_atomic_inc - increment atomic variable 62 - * @v: pointer of type atomic_t 63 - * 64 - * Atomically increments @v by 1. 65 - */ 66 78 static __always_inline void arch_atomic_inc(atomic_t *v) 67 79 { 68 80 asm volatile(LOCK_PREFIX "incl %0" ··· 55 97 } 56 98 #define arch_atomic_inc arch_atomic_inc 57 99 58 - /** 59 - * arch_atomic_dec - decrement atomic variable 60 - * @v: pointer of type atomic_t 61 - * 62 - * Atomically decrements @v by 1. 63 - */ 64 100 static __always_inline void arch_atomic_dec(atomic_t *v) 65 101 { 66 102 asm volatile(LOCK_PREFIX "decl %0" ··· 62 110 } 63 111 #define arch_atomic_dec arch_atomic_dec 64 112 65 - /** 66 - * arch_atomic_dec_and_test - decrement and test 67 - * @v: pointer of type atomic_t 68 - * 69 - * Atomically decrements @v by 1 and 70 - * returns true if the result is 0, or false for all other 71 - * cases. 72 - */ 73 113 static __always_inline bool arch_atomic_dec_and_test(atomic_t *v) 74 114 { 75 115 return GEN_UNARY_RMWcc(LOCK_PREFIX "decl", v->counter, e); 76 116 } 77 117 #define arch_atomic_dec_and_test arch_atomic_dec_and_test 78 118 79 - /** 80 - * arch_atomic_inc_and_test - increment and test 81 - * @v: pointer of type atomic_t 82 - * 83 - * Atomically increments @v by 1 84 - * and returns true if the result is zero, or false for all 85 - * other cases. 
86 - */ 87 119 static __always_inline bool arch_atomic_inc_and_test(atomic_t *v) 88 120 { 89 121 return GEN_UNARY_RMWcc(LOCK_PREFIX "incl", v->counter, e); 90 122 } 91 123 #define arch_atomic_inc_and_test arch_atomic_inc_and_test 92 124 93 - /** 94 - * arch_atomic_add_negative - add and test if negative 95 - * @i: integer value to add 96 - * @v: pointer of type atomic_t 97 - * 98 - * Atomically adds @i to @v and returns true 99 - * if the result is negative, or false when 100 - * result is greater than or equal to zero. 101 - */ 102 125 static __always_inline bool arch_atomic_add_negative(int i, atomic_t *v) 103 126 { 104 127 return GEN_BINARY_RMWcc(LOCK_PREFIX "addl", v->counter, s, "er", i); 105 128 } 106 129 #define arch_atomic_add_negative arch_atomic_add_negative 107 130 108 - /** 109 - * arch_atomic_add_return - add integer and return 110 - * @i: integer value to add 111 - * @v: pointer of type atomic_t 112 - * 113 - * Atomically adds @i to @v and returns @i + @v 114 - */ 115 131 static __always_inline int arch_atomic_add_return(int i, atomic_t *v) 116 132 { 117 133 return i + xadd(&v->counter, i); 118 134 } 119 135 #define arch_atomic_add_return arch_atomic_add_return 120 136 121 - /** 122 - * arch_atomic_sub_return - subtract integer and return 123 - * @v: pointer of type atomic_t 124 - * @i: integer value to subtract 125 - * 126 - * Atomically subtracts @i from @v and returns @v - @i 127 - */ 128 137 static __always_inline int arch_atomic_sub_return(int i, atomic_t *v) 129 138 { 130 139 return arch_atomic_add_return(-i, v);
-76
arch/x86/include/asm/atomic64_32.h
··· 61 61 #undef __ATOMIC64_DECL 62 62 #undef ATOMIC64_EXPORT 63 63 64 - /** 65 - * arch_atomic64_cmpxchg - cmpxchg atomic64 variable 66 - * @v: pointer to type atomic64_t 67 - * @o: expected value 68 - * @n: new value 69 - * 70 - * Atomically sets @v to @n if it was equal to @o and returns 71 - * the old value. 72 - */ 73 - 74 64 static __always_inline s64 arch_atomic64_cmpxchg(atomic64_t *v, s64 o, s64 n) 75 65 { 76 66 return arch_cmpxchg64(&v->counter, o, n); 77 67 } 78 68 #define arch_atomic64_cmpxchg arch_atomic64_cmpxchg 79 69 80 - /** 81 - * arch_atomic64_xchg - xchg atomic64 variable 82 - * @v: pointer to type atomic64_t 83 - * @n: value to assign 84 - * 85 - * Atomically xchgs the value of @v to @n and returns 86 - * the old value. 87 - */ 88 70 static __always_inline s64 arch_atomic64_xchg(atomic64_t *v, s64 n) 89 71 { 90 72 s64 o; ··· 79 97 } 80 98 #define arch_atomic64_xchg arch_atomic64_xchg 81 99 82 - /** 83 - * arch_atomic64_set - set atomic64 variable 84 - * @v: pointer to type atomic64_t 85 - * @i: value to assign 86 - * 87 - * Atomically sets the value of @v to @n. 88 - */ 89 100 static __always_inline void arch_atomic64_set(atomic64_t *v, s64 i) 90 101 { 91 102 unsigned high = (unsigned)(i >> 32); ··· 88 113 : "eax", "edx", "memory"); 89 114 } 90 115 91 - /** 92 - * arch_atomic64_read - read atomic64 variable 93 - * @v: pointer to type atomic64_t 94 - * 95 - * Atomically reads the value of @v and returns it. 
96 - */ 97 116 static __always_inline s64 arch_atomic64_read(const atomic64_t *v) 98 117 { 99 118 s64 r; ··· 95 126 return r; 96 127 } 97 128 98 - /** 99 - * arch_atomic64_add_return - add and return 100 - * @i: integer value to add 101 - * @v: pointer to type atomic64_t 102 - * 103 - * Atomically adds @i to @v and returns @i + *@v 104 - */ 105 129 static __always_inline s64 arch_atomic64_add_return(s64 i, atomic64_t *v) 106 130 { 107 131 alternative_atomic64(add_return, ··· 104 142 } 105 143 #define arch_atomic64_add_return arch_atomic64_add_return 106 144 107 - /* 108 - * Other variants with different arithmetic operators: 109 - */ 110 145 static __always_inline s64 arch_atomic64_sub_return(s64 i, atomic64_t *v) 111 146 { 112 147 alternative_atomic64(sub_return, ··· 131 172 } 132 173 #define arch_atomic64_dec_return arch_atomic64_dec_return 133 174 134 - /** 135 - * arch_atomic64_add - add integer to atomic64 variable 136 - * @i: integer value to add 137 - * @v: pointer to type atomic64_t 138 - * 139 - * Atomically adds @i to @v. 140 - */ 141 175 static __always_inline s64 arch_atomic64_add(s64 i, atomic64_t *v) 142 176 { 143 177 __alternative_atomic64(add, add_return, ··· 139 187 return i; 140 188 } 141 189 142 - /** 143 - * arch_atomic64_sub - subtract the atomic64 variable 144 - * @i: integer value to subtract 145 - * @v: pointer to type atomic64_t 146 - * 147 - * Atomically subtracts @i from @v. 148 - */ 149 190 static __always_inline s64 arch_atomic64_sub(s64 i, atomic64_t *v) 150 191 { 151 192 __alternative_atomic64(sub, sub_return, ··· 147 202 return i; 148 203 } 149 204 150 - /** 151 - * arch_atomic64_inc - increment atomic64 variable 152 - * @v: pointer to type atomic64_t 153 - * 154 - * Atomically increments @v by 1. 
155 - */ 156 205 static __always_inline void arch_atomic64_inc(atomic64_t *v) 157 206 { 158 207 __alternative_atomic64(inc, inc_return, /* no output */, ··· 154 215 } 155 216 #define arch_atomic64_inc arch_atomic64_inc 156 217 157 - /** 158 - * arch_atomic64_dec - decrement atomic64 variable 159 - * @v: pointer to type atomic64_t 160 - * 161 - * Atomically decrements @v by 1. 162 - */ 163 218 static __always_inline void arch_atomic64_dec(atomic64_t *v) 164 219 { 165 220 __alternative_atomic64(dec, dec_return, /* no output */, ··· 161 228 } 162 229 #define arch_atomic64_dec arch_atomic64_dec 163 230 164 - /** 165 - * arch_atomic64_add_unless - add unless the number is a given value 166 - * @v: pointer of type atomic64_t 167 - * @a: the amount to add to v... 168 - * @u: ...unless v is equal to u. 169 - * 170 - * Atomically adds @a to @v, so long as it was not @u. 171 - * Returns non-zero if the add was done, zero otherwise. 172 - */ 173 231 static __always_inline int arch_atomic64_add_unless(atomic64_t *v, s64 a, s64 u) 174 232 { 175 233 unsigned low = (unsigned)u;
-81
arch/x86/include/asm/atomic64_64.h
··· 10 10 11 11 #define ATOMIC64_INIT(i) { (i) } 12 12 13 - /** 14 - * arch_atomic64_read - read atomic64 variable 15 - * @v: pointer of type atomic64_t 16 - * 17 - * Atomically reads the value of @v. 18 - * Doesn't imply a read memory barrier. 19 - */ 20 13 static __always_inline s64 arch_atomic64_read(const atomic64_t *v) 21 14 { 22 15 return __READ_ONCE((v)->counter); 23 16 } 24 17 25 - /** 26 - * arch_atomic64_set - set atomic64 variable 27 - * @v: pointer to type atomic64_t 28 - * @i: required value 29 - * 30 - * Atomically sets the value of @v to @i. 31 - */ 32 18 static __always_inline void arch_atomic64_set(atomic64_t *v, s64 i) 33 19 { 34 20 __WRITE_ONCE(v->counter, i); 35 21 } 36 22 37 - /** 38 - * arch_atomic64_add - add integer to atomic64 variable 39 - * @i: integer value to add 40 - * @v: pointer to type atomic64_t 41 - * 42 - * Atomically adds @i to @v. 43 - */ 44 23 static __always_inline void arch_atomic64_add(s64 i, atomic64_t *v) 45 24 { 46 25 asm volatile(LOCK_PREFIX "addq %1,%0" ··· 27 48 : "er" (i), "m" (v->counter) : "memory"); 28 49 } 29 50 30 - /** 31 - * arch_atomic64_sub - subtract the atomic64 variable 32 - * @i: integer value to subtract 33 - * @v: pointer to type atomic64_t 34 - * 35 - * Atomically subtracts @i from @v. 36 - */ 37 51 static __always_inline void arch_atomic64_sub(s64 i, atomic64_t *v) 38 52 { 39 53 asm volatile(LOCK_PREFIX "subq %1,%0" ··· 34 62 : "er" (i), "m" (v->counter) : "memory"); 35 63 } 36 64 37 - /** 38 - * arch_atomic64_sub_and_test - subtract value from variable and test result 39 - * @i: integer value to subtract 40 - * @v: pointer to type atomic64_t 41 - * 42 - * Atomically subtracts @i from @v and returns 43 - * true if the result is zero, or false for all 44 - * other cases. 
45 - */ 46 65 static __always_inline bool arch_atomic64_sub_and_test(s64 i, atomic64_t *v) 47 66 { 48 67 return GEN_BINARY_RMWcc(LOCK_PREFIX "subq", v->counter, e, "er", i); 49 68 } 50 69 #define arch_atomic64_sub_and_test arch_atomic64_sub_and_test 51 70 52 - /** 53 - * arch_atomic64_inc - increment atomic64 variable 54 - * @v: pointer to type atomic64_t 55 - * 56 - * Atomically increments @v by 1. 57 - */ 58 71 static __always_inline void arch_atomic64_inc(atomic64_t *v) 59 72 { 60 73 asm volatile(LOCK_PREFIX "incq %0" ··· 48 91 } 49 92 #define arch_atomic64_inc arch_atomic64_inc 50 93 51 - /** 52 - * arch_atomic64_dec - decrement atomic64 variable 53 - * @v: pointer to type atomic64_t 54 - * 55 - * Atomically decrements @v by 1. 56 - */ 57 94 static __always_inline void arch_atomic64_dec(atomic64_t *v) 58 95 { 59 96 asm volatile(LOCK_PREFIX "decq %0" ··· 56 105 } 57 106 #define arch_atomic64_dec arch_atomic64_dec 58 107 59 - /** 60 - * arch_atomic64_dec_and_test - decrement and test 61 - * @v: pointer to type atomic64_t 62 - * 63 - * Atomically decrements @v by 1 and 64 - * returns true if the result is 0, or false for all other 65 - * cases. 66 - */ 67 108 static __always_inline bool arch_atomic64_dec_and_test(atomic64_t *v) 68 109 { 69 110 return GEN_UNARY_RMWcc(LOCK_PREFIX "decq", v->counter, e); 70 111 } 71 112 #define arch_atomic64_dec_and_test arch_atomic64_dec_and_test 72 113 73 - /** 74 - * arch_atomic64_inc_and_test - increment and test 75 - * @v: pointer to type atomic64_t 76 - * 77 - * Atomically increments @v by 1 78 - * and returns true if the result is zero, or false for all 79 - * other cases. 
80 - */ 81 114 static __always_inline bool arch_atomic64_inc_and_test(atomic64_t *v) 82 115 { 83 116 return GEN_UNARY_RMWcc(LOCK_PREFIX "incq", v->counter, e); 84 117 } 85 118 #define arch_atomic64_inc_and_test arch_atomic64_inc_and_test 86 119 87 - /** 88 - * arch_atomic64_add_negative - add and test if negative 89 - * @i: integer value to add 90 - * @v: pointer to type atomic64_t 91 - * 92 - * Atomically adds @i to @v and returns true 93 - * if the result is negative, or false when 94 - * result is greater than or equal to zero. 95 - */ 96 120 static __always_inline bool arch_atomic64_add_negative(s64 i, atomic64_t *v) 97 121 { 98 122 return GEN_BINARY_RMWcc(LOCK_PREFIX "addq", v->counter, s, "er", i); 99 123 } 100 124 #define arch_atomic64_add_negative arch_atomic64_add_negative 101 125 102 - /** 103 - * arch_atomic64_add_return - add and return 104 - * @i: integer value to add 105 - * @v: pointer to type atomic64_t 106 - * 107 - * Atomically adds @i to @v and returns @i + @v 108 - */ 109 126 static __always_inline s64 arch_atomic64_add_return(s64 i, atomic64_t *v) 110 127 { 111 128 return i + xadd(&v->counter, i);