/*
 * Source: Linux kernel mirror (for testing)
 * git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
 * tags: kernel, os, linux
 */
1// SPDX-License-Identifier: GPL-2.0
2
3// Generated by scripts/atomic/gen-atomic-instrumented.sh
4// DO NOT MODIFY THIS FILE DIRECTLY
5
6/*
7 * This file provides wrappers with KASAN instrumentation for atomic operations.
8 * To use this functionality an arch's atomic.h file needs to define all
9 * atomic operations with arch_ prefix (e.g. arch_atomic_read()) and include
10 * this file at the end. This file provides atomic_read() that forwards to
11 * arch_atomic_read() for actual atomic operation.
12 * Note: if an arch atomic operation is implemented by means of other atomic
13 * operations (e.g. atomic_read()/atomic_cmpxchg() loop), then it needs to use
14 * arch_ variants (i.e. arch_atomic_read()/arch_atomic_cmpxchg()) to avoid
15 * double instrumentation.
16 */
17#ifndef _ASM_GENERIC_ATOMIC_INSTRUMENTED_H
18#define _ASM_GENERIC_ATOMIC_INSTRUMENTED_H
19
20#include <linux/build_bug.h>
21#include <linux/compiler.h>
22#include <linux/instrumented.h>
23
/*
 * Plain loads and stores on atomic_t: report the access to the
 * sanitizers (KASAN, per the file header) and forward to the arch_
 * implementation.  Loads are flagged as atomic reads, stores as atomic
 * writes; _acquire/_release select the memory ordering.
 */
static __always_inline int
atomic_read(const atomic_t *v)
{
	instrument_atomic_read(v, sizeof(*v));
	return arch_atomic_read(v);
}

static __always_inline int
atomic_read_acquire(const atomic_t *v)
{
	instrument_atomic_read(v, sizeof(*v));
	return arch_atomic_read_acquire(v);
}

static __always_inline void
atomic_set(atomic_t *v, int i)
{
	instrument_atomic_write(v, sizeof(*v));
	arch_atomic_set(v, i);
}

static __always_inline void
atomic_set_release(atomic_t *v, int i)
{
	instrument_atomic_write(v, sizeof(*v));
	arch_atomic_set_release(v, i);
}
51
/*
 * Arithmetic RMW operations (add/sub) on atomic_t.  Each is flagged as
 * an atomic read-write access before forwarding to the arch_ op.  The
 * suffixes follow the usual atomic_t conventions (see
 * Documentation/atomic_t.txt): *_return variants return the new value,
 * fetch_* variants the old value, and _acquire/_release/_relaxed select
 * the memory ordering.
 */
static __always_inline void
atomic_add(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	arch_atomic_add(i, v);
}

static __always_inline int
atomic_add_return(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_add_return(i, v);
}

static __always_inline int
atomic_add_return_acquire(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_add_return_acquire(i, v);
}

static __always_inline int
atomic_add_return_release(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_add_return_release(i, v);
}

static __always_inline int
atomic_add_return_relaxed(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_add_return_relaxed(i, v);
}

static __always_inline int
atomic_fetch_add(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_add(i, v);
}

static __always_inline int
atomic_fetch_add_acquire(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_add_acquire(i, v);
}

static __always_inline int
atomic_fetch_add_release(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_add_release(i, v);
}

static __always_inline int
atomic_fetch_add_relaxed(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_add_relaxed(i, v);
}

/* Subtraction mirrors the add family above. */
static __always_inline void
atomic_sub(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	arch_atomic_sub(i, v);
}

static __always_inline int
atomic_sub_return(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_sub_return(i, v);
}

static __always_inline int
atomic_sub_return_acquire(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_sub_return_acquire(i, v);
}

static __always_inline int
atomic_sub_return_release(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_sub_return_release(i, v);
}

static __always_inline int
atomic_sub_return_relaxed(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_sub_return_relaxed(i, v);
}

static __always_inline int
atomic_fetch_sub(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_sub(i, v);
}

static __always_inline int
atomic_fetch_sub_acquire(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_sub_acquire(i, v);
}

static __always_inline int
atomic_fetch_sub_release(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_sub_release(i, v);
}

static __always_inline int
atomic_fetch_sub_relaxed(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_sub_relaxed(i, v);
}
177
/*
 * Increment/decrement RMW operations on atomic_t, instrumented as
 * atomic read-write accesses.  Same suffix conventions as the add/sub
 * family: *_return returns the new value, fetch_* the old value,
 * _acquire/_release/_relaxed pick the ordering.
 */
static __always_inline void
atomic_inc(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	arch_atomic_inc(v);
}

static __always_inline int
atomic_inc_return(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_inc_return(v);
}

static __always_inline int
atomic_inc_return_acquire(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_inc_return_acquire(v);
}

static __always_inline int
atomic_inc_return_release(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_inc_return_release(v);
}

static __always_inline int
atomic_inc_return_relaxed(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_inc_return_relaxed(v);
}

static __always_inline int
atomic_fetch_inc(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_inc(v);
}

static __always_inline int
atomic_fetch_inc_acquire(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_inc_acquire(v);
}

static __always_inline int
atomic_fetch_inc_release(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_inc_release(v);
}

static __always_inline int
atomic_fetch_inc_relaxed(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_inc_relaxed(v);
}

/* Decrement mirrors the increment family above. */
static __always_inline void
atomic_dec(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	arch_atomic_dec(v);
}

static __always_inline int
atomic_dec_return(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_dec_return(v);
}

static __always_inline int
atomic_dec_return_acquire(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_dec_return_acquire(v);
}

static __always_inline int
atomic_dec_return_release(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_dec_return_release(v);
}

static __always_inline int
atomic_dec_return_relaxed(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_dec_return_relaxed(v);
}

static __always_inline int
atomic_fetch_dec(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_dec(v);
}

static __always_inline int
atomic_fetch_dec_acquire(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_dec_acquire(v);
}

static __always_inline int
atomic_fetch_dec_release(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_dec_release(v);
}

static __always_inline int
atomic_fetch_dec_relaxed(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_dec_relaxed(v);
}
303
/*
 * Bitwise RMW operations (and/andnot/or/xor) on atomic_t, instrumented
 * as atomic read-write accesses.  Only fetch_* variants return a value
 * (the old one); _acquire/_release/_relaxed pick the ordering.
 */
static __always_inline void
atomic_and(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	arch_atomic_and(i, v);
}

static __always_inline int
atomic_fetch_and(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_and(i, v);
}

static __always_inline int
atomic_fetch_and_acquire(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_and_acquire(i, v);
}

static __always_inline int
atomic_fetch_and_release(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_and_release(i, v);
}

static __always_inline int
atomic_fetch_and_relaxed(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_and_relaxed(i, v);
}

static __always_inline void
atomic_andnot(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	arch_atomic_andnot(i, v);
}

static __always_inline int
atomic_fetch_andnot(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_andnot(i, v);
}

static __always_inline int
atomic_fetch_andnot_acquire(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_andnot_acquire(i, v);
}

static __always_inline int
atomic_fetch_andnot_release(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_andnot_release(i, v);
}

static __always_inline int
atomic_fetch_andnot_relaxed(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_andnot_relaxed(i, v);
}

static __always_inline void
atomic_or(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	arch_atomic_or(i, v);
}

static __always_inline int
atomic_fetch_or(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_or(i, v);
}

static __always_inline int
atomic_fetch_or_acquire(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_or_acquire(i, v);
}

static __always_inline int
atomic_fetch_or_release(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_or_release(i, v);
}

static __always_inline int
atomic_fetch_or_relaxed(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_or_relaxed(i, v);
}

static __always_inline void
atomic_xor(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	arch_atomic_xor(i, v);
}

static __always_inline int
atomic_fetch_xor(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_xor(i, v);
}

static __always_inline int
atomic_fetch_xor_acquire(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_xor_acquire(i, v);
}

static __always_inline int
atomic_fetch_xor_release(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_xor_release(i, v);
}

static __always_inline int
atomic_fetch_xor_relaxed(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_xor_relaxed(i, v);
}
443
/*
 * Exchange operations on atomic_t, instrumented as atomic read-write
 * accesses (the old value is read, a new one written).  All return the
 * previous value of *v.
 */
static __always_inline int
atomic_xchg(atomic_t *v, int i)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_xchg(v, i);
}

static __always_inline int
atomic_xchg_acquire(atomic_t *v, int i)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_xchg_acquire(v, i);
}

static __always_inline int
atomic_xchg_release(atomic_t *v, int i)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_xchg_release(v, i);
}

static __always_inline int
atomic_xchg_relaxed(atomic_t *v, int i)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_xchg_relaxed(v, i);
}

static __always_inline int
atomic_cmpxchg(atomic_t *v, int old, int new)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_cmpxchg(v, old, new);
}

static __always_inline int
atomic_cmpxchg_acquire(atomic_t *v, int old, int new)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_cmpxchg_acquire(v, old, new);
}

static __always_inline int
atomic_cmpxchg_release(atomic_t *v, int old, int new)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_cmpxchg_release(v, old, new);
}

static __always_inline int
atomic_cmpxchg_relaxed(atomic_t *v, int old, int new)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_cmpxchg_relaxed(v, old, new);
}

/*
 * try_cmpxchg takes the expected value by pointer; on failure the arch
 * op updates *old, so both the atomic variable and the caller's
 * snapshot are instrumented as read-write accesses.
 */
static __always_inline bool
atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
	instrument_atomic_read_write(v, sizeof(*v));
	instrument_atomic_read_write(old, sizeof(*old));
	return arch_atomic_try_cmpxchg(v, old, new);
}

static __always_inline bool
atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
{
	instrument_atomic_read_write(v, sizeof(*v));
	instrument_atomic_read_write(old, sizeof(*old));
	return arch_atomic_try_cmpxchg_acquire(v, old, new);
}

static __always_inline bool
atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
{
	instrument_atomic_read_write(v, sizeof(*v));
	instrument_atomic_read_write(old, sizeof(*old));
	return arch_atomic_try_cmpxchg_release(v, old, new);
}

static __always_inline bool
atomic_try_cmpxchg_relaxed(atomic_t *v, int *old, int new)
{
	instrument_atomic_read_write(v, sizeof(*v));
	instrument_atomic_read_write(old, sizeof(*old));
	return arch_atomic_try_cmpxchg_relaxed(v, old, new);
}
531
/*
 * Test and conditional RMW operations on atomic_t, instrumented as
 * atomic read-write accesses before forwarding to the arch_ op.
 */
static __always_inline bool
atomic_sub_and_test(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_sub_and_test(i, v);
}

static __always_inline bool
atomic_dec_and_test(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_dec_and_test(v);
}

static __always_inline bool
atomic_inc_and_test(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_inc_and_test(v);
}

static __always_inline bool
atomic_add_negative(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_add_negative(i, v);
}

static __always_inline int
atomic_fetch_add_unless(atomic_t *v, int a, int u)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_add_unless(v, a, u);
}

static __always_inline bool
atomic_add_unless(atomic_t *v, int a, int u)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_add_unless(v, a, u);
}

static __always_inline bool
atomic_inc_not_zero(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_inc_not_zero(v);
}

static __always_inline bool
atomic_inc_unless_negative(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_inc_unless_negative(v);
}

static __always_inline bool
atomic_dec_unless_positive(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_dec_unless_positive(v);
}

static __always_inline int
atomic_dec_if_positive(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_dec_if_positive(v);
}
601
/*
 * 64-bit counterparts of the atomic_t load/store wrappers above:
 * instrument the access, then forward to the arch_atomic64_ op.
 */
static __always_inline s64
atomic64_read(const atomic64_t *v)
{
	instrument_atomic_read(v, sizeof(*v));
	return arch_atomic64_read(v);
}

static __always_inline s64
atomic64_read_acquire(const atomic64_t *v)
{
	instrument_atomic_read(v, sizeof(*v));
	return arch_atomic64_read_acquire(v);
}

static __always_inline void
atomic64_set(atomic64_t *v, s64 i)
{
	instrument_atomic_write(v, sizeof(*v));
	arch_atomic64_set(v, i);
}

static __always_inline void
atomic64_set_release(atomic64_t *v, s64 i)
{
	instrument_atomic_write(v, sizeof(*v));
	arch_atomic64_set_release(v, i);
}
629
/*
 * 64-bit arithmetic RMW operations (add/sub), instrumented as atomic
 * read-write accesses.  Same suffix conventions as the 32-bit family.
 */
static __always_inline void
atomic64_add(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	arch_atomic64_add(i, v);
}

static __always_inline s64
atomic64_add_return(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_add_return(i, v);
}

static __always_inline s64
atomic64_add_return_acquire(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_add_return_acquire(i, v);
}

static __always_inline s64
atomic64_add_return_release(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_add_return_release(i, v);
}

static __always_inline s64
atomic64_add_return_relaxed(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_add_return_relaxed(i, v);
}

static __always_inline s64
atomic64_fetch_add(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_add(i, v);
}

static __always_inline s64
atomic64_fetch_add_acquire(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_add_acquire(i, v);
}

static __always_inline s64
atomic64_fetch_add_release(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_add_release(i, v);
}

static __always_inline s64
atomic64_fetch_add_relaxed(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_add_relaxed(i, v);
}

/* Subtraction mirrors the add family above. */
static __always_inline void
atomic64_sub(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	arch_atomic64_sub(i, v);
}

static __always_inline s64
atomic64_sub_return(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_sub_return(i, v);
}

static __always_inline s64
atomic64_sub_return_acquire(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_sub_return_acquire(i, v);
}

static __always_inline s64
atomic64_sub_return_release(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_sub_return_release(i, v);
}

static __always_inline s64
atomic64_sub_return_relaxed(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_sub_return_relaxed(i, v);
}

static __always_inline s64
atomic64_fetch_sub(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_sub(i, v);
}

static __always_inline s64
atomic64_fetch_sub_acquire(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_sub_acquire(i, v);
}

static __always_inline s64
atomic64_fetch_sub_release(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_sub_release(i, v);
}

static __always_inline s64
atomic64_fetch_sub_relaxed(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_sub_relaxed(i, v);
}
755
/*
 * 64-bit increment/decrement RMW operations, instrumented as atomic
 * read-write accesses.  Same suffix conventions as the 32-bit family.
 */
static __always_inline void
atomic64_inc(atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	arch_atomic64_inc(v);
}

static __always_inline s64
atomic64_inc_return(atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_inc_return(v);
}

static __always_inline s64
atomic64_inc_return_acquire(atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_inc_return_acquire(v);
}

static __always_inline s64
atomic64_inc_return_release(atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_inc_return_release(v);
}

static __always_inline s64
atomic64_inc_return_relaxed(atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_inc_return_relaxed(v);
}

static __always_inline s64
atomic64_fetch_inc(atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_inc(v);
}

static __always_inline s64
atomic64_fetch_inc_acquire(atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_inc_acquire(v);
}

static __always_inline s64
atomic64_fetch_inc_release(atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_inc_release(v);
}

static __always_inline s64
atomic64_fetch_inc_relaxed(atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_inc_relaxed(v);
}

/* Decrement mirrors the increment family above. */
static __always_inline void
atomic64_dec(atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	arch_atomic64_dec(v);
}

static __always_inline s64
atomic64_dec_return(atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_dec_return(v);
}

static __always_inline s64
atomic64_dec_return_acquire(atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_dec_return_acquire(v);
}

static __always_inline s64
atomic64_dec_return_release(atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_dec_return_release(v);
}

static __always_inline s64
atomic64_dec_return_relaxed(atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_dec_return_relaxed(v);
}

static __always_inline s64
atomic64_fetch_dec(atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_dec(v);
}

static __always_inline s64
atomic64_fetch_dec_acquire(atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_dec_acquire(v);
}

static __always_inline s64
atomic64_fetch_dec_release(atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_dec_release(v);
}

static __always_inline s64
atomic64_fetch_dec_relaxed(atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_dec_relaxed(v);
}
881
/*
 * 64-bit bitwise RMW operations (and/andnot/or/xor), instrumented as
 * atomic read-write accesses.  fetch_* variants return the old value.
 */
static __always_inline void
atomic64_and(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	arch_atomic64_and(i, v);
}

static __always_inline s64
atomic64_fetch_and(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_and(i, v);
}

static __always_inline s64
atomic64_fetch_and_acquire(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_and_acquire(i, v);
}

static __always_inline s64
atomic64_fetch_and_release(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_and_release(i, v);
}

static __always_inline s64
atomic64_fetch_and_relaxed(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_and_relaxed(i, v);
}

static __always_inline void
atomic64_andnot(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	arch_atomic64_andnot(i, v);
}

static __always_inline s64
atomic64_fetch_andnot(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_andnot(i, v);
}

static __always_inline s64
atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_andnot_acquire(i, v);
}

static __always_inline s64
atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_andnot_release(i, v);
}

static __always_inline s64
atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_andnot_relaxed(i, v);
}

static __always_inline void
atomic64_or(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	arch_atomic64_or(i, v);
}

static __always_inline s64
atomic64_fetch_or(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_or(i, v);
}

static __always_inline s64
atomic64_fetch_or_acquire(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_or_acquire(i, v);
}

static __always_inline s64
atomic64_fetch_or_release(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_or_release(i, v);
}

static __always_inline s64
atomic64_fetch_or_relaxed(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_or_relaxed(i, v);
}

static __always_inline void
atomic64_xor(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	arch_atomic64_xor(i, v);
}

static __always_inline s64
atomic64_fetch_xor(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_xor(i, v);
}

static __always_inline s64
atomic64_fetch_xor_acquire(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_xor_acquire(i, v);
}

static __always_inline s64
atomic64_fetch_xor_release(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_xor_release(i, v);
}

static __always_inline s64
atomic64_fetch_xor_relaxed(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_xor_relaxed(i, v);
}
1021
/*
 * 64-bit exchange operations, instrumented as atomic read-write
 * accesses; all return the previous value of *v.
 */
static __always_inline s64
atomic64_xchg(atomic64_t *v, s64 i)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_xchg(v, i);
}

static __always_inline s64
atomic64_xchg_acquire(atomic64_t *v, s64 i)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_xchg_acquire(v, i);
}

static __always_inline s64
atomic64_xchg_release(atomic64_t *v, s64 i)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_xchg_release(v, i);
}

static __always_inline s64
atomic64_xchg_relaxed(atomic64_t *v, s64 i)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_xchg_relaxed(v, i);
}

static __always_inline s64
atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_cmpxchg(v, old, new);
}

static __always_inline s64
atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_cmpxchg_acquire(v, old, new);
}

static __always_inline s64
atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_cmpxchg_release(v, old, new);
}

static __always_inline s64
atomic64_cmpxchg_relaxed(atomic64_t *v, s64 old, s64 new)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_cmpxchg_relaxed(v, old, new);
}

/*
 * try_cmpxchg: *old may be updated on failure, so the caller's snapshot
 * is instrumented as a read-write access alongside the atomic variable.
 */
static __always_inline bool
atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
	instrument_atomic_read_write(v, sizeof(*v));
	instrument_atomic_read_write(old, sizeof(*old));
	return arch_atomic64_try_cmpxchg(v, old, new);
}

static __always_inline bool
atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
{
	instrument_atomic_read_write(v, sizeof(*v));
	instrument_atomic_read_write(old, sizeof(*old));
	return arch_atomic64_try_cmpxchg_acquire(v, old, new);
}

static __always_inline bool
atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
{
	instrument_atomic_read_write(v, sizeof(*v));
	instrument_atomic_read_write(old, sizeof(*old));
	return arch_atomic64_try_cmpxchg_release(v, old, new);
}

static __always_inline bool
atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new)
{
	instrument_atomic_read_write(v, sizeof(*v));
	instrument_atomic_read_write(old, sizeof(*old));
	return arch_atomic64_try_cmpxchg_relaxed(v, old, new);
}
1109
/*
 * 64-bit test and conditional RMW operations, instrumented as atomic
 * read-write accesses before forwarding to the arch_ op.
 */
static __always_inline bool
atomic64_sub_and_test(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_sub_and_test(i, v);
}

static __always_inline bool
atomic64_dec_and_test(atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_dec_and_test(v);
}

static __always_inline bool
atomic64_inc_and_test(atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_inc_and_test(v);
}

static __always_inline bool
atomic64_add_negative(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_add_negative(i, v);
}

static __always_inline s64
atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_add_unless(v, a, u);
}

static __always_inline bool
atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_add_unless(v, a, u);
}

static __always_inline bool
atomic64_inc_not_zero(atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_inc_not_zero(v);
}

static __always_inline bool
atomic64_inc_unless_negative(atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_inc_unless_negative(v);
}

static __always_inline bool
atomic64_dec_unless_positive(atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_dec_unless_positive(v);
}

static __always_inline s64
atomic64_dec_if_positive(atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_dec_if_positive(v);
}
1179
/*
 * Instrumented xchg() wrappers.  xchg is a read-modify-write operation
 * (the old value is read, a new one stored), so instrument it as an
 * atomic read-write access -- matching the atomic_xchg()/atomic64_xchg()
 * wrappers above, which already use instrument_atomic_read_write().
 * Instrumenting it as a plain write would hide the read half from the
 * sanitizers.  (ptr) is evaluated exactly once via __ai_ptr.
 */
#define xchg(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr));	\
	arch_xchg(__ai_ptr, __VA_ARGS__);				\
})

#define xchg_acquire(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr));	\
	arch_xchg_acquire(__ai_ptr, __VA_ARGS__);			\
})

#define xchg_release(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr));	\
	arch_xchg_release(__ai_ptr, __VA_ARGS__);			\
})

#define xchg_relaxed(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr));	\
	arch_xchg_relaxed(__ai_ptr, __VA_ARGS__);			\
})
1207
/*
 * cmpxchg() and its ordering variants read *ptr and conditionally write
 * it. Instrument them as atomic read-write accesses rather than
 * write-only, so KCSAN can also detect races against the read/compare
 * side of the operation.
 */
#define cmpxchg(ptr, ...) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
	arch_cmpxchg(__ai_ptr, __VA_ARGS__); \
})

#define cmpxchg_acquire(ptr, ...) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
	arch_cmpxchg_acquire(__ai_ptr, __VA_ARGS__); \
})

#define cmpxchg_release(ptr, ...) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
	arch_cmpxchg_release(__ai_ptr, __VA_ARGS__); \
})

#define cmpxchg_relaxed(ptr, ...) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
	arch_cmpxchg_relaxed(__ai_ptr, __VA_ARGS__); \
})
1235
/*
 * cmpxchg64() and its ordering variants read *ptr and conditionally
 * write it. Instrument them as atomic read-write accesses rather than
 * write-only, so KCSAN can also detect races against the read/compare
 * side of the operation.
 */
#define cmpxchg64(ptr, ...) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
	arch_cmpxchg64(__ai_ptr, __VA_ARGS__); \
})

#define cmpxchg64_acquire(ptr, ...) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
	arch_cmpxchg64_acquire(__ai_ptr, __VA_ARGS__); \
})

#define cmpxchg64_release(ptr, ...) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
	arch_cmpxchg64_release(__ai_ptr, __VA_ARGS__); \
})

#define cmpxchg64_relaxed(ptr, ...) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
	arch_cmpxchg64_relaxed(__ai_ptr, __VA_ARGS__); \
})
1263
/*
 * try_cmpxchg() atomically reads and conditionally writes *ptr, so
 * instrument it as an atomic read-write access. The 'old' value at
 * *oldp is both read (the expected value) and written back on failure,
 * but those are plain, non-atomic accesses - instrument them with
 * instrument_read_write(), not the atomic variant, so KCSAN classifies
 * them correctly.
 */
#define try_cmpxchg(ptr, oldp, ...) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	typeof(oldp) __ai_oldp = (oldp); \
	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
	instrument_read_write(__ai_oldp, sizeof(*__ai_oldp)); \
	arch_try_cmpxchg(__ai_ptr, __ai_oldp, __VA_ARGS__); \
})

#define try_cmpxchg_acquire(ptr, oldp, ...) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	typeof(oldp) __ai_oldp = (oldp); \
	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
	instrument_read_write(__ai_oldp, sizeof(*__ai_oldp)); \
	arch_try_cmpxchg_acquire(__ai_ptr, __ai_oldp, __VA_ARGS__); \
})

#define try_cmpxchg_release(ptr, oldp, ...) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	typeof(oldp) __ai_oldp = (oldp); \
	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
	instrument_read_write(__ai_oldp, sizeof(*__ai_oldp)); \
	arch_try_cmpxchg_release(__ai_ptr, __ai_oldp, __VA_ARGS__); \
})

#define try_cmpxchg_relaxed(ptr, oldp, ...) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	typeof(oldp) __ai_oldp = (oldp); \
	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
	instrument_read_write(__ai_oldp, sizeof(*__ai_oldp)); \
	arch_try_cmpxchg_relaxed(__ai_ptr, __ai_oldp, __VA_ARGS__); \
})
1299
/*
 * The _local cmpxchg variants also read *ptr and conditionally write
 * it; instrument them as atomic read-write accesses rather than
 * write-only.
 */
#define cmpxchg_local(ptr, ...) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
	arch_cmpxchg_local(__ai_ptr, __VA_ARGS__); \
})

#define cmpxchg64_local(ptr, ...) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
	arch_cmpxchg64_local(__ai_ptr, __VA_ARGS__); \
})
1313
/*
 * sync_cmpxchg() reads *ptr and conditionally writes it; instrument it
 * as an atomic read-write access rather than write-only.
 */
#define sync_cmpxchg(ptr, ...) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
	arch_sync_cmpxchg(__ai_ptr, __VA_ARGS__); \
})
1320
/*
 * cmpxchg_double() operates (read + conditional write) on two adjacent
 * words starting at ptr, hence the 2 * sizeof(*ptr) access span;
 * instrument it as an atomic read-write access rather than write-only.
 */
#define cmpxchg_double(ptr, ...) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	instrument_atomic_read_write(__ai_ptr, 2 * sizeof(*__ai_ptr)); \
	arch_cmpxchg_double(__ai_ptr, __VA_ARGS__); \
})


#define cmpxchg_double_local(ptr, ...) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	instrument_atomic_read_write(__ai_ptr, 2 * sizeof(*__ai_ptr)); \
	arch_cmpxchg_double_local(__ai_ptr, __VA_ARGS__); \
})
1335
1336#endif /* _ASM_GENERIC_ATOMIC_INSTRUMENTED_H */
1337// 1d7c3a25aca5c7fb031c307be4c3d24c7b48fcd5