Lines Matching full:s64

12 s64 __aligned(8) counter;
24 * s64 val = arch_atomic64_read_nonatomic(v);
31 * s64 val = arch_atomic64_read_nonatomic(v);
37 static __always_inline s64 arch_atomic64_read_nonatomic(const atomic64_t *v) in arch_atomic64_read_nonatomic()
91 static __always_inline s64 arch_atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new) in arch_atomic64_cmpxchg()
97 static __always_inline bool arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new) in arch_atomic64_try_cmpxchg()
103 static __always_inline s64 arch_atomic64_xchg(atomic64_t *v, s64 n) in arch_atomic64_xchg()
105 s64 o; in arch_atomic64_xchg()
115 static __always_inline void arch_atomic64_set(atomic64_t *v, s64 i) in arch_atomic64_set()
124 static __always_inline s64 arch_atomic64_read(const atomic64_t *v) in arch_atomic64_read()
126 s64 r; in arch_atomic64_read()
131 static __always_inline s64 arch_atomic64_add_return(s64 i, atomic64_t *v) in arch_atomic64_add_return()
140 static __always_inline s64 arch_atomic64_sub_return(s64 i, atomic64_t *v) in arch_atomic64_sub_return()
149 static __always_inline s64 arch_atomic64_inc_return(atomic64_t *v) in arch_atomic64_inc_return()
151 s64 a; in arch_atomic64_inc_return()
158 static __always_inline s64 arch_atomic64_dec_return(atomic64_t *v) in arch_atomic64_dec_return()
160 s64 a; in arch_atomic64_dec_return()
167 static __always_inline void arch_atomic64_add(s64 i, atomic64_t *v) in arch_atomic64_add()
174 static __always_inline void arch_atomic64_sub(s64 i, atomic64_t *v) in arch_atomic64_sub()
195 static __always_inline int arch_atomic64_add_unless(atomic64_t *v, s64 a, s64 u) in arch_atomic64_add_unless()
215 static __always_inline s64 arch_atomic64_dec_if_positive(atomic64_t *v) in arch_atomic64_dec_if_positive()
217 s64 r; in arch_atomic64_dec_if_positive()
227 static __always_inline void arch_atomic64_and(s64 i, atomic64_t *v) in arch_atomic64_and()
229 s64 val = arch_atomic64_read_nonatomic(v); in arch_atomic64_and()
234 static __always_inline s64 arch_atomic64_fetch_and(s64 i, atomic64_t *v) in arch_atomic64_fetch_and()
236 s64 val = arch_atomic64_read_nonatomic(v); in arch_atomic64_fetch_and()
244 static __always_inline void arch_atomic64_or(s64 i, atomic64_t *v) in arch_atomic64_or()
246 s64 val = arch_atomic64_read_nonatomic(v); in arch_atomic64_or()
251 static __always_inline s64 arch_atomic64_fetch_or(s64 i, atomic64_t *v) in arch_atomic64_fetch_or()
253 s64 val = arch_atomic64_read_nonatomic(v); in arch_atomic64_fetch_or()
261 static __always_inline void arch_atomic64_xor(s64 i, atomic64_t *v) in arch_atomic64_xor()
263 s64 val = arch_atomic64_read_nonatomic(v); in arch_atomic64_xor()
268 static __always_inline s64 arch_atomic64_fetch_xor(s64 i, atomic64_t *v) in arch_atomic64_fetch_xor()
270 s64 val = arch_atomic64_read_nonatomic(v); in arch_atomic64_fetch_xor()
278 static __always_inline s64 arch_atomic64_fetch_add(s64 i, atomic64_t *v) in arch_atomic64_fetch_add()
280 s64 val = arch_atomic64_read_nonatomic(v); in arch_atomic64_fetch_add()
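
The and/or/xor and fetch_* hits above each begin with a plain arch_atomic64_read_nonatomic() of the counter, and the same file exposes arch_atomic64_try_cmpxchg() (line 97). A minimal sketch of the read-then-retry pattern this combination suggests is shown below; it assumes the usual try_cmpxchg semantics (return true on success, otherwise refresh *old with the current value) and the kernel types and annotations already in scope in this header. It is an illustration of the pattern, not the file's verbatim code.

/*
 * Sketch only (hypothetical helper, not taken from the file): a
 * fetch_and-style read-modify-write built from the listed primitives.
 */
static __always_inline s64 sketch_atomic64_fetch_and(s64 i, atomic64_t *v)
{
	/*
	 * A torn (non-atomic) 64-bit read is acceptable here because the
	 * try_cmpxchg below revalidates the value before committing.
	 */
	s64 val = arch_atomic64_read_nonatomic(v);

	do {
		/* Retry until no other CPU changed the counter in between. */
	} while (!arch_atomic64_try_cmpxchg(v, &val, val & i));

	return val;	/* the value observed before the AND was applied */
}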
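
For the conditional operations in the listing, arch_atomic64_add_unless() (line 195) and arch_atomic64_dec_return() (line 158), a caller-side sketch follows. The helper names are invented for the example and the refcount framing is only an assumed use case, not something stated by the file; it relies on the standard add_unless semantics (add and return non-zero unless the counter already equals the given value).

/* Hypothetical caller: a toy 64-bit refcount built on the listed API. */
static bool example_get_ref(atomic64_t *refs)
{
	/*
	 * Add 1 unless the count is already 0; a non-zero return means the
	 * increment happened and the caller now holds a reference.
	 */
	return arch_atomic64_add_unless(refs, 1, 0) != 0;
}

static bool example_put_ref(atomic64_t *refs)
{
	/* True when this call dropped the last reference. */
	return arch_atomic64_dec_return(refs) == 0;
}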