include/asm-alpha/local.h
#ifndef _ALPHA_LOCAL_H
#define _ALPHA_LOCAL_H

#include <linux/percpu.h>
#include <asm/atomic.h>

typedef struct
{
	atomic_long_t a;
} local_t;

#define LOCAL_INIT(i)	{ ATOMIC_LONG_INIT(i) }
#define local_read(l)	atomic_long_read(&(l)->a)
#define local_set(l,i)	atomic_long_set(&(l)->a, (i))
#define local_inc(l)	atomic_long_inc(&(l)->a)
#define local_dec(l)	atomic_long_dec(&(l)->a)
#define local_add(i,l)	atomic_long_add((i),(&(l)->a))
#define local_sub(i,l)	atomic_long_sub((i),(&(l)->a))
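
/*
 * A minimal usage sketch (illustrative only, not part of the original
 * header; the variable name "events" is hypothetical):
 *
 *	local_t events = LOCAL_INIT(0);
 *
 *	local_add(5, &events);
 *	local_dec(&events);
 *	printk("%ld\n", local_read(&events));	// prints 4
 */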

/*
 * Atomically add i to the counter and return the new value, using
 * Alpha's load-locked/store-conditional (ldq_l/stq_c) sequence.  If
 * the store-conditional fails because another CPU touched the word,
 * the out-of-line stub in .subsection 2 branches back to retry.
 */
static __inline__ long local_add_return(long i, local_t * l)
{
	long temp, result;
	__asm__ __volatile__(
	"1:	ldq_l %0,%1\n"		/* load-locked the counter */
	"	addq %0,%3,%2\n"	/* result = old + i */
	"	addq %0,%3,%0\n"	/* temp = old + i, to be stored */
	"	stq_c %0,%1\n"		/* store-conditional; %0 = success */
	"	beq %0,2f\n"		/* failed -> retry out of line */
	".subsection 2\n"
	"2:	br 1b\n"
	".previous"
	:"=&r" (temp), "=m" (l->a.counter), "=&r" (result)
	:"Ir" (i), "m" (l->a.counter) : "memory");
	return result;
}

/* As local_add_return(), but subtracting i. */
static __inline__ long local_sub_return(long i, local_t * l)
{
	long temp, result;
	__asm__ __volatile__(
	"1:	ldq_l %0,%1\n"
	"	subq %0,%3,%2\n"
	"	subq %0,%3,%0\n"
	"	stq_c %0,%1\n"
	"	beq %0,2f\n"
	".subsection 2\n"
	"2:	br 1b\n"
	".previous"
	:"=&r" (temp), "=m" (l->a.counter), "=&r" (result)
	:"Ir" (i), "m" (l->a.counter) : "memory");
	return result;
}

#define local_cmpxchg(l, o, n) \
	(cmpxchg_local(&((l)->a.counter), (o), (n)))
#define local_xchg(l, n) (xchg_local(&((l)->a.counter), (n)))

/**
 * local_add_unless - add unless the number is a given value
 * @l: pointer of type local_t
 * @a: the amount to add to @l...
 * @u: ...unless @l is equal to @u.
 *
 * Atomically adds @a to @l, so long as @l was not @u.
 * Returns non-zero if @l was not @u, and zero otherwise.
 */
#define local_add_unless(l, a, u)				\
({								\
	long c, old;						\
	c = local_read(l);					\
	for (;;) {						\
		if (unlikely(c == (u)))				\
			break;					\
		old = local_cmpxchg((l), c, c + (a));		\
		if (likely(old == c))				\
			break;					\
		c = old;					\
	}							\
	c != (u);						\
})
#define local_inc_not_zero(l) local_add_unless((l), 1, 0)
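
/*
 * Usage sketch for local_add_unless()/local_inc_not_zero() (illustrative
 * only; "refs" is a hypothetical variable, not part of this header).
 * The classic pattern is a refcount that must not be revived once it
 * has dropped to zero:
 *
 *	local_t refs = LOCAL_INIT(1);
 *
 *	if (!local_inc_not_zero(&refs))
 *		return NULL;	// object already being torn down
 */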

#define local_add_negative(a, l) (local_add_return((a), (l)) < 0)

#define local_dec_return(l) local_sub_return(1,(l))

#define local_inc_return(l) local_add_return(1,(l))

#define local_sub_and_test(i,l) (local_sub_return((i), (l)) == 0)

#define local_inc_and_test(l) (local_add_return(1, (l)) == 0)

#define local_dec_and_test(l) (local_sub_return(1, (l)) == 0)

/*
 * Non-atomic variants, for use only when the caller already excludes
 * concurrent access.  (Original note: verify if faster than the
 * atomic ops above.)
 */
#define __local_inc(l)		((l)->a.counter++)
#define __local_dec(l)		((l)->a.counter--)
#define __local_add(i,l)	((l)->a.counter+=(i))
#define __local_sub(i,l)	((l)->a.counter-=(i))

/* Use these for per-cpu local_t variables: on some archs they are
 * much more efficient than these naive implementations.  Note they take
 * a variable, not an address.
 */
#define cpu_local_read(l)	local_read(&__get_cpu_var(l))
#define cpu_local_set(l, i)	local_set(&__get_cpu_var(l), (i))

#define cpu_local_inc(l)	local_inc(&__get_cpu_var(l))
#define cpu_local_dec(l)	local_dec(&__get_cpu_var(l))
#define cpu_local_add(i, l)	local_add((i), &__get_cpu_var(l))
#define cpu_local_sub(i, l)	local_sub((i), &__get_cpu_var(l))

#define __cpu_local_inc(l)	__local_inc(&__get_cpu_var(l))
#define __cpu_local_dec(l)	__local_dec(&__get_cpu_var(l))
#define __cpu_local_add(i, l)	__local_add((i), &__get_cpu_var(l))
#define __cpu_local_sub(i, l)	__local_sub((i), &__get_cpu_var(l))
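
/*
 * Per-cpu usage sketch (illustrative only; "hits" is a hypothetical
 * counter, not part of this header).  Note that the cpu_local_* ops
 * take the per-cpu variable itself, not its address:
 *
 *	static DEFINE_PER_CPU(local_t, hits) = LOCAL_INIT(0);
 *
 *	cpu_local_inc(hits);
 *	count = cpu_local_read(hits);
 */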

#endif /* _ALPHA_LOCAL_H */