[PATCH] atomic: inc_not_zero
[deliverable/linux.git] / include / asm-v850 / atomic.h
CommitLineData
1da177e4
LT
1/*
2 * include/asm-v850/atomic.h -- Atomic operations
3 *
4 * Copyright (C) 2001,02 NEC Corporation
5 * Copyright (C) 2001,02 Miles Bader <miles@gnu.org>
6 *
7 * This file is subject to the terms and conditions of the GNU General
8 * Public License. See the file COPYING in the main directory of this
9 * archive for more details.
10 *
11 * Written by Miles Bader <miles@gnu.org>
12 */
13
14#ifndef __V850_ATOMIC_H__
15#define __V850_ATOMIC_H__
16
17#include <linux/config.h>
18
19#include <asm/system.h>
20
21#ifdef CONFIG_SMP
22#error SMP not supported
23#endif
24
25typedef struct { int counter; } atomic_t;
26
27#define ATOMIC_INIT(i) { (i) }
28
29#ifdef __KERNEL__
30
31#define atomic_read(v) ((v)->counter)
32#define atomic_set(v,i) (((v)->counter) = (i))
33
23f88fe4 34static inline int atomic_add_return (int i, volatile atomic_t *v)
1da177e4
LT
35{
36 unsigned long flags;
37 int res;
38
39 local_irq_save (flags);
40 res = v->counter + i;
41 v->counter = res;
42 local_irq_restore (flags);
43
44 return res;
45}
46
47static __inline__ int atomic_sub_return (int i, volatile atomic_t *v)
48{
49 unsigned long flags;
50 int res;
51
52 local_irq_save (flags);
53 res = v->counter - i;
54 v->counter = res;
55 local_irq_restore (flags);
56
57 return res;
58}
59
60static __inline__ void atomic_clear_mask (unsigned long mask, unsigned long *addr)
61{
62 unsigned long flags;
63
64 local_irq_save (flags);
65 *addr &= ~mask;
66 local_irq_restore (flags);
67}
68
69#endif
70
71#define atomic_add(i, v) atomic_add_return ((i), (v))
72#define atomic_sub(i, v) atomic_sub_return ((i), (v))
73
74#define atomic_dec_return(v) atomic_sub_return (1, (v))
75#define atomic_inc_return(v) atomic_add_return (1, (v))
76#define atomic_inc(v) atomic_inc_return (v)
77#define atomic_dec(v) atomic_dec_return (v)
78
79/*
80 * atomic_inc_and_test - increment and test
81 * @v: pointer of type atomic_t
82 *
83 * Atomically increments @v by 1
84 * and returns true if the result is zero, or false for all
85 * other cases.
86 */
87#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
88
89#define atomic_sub_and_test(i,v) (atomic_sub_return ((i), (v)) == 0)
90#define atomic_dec_and_test(v) (atomic_sub_return (1, (v)) == 0)
91#define atomic_add_negative(i,v) (atomic_add_return ((i), (v)) < 0)
92
4a6dae6d
NP
93static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
94{
95 int ret;
96 unsigned long flags;
97
98 local_irq_save(flags);
99 ret = v->counter;
100 if (likely(ret == old))
101 v->counter = new;
102 local_irq_restore(flags);
103
104 return ret;
105}
106
8426e1f6
NP
107static inline int atomic_add_unless(atomic_t *v, int a, int u)
108{
109 int ret;
110 unsigned long flags;
111
112 local_irq_save(flags);
113 ret = v->counter;
114 if (ret != u)
115 v->counter += a;
116 local_irq_restore(flags);
117
118 return ret != u;
119}
120
121#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
122
1da177e4
LT
/* Atomic operations are already serializing on the v850 */
124#define smp_mb__before_atomic_dec() barrier()
125#define smp_mb__after_atomic_dec() barrier()
126#define smp_mb__before_atomic_inc() barrier()
127#define smp_mb__after_atomic_inc() barrier()
128
129#endif /* __V850_ATOMIC_H__ */
This page took 0.082653 seconds and 5 git commands to generate.