Commit | Line | Data |
---|---|---|
1da177e4 LT |
1 | /* |
2 | * bitops.c: atomic operations which got too long to be inlined all over | |
3 | * the place. | |
4 | * | |
5 | * Copyright 1999 Philipp Rumpf (prumpf@tux.org) | |
6 | * Copyright 2000 Grant Grundler (grundler@cup.hp.com) | |
7 | */ | |
8 | ||
1da177e4 LT |
9 | #include <linux/kernel.h> |
10 | #include <linux/spinlock.h> | |
11 | #include <asm/system.h> | |
12 | #include <asm/atomic.h> | |
13 | ||
#ifdef CONFIG_SMP
/*
 * Hashed array of spinlocks backing the emulated atomic operations
 * below: each target address is hashed (by _atomic_spin_lock_irqsave)
 * onto one of these locks, so unrelated atomics rarely contend on the
 * same lock.  Only needed on SMP; UP relies on irq disabling alone.
 */
raw_spinlock_t __atomic_hash[ATOMIC_HASH_SIZE] __lock_aligned = {
	[0 ... (ATOMIC_HASH_SIZE-1)] = __RAW_SPIN_LOCK_UNLOCKED
};
#endif
19 | ||
#ifdef __LP64__
/*
 * Atomically store @x into *@ptr and return the previous contents.
 * Atomicity is emulated by taking the hashed spinlock covering @ptr
 * with interrupts disabled for the read-modify-write window.
 */
unsigned long __xchg64(unsigned long x, unsigned long *ptr)
{
	unsigned long prev, flags;

	_atomic_spin_lock_irqsave(ptr, flags);
	prev = *ptr;
	*ptr = x;
	_atomic_spin_unlock_irqrestore(ptr, flags);

	return prev;
}
#endif
32 | ||
/*
 * 32-bit exchange: atomically store @x into *@ptr and return the old
 * value, widened through a signed long (so it is sign-extended on
 * 64-bit — see the XXX note; intentionality is unconfirmed upstream).
 */
unsigned long __xchg32(int x, int *ptr)
{
	unsigned long flags;
	long prev;

	_atomic_spin_lock_irqsave(ptr, flags);
	prev = (long) *ptr;	/* XXX - sign extension wanted? */
	*ptr = x;
	_atomic_spin_unlock_irqrestore(ptr, flags);

	return (unsigned long)prev;
}
44 | ||
45 | ||
/*
 * 8-bit exchange: atomically store @x into *@ptr and return the old
 * byte, widened through a signed long (sign-extended when plain char
 * is signed — see the XXX note; intentionality is unconfirmed).
 */
unsigned long __xchg8(char x, char *ptr)
{
	unsigned long flags;
	long prev;

	_atomic_spin_lock_irqsave(ptr, flags);
	prev = (long) *ptr;	/* XXX - sign extension wanted? */
	*ptr = x;
	_atomic_spin_unlock_irqrestore(ptr, flags);

	return (unsigned long)prev;
}
57 | ||
58 | ||
#ifdef __LP64__
/*
 * 64-bit compare-and-exchange: if *@ptr equals @old, replace it with
 * @new.  Always returns the value observed in *@ptr, so the caller can
 * tell success (return == @old) from failure.  Serialized under the
 * hashed spinlock with interrupts off.
 */
unsigned long __cmpxchg_u64(volatile unsigned long *ptr, unsigned long old, unsigned long new)
{
	unsigned long flags;
	unsigned long prev;

	_atomic_spin_lock_irqsave(ptr, flags);
	prev = *ptr;
	if (prev == old)
		*ptr = new;
	_atomic_spin_unlock_irqrestore(ptr, flags);

	return prev;
}
#endif
72 | ||
/*
 * 32-bit compare-and-exchange: if *@ptr equals @old, replace it with
 * @new.  Returns the value observed in *@ptr (zero-extended to
 * unsigned long), so the caller can tell success from failure.
 */
unsigned long __cmpxchg_u32(volatile unsigned int *ptr, unsigned int old, unsigned int new)
{
	unsigned long flags;
	unsigned int prev;

	_atomic_spin_lock_irqsave(ptr, flags);
	prev = *ptr;
	if (prev == old)
		*ptr = new;
	_atomic_spin_unlock_irqrestore(ptr, flags);

	return (unsigned long)prev;
}