// SPDX-License-Identifier: GPL-2.0
/*
 * atomic32.c: 32-bit atomic_t implementation
 *
 * Copyright (C) 2004 Keith M Wesolowski
 * Copyright (C) 2007 Kyle McMartin
 *
 * Based on asm-parisc/atomic.h Copyright (C) 2000 Philipp Rumpf
 */

#include <linux/atomic.h>
#include <linux/spinlock.h>
#include <linux/module.h>

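/*
 * Explanatory note (not in the original source): sparc32 cannot rely on a
 * hardware compare-and-swap, so atomic_t and the atomic bitops are emulated
 * here by doing a plain load/modify/store under a spinlock.  A small hash
 * of locks, indexed by the variable's address, keeps unrelated atomics
 * from all contending on one global lock.
 */
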
#ifdef CONFIG_SMP
#define ATOMIC_HASH_SIZE	4
#define ATOMIC_HASH(a)	(&__atomic_hash[(((unsigned long)a)>>8) & (ATOMIC_HASH_SIZE-1)])

spinlock_t __atomic_hash[ATOMIC_HASH_SIZE] = {
	[0 ... (ATOMIC_HASH_SIZE-1)] = __SPIN_LOCK_UNLOCKED(__atomic_hash)
};

#else /* SMP */

/* On UP a single lock suffices: disabling IRQs already excludes everyone. */
static DEFINE_SPINLOCK(dummy);
#define ATOMIC_HASH_SIZE	1
#define ATOMIC_HASH(a)		(&dummy)

#endif /* SMP */

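/*
 * Illustrative note (not in the original source): ATOMIC_HASH() drops the
 * low 8 bits of the address before hashing, so two atomics in the same
 * 256-byte region share a lock, while e.g. addresses 0x...100 and 0x...200
 * map to different locks (modulo ATOMIC_HASH_SIZE).
 */
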
#define ATOMIC_FETCH_OP(op, c_op)					\
int atomic_fetch_##op(int i, atomic_t *v)				\
{									\
	int ret;							\
	unsigned long flags;						\
	spin_lock_irqsave(ATOMIC_HASH(v), flags);			\
	ret = v->counter;	/* return the pre-op value */		\
	v->counter c_op i;						\
	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);			\
	return ret;							\
}									\
EXPORT_SYMBOL(atomic_fetch_##op);

#define ATOMIC_OP_RETURN(op, c_op)					\
int atomic_##op##_return(int i, atomic_t *v)				\
{									\
	int ret;							\
	unsigned long flags;						\
	spin_lock_irqsave(ATOMIC_HASH(v), flags);			\
	ret = (v->counter c_op i);	/* return the post-op value */	\
	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);			\
	return ret;							\
}									\
EXPORT_SYMBOL(atomic_##op##_return);

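/* Instantiate the operations the sparc32 atomic_t API actually needs. */
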
ATOMIC_OP_RETURN(add, +=)

ATOMIC_FETCH_OP(add, +=)
ATOMIC_FETCH_OP(and, &=)
ATOMIC_FETCH_OP(or, |=)
ATOMIC_FETCH_OP(xor, ^=)

#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN

int atomic_xchg(atomic_t *v, int new)
{
	int ret;
	unsigned long flags;

	spin_lock_irqsave(ATOMIC_HASH(v), flags);
	ret = v->counter;
	v->counter = new;
	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
	return ret;
}
EXPORT_SYMBOL(atomic_xchg);

int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	int ret;
	unsigned long flags;

	spin_lock_irqsave(ATOMIC_HASH(v), flags);
	ret = v->counter;
	if (likely(ret == old))
		v->counter = new;
	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
	return ret;
}
EXPORT_SYMBOL(atomic_cmpxchg);

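/*
 * Illustrative caller pattern (not part of this file): the usual
 * cmpxchg() retry loop works unchanged on top of this emulation,
 * it just takes and releases the hashed spinlock on each attempt:
 *
 *	int old;
 *	do {
 *		old = atomic_read(v);
 *	} while (atomic_cmpxchg(v, old, old * 2) != old);
 */
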
int atomic_fetch_add_unless(atomic_t *v, int a, int u)
{
	int ret;
	unsigned long flags;

	spin_lock_irqsave(ATOMIC_HASH(v), flags);
	ret = v->counter;
	if (ret != u)
		v->counter += a;
	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
	return ret;
}
EXPORT_SYMBOL(atomic_fetch_add_unless);

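/*
 * Note (not in the original source): atomic_fetch_add_unless() returns the
 * old value, so the generic atomic_inc_not_zero() built on top of it
 * (atomic_fetch_add_unless(v, 1, 0) != 0) gets a single, consistent
 * snapshot of the counter taken inside the locked region.
 */
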
/* Atomic operations are already serializing */
void atomic_set(atomic_t *v, int i)
{
	unsigned long flags;

	spin_lock_irqsave(ATOMIC_HASH(v), flags);
	v->counter = i;
	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
}
EXPORT_SYMBOL(atomic_set);

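/*
 * Why atomic_set() takes the lock (explanatory note, not in the original
 * source): the other operations are non-atomic read-modify-write sequences
 * done under the hashed lock, so a plain racing store could be overwritten
 * by the stale value such an RMW read earlier.
 */
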
unsigned long ___set_bit(unsigned long *addr, unsigned long mask)
{
	unsigned long old, flags;

	spin_lock_irqsave(ATOMIC_HASH(addr), flags);
	old = *addr;
	*addr = old | mask;
	spin_unlock_irqrestore(ATOMIC_HASH(addr), flags);

	return old & mask;
}
EXPORT_SYMBOL(___set_bit);

unsigned long ___clear_bit(unsigned long *addr, unsigned long mask)
{
	unsigned long old, flags;

	spin_lock_irqsave(ATOMIC_HASH(addr), flags);
	old = *addr;
	*addr = old & ~mask;
	spin_unlock_irqrestore(ATOMIC_HASH(addr), flags);

	return old & mask;
}
EXPORT_SYMBOL(___clear_bit);

unsigned long ___change_bit(unsigned long *addr, unsigned long mask)
{
	unsigned long old, flags;

	spin_lock_irqsave(ATOMIC_HASH(addr), flags);
	old = *addr;
	*addr = old ^ mask;
	spin_unlock_irqrestore(ATOMIC_HASH(addr), flags);

	return old & mask;
}
EXPORT_SYMBOL(___change_bit);

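/*
 * Note (not in the original source): each ___*_bit() helper returns
 * (old & mask), i.e. nonzero iff the bit was set beforehand, which lets
 * the sparc32 bitops layer implement both the plain and the
 * test_and_*_bit() variants from the same routine.
 */
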
unsigned long __cmpxchg_u32(volatile u32 *ptr, u32 old, u32 new)
{
	unsigned long flags;
	u32 prev;

	spin_lock_irqsave(ATOMIC_HASH(ptr), flags);
	if ((prev = *ptr) == old)
		*ptr = new;
	spin_unlock_irqrestore(ATOMIC_HASH(ptr), flags);

	return (unsigned long)prev;
}
EXPORT_SYMBOL(__cmpxchg_u32);

u64 __cmpxchg_u64(u64 *ptr, u64 old, u64 new)
{
	unsigned long flags;
	u64 prev;

	spin_lock_irqsave(ATOMIC_HASH(ptr), flags);
	if ((prev = *ptr) == old)
		*ptr = new;
	spin_unlock_irqrestore(ATOMIC_HASH(ptr), flags);

	return prev;
}
EXPORT_SYMBOL(__cmpxchg_u64);

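/*
 * Note (not in the original source): a 64-bit cmpxchg is possible here even
 * though the CPU has no 64-bit atomic instruction, because the hashed
 * spinlock serializes the whole compare-and-store sequence.
 */
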
unsigned long __xchg_u32(volatile u32 *ptr, u32 new)
{
	unsigned long flags;
	u32 prev;

	spin_lock_irqsave(ATOMIC_HASH(ptr), flags);
	prev = *ptr;
	*ptr = new;
	spin_unlock_irqrestore(ATOMIC_HASH(ptr), flags);

	return (unsigned long)prev;
}
EXPORT_SYMBOL(__xchg_u32);
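
/*
 * Note (not in the original source): __cmpxchg_u32(), __cmpxchg_u64() and
 * __xchg_u32() are the out-of-line backends that sparc32's cmpxchg(),
 * cmpxchg64() and xchg() macros fall back to for these sizes.
 */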