summaryrefslogtreecommitdiffstats
path: root/include/linux/atomic.h
blob: c7bdf5857ce8fd85f54c25804cc67ab2d7deb9db (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
/* SPDX-License-Identifier: GPL-2.0-only */
#ifndef LINUX_ATOMIC_H_
#define LINUX_ATOMIC_H_

#include <asm-generic/atomic.h>
#include <linux/compiler.h>
#include <asm-generic/cmpxchg.h>

/*
 * Map the "relaxed" cmpxchg onto the generic cmpxchg — no weaker-ordered
 * variant exists in this tree.  NOTE(review): assumes the generic cmpxchg
 * from <asm-generic/cmpxchg.h> carries no ordering guarantees that a
 * relaxed variant would need to shed — confirm against that header.
 */
#define raw_cmpxchg_relaxed cmpxchg

/**
 * raw_atomic_cmpxchg_relaxed() - atomic compare and exchange with relaxed ordering
 * @v: pointer to atomic_t
 * @old: int value to compare with
 * @new: int value to assign
 *
 * If (@v == @old), atomically updates @v to @new with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_cmpxchg_relaxed() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline int
raw_atomic_cmpxchg_relaxed(atomic_t *v, int old, int new)
{
	int *counter = &v->counter;

	/* Delegate to the generic cmpxchg on the embedded counter word. */
	return raw_cmpxchg_relaxed(counter, old, new);
}

/**
 * atomic_try_cmpxchg_relaxed() - atomic compare and exchange with relaxed ordering
 * @v: pointer to atomic_t
 * @old: pointer to int value to compare with
 * @new: int value to assign
 *
 * If (@v == @old), atomically updates @v to @new with relaxed ordering.
 * Otherwise, updates @old to the current value of @v.
 *
 * Return: %true if the exchange occurred, %false otherwise.
 */
static __always_inline bool
atomic_try_cmpxchg_relaxed(atomic_t *v, int *old, int new)
{
	int r, o = *old;

	r = raw_atomic_cmpxchg_relaxed(v, o, new);
	/* On failure, report the value actually observed back through @old. */
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}

/**
 * atomic_fetch_add() - atomic add
 * @i: int value to add
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v + @i).
 *
 * Return: The original value of @v.
 */
static __always_inline int
atomic_fetch_add(int i, atomic_t *v)
{
	/*
	 * NOTE(review): this is a plain, non-atomic read-modify-write,
	 * despite the name and the kernel-doc above.  Presumably safe
	 * because this environment is single-threaded / non-preemptive
	 * (the sibling cmpxchg is likewise the generic one) — confirm;
	 * on SMP this would need a real atomic operation.
	 */
	int prev = v->counter;

	v->counter = prev + i;
	return prev;
}
#define atomic_fetch_add_relaxed atomic_fetch_add
/*
 * Subtraction is addition of the negated value.  @i must be parenthesized
 * before negation: with the unparenthesized form, atomic_fetch_sub(a - b, v)
 * expanded to atomic_fetch_add(-a - b, v), negating only the first operand
 * instead of the whole expression.
 */
#define atomic_fetch_sub(i, v) atomic_fetch_add(-(i), (v))
#define atomic_fetch_sub_release atomic_fetch_sub

#endif