Linux kernel source: arch/alpha/include/asm/local.h — version v3.1 follows.
#ifndef _ALPHA_LOCAL_H
#define _ALPHA_LOCAL_H

#include <linux/percpu.h>
#include <linux/atomic.h>

/*
 * local_t: a signed long counter intended for per-CPU use (hence the
 * <linux/percpu.h> include).  On Alpha it simply wraps atomic_long_t,
 * and the simple operations below delegate to the generic
 * atomic_long_* helpers.
 */
typedef struct
{
	atomic_long_t a;
} local_t;

/* Static initializer and trivial read/modify wrappers. */
#define LOCAL_INIT(i)	{ ATOMIC_LONG_INIT(i) }
#define local_read(l)	atomic_long_read(&(l)->a)
#define local_set(l,i)	atomic_long_set(&(l)->a, (i))
#define local_inc(l)	atomic_long_inc(&(l)->a)
#define local_dec(l)	atomic_long_dec(&(l)->a)
#define local_add(i,l)	atomic_long_add((i),(&(l)->a))
#define local_sub(i,l)	atomic_long_sub((i),(&(l)->a))
/*
 * local_add_return - atomically add @i to @l and return the new value.
 *
 * Uses an Alpha load-locked / store-conditional (ldq_l / stq_c) retry
 * loop.  %0 (temp) holds the loaded value and, after stq_c, the
 * store-conditional success flag; %2 (result) carries the updated value
 * back to the caller.  The retry branch is placed out of line in
 * .subsection 2 so the failure path stays off the straight-line path.
 */
static __inline__ long local_add_return(long i, local_t * l)
{
	long temp, result;
	__asm__ __volatile__(
	"1:	ldq_l %0,%1\n"		/* load-locked l->a.counter */
	"	addq %0,%3,%2\n"	/* result = old + i */
	"	addq %0,%3,%0\n"	/* temp = old + i (value to store) */
	"	stq_c %0,%1\n"		/* store-conditional; %0 = success */
	"	beq %0,2f\n"		/* lost the lock flag -> retry */
	".subsection 2\n"
	"2:	br 1b\n"
	".previous"
	:"=&r" (temp), "=m" (l->a.counter), "=&r" (result)
	:"Ir" (i), "m" (l->a.counter) : "memory");
	return result;
}
 36
/*
 * local_sub_return - atomically subtract @i from @l and return the new
 * value.
 *
 * Mirror image of local_add_return: an Alpha ldq_l / stq_c retry loop,
 * with subq in place of addq.  %2 (result) is the post-subtraction
 * value; the out-of-line branch in .subsection 2 handles a failed
 * store-conditional.
 */
static __inline__ long local_sub_return(long i, local_t * l)
{
	long temp, result;
	__asm__ __volatile__(
	"1:	ldq_l %0,%1\n"		/* load-locked l->a.counter */
	"	subq %0,%3,%2\n"	/* result = old - i */
	"	subq %0,%3,%0\n"	/* temp = old - i (value to store) */
	"	stq_c %0,%1\n"		/* store-conditional; %0 = success */
	"	beq %0,2f\n"		/* lost the lock flag -> retry */
	".subsection 2\n"
	"2:	br 1b\n"
	".previous"
	:"=&r" (temp), "=m" (l->a.counter), "=&r" (result)
	:"Ir" (i), "m" (l->a.counter) : "memory");
	return result;
}
 53
/*
 * Compare-and-exchange / exchange on the raw counter value, built on
 * the *_local primitives (which need not be SMP-safe across CPUs).
 */
#define local_cmpxchg(l, o, n) \
	(cmpxchg_local(&((l)->a.counter), (o), (n)))
#define local_xchg(l, n) (xchg_local(&((l)->a.counter), (n)))

/**
 * local_add_unless - add unless the number is a given value
 * @l: pointer of type local_t
 * @a: the amount to add to l...
 * @u: ...unless l is equal to u.
 *
 * Atomically adds @a to @l, so long as it was not @u.
 * Returns non-zero if @l was not @u, and zero otherwise.
 *
 * Implemented as a cmpxchg loop: re-read and retry on contention until
 * either the add succeeds or the observed value equals @u.
 */
#define local_add_unless(l, a, u)				\
({								\
	long c, old;						\
	c = local_read(l);					\
	for (;;) {						\
		if (unlikely(c == (u)))				\
			break;					\
		old = local_cmpxchg((l), c, c + (a));	\
		if (likely(old == c))				\
			break;					\
		c = old;					\
	}							\
	c != (u);						\
})
#define local_inc_not_zero(l) local_add_unless((l), 1, 0)

/* Derived helpers, all built on the add/sub-return primitives above. */
#define local_add_negative(a, l) (local_add_return((a), (l)) < 0)

#define local_dec_return(l) local_sub_return(1,(l))

#define local_inc_return(l) local_add_return(1,(l))

#define local_sub_and_test(i,l) (local_sub_return((i), (l)) == 0)

#define local_inc_and_test(l) (local_add_return(1, (l)) == 0)

#define local_dec_and_test(l) (local_sub_return(1, (l)) == 0)
 95/* Verify if faster than atomic ops */
 96#define __local_inc(l)		((l)->a.counter++)
 97#define __local_dec(l)		((l)->a.counter++)
 98#define __local_add(i,l)	((l)->a.counter+=(i))
 99#define __local_sub(i,l)	((l)->a.counter-=(i))
100
101#endif /* _ALPHA_LOCAL_H */
Linux kernel source: arch/alpha/include/asm/local.h — version v5.4 follows.
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ALPHA_LOCAL_H
#define _ALPHA_LOCAL_H

#include <linux/percpu.h>
#include <linux/atomic.h>

/*
 * local_t: a signed long counter intended for per-CPU use (hence the
 * <linux/percpu.h> include).  On Alpha it simply wraps atomic_long_t,
 * and the simple operations below delegate to the generic
 * atomic_long_* helpers.
 */
typedef struct
{
	atomic_long_t a;
} local_t;

/* Static initializer and trivial read/modify wrappers. */
#define LOCAL_INIT(i)	{ ATOMIC_LONG_INIT(i) }
#define local_read(l)	atomic_long_read(&(l)->a)
#define local_set(l,i)	atomic_long_set(&(l)->a, (i))
#define local_inc(l)	atomic_long_inc(&(l)->a)
#define local_dec(l)	atomic_long_dec(&(l)->a)
#define local_add(i,l)	atomic_long_add((i),(&(l)->a))
#define local_sub(i,l)	atomic_long_sub((i),(&(l)->a))
/*
 * local_add_return - atomically add @i to @l and return the new value.
 *
 * Uses an Alpha load-locked / store-conditional (ldq_l / stq_c) retry
 * loop.  %0 (temp) holds the loaded value and, after stq_c, the
 * store-conditional success flag; %2 (result) carries the updated value
 * back to the caller.  The retry branch is placed out of line in
 * .subsection 2 so the failure path stays off the straight-line path.
 */
static __inline__ long local_add_return(long i, local_t * l)
{
	long temp, result;
	__asm__ __volatile__(
	"1:	ldq_l %0,%1\n"		/* load-locked l->a.counter */
	"	addq %0,%3,%2\n"	/* result = old + i */
	"	addq %0,%3,%0\n"	/* temp = old + i (value to store) */
	"	stq_c %0,%1\n"		/* store-conditional; %0 = success */
	"	beq %0,2f\n"		/* lost the lock flag -> retry */
	".subsection 2\n"
	"2:	br 1b\n"
	".previous"
	:"=&r" (temp), "=m" (l->a.counter), "=&r" (result)
	:"Ir" (i), "m" (l->a.counter) : "memory");
	return result;
}
 37
/*
 * local_sub_return - atomically subtract @i from @l and return the new
 * value.
 *
 * Mirror image of local_add_return: an Alpha ldq_l / stq_c retry loop,
 * with subq in place of addq.  %2 (result) is the post-subtraction
 * value; the out-of-line branch in .subsection 2 handles a failed
 * store-conditional.
 */
static __inline__ long local_sub_return(long i, local_t * l)
{
	long temp, result;
	__asm__ __volatile__(
	"1:	ldq_l %0,%1\n"		/* load-locked l->a.counter */
	"	subq %0,%3,%2\n"	/* result = old - i */
	"	subq %0,%3,%0\n"	/* temp = old - i (value to store) */
	"	stq_c %0,%1\n"		/* store-conditional; %0 = success */
	"	beq %0,2f\n"		/* lost the lock flag -> retry */
	".subsection 2\n"
	"2:	br 1b\n"
	".previous"
	:"=&r" (temp), "=m" (l->a.counter), "=&r" (result)
	:"Ir" (i), "m" (l->a.counter) : "memory");
	return result;
}
 54
/*
 * Compare-and-exchange / exchange on the raw counter value, built on
 * the *_local primitives (which need not be SMP-safe across CPUs).
 */
#define local_cmpxchg(l, o, n) \
	(cmpxchg_local(&((l)->a.counter), (o), (n)))
#define local_xchg(l, n) (xchg_local(&((l)->a.counter), (n)))

/**
 * local_add_unless - add unless the number is a given value
 * @l: pointer of type local_t
 * @a: the amount to add to l...
 * @u: ...unless l is equal to u.
 *
 * Atomically adds @a to @l, so long as it was not @u.
 * Returns non-zero if @l was not @u, and zero otherwise.
 *
 * Implemented as a cmpxchg loop: re-read and retry on contention until
 * either the add succeeds or the observed value equals @u.
 */
#define local_add_unless(l, a, u)				\
({								\
	long c, old;						\
	c = local_read(l);					\
	for (;;) {						\
		if (unlikely(c == (u)))				\
			break;					\
		old = local_cmpxchg((l), c, c + (a));	\
		if (likely(old == c))				\
			break;					\
		c = old;					\
	}							\
	c != (u);						\
})
#define local_inc_not_zero(l) local_add_unless((l), 1, 0)

/* Derived helpers, all built on the add/sub-return primitives above. */
#define local_add_negative(a, l) (local_add_return((a), (l)) < 0)

#define local_dec_return(l) local_sub_return(1,(l))

#define local_inc_return(l) local_add_return(1,(l))

#define local_sub_and_test(i,l) (local_sub_return((i), (l)) == 0)

#define local_inc_and_test(l) (local_add_return(1, (l)) == 0)

#define local_dec_and_test(l) (local_sub_return(1, (l)) == 0)
 96/* Verify if faster than atomic ops */
 97#define __local_inc(l)		((l)->a.counter++)
 98#define __local_dec(l)		((l)->a.counter++)
 99#define __local_add(i,l)	((l)->a.counter+=(i))
100#define __local_sub(i,l)	((l)->a.counter-=(i))
101
102#endif /* _ALPHA_LOCAL_H */