path: root/include/asm-generic/percpu.h
Diffstat (limited to 'include/asm-generic/percpu.h')
-rw-r--r--	include/asm-generic/percpu.h	71
1 file changed, 18 insertions(+), 53 deletions(-)
diff --git a/include/asm-generic/percpu.h b/include/asm-generic/percpu.h
index 00f45ff081a6..d7d50d7ee51e 100644
--- a/include/asm-generic/percpu.h
+++ b/include/asm-generic/percpu.h
@@ -1,13 +1,9 @@
#ifndef _ASM_GENERIC_PERCPU_H_
#define _ASM_GENERIC_PERCPU_H_
+
#include <linux/compiler.h>
#include <linux/threads.h>
-
-/*
- * Determine the real variable name from the name visible in the
- * kernel sources.
- */
-#define per_cpu_var(var) per_cpu__##var
+#include <linux/percpu-defs.h>
#ifdef CONFIG_SMP
@@ -73,63 +69,32 @@ extern void setup_per_cpu_areas(void);
#endif /* SMP */
-#ifndef PER_CPU_ATTRIBUTES
-#define PER_CPU_ATTRIBUTES
+#ifndef PER_CPU_BASE_SECTION
+#ifdef CONFIG_SMP
+#define PER_CPU_BASE_SECTION ".data.percpu"
+#else
+#define PER_CPU_BASE_SECTION ".data"
#endif
-
-#define DECLARE_PER_CPU(type, name) extern PER_CPU_ATTRIBUTES \
- __typeof__(type) per_cpu_var(name)
-
-/*
- * Optional methods for optimized non-lvalue per-cpu variable access.
- *
- * @var can be a percpu variable or a field of it and its size should
- * equal char, int or long. percpu_read() evaluates to a lvalue and
- * all others to void.
- *
- * These operations are guaranteed to be atomic w.r.t. preemption.
- * The generic versions use plain get/put_cpu_var(). Archs are
- * encouraged to implement single-instruction alternatives which don't
- * require preemption protection.
- */
-#ifndef percpu_read
-# define percpu_read(var) \
- ({ \
- typeof(per_cpu_var(var)) __tmp_var__; \
- __tmp_var__ = get_cpu_var(var); \
- put_cpu_var(var); \
- __tmp_var__; \
- })
#endif
-#define __percpu_generic_to_op(var, val, op) \
-do { \
- get_cpu_var(var) op val; \
- put_cpu_var(var); \
-} while (0)
-
-#ifndef percpu_write
-# define percpu_write(var, val) __percpu_generic_to_op(var, (val), =)
-#endif
+#ifdef CONFIG_SMP
-#ifndef percpu_add
-# define percpu_add(var, val) __percpu_generic_to_op(var, (val), +=)
+#ifdef MODULE
+#define PER_CPU_SHARED_ALIGNED_SECTION ""
+#else
+#define PER_CPU_SHARED_ALIGNED_SECTION ".shared_aligned"
#endif
+#define PER_CPU_FIRST_SECTION ".first"
-#ifndef percpu_sub
-# define percpu_sub(var, val) __percpu_generic_to_op(var, (val), -=)
-#endif
+#else
-#ifndef percpu_and
-# define percpu_and(var, val) __percpu_generic_to_op(var, (val), &=)
-#endif
+#define PER_CPU_SHARED_ALIGNED_SECTION ""
+#define PER_CPU_FIRST_SECTION ""
-#ifndef percpu_or
-# define percpu_or(var, val) __percpu_generic_to_op(var, (val), |=)
#endif
-#ifndef percpu_xor
-# define percpu_xor(var, val) __percpu_generic_to_op(var, (val), ^=)
+#ifndef PER_CPU_ATTRIBUTES
+#define PER_CPU_ATTRIBUTES
#endif
#endif /* _ASM_GENERIC_PERCPU_H_ */
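
A minimal sketch (not part of the patch) of how the section-name fragments added above could be consumed by a per-CPU definition macro of the kind found in linux/percpu-defs.h. The macro __PCPU_SECTION_ATTR and the variable per_cpu__my_counter are illustrative assumptions, not the upstream code; it only assumes the PER_CPU_* macros defined in this header are visible.

/*
 * Illustrative only: the string literals concatenate at compile time,
 * so on an SMP build a "shared aligned" per-CPU variable lands in
 * ".data.percpu.shared_aligned", while a UP build falls back to ".data".
 */
#define __PCPU_SECTION_ATTR(suffix)					\
	__attribute__((section(PER_CPU_BASE_SECTION suffix)))		\
	PER_CPU_ATTRIBUTES

/* Hypothetical cacheline-shared per-CPU counter using the macro above. */
__PCPU_SECTION_ATTR(PER_CPU_SHARED_ALIGNED_SECTION)
__typeof__(unsigned long) per_cpu__my_counter;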