1 #ifndef __IA64_DEFNS_H__
2 #define __IA64_DEFNS_H__
/* Cache-line size in bytes, used to pad/align shared data and avoid false
 * sharing.  NOTE(review): some Itanium models use 128-byte L2/L3 lines —
 * confirm 64 is intended for the deployment target. */
#define CACHE_LINE_SIZE 64
/*
 * I. Compare-and-swap.
 */
/*
 * Atomic compare-and-swap on a 32-bit location.
 * Loads the expected value into ar.ccv, then cmpxchg4.acq atomically stores
 * _n to *(_a) iff *(_a) == ar.ccv, with acquire ordering.  The statement
 * expression evaluates to the value observed at *(_a): equal to _o exactly
 * when the swap succeeded.
 * Fixes: the listing had lost the input-operand clause and the trailing
 * result expression — reconstructed; the ar.ccv move now uses the cached
 * __o so the _o argument is evaluated exactly once.
 */
#define CAS32(_a, _o, _n)                                       \
({ __typeof__(_o) __o = _o;                                     \
   __asm__ __volatile__("mov ar.ccv=%0 ;;" :: "rO" (__o));      \
   __asm__ __volatile__("cmpxchg4.acq %0=%1,%2,ar.ccv ;; "      \
                        : "=r" (__o), "=m" (*(_a))              \
                        : "r" (_n));                            \
   __o;                                                         \
})
/*
 * Atomic compare-and-swap on a 64-bit location.
 * Identical protocol to CAS32 but uses cmpxchg8.acq (8-byte, acquire
 * ordering).  Evaluates to the value observed at *(_a); equal to _o on
 * success.
 * Fixes: reconstructed the input-operand clause and result expression lost
 * from the listing; ar.ccv move uses the cached __o so _o is evaluated
 * exactly once.
 */
#define CAS64(_a, _o, _n)                                       \
({ __typeof__(_o) __o = _o;                                     \
   __asm__ __volatile__("mov ar.ccv=%0 ;;" :: "rO" (__o));      \
   __asm__ __volatile__("cmpxchg8.acq %0=%1,%2,ar.ccv ;; "      \
                        : "=r" (__o), "=m" (*(_a))              \
                        : "r" (_n));                            \
   __o;                                                         \
})
/*
 * Atomic fetch-and-store (unconditional exchange) on a 32-bit location:
 * xchg4 swaps _n into *(_a) and the statement expression evaluates to the
 * previous contents.
 * Fix: the listing had lost the input-operand clause and the trailing
 * result expression — reconstructed.
 */
#define FAS32(_a, _n)                                           \
({ __typeof__(_n) __o;                                          \
   __asm__ __volatile__("xchg4 %0=%1,%2 ;; "                    \
                        : "=r" (__o), "=m" (*(_a))              \
                        : "r" (_n));                            \
   __o;                                                         \
})
/*
 * Atomic fetch-and-store (unconditional exchange) on a 64-bit location:
 * xchg8 swaps _n into *(_a) and the statement expression evaluates to the
 * previous contents.
 * Fix: the listing had lost the input-operand clause and the trailing
 * result expression — reconstructed.
 */
#define FAS64(_a, _n)                                           \
({ __typeof__(_n) __o;                                          \
   __asm__ __volatile__("xchg8 %0=%1,%2 ;; "                    \
                        : "=r" (__o), "=m" (*(_a))              \
                        : "r" (_n));                            \
   __o;                                                         \
})
/*
 * Width-dispatching compare-and-swap: selects the 4- or 8-byte primitive by
 * the size of the pointed-to object.  sizeof does not evaluate its operand,
 * but _a/_o/_n expand into the chosen branch, so arguments must be free of
 * side effects.
 * Fix: parenthesize the _x argument inside sizeof — without it,
 * CAS(p + 1, o, n) computed sizeof(*p + 1) instead of sizeof(*(p + 1)).
 */
#define CAS(_x,_o,_n) ((sizeof (*(_x)) == 4)?CAS32(_x,_o,_n):CAS64(_x,_o,_n))
/*
 * Width-dispatching fetch-and-store: selects the 4- or 8-byte exchange by
 * the size of the pointed-to object.  Arguments must be side-effect free
 * (they expand into the chosen branch).
 * Fix: parenthesize the _x argument inside sizeof so pointer expressions
 * such as FAS(p + 1, n) measure the intended object.
 */
#define FAS(_x,_n)    ((sizeof (*(_x)) == 4)?FAS32(_x,_n)    :FAS64(_x,_n))
54 /* Update Integer location, return Old value. */
57 /* Update Pointer location, return Old value. */
60 /* Update 32/64-bit location, return Old value. */
/*
 * II. Memory barriers.
 *  WMB(): All preceding write operations must commit before any later writes.
 *  RMB(): All preceding read operations must commit before any later reads.
 *  MB():  All preceding memory accesses must commit before any later accesses.
 *
 * If the compiler does not observe these barriers (but any sane compiler
 * will!), then VOLATILE should be defined as 'volatile'.
 */
/* Full memory barrier: the IA-64 'mf' instruction orders all preceding
 * memory accesses before all later ones; the "memory" clobber additionally
 * stops the compiler from reordering accesses across the barrier. */
#define MB()  __asm__ __volatile__ (";; mf ;; " : : : "memory")
/* Deliberately empty: the "memory" clobbers in the barrier macros already
 * constrain the compiler.  Define as 'volatile' only if the compiler fails
 * to honour those barriers (see the barrier note in this header). */
#define VOLATILE /*volatile*/
/*
 * III. Cycle counter access.
 */
/* Cycle-counter sample: wide enough to hold the 64-bit IA-64 interval
 * timer (ar.itc) value read below. */
typedef unsigned long long tick_t;
/*
 * Read the free-running IA-64 interval time counter (ar.itc) as a tick_t.
 * Fix: the listing had dropped the '#define RDTICK()' line, leaving only
 * the statement-expression body — reconstructed the macro header.
 */
#define RDTICK() \
    ({ tick_t __t; __asm__ __volatile__ ("mov %0=ar.itc ;;" : "=rO" (__t)); __t; })
/* Fixed-width unsigned integer aliases (this header predates <stdint.h>).
 * NOTE(review): leading-underscore type names brush against the
 * implementation-reserved namespace; kept as-is since callers depend on
 * them. */
typedef unsigned char _u8;        /* 8-bit  */
typedef unsigned short _u16;      /* 16-bit */
typedef unsigned int _u32;        /* 32-bit */
typedef unsigned long long _u64;  /* 64-bit */
99 #endif /* __IA64_DEFNS_H__ */