0
|
1
|
|
2 /*
|
|
3 * Copyright (C) Igor Sysoev
|
|
4 */
|
|
5
|
|
6
|
|
7 #ifndef _NGX_ATOMIC_H_INCLUDED_
|
|
8 #define _NGX_ATOMIC_H_INCLUDED_
|
|
9
|
|
10
|
|
11 #include <ngx_config.h>
|
|
12 #include <ngx_core.h>
|
|
13
|
|
14
|
|
15 #if ( __i386__ || __amd64__ )
|
|
16
|
|
17 #define NGX_HAVE_ATOMIC_OPS 1
|
|
18
|
|
19 typedef volatile uint32_t ngx_atomic_t;
|
|
20
|
|
21 #if (NGX_SMP)
|
|
22 #define NGX_SMP_LOCK "lock;"
|
|
23 #else
|
|
24 #define NGX_SMP_LOCK
|
|
25 #endif
|
|
26
|
|
27
|
|
28 static ngx_inline uint32_t ngx_atomic_inc(ngx_atomic_t *value)
|
|
29 {
|
|
30 uint32_t old;
|
|
31
|
|
32 __asm__ volatile (
|
|
33
|
|
34 NGX_SMP_LOCK
|
|
35 " xaddl %0, %2; "
|
|
36 " incl %0; "
|
|
37
|
|
38 : "=q" (old) : "0" (1), "m" (*value));
|
|
39
|
|
40 return old;
|
|
41 }
|
|
42
|
|
43
|
|
44 #if 0
|
|
45
|
|
46 static ngx_inline uint32_t ngx_atomic_dec(ngx_atomic_t *value)
|
|
47 {
|
|
48 uint32_t old;
|
|
49
|
|
50 __asm__ volatile (
|
|
51
|
|
52 NGX_SMP_LOCK
|
|
53 " xaddl %0, %1; "
|
|
54 " decl %0; "
|
|
55
|
|
56 : "=q" (old) : "0" (-1), "m" (*value));
|
|
57
|
|
58 return old;
|
|
59 }
|
|
60
|
|
61 #endif
|
|
62
|
|
63
|
|
64 static ngx_inline uint32_t ngx_atomic_cmp_set(ngx_atomic_t *lock,
|
|
65 ngx_atomic_t old,
|
|
66 ngx_atomic_t set)
|
|
67 {
|
|
68 uint32_t res;
|
|
69
|
|
70 __asm__ volatile (
|
|
71
|
|
72 NGX_SMP_LOCK
|
|
73 " cmpxchgl %3, %1; "
|
|
74 " setz %%al; "
|
|
75 " movzbl %%al, %0; "
|
|
76
|
|
77 : "=a" (res) : "m" (*lock), "a" (old), "q" (set));
|
|
78
|
|
79 return res;
|
|
80 }
|
|
81
|
|
82
|
|
83 #elif ( __sparc__ )
|
|
84
|
|
85 #define NGX_HAVE_ATOMIC_OPS 1
|
|
86
|
|
87 typedef volatile uint32_t ngx_atomic_t;
|
|
88
|
|
89
|
|
90 static ngx_inline uint32_t ngx_atomic_inc(ngx_atomic_t *value)
|
|
91 {
|
|
92 uint32_t old, new, res;
|
|
93
|
|
94 old = *value;
|
|
95
|
|
96 for ( ;; ) {
|
|
97
|
|
98 new = old + 1;
|
|
99 res = new;
|
|
100
|
|
101 __asm__ volatile (
|
|
102
|
|
103 "casa [%1] 0x80, %2, %0"
|
|
104
|
|
105 : "+r" (res) : "r" (value), "r" (old));
|
|
106
|
|
107 if (res == old) {
|
|
108 return new;
|
|
109 }
|
|
110
|
|
111 old = res;
|
|
112 }
|
|
113 }
|
|
114
|
|
115
|
|
116 static ngx_inline uint32_t ngx_atomic_cmp_set(ngx_atomic_t *lock,
|
|
117 ngx_atomic_t old,
|
|
118 ngx_atomic_t set)
|
|
119 {
|
|
120 uint32_t res = (uint32_t) set;
|
|
121
|
|
122 __asm__ volatile (
|
|
123
|
|
124 "casa [%1] 0x80, %2, %0"
|
|
125
|
|
126 : "+r" (res) : "r" (lock), "r" (old));
|
|
127
|
|
128 return (res == old);
|
|
129 }
|
|
130
|
|
131 #else
|
|
132
|
|
133 #define NGX_HAVE_ATOMIC_OPS 0
|
|
134
|
|
135 typedef volatile uint32_t ngx_atomic_t;
|
|
136
|
|
/*
 * Non-atomic fallback increment; returns the new value, like the asm
 * versions.  The original expansion ended with a semicolon, which broke
 * "if (...) ngx_atomic_inc(x); else ..." and made the macro unusable in
 * expressions; the whole expansion is now parenthesized instead.
 */
#define ngx_atomic_inc(x)  (++(*(x)))
|
|
138
|
|
139 static ngx_inline uint32_t ngx_atomic_cmp_set(ngx_atomic_t *lock,
|
|
140 ngx_atomic_t old,
|
|
141 ngx_atomic_t set)
|
|
142 {
|
|
143 return 1;
|
|
144 }
|
|
145
|
|
146 #endif
|
|
147
|
|
148
|
|
149 void ngx_spinlock(ngx_atomic_t *lock, ngx_uint_t spin);
|
|
150
|
|
151 #define ngx_trylock(lock) (*(lock) == 0 && ngx_atomic_cmp_set(lock, 0, 1))
|
|
152 #define ngx_unlock(lock) *(lock) = 0
|
|
153
|
|
154
|
|
155 #endif /* _NGX_ATOMIC_H_INCLUDED_ */
|