27
27
28
28
#include <threading/threading_atomic.h>
29
29
30
+ #if defined(__THREAD_SANITIZER__ )
31
+ #include <threading/threading_mutex.h>
32
+ #endif
33
+
30
34
#ifdef __cplusplus
31
35
extern "C" {
32
36
#endif
@@ -44,7 +48,12 @@ extern "C" {
44
48
45
49
/* Reference counter storage.
 * Under ThreadSanitizer builds the lock-free atomic counter is replaced by a
 * plain integer guarded by a mutex, so TSan can reason about the accesses
 * (NOTE(review): __THREAD_SANITIZER__ is presumably defined by the build
 * system when TSan is enabled — confirm against the toolchain flags). */
struct threading_atomic_ref_count_type
{
#if defined(__THREAD_SANITIZER__)
	uintmax_t count;               /* counter value; every access goes through m */
	struct threading_mutex_type m; /* protects count in sanitizer builds */
#else
	atomic_uintmax_t count; /* lock-free counter for normal builds */
#endif
};
49
58
50
59
/* -- Type Definitions -- */
@@ -55,28 +64,68 @@ typedef struct threading_atomic_ref_count_type *threading_atomic_ref_count;
55
64
56
65
/* Store @v into the counter of @ref.
 * Sanitizer builds copy the value under the mutex via threading_mutex_store;
 * normal builds use a (sequentially consistent) atomic_store. */
inline void threading_atomic_ref_count_store(threading_atomic_ref_count ref, uintmax_t v)
{
#if defined(__THREAD_SANITIZER__)
	/* mutex-guarded memcpy-style write of sizeof(uintmax_t) bytes */
	threading_mutex_store(&ref->m, &ref->count, &v, sizeof(uintmax_t));
#else
	atomic_store(&ref->count, v);
#endif
}
73
+
74
/* Initialize @ref to THREADING_ATOMIC_REF_COUNT_MIN.
 * Sanitizer builds must create the mutex first, then perform the initial
 * store through it; normal builds just delegate to the plain atomic store.
 * NOTE(review): the return value of threading_mutex_initialize is ignored —
 * confirm it cannot fail, or that failure is acceptable here. */
inline void threading_atomic_ref_count_initialize(threading_atomic_ref_count ref)
{
#if defined(__THREAD_SANITIZER__)
	uintmax_t init = THREADING_ATOMIC_REF_COUNT_MIN;

	/* the mutex must exist before the first guarded access to count */
	threading_mutex_initialize(&ref->m);

	threading_mutex_store(&ref->m, &ref->count, &init, sizeof(uintmax_t));
#else
	threading_atomic_ref_count_store(ref, THREADING_ATOMIC_REF_COUNT_MIN);
#endif
}
60
86
61
87
/* Return the current counter value of @ref.
 * Sanitizer builds copy the value out under the mutex; normal builds use a
 * relaxed atomic load (no ordering guarantee — callers only get a snapshot). */
inline uintmax_t threading_atomic_ref_count_load(threading_atomic_ref_count ref)
{
#if defined(__THREAD_SANITIZER__)
	uintmax_t result = 0;

	/* mutex-guarded read: copies count into result */
	threading_mutex_store(&ref->m, &result, &ref->count, sizeof(uintmax_t));

	return result;
#else
	return atomic_load_explicit(&ref->count, memory_order_relaxed);
#endif
}
65
99
66
100
/* Increment the counter of @ref.
 * Returns 0 on success, 1 when the counter is already saturated at
 * THREADING_ATOMIC_REF_COUNT_MAX (the count is left unchanged).
 *
 * Fix: the ThreadSanitizer path previously incremented unconditionally and
 * could never report saturation, so sanitizer builds diverged from the atomic
 * path's contract (and could overflow past MAX). The MAX check is now
 * performed under the mutex, which also makes check-and-increment atomic. */
inline int threading_atomic_ref_count_increment(threading_atomic_ref_count ref)
{
#if defined(__THREAD_SANITIZER__)
	int result = 0;

	threading_mutex_lock(&ref->m);
	{
		if (ref->count == THREADING_ATOMIC_REF_COUNT_MAX)
		{
			result = 1; /* saturated: refuse to increment, mirror atomic path */
		}
		else
		{
			++ref->count;
		}
	}
	threading_mutex_unlock(&ref->m);

	return result;
#else
	/* NOTE: the load and the fetch_add are not one atomic step, so under
	 * contention the counter can race past MAX; preserved as-is from the
	 * original atomic implementation (relaxed ordering throughout). */
	if (atomic_load_explicit(&ref->count, memory_order_relaxed) == THREADING_ATOMIC_REF_COUNT_MAX)
	{
		return 1;
	}

	atomic_fetch_add_explicit(&ref->count, 1, memory_order_relaxed);

	return 0;
#endif
}
77
119
78
120
inline int threading_atomic_ref_count_decrement (threading_atomic_ref_count ref )
79
121
{
122
+ #if defined(__THREAD_SANITIZER__ )
123
+ threading_mutex_lock (& ref -> m );
124
+ {
125
+ -- ref -> count ;
126
+ }
127
+ threading_mutex_unlock (& ref -> m );
128
+ #else
80
129
if (atomic_load_explicit (& ref -> count , memory_order_relaxed ) == THREADING_ATOMIC_REF_COUNT_MIN )
81
130
{
82
131
return 1 ;
@@ -88,10 +137,20 @@ inline int threading_atomic_ref_count_decrement(threading_atomic_ref_count ref)
88
137
{
89
138
atomic_thread_fence (memory_order_acquire );
90
139
}
140
+ #endif
91
141
92
142
return 0 ;
93
143
}
94
144
145
/* Release resources owned by @ref.
 * Only sanitizer builds own a mutex to destroy; normal builds have nothing to
 * free, so the parameter is deliberately voided to silence unused warnings. */
inline void threading_atomic_ref_count_destroy(threading_atomic_ref_count ref)
{
#if defined(__THREAD_SANITIZER__)
	threading_mutex_destroy(&ref->m);
#else
	(void)ref;
#endif
}
153
+
95
154
#ifdef __cplusplus
96
155
}
97
156
#endif
0 commit comments