@@ -132,6 +132,15 @@ __asm__(".type " # name_ ", @function\n" \
132
132
return ret; \
133
133
}
134
134
135
/* Expands to __atomic_add_fetch_<n>() for an n-byte 'type': atomically
 * performs *ptr += value inside a critical section and returns the NEW
 * value.  'memorder' is accepted for signature compatibility with the GCC
 * __atomic builtins but is not consulted by the body.
 * Fix: no space between the macro name and '(' — with the space this would
 * be an object-like macro and the expansion would be garbage (C11 6.10.3). */
#define ADD_FETCH(n, type) type __atomic_add_fetch_ ## n (type* ptr, type value, int memorder) \
{ \
    unsigned state = _ATOMIC_ENTER_CRITICAL(); \
    type ret = *ptr + value; \
    *ptr = ret; \
    _ATOMIC_EXIT_CRITICAL(state); \
    return ret; \
}
135
144
#define FETCH_SUB (n , type ) type __atomic_fetch_sub_ ## n (type* ptr, type value, int memorder) \
136
145
{ \
137
146
unsigned state = _ATOMIC_ENTER_CRITICAL(); \
@@ -141,6 +150,15 @@ __asm__(".type " # name_ ", @function\n" \
141
150
return ret; \
142
151
}
143
152
153
/* Expands to __atomic_sub_fetch_<n>() for an n-byte 'type': atomically
 * performs *ptr -= value inside a critical section and returns the NEW
 * value.  'memorder' is accepted for signature compatibility but is not
 * consulted by the body.
 * Fix: removed the space between the macro name and '(' so this defines a
 * function-like macro (C11 6.10.3). */
#define SUB_FETCH(n, type) type __atomic_sub_fetch_ ## n (type* ptr, type value, int memorder) \
{ \
    unsigned state = _ATOMIC_ENTER_CRITICAL(); \
    type ret = *ptr - value; \
    *ptr = ret; \
    _ATOMIC_EXIT_CRITICAL(state); \
    return ret; \
}
144
162
#define FETCH_AND (n , type ) type __atomic_fetch_and_ ## n (type* ptr, type value, int memorder) \
145
163
{ \
146
164
unsigned state = _ATOMIC_ENTER_CRITICAL(); \
@@ -150,6 +168,15 @@ __asm__(".type " # name_ ", @function\n" \
150
168
return ret; \
151
169
}
152
170
171
/* Expands to __atomic_and_fetch_<n>() for an n-byte 'type': atomically
 * performs *ptr &= value inside a critical section and returns the NEW
 * value.  'memorder' is accepted for signature compatibility but is not
 * consulted by the body.
 * Fix: removed the space between the macro name and '(' so this defines a
 * function-like macro (C11 6.10.3). */
#define AND_FETCH(n, type) type __atomic_and_fetch_ ## n (type* ptr, type value, int memorder) \
{ \
    unsigned state = _ATOMIC_ENTER_CRITICAL(); \
    type ret = *ptr & value; \
    *ptr = ret; \
    _ATOMIC_EXIT_CRITICAL(state); \
    return ret; \
}
153
180
#define FETCH_OR (n , type ) type __atomic_fetch_or_ ## n (type* ptr, type value, int memorder) \
154
181
{ \
155
182
unsigned state = _ATOMIC_ENTER_CRITICAL(); \
@@ -159,6 +186,15 @@ __asm__(".type " # name_ ", @function\n" \
159
186
return ret; \
160
187
}
161
188
189
/* Expands to __atomic_or_fetch_<n>() for an n-byte 'type': atomically
 * performs *ptr |= value inside a critical section and returns the NEW
 * value.  'memorder' is accepted for signature compatibility but is not
 * consulted by the body.
 * Fix: removed the space between the macro name and '(' so this defines a
 * function-like macro (C11 6.10.3). */
#define OR_FETCH(n, type) type __atomic_or_fetch_ ## n (type* ptr, type value, int memorder) \
{ \
    unsigned state = _ATOMIC_ENTER_CRITICAL(); \
    type ret = *ptr | value; \
    *ptr = ret; \
    _ATOMIC_EXIT_CRITICAL(state); \
    return ret; \
}
162
198
#define FETCH_XOR (n , type ) type __atomic_fetch_xor_ ## n (type* ptr, type value, int memorder) \
163
199
{ \
164
200
unsigned state = _ATOMIC_ENTER_CRITICAL(); \
@@ -168,13 +204,45 @@ __asm__(".type " # name_ ", @function\n" \
168
204
return ret; \
169
205
}
170
206
207
/* Expands to __atomic_xor_fetch_<n>() for an n-byte 'type': atomically
 * performs *ptr ^= value inside a critical section and returns the NEW
 * value.  'memorder' is accepted for signature compatibility but is not
 * consulted by the body.
 * Fix: removed the space between the macro name and '(' so this defines a
 * function-like macro (C11 6.10.3). */
#define XOR_FETCH(n, type) type __atomic_xor_fetch_ ## n (type* ptr, type value, int memorder) \
{ \
    unsigned state = _ATOMIC_ENTER_CRITICAL(); \
    type ret = *ptr ^ value; \
    *ptr = ret; \
    _ATOMIC_EXIT_CRITICAL(state); \
    return ret; \
}
216
/* Expands to __atomic_fetch_nand_<n>() for an n-byte 'type': atomically
 * replaces *ptr with ~(*ptr & value) and returns the value *ptr held BEFORE
 * the update, matching the GCC builtin's fetch-then-op contract.
 * 'memorder' is accepted for signature compatibility but is not consulted.
 * Fix: removed the space between the macro name and '(' so this defines a
 * function-like macro (C11 6.10.3). */
#define FETCH_NAND(n, type) type __atomic_fetch_nand_ ## n (type* ptr, type value, int memorder) \
{ \
    unsigned state = _ATOMIC_ENTER_CRITICAL(); \
    type ret = *ptr; \
    *ptr = ~(*ptr & value); \
    _ATOMIC_EXIT_CRITICAL(state); \
    return ret; \
}
225
/* Expands to __atomic_nand_fetch_<n>() for an n-byte 'type': atomically
 * replaces *ptr with ~(*ptr & value) and returns the NEW value, matching
 * the GCC builtin's op-then-fetch contract.  'memorder' is accepted for
 * signature compatibility but is not consulted by the body.
 * Fix: removed the space between the macro name and '(' so this defines a
 * function-like macro (C11 6.10.3). */
#define NAND_FETCH(n, type) type __atomic_nand_fetch_ ## n (type* ptr, type value, int memorder) \
{ \
    unsigned state = _ATOMIC_ENTER_CRITICAL(); \
    type ret = ~(*ptr & value); \
    *ptr = ret; \
    _ATOMIC_EXIT_CRITICAL(state); \
    return ret; \
}
171
233
172
234
/* Expands to the legacy __sync_fetch_and_<op>_<n>() wrapper: forwards to
 * the matching __atomic_fetch_<op>_<n>() with sequentially consistent
 * ordering (the __sync builtins have no weaker modes), then emits the
 * Clang alias via CLANG_DECLARE_ALIAS.
 * Fix: removed the space between the macro name and '(' so this defines a
 * function-like macro (C11 6.10.3). */
#define SYNC_FETCH_OP(op, n, type) type CLANG_ATOMIC_SUFFIX(__sync_fetch_and_ ## op ## _ ## n) (type* ptr, type value) \
{ \
    return __atomic_fetch_ ## op ## _ ## n (ptr, value, __ATOMIC_SEQ_CST); \
} \
CLANG_DECLARE_ALIAS( __sync_fetch_and_ ## op ## _ ## n )
177
239
240
/* Expands to the legacy __sync_<op>_and_fetch_<n>() wrapper: forwards to
 * the matching __atomic_<op>_fetch_<n>() with sequentially consistent
 * ordering (the __sync builtins have no weaker modes), then emits the
 * Clang alias via CLANG_DECLARE_ALIAS.
 * Fix: removed the space between the macro name and '(' so this defines a
 * function-like macro (C11 6.10.3). */
#define SYNC_OP_FETCH(op, n, type) type CLANG_ATOMIC_SUFFIX(__sync_ ## op ## _and_fetch_ ## n) (type* ptr, type value) \
{ \
    return __atomic_ ## op ## _fetch_ ## n (ptr, value, __ATOMIC_SEQ_CST); \
} \
CLANG_DECLARE_ALIAS( __sync_ ## op ## _and_fetch_ ## n )
178
246
#define SYNC_BOOL_CMP_EXCHANGE (n , type ) bool CLANG_ATOMIC_SUFFIX(__sync_bool_compare_and_swap_ ## n) (type *ptr, type oldval, type newval) \
179
247
{ \
180
248
bool ret = false; \
@@ -233,42 +301,98 @@ FETCH_ADD(1, uint8_t)
233
301
/* Instantiate the 1-, 2- and 4-byte variants of each atomic helper.
 * NOTE(review): FETCH_ADD(1, uint8_t) is emitted immediately before this
 * block (visible in the surrounding context), so the list starts at 2. */
FETCH_ADD(2, uint16_t)
FETCH_ADD(4, uint32_t)

ADD_FETCH(1, uint8_t)
ADD_FETCH(2, uint16_t)
ADD_FETCH(4, uint32_t)

FETCH_SUB(1, uint8_t)
FETCH_SUB(2, uint16_t)
FETCH_SUB(4, uint32_t)

SUB_FETCH(1, uint8_t)
SUB_FETCH(2, uint16_t)
SUB_FETCH(4, uint32_t)

FETCH_AND(1, uint8_t)
FETCH_AND(2, uint16_t)
FETCH_AND(4, uint32_t)

AND_FETCH(1, uint8_t)
AND_FETCH(2, uint16_t)
AND_FETCH(4, uint32_t)

FETCH_OR(1, uint8_t)
FETCH_OR(2, uint16_t)
FETCH_OR(4, uint32_t)

OR_FETCH(1, uint8_t)
OR_FETCH(2, uint16_t)
OR_FETCH(4, uint32_t)

FETCH_XOR(1, uint8_t)
FETCH_XOR(2, uint16_t)
FETCH_XOR(4, uint32_t)

XOR_FETCH(1, uint8_t)
XOR_FETCH(2, uint16_t)
XOR_FETCH(4, uint32_t)

FETCH_NAND(1, uint8_t)
FETCH_NAND(2, uint16_t)
FETCH_NAND(4, uint32_t)

NAND_FETCH(1, uint8_t)
NAND_FETCH(2, uint16_t)
NAND_FETCH(4, uint32_t)

/* Legacy __sync_* wrappers over the __atomic_* implementations above. */
SYNC_FETCH_OP(add, 1, uint8_t)
SYNC_FETCH_OP(add, 2, uint16_t)
SYNC_FETCH_OP(add, 4, uint32_t)

SYNC_OP_FETCH(add, 1, uint8_t)
SYNC_OP_FETCH(add, 2, uint16_t)
SYNC_OP_FETCH(add, 4, uint32_t)

SYNC_FETCH_OP(sub, 1, uint8_t)
SYNC_FETCH_OP(sub, 2, uint16_t)
SYNC_FETCH_OP(sub, 4, uint32_t)

SYNC_OP_FETCH(sub, 1, uint8_t)
SYNC_OP_FETCH(sub, 2, uint16_t)
SYNC_OP_FETCH(sub, 4, uint32_t)

SYNC_FETCH_OP(and, 1, uint8_t)
SYNC_FETCH_OP(and, 2, uint16_t)
SYNC_FETCH_OP(and, 4, uint32_t)

SYNC_OP_FETCH(and, 1, uint8_t)
SYNC_OP_FETCH(and, 2, uint16_t)
SYNC_OP_FETCH(and, 4, uint32_t)

SYNC_FETCH_OP(or, 1, uint8_t)
SYNC_FETCH_OP(or, 2, uint16_t)
SYNC_FETCH_OP(or, 4, uint32_t)

SYNC_OP_FETCH(or, 1, uint8_t)
SYNC_OP_FETCH(or, 2, uint16_t)
SYNC_OP_FETCH(or, 4, uint32_t)

SYNC_FETCH_OP(xor, 1, uint8_t)
SYNC_FETCH_OP(xor, 2, uint16_t)
SYNC_FETCH_OP(xor, 4, uint32_t)

SYNC_OP_FETCH(xor, 1, uint8_t)
SYNC_OP_FETCH(xor, 2, uint16_t)
SYNC_OP_FETCH(xor, 4, uint32_t)

SYNC_FETCH_OP(nand, 1, uint8_t)
SYNC_FETCH_OP(nand, 2, uint16_t)
SYNC_FETCH_OP(nand, 4, uint32_t)

SYNC_OP_FETCH(nand, 1, uint8_t)
SYNC_OP_FETCH(nand, 2, uint16_t)
SYNC_OP_FETCH(nand, 4, uint32_t)

SYNC_BOOL_CMP_EXCHANGE(1, uint8_t)
SYNC_BOOL_CMP_EXCHANGE(2, uint16_t)
SYNC_BOOL_CMP_EXCHANGE(4, uint32_t)
@@ -313,6 +437,20 @@ FETCH_OR(8, uint64_t)
313
437
314
438
/* 64-bit (8-byte) instantiations of the atomic helpers. */
FETCH_XOR(8, uint64_t)

FETCH_NAND(8, uint64_t)

ADD_FETCH(8, uint64_t)

SUB_FETCH(8, uint64_t)

AND_FETCH(8, uint64_t)

OR_FETCH(8, uint64_t)

XOR_FETCH(8, uint64_t)

NAND_FETCH(8, uint64_t)

SYNC_FETCH_OP(add, 8, uint64_t)

SYNC_FETCH_OP(sub, 8, uint64_t)
@@ -323,6 +461,20 @@ SYNC_FETCH_OP(or, 8, uint64_t)
323
461
324
462
/* Remaining 64-bit legacy __sync_* wrappers and compare-exchange helpers. */
SYNC_FETCH_OP(xor, 8, uint64_t)

SYNC_FETCH_OP(nand, 8, uint64_t)

SYNC_OP_FETCH(add, 8, uint64_t)

SYNC_OP_FETCH(sub, 8, uint64_t)

SYNC_OP_FETCH(and, 8, uint64_t)

SYNC_OP_FETCH(or, 8, uint64_t)

SYNC_OP_FETCH(xor, 8, uint64_t)

SYNC_OP_FETCH(nand, 8, uint64_t)

SYNC_BOOL_CMP_EXCHANGE(8, uint64_t)

SYNC_VAL_CMP_EXCHANGE(8, uint64_t)
0 commit comments