00001
00002
00012
00013
00014
00015
00016
00017
00018
00019
00020
00021
00022
00023
00024
00025
00026
00027
00028 #ifndef __L4UTIL__INCLUDE__ARCH_AMD64__ATOMIC_ARCH_H__
00029 #define __L4UTIL__INCLUDE__ARCH_AMD64__ATOMIC_ARCH_H__
00030
00031
00032
00033
00034
00035 EXTERN_C_BEGIN
00036
/* Prototypes for the 64-bit atomic helpers implemented further down in this
 * file.  On amd64 these also back the machine-word (l4_umword_t) wrappers.
 * All operate on a volatile destination; see the definitions below for the
 * exact atomicity guarantees of each. */

/* Atomically exchange *dest with val; returns the previous value. */
L4_INLINE l4_uint64_t
l4util_xchg64(volatile l4_uint64_t * dest, l4_uint64_t val);

/* Compare-and-swap returning the previous value of *dest. */
L4_INLINE l4_uint64_t
l4util_cmpxchg64_res(volatile l4_uint64_t *dest,
                     l4_uint64_t cmp_val, l4_uint64_t new_val);
/* Read-modify-write operations without a result... */
L4_INLINE void
l4util_add64(volatile l4_uint64_t *dest, l4_uint64_t val);
L4_INLINE void
l4util_sub64(volatile l4_uint64_t *dest, l4_uint64_t val);
L4_INLINE void
l4util_and64(volatile l4_uint64_t *dest, l4_uint64_t val);
L4_INLINE void
l4util_or64(volatile l4_uint64_t *dest, l4_uint64_t val);
/* ...and the same operations returning the new value. */
L4_INLINE l4_uint64_t
l4util_add64_res(volatile l4_uint64_t *dest, l4_uint64_t val);
L4_INLINE l4_uint64_t
l4util_sub64_res(volatile l4_uint64_t *dest, l4_uint64_t val);
L4_INLINE l4_uint64_t
l4util_and64_res(volatile l4_uint64_t *dest, l4_uint64_t val);
L4_INLINE l4_uint64_t
l4util_or64_res(volatile l4_uint64_t *dest, l4_uint64_t val);
/* Increment/decrement, with and without returning the new value. */
L4_INLINE void
l4util_inc64(volatile l4_uint64_t *dest);
L4_INLINE void
l4util_dec64(volatile l4_uint64_t *dest);
L4_INLINE l4_uint64_t
l4util_inc64_res(volatile l4_uint64_t *dest);
L4_INLINE l4_uint64_t
l4util_dec64_res(volatile l4_uint64_t *dest);
00067
00068 EXTERN_C_END
00069
00070
00071
00072
00073
00074
#define __L4UTIL_ATOMIC_HAVE_ARCH_CMPXCHG64
/**
 * Compare-and-swap on a 64-bit value.
 *
 * If *dest equals cmp_val, store new_val into *dest; otherwise leave *dest
 * unchanged.  cmpxchg compares RAX (here tied to cmp_val via the "0"
 * constraint) with the memory operand and, on mismatch, loads the current
 * *dest into RAX — so tmp ends up holding the old value either way.
 *
 * \param dest     destination operand
 * \param cmp_val  expected value of *dest
 * \param new_val  value to store on match
 * \return nonzero if the swap happened (old value matched cmp_val), 0 if not.
 *
 * NOTE(review): no `lock` prefix here (or anywhere in this file), so the
 * operation is atomic w.r.t. the local CPU only — confirm this matches the
 * intended SMP semantics.
 */
L4_INLINE int
l4util_cmpxchg64(volatile l4_uint64_t * dest,
                 l4_uint64_t cmp_val, l4_uint64_t new_val)
{
  l4_uint64_t tmp;

  __asm__ __volatile__
    (
     "cmpxchgq %1, %3 \n\t"
     :
     "=a" (tmp)          /* old value of *dest, out via RAX */
     :
     "r" (new_val),
     "0" (cmp_val),      /* cmp_val in via RAX (same register as output 0) */
     "m" (*dest)
     :
     "memory", "cc"      /* cmpxchg sets ZF */
    );

  return tmp == cmp_val;
}
00097
00098
#define __L4UTIL_ATOMIC_HAVE_ARCH_CMPXCHG32
/**
 * Compare-and-swap on a 32-bit value.
 *
 * Same protocol as l4util_cmpxchg64(): EAX carries cmp_val in and the old
 * *dest value out; *dest is only written when it matched cmp_val.
 *
 * \return nonzero on successful swap, 0 otherwise.
 */
L4_INLINE int
l4util_cmpxchg32(volatile l4_uint32_t * dest,
                 l4_uint32_t cmp_val, l4_uint32_t new_val)
{
  l4_uint32_t tmp;

  __asm__ __volatile__
    (
     "cmpxchgl %1, %3 \n\t"
     :
     "=a" (tmp)          /* old value of *dest, out via EAX */
     :
     "r" (new_val),
     "0" (cmp_val),      /* cmp_val in via EAX */
     "m" (*dest)
     :
     "memory", "cc"
    );

  return tmp == cmp_val;
}
00121
00122
00123 #define __L4UTIL_ATOMIC_HAVE_ARCH_CMPXCHG16
00124 L4_INLINE int
00125 l4util_cmpxchg16(volatile l4_uint16_t * dest,
00126 l4_uint16_t cmp_val, l4_uint16_t new_val)
00127 {
00128 l4_uint16_t tmp;
00129
00130 __asm__ __volatile__
00131 (
00132 "cmpxchgw %1, %3 \n\t"
00133 :
00134 "=a" (tmp)
00135 :
00136 "c" (new_val),
00137 "0" (cmp_val),
00138 "m" (*dest)
00139 :
00140 "memory", "cc"
00141 );
00142
00143 return tmp == cmp_val;
00144 }
00145
00146
00147 #define __L4UTIL_ATOMIC_HAVE_ARCH_CMPXCHG8
00148 L4_INLINE int
00149 l4util_cmpxchg8(volatile l4_uint8_t * dest,
00150 l4_uint8_t cmp_val, l4_uint8_t new_val)
00151 {
00152 l4_uint8_t tmp;
00153
00154 __asm__ __volatile__
00155 (
00156 "cmpxchgb %1, %3 \n\t"
00157 :
00158 "=a" (tmp)
00159 :
00160 "c" (new_val),
00161 "0" (cmp_val),
00162 "m" (*dest)
00163 :
00164 "memory", "cc"
00165 );
00166
00167 return tmp == cmp_val;
00168 }
00169
00170 #define __L4UTIL_ATOMIC_HAVE_ARCH_CMPXCHG
00171 L4_INLINE int
00172 l4util_cmpxchg(volatile l4_umword_t * dest,
00173 l4_umword_t cmp_val, l4_umword_t new_val)
00174 {
00175 return l4util_cmpxchg64((volatile l4_uint64_t *)dest,
00176 (l4_uint64_t)cmp_val, (l4_uint64_t)new_val);
00177 }
00178
00179
#define __L4UTIL_ATOMIC_HAVE_ARCH_XCHG64
/**
 * Atomically exchange a 64-bit value.
 *
 * \param dest  destination operand
 * \param val   new value for *dest
 * \return old value of *dest.
 *
 * x86 `xchg` with a memory operand locks the bus implicitly, so this is
 * SMP-atomic even without a `lock` prefix.
 */
L4_INLINE l4_uint64_t
l4util_xchg64(volatile l4_uint64_t * dest, l4_uint64_t val)
{
  __asm__ __volatile__
    (
     "xchg %0, %1 \n\t"
     :
     "=r" (val)                  /* old *dest comes back in val's register */
     :
     "m" (*dest), "0" (val)
     :
     "memory"
    );

  return val;
}
00197
00198
#define __L4UTIL_ATOMIC_HAVE_ARCH_XCHG32
/**
 * Atomically exchange a 32-bit value.
 *
 * \param dest  destination operand
 * \param val   new value for *dest
 * \return old value of *dest.
 *
 * Implicitly locked by the CPU (memory-operand `xchg`).
 */
L4_INLINE l4_uint32_t
l4util_xchg32(volatile l4_uint32_t * dest, l4_uint32_t val)
{
  __asm__ __volatile__
    (
     "xchg %0, %1 \n\t"
     :
     "=r" (val)                  /* old *dest comes back in val's register */
     :
     "m" (*dest), "0" (val)
     :
     "memory"
    );

  return val;
}
00216
00217
#define __L4UTIL_ATOMIC_HAVE_ARCH_XCHG16
/**
 * Atomically exchange a 16-bit value.
 *
 * \param dest  destination operand
 * \param val   new value for *dest
 * \return old value of *dest.
 *
 * Implicitly locked by the CPU (memory-operand `xchg`).
 */
L4_INLINE l4_uint16_t
l4util_xchg16(volatile l4_uint16_t * dest, l4_uint16_t val)
{
  __asm__ __volatile__
    (
     "xchg %w0, %1 \n\t"         /* %w0 forces the 16-bit register name */
     :
     "=r" (val)
     :
     "m" (*dest), "0" (val)
     :
     "memory"
    );

  return val;
}
00235
00236
#define __L4UTIL_ATOMIC_HAVE_ARCH_XCHG8
/**
 * Atomically exchange an 8-bit value.
 *
 * \param dest  destination operand
 * \param val   new value for *dest
 * \return old value of *dest.
 *
 * Implicitly locked by the CPU (memory-operand `xchg`).
 */
L4_INLINE l4_uint8_t
l4util_xchg8(volatile l4_uint8_t * dest, l4_uint8_t val)
{
  __asm__ __volatile__
    (
     "xchg %b0, %1 \n\t"         /* %b0 forces the 8-bit register name */
     :
     "=r" (val)
     :
     "m" (*dest), "0" (val)
     :
     "memory"
    );

  return val;
}
00254
00255
00256 #define __L4UTIL_ATOMIC_HAVE_ARCH_XCHG
00257 L4_INLINE l4_umword_t
00258 l4util_xchg(volatile l4_umword_t * dest, l4_umword_t val)
00259 {
00260 return l4util_xchg64((volatile l4_uint64_t *)dest, (l4_uint64_t)val);
00261 }
00262
/*
 * Instantiate the currently #defined l4util_genop() for all four operand
 * widths.  The first three arguments are the bit width, the instruction
 * size suffix and an (optional) operand-modifier letter; `args...` forwards
 * the caller's extra arguments.  l4util_genop() is redefined several times
 * below to generate the different operation families.
 */
#define l4util_gen_allop(args...) \
  l4util_genop( 8,"b", "", args) \
  l4util_genop(16,"w", "", args) \
  l4util_genop(32,"l", "", args) \
  l4util_genop(64,"q", "", args)
00268
00269 #define __L4UTIL_ATOMIC_HAVE_ARCH_CMPXCHG8_RES
00270 #define __L4UTIL_ATOMIC_HAVE_ARCH_CMPXCHG16_RES
00271 #define __L4UTIL_ATOMIC_HAVE_ARCH_CMPXCHG32_RES
00272 #define __L4UTIL_ATOMIC_HAVE_ARCH_CMPXCHG64_RES
00273
00274 #undef l4util_genop
00275 #define l4util_genop(bit, mod, op1, opname, opchar) \
00276 L4_INLINE l4_uint##bit##_t \
00277 l4util_##opname##bit##_res(volatile l4_uint##bit##_t* dest, \
00278 l4_uint##bit##_t cmp_val, \
00279 l4_uint##bit##_t new_val) \
00280 { \
00281 l4_uint##bit##_t old_val; \
00282 __asm__ __volatile__ \
00283 ( \
00284 #opname mod " %"op1"1,%3 \n\t" \
00285 : \
00286 "=a"(old_val) \
00287 : \
00288 "r"(new_val), "a"(cmp_val), "m" (*dest) \
00289 : \
00290 "memory" \
00291 ); \
00292 return old_val; \
00293 }
00294 l4util_gen_allop(cmpxchg,cmpxchg)
00295
00296 #define __L4UTIL_ATOMIC_HAVE_ARCH_CMPXCHG_RES
00297 L4_INLINE l4_umword_t
00298 l4util_cmpxchg_res(volatile l4_umword_t *dest,
00299 l4_umword_t cmp_val, l4_umword_t new_val)
00300 {
00301 return l4util_cmpxchg64_res((volatile l4_uint64_t *)dest,
00302 (l4_uint64_t)cmp_val, (l4_uint64_t)new_val);
00303 }
00304
00305 #define __L4UTIL_ATOMIC_HAVE_ARCH_ADD8
00306 #define __L4UTIL_ATOMIC_HAVE_ARCH_ADD16
00307 #define __L4UTIL_ATOMIC_HAVE_ARCH_ADD32
00308 #define __L4UTIL_ATOMIC_HAVE_ARCH_ADD64
00309 #define __L4UTIL_ATOMIC_HAVE_ARCH_SUB8
00310 #define __L4UTIL_ATOMIC_HAVE_ARCH_SUB16
00311 #define __L4UTIL_ATOMIC_HAVE_ARCH_SUB32
00312 #define __L4UTIL_ATOMIC_HAVE_ARCH_SUB64
00313 #define __L4UTIL_ATOMIC_HAVE_ARCH_AND8
00314 #define __L4UTIL_ATOMIC_HAVE_ARCH_AND16
00315 #define __L4UTIL_ATOMIC_HAVE_ARCH_AND32
00316 #define __L4UTIL_ATOMIC_HAVE_ARCH_AND64
00317 #define __L4UTIL_ATOMIC_HAVE_ARCH_OR8
00318 #define __L4UTIL_ATOMIC_HAVE_ARCH_OR16
00319 #define __L4UTIL_ATOMIC_HAVE_ARCH_OR32
00320 #define __L4UTIL_ATOMIC_HAVE_ARCH_OR64
00321
00322 #undef l4util_genop
00323 #define l4util_genop(bit, mod, op1, opname) \
00324 L4_INLINE void \
00325 l4util_##opname##bit(volatile l4_uint##bit##_t* dest, l4_uint##bit##_t val) \
00326 { \
00327 __asm__ __volatile__ \
00328 ( \
00329 #opname mod " %1,%0 \n\t" \
00330 : \
00331 : \
00332 "m" (*dest), "ir" (val) \
00333 : \
00334 "memory" \
00335 ); \
00336 }
00337 l4util_gen_allop(add)
00338 l4util_gen_allop(sub)
00339 l4util_gen_allop(and)
00340 l4util_gen_allop(or)
00341
00342 #define __L4UTIL_ATOMIC_HAVE_ARCH_ADD8_RES
00343 #define __L4UTIL_ATOMIC_HAVE_ARCH_ADD16_RES
00344 #define __L4UTIL_ATOMIC_HAVE_ARCH_ADD32_RES
00345 #define __L4UTIL_ATOMIC_HAVE_ARCH_ADD64_RES
00346 #define __L4UTIL_ATOMIC_HAVE_ARCH_SUB8_RES
00347 #define __L4UTIL_ATOMIC_HAVE_ARCH_SUB16_RES
00348 #define __L4UTIL_ATOMIC_HAVE_ARCH_SUB32_RES
00349 #define __L4UTIL_ATOMIC_HAVE_ARCH_SUB64_RES
00350 #define __L4UTIL_ATOMIC_HAVE_ARCH_AND8_RES
00351 #define __L4UTIL_ATOMIC_HAVE_ARCH_AND16_RES
00352 #define __L4UTIL_ATOMIC_HAVE_ARCH_AND32_RES
00353 #define __L4UTIL_ATOMIC_HAVE_ARCH_AND64_RES
00354 #define __L4UTIL_ATOMIC_HAVE_ARCH_OR8_RES
00355 #define __L4UTIL_ATOMIC_HAVE_ARCH_OR16_RES
00356 #define __L4UTIL_ATOMIC_HAVE_ARCH_OR32_RES
00357 #define __L4UTIL_ATOMIC_HAVE_ARCH_OR64_RES
00358
00359 #undef l4util_genop
00360 #define l4util_genop(bit, mod, op1, opname, opchar) \
00361 L4_INLINE l4_uint##bit##_t \
00362 l4util_##opname##bit##_res(volatile l4_uint##bit##_t* dest, \
00363 l4_uint##bit##_t val) \
00364 { \
00365 l4_uint##bit##_t res, old; \
00366 \
00367 do \
00368 { \
00369 old = *dest; \
00370 res = old opchar val; \
00371 } \
00372 while (!l4util_cmpxchg##bit(dest, old, res)); \
00373 \
00374 return res; \
00375 }
00376 l4util_gen_allop(add, +)
00377 l4util_gen_allop(sub, -)
00378 l4util_gen_allop(and, &)
00379 l4util_gen_allop(or, &&)
00380
00381 #define __L4UTIL_ATOMIC_HAVE_ARCH_INC8
00382 #define __L4UTIL_ATOMIC_HAVE_ARCH_INC16
00383 #define __L4UTIL_ATOMIC_HAVE_ARCH_INC32
00384 #define __L4UTIL_ATOMIC_HAVE_ARCH_INC64
00385 #define __L4UTIL_ATOMIC_HAVE_ARCH_DEC8
00386 #define __L4UTIL_ATOMIC_HAVE_ARCH_DEC16
00387 #define __L4UTIL_ATOMIC_HAVE_ARCH_DEC32
00388 #define __L4UTIL_ATOMIC_HAVE_ARCH_DEC64
00389
#undef l4util_genop
/*
 * Generate l4util_{inc,dec}{8,16,32,64}(): increment/decrement *dest with
 * no result.  As in the add/sub generator, *dest is an input operand and
 * the "memory" clobber covers the write.  `op1` is unused here.
 */
#define l4util_genop(bit, mod, op1, opname) \
L4_INLINE void \
l4util_##opname##bit(volatile l4_uint##bit##_t* dest) \
{ \
  __asm__ __volatile__ \
    ( \
     #opname mod " %0 \n\t" \
     : \
     : \
     "m" (*dest) \
     : \
     "memory" \
    ); \
}
l4util_gen_allop(inc)
l4util_gen_allop(dec)
00407
00408 #define __L4UTIL_ATOMIC_HAVE_ARCH_INC8_RES
00409 #define __L4UTIL_ATOMIC_HAVE_ARCH_INC16_RES
00410 #define __L4UTIL_ATOMIC_HAVE_ARCH_INC32_RES
00411 #define __L4UTIL_ATOMIC_HAVE_ARCH_INC64_RES
00412 #define __L4UTIL_ATOMIC_HAVE_ARCH_DEC8_RES
00413 #define __L4UTIL_ATOMIC_HAVE_ARCH_DEC16_RES
00414 #define __L4UTIL_ATOMIC_HAVE_ARCH_DEC32_RES
00415 #define __L4UTIL_ATOMIC_HAVE_ARCH_DEC64_RES
00416
#undef l4util_genop
/*
 * Generate l4util_{inc,dec}{8,16,32,64}_res(): increment/decrement
 * returning the NEW value, built as a compare-and-swap retry loop.
 *
 * Note the postfix-operator trick: `old = res opchar;` expands to e.g.
 * `old = res++;`, so `old` receives the value just read from *dest while
 * `res` becomes the updated value the CAS tries to install.  `mod` and
 * `op1` are unused by this generator.
 */
#define l4util_genop(bit, mod, op1, opname, opchar) \
L4_INLINE l4_uint##bit##_t \
l4util_##opname##bit##_res(volatile l4_uint##bit##_t* dest) \
{ \
  l4_uint##bit##_t res, old; \
  \
  do \
    { \
      res = *dest; \
      old = res opchar; \
    } \
  while (!l4util_cmpxchg##bit(dest, old, res)); \
  \
  return res; \
}
l4util_gen_allop(inc, ++)
l4util_gen_allop(dec, --)
00435
00436 #undef l4util_genop
00437 #undef l4util_gen_allop
00438
00439
00440 #define __L4UTIL_ATOMIC_HAVE_ARCH_ADD
00441 L4_INLINE void
00442 l4util_atomic_add(volatile long *dest, long val)
00443 {
00444 __asm__ __volatile__("addq %1, %0 \n"
00445 : "=m" (*dest)
00446 : "ri" (val), "m" (*dest));
00447 }
00448
#define __L4UTIL_ATOMIC_HAVE_ARCH_INC
/**
 * Increment *dest by one (CPU-local read-modify-write; no `lock` prefix,
 * consistent with the rest of this file).
 *
 * \param dest  destination operand
 */
L4_INLINE void
l4util_atomic_inc(volatile long *dest)
{
  /* Legacy RMW idiom: matching "=m" output and "m" input describe the
   * same location being read and written. */
  __asm__ __volatile__("incq %0"
                       : "=m" (*dest)
                       : "m" (*dest));
}
00457
00458 #endif