#ifndef MY_ATOMIC_INCLUDED
#define MY_ATOMIC_INCLUDED

/* Copyright (C) 2006 MySQL AB

   This program is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; version 2 of the License.

   This program is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program; if not, write to the Free Software
   Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA */

/*
  This header defines five atomic operations:

  my_atomic_add#(&var, what)
    add 'what' to *var, and return the old value of *var

  my_atomic_fas#(&var, what)
    'Fetch And Store'
    store 'what' in *var, and return the old value of *var

  my_atomic_cas#(&var, &old, new)
    'Compare And Swap'
    if *var is equal to *old, then store 'new' in *var, and return TRUE
    otherwise store *var in *old, and return FALSE

  my_atomic_load#(&var)
    return *var

  my_atomic_store#(&var, what)
    store 'what' in *var

  '#' is substituted by a size suffix - 8, 16, 32, 64, or ptr
  (e.g. my_atomic_add8, my_atomic_fas32, my_atomic_casptr).

  NOTE These operations are not always atomic, so they must always be
  enclosed in my_atomic_rwlock_rdlock(lock)/my_atomic_rwlock_rdunlock(lock)
  or my_atomic_rwlock_wrlock(lock)/my_atomic_rwlock_wrunlock(lock).
  Hint: if a code block makes intensive use of atomic ops, it makes sense
  to take/release the rwlock once for the whole block, not for every statement.

  On architectures where these operations are really atomic, rwlocks will
  be optimized away.
  8- and 16-bit atomics aren't implemented for windows (see generic-msvc.h),
  but can be added, if necessary.
*/

#ifndef my_atomic_rwlock_init

/* generic pointer type used by the size-suffix "ptr" variants below */
#define intptr         void *
/**
  Currently we don't support 8-bit and 16-bit operations.
  It can be added later if needed.
*/
#undef MY_ATOMIC_HAS_8_16

#ifndef MY_ATOMIC_MODE_RWLOCKS
/*
 * Attempt to do atomic ops without locks
 */
#include "atomic/nolock.h"
#endif

#ifndef MY_ATOMIC_NOLOCK
/* nolock.h was not able to generate even a CAS function, fall back */
#include "atomic/rwlock.h"
#endif

#ifndef MY_ATOMICS_MADE
/* define missing functions by using the already generated ones */
#ifndef make_atomic_add_body
/* fetch-and-add emulated with a CAS retry loop; leaves old value in v */
#define make_atomic_add_body(S)                                 \
  int ## S tmp=*a;                                              \
  while (!my_atomic_cas ## S(a, &tmp, tmp+v));                  \
  v=tmp;
#endif
#ifndef make_atomic_fas_body
/* fetch-and-store emulated with a CAS retry loop; leaves old value in v */
#define make_atomic_fas_body(S)                                 \
  int ## S tmp=*a;                                              \
  while (!my_atomic_cas ## S(a, &tmp, v));                      \
  v=tmp;
#endif
#ifndef make_atomic_load_body
/* atomic load emulated as CAS(a, &ret, ret): on failure CAS stores *a in ret */
#define make_atomic_load_body(S)                                \
  ret= 0; /* avoid compiler warning */                          \
  (void)(my_atomic_cas ## S(a, &ret, ret));
#endif
#ifndef make_atomic_store_body
/* atomic store emulated as fetch-and-store, discarding the old value */
#define make_atomic_store_body(S)                               \
  (void)(my_atomic_fas ## S (a, v));
#endif

/*
  transparent_union doesn't work in g++
  Bug ?

  Darwin's gcc doesn't want to put pointers in a transparent_union
  when built with -arch ppc64. Complains:
  warning: 'transparent_union' attribute ignored
*/
#if defined(__GNUC__) && !defined(__cplusplus) && \
      ! (defined(__APPLE__) && defined(_ARCH_PPC64))
/*
  we want to be able to use my_atomic_xxx functions with
  both signed and unsigned integers. But gcc will issue a warning
  "passing arg N of `my_atomic_XXX' as [un]signed due to prototype"
  if the signedness of the argument doesn't match the prototype, or
  "pointer targets in passing argument N of my_atomic_XXX differ in signedness"
  if int* is used where uint* is expected (or vice versa).
  Let's shut these warnings up
*/
#define make_transparent_unions(S)                              \
        typedef union {                                         \
          int  ## S  i;                                         \
          uint ## S  u;                                         \
        } U_ ## S   __attribute__ ((transparent_union));        \
        typedef union {                                         \
          int  ## S volatile *i;                                \
          uint ## S volatile *u;                                \
        } Uv_ ## S   __attribute__ ((transparent_union));
#define uintptr intptr
make_transparent_unions(8)
make_transparent_unions(16)
make_transparent_unions(32)
132
make_transparent_unions(64)
133 134 135 136 137 138 139 140 141 142 143
make_transparent_unions(ptr)
#undef uintptr
#undef make_transparent_unions
#define a       U_a.i
#define cmp     U_cmp.i
#define v       U_v.i
#define set     U_set.i
#else
#define U_8    int8
#define U_16   int16
#define U_32   int32
144
#define U_64   int64
145 146 147 148
#define U_ptr  intptr
#define Uv_8   int8
#define Uv_16  int16
#define Uv_32  int32
149
#define Uv_64  int64
150 151 152 153 154 155
#define Uv_ptr intptr
#define U_a    volatile *a
#define U_cmp  *cmp
#define U_v    v
#define U_set  set
#endif /* __GCC__ transparent_union magic */
156

#ifdef HAVE_INLINE

#define make_atomic_cas(S)                                      \
STATIC_INLINE int my_atomic_cas ## S(Uv_ ## S U_a,              \
                            Uv_ ## S U_cmp, U_ ## S U_set)      \
{                                                               \
  int8 ret;                                                     \
  make_atomic_cas_body(S);                                      \
  return ret;                                                   \
}

#define make_atomic_add(S)                                      \
STATIC_INLINE int ## S my_atomic_add ## S(                      \
                        Uv_ ## S U_a, U_ ## S U_v)              \
{                                                               \
  make_atomic_add_body(S);                                      \
  return v;                                                     \
}

#define make_atomic_fas(S)                                      \
STATIC_INLINE int ## S my_atomic_fas ## S(                      \
                         Uv_ ## S U_a, U_ ## S U_v)             \
{                                                               \
  make_atomic_fas_body(S);                                      \
  return v;                                                     \
}

#define make_atomic_load(S)                                     \
STATIC_INLINE int ## S my_atomic_load ## S(Uv_ ## S U_a)        \
{                                                               \
  int ## S ret;                                                 \
  make_atomic_load_body(S);                                     \
  return ret;                                                   \
}

#define make_atomic_store(S)                                    \
STATIC_INLINE void my_atomic_store ## S(                        \
                     Uv_ ## S U_a, U_ ## S U_v)                 \
{                                                               \
  make_atomic_store_body(S);                                    \
}

#else /* no inline functions */

#define make_atomic_add(S)                                      \
extern int ## S my_atomic_add ## S(Uv_ ## S U_a, U_ ## S U_v);

#define make_atomic_fas(S)                                      \
extern int ## S my_atomic_fas ## S(Uv_ ## S U_a, U_ ## S U_v);

#define make_atomic_cas(S)                                      \
extern int my_atomic_cas ## S(Uv_ ## S U_a, Uv_ ## S U_cmp, U_ ## S U_set);

#define make_atomic_load(S)                                     \
extern int ## S my_atomic_load ## S(Uv_ ## S U_a);

#define make_atomic_store(S)                                    \
extern void my_atomic_store ## S(Uv_ ## S U_a, U_ ## S U_v);

#endif /* HAVE_INLINE */

218 219
#ifdef MY_ATOMIC_HAS_8_16
make_atomic_cas(8)
unknown's avatar
unknown committed
220
make_atomic_cas(16)
221
#endif
unknown's avatar
unknown committed
222
make_atomic_cas(32)
223
make_atomic_cas(64)
unknown's avatar
unknown committed
224 225
make_atomic_cas(ptr)

226 227
#ifdef MY_ATOMIC_HAS_8_16
make_atomic_add(8)
228
make_atomic_add(16)
229
#endif
230
make_atomic_add(32)
231
make_atomic_add(64)
232

233 234
#ifdef MY_ATOMIC_HAS_8_16
make_atomic_load(8)
unknown's avatar
unknown committed
235
make_atomic_load(16)
236
#endif
unknown's avatar
unknown committed
237
make_atomic_load(32)
238
make_atomic_load(64)
unknown's avatar
unknown committed
239 240
make_atomic_load(ptr)

241 242 243 244 245
#ifdef MY_ATOMIC_HAS_8_16
make_atomic_fas(8)
make_atomic_fas(16)
#endif
make_atomic_fas(32)
246
make_atomic_fas(64)
247 248 249 250
make_atomic_fas(ptr)

#ifdef MY_ATOMIC_HAS_8_16
make_atomic_store(8)
unknown's avatar
unknown committed
251
make_atomic_store(16)
252
#endif
unknown's avatar
unknown committed
253
make_atomic_store(32)
254
make_atomic_store(64)
unknown's avatar
unknown committed
255 256
make_atomic_store(ptr)

/* give the platform-specific atomic/ *.h a chance to clean up its macros */
#ifdef _atomic_h_cleanup_
#include _atomic_h_cleanup_
#undef _atomic_h_cleanup_
#endif

/* undefine all helper macros so they cannot leak into including code */
#undef U_8
#undef U_16
#undef U_32
#undef U_64
#undef U_ptr
#undef Uv_8
#undef Uv_16
#undef Uv_32
#undef Uv_64
#undef Uv_ptr
#undef a
#undef cmp
#undef v
#undef set
#undef U_a
#undef U_cmp
#undef U_v
#undef U_set
#undef make_atomic_add
#undef make_atomic_cas
#undef make_atomic_load
#undef make_atomic_store
#undef make_atomic_fas
#undef make_atomic_add_body
#undef make_atomic_cas_body
#undef make_atomic_load_body
#undef make_atomic_store_body
#undef make_atomic_fas_body
#endif
#undef intptr
/*
  the macro below defines (as an expression) the code that
  will be run in spin-loops. Intel manuals recommend to have PAUSE there.
  It is expected to be defined in include/atomic/ *.h files
*/
#ifndef LF_BACKOFF
#define LF_BACKOFF (1)
#endif

#define MY_ATOMIC_OK       0
#define MY_ATOMIC_NOT_1CPU 1
extern int my_atomic_initialize();

#endif

#endif /* MY_ATOMIC_INCLUDED */