@@ -19,11 +19,9 @@
 #define PORT_ATOMIC_POINTER_H_
 
 #include <stdint.h>
-#ifdef LEVELDB_ATOMIC_PRESENT
+
 #include <atomic>
-#elif defined(__APPLE__)
-#include <libkern/OSAtomic.h>
-#endif
+
 #ifdef OS_WIN
 #include <windows.h>
 #endif
@@ -55,11 +53,7 @@ namespace port {
 // Mac OS
 #elif defined(__APPLE__)
 inline void MemoryBarrier() {
-#if defined(LEVELDB_ATOMIC_PRESENT)
   std::atomic_thread_fence(std::memory_order_seq_cst);
-#else
-  OSMemoryBarrier();
-#endif  // defined(LEVELDB_ATOMIC_PRESENT)
 }
 #define LEVELDB_HAVE_MEMORY_BARRIER
 
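
The Mac-only conditional goes away because std::atomic_thread_fence(std::memory_order_seq_cst) is the portable C++11 equivalent of the now-deprecated OSMemoryBarrier(). For review context, here is a minimal sketch of the pattern the LEVELDB_HAVE_MEMORY_BARRIER path builds on: a plain pointer bracketed by full fences. This is not code from the patch, and BarrierAtomicPointer is a hypothetical name.

    #include <atomic>

    inline void MemoryBarrier() {
      // Full fence: neither loads nor stores may be reordered across it.
      std::atomic_thread_fence(std::memory_order_seq_cst);
    }

    class BarrierAtomicPointer {  // hypothetical name, for illustration only
     private:
      void* rep_;
     public:
      explicit BarrierAtomicPointer(void* v) : rep_(v) { }
      inline void* Acquire_Load() const {
        void* result = rep_;
        MemoryBarrier();  // later reads cannot move before this load
        return result;
      }
      inline void Release_Store(void* v) {
        MemoryBarrier();  // earlier writes cannot move after this store
        rep_ = v;
      }
    };

Strictly speaking, the C++11 memory model wants the pointer itself to be a std::atomic; the fence-over-plain-pointer style reflects the file's pre-C++11 heritage.
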
@@ -124,7 +118,7 @@ inline void MemoryBarrier() {
 
 #endif
 
-// AtomicPointer built using platform-specific MemoryBarrier()
+// AtomicPointer built using platform-specific MemoryBarrier().
 #if defined(LEVELDB_HAVE_MEMORY_BARRIER)
 class AtomicPointer {
  private:
@@ -145,8 +139,8 @@ class AtomicPointer {
   }
 };
 
-// AtomicPointer based on <cstdatomic>
-#elif defined(LEVELDB_ATOMIC_PRESENT)
+// AtomicPointer based on C++11 <atomic>.
+#else
 class AtomicPointer {
  private:
   std::atomic<void*> rep_;
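
With LEVELDB_ATOMIC_PRESENT gone, the <atomic>-based implementation becomes the unconditional fallback. The method bodies elided from this hunk presumably map the interface onto C++11 acquire/release operations, along these lines (a sketch, not the patched source):

    class AtomicPointer {
     private:
      std::atomic<void*> rep_;
     public:
      AtomicPointer() { }
      explicit AtomicPointer(void* v) : rep_(v) { }
      inline void* Acquire_Load() const {
        return rep_.load(std::memory_order_acquire);
      }
      inline void Release_Store(void* v) {
        rep_.store(v, std::memory_order_release);
      }
      inline void* NoBarrier_Load() const {
        return rep_.load(std::memory_order_relaxed);
      }
      inline void NoBarrier_Store(void* v) {
        rep_.store(v, std::memory_order_relaxed);
      }
    };
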
@@ -167,70 +161,6 @@ class AtomicPointer {
   }
 };
 
-// Atomic pointer based on sparc memory barriers
-#elif defined(__sparcv9) && defined(__GNUC__)
-class AtomicPointer {
- private:
-  void* rep_;
- public:
-  AtomicPointer() { }
-  explicit AtomicPointer(void* v) : rep_(v) { }
-  inline void* Acquire_Load() const {
-    void* val;
-    __asm__ __volatile__ (
-        "ldx [%[rep_]], %[val] \n\t"
-        "membar #LoadLoad|#LoadStore \n\t"
-        : [val] "=r" (val)
-        : [rep_] "r" (&rep_)
-        : "memory");
-    return val;
-  }
-  inline void Release_Store(void* v) {
-    __asm__ __volatile__ (
-        "membar #LoadStore|#StoreStore \n\t"
-        "stx %[v], [%[rep_]] \n\t"
-        :
-        : [rep_] "r" (&rep_), [v] "r" (v)
-        : "memory");
-  }
-  inline void* NoBarrier_Load() const { return rep_; }
-  inline void NoBarrier_Store(void* v) { rep_ = v; }
-};
-
-// Atomic pointer based on ia64 acq/rel
-#elif defined(__ia64) && defined(__GNUC__)
-class AtomicPointer {
- private:
-  void* rep_;
- public:
-  AtomicPointer() { }
-  explicit AtomicPointer(void* v) : rep_(v) { }
-  inline void* Acquire_Load() const {
-    void* val;
-    __asm__ __volatile__ (
-        "ld8.acq %[val] = [%[rep_]] \n\t"
-        : [val] "=r" (val)
-        : [rep_] "r" (&rep_)
-        : "memory"
-        );
-    return val;
-  }
-  inline void Release_Store(void* v) {
-    __asm__ __volatile__ (
-        "st8.rel [%[rep_]] = %[v] \n\t"
-        :
-        : [rep_] "r" (&rep_), [v] "r" (v)
-        : "memory"
-        );
-  }
-  inline void* NoBarrier_Load() const { return rep_; }
-  inline void NoBarrier_Store(void* v) { rep_ = v; }
-};
-
-// We have neither MemoryBarrier(), nor <atomic>
-#else
-#error Please implement AtomicPointer for this platform.
 #endif
 
 #undef LEVELDB_HAVE_MEMORY_BARRIER
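
Dropping the sparc and ia64 assembly loses nothing: membar #LoadLoad|#LoadStore after a load and ld8.acq are acquire loads, while membar #LoadStore|#StoreStore before a store and st8.rel are release stores, which is exactly what std::memory_order_acquire/release compile to on those targets. A self-contained sketch of the publish/consume contract every one of these implementations had to satisfy (Node, shared, Publish, and Consume are illustrative names, not from the patch):

    #include <atomic>
    #include <cassert>

    struct Node { int value; };

    std::atomic<void*> shared(nullptr);

    void Publish(Node* n) {
      n->value = 42;  // fully initialize before publishing
      shared.store(n, std::memory_order_release);  // Release_Store
    }

    void Consume() {
      void* p = shared.load(std::memory_order_acquire);  // Acquire_Load
      if (p != nullptr) {
        // The acquire/release pairing makes the initialization visible.
        assert(static_cast<Node*>(p)->value == 42);
      }
    }
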