Remove android_... store_barriers and 64-bit atomics.

These are no longer used, and we want to strongly discourage future use.
Keep the 32-bit variants while there are still uses.  All users should move
to C11 or C++11 atomics.

(Resolved conflicts in atomic-...64.h with uniprocessor support
removal as in AOSP.)

Bug: 16880454

Change-Id: I122b541cfd29ef4a6c932647f85d0d6a9d802061
(cherry picked from commit 9959ed9530)
This commit is contained in:
Hans Boehm 2014-07-31 15:56:50 -07:00
parent 6a594683ec
commit cad56b6ad3
8 changed files with 0 additions and 262 deletions

View file

@ -37,15 +37,6 @@ extern ANDROID_ATOMIC_INLINE void android_memory_barrier()
#endif
}
extern ANDROID_ATOMIC_INLINE void android_memory_store_barrier()
{
#if ANDROID_SMP == 0
android_compiler_barrier();
#else
__asm__ __volatile__ ("dmb st" : : : "memory");
#endif
}
extern ANDROID_ATOMIC_INLINE
int32_t android_atomic_acquire_load(volatile const int32_t *ptr)
{

View file

@ -46,29 +46,11 @@ void android_compiler_barrier(void)
__asm__ __volatile__ ("" : : : "memory");
}
#if ANDROID_SMP == 0
extern ANDROID_ATOMIC_INLINE
void android_memory_barrier(void)
{
android_compiler_barrier();
}
extern ANDROID_ATOMIC_INLINE
void android_memory_store_barrier(void)
{
android_compiler_barrier();
}
#else
extern ANDROID_ATOMIC_INLINE
void android_memory_barrier(void)
{
__asm__ __volatile__ ("dmb ish" : : : "memory");
}
extern ANDROID_ATOMIC_INLINE
void android_memory_store_barrier(void)
{
__asm__ __volatile__ ("dmb ishst" : : : "memory");
}
#endif
extern ANDROID_ATOMIC_INLINE
int32_t android_atomic_acquire_load(volatile const int32_t *ptr)
@ -78,14 +60,6 @@ int32_t android_atomic_acquire_load(volatile const int32_t *ptr)
return value;
}
extern ANDROID_ATOMIC_INLINE
int64_t android_atomic_acquire_load64(volatile const int64_t *ptr)
{
int64_t value = *ptr;
android_memory_barrier();
return value;
}
extern ANDROID_ATOMIC_INLINE
int32_t android_atomic_release_load(volatile const int32_t *ptr)
{
@ -93,13 +67,6 @@ int32_t android_atomic_release_load(volatile const int32_t *ptr)
return *ptr;
}
extern ANDROID_ATOMIC_INLINE
int64_t android_atomic_release_load64(volatile const int64_t *ptr)
{
android_memory_barrier();
return *ptr;
}
extern ANDROID_ATOMIC_INLINE
void android_atomic_acquire_store(int32_t value, volatile int32_t *ptr)
{
@ -107,13 +74,6 @@ void android_atomic_acquire_store(int32_t value, volatile int32_t *ptr)
android_memory_barrier();
}
extern ANDROID_ATOMIC_INLINE
void android_atomic_acquire_store64(int64_t value, volatile int64_t *ptr)
{
*ptr = value;
android_memory_barrier();
}
extern ANDROID_ATOMIC_INLINE
void android_atomic_release_store(int32_t value, volatile int32_t *ptr)
{
@ -121,13 +81,6 @@ void android_atomic_release_store(int32_t value, volatile int32_t *ptr)
*ptr = value;
}
extern ANDROID_ATOMIC_INLINE
void android_atomic_release_store64(int64_t value, volatile int64_t *ptr)
{
android_memory_barrier();
*ptr = value;
}
extern ANDROID_ATOMIC_INLINE
int android_atomic_cas(int32_t old_value, int32_t new_value,
volatile int32_t *ptr)
@ -135,13 +88,6 @@ int android_atomic_cas(int32_t old_value, int32_t new_value,
return __sync_val_compare_and_swap(ptr, old_value, new_value) != old_value;
}
extern ANDROID_ATOMIC_INLINE
int64_t android_atomic_cas64(int64_t old_value, int64_t new_value,
volatile int64_t *ptr)
{
return __sync_val_compare_and_swap(ptr, old_value, new_value) != old_value;
}
extern ANDROID_ATOMIC_INLINE
int android_atomic_acquire_cas(int32_t old_value, int32_t new_value,
volatile int32_t *ptr)
@ -151,15 +97,6 @@ int android_atomic_acquire_cas(int32_t old_value, int32_t new_value,
return status;
}
extern ANDROID_ATOMIC_INLINE
int64_t android_atomic_acquire_cas64(int64_t old_value, int64_t new_value,
volatile int64_t *ptr)
{
int status = android_atomic_cas64(old_value, new_value, ptr);
android_memory_barrier();
return status;
}
extern ANDROID_ATOMIC_INLINE
int android_atomic_release_cas(int32_t old_value, int32_t new_value,
volatile int32_t *ptr)
@ -168,14 +105,6 @@ int android_atomic_release_cas(int32_t old_value, int32_t new_value,
return android_atomic_cas(old_value, new_value, ptr);
}
extern ANDROID_ATOMIC_INLINE
int64_t android_atomic_release_cas64(int64_t old_value, int64_t new_value,
volatile int64_t *ptr)
{
android_memory_barrier();
return android_atomic_cas64(old_value, new_value, ptr);
}
extern ANDROID_ATOMIC_INLINE
int32_t android_atomic_add(int32_t increment, volatile int32_t *ptr)
{

View file

@ -65,12 +65,6 @@ extern "C" {
#define ANDROID_MEMBAR_FULL android_memory_barrier
#endif
#if ANDROID_SMP == 0
#define ANDROID_MEMBAR_STORE android_compiler_barrier
#else
#define ANDROID_MEMBAR_STORE android_memory_store_barrier
#endif
#ifdef __cplusplus
}
#endif

View file

@ -33,19 +33,11 @@ extern ANDROID_ATOMIC_INLINE void android_memory_barrier(void)
{
android_compiler_barrier();
}
extern ANDROID_ATOMIC_INLINE void android_memory_store_barrier(void)
{
android_compiler_barrier();
}
#else
extern ANDROID_ATOMIC_INLINE void android_memory_barrier(void)
{
__asm__ __volatile__ ("sync" : : : "memory");
}
extern ANDROID_ATOMIC_INLINE void android_memory_store_barrier(void)
{
__asm__ __volatile__ ("sync" : : : "memory");
}
#endif
extern ANDROID_ATOMIC_INLINE int32_t

View file

@ -28,25 +28,10 @@ extern ANDROID_ATOMIC_INLINE void android_compiler_barrier(void)
__asm__ __volatile__ ("" : : : "memory");
}
#if ANDROID_SMP == 0
extern ANDROID_ATOMIC_INLINE void android_memory_barrier(void)
{
android_compiler_barrier();
}
extern ANDROID_ATOMIC_INLINE void android_memory_store_barrier(void)
{
android_compiler_barrier();
}
#else
extern ANDROID_ATOMIC_INLINE void android_memory_barrier(void)
{
__asm__ __volatile__ ("sync" : : : "memory");
}
extern ANDROID_ATOMIC_INLINE void android_memory_store_barrier(void)
{
__asm__ __volatile__ ("sync" : : : "memory");
}
#endif
extern ANDROID_ATOMIC_INLINE
int32_t android_atomic_acquire_load(volatile const int32_t *ptr)
@ -56,14 +41,6 @@ int32_t android_atomic_acquire_load(volatile const int32_t *ptr)
return value;
}
extern ANDROID_ATOMIC_INLINE
int64_t android_atomic_acquire_load64(volatile const int64_t *ptr)
{
int64_t value = *ptr;
android_memory_barrier();
return value;
}
extern ANDROID_ATOMIC_INLINE
int32_t android_atomic_release_load(volatile const int32_t *ptr)
{
@ -71,13 +48,6 @@ int32_t android_atomic_release_load(volatile const int32_t *ptr)
return *ptr;
}
extern ANDROID_ATOMIC_INLINE
int64_t android_atomic_release_load64(volatile const int64_t *ptr)
{
android_memory_barrier();
return *ptr;
}
extern ANDROID_ATOMIC_INLINE
void android_atomic_acquire_store(int32_t value, volatile int32_t *ptr)
{
@ -85,13 +55,6 @@ void android_atomic_acquire_store(int32_t value, volatile int32_t *ptr)
android_memory_barrier();
}
extern ANDROID_ATOMIC_INLINE
void android_atomic_acquire_store64(int64_t value, volatile int64_t *ptr)
{
*ptr = value;
android_memory_barrier();
}
extern ANDROID_ATOMIC_INLINE
void android_atomic_release_store(int32_t value, volatile int32_t *ptr)
{
@ -99,13 +62,6 @@ void android_atomic_release_store(int32_t value, volatile int32_t *ptr)
*ptr = value;
}
extern ANDROID_ATOMIC_INLINE
void android_atomic_release_store64(int64_t value, volatile int64_t *ptr)
{
android_memory_barrier();
*ptr = value;
}
extern ANDROID_ATOMIC_INLINE
int android_atomic_cas(int32_t old_value, int32_t new_value, volatile int32_t *ptr)
{
@ -125,13 +81,6 @@ int android_atomic_cas(int32_t old_value, int32_t new_value, volatile int32_t *p
return prev != old_value;
}
extern ANDROID_ATOMIC_INLINE
int64_t android_atomic_cas64(int64_t old_value, int64_t new_value,
volatile int64_t *ptr)
{
return __sync_val_compare_and_swap(ptr, old_value, new_value) != old_value;
}
extern ANDROID_ATOMIC_INLINE
int android_atomic_acquire_cas(int32_t old_value,
int32_t new_value,
@ -142,15 +91,6 @@ int android_atomic_acquire_cas(int32_t old_value,
return status;
}
extern ANDROID_ATOMIC_INLINE
int64_t android_atomic_acquire_cas64(int64_t old_value, int64_t new_value,
volatile int64_t *ptr)
{
int status = android_atomic_cas64(old_value, new_value, ptr);
android_memory_barrier();
return status;
}
extern ANDROID_ATOMIC_INLINE
int android_atomic_release_cas(int32_t old_value,
int32_t new_value,
@ -160,14 +100,6 @@ int android_atomic_release_cas(int32_t old_value,
return android_atomic_cas(old_value, new_value, ptr);
}
extern ANDROID_ATOMIC_INLINE
int64_t android_atomic_release_cas64(int64_t old_value, int64_t new_value,
volatile int64_t *ptr)
{
android_memory_barrier();
return android_atomic_cas64(old_value, new_value, ptr);
}
extern ANDROID_ATOMIC_INLINE
int32_t android_atomic_add(int32_t increment, volatile int32_t *ptr)
{

View file

@ -33,19 +33,11 @@ extern ANDROID_ATOMIC_INLINE void android_memory_barrier(void)
{
android_compiler_barrier();
}
extern ANDROID_ATOMIC_INLINE void android_memory_store_barrier(void)
{
android_compiler_barrier();
}
#else
extern ANDROID_ATOMIC_INLINE void android_memory_barrier(void)
{
__asm__ __volatile__ ("mfence" : : : "memory");
}
extern ANDROID_ATOMIC_INLINE void android_memory_store_barrier(void)
{
android_compiler_barrier();
}
#endif
extern ANDROID_ATOMIC_INLINE int32_t

View file

@ -41,29 +41,11 @@ void android_compiler_barrier(void)
__asm__ __volatile__ ("" : : : "memory");
}
#if ANDROID_SMP == 0
extern ANDROID_ATOMIC_INLINE
void android_memory_barrier(void)
{
android_compiler_barrier();
}
extern ANDROID_ATOMIC_INLINE
void android_memory_store_barrier(void)
{
android_compiler_barrier();
}
#else
extern ANDROID_ATOMIC_INLINE
void android_memory_barrier(void)
{
__asm__ __volatile__ ("mfence" : : : "memory");
}
extern ANDROID_ATOMIC_INLINE
void android_memory_store_barrier(void)
{
android_compiler_barrier();
}
#endif
extern ANDROID_ATOMIC_INLINE
int32_t android_atomic_acquire_load(volatile const int32_t *ptr)
@ -73,14 +55,6 @@ int32_t android_atomic_acquire_load(volatile const int32_t *ptr)
return value;
}
extern ANDROID_ATOMIC_INLINE
int64_t android_atomic_acquire_load64(volatile const int64_t *ptr)
{
int64_t value = *ptr;
android_compiler_barrier();
return value;
}
extern ANDROID_ATOMIC_INLINE
int32_t android_atomic_release_load(volatile const int32_t *ptr)
{
@ -88,13 +62,6 @@ int32_t android_atomic_release_load(volatile const int32_t *ptr)
return *ptr;
}
extern ANDROID_ATOMIC_INLINE
int64_t android_atomic_release_load64(volatile const int64_t *ptr)
{
android_memory_barrier();
return *ptr;
}
extern ANDROID_ATOMIC_INLINE
void android_atomic_acquire_store(int32_t value, volatile int32_t *ptr)
{
@ -102,13 +69,6 @@ void android_atomic_acquire_store(int32_t value, volatile int32_t *ptr)
android_memory_barrier();
}
extern ANDROID_ATOMIC_INLINE
void android_atomic_acquire_store64(int64_t value, volatile int64_t *ptr)
{
*ptr = value;
android_memory_barrier();
}
extern ANDROID_ATOMIC_INLINE
void android_atomic_release_store(int32_t value, volatile int32_t *ptr)
{
@ -116,13 +76,6 @@ void android_atomic_release_store(int32_t value, volatile int32_t *ptr)
*ptr = value;
}
extern ANDROID_ATOMIC_INLINE
void android_atomic_release_store64(int64_t value, volatile int64_t *ptr)
{
android_compiler_barrier();
*ptr = value;
}
extern ANDROID_ATOMIC_INLINE
int android_atomic_cas(int32_t old_value, int32_t new_value,
volatile int32_t *ptr)
@ -135,18 +88,6 @@ int android_atomic_cas(int32_t old_value, int32_t new_value,
return prev != old_value;
}
extern ANDROID_ATOMIC_INLINE
int64_t android_atomic_cas64(int64_t old_value, int64_t new_value,
volatile int64_t *ptr)
{
int64_t prev;
__asm__ __volatile__ ("lock; cmpxchgq %1, %2"
: "=a" (prev)
: "q" (new_value), "m" (*ptr), "0" (old_value)
: "memory");
return prev != old_value;
}
extern ANDROID_ATOMIC_INLINE
int android_atomic_acquire_cas(int32_t old_value, int32_t new_value,
volatile int32_t *ptr)
@ -155,14 +96,6 @@ int android_atomic_acquire_cas(int32_t old_value, int32_t new_value,
return android_atomic_cas(old_value, new_value, ptr);
}
extern ANDROID_ATOMIC_INLINE
int64_t android_atomic_acquire_cas64(int64_t old_value, int64_t new_value,
volatile int64_t *ptr)
{
/* Loads are not reordered with other loads. */
return android_atomic_cas64(old_value, new_value, ptr);
}
extern ANDROID_ATOMIC_INLINE
int android_atomic_release_cas(int32_t old_value, int32_t new_value,
volatile int32_t *ptr)
@ -171,14 +104,6 @@ int android_atomic_release_cas(int32_t old_value, int32_t new_value,
return android_atomic_cas(old_value, new_value, ptr);
}
extern ANDROID_ATOMIC_INLINE
int64_t android_atomic_release_cas64(int64_t old_value, int64_t new_value,
volatile int64_t *ptr)
{
/* Stores are not reordered with other stores. */
return android_atomic_cas64(old_value, new_value, ptr);
}
extern ANDROID_ATOMIC_INLINE
int32_t android_atomic_add(int32_t increment, volatile int32_t *ptr)
{

View file

@ -103,11 +103,6 @@ int32_t android_atomic_or(int32_t value, volatile int32_t* addr);
int32_t android_atomic_acquire_load(volatile const int32_t* addr);
int32_t android_atomic_release_load(volatile const int32_t* addr);
#if defined (__LP64__)
int64_t android_atomic_acquire_load64(volatile const int64_t* addr);
int64_t android_atomic_release_load64(volatile const int64_t* addr);
#endif
/*
* Perform an atomic store with "acquire" or "release" ordering.
*
@ -125,11 +120,6 @@ int64_t android_atomic_release_load64(volatile const int64_t* addr);
void android_atomic_acquire_store(int32_t value, volatile int32_t* addr);
void android_atomic_release_store(int32_t value, volatile int32_t* addr);
#if defined (__LP64__)
void android_atomic_acquire_store64(int64_t value, volatile int64_t* addr);
void android_atomic_release_store64(int64_t value, volatile int64_t* addr);
#endif
/*
* Compare-and-set operation with "acquire" or "release" ordering.
*
@ -147,13 +137,6 @@ int android_atomic_acquire_cas(int32_t oldvalue, int32_t newvalue,
int android_atomic_release_cas(int32_t oldvalue, int32_t newvalue,
volatile int32_t* addr);
#if defined (__LP64__)
int64_t android_atomic_acquire_cas64(int64_t old_value, int64_t new_value,
volatile int64_t *ptr);
int64_t android_atomic_release_cas64(int64_t old_value, int64_t new_value,
volatile int64_t *ptr);
#endif
/*
* Aliases for code using an older version of this header. These are now
* deprecated and should not be used. The definitions will be removed