summaryrefslogtreecommitdiffstats
path: root/vm/Atomic.h
diff options
context:
space:
mode:
author	Andy McFadden <fadden@android.com>	2010-05-19 22:36:33 -0700
committer	Andy McFadden <fadden@android.com>	2010-05-20 21:40:02 -0700
commit6a877083893fb466ebb6ee6bcbdc5f1ca8609cb1 (patch)
tree1f0ced804a916abd7a45191d803bc70f150f084e /vm/Atomic.h
parente54a4c581459bf899b285506aafd577a396b9671 (diff)
downloadandroid_dalvik-6a877083893fb466ebb6ee6bcbdc5f1ca8609cb1.tar.gz
android_dalvik-6a877083893fb466ebb6ee6bcbdc5f1ca8609cb1.tar.bz2
android_dalvik-6a877083893fb466ebb6ee6bcbdc5f1ca8609cb1.zip
Atomic/SMP update.
Moved quasiatomic 64-bit operations in here. They still need work. Use the new libcutils atomic-inline header for our memory barriers. Adjust makefiles to set SMP definition appropriately. Change-Id: Id2ab2123724bb0baeb32f862b5568392aba35a59
Diffstat (limited to 'vm/Atomic.h')
-rw-r--r--	vm/Atomic.h	40
1 file changed, 27 insertions(+), 13 deletions(-)
diff --git a/vm/Atomic.h b/vm/Atomic.h
index bc0203c10..aa2f10318 100644
--- a/vm/Atomic.h
+++ b/vm/Atomic.h
@@ -13,31 +13,26 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
+
/*
* Atomic operations
*/
#ifndef _DALVIK_ATOMIC
#define _DALVIK_ATOMIC
-#include <utils/Atomic.h> /* use common Android atomic ops */
+#include <cutils/atomic.h> /* use common Android atomic ops */
+#include <cutils/atomic-inline.h> /* and some uncommon ones */
/*
- * Memory barrier. Guarantee that register-resident variables
- * are flushed to memory, and guarantee that instructions before
- * the barrier do not get reordered to appear past it.
- *
- * 'asm volatile ("":::"memory")' is probably overkill, but it's correct.
- * There may be a way to do it that doesn't flush every single register.
- *
- * TODO: look into the wmb() family on Linux and equivalents on other systems.
+ * Full memory barrier. Ensures compiler ordering and SMP behavior.
*/
-#define MEM_BARRIER() do { asm volatile ("":::"memory"); } while (0)
+#define MEM_BARRIER() android_membar_full()
/*
- * Atomic compare-and-swap macro.
+ * 32-bit atomic compare-and-swap macro. Performs a memory barrier
+ * before the swap (store-release).
*
- * If *_addr equals "_old", replace it with "_new" and return 1. Otherwise
- * return 0. (e.g. x86 "cmpxchgl" instruction.)
+ * If *_addr equals "_old", replace it with "_new" and return nonzero.
*
* Underlying function is currently declared:
* int android_atomic_cmpxchg(int32_t old, int32_t new, volatile int32_t* addr)
@@ -45,4 +40,23 @@
#define ATOMIC_CMP_SWAP(_addr, _old, _new) \
(android_atomic_cmpxchg((_old), (_new), (_addr)) == 0)
+
+/*
+ * NOTE: Two "quasiatomic" operations on the exact same memory address
+ * are guaranteed to operate atomically with respect to each other,
+ * but no guarantees are made about quasiatomic operations mixed with
+ * non-quasiatomic operations on the same address, nor about
+ * quasiatomic operations that are performed on partially-overlapping
+ * memory.
+ */
+
+/*
+ * TODO: rename android_quasiatomic_* to dvmQuasiatomic*. Don't want to do
+ * that yet due to branch merge issues.
+ */
+int64_t android_quasiatomic_swap_64(int64_t value, volatile int64_t* addr);
+int64_t android_quasiatomic_read_64(volatile int64_t* addr);
+int android_quasiatomic_cmpxchg_64(int64_t oldvalue, int64_t newvalue,
+ volatile int64_t* addr);
+
#endif /*_DALVIK_ATOMIC*/