Diffstat (limited to 'runtime')
-rw-r--r--  runtime/arch/arm/entrypoints_init_arm.cc | 24
-rw-r--r--  runtime/arch/arm/quick_entrypoints_arm.S | 255
-rw-r--r--  runtime/arch/arm64/entrypoints_init_arm64.cc | 24
-rw-r--r--  runtime/arch/arm64/quick_entrypoints_arm64.S | 12
-rw-r--r--  runtime/arch/mips/entrypoints_init_mips.cc | 24
-rw-r--r--  runtime/arch/mips/quick_entrypoints_mips.S | 167
-rw-r--r--  runtime/arch/stub_test.cc | 349
-rw-r--r--  runtime/arch/x86/entrypoints_init_x86.cc | 24
-rw-r--r--  runtime/arch/x86/quick_entrypoints_x86.S | 171
-rw-r--r--  runtime/arch/x86_64/entrypoints_init_x86_64.cc | 24
-rw-r--r--  runtime/arch/x86_64/quick_entrypoints_x86_64.S | 12
-rw-r--r--  runtime/class_linker.cc | 189
-rw-r--r--  runtime/class_linker.h | 13
-rw-r--r--  runtime/common_throws.cc | 4
-rw-r--r--  runtime/dex_instruction_list.h | 8
-rw-r--r--  runtime/entrypoints/quick/quick_entrypoints_list.h | 12
-rw-r--r--  runtime/entrypoints/quick/quick_field_entrypoints.cc | 310
-rw-r--r--  runtime/entrypoints_order_test.cc | 16
-rw-r--r--  runtime/interpreter/interpreter_common.cc | 26
-rw-r--r--  runtime/interpreter/interpreter_goto_table_impl.cc | 40
-rw-r--r--  runtime/interpreter/interpreter_switch_impl.cc | 26
-rw-r--r--  runtime/mirror/array-inl.h | 2
-rw-r--r--  runtime/mirror/art_field-inl.h | 54
-rw-r--r--  runtime/mirror/art_field.h | 4
-rw-r--r--  runtime/mirror/art_method-inl.h | 2
-rw-r--r--  runtime/mirror/class-inl.h | 40
-rw-r--r--  runtime/mirror/class.h | 6
-rw-r--r--  runtime/mirror/dex_cache-inl.h | 2
-rw-r--r--  runtime/mirror/object-inl.h | 202
-rw-r--r--  runtime/mirror/object.h | 80
-rw-r--r--  runtime/mirror/reference-inl.h | 2
-rw-r--r--  runtime/mirror/string-inl.h | 2
-rw-r--r--  runtime/oat.cc | 2
-rw-r--r--  runtime/primitive.h | 4
-rw-r--r--  runtime/runtime.cc | 28
-rw-r--r--  runtime/runtime.h | 8
-rw-r--r--  runtime/thread.cc | 12
-rw-r--r--  runtime/transaction.cc | 113
-rw-r--r--  runtime/transaction.h | 21
-rw-r--r--  runtime/verifier/method_verifier.cc | 16
40 files changed, 1932 insertions, 398 deletions
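
Editor's note, not part of the patch: the hunks below add narrow (8-bit and 16-bit) field get/set entrypoints. The extern "C" declarations in the entrypoints_init_*.cc files imply the extension contract the stubs must honor: byte and short getters return sign-extended values (int8_t/int16_t), while boolean and char getters return zero-extended values (uint8_t/uint16_t). A minimal standalone C++ sketch of that contract, with hypothetical helper names, is:

#include <cassert>
#include <cstdint>

// Narrow a raw 32-bit stub result the way each new getter's return type implies.
static int32_t ExtendByte(uint32_t raw)    { return static_cast<int8_t>(raw); }   // sign-extend
static int32_t ExtendBoolean(uint32_t raw) { return static_cast<uint8_t>(raw); }  // zero-extend
static int32_t ExtendShort(uint32_t raw)   { return static_cast<int16_t>(raw); }  // sign-extend
static int32_t ExtendChar(uint32_t raw)    { return static_cast<uint16_t>(raw); } // zero-extend

int main() {
  assert(ExtendByte(0xFF) == -1);         // byte 0xFF reads back as -1
  assert(ExtendBoolean(0xFF) == 0xFF);    // boolean storage is uint8_t, stays 0xFF
  assert(ExtendShort(0x8000) == -32768);  // short 0x8000 reads back as -32768
  assert(ExtendChar(0x8000) == 0x8000);   // char is an unsigned 16-bit value
  return 0;
}

These are the same boundary values (0xFF, 128, 32768, 0x7FFE) exercised by the GetSet*Static/Instance helpers added to runtime/arch/stub_test.cc below.
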
diff --git a/runtime/arch/arm/entrypoints_init_arm.cc b/runtime/arch/arm/entrypoints_init_arm.cc
index 8c6afd66bd..38a88c574c 100644
--- a/runtime/arch/arm/entrypoints_init_arm.cc
+++ b/runtime/arch/arm/entrypoints_init_arm.cc
@@ -48,12 +48,24 @@ extern "C" void* art_quick_initialize_type_and_verify_access(uint32_t, void*);
extern "C" void* art_quick_resolve_string(void*, uint32_t);
// Field entrypoints.
+extern "C" int art_quick_set8_instance(uint32_t, void*, int8_t);
+extern "C" int art_quick_set8_static(uint32_t, int8_t);
+extern "C" int art_quick_set16_instance(uint32_t, void*, int16_t);
+extern "C" int art_quick_set16_static(uint32_t, int16_t);
extern "C" int art_quick_set32_instance(uint32_t, void*, int32_t);
extern "C" int art_quick_set32_static(uint32_t, int32_t);
extern "C" int art_quick_set64_instance(uint32_t, void*, int64_t);
extern "C" int art_quick_set64_static(uint32_t, int64_t);
extern "C" int art_quick_set_obj_instance(uint32_t, void*, void*);
extern "C" int art_quick_set_obj_static(uint32_t, void*);
+extern "C" int8_t art_quick_get_byte_instance(uint32_t, void*);
+extern "C" uint8_t art_quick_get_boolean_instance(uint32_t, void*);
+extern "C" int8_t art_quick_get_byte_static(uint32_t);
+extern "C" uint8_t art_quick_get_boolean_static(uint32_t);
+extern "C" int16_t art_quick_get_short_instance(uint32_t, void*);
+extern "C" uint16_t art_quick_get_char_instance(uint32_t, void*);
+extern "C" int16_t art_quick_get_short_static(uint32_t);
+extern "C" uint16_t art_quick_get_char_static(uint32_t);
extern "C" int32_t art_quick_get32_instance(uint32_t, void*);
extern "C" int32_t art_quick_get32_static(uint32_t);
extern "C" int64_t art_quick_get64_instance(uint32_t, void*);
@@ -154,15 +166,27 @@ void InitEntryPoints(InterpreterEntryPoints* ipoints, JniEntryPoints* jpoints,
qpoints->pResolveString = art_quick_resolve_string;
// Field
+ qpoints->pSet8Instance = art_quick_set8_instance;
+ qpoints->pSet8Static = art_quick_set8_static;
+ qpoints->pSet16Instance = art_quick_set16_instance;
+ qpoints->pSet16Static = art_quick_set16_static;
qpoints->pSet32Instance = art_quick_set32_instance;
qpoints->pSet32Static = art_quick_set32_static;
qpoints->pSet64Instance = art_quick_set64_instance;
qpoints->pSet64Static = art_quick_set64_static;
qpoints->pSetObjInstance = art_quick_set_obj_instance;
qpoints->pSetObjStatic = art_quick_set_obj_static;
+ qpoints->pGetByteInstance = art_quick_get_byte_instance;
+ qpoints->pGetBooleanInstance = art_quick_get_boolean_instance;
+ qpoints->pGetShortInstance = art_quick_get_short_instance;
+ qpoints->pGetCharInstance = art_quick_get_char_instance;
qpoints->pGet32Instance = art_quick_get32_instance;
qpoints->pGet64Instance = art_quick_get64_instance;
qpoints->pGetObjInstance = art_quick_get_obj_instance;
+ qpoints->pGetByteStatic = art_quick_get_byte_static;
+ qpoints->pGetBooleanStatic = art_quick_get_boolean_static;
+ qpoints->pGetShortStatic = art_quick_get_short_static;
+ qpoints->pGetCharStatic = art_quick_get_char_static;
qpoints->pGet32Static = art_quick_get32_static;
qpoints->pGet64Static = art_quick_get64_static;
qpoints->pGetObjStatic = art_quick_get_obj_static;
diff --git a/runtime/arch/arm/quick_entrypoints_arm.S b/runtime/arch/arm/quick_entrypoints_arm.S
index 1b30c9cca2..51bcd3c654 100644
--- a/runtime/arch/arm/quick_entrypoints_arm.S
+++ b/runtime/arch/arm/quick_entrypoints_arm.S
@@ -203,6 +203,77 @@ ENTRY \c_name
END \c_name
.endm
+.macro RETURN_OR_DELIVER_PENDING_EXCEPTION_REG reg
+ ldr \reg, [r9, #THREAD_EXCEPTION_OFFSET] // Get exception field.
+ cbnz \reg, 1f
+ bx lr
+1:
+ DELIVER_PENDING_EXCEPTION
+.endm
+
+.macro RETURN_OR_DELIVER_PENDING_EXCEPTION_R1
+ RETURN_OR_DELIVER_PENDING_EXCEPTION_REG r1
+.endm
+
+.macro RETURN_IF_RESULT_IS_ZERO_OR_DELIVER
+ RETURN_IF_RESULT_IS_ZERO
+ DELIVER_PENDING_EXCEPTION
+.endm
+
+// Macros taking advantage of code similarities for downcalls with referrer for non-wide fields.
+.macro ONE_ARG_REF_DOWNCALL name, entrypoint, return
+ .extern \entrypoint
+ENTRY \name
+ SETUP_REF_ONLY_CALLEE_SAVE_FRAME @ save callee saves in case of GC
+ ldr r1, [sp, #32] @ pass referrer
+ mov r2, r9 @ pass Thread::Current
+ mov r3, sp @ pass SP
+ bl \entrypoint @ (uint32_t field_idx, const Method* referrer, Thread*, SP)
+ RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
+ \return
+END \name
+.endm
+
+.macro TWO_ARG_REF_DOWNCALL name, entrypoint, return
+ .extern \entrypoint
+ENTRY \name
+ SETUP_REF_ONLY_CALLEE_SAVE_FRAME @ save callee saves in case of GC
+ ldr r2, [sp, #32] @ pass referrer
+ mov r3, r9 @ pass Thread::Current
+ mov r12, sp
+ str r12, [sp, #-16]! @ expand the frame and pass SP
+ .pad #16
+ .cfi_adjust_cfa_offset 16
+ bl \entrypoint @ (field_idx, Object*, referrer, Thread*, SP)
+ add sp, #16 @ strip the extra frame
+ .cfi_adjust_cfa_offset -16
+ RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
+ \return
+END \name
+.endm
+
+.macro THREE_ARG_REF_DOWNCALL name, entrypoint, return
+ .extern \entrypoint
+ENTRY \name
+ SETUP_REF_ONLY_CALLEE_SAVE_FRAME @ save callee saves in case of GC
+ ldr r3, [sp, #32] @ pass referrer
+ mov r12, sp @ save SP
+ sub sp, #8 @ grow frame for alignment with stack args
+ .pad #8
+ .cfi_adjust_cfa_offset 8
+ push {r9, r12} @ pass Thread::Current and SP
+ .save {r9, r12}
+ .cfi_adjust_cfa_offset 8
+ .cfi_rel_offset r9, 0
+ .cfi_rel_offset r12, 4
+ bl \entrypoint @ (field_idx, Object*, new_val, referrer, Thread*, SP)
+ add sp, #16 @ release out args
+ .cfi_adjust_cfa_offset -16
+ RESTORE_REF_ONLY_CALLEE_SAVE_FRAME @ TODO: we can clearly save an add here
+ \return
+END \name
+.endm
+
/*
* Called by managed code, saves callee saves and then calls artThrowException
* that will place a mock Method* at the bottom of the stack. Arg1 holds the exception.
@@ -601,23 +672,14 @@ ENTRY art_quick_initialize_type_and_verify_access
END art_quick_initialize_type_and_verify_access
/*
- * Called by managed code to resolve a static field and load a 32-bit primitive value.
+ * Called by managed code to resolve a static field and load a non-wide value.
*/
- .extern artGet32StaticFromCode
-ENTRY art_quick_get32_static
- SETUP_REF_ONLY_CALLEE_SAVE_FRAME @ save callee saves in case of GC
- ldr r1, [sp, #32] @ pass referrer
- mov r2, r9 @ pass Thread::Current
- mov r3, sp @ pass SP
- bl artGet32StaticFromCode @ (uint32_t field_idx, const Method* referrer, Thread*, SP)
- ldr r1, [r9, #THREAD_EXCEPTION_OFFSET] @ load Thread::Current()->exception_
- RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
- cbnz r1, 1f @ success if no exception pending
- bx lr @ return on success
-1:
- DELIVER_PENDING_EXCEPTION
-END art_quick_get32_static
-
+ONE_ARG_REF_DOWNCALL art_quick_get_byte_static, artGetByteStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_R1
+ONE_ARG_REF_DOWNCALL art_quick_get_boolean_static, artGetBooleanStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_R1
+ONE_ARG_REF_DOWNCALL art_quick_get_short_static, artGetShortStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_R1
+ONE_ARG_REF_DOWNCALL art_quick_get_char_static, artGetCharStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_R1
+ONE_ARG_REF_DOWNCALL art_quick_get32_static, artGet32StaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_R1
+ONE_ARG_REF_DOWNCALL art_quick_get_obj_static, artGetObjStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_R1
/*
* Called by managed code to resolve a static field and load a 64-bit primitive value.
*/
@@ -637,43 +699,14 @@ ENTRY art_quick_get64_static
END art_quick_get64_static
/*
- * Called by managed code to resolve a static field and load an object reference.
+ * Called by managed code to resolve an instance field and load a non-wide value.
*/
- .extern artGetObjStaticFromCode
-ENTRY art_quick_get_obj_static
- SETUP_REF_ONLY_CALLEE_SAVE_FRAME @ save callee saves in case of GC
- ldr r1, [sp, #32] @ pass referrer
- mov r2, r9 @ pass Thread::Current
- mov r3, sp @ pass SP
- bl artGetObjStaticFromCode @ (uint32_t field_idx, const Method* referrer, Thread*, SP)
- ldr r1, [r9, #THREAD_EXCEPTION_OFFSET] @ load Thread::Current()->exception_
- RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
- cbnz r1, 1f @ success if no exception pending
- bx lr @ return on success
-1:
- DELIVER_PENDING_EXCEPTION
-END art_quick_get_obj_static
-
- /*
- * Called by managed code to resolve an instance field and load a 32-bit primitive value.
- */
- .extern artGet32InstanceFromCode
-ENTRY art_quick_get32_instance
- SETUP_REF_ONLY_CALLEE_SAVE_FRAME @ save callee saves in case of GC
- ldr r2, [sp, #32] @ pass referrer
- mov r3, r9 @ pass Thread::Current
- mov r12, sp
- str r12, [sp, #-16]! @ expand the frame and pass SP
- bl artGet32InstanceFromCode @ (field_idx, Object*, referrer, Thread*, SP)
- add sp, #16 @ strip the extra frame
- ldr r1, [r9, #THREAD_EXCEPTION_OFFSET] @ load Thread::Current()->exception_
- RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
- cbnz r1, 1f @ success if no exception pending
- bx lr @ return on success
-1:
- DELIVER_PENDING_EXCEPTION
-END art_quick_get32_instance
-
+TWO_ARG_REF_DOWNCALL art_quick_get_byte_instance, artGetByteInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_R1
+TWO_ARG_REF_DOWNCALL art_quick_get_boolean_instance, artGetBooleanInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_R1
+TWO_ARG_REF_DOWNCALL art_quick_get_short_instance, artGetShortInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_R1
+TWO_ARG_REF_DOWNCALL art_quick_get_char_instance, artGetCharInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_R1
+TWO_ARG_REF_DOWNCALL art_quick_get32_instance, artGet32InstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_R1
+TWO_ARG_REF_DOWNCALL art_quick_get_obj_instance, artGetObjInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_R1
/*
* Called by managed code to resolve an instance field and load a 64-bit primitive value.
*/
@@ -698,48 +731,12 @@ ENTRY art_quick_get64_instance
END art_quick_get64_instance
/*
- * Called by managed code to resolve an instance field and load an object reference.
- */
- .extern artGetObjInstanceFromCode
-ENTRY art_quick_get_obj_instance
- SETUP_REF_ONLY_CALLEE_SAVE_FRAME @ save callee saves in case of GC
- ldr r2, [sp, #32] @ pass referrer
- mov r3, r9 @ pass Thread::Current
- mov r12, sp
- str r12, [sp, #-16]! @ expand the frame and pass SP
- .pad #16
- .cfi_adjust_cfa_offset 16
- bl artGetObjInstanceFromCode @ (field_idx, Object*, referrer, Thread*, SP)
- add sp, #16 @ strip the extra frame
- .cfi_adjust_cfa_offset -16
- ldr r1, [r9, #THREAD_EXCEPTION_OFFSET] @ load Thread::Current()->exception_
- RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
- cbnz r1, 1f @ success if no exception pending
- bx lr @ return on success
-1:
- DELIVER_PENDING_EXCEPTION
-END art_quick_get_obj_instance
-
- /*
- * Called by managed code to resolve a static field and store a 32-bit primitive value.
+ * Called by managed code to resolve a static field and store a non-wide value.
*/
- .extern artSet32StaticFromCode
-ENTRY art_quick_set32_static
- SETUP_REF_ONLY_CALLEE_SAVE_FRAME @ save callee saves in case of GC
- ldr r2, [sp, #32] @ pass referrer
- mov r3, r9 @ pass Thread::Current
- mov r12, sp
- str r12, [sp, #-16]! @ expand the frame and pass SP
- .pad #16
- .cfi_adjust_cfa_offset 16
- bl artSet32StaticFromCode @ (field_idx, new_val, referrer, Thread*, SP)
- add sp, #16 @ strip the extra frame
- .cfi_adjust_cfa_offset -16
- RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
- RETURN_IF_RESULT_IS_ZERO
- DELIVER_PENDING_EXCEPTION
-END art_quick_set32_static
-
+TWO_ARG_REF_DOWNCALL art_quick_set8_static, artSet8StaticFromCode, RETURN_IF_RESULT_IS_ZERO_OR_DELIVER
+TWO_ARG_REF_DOWNCALL art_quick_set16_static, artSet16StaticFromCode, RETURN_IF_RESULT_IS_ZERO_OR_DELIVER
+TWO_ARG_REF_DOWNCALL art_quick_set32_static, artSet32StaticFromCode, RETURN_IF_RESULT_IS_ZERO_OR_DELIVER
+TWO_ARG_REF_DOWNCALL art_quick_set_obj_static, artSetObjStaticFromCode, RETURN_IF_RESULT_IS_ZERO_OR_DELIVER
/*
* Called by managed code to resolve a static field and store a 64-bit primitive value.
* On entry r0 holds field index, r1:r2 hold new_val
@@ -767,53 +764,16 @@ ENTRY art_quick_set64_static
END art_quick_set64_static
/*
- * Called by managed code to resolve a static field and store an object reference.
- */
- .extern artSetObjStaticFromCode
-ENTRY art_quick_set_obj_static
- SETUP_REF_ONLY_CALLEE_SAVE_FRAME @ save callee saves in case of GC
- ldr r2, [sp, #32] @ pass referrer
- mov r3, r9 @ pass Thread::Current
- mov r12, sp
- str r12, [sp, #-16]! @ expand the frame and pass SP
- .pad #16
- .cfi_adjust_cfa_offset 16
- bl artSetObjStaticFromCode @ (field_idx, new_val, referrer, Thread*, SP)
- add sp, #16 @ strip the extra frame
- .cfi_adjust_cfa_offset -16
- RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
- RETURN_IF_RESULT_IS_ZERO
- DELIVER_PENDING_EXCEPTION
-END art_quick_set_obj_static
-
- /*
- * Called by managed code to resolve an instance field and store a 32-bit primitive value.
+ * Called by managed code to resolve an instance field and store a non-wide value.
*/
- .extern artSet32InstanceFromCode
-ENTRY art_quick_set32_instance
- SETUP_REF_ONLY_CALLEE_SAVE_FRAME @ save callee saves in case of GC
- ldr r3, [sp, #32] @ pass referrer
- mov r12, sp @ save SP
- sub sp, #8 @ grow frame for alignment with stack args
- .pad #8
- .cfi_adjust_cfa_offset 8
- push {r9, r12} @ pass Thread::Current and SP
- .save {r9, r12}
- .cfi_adjust_cfa_offset 8
- .cfi_rel_offset r9, 0
- .cfi_rel_offset r12, 4
- bl artSet32InstanceFromCode @ (field_idx, Object*, new_val, referrer, Thread*, SP)
- add sp, #16 @ release out args
- .cfi_adjust_cfa_offset -16
- RESTORE_REF_ONLY_CALLEE_SAVE_FRAME @ TODO: we can clearly save an add here
- RETURN_IF_RESULT_IS_ZERO
- DELIVER_PENDING_EXCEPTION
-END art_quick_set32_instance
-
+THREE_ARG_REF_DOWNCALL art_quick_set8_instance, artSet8InstanceFromCode, RETURN_IF_RESULT_IS_ZERO_OR_DELIVER
+THREE_ARG_REF_DOWNCALL art_quick_set16_instance, artSet16InstanceFromCode, RETURN_IF_RESULT_IS_ZERO_OR_DELIVER
+THREE_ARG_REF_DOWNCALL art_quick_set32_instance, artSet32InstanceFromCode, RETURN_IF_RESULT_IS_ZERO_OR_DELIVER
+THREE_ARG_REF_DOWNCALL art_quick_set_obj_instance, artSetObjInstanceFromCode, RETURN_IF_RESULT_IS_ZERO_OR_DELIVER
/*
* Called by managed code to resolve an instance field and store a 64-bit primitive value.
*/
- .extern artSet32InstanceFromCode
+ .extern artSet64InstanceFromCode
ENTRY art_quick_set64_instance
SETUP_REF_ONLY_CALLEE_SAVE_FRAME @ save callee saves in case of GC
mov r12, sp @ save SP
@@ -833,29 +793,6 @@ ENTRY art_quick_set64_instance
END art_quick_set64_instance
/*
- * Called by managed code to resolve an instance field and store an object reference.
- */
- .extern artSetObjInstanceFromCode
-ENTRY art_quick_set_obj_instance
- SETUP_REF_ONLY_CALLEE_SAVE_FRAME @ save callee saves in case of GC
- ldr r3, [sp, #32] @ pass referrer
- mov r12, sp @ save SP
- sub sp, #8 @ grow frame for alignment with stack args
- .pad #8
- .cfi_adjust_cfa_offset 8
- push {r9, r12} @ pass Thread::Current and SP
- .save {r9, r12}
- .cfi_adjust_cfa_offset 8
- .cfi_rel_offset r9, 0
- bl artSetObjInstanceFromCode @ (field_idx, Object*, new_val, referrer, Thread*, SP)
- add sp, #16 @ release out args
- .cfi_adjust_cfa_offset -16
- RESTORE_REF_ONLY_CALLEE_SAVE_FRAME @ TODO: we can clearly save an add here
- RETURN_IF_RESULT_IS_ZERO
- DELIVER_PENDING_EXCEPTION
-END art_quick_set_obj_instance
-
- /*
* Entry from managed code to resolve a string, this stub will allocate a String and deliver an
* exception on error. On success the String is returned. R0 holds the referring method,
* R1 holds the string index. The fast path check for hit in strings cache has already been
diff --git a/runtime/arch/arm64/entrypoints_init_arm64.cc b/runtime/arch/arm64/entrypoints_init_arm64.cc
index 0c33d9ce83..70e93b3051 100644
--- a/runtime/arch/arm64/entrypoints_init_arm64.cc
+++ b/runtime/arch/arm64/entrypoints_init_arm64.cc
@@ -47,12 +47,24 @@ extern "C" void* art_quick_initialize_type_and_verify_access(uint32_t, void*);
extern "C" void* art_quick_resolve_string(void*, uint32_t);
// Field entrypoints.
+extern "C" int art_quick_set8_instance(uint32_t, void*, int8_t);
+extern "C" int art_quick_set8_static(uint32_t, int8_t);
+extern "C" int art_quick_set16_instance(uint32_t, void*, int16_t);
+extern "C" int art_quick_set16_static(uint32_t, int16_t);
extern "C" int art_quick_set32_instance(uint32_t, void*, int32_t);
extern "C" int art_quick_set32_static(uint32_t, int32_t);
extern "C" int art_quick_set64_instance(uint32_t, void*, int64_t);
extern "C" int art_quick_set64_static(uint32_t, int64_t);
extern "C" int art_quick_set_obj_instance(uint32_t, void*, void*);
extern "C" int art_quick_set_obj_static(uint32_t, void*);
+extern "C" uint8_t art_quick_get_boolean_instance(uint32_t, void*);
+extern "C" int8_t art_quick_get_byte_instance(uint32_t, void*);
+extern "C" uint8_t art_quick_get_boolean_static(uint32_t);
+extern "C" int8_t art_quick_get_byte_static(uint32_t);
+extern "C" uint16_t art_quick_get_char_instance(uint32_t, void*);
+extern "C" int16_t art_quick_get_short_instance(uint32_t, void*);
+extern "C" uint16_t art_quick_get_char_static(uint32_t);
+extern "C" int16_t art_quick_get_short_static(uint32_t);
extern "C" int32_t art_quick_get32_instance(uint32_t, void*);
extern "C" int32_t art_quick_get32_static(uint32_t);
extern "C" int64_t art_quick_get64_instance(uint32_t, void*);
@@ -136,15 +148,27 @@ void InitEntryPoints(InterpreterEntryPoints* ipoints, JniEntryPoints* jpoints,
qpoints->pResolveString = art_quick_resolve_string;
// Field
+ qpoints->pSet8Instance = art_quick_set8_instance;
+ qpoints->pSet8Static = art_quick_set8_static;
+ qpoints->pSet16Instance = art_quick_set16_instance;
+ qpoints->pSet16Static = art_quick_set16_static;
qpoints->pSet32Instance = art_quick_set32_instance;
qpoints->pSet32Static = art_quick_set32_static;
qpoints->pSet64Instance = art_quick_set64_instance;
qpoints->pSet64Static = art_quick_set64_static;
qpoints->pSetObjInstance = art_quick_set_obj_instance;
qpoints->pSetObjStatic = art_quick_set_obj_static;
+ qpoints->pGetBooleanInstance = art_quick_get_boolean_instance;
+ qpoints->pGetByteInstance = art_quick_get_byte_instance;
+ qpoints->pGetCharInstance = art_quick_get_char_instance;
+ qpoints->pGetShortInstance = art_quick_get_short_instance;
qpoints->pGet32Instance = art_quick_get32_instance;
qpoints->pGet64Instance = art_quick_get64_instance;
qpoints->pGetObjInstance = art_quick_get_obj_instance;
+ qpoints->pGetBooleanStatic = art_quick_get_boolean_static;
+ qpoints->pGetByteStatic = art_quick_get_byte_static;
+ qpoints->pGetCharStatic = art_quick_get_char_static;
+ qpoints->pGetShortStatic = art_quick_get_short_static;
qpoints->pGet32Static = art_quick_get32_static;
qpoints->pGet64Static = art_quick_get64_static;
qpoints->pGetObjStatic = art_quick_get_obj_static;
diff --git a/runtime/arch/arm64/quick_entrypoints_arm64.S b/runtime/arch/arm64/quick_entrypoints_arm64.S
index 2a19e27b04..606816a181 100644
--- a/runtime/arch/arm64/quick_entrypoints_arm64.S
+++ b/runtime/arch/arm64/quick_entrypoints_arm64.S
@@ -1266,17 +1266,29 @@ TWO_ARG_DOWNCALL art_quick_initialize_static_storage, artInitializeStaticStorage
TWO_ARG_DOWNCALL art_quick_initialize_type, artInitializeTypeFromCode, RETURN_IF_RESULT_IS_NON_ZERO
TWO_ARG_DOWNCALL art_quick_initialize_type_and_verify_access, artInitializeTypeAndVerifyAccessFromCode, RETURN_IF_RESULT_IS_NON_ZERO
+ONE_ARG_REF_DOWNCALL art_quick_get_boolean_static, artGetBooleanStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
+ONE_ARG_REF_DOWNCALL art_quick_get_byte_static, artGetByteStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
+ONE_ARG_REF_DOWNCALL art_quick_get_char_static, artGetCharStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
+ONE_ARG_REF_DOWNCALL art_quick_get_short_static, artGetShortStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
ONE_ARG_REF_DOWNCALL art_quick_get32_static, artGet32StaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
ONE_ARG_REF_DOWNCALL art_quick_get64_static, artGet64StaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
ONE_ARG_REF_DOWNCALL art_quick_get_obj_static, artGetObjStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
+TWO_ARG_REF_DOWNCALL art_quick_get_boolean_instance, artGetBooleanInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
+TWO_ARG_REF_DOWNCALL art_quick_get_byte_instance, artGetByteInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
+TWO_ARG_REF_DOWNCALL art_quick_get_char_instance, artGetCharInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
+TWO_ARG_REF_DOWNCALL art_quick_get_short_instance, artGetShortInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
TWO_ARG_REF_DOWNCALL art_quick_get32_instance, artGet32InstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
TWO_ARG_REF_DOWNCALL art_quick_get64_instance, artGet64InstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
TWO_ARG_REF_DOWNCALL art_quick_get_obj_instance, artGetObjInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
+TWO_ARG_REF_DOWNCALL art_quick_set8_static, artSet8StaticFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
+TWO_ARG_REF_DOWNCALL art_quick_set16_static, artSet16StaticFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
TWO_ARG_REF_DOWNCALL art_quick_set32_static, artSet32StaticFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
TWO_ARG_REF_DOWNCALL art_quick_set_obj_static, artSetObjStaticFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
+THREE_ARG_REF_DOWNCALL art_quick_set8_instance, artSet8InstanceFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
+THREE_ARG_REF_DOWNCALL art_quick_set16_instance, artSet16InstanceFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
THREE_ARG_REF_DOWNCALL art_quick_set32_instance, artSet32InstanceFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
THREE_ARG_DOWNCALL art_quick_set64_instance, artSet64InstanceFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
THREE_ARG_REF_DOWNCALL art_quick_set_obj_instance, artSetObjInstanceFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
diff --git a/runtime/arch/mips/entrypoints_init_mips.cc b/runtime/arch/mips/entrypoints_init_mips.cc
index d3e7d5e904..25e911d765 100644
--- a/runtime/arch/mips/entrypoints_init_mips.cc
+++ b/runtime/arch/mips/entrypoints_init_mips.cc
@@ -49,12 +49,24 @@ extern "C" void* art_quick_initialize_type_and_verify_access(uint32_t, void*);
extern "C" void* art_quick_resolve_string(void*, uint32_t);
// Field entrypoints.
+extern "C" int art_quick_set8_instance(uint32_t, void*, int8_t);
+extern "C" int art_quick_set8_static(uint32_t, int8_t);
+extern "C" int art_quick_set16_instance(uint32_t, void*, int16_t);
+extern "C" int art_quick_set16_static(uint32_t, int16_t);
extern "C" int art_quick_set32_instance(uint32_t, void*, int32_t);
extern "C" int art_quick_set32_static(uint32_t, int32_t);
extern "C" int art_quick_set64_instance(uint32_t, void*, int64_t);
extern "C" int art_quick_set64_static(uint32_t, int64_t);
extern "C" int art_quick_set_obj_instance(uint32_t, void*, void*);
extern "C" int art_quick_set_obj_static(uint32_t, void*);
+extern "C" uint8_t art_quick_get_boolean_instance(uint32_t, void*);
+extern "C" int8_t art_quick_get_byte_instance(uint32_t, void*);
+extern "C" uint8_t art_quick_get_boolean_static(uint32_t);
+extern "C" int8_t art_quick_get_byte_static(uint32_t);
+extern "C" uint16_t art_quick_get_char_instance(uint32_t, void*);
+extern "C" int16_t art_quick_get_short_instance(uint32_t, void*);
+extern "C" uint16_t art_quick_get_char_static(uint32_t);
+extern "C" int16_t art_quick_get_short_static(uint32_t);
extern "C" int32_t art_quick_get32_instance(uint32_t, void*);
extern "C" int32_t art_quick_get32_static(uint32_t);
extern "C" int64_t art_quick_get64_instance(uint32_t, void*);
@@ -159,15 +171,27 @@ void InitEntryPoints(InterpreterEntryPoints* ipoints, JniEntryPoints* jpoints,
qpoints->pResolveString = art_quick_resolve_string;
// Field
+ qpoints->pSet8Instance = art_quick_set8_instance;
+ qpoints->pSet8Static = art_quick_set8_static;
+ qpoints->pSet16Instance = art_quick_set16_instance;
+ qpoints->pSet16Static = art_quick_set16_static;
qpoints->pSet32Instance = art_quick_set32_instance;
qpoints->pSet32Static = art_quick_set32_static;
qpoints->pSet64Instance = art_quick_set64_instance;
qpoints->pSet64Static = art_quick_set64_static;
qpoints->pSetObjInstance = art_quick_set_obj_instance;
qpoints->pSetObjStatic = art_quick_set_obj_static;
+ qpoints->pGetBooleanInstance = art_quick_get_boolean_instance;
+ qpoints->pGetByteInstance = art_quick_get_byte_instance;
+ qpoints->pGetCharInstance = art_quick_get_char_instance;
+ qpoints->pGetShortInstance = art_quick_get_short_instance;
qpoints->pGet32Instance = art_quick_get32_instance;
qpoints->pGet64Instance = art_quick_get64_instance;
qpoints->pGetObjInstance = art_quick_get_obj_instance;
+ qpoints->pGetBooleanStatic = art_quick_get_boolean_static;
+ qpoints->pGetByteStatic = art_quick_get_byte_static;
+ qpoints->pGetCharStatic = art_quick_get_char_static;
+ qpoints->pGetShortStatic = art_quick_get_short_static;
qpoints->pGet32Static = art_quick_get32_static;
qpoints->pGet64Static = art_quick_get64_static;
qpoints->pGetObjStatic = art_quick_get_obj_static;
diff --git a/runtime/arch/mips/quick_entrypoints_mips.S b/runtime/arch/mips/quick_entrypoints_mips.S
index 8786222250..9e9e5236af 100644
--- a/runtime/arch/mips/quick_entrypoints_mips.S
+++ b/runtime/arch/mips/quick_entrypoints_mips.S
@@ -739,6 +739,59 @@ ENTRY art_quick_initialize_type_and_verify_access
move $a3, $sp # pass $sp
RETURN_IF_RESULT_IS_NON_ZERO
END art_quick_initialize_type_and_verify_access
+ /*
+ * Called by managed code to resolve a static field and load a boolean primitive value.
+ */
+ .extern artGetBooleanStaticFromCode
+ENTRY art_quick_get_boolean_static
+ GENERATE_GLOBAL_POINTER
+ SETUP_REF_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC
+ lw $a1, 64($sp) # pass referrer's Method*
+ move $a2, rSELF # pass Thread::Current
+ jal artGetBooleanStaticFromCode # (uint32_t field_idx, const Method* referrer, Thread*, $sp)
+ move $a3, $sp # pass $sp
+ RETURN_IF_NO_EXCEPTION
+END art_quick_get_boolean_static
+ /*
+ * Called by managed code to resolve a static field and load a byte primitive value.
+ */
+ .extern artGetByteStaticFromCode
+ENTRY art_quick_get_byte_static
+ GENERATE_GLOBAL_POINTER
+ SETUP_REF_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC
+ lw $a1, 64($sp) # pass referrer's Method*
+ move $a2, rSELF # pass Thread::Current
+ jal artGetByteStaticFromCode # (uint32_t field_idx, const Method* referrer, Thread*, $sp)
+ move $a3, $sp # pass $sp
+ RETURN_IF_NO_EXCEPTION
+END art_quick_get_byte_static
+
+ /*
+ * Called by managed code to resolve a static field and load a char primitive value.
+ */
+ .extern artGetCharStaticFromCode
+ENTRY art_quick_get_char_static
+ GENERATE_GLOBAL_POINTER
+ SETUP_REF_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC
+ lw $a1, 64($sp) # pass referrer's Method*
+ move $a2, rSELF # pass Thread::Current
+ jal artGetCharStaticFromCode # (uint32_t field_idx, const Method* referrer, Thread*, $sp)
+ move $a3, $sp # pass $sp
+ RETURN_IF_NO_EXCEPTION
+END art_quick_get_char_static
+ /*
+ * Called by managed code to resolve a static field and load a short primitive value.
+ */
+ .extern artGetShortStaticFromCode
+ENTRY art_quick_get_short_static
+ GENERATE_GLOBAL_POINTER
+ SETUP_REF_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC
+ lw $a1, 64($sp) # pass referrer's Method*
+ move $a2, rSELF # pass Thread::Current
+ jal artGetShortStaticFromCode # (uint32_t field_idx, const Method* referrer, Thread*, $sp)
+ move $a3, $sp # pass $sp
+ RETURN_IF_NO_EXCEPTION
+END art_quick_get_short_static
/*
* Called by managed code to resolve a static field and load a 32-bit primitive value.
@@ -783,6 +836,60 @@ ENTRY art_quick_get_obj_static
END art_quick_get_obj_static
/*
+ * Called by managed code to resolve an instance field and load a boolean primitive value.
+ */
+ .extern artGetBooleanInstanceFromCode
+ENTRY art_quick_get_boolean_instance
+ GENERATE_GLOBAL_POINTER
+ SETUP_REF_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC
+ lw $a2, 64($sp) # pass referrer's Method*
+ move $a3, rSELF # pass Thread::Current
+ jal artGetBooleanInstanceFromCode # (field_idx, Object*, referrer, Thread*, $sp)
+ sw $sp, 16($sp) # pass $sp
+ RETURN_IF_NO_EXCEPTION
+END art_quick_get_boolean_instance
+ /*
+ * Called by managed code to resolve an instance field and load a byte primitive value.
+ */
+ .extern artGetByteInstanceFromCode
+ENTRY art_quick_get_byte_instance
+ GENERATE_GLOBAL_POINTER
+ SETUP_REF_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC
+ lw $a2, 64($sp) # pass referrer's Method*
+ move $a3, rSELF # pass Thread::Current
+ jal artGetByteInstanceFromCode # (field_idx, Object*, referrer, Thread*, $sp)
+ sw $sp, 16($sp) # pass $sp
+ RETURN_IF_NO_EXCEPTION
+END art_quick_get_byte_instance
+
+ /*
+ * Called by managed code to resolve an instance field and load a char primitive value.
+ */
+ .extern artGetCharInstanceFromCode
+ENTRY art_quick_get_char_instance
+ GENERATE_GLOBAL_POINTER
+ SETUP_REF_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC
+ lw $a2, 64($sp) # pass referrer's Method*
+ move $a3, rSELF # pass Thread::Current
+ jal artGetCharInstanceFromCode # (field_idx, Object*, referrer, Thread*, $sp)
+ sw $sp, 16($sp) # pass $sp
+ RETURN_IF_NO_EXCEPTION
+END art_quick_get_char_instance
+ /*
+ * Called by managed code to resolve an instance field and load a short primitive value.
+ */
+ .extern artGetShortInstanceFromCode
+ENTRY art_quick_get_short_instance
+ GENERATE_GLOBAL_POINTER
+ SETUP_REF_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC
+ lw $a2, 64($sp) # pass referrer's Method*
+ move $a3, rSELF # pass Thread::Current
+ jal artGetShortInstanceFromCode # (field_idx, Object*, referrer, Thread*, $sp)
+ sw $sp, 16($sp) # pass $sp
+ RETURN_IF_NO_EXCEPTION
+END art_quick_get_short_instance
+
+ /*
* Called by managed code to resolve an instance field and load a 32-bit primitive value.
*/
.extern artGet32InstanceFromCode
@@ -825,6 +932,34 @@ ENTRY art_quick_get_obj_instance
END art_quick_get_obj_instance
/*
+ * Called by managed code to resolve a static field and store an 8-bit primitive value.
+ */
+ .extern artSet8StaticFromCode
+ENTRY art_quick_set8_static
+ GENERATE_GLOBAL_POINTER
+ SETUP_REF_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC
+ lw $a2, 64($sp) # pass referrer's Method*
+ move $a3, rSELF # pass Thread::Current
+ jal artSet8StaticFromCode # (field_idx, new_val, referrer, Thread*, $sp)
+ sw $sp, 16($sp) # pass $sp
+ RETURN_IF_ZERO
+END art_quick_set8_static
+
+ /*
+ * Called by managed code to resolve a static field and store a 16-bit primitive value.
+ */
+ .extern artSet16StaticFromCode
+ENTRY art_quick_set16_static
+ GENERATE_GLOBAL_POINTER
+ SETUP_REF_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC
+ lw $a2, 64($sp) # pass referrer's Method*
+ move $a3, rSELF # pass Thread::Current
+ jal artSet16StaticFromCode # (field_idx, new_val, referrer, Thread*, $sp)
+ sw $sp, 16($sp) # pass $sp
+ RETURN_IF_ZERO
+END art_quick_set16_static
+
+ /*
* Called by managed code to resolve a static field and store a 32-bit primitive value.
*/
.extern artSet32StaticFromCode
@@ -841,7 +976,7 @@ END art_quick_set32_static
/*
* Called by managed code to resolve a static field and store a 64-bit primitive value.
*/
- .extern artSet32StaticFromCode
+ .extern artSet64StaticFromCode
ENTRY art_quick_set64_static
GENERATE_GLOBAL_POINTER
SETUP_REF_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC
@@ -867,6 +1002,34 @@ ENTRY art_quick_set_obj_static
END art_quick_set_obj_static
/*
+ * Called by managed code to resolve an instance field and store an 8-bit primitive value.
+ */
+ .extern artSet8InstanceFromCode
+ENTRY art_quick_set8_instance
+ GENERATE_GLOBAL_POINTER
+ SETUP_REF_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC
+ lw $a3, 64($sp) # pass referrer's Method*
+ sw rSELF, 16($sp) # pass Thread::Current
+ jal artSet8InstanceFromCode # (field_idx, Object*, new_val, referrer, Thread*, $sp)
+ sw $sp, 20($sp) # pass $sp
+ RETURN_IF_ZERO
+END art_quick_set8_instance
+
+ /*
+ * Called by managed code to resolve an instance field and store a 16-bit primitive value.
+ */
+ .extern artSet16InstanceFromCode
+ENTRY art_quick_set16_instance
+ GENERATE_GLOBAL_POINTER
+ SETUP_REF_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC
+ lw $a3, 64($sp) # pass referrer's Method*
+ sw rSELF, 16($sp) # pass Thread::Current
+ jal artSet16InstanceFromCode # (field_idx, Object*, new_val, referrer, Thread*, $sp)
+ sw $sp, 20($sp) # pass $sp
+ RETURN_IF_ZERO
+END art_quick_set16_instance
+
+ /*
* Called by managed code to resolve an instance field and store a 32-bit primitive value.
*/
.extern artSet32InstanceFromCode
@@ -883,7 +1046,7 @@ END art_quick_set32_instance
/*
* Called by managed code to resolve an instance field and store a 64-bit primitive value.
*/
- .extern artSet32InstanceFromCode
+ .extern artSet64InstanceFromCode
ENTRY art_quick_set64_instance
GENERATE_GLOBAL_POINTER
SETUP_REF_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC
diff --git a/runtime/arch/stub_test.cc b/runtime/arch/stub_test.cc
index 864e3f7ad0..1215d8b322 100644
--- a/runtime/arch/stub_test.cc
+++ b/runtime/arch/stub_test.cc
@@ -1306,6 +1306,288 @@ TEST_F(StubTest, StringCompareTo) {
}
+#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
+extern "C" void art_quick_set8_static(void);
+extern "C" void art_quick_get_byte_static(void);
+extern "C" void art_quick_get_boolean_static(void);
+#endif
+
+static void GetSetBooleanStatic(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f, Thread* self,
+ mirror::ArtMethod* referrer, StubTest* test)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
+ constexpr size_t num_values = 5;
+ uint8_t values[num_values] = { 0, 1, 2, 128, 0xFF };
+
+ for (size_t i = 0; i < num_values; ++i) {
+ test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
+ static_cast<size_t>(values[i]),
+ 0U,
+ StubTest::GetEntrypoint(self, kQuickSet8Static),
+ self,
+ referrer);
+
+ size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
+ 0U, 0U,
+ StubTest::GetEntrypoint(self, kQuickGetBooleanStatic),
+ self,
+ referrer);
+ // Booleans are currently stored as uint8_t, so be extra zealous about asserting correct writes/gets.
+ EXPECT_EQ(values[i], static_cast<uint8_t>(res)) << "Iteration " << i;
+ }
+#else
+ LOG(INFO) << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA;
+ // Force-print to std::cout so it's also outside the logcat.
+ std::cout << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA << std::endl;
+#endif
+}
+static void GetSetByteStatic(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f, Thread* self,
+ mirror::ArtMethod* referrer, StubTest* test)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
+ constexpr size_t num_values = 5;
+ int8_t values[num_values] = { -128, -64, 0, 64, 127 };
+
+ for (size_t i = 0; i < num_values; ++i) {
+ test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
+ static_cast<size_t>(values[i]),
+ 0U,
+ StubTest::GetEntrypoint(self, kQuickSet8Static),
+ self,
+ referrer);
+
+ size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
+ 0U, 0U,
+ StubTest::GetEntrypoint(self, kQuickGetByteStatic),
+ self,
+ referrer);
+ EXPECT_EQ(values[i], static_cast<int8_t>(res)) << "Iteration " << i;
+ }
+#else
+ LOG(INFO) << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA;
+ // Force-print to std::cout so it's also outside the logcat.
+ std::cout << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA << std::endl;
+#endif
+}
+
+
+#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
+extern "C" void art_quick_set8_instance(void);
+extern "C" void art_quick_get_byte_instance(void);
+extern "C" void art_quick_get_boolean_instance(void);
+#endif
+
+static void GetSetBooleanInstance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
+ Thread* self, mirror::ArtMethod* referrer, StubTest* test)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
+ constexpr size_t num_values = 5;
+ uint8_t values[num_values] = { 0, true, 2, 128, 0xFF };
+
+ for (size_t i = 0; i < num_values; ++i) {
+ test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
+ reinterpret_cast<size_t>(obj->Get()),
+ static_cast<size_t>(values[i]),
+ StubTest::GetEntrypoint(self, kQuickSet8Instance),
+ self,
+ referrer);
+
+ uint8_t res = f->Get()->GetBoolean(obj->Get());
+ EXPECT_EQ(values[i], res) << "Iteration " << i;
+
+ f->Get()->SetBoolean<false>(obj->Get(), res);
+
+ size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
+ reinterpret_cast<size_t>(obj->Get()),
+ 0U,
+ StubTest::GetEntrypoint(self, kQuickGetBooleanInstance),
+ self,
+ referrer);
+ EXPECT_EQ(res, static_cast<uint8_t>(res2));
+ }
+#else
+ LOG(INFO) << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA;
+ // Force-print to std::cout so it's also outside the logcat.
+ std::cout << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
+#endif
+}
+static void GetSetByteInstance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
+ Thread* self, mirror::ArtMethod* referrer, StubTest* test)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
+ constexpr size_t num_values = 5;
+ int8_t values[num_values] = { -128, -64, 0, 64, 127 };
+
+ for (size_t i = 0; i < num_values; ++i) {
+ test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
+ reinterpret_cast<size_t>(obj->Get()),
+ static_cast<size_t>(values[i]),
+ StubTest::GetEntrypoint(self, kQuickSet8Instance),
+ self,
+ referrer);
+
+ int8_t res = f->Get()->GetByte(obj->Get());
+ EXPECT_EQ(res, values[i]) << "Iteration " << i;
+ f->Get()->SetByte<false>(obj->Get(), ++res);
+
+ size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
+ reinterpret_cast<size_t>(obj->Get()),
+ 0U,
+ StubTest::GetEntrypoint(self, kQuickGetByteInstance),
+ self,
+ referrer);
+ EXPECT_EQ(res, static_cast<int8_t>(res2));
+ }
+#else
+ LOG(INFO) << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA;
+ // Force-print to std::cout so it's also outside the logcat.
+ std::cout << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
+#endif
+}
+
+#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
+extern "C" void art_quick_set16_static(void);
+extern "C" void art_quick_get_short_static(void);
+extern "C" void art_quick_get_char_static(void);
+#endif
+
+static void GetSetCharStatic(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f, Thread* self,
+ mirror::ArtMethod* referrer, StubTest* test)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
+ constexpr size_t num_values = 6;
+ uint16_t values[num_values] = { 0, 1, 2, 255, 32768, 0xFFFF };
+
+ for (size_t i = 0; i < num_values; ++i) {
+ test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
+ static_cast<size_t>(values[i]),
+ 0U,
+ StubTest::GetEntrypoint(self, kQuickSet16Static),
+ self,
+ referrer);
+
+ size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
+ 0U, 0U,
+ StubTest::GetEntrypoint(self, kQuickGetCharStatic),
+ self,
+ referrer);
+
+ EXPECT_EQ(values[i], static_cast<uint16_t>(res)) << "Iteration " << i;
+ }
+#else
+ LOG(INFO) << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA;
+ // Force-print to std::cout so it's also outside the logcat.
+ std::cout << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA << std::endl;
+#endif
+}
+static void GetSetShortStatic(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f, Thread* self,
+ mirror::ArtMethod* referrer, StubTest* test)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
+ constexpr size_t num_values = 6;
+ int16_t values[num_values] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };
+
+ for (size_t i = 0; i < num_values; ++i) {
+ test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
+ static_cast<size_t>(values[i]),
+ 0U,
+ StubTest::GetEntrypoint(self, kQuickSet16Static),
+ self,
+ referrer);
+
+ size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
+ 0U, 0U,
+ StubTest::GetEntrypoint(self, kQuickGetShortStatic),
+ self,
+ referrer);
+
+ EXPECT_EQ(static_cast<int16_t>(res), values[i]) << "Iteration " << i;
+ }
+#else
+ LOG(INFO) << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA;
+ // Force-print to std::cout so it's also outside the logcat.
+ std::cout << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA << std::endl;
+#endif
+}
+
+#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
+extern "C" void art_quick_set16_instance(void);
+extern "C" void art_quick_get_short_instance(void);
+extern "C" void art_quick_get_char_instance(void);
+#endif
+
+static void GetSetCharInstance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
+ Thread* self, mirror::ArtMethod* referrer, StubTest* test)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
+ constexpr size_t num_values = 6;
+ uint16_t values[num_values] = { 0, 1, 2, 255, 32768, 0xFFFF };
+
+ for (size_t i = 0; i < num_values; ++i) {
+ test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
+ reinterpret_cast<size_t>(obj->Get()),
+ static_cast<size_t>(values[i]),
+ StubTest::GetEntrypoint(self, kQuickSet16Instance),
+ self,
+ referrer);
+
+ uint16_t res = f->Get()->GetChar(obj->Get());
+ EXPECT_EQ(res, values[i]) << "Iteration " << i;
+ f->Get()->SetChar<false>(obj->Get(), ++res);
+
+ size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
+ reinterpret_cast<size_t>(obj->Get()),
+ 0U,
+ StubTest::GetEntrypoint(self, kQuickGetCharInstance),
+ self,
+ referrer);
+ EXPECT_EQ(res, static_cast<uint16_t>(res2));
+ }
+#else
+ LOG(INFO) << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA;
+ // Force-print to std::cout so it's also outside the logcat.
+ std::cout << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
+#endif
+}
+static void GetSetShortInstance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
+ Thread* self, mirror::ArtMethod* referrer, StubTest* test)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
+ constexpr size_t num_values = 6;
+ int16_t values[num_values] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };
+
+ for (size_t i = 0; i < num_values; ++i) {
+ test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
+ reinterpret_cast<size_t>(obj->Get()),
+ static_cast<size_t>(values[i]),
+ StubTest::GetEntrypoint(self, kQuickSet16Instance),
+ self,
+ referrer);
+
+ int16_t res = f->Get()->GetShort(obj->Get());
+ EXPECT_EQ(res, values[i]) << "Iteration " << i;
+ f->Get()->SetShort<false>(obj->Get(), ++res);
+
+ size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
+ reinterpret_cast<size_t>(obj->Get()),
+ 0U,
+ StubTest::GetEntrypoint(self, kQuickGetShortInstance),
+ self,
+ referrer);
+ EXPECT_EQ(res, static_cast<int16_t>(res2));
+ }
+#else
+ LOG(INFO) << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA;
+ // Force-print to std::cout so it's also outside the logcat.
+ std::cout << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
+#endif
+}
+
+#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
+extern "C" void art_quick_set32_static(void);
+extern "C" void art_quick_get32_static(void);
+#endif
+
static void GetSet32Static(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f, Thread* self,
mirror::ArtMethod* referrer, StubTest* test)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
@@ -1555,6 +1837,26 @@ static void TestFields(Thread* self, StubTest* test, Primitive::Type test_type)
Primitive::Type type = f->GetTypeAsPrimitiveType();
switch (type) {
+ case Primitive::Type::kPrimBoolean:
+ if (test_type == type) {
+ GetSetBooleanStatic(&obj, &f, self, m.Get(), test);
+ }
+ break;
+ case Primitive::Type::kPrimByte:
+ if (test_type == type) {
+ GetSetByteStatic(&obj, &f, self, m.Get(), test);
+ }
+ break;
+ case Primitive::Type::kPrimChar:
+ if (test_type == type) {
+ GetSetCharStatic(&obj, &f, self, m.Get(), test);
+ }
+ break;
+ case Primitive::Type::kPrimShort:
+ if (test_type == type) {
+ GetSetShortStatic(&obj, &f, self, m.Get(), test);
+ }
+ break;
case Primitive::Type::kPrimInt:
if (test_type == type) {
GetSet32Static(&obj, &f, self, m.Get(), test);
@@ -1590,6 +1892,26 @@ static void TestFields(Thread* self, StubTest* test, Primitive::Type test_type)
Primitive::Type type = f->GetTypeAsPrimitiveType();
switch (type) {
+ case Primitive::Type::kPrimBoolean:
+ if (test_type == type) {
+ GetSetBooleanInstance(&obj, &f, self, m.Get(), test);
+ }
+ break;
+ case Primitive::Type::kPrimByte:
+ if (test_type == type) {
+ GetSetByteInstance(&obj, &f, self, m.Get(), test);
+ }
+ break;
+ case Primitive::Type::kPrimChar:
+ if (test_type == type) {
+ GetSetCharInstance(&obj, &f, self, m.Get(), test);
+ }
+ break;
+ case Primitive::Type::kPrimShort:
+ if (test_type == type) {
+ GetSetShortInstance(&obj, &f, self, m.Get(), test);
+ }
+ break;
case Primitive::Type::kPrimInt:
if (test_type == type) {
GetSet32Instance(&obj, &f, self, m.Get(), test);
@@ -1618,6 +1940,33 @@ static void TestFields(Thread* self, StubTest* test, Primitive::Type test_type)
// TODO: Deallocate things.
}
+TEST_F(StubTest, Fields8) {
+ TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
+
+ Thread* self = Thread::Current();
+
+ self->TransitionFromSuspendedToRunnable();
+ LoadDex("AllFields");
+ bool started = runtime_->Start();
+ CHECK(started);
+
+ TestFields(self, this, Primitive::Type::kPrimBoolean);
+ TestFields(self, this, Primitive::Type::kPrimByte);
+}
+
+TEST_F(StubTest, Fields16) {
+ TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
+
+ Thread* self = Thread::Current();
+
+ self->TransitionFromSuspendedToRunnable();
+ LoadDex("AllFields");
+ bool started = runtime_->Start();
+ CHECK(started);
+
+ TestFields(self, this, Primitive::Type::kPrimChar);
+ TestFields(self, this, Primitive::Type::kPrimShort);
+}
TEST_F(StubTest, Fields32) {
TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
diff --git a/runtime/arch/x86/entrypoints_init_x86.cc b/runtime/arch/x86/entrypoints_init_x86.cc
index a072996504..682c5021ce 100644
--- a/runtime/arch/x86/entrypoints_init_x86.cc
+++ b/runtime/arch/x86/entrypoints_init_x86.cc
@@ -47,12 +47,24 @@ extern "C" void* art_quick_initialize_type_and_verify_access(uint32_t, void*);
extern "C" void* art_quick_resolve_string(void*, uint32_t);
// Field entrypoints.
+extern "C" int art_quick_set8_instance(uint32_t, void*, int8_t);
+extern "C" int art_quick_set8_static(uint32_t, int8_t);
+extern "C" int art_quick_set16_instance(uint32_t, void*, int16_t);
+extern "C" int art_quick_set16_static(uint32_t, int16_t);
extern "C" int art_quick_set32_instance(uint32_t, void*, int32_t);
extern "C" int art_quick_set32_static(uint32_t, int32_t);
extern "C" int art_quick_set64_instance(uint32_t, void*, int64_t);
extern "C" int art_quick_set64_static(uint32_t, int64_t);
extern "C" int art_quick_set_obj_instance(uint32_t, void*, void*);
extern "C" int art_quick_set_obj_static(uint32_t, void*);
+extern "C" int8_t art_quick_get_byte_instance(uint32_t, void*);
+extern "C" uint8_t art_quick_get_boolean_instance(uint32_t, void*);
+extern "C" int8_t art_quick_get_byte_static(uint32_t);
+extern "C" uint8_t art_quick_get_boolean_static(uint32_t);
+extern "C" int16_t art_quick_get_short_instance(uint32_t, void*);
+extern "C" uint16_t art_quick_get_char_instance(uint32_t, void*);
+extern "C" int16_t art_quick_get_short_static(uint32_t);
+extern "C" uint16_t art_quick_get_char_static(uint32_t);
extern "C" int32_t art_quick_get32_instance(uint32_t, void*);
extern "C" int32_t art_quick_get32_static(uint32_t);
extern "C" int64_t art_quick_get64_instance(uint32_t, void*);
@@ -137,15 +149,27 @@ void InitEntryPoints(InterpreterEntryPoints* ipoints, JniEntryPoints* jpoints,
qpoints->pResolveString = art_quick_resolve_string;
// Field
+ qpoints->pSet8Instance = art_quick_set8_instance;
+ qpoints->pSet8Static = art_quick_set8_static;
+ qpoints->pSet16Instance = art_quick_set16_instance;
+ qpoints->pSet16Static = art_quick_set16_static;
qpoints->pSet32Instance = art_quick_set32_instance;
qpoints->pSet32Static = art_quick_set32_static;
qpoints->pSet64Instance = art_quick_set64_instance;
qpoints->pSet64Static = art_quick_set64_static;
qpoints->pSetObjInstance = art_quick_set_obj_instance;
qpoints->pSetObjStatic = art_quick_set_obj_static;
+ qpoints->pGetByteInstance = art_quick_get_byte_instance;
+ qpoints->pGetBooleanInstance = art_quick_get_boolean_instance;
+ qpoints->pGetShortInstance = art_quick_get_short_instance;
+ qpoints->pGetCharInstance = art_quick_get_char_instance;
qpoints->pGet32Instance = art_quick_get32_instance;
qpoints->pGet64Instance = art_quick_get64_instance;
qpoints->pGetObjInstance = art_quick_get_obj_instance;
+ qpoints->pGetByteStatic = art_quick_get_byte_static;
+ qpoints->pGetBooleanStatic = art_quick_get_boolean_static;
+ qpoints->pGetShortStatic = art_quick_get_short_static;
+ qpoints->pGetCharStatic = art_quick_get_char_static;
qpoints->pGet32Static = art_quick_get32_static;
qpoints->pGet64Static = art_quick_get64_static;
qpoints->pGetObjStatic = art_quick_get_obj_static;
diff --git a/runtime/arch/x86/quick_entrypoints_x86.S b/runtime/arch/x86/quick_entrypoints_x86.S
index c6e704a9cd..6166cb5cd5 100644
--- a/runtime/arch/x86/quick_entrypoints_x86.S
+++ b/runtime/arch/x86/quick_entrypoints_x86.S
@@ -384,6 +384,48 @@ MACRO3(THREE_ARG_DOWNCALL, c_name, cxx_name, return_macro)
END_FUNCTION RAW_VAR(c_name, 0)
END_MACRO
+MACRO3(ONE_ARG_REF_DOWNCALL, c_name, cxx_name, return_macro)
+ DEFINE_FUNCTION RAW_VAR(c_name, 0)
+ SETUP_REF_ONLY_CALLEE_SAVE_FRAME // save ref containing registers for GC
+ mov %esp, %edx // remember SP
+ mov 32(%esp), %ecx // get referrer
+ // Outgoing argument set up
+ PUSH edx // pass SP
+ pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current()
+ CFI_ADJUST_CFA_OFFSET(4)
+ PUSH ecx // pass referrer
+ PUSH eax // pass arg1
+ call VAR(cxx_name, 1) // cxx_name(arg1, referrer, Thread*, SP)
+ addl MACRO_LITERAL(16), %esp // pop arguments
+ CFI_ADJUST_CFA_OFFSET(-16)
+ RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address
+ CALL_MACRO(return_macro, 2) // return or deliver exception
+ END_FUNCTION RAW_VAR(c_name, 0)
+END_MACRO
+
+MACRO3(TWO_ARG_REF_DOWNCALL, c_name, cxx_name, return_macro)
+ DEFINE_FUNCTION RAW_VAR(c_name, 0)
+ SETUP_REF_ONLY_CALLEE_SAVE_FRAME // save ref containing registers for GC
+ mov %esp, %ebx // remember SP
+ mov 32(%esp), %edx // get referrer
+ subl LITERAL(12), %esp // alignment padding
+ CFI_ADJUST_CFA_OFFSET(12)
+ PUSH ebx // pass SP
+ pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current()
+ CFI_ADJUST_CFA_OFFSET(4)
+ // Outgoing argument set up
+ PUSH edx // pass referrer
+ PUSH ecx // pass arg2
+ PUSH eax // pass arg1
+ call VAR(cxx_name, 1) // cxx_name(arg1, arg2, referrer, Thread*, SP)
+ addl LITERAL(32), %esp // pop arguments
+ CFI_ADJUST_CFA_OFFSET(-32)
+ RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address
+ CALL_MACRO(return_macro, 2) // return or deliver exception
+ END_FUNCTION RAW_VAR(c_name, 0)
+END_MACRO
+
+
MACRO0(RETURN_IF_RESULT_IS_NON_ZERO)
testl %eax, %eax // eax == 0 ?
jz 1f // if eax == 0 goto 1
@@ -814,6 +856,46 @@ DEFINE_FUNCTION art_quick_lushr
ret
END_FUNCTION art_quick_lushr
+DEFINE_FUNCTION art_quick_set8_instance
+ SETUP_REF_ONLY_CALLEE_SAVE_FRAME // save ref containing registers for GC
+ mov %esp, %ebx // remember SP
+ subl LITERAL(8), %esp // alignment padding
+ CFI_ADJUST_CFA_OFFSET(8)
+ PUSH ebx // pass SP
+ pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current()
+ CFI_ADJUST_CFA_OFFSET(4)
+ mov 32(%ebx), %ebx // get referrer
+ PUSH ebx // pass referrer
+ PUSH edx // pass new_val
+ PUSH ecx // pass object
+ PUSH eax // pass field_idx
+ call PLT_SYMBOL(artSet8InstanceFromCode) // (field_idx, Object*, new_val, referrer, Thread*, SP)
+ addl LITERAL(32), %esp // pop arguments
+ CFI_ADJUST_CFA_OFFSET(-32)
+ RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address
+ RETURN_IF_EAX_ZERO // return or deliver exception
+END_FUNCTION art_quick_set8_instance
+
+DEFINE_FUNCTION art_quick_set16_instance
+ SETUP_REF_ONLY_CALLEE_SAVE_FRAME // save ref containing registers for GC
+ mov %esp, %ebx // remember SP
+ subl LITERAL(8), %esp // alignment padding
+ CFI_ADJUST_CFA_OFFSET(8)
+ PUSH ebx // pass SP
+ pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current()
+ CFI_ADJUST_CFA_OFFSET(4)
+ mov 32(%ebx), %ebx // get referrer
+ PUSH ebx // pass referrer
+ PUSH edx // pass new_val
+ PUSH ecx // pass object
+ PUSH eax // pass field_idx
+ call PLT_SYMBOL(artSet16InstanceFromCode) // (field_idx, Object*, new_val, referrer, Thread*, SP)
+ addl LITERAL(32), %esp // pop arguments
+ CFI_ADJUST_CFA_OFFSET(-32)
+ RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address
+ RETURN_IF_EAX_ZERO // return or deliver exception
+END_FUNCTION art_quick_set16_instance
+
DEFINE_FUNCTION art_quick_set32_instance
SETUP_REF_ONLY_CALLEE_SAVE_FRAME // save ref containing registers for GC
mov %esp, %ebx // remember SP
@@ -873,8 +955,15 @@ DEFINE_FUNCTION art_quick_set_obj_instance
RETURN_IF_EAX_ZERO // return or deliver exception
END_FUNCTION art_quick_set_obj_instance
-DEFINE_FUNCTION art_quick_get32_instance
- SETUP_REF_ONLY_CALLEE_SAVE_FRAME // save ref containing registers for GC
+TWO_ARG_REF_DOWNCALL art_quick_get_byte_instance, artGetByteInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
+TWO_ARG_REF_DOWNCALL art_quick_get_boolean_instance, artGetBooleanInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
+TWO_ARG_REF_DOWNCALL art_quick_get_short_instance, artGetShortInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
+TWO_ARG_REF_DOWNCALL art_quick_get_char_instance, artGetCharInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
+TWO_ARG_REF_DOWNCALL art_quick_get32_instance, artGet32InstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
+TWO_ARG_REF_DOWNCALL art_quick_get_obj_instance, artGetObjInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
+
+DEFINE_FUNCTION art_quick_get64_instance
+ SETUP_REF_ONLY_CALLEE_SAVE_FRAME // save ref containing registers for GC
mov %esp, %ebx // remember SP
mov 32(%esp), %edx // get referrer
subl LITERAL(12), %esp // alignment padding
@@ -885,14 +974,14 @@ DEFINE_FUNCTION art_quick_get32_instance
PUSH edx // pass referrer
PUSH ecx // pass object
PUSH eax // pass field_idx
- call SYMBOL(artGet32InstanceFromCode) // (field_idx, Object*, referrer, Thread*, SP)
+ call SYMBOL(artGet64InstanceFromCode) // (field_idx, Object*, referrer, Thread*, SP)
addl LITERAL(32), %esp // pop arguments
CFI_ADJUST_CFA_OFFSET(-32)
RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address
RETURN_OR_DELIVER_PENDING_EXCEPTION // return or deliver exception
-END_FUNCTION art_quick_get32_instance
+END_FUNCTION art_quick_get64_instance
-DEFINE_FUNCTION art_quick_get64_instance
+DEFINE_FUNCTION art_quick_set8_static
SETUP_REF_ONLY_CALLEE_SAVE_FRAME // save ref containing registers for GC
mov %esp, %ebx // remember SP
mov 32(%esp), %edx // get referrer
@@ -902,16 +991,16 @@ DEFINE_FUNCTION art_quick_get64_instance
pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current()
CFI_ADJUST_CFA_OFFSET(4)
PUSH edx // pass referrer
- PUSH ecx // pass object
+ PUSH ecx // pass new_val
PUSH eax // pass field_idx
- call SYMBOL(artGet64InstanceFromCode) // (field_idx, Object*, referrer, Thread*, SP)
+ call SYMBOL(artSet8StaticFromCode) // (field_idx, new_val, referrer, Thread*, SP)
addl LITERAL(32), %esp // pop arguments
CFI_ADJUST_CFA_OFFSET(-32)
RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address
- RETURN_OR_DELIVER_PENDING_EXCEPTION // return or deliver exception
-END_FUNCTION art_quick_get64_instance
+ RETURN_IF_EAX_ZERO // return or deliver exception
+END_FUNCTION art_quick_set8_static
-DEFINE_FUNCTION art_quick_get_obj_instance
+DEFINE_FUNCTION art_quick_set16_static
SETUP_REF_ONLY_CALLEE_SAVE_FRAME // save ref containing registers for GC
mov %esp, %ebx // remember SP
mov 32(%esp), %edx // get referrer
@@ -921,14 +1010,14 @@ DEFINE_FUNCTION art_quick_get_obj_instance
pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current()
CFI_ADJUST_CFA_OFFSET(4)
PUSH edx // pass referrer
- PUSH ecx // pass object
+ PUSH ecx // pass new_val
PUSH eax // pass field_idx
- call SYMBOL(artGetObjInstanceFromCode) // (field_idx, Object*, referrer, Thread*, SP)
+ call SYMBOL(artSet16StaticFromCode) // (field_idx, new_val, referrer, Thread*, SP)
addl LITERAL(32), %esp // pop arguments
CFI_ADJUST_CFA_OFFSET(-32)
RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address
- RETURN_OR_DELIVER_PENDING_EXCEPTION // return or deliver exception
-END_FUNCTION art_quick_get_obj_instance
+ RETURN_IF_EAX_ZERO // return or deliver exception
+END_FUNCTION art_quick_set16_static
DEFINE_FUNCTION art_quick_set32_static
SETUP_REF_ONLY_CALLEE_SAVE_FRAME // save ref containing registers for GC
@@ -987,53 +1076,13 @@ DEFINE_FUNCTION art_quick_set_obj_static
RETURN_IF_EAX_ZERO // return or deliver exception
END_FUNCTION art_quick_set_obj_static
-DEFINE_FUNCTION art_quick_get32_static
- SETUP_REF_ONLY_CALLEE_SAVE_FRAME // save ref containing registers for GC
- mov %esp, %edx // remember SP
- mov 32(%esp), %ecx // get referrer
- PUSH edx // pass SP
- pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current()
- CFI_ADJUST_CFA_OFFSET(4)
- PUSH ecx // pass referrer
- PUSH eax // pass field_idx
- call SYMBOL(artGet32StaticFromCode) // (field_idx, referrer, Thread*, SP)
- addl LITERAL(16), %esp // pop arguments
- CFI_ADJUST_CFA_OFFSET(-16)
- RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address
- RETURN_OR_DELIVER_PENDING_EXCEPTION // return or deliver exception
-END_FUNCTION art_quick_get32_static
-
-DEFINE_FUNCTION art_quick_get64_static
- SETUP_REF_ONLY_CALLEE_SAVE_FRAME // save ref containing registers for GC
- mov %esp, %edx // remember SP
- mov 32(%esp), %ecx // get referrer
- PUSH edx // pass SP
- pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current()
- CFI_ADJUST_CFA_OFFSET(4)
- PUSH ecx // pass referrer
- PUSH eax // pass field_idx
- call SYMBOL(artGet64StaticFromCode) // (field_idx, referrer, Thread*, SP)
- addl LITERAL(16), %esp // pop arguments
- CFI_ADJUST_CFA_OFFSET(-16)
- RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address
- RETURN_OR_DELIVER_PENDING_EXCEPTION // return or deliver exception
-END_FUNCTION art_quick_get64_static
-
-DEFINE_FUNCTION art_quick_get_obj_static
- SETUP_REF_ONLY_CALLEE_SAVE_FRAME // save ref containing registers for GC
- mov %esp, %edx // remember SP
- mov 32(%esp), %ecx // get referrer
- PUSH edx // pass SP
- pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current()
- CFI_ADJUST_CFA_OFFSET(4)
- PUSH ecx // pass referrer
- PUSH eax // pass field_idx
- call SYMBOL(artGetObjStaticFromCode) // (field_idx, referrer, Thread*, SP)
- addl LITERAL(16), %esp // pop arguments
- CFI_ADJUST_CFA_OFFSET(-16)
- RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address
- RETURN_OR_DELIVER_PENDING_EXCEPTION // return or deliver exception
-END_FUNCTION art_quick_get_obj_static
+ONE_ARG_REF_DOWNCALL art_quick_get_byte_static, artGetByteStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
+ONE_ARG_REF_DOWNCALL art_quick_get_boolean_static, artGetBooleanStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
+ONE_ARG_REF_DOWNCALL art_quick_get_short_static, artGetShortStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
+ONE_ARG_REF_DOWNCALL art_quick_get_char_static, artGetCharStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
+ONE_ARG_REF_DOWNCALL art_quick_get32_static, artGet32StaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
+ONE_ARG_REF_DOWNCALL art_quick_get64_static, artGet64StaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
+ONE_ARG_REF_DOWNCALL art_quick_get_obj_static, artGetObjStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
DEFINE_FUNCTION art_quick_proxy_invoke_handler
SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME // save frame and Method*
diff --git a/runtime/arch/x86_64/entrypoints_init_x86_64.cc b/runtime/arch/x86_64/entrypoints_init_x86_64.cc
index 35a0cf4f2c..c9028e1355 100644
--- a/runtime/arch/x86_64/entrypoints_init_x86_64.cc
+++ b/runtime/arch/x86_64/entrypoints_init_x86_64.cc
@@ -48,12 +48,24 @@ extern "C" void* art_quick_initialize_type_and_verify_access(uint32_t, void*);
extern "C" void* art_quick_resolve_string(void*, uint32_t);
// Field entrypoints.
+extern "C" int art_quick_set8_instance(uint32_t, void*, int8_t);
+extern "C" int art_quick_set8_static(uint32_t, int8_t);
+extern "C" int art_quick_set16_instance(uint32_t, void*, int16_t);
+extern "C" int art_quick_set16_static(uint32_t, int16_t);
extern "C" int art_quick_set32_instance(uint32_t, void*, int32_t);
extern "C" int art_quick_set32_static(uint32_t, int32_t);
extern "C" int art_quick_set64_instance(uint32_t, void*, int64_t);
extern "C" int art_quick_set64_static(uint32_t, int64_t);
extern "C" int art_quick_set_obj_instance(uint32_t, void*, void*);
extern "C" int art_quick_set_obj_static(uint32_t, void*);
+extern "C" int8_t art_quick_get_byte_instance(uint32_t, void*);
+extern "C" uint8_t art_quick_get_boolean_instance(uint32_t, void*);
+extern "C" int8_t art_quick_get_byte_static(uint32_t);
+extern "C" uint8_t art_quick_get_boolean_static(uint32_t);
+extern "C" int16_t art_quick_get_short_instance(uint32_t, void*);
+extern "C" uint16_t art_quick_get_char_instance(uint32_t, void*);
+extern "C" int16_t art_quick_get_short_static(uint32_t);
+extern "C" uint16_t art_quick_get_char_static(uint32_t);
extern "C" int32_t art_quick_get32_instance(uint32_t, void*);
extern "C" int32_t art_quick_get32_static(uint32_t);
extern "C" int64_t art_quick_get64_instance(uint32_t, void*);
@@ -141,15 +153,27 @@ void InitEntryPoints(InterpreterEntryPoints* ipoints, JniEntryPoints* jpoints,
qpoints->pResolveString = art_quick_resolve_string;
// Field
+ qpoints->pSet8Instance = art_quick_set8_instance;
+ qpoints->pSet8Static = art_quick_set8_static;
+ qpoints->pSet16Instance = art_quick_set16_instance;
+ qpoints->pSet16Static = art_quick_set16_static;
qpoints->pSet32Instance = art_quick_set32_instance;
qpoints->pSet32Static = art_quick_set32_static;
qpoints->pSet64Instance = art_quick_set64_instance;
qpoints->pSet64Static = art_quick_set64_static;
qpoints->pSetObjInstance = art_quick_set_obj_instance;
qpoints->pSetObjStatic = art_quick_set_obj_static;
+ qpoints->pGetByteInstance = art_quick_get_byte_instance;
+ qpoints->pGetBooleanInstance = art_quick_get_boolean_instance;
+ qpoints->pGetShortInstance = art_quick_get_short_instance;
+ qpoints->pGetCharInstance = art_quick_get_char_instance;
qpoints->pGet32Instance = art_quick_get32_instance;
qpoints->pGet64Instance = art_quick_get64_instance;
qpoints->pGetObjInstance = art_quick_get_obj_instance;
+ qpoints->pGetByteStatic = art_quick_get_byte_static;
+ qpoints->pGetBooleanStatic = art_quick_get_boolean_static;
+ qpoints->pGetShortStatic = art_quick_get_short_static;
+ qpoints->pGetCharStatic = art_quick_get_char_static;
qpoints->pGet32Static = art_quick_get32_static;
qpoints->pGet64Static = art_quick_get64_static;
qpoints->pGetObjStatic = art_quick_get_obj_static;
diff --git a/runtime/arch/x86_64/quick_entrypoints_x86_64.S b/runtime/arch/x86_64/quick_entrypoints_x86_64.S
index f95bd22e9f..e9b5a7274b 100644
--- a/runtime/arch/x86_64/quick_entrypoints_x86_64.S
+++ b/runtime/arch/x86_64/quick_entrypoints_x86_64.S
@@ -1076,17 +1076,29 @@ UNIMPLEMENTED art_quick_lshl
UNIMPLEMENTED art_quick_lshr
UNIMPLEMENTED art_quick_lushr
+THREE_ARG_REF_DOWNCALL art_quick_set8_instance, artSet8InstanceFromCode, RETURN_IF_EAX_ZERO
+THREE_ARG_REF_DOWNCALL art_quick_set16_instance, artSet16InstanceFromCode, RETURN_IF_EAX_ZERO
THREE_ARG_REF_DOWNCALL art_quick_set32_instance, artSet32InstanceFromCode, RETURN_IF_EAX_ZERO
THREE_ARG_DOWNCALL art_quick_set64_instance, artSet64InstanceFromCode, RETURN_IF_EAX_ZERO
THREE_ARG_REF_DOWNCALL art_quick_set_obj_instance, artSetObjInstanceFromCode, RETURN_IF_EAX_ZERO
+TWO_ARG_REF_DOWNCALL art_quick_get_byte_instance, artGetByteInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
+TWO_ARG_REF_DOWNCALL art_quick_get_boolean_instance, artGetBooleanInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
+TWO_ARG_REF_DOWNCALL art_quick_get_short_instance, artGetShortInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
+TWO_ARG_REF_DOWNCALL art_quick_get_char_instance, artGetCharInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
TWO_ARG_REF_DOWNCALL art_quick_get32_instance, artGet32InstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
TWO_ARG_REF_DOWNCALL art_quick_get64_instance, artGet64InstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
TWO_ARG_REF_DOWNCALL art_quick_get_obj_instance, artGetObjInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
+TWO_ARG_REF_DOWNCALL art_quick_set8_static, artSet8StaticFromCode, RETURN_IF_EAX_ZERO
+TWO_ARG_REF_DOWNCALL art_quick_set16_static, artSet16StaticFromCode, RETURN_IF_EAX_ZERO
TWO_ARG_REF_DOWNCALL art_quick_set32_static, artSet32StaticFromCode, RETURN_IF_EAX_ZERO
TWO_ARG_REF_DOWNCALL art_quick_set_obj_static, artSetObjStaticFromCode, RETURN_IF_EAX_ZERO
+ONE_ARG_REF_DOWNCALL art_quick_get_byte_static, artGetByteStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
+ONE_ARG_REF_DOWNCALL art_quick_get_boolean_static, artGetBooleanStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
+ONE_ARG_REF_DOWNCALL art_quick_get_short_static, artGetShortStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
+ONE_ARG_REF_DOWNCALL art_quick_get_char_static, artGetCharStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
ONE_ARG_REF_DOWNCALL art_quick_get32_static, artGet32StaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
ONE_ARG_REF_DOWNCALL art_quick_get64_static, artGet64StaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
ONE_ARG_REF_DOWNCALL art_quick_get_obj_static, artGetObjStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc
index dcf8f5f42e..9d85fa6e05 100644
--- a/runtime/class_linker.cc
+++ b/runtime/class_linker.cc
@@ -2078,6 +2078,8 @@ uint32_t ClassLinker::SizeOfClassWithoutEmbeddedTables(const DexFile& dex_file,
const DexFile::ClassDef& dex_class_def) {
const byte* class_data = dex_file.GetClassData(dex_class_def);
size_t num_ref = 0;
+ size_t num_8 = 0;
+ size_t num_16 = 0;
size_t num_32 = 0;
size_t num_64 = 0;
if (class_data != NULL) {
@@ -2085,16 +2087,33 @@ uint32_t ClassLinker::SizeOfClassWithoutEmbeddedTables(const DexFile& dex_file,
const DexFile::FieldId& field_id = dex_file.GetFieldId(it.GetMemberIndex());
const char* descriptor = dex_file.GetFieldTypeDescriptor(field_id);
char c = descriptor[0];
- if (c == 'L' || c == '[') {
- num_ref++;
- } else if (c == 'J' || c == 'D') {
- num_64++;
- } else {
- num_32++;
+ switch (c) {
+ case 'L':
+ case '[':
+ num_ref++;
+ break;
+ case 'J':
+ case 'D':
+ num_64++;
+ break;
+ case 'I':
+ case 'F':
+ num_32++;
+ break;
+ case 'S':
+ case 'C':
+ num_16++;
+ break;
+ case 'B':
+ case 'Z':
+ num_8++;
+ break;
+ default:
+ LOG(FATAL) << "Unknown descriptor: " << c;
}
}
}
- return mirror::Class::ComputeClassSize(false, 0, num_32, num_64, num_ref);
+ return mirror::Class::ComputeClassSize(false, 0, num_8, num_16, num_32, num_64, num_ref);
}
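As a worked example (hypothetical class, illustrative only), the per-size counters above feed the extended ComputeClassSize as follows:

    // Hypothetical class: { Object o; long j; int i; short s; byte b; boolean z; }
    // Field descriptors:    L...;     J       I      S        B       Z
    // Counts:               num_ref=1, num_64=1, num_32=1, num_16=1, num_8=2
    // Resulting call:
    //   mirror::Class::ComputeClassSize(false, 0, /*num_8*/ 2, /*num_16*/ 1,
    //                                   /*num_32*/ 1, /*num_64*/ 1, /*num_ref*/ 1);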
OatFile::OatClass ClassLinker::FindOatClass(const DexFile& dex_file, uint16_t class_def_idx,
@@ -2455,6 +2474,89 @@ void ClassLinker::LinkCode(ConstHandle<mirror::ArtMethod> method,
have_portable_code);
}
+template<int n>
+void ClassLinker::AlignFields(size_t& current_field, const size_t num_fields,
+ MemberOffset& field_offset,
+ mirror::ObjectArray<mirror::ArtField>* fields,
+ std::deque<mirror::ArtField*>& grouped_and_sorted_fields) {
+ if (current_field != num_fields && !IsAligned<n>(field_offset.Uint32Value())) {
+ size_t gap = (n - (field_offset.Uint32Value() & (n - 1)));
+ // Avoid padding unless a field that requires alignment actually exists.
+ bool needs_padding = false;
+ for (size_t i = 0; i < grouped_and_sorted_fields.size(); ++i) {
+ mirror::ArtField* field = grouped_and_sorted_fields[i];
+ Primitive::Type type = field->GetTypeAsPrimitiveType();
+ CHECK(type != Primitive::kPrimNot) << PrettyField(field); // should be primitive types
+ // Too big to fill the gap.
+ if (Primitive::ComponentSize(type) >= n) {
+ needs_padding = true;
+ continue;
+ }
+ if (needs_padding) {
+ // Shift as many fields as possible to fill the gaps.
+ size_t cursor = i;
+ mirror::ArtField* shift_field;
+ Primitive::Type shift_type;
+ while (cursor < grouped_and_sorted_fields.size() && gap > 0) {
+          // Find a field that fits in the current gap.
+ do {
+ DCHECK_LT(cursor, grouped_and_sorted_fields.size()) << "Cursor overran fields.";
+ shift_field = grouped_and_sorted_fields[cursor];
+ shift_type = shift_field->GetTypeAsPrimitiveType();
+ CHECK(shift_type != Primitive::kPrimNot) << PrettyField(shift_field);
+ // Can fit.
+ if (Primitive::ComponentSize(shift_type) <= gap) {
+ break;
+ }
+ ++cursor;
+ } while (cursor < grouped_and_sorted_fields.size());
+
+ if (cursor < grouped_and_sorted_fields.size()) {
+ fields->Set<false>(current_field++, shift_field);
+ shift_field->SetOffset(field_offset);
+ field_offset = MemberOffset(field_offset.Uint32Value() +
+ Primitive::ComponentSize(shift_type));
+ gap -= Primitive::ComponentSize(shift_type);
+ grouped_and_sorted_fields.erase(grouped_and_sorted_fields.begin() + cursor);
+ }
+ }
+ }
+ break;
+ }
+ // No further shuffling available, pad whatever space is left.
+ if (needs_padding) {
+ field_offset = MemberOffset(field_offset.Uint32Value() + gap);
+ }
+ DCHECK(!needs_padding || IsAligned<n>(field_offset.Uint32Value())) << "Needed " <<
+ n << " byte alignment, but not aligned after align with offset: " <<
+ field_offset.Uint32Value();
+ }
+}
+
+template<int n>
+void ClassLinker::ShuffleForward(size_t &current_field, const size_t num_fields,
+ MemberOffset& field_offset,
+ mirror::ObjectArray<mirror::ArtField>* fields,
+ std::deque<mirror::ArtField*>& grouped_and_sorted_fields) {
+ while (!grouped_and_sorted_fields.empty() && current_field != num_fields) {
+ mirror::ArtField* field = grouped_and_sorted_fields.front();
+ Primitive::Type type = field->GetTypeAsPrimitiveType();
+ CHECK(type != Primitive::kPrimNot) << PrettyField(field); // should be primitive types
+ if (Primitive::ComponentSize(type) != n) {
+ DCHECK_LT(Primitive::ComponentSize(type), static_cast<unsigned int>(n)) <<
+ "Encountered a larger field (" << Primitive::ComponentSize(type) << ") " <<
+ "while shuffling fields of size: " << n;
+      // We should've shuffled all fields of size n forward by this point.
+ break;
+ }
+ DCHECK(IsAligned<n>(field_offset.Uint32Value()));
+ grouped_and_sorted_fields.pop_front();
+ fields->Set<false>(current_field++, field);
+ field->SetOffset(field_offset);
+ field_offset = MemberOffset(field_offset.Uint32Value() + n);
+ }
+}
+
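A rough layout sketch of what the AlignFields/ShuffleForward passes produce for a hypothetical class. Offsets assume instance fields start at offset 8, right after the object header; illustrative only, not part of the patch:

    // Hypothetical class: { Object ref; long j; int i; short s; byte b; }
    // Sorted order: ref, j (8 bytes), i (4), s (2), b (1).
    //   offset  8: ref   // references are laid out first
    //   offset 12: i     // AlignFields<8> shuffles the 4-byte field into the gap
    //   offset 16: j     // now 8-byte aligned, no padding wasted
    //   offset 24: s     // ShuffleForward<2>
    //   offset 26: b     // ShuffleForward<1>
    //   end rounded up to 28 so a subclass starts 4-byte aligned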
void ClassLinker::LoadClass(const DexFile& dex_file,
const DexFile::ClassDef& dex_class_def,
ConstHandle<mirror::Class> klass,
@@ -4674,20 +4776,20 @@ struct LinkFieldsComparator {
// No thread safety analysis as will be called from STL. Checked lock held in constructor.
bool operator()(mirror::ArtField* field1, mirror::ArtField* field2)
NO_THREAD_SAFETY_ANALYSIS {
- // First come reference fields, then 64-bit, and finally 32-bit
+    // First come reference fields, then 64-bit, 32-bit, 16-bit, and finally 8-bit fields.
Primitive::Type type1 = field1->GetTypeAsPrimitiveType();
Primitive::Type type2 = field2->GetTypeAsPrimitiveType();
if (type1 != type2) {
bool is_primitive1 = type1 != Primitive::kPrimNot;
bool is_primitive2 = type2 != Primitive::kPrimNot;
- bool is64bit1 = is_primitive1 && (type1 == Primitive::kPrimLong ||
- type1 == Primitive::kPrimDouble);
- bool is64bit2 = is_primitive2 && (type2 == Primitive::kPrimLong ||
- type2 == Primitive::kPrimDouble);
- int order1 = !is_primitive1 ? 0 : (is64bit1 ? 1 : 2);
- int order2 = !is_primitive2 ? 0 : (is64bit2 ? 1 : 2);
- if (order1 != order2) {
- return order1 < order2;
+ if (type1 != type2) {
+ if (is_primitive1 && is_primitive2) {
+ // Larger primitive types go first.
+ return Primitive::ComponentSize(type1) > Primitive::ComponentSize(type2);
+ } else {
+ // Reference always goes first.
+ return !is_primitive1;
+ }
}
}
// same basic group? then sort by string.
@@ -4709,7 +4811,7 @@ bool ClassLinker::LinkFields(ConstHandle<mirror::Class> klass, bool is_static, s
if (klass->ShouldHaveEmbeddedImtAndVTable()) {
// Static fields come after the embedded tables.
base = mirror::Class::ComputeClassSize(true, klass->GetVTableDuringLinking()->GetLength(),
- 0, 0, 0);
+ 0, 0, 0, 0, 0);
}
field_offset = MemberOffset(base);
} else {
@@ -4726,6 +4828,8 @@ bool ClassLinker::LinkFields(ConstHandle<mirror::Class> klass, bool is_static, s
// we want a relatively stable order so that adding new fields
// minimizes disruption of C++ version such as Class and Method.
std::deque<mirror::ArtField*> grouped_and_sorted_fields;
+ const char* old_no_suspend_cause = Thread::Current()->StartAssertNoThreadSuspension(
+ "Naked ArtField references in deque");
for (size_t i = 0; i < num_fields; i++) {
mirror::ArtField* f = fields->Get(i);
CHECK(f != NULL) << PrettyClass(klass.Get());
@@ -4734,7 +4838,7 @@ bool ClassLinker::LinkFields(ConstHandle<mirror::Class> klass, bool is_static, s
std::sort(grouped_and_sorted_fields.begin(), grouped_and_sorted_fields.end(),
LinkFieldsComparator());
- // References should be at the front.
+ // References should be at the front, unless we need to pad.
size_t current_field = 0;
size_t num_reference_fields = 0;
for (; current_field < num_fields; current_field++) {
@@ -4751,44 +4855,21 @@ bool ClassLinker::LinkFields(ConstHandle<mirror::Class> klass, bool is_static, s
field_offset = MemberOffset(field_offset.Uint32Value() + sizeof(uint32_t));
}
- // Now we want to pack all of the double-wide fields together. If
- // we're not aligned, though, we want to shuffle one 32-bit field
- // into place. If we can't find one, we'll have to pad it.
- if (current_field != num_fields && !IsAligned<8>(field_offset.Uint32Value())) {
- for (size_t i = 0; i < grouped_and_sorted_fields.size(); i++) {
- mirror::ArtField* field = grouped_and_sorted_fields[i];
- Primitive::Type type = field->GetTypeAsPrimitiveType();
- CHECK(type != Primitive::kPrimNot) << PrettyField(field); // should be primitive types
- if (type == Primitive::kPrimLong || type == Primitive::kPrimDouble) {
- continue;
- }
- fields->Set<false>(current_field++, field);
- field->SetOffset(field_offset);
- // drop the consumed field
- grouped_and_sorted_fields.erase(grouped_and_sorted_fields.begin() + i);
- break;
- }
- // whether we found a 32-bit field for padding or not, we advance
- field_offset = MemberOffset(field_offset.Uint32Value() + sizeof(uint32_t));
- }
+ AlignFields<8>(current_field, num_fields, field_offset, fields, grouped_and_sorted_fields);
+ ShuffleForward<8>(current_field, num_fields, field_offset, fields, grouped_and_sorted_fields);
+ // No need for further alignment, start of object is 4-byte aligned.
+ ShuffleForward<4>(current_field, num_fields, field_offset, fields, grouped_and_sorted_fields);
+ ShuffleForward<2>(current_field, num_fields, field_offset, fields, grouped_and_sorted_fields);
+ ShuffleForward<1>(current_field, num_fields, field_offset, fields, grouped_and_sorted_fields);
+ CHECK(grouped_and_sorted_fields.empty()) << "Missed " << grouped_and_sorted_fields.size() <<
+ " fields.";
- // Alignment is good, shuffle any double-wide fields forward, and
- // finish assigning field offsets to all fields.
- DCHECK(current_field == num_fields || IsAligned<8>(field_offset.Uint32Value()))
- << PrettyClass(klass.Get());
- while (!grouped_and_sorted_fields.empty()) {
- mirror::ArtField* field = grouped_and_sorted_fields.front();
- grouped_and_sorted_fields.pop_front();
- Primitive::Type type = field->GetTypeAsPrimitiveType();
- CHECK(type != Primitive::kPrimNot) << PrettyField(field); // should be primitive types
- fields->Set<false>(current_field, field);
- field->SetOffset(field_offset);
- field_offset = MemberOffset(field_offset.Uint32Value() +
- ((type == Primitive::kPrimLong || type == Primitive::kPrimDouble)
- ? sizeof(uint64_t)
- : sizeof(uint32_t)));
- current_field++;
+  // A subclass expects its superclass to be 4-byte aligned at the end.
+ if (!IsAligned<4>(field_offset.Uint32Value())) {
+ field_offset = MemberOffset(RoundUp(field_offset.Uint32Value(), 4));
}
+ CHECK(IsAligned<4>(field_offset.Uint32Value()));
+ Thread::Current()->EndAssertNoThreadSuspension(old_no_suspend_cause);
// We lie to the GC about the java.lang.ref.Reference.referent field, so it doesn't scan it.
if (!is_static && klass->DescriptorEquals("Ljava/lang/ref/Reference;")) {
diff --git a/runtime/class_linker.h b/runtime/class_linker.h
index 14a9e4ad1b..67a7b2356d 100644
--- a/runtime/class_linker.h
+++ b/runtime/class_linker.h
@@ -17,6 +17,7 @@
#ifndef ART_RUNTIME_CLASS_LINKER_H_
#define ART_RUNTIME_CLASS_LINKER_H_
+#include <deque>
#include <string>
#include <utility>
#include <vector>
@@ -531,6 +532,18 @@ class ClassLinker {
void LinkCode(ConstHandle<mirror::ArtMethod> method, const OatFile::OatClass* oat_class,
const DexFile& dex_file, uint32_t dex_method_index, uint32_t method_index)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ template<int n>
+ void AlignFields(size_t& current_field, const size_t num_fields,
+ MemberOffset& field_offset,
+ mirror::ObjectArray<mirror::ArtField>* fields,
+ std::deque<mirror::ArtField*>& grouped_and_sorted_fields)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ template<int n>
+ void ShuffleForward(size_t& current_field, const size_t num_fields,
+ MemberOffset& field_offset,
+ mirror::ObjectArray<mirror::ArtField>* fields,
+ std::deque<mirror::ArtField*>& grouped_and_sorted_fields)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
void CreateReferenceInstanceOffsets(ConstHandle<mirror::Class> klass)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
diff --git a/runtime/common_throws.cc b/runtime/common_throws.cc
index bb48be30e4..846216c52d 100644
--- a/runtime/common_throws.cc
+++ b/runtime/common_throws.cc
@@ -449,6 +449,10 @@ void ThrowNullPointerExceptionFromDexPC(const ThrowLocation& throw_location) {
break;
}
case Instruction::IPUT_QUICK:
+ case Instruction::IPUT_BOOLEAN_QUICK:
+ case Instruction::IPUT_BYTE_QUICK:
+ case Instruction::IPUT_CHAR_QUICK:
+ case Instruction::IPUT_SHORT_QUICK:
case Instruction::IPUT_WIDE_QUICK:
case Instruction::IPUT_OBJECT_QUICK: {
// Since we replaced the field index, we ask the verifier to tell us which
diff --git a/runtime/dex_instruction_list.h b/runtime/dex_instruction_list.h
index 103b0d74ef..64c9185c87 100644
--- a/runtime/dex_instruction_list.h
+++ b/runtime/dex_instruction_list.h
@@ -253,10 +253,10 @@
V(0xE8, IPUT_OBJECT_QUICK, "iput-object-quick", k22c, false, kFieldRef, kContinue | kThrow | kStore | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB | kVerifyRuntimeOnly) \
V(0xE9, INVOKE_VIRTUAL_QUICK, "invoke-virtual-quick", k35c, false, kMethodRef, kContinue | kThrow | kInvoke, kVerifyVarArgNonZero | kVerifyRuntimeOnly) \
V(0xEA, INVOKE_VIRTUAL_RANGE_QUICK, "invoke-virtual/range-quick", k3rc, false, kMethodRef, kContinue | kThrow | kInvoke, kVerifyVarArgRangeNonZero | kVerifyRuntimeOnly) \
- V(0xEB, UNUSED_EB, "unused-eb", k10x, false, kUnknown, 0, kVerifyError) \
- V(0xEC, UNUSED_EC, "unused-ec", k10x, false, kUnknown, 0, kVerifyError) \
- V(0xED, UNUSED_ED, "unused-ed", k10x, false, kUnknown, 0, kVerifyError) \
- V(0xEE, UNUSED_EE, "unused-ee", k10x, false, kUnknown, 0, kVerifyError) \
+  V(0xEB, IPUT_BOOLEAN_QUICK, "iput-boolean-quick", k22c, false, kFieldRef, kContinue | kThrow | kStore | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB | kVerifyRuntimeOnly) \
+  V(0xEC, IPUT_BYTE_QUICK, "iput-byte-quick", k22c, false, kFieldRef, kContinue | kThrow | kStore | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB | kVerifyRuntimeOnly) \
+  V(0xED, IPUT_CHAR_QUICK, "iput-char-quick", k22c, false, kFieldRef, kContinue | kThrow | kStore | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB | kVerifyRuntimeOnly) \
+  V(0xEE, IPUT_SHORT_QUICK, "iput-short-quick", k22c, false, kFieldRef, kContinue | kThrow | kStore | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB | kVerifyRuntimeOnly) \
V(0xEF, UNUSED_EF, "unused-ef", k10x, false, kUnknown, 0, kVerifyError) \
V(0xF0, UNUSED_F0, "unused-f0", k10x, false, kUnknown, 0, kVerifyError) \
V(0xF1, UNUSED_F1, "unused-f1", k10x, false, kUnknown, 0, kVerifyError) \
diff --git a/runtime/entrypoints/quick/quick_entrypoints_list.h b/runtime/entrypoints/quick/quick_entrypoints_list.h
index f858743314..fbc7913d06 100644
--- a/runtime/entrypoints/quick/quick_entrypoints_list.h
+++ b/runtime/entrypoints/quick/quick_entrypoints_list.h
@@ -38,12 +38,24 @@
V(InitializeType, void*, uint32_t, void*) \
V(ResolveString, void*, void*, uint32_t) \
\
+ V(Set8Instance, int, uint32_t, void*, int8_t) \
+ V(Set8Static, int, uint32_t, int8_t) \
+ V(Set16Instance, int, uint32_t, void*, int16_t) \
+ V(Set16Static, int, uint32_t, int16_t) \
V(Set32Instance, int, uint32_t, void*, int32_t) \
V(Set32Static, int, uint32_t, int32_t) \
V(Set64Instance, int, uint32_t, void*, int64_t) \
V(Set64Static, int, uint32_t, int64_t) \
V(SetObjInstance, int, uint32_t, void*, void*) \
V(SetObjStatic, int, uint32_t, void*) \
+ V(GetByteInstance, int8_t, uint32_t, void*) \
+ V(GetBooleanInstance, uint8_t, uint32_t, void*) \
+ V(GetByteStatic, int8_t, uint32_t) \
+ V(GetBooleanStatic, uint8_t, uint32_t) \
+ V(GetShortInstance, int16_t, uint32_t, void*) \
+ V(GetCharInstance, uint16_t, uint32_t, void*) \
+ V(GetShortStatic, int16_t, uint32_t) \
+ V(GetCharStatic, uint16_t, uint32_t) \
V(Get32Instance, int32_t, uint32_t, void*) \
V(Get32Static, int32_t, uint32_t) \
V(Get64Instance, int64_t, uint32_t, void*) \
diff --git a/runtime/entrypoints/quick/quick_field_entrypoints.cc b/runtime/entrypoints/quick/quick_field_entrypoints.cc
index cd1e2470cf..b89c015cd6 100644
--- a/runtime/entrypoints/quick/quick_field_entrypoints.cc
+++ b/runtime/entrypoints/quick/quick_field_entrypoints.cc
@@ -25,6 +25,74 @@
namespace art {
+extern "C" int8_t artGetByteStaticFromCode(uint32_t field_idx,
+ mirror::ArtMethod* referrer,
+ Thread* self, StackReference<mirror::ArtMethod>* sp)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ mirror::ArtField* field = FindFieldFast(field_idx, referrer, StaticPrimitiveRead,
+ sizeof(int8_t));
+ if (LIKELY(field != NULL)) {
+ return field->GetByte(field->GetDeclaringClass());
+ }
+ FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly);
+ field = FindFieldFromCode<StaticPrimitiveRead, true>(field_idx, referrer, self, sizeof(int8_t));
+ if (LIKELY(field != NULL)) {
+ return field->GetByte(field->GetDeclaringClass());
+ }
+ return 0; // Will throw exception by checking with Thread::Current
+}
+
+extern "C" uint8_t artGetBooleanStaticFromCode(uint32_t field_idx,
+ mirror::ArtMethod* referrer,
+ Thread* self, StackReference<mirror::ArtMethod>* sp)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ mirror::ArtField* field = FindFieldFast(field_idx, referrer, StaticPrimitiveRead,
+ sizeof(int8_t));
+ if (LIKELY(field != NULL)) {
+ return field->GetBoolean(field->GetDeclaringClass());
+ }
+ FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly);
+ field = FindFieldFromCode<StaticPrimitiveRead, true>(field_idx, referrer, self, sizeof(int8_t));
+ if (LIKELY(field != NULL)) {
+ return field->GetBoolean(field->GetDeclaringClass());
+ }
+ return 0; // Will throw exception by checking with Thread::Current
+}
+
+extern "C" int16_t artGetShortStaticFromCode(uint32_t field_idx,
+ mirror::ArtMethod* referrer,
+ Thread* self, StackReference<mirror::ArtMethod>* sp)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ mirror::ArtField* field = FindFieldFast(field_idx, referrer, StaticPrimitiveRead,
+ sizeof(int16_t));
+ if (LIKELY(field != NULL)) {
+ return field->GetShort(field->GetDeclaringClass());
+ }
+ FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly);
+ field = FindFieldFromCode<StaticPrimitiveRead, true>(field_idx, referrer, self, sizeof(int16_t));
+ if (LIKELY(field != NULL)) {
+ return field->GetShort(field->GetDeclaringClass());
+ }
+ return 0; // Will throw exception by checking with Thread::Current
+}
+
+extern "C" uint16_t artGetCharStaticFromCode(uint32_t field_idx,
+ mirror::ArtMethod* referrer,
+ Thread* self, StackReference<mirror::ArtMethod>* sp)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ mirror::ArtField* field = FindFieldFast(field_idx, referrer, StaticPrimitiveRead,
+ sizeof(int16_t));
+ if (LIKELY(field != NULL)) {
+ return field->GetChar(field->GetDeclaringClass());
+ }
+ FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly);
+ field = FindFieldFromCode<StaticPrimitiveRead, true>(field_idx, referrer, self, sizeof(int16_t));
+ if (LIKELY(field != NULL)) {
+ return field->GetChar(field->GetDeclaringClass());
+ }
+ return 0; // Will throw exception by checking with Thread::Current
+}
+
extern "C" uint32_t artGet32StaticFromCode(uint32_t field_idx,
mirror::ArtMethod* referrer,
Thread* self, StackReference<mirror::ArtMethod>* sp)
@@ -78,6 +146,97 @@ extern "C" mirror::Object* artGetObjStaticFromCode(uint32_t field_idx,
return NULL; // Will throw exception by checking with Thread::Current
}
+extern "C" int8_t artGetByteInstanceFromCode(uint32_t field_idx, mirror::Object* obj,
+ mirror::ArtMethod* referrer, Thread* self,
+ StackReference<mirror::ArtMethod>* sp)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ mirror::ArtField* field = FindFieldFast(field_idx, referrer, InstancePrimitiveRead,
+ sizeof(int8_t));
+ if (LIKELY(field != NULL && obj != NULL)) {
+ return field->GetByte(obj);
+ }
+ FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly);
+ field = FindFieldFromCode<InstancePrimitiveRead, true>(field_idx, referrer, self,
+ sizeof(int8_t));
+ if (LIKELY(field != NULL)) {
+ if (UNLIKELY(obj == NULL)) {
+ ThrowLocation throw_location = self->GetCurrentLocationForThrow();
+ ThrowNullPointerExceptionForFieldAccess(throw_location, field, true);
+ } else {
+ return field->GetByte(obj);
+ }
+ }
+ return 0; // Will throw exception by checking with Thread::Current
+}
+
+extern "C" uint8_t artGetBooleanInstanceFromCode(uint32_t field_idx, mirror::Object* obj,
+ mirror::ArtMethod* referrer, Thread* self,
+ StackReference<mirror::ArtMethod>* sp)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ mirror::ArtField* field = FindFieldFast(field_idx, referrer, InstancePrimitiveRead,
+ sizeof(int8_t));
+ if (LIKELY(field != NULL && obj != NULL)) {
+ return field->GetBoolean(obj);
+ }
+ FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly);
+ field = FindFieldFromCode<InstancePrimitiveRead, true>(field_idx, referrer, self,
+ sizeof(int8_t));
+ if (LIKELY(field != NULL)) {
+ if (UNLIKELY(obj == NULL)) {
+ ThrowLocation throw_location = self->GetCurrentLocationForThrow();
+ ThrowNullPointerExceptionForFieldAccess(throw_location, field, true);
+ } else {
+ return field->GetBoolean(obj);
+ }
+ }
+ return 0; // Will throw exception by checking with Thread::Current
+}
+extern "C" int16_t artGetShortInstanceFromCode(uint32_t field_idx, mirror::Object* obj,
+ mirror::ArtMethod* referrer, Thread* self,
+ StackReference<mirror::ArtMethod>* sp)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ mirror::ArtField* field = FindFieldFast(field_idx, referrer, InstancePrimitiveRead,
+ sizeof(int16_t));
+ if (LIKELY(field != NULL && obj != NULL)) {
+ return field->GetShort(obj);
+ }
+ FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly);
+ field = FindFieldFromCode<InstancePrimitiveRead, true>(field_idx, referrer, self,
+ sizeof(int16_t));
+ if (LIKELY(field != NULL)) {
+ if (UNLIKELY(obj == NULL)) {
+ ThrowLocation throw_location = self->GetCurrentLocationForThrow();
+ ThrowNullPointerExceptionForFieldAccess(throw_location, field, true);
+ } else {
+ return field->GetShort(obj);
+ }
+ }
+ return 0; // Will throw exception by checking with Thread::Current
+}
+
+extern "C" uint16_t artGetCharInstanceFromCode(uint32_t field_idx, mirror::Object* obj,
+ mirror::ArtMethod* referrer, Thread* self,
+ StackReference<mirror::ArtMethod>* sp)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ mirror::ArtField* field = FindFieldFast(field_idx, referrer, InstancePrimitiveRead,
+ sizeof(int16_t));
+ if (LIKELY(field != NULL && obj != NULL)) {
+ return field->GetChar(obj);
+ }
+ FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly);
+ field = FindFieldFromCode<InstancePrimitiveRead, true>(field_idx, referrer, self,
+ sizeof(int16_t));
+ if (LIKELY(field != NULL)) {
+ if (UNLIKELY(obj == NULL)) {
+ ThrowLocation throw_location = self->GetCurrentLocationForThrow();
+ ThrowNullPointerExceptionForFieldAccess(throw_location, field, true);
+ } else {
+ return field->GetChar(obj);
+ }
+ }
+ return 0; // Will throw exception by checking with Thread::Current
+}
+
extern "C" uint32_t artGet32InstanceFromCode(uint32_t field_idx, mirror::Object* obj,
mirror::ArtMethod* referrer, Thread* self,
StackReference<mirror::ArtMethod>* sp)
@@ -148,6 +307,72 @@ extern "C" mirror::Object* artGetObjInstanceFromCode(uint32_t field_idx, mirror:
return NULL; // Will throw exception by checking with Thread::Current
}
+extern "C" int artSet8StaticFromCode(uint32_t field_idx, uint32_t new_value,
+ mirror::ArtMethod* referrer, Thread* self,
+ StackReference<mirror::ArtMethod>* sp)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ mirror::ArtField* field = FindFieldFast(field_idx, referrer, StaticPrimitiveWrite,
+ sizeof(int8_t));
+ if (LIKELY(field != NULL)) {
+ Primitive::Type type = field->GetTypeAsPrimitiveType();
+ // Compiled code can't use transactional mode.
+ if (type == Primitive::kPrimBoolean) {
+ field->SetBoolean<false>(field->GetDeclaringClass(), new_value);
+ } else {
+ DCHECK_EQ(Primitive::kPrimByte, type);
+ field->SetByte<false>(field->GetDeclaringClass(), new_value);
+ }
+ return 0; // success
+ }
+ FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly);
+ field = FindFieldFromCode<StaticPrimitiveWrite, true>(field_idx, referrer, self, sizeof(int8_t));
+ if (LIKELY(field != NULL)) {
+ Primitive::Type type = field->GetTypeAsPrimitiveType();
+ // Compiled code can't use transactional mode.
+ if (type == Primitive::kPrimBoolean) {
+ field->SetBoolean<false>(field->GetDeclaringClass(), new_value);
+ } else {
+ DCHECK_EQ(Primitive::kPrimByte, type);
+ field->SetByte<false>(field->GetDeclaringClass(), new_value);
+ }
+ return 0; // success
+ }
+ return -1; // failure
+}
+
+extern "C" int artSet16StaticFromCode(uint32_t field_idx, uint16_t new_value,
+ mirror::ArtMethod* referrer, Thread* self,
+ StackReference<mirror::ArtMethod>* sp)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ mirror::ArtField* field = FindFieldFast(field_idx, referrer, StaticPrimitiveWrite,
+ sizeof(int16_t));
+ if (LIKELY(field != NULL)) {
+ Primitive::Type type = field->GetTypeAsPrimitiveType();
+ // Compiled code can't use transactional mode.
+ if (type == Primitive::kPrimChar) {
+ field->SetChar<false>(field->GetDeclaringClass(), new_value);
+ } else {
+ DCHECK_EQ(Primitive::kPrimShort, type);
+ field->SetShort<false>(field->GetDeclaringClass(), new_value);
+ }
+ return 0; // success
+ }
+ FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly);
+ field = FindFieldFromCode<StaticPrimitiveWrite, true>(field_idx, referrer, self, sizeof(int16_t));
+ if (LIKELY(field != NULL)) {
+ Primitive::Type type = field->GetTypeAsPrimitiveType();
+ // Compiled code can't use transactional mode.
+ if (type == Primitive::kPrimChar) {
+ field->SetChar<false>(field->GetDeclaringClass(), new_value);
+ } else {
+ DCHECK_EQ(Primitive::kPrimShort, type);
+ field->SetShort<false>(field->GetDeclaringClass(), new_value);
+ }
+ return 0; // success
+ }
+ return -1; // failure
+}
+
extern "C" int artSet32StaticFromCode(uint32_t field_idx, uint32_t new_value,
mirror::ArtMethod* referrer, Thread* self,
StackReference<mirror::ArtMethod>* sp)
@@ -214,6 +439,91 @@ extern "C" int artSetObjStaticFromCode(uint32_t field_idx, mirror::Object* new_v
return -1; // failure
}
+extern "C" int artSet8InstanceFromCode(uint32_t field_idx, mirror::Object* obj, uint8_t new_value,
+ mirror::ArtMethod* referrer, Thread* self,
+ StackReference<mirror::ArtMethod>* sp)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ mirror::ArtField* field = FindFieldFast(field_idx, referrer, InstancePrimitiveWrite,
+ sizeof(int8_t));
+ if (LIKELY(field != NULL && obj != NULL)) {
+ Primitive::Type type = field->GetTypeAsPrimitiveType();
+ // Compiled code can't use transactional mode.
+ if (type == Primitive::kPrimBoolean) {
+ field->SetBoolean<false>(obj, new_value);
+ } else {
+ DCHECK_EQ(Primitive::kPrimByte, type);
+ field->SetByte<false>(obj, new_value);
+ }
+ return 0; // success
+ }
+ FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly);
+ {
+ StackHandleScope<1> hs(self);
+ HandleWrapper<mirror::Object> h_obj(hs.NewHandleWrapper(&obj));
+ field = FindFieldFromCode<InstancePrimitiveWrite, true>(field_idx, referrer, self,
+ sizeof(int8_t));
+ }
+ if (LIKELY(field != NULL)) {
+ if (UNLIKELY(obj == NULL)) {
+ ThrowLocation throw_location = self->GetCurrentLocationForThrow();
+ ThrowNullPointerExceptionForFieldAccess(throw_location, field, false);
+ } else {
+ Primitive::Type type = field->GetTypeAsPrimitiveType();
+ // Compiled code can't use transactional mode.
+ if (type == Primitive::kPrimBoolean) {
+ field->SetBoolean<false>(obj, new_value);
+ } else {
+ field->SetByte<false>(obj, new_value);
+ }
+ return 0; // success
+ }
+ }
+ return -1; // failure
+}
+
+extern "C" int artSet16InstanceFromCode(uint32_t field_idx, mirror::Object* obj, uint16_t new_value,
+ mirror::ArtMethod* referrer, Thread* self,
+ StackReference<mirror::ArtMethod>* sp)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ mirror::ArtField* field = FindFieldFast(field_idx, referrer, InstancePrimitiveWrite,
+ sizeof(int16_t));
+ if (LIKELY(field != NULL && obj != NULL)) {
+ Primitive::Type type = field->GetTypeAsPrimitiveType();
+ // Compiled code can't use transactional mode.
+ if (type == Primitive::kPrimChar) {
+ field->SetChar<false>(obj, new_value);
+ } else {
+ DCHECK_EQ(Primitive::kPrimShort, type);
+ field->SetShort<false>(obj, new_value);
+ }
+ return 0; // success
+ }
+ FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly);
+ {
+ StackHandleScope<1> hs(self);
+ HandleWrapper<mirror::Object> h_obj(hs.NewHandleWrapper(&obj));
+ field = FindFieldFromCode<InstancePrimitiveWrite, true>(field_idx, referrer, self,
+ sizeof(int16_t));
+ }
+ if (LIKELY(field != NULL)) {
+ if (UNLIKELY(obj == NULL)) {
+ ThrowLocation throw_location = self->GetCurrentLocationForThrow();
+ ThrowNullPointerExceptionForFieldAccess(throw_location, field, false);
+ } else {
+ Primitive::Type type = field->GetTypeAsPrimitiveType();
+ // Compiled code can't use transactional mode.
+ if (type == Primitive::kPrimChar) {
+ field->SetChar<false>(obj, new_value);
+ } else {
+ DCHECK_EQ(Primitive::kPrimShort, type);
+ field->SetShort<false>(obj, new_value);
+ }
+ return 0; // success
+ }
+ }
+ return -1; // failure
+}
+
extern "C" int artSet32InstanceFromCode(uint32_t field_idx, mirror::Object* obj, uint32_t new_value,
mirror::ArtMethod* referrer, Thread* self,
StackReference<mirror::ArtMethod>* sp)
diff --git a/runtime/entrypoints_order_test.cc b/runtime/entrypoints_order_test.cc
index f572d27c39..a3014b3a9b 100644
--- a/runtime/entrypoints_order_test.cc
+++ b/runtime/entrypoints_order_test.cc
@@ -185,13 +185,25 @@ class EntrypointsOrderTest : public CommonRuntimeTest {
EXPECT_OFFSET_DIFFNP(QuickEntryPoints, pInitializeTypeAndVerifyAccess, pInitializeType,
kPointerSize);
EXPECT_OFFSET_DIFFNP(QuickEntryPoints, pInitializeType, pResolveString, kPointerSize);
- EXPECT_OFFSET_DIFFNP(QuickEntryPoints, pResolveString, pSet32Instance, kPointerSize);
+ EXPECT_OFFSET_DIFFNP(QuickEntryPoints, pResolveString, pSet8Instance, kPointerSize);
+ EXPECT_OFFSET_DIFFNP(QuickEntryPoints, pSet8Instance, pSet8Static, kPointerSize);
+ EXPECT_OFFSET_DIFFNP(QuickEntryPoints, pSet8Static, pSet16Instance, kPointerSize);
+ EXPECT_OFFSET_DIFFNP(QuickEntryPoints, pSet16Instance, pSet16Static, kPointerSize);
+ EXPECT_OFFSET_DIFFNP(QuickEntryPoints, pSet16Static, pSet32Instance, kPointerSize);
EXPECT_OFFSET_DIFFNP(QuickEntryPoints, pSet32Instance, pSet32Static, kPointerSize);
EXPECT_OFFSET_DIFFNP(QuickEntryPoints, pSet32Static, pSet64Instance, kPointerSize);
EXPECT_OFFSET_DIFFNP(QuickEntryPoints, pSet64Instance, pSet64Static, kPointerSize);
EXPECT_OFFSET_DIFFNP(QuickEntryPoints, pSet64Static, pSetObjInstance, kPointerSize);
EXPECT_OFFSET_DIFFNP(QuickEntryPoints, pSetObjInstance, pSetObjStatic, kPointerSize);
- EXPECT_OFFSET_DIFFNP(QuickEntryPoints, pSetObjStatic, pGet32Instance, kPointerSize);
+ EXPECT_OFFSET_DIFFNP(QuickEntryPoints, pSetObjStatic, pGetByteInstance, kPointerSize);
+ EXPECT_OFFSET_DIFFNP(QuickEntryPoints, pGetByteInstance, pGetBooleanInstance, kPointerSize);
+ EXPECT_OFFSET_DIFFNP(QuickEntryPoints, pGetBooleanInstance, pGetByteStatic, kPointerSize);
+ EXPECT_OFFSET_DIFFNP(QuickEntryPoints, pGetByteStatic, pGetBooleanStatic, kPointerSize);
+ EXPECT_OFFSET_DIFFNP(QuickEntryPoints, pGetBooleanStatic, pGetShortInstance, kPointerSize);
+ EXPECT_OFFSET_DIFFNP(QuickEntryPoints, pGetShortInstance, pGetCharInstance, kPointerSize);
+ EXPECT_OFFSET_DIFFNP(QuickEntryPoints, pGetCharInstance, pGetShortStatic, kPointerSize);
+ EXPECT_OFFSET_DIFFNP(QuickEntryPoints, pGetShortStatic, pGetCharStatic, kPointerSize);
+ EXPECT_OFFSET_DIFFNP(QuickEntryPoints, pGetCharStatic, pGet32Instance, kPointerSize);
EXPECT_OFFSET_DIFFNP(QuickEntryPoints, pGet32Instance, pGet32Static, kPointerSize);
EXPECT_OFFSET_DIFFNP(QuickEntryPoints, pGet32Static, pGet64Instance, kPointerSize);
EXPECT_OFFSET_DIFFNP(QuickEntryPoints, pGet64Instance, pGet64Static, kPointerSize);
diff --git a/runtime/interpreter/interpreter_common.cc b/runtime/interpreter/interpreter_common.cc
index 74fa981d56..5724e3519b 100644
--- a/runtime/interpreter/interpreter_common.cc
+++ b/runtime/interpreter/interpreter_common.cc
@@ -32,7 +32,7 @@ bool DoFieldGet(Thread* self, ShadowFrame& shadow_frame, const Instruction* inst
const bool is_static = (find_type == StaticObjectRead) || (find_type == StaticPrimitiveRead);
const uint32_t field_idx = is_static ? inst->VRegB_21c() : inst->VRegC_22c();
ArtField* f = FindFieldFromCode<find_type, do_access_check>(field_idx, shadow_frame.GetMethod(), self,
- Primitive::FieldSize(field_type));
+ Primitive::ComponentSize(field_type));
if (UNLIKELY(f == nullptr)) {
CHECK(self->IsExceptionPending());
return false;
@@ -208,7 +208,7 @@ bool DoFieldPut(Thread* self, const ShadowFrame& shadow_frame, const Instruction
bool is_static = (find_type == StaticObjectWrite) || (find_type == StaticPrimitiveWrite);
uint32_t field_idx = is_static ? inst->VRegB_21c() : inst->VRegC_22c();
ArtField* f = FindFieldFromCode<find_type, do_access_check>(field_idx, shadow_frame.GetMethod(), self,
- Primitive::FieldSize(field_type));
+ Primitive::ComponentSize(field_type));
if (UNLIKELY(f == nullptr)) {
CHECK(self->IsExceptionPending());
return false;
@@ -346,6 +346,18 @@ bool DoIPutQuick(const ShadowFrame& shadow_frame, const Instruction* inst, uint1
}
// Note: iput-x-quick instructions are only for non-volatile fields.
switch (field_type) {
+ case Primitive::kPrimBoolean:
+ obj->SetFieldBoolean<transaction_active>(field_offset, shadow_frame.GetVReg(vregA));
+ break;
+ case Primitive::kPrimByte:
+ obj->SetFieldByte<transaction_active>(field_offset, shadow_frame.GetVReg(vregA));
+ break;
+ case Primitive::kPrimChar:
+ obj->SetFieldChar<transaction_active>(field_offset, shadow_frame.GetVReg(vregA));
+ break;
+ case Primitive::kPrimShort:
+ obj->SetFieldShort<transaction_active>(field_offset, shadow_frame.GetVReg(vregA));
+ break;
case Primitive::kPrimInt:
obj->SetField32<transaction_active>(field_offset, shadow_frame.GetVReg(vregA));
break;
@@ -371,9 +383,13 @@ bool DoIPutQuick(const ShadowFrame& shadow_frame, const Instruction* inst, uint1
EXPLICIT_DO_IPUT_QUICK_TEMPLATE_DECL(_field_type, false); \
EXPLICIT_DO_IPUT_QUICK_TEMPLATE_DECL(_field_type, true);
-EXPLICIT_DO_IPUT_QUICK_ALL_TEMPLATE_DECL(Primitive::kPrimInt); // iget-quick.
-EXPLICIT_DO_IPUT_QUICK_ALL_TEMPLATE_DECL(Primitive::kPrimLong); // iget-wide-quick.
-EXPLICIT_DO_IPUT_QUICK_ALL_TEMPLATE_DECL(Primitive::kPrimNot); // iget-object-quick.
+EXPLICIT_DO_IPUT_QUICK_ALL_TEMPLATE_DECL(Primitive::kPrimInt); // iput-quick.
+EXPLICIT_DO_IPUT_QUICK_ALL_TEMPLATE_DECL(Primitive::kPrimBoolean); // iput-boolean-quick.
+EXPLICIT_DO_IPUT_QUICK_ALL_TEMPLATE_DECL(Primitive::kPrimByte); // iput-byte-quick.
+EXPLICIT_DO_IPUT_QUICK_ALL_TEMPLATE_DECL(Primitive::kPrimChar); // iput-char-quick.
+EXPLICIT_DO_IPUT_QUICK_ALL_TEMPLATE_DECL(Primitive::kPrimShort); // iput-short-quick.
+EXPLICIT_DO_IPUT_QUICK_ALL_TEMPLATE_DECL(Primitive::kPrimLong); // iput-wide-quick.
+EXPLICIT_DO_IPUT_QUICK_ALL_TEMPLATE_DECL(Primitive::kPrimNot); // iput-object-quick.
#undef EXPLICIT_DO_IPUT_QUICK_ALL_TEMPLATE_DECL
#undef EXPLICIT_DO_IPUT_QUICK_TEMPLATE_DECL
diff --git a/runtime/interpreter/interpreter_goto_table_impl.cc b/runtime/interpreter/interpreter_goto_table_impl.cc
index e098ac86ed..755e1ed814 100644
--- a/runtime/interpreter/interpreter_goto_table_impl.cc
+++ b/runtime/interpreter/interpreter_goto_table_impl.cc
@@ -1369,6 +1369,30 @@ JValue ExecuteGotoImpl(Thread* self, MethodHelper& mh, const DexFile::CodeItem*
}
HANDLE_INSTRUCTION_END();
+ HANDLE_INSTRUCTION_START(IPUT_BOOLEAN_QUICK) {
+ bool success = DoIPutQuick<Primitive::kPrimBoolean, transaction_active>(shadow_frame, inst, inst_data);
+ POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, 2);
+ }
+ HANDLE_INSTRUCTION_END();
+
+ HANDLE_INSTRUCTION_START(IPUT_BYTE_QUICK) {
+ bool success = DoIPutQuick<Primitive::kPrimByte, transaction_active>(shadow_frame, inst, inst_data);
+ POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, 2);
+ }
+ HANDLE_INSTRUCTION_END();
+
+ HANDLE_INSTRUCTION_START(IPUT_CHAR_QUICK) {
+ bool success = DoIPutQuick<Primitive::kPrimChar, transaction_active>(shadow_frame, inst, inst_data);
+ POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, 2);
+ }
+ HANDLE_INSTRUCTION_END();
+
+ HANDLE_INSTRUCTION_START(IPUT_SHORT_QUICK) {
+ bool success = DoIPutQuick<Primitive::kPrimShort, transaction_active>(shadow_frame, inst, inst_data);
+ POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, 2);
+ }
+ HANDLE_INSTRUCTION_END();
+
HANDLE_INSTRUCTION_START(IPUT_WIDE_QUICK) {
bool success = DoIPutQuick<Primitive::kPrimLong, transaction_active>(shadow_frame, inst, inst_data);
POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, 2);
@@ -2304,22 +2328,6 @@ JValue ExecuteGotoImpl(Thread* self, MethodHelper& mh, const DexFile::CodeItem*
UnexpectedOpcode(inst, mh);
HANDLE_INSTRUCTION_END();
- HANDLE_INSTRUCTION_START(UNUSED_EB)
- UnexpectedOpcode(inst, mh);
- HANDLE_INSTRUCTION_END();
-
- HANDLE_INSTRUCTION_START(UNUSED_EC)
- UnexpectedOpcode(inst, mh);
- HANDLE_INSTRUCTION_END();
-
- HANDLE_INSTRUCTION_START(UNUSED_ED)
- UnexpectedOpcode(inst, mh);
- HANDLE_INSTRUCTION_END();
-
- HANDLE_INSTRUCTION_START(UNUSED_EE)
- UnexpectedOpcode(inst, mh);
- HANDLE_INSTRUCTION_END();
-
HANDLE_INSTRUCTION_START(UNUSED_EF)
UnexpectedOpcode(inst, mh);
HANDLE_INSTRUCTION_END();
diff --git a/runtime/interpreter/interpreter_switch_impl.cc b/runtime/interpreter/interpreter_switch_impl.cc
index 5401495155..6054a2531d 100644
--- a/runtime/interpreter/interpreter_switch_impl.cc
+++ b/runtime/interpreter/interpreter_switch_impl.cc
@@ -1266,6 +1266,30 @@ JValue ExecuteSwitchImpl(Thread* self, MethodHelper& mh, const DexFile::CodeItem
POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_2xx);
break;
}
+ case Instruction::IPUT_BOOLEAN_QUICK: {
+ PREAMBLE();
+ bool success = DoIPutQuick<Primitive::kPrimBoolean, transaction_active>(shadow_frame, inst, inst_data);
+ POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_2xx);
+ break;
+ }
+ case Instruction::IPUT_BYTE_QUICK: {
+ PREAMBLE();
+ bool success = DoIPutQuick<Primitive::kPrimByte, transaction_active>(shadow_frame, inst, inst_data);
+ POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_2xx);
+ break;
+ }
+ case Instruction::IPUT_CHAR_QUICK: {
+ PREAMBLE();
+ bool success = DoIPutQuick<Primitive::kPrimChar, transaction_active>(shadow_frame, inst, inst_data);
+ POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_2xx);
+ break;
+ }
+ case Instruction::IPUT_SHORT_QUICK: {
+ PREAMBLE();
+ bool success = DoIPutQuick<Primitive::kPrimShort, transaction_active>(shadow_frame, inst, inst_data);
+ POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_2xx);
+ break;
+ }
case Instruction::IPUT_WIDE_QUICK: {
PREAMBLE();
bool success = DoIPutQuick<Primitive::kPrimLong, transaction_active>(shadow_frame, inst, inst_data);
@@ -2164,7 +2188,7 @@ JValue ExecuteSwitchImpl(Thread* self, MethodHelper& mh, const DexFile::CodeItem
inst = inst->Next_2xx();
break;
case Instruction::UNUSED_3E ... Instruction::UNUSED_43:
- case Instruction::UNUSED_EB ... Instruction::UNUSED_FF:
+ case Instruction::UNUSED_EF ... Instruction::UNUSED_FF:
case Instruction::UNUSED_79:
case Instruction::UNUSED_7A:
UnexpectedOpcode(inst, mh);
diff --git a/runtime/mirror/array-inl.h b/runtime/mirror/array-inl.h
index 2c0ea367cc..213dbc20e9 100644
--- a/runtime/mirror/array-inl.h
+++ b/runtime/mirror/array-inl.h
@@ -29,7 +29,7 @@ namespace mirror {
inline uint32_t Array::ClassSize() {
uint32_t vtable_entries = Object::kVTableLength;
- return Class::ComputeClassSize(true, vtable_entries, 0, 0, 0);
+ return Class::ComputeClassSize(true, vtable_entries, 0, 0, 0, 0, 0);
}
template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
diff --git a/runtime/mirror/art_field-inl.h b/runtime/mirror/art_field-inl.h
index 00bed92cb3..d37fa41d3e 100644
--- a/runtime/mirror/art_field-inl.h
+++ b/runtime/mirror/art_field-inl.h
@@ -31,7 +31,7 @@ namespace mirror {
inline uint32_t ArtField::ClassSize() {
uint32_t vtable_entries = Object::kVTableLength + 6;
- return Class::ComputeClassSize(true, vtable_entries, 0, 0, 0);
+ return Class::ComputeClassSize(true, vtable_entries, 0, 0, 0, 0, 0);
}
inline Class* ArtField::GetDeclaringClass() {
@@ -120,50 +120,64 @@ inline void ArtField::SetObj(Object* object, Object* new_value) {
}
}
-inline bool ArtField::GetBoolean(Object* object) {
- DCHECK_EQ(Primitive::kPrimBoolean, GetTypeAsPrimitiveType()) << PrettyField(this);
- return Get32(object);
+#define FIELD_GET(object, type) \
+ DCHECK_EQ(Primitive::kPrim ## type, GetTypeAsPrimitiveType()) << PrettyField(this); \
+ DCHECK(object != nullptr) << PrettyField(this); \
+ DCHECK(!IsStatic() || (object == GetDeclaringClass()) || !Runtime::Current()->IsStarted()); \
+ if (UNLIKELY(IsVolatile())) { \
+ return object->GetField ## type ## Volatile(GetOffset()); \
+ } \
+ return object->GetField ## type(GetOffset());
+
+#define FIELD_SET(object, type, value) \
+ DCHECK_EQ(Primitive::kPrim ## type, GetTypeAsPrimitiveType()) << PrettyField(this); \
+ DCHECK(object != nullptr) << PrettyField(this); \
+ DCHECK(!IsStatic() || (object == GetDeclaringClass()) || !Runtime::Current()->IsStarted()); \
+ if (UNLIKELY(IsVolatile())) { \
+ object->SetField ## type ## Volatile<kTransactionActive>(GetOffset(), value); \
+ } else { \
+ object->SetField ## type<kTransactionActive>(GetOffset(), value); \
+ }
+
+inline uint8_t ArtField::GetBoolean(Object* object) {
+ FIELD_GET(object, Boolean);
}
template<bool kTransactionActive>
-inline void ArtField::SetBoolean(Object* object, bool z) {
- DCHECK_EQ(Primitive::kPrimBoolean, GetTypeAsPrimitiveType()) << PrettyField(this);
- Set32<kTransactionActive>(object, z);
+inline void ArtField::SetBoolean(Object* object, uint8_t z) {
+ FIELD_SET(object, Boolean, z);
}
inline int8_t ArtField::GetByte(Object* object) {
- DCHECK_EQ(Primitive::kPrimByte, GetTypeAsPrimitiveType()) << PrettyField(this);
- return Get32(object);
+ FIELD_GET(object, Byte);
}
template<bool kTransactionActive>
inline void ArtField::SetByte(Object* object, int8_t b) {
- DCHECK_EQ(Primitive::kPrimByte, GetTypeAsPrimitiveType()) << PrettyField(this);
- Set32<kTransactionActive>(object, b);
+ FIELD_SET(object, Byte, b);
}
inline uint16_t ArtField::GetChar(Object* object) {
- DCHECK_EQ(Primitive::kPrimChar, GetTypeAsPrimitiveType()) << PrettyField(this);
- return Get32(object);
+ FIELD_GET(object, Char);
}
template<bool kTransactionActive>
inline void ArtField::SetChar(Object* object, uint16_t c) {
- DCHECK_EQ(Primitive::kPrimChar, GetTypeAsPrimitiveType()) << PrettyField(this);
- Set32<kTransactionActive>(object, c);
+ FIELD_SET(object, Char, c);
}
inline int16_t ArtField::GetShort(Object* object) {
- DCHECK_EQ(Primitive::kPrimShort, GetTypeAsPrimitiveType()) << PrettyField(this);
- return Get32(object);
+ FIELD_GET(object, Short);
}
template<bool kTransactionActive>
inline void ArtField::SetShort(Object* object, int16_t s) {
- DCHECK_EQ(Primitive::kPrimShort, GetTypeAsPrimitiveType()) << PrettyField(this);
- Set32<kTransactionActive>(object, s);
+ FIELD_SET(object, Short, s);
}
+#undef FIELD_GET
+#undef FIELD_SET
+
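For clarity, a hand-written expansion sketch of FIELD_GET for the Byte case (mechanical substitution of the macro above, shown only as illustration):

    // FIELD_GET(object, Byte) inside ArtField::GetByte() expands roughly to:
    //   DCHECK_EQ(Primitive::kPrimByte, GetTypeAsPrimitiveType()) << PrettyField(this);
    //   DCHECK(object != nullptr) << PrettyField(this);
    //   DCHECK(!IsStatic() || (object == GetDeclaringClass()) || !Runtime::Current()->IsStarted());
    //   if (UNLIKELY(IsVolatile())) {
    //     return object->GetFieldByteVolatile(GetOffset());
    //   }
    //   return object->GetFieldByte(GetOffset());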
inline int32_t ArtField::GetInt(Object* object) {
if (kIsDebugBuild) {
Primitive::Type type = GetTypeAsPrimitiveType();
@@ -273,7 +287,7 @@ inline bool ArtField::IsPrimitiveType() SHARED_LOCKS_REQUIRED(Locks::mutator_loc
}
inline size_t ArtField::FieldSize() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- return Primitive::FieldSize(GetTypeAsPrimitiveType());
+ return Primitive::ComponentSize(GetTypeAsPrimitiveType());
}
inline mirror::DexCache* ArtField::GetDexCache() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
diff --git a/runtime/mirror/art_field.h b/runtime/mirror/art_field.h
index f3dfa15004..885bcb06ad 100644
--- a/runtime/mirror/art_field.h
+++ b/runtime/mirror/art_field.h
@@ -95,9 +95,9 @@ class MANAGED ArtField FINAL : public Object {
void SetOffset(MemberOffset num_bytes) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
// field access, null object for static fields
- bool GetBoolean(Object* object) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ uint8_t GetBoolean(Object* object) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
template<bool kTransactionActive>
- void SetBoolean(Object* object, bool z) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ void SetBoolean(Object* object, uint8_t z) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
int8_t GetByte(Object* object) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
template<bool kTransactionActive>
void SetByte(Object* object, int8_t b) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
diff --git a/runtime/mirror/art_method-inl.h b/runtime/mirror/art_method-inl.h
index 06700e6d51..98ca64b735 100644
--- a/runtime/mirror/art_method-inl.h
+++ b/runtime/mirror/art_method-inl.h
@@ -38,7 +38,7 @@ namespace mirror {
inline uint32_t ArtMethod::ClassSize() {
uint32_t vtable_entries = Object::kVTableLength + 8;
- return Class::ComputeClassSize(true, vtable_entries, 0, 0, 0);
+ return Class::ComputeClassSize(true, vtable_entries, 0, 0, 0, 0, 0);
}
template<ReadBarrierOption kReadBarrierOption>
diff --git a/runtime/mirror/class-inl.h b/runtime/mirror/class-inl.h
index b0ff7eaa42..52dd0eed35 100644
--- a/runtime/mirror/class-inl.h
+++ b/runtime/mirror/class-inl.h
@@ -556,6 +556,8 @@ inline Object* Class::AllocNonMovableObject(Thread* self) {
inline uint32_t Class::ComputeClassSize(bool has_embedded_tables,
uint32_t num_vtable_entries,
+ uint32_t num_8bit_static_fields,
+ uint32_t num_16bit_static_fields,
uint32_t num_32bit_static_fields,
uint32_t num_64bit_static_fields,
uint32_t num_ref_static_fields) {
@@ -569,19 +571,39 @@ inline uint32_t Class::ComputeClassSize(bool has_embedded_tables,
sizeof(int32_t) /* vtable len */ +
embedded_vtable_size;
}
+
// Space used by reference statics.
size += num_ref_static_fields * sizeof(HeapReference<Object>);
- // Possible pad for alignment.
- if (((size & 7) != 0) && (num_64bit_static_fields > 0)) {
- size += sizeof(uint32_t);
- if (num_32bit_static_fields != 0) {
- // Shuffle one 32 bit static field forward.
- num_32bit_static_fields--;
+ if (!IsAligned<8>(size) && num_64bit_static_fields > 0) {
+ uint32_t gap = 8 - (size & 0x7);
+    size += gap; // will become padding unless the loops below pull smaller fields into it
+ // Shuffle 4-byte fields forward.
+ while (gap >= sizeof(uint32_t) && num_32bit_static_fields != 0) {
+ --num_32bit_static_fields;
+ gap -= sizeof(uint32_t);
+ }
+ // Shuffle 2-byte fields forward.
+ while (gap >= sizeof(uint16_t) && num_16bit_static_fields != 0) {
+ --num_16bit_static_fields;
+ gap -= sizeof(uint16_t);
+ }
+ // Shuffle byte fields forward.
+ while (gap >= sizeof(uint8_t) && num_8bit_static_fields != 0) {
+ --num_8bit_static_fields;
+ gap -= sizeof(uint8_t);
}
}
+  // Guaranteed to be at least 4-byte aligned. No need for further alignments.
// Space used for primitive static fields.
- size += (num_32bit_static_fields * sizeof(uint32_t)) +
+ size += (num_8bit_static_fields * sizeof(uint8_t)) +
+ (num_16bit_static_fields * sizeof(uint16_t)) +
+ (num_32bit_static_fields * sizeof(uint32_t)) +
(num_64bit_static_fields * sizeof(uint64_t));
+  // For now, the start of the subclass is expected to be 4-byte aligned, so pad the end of the
+  // object to ensure that alignment.
+ if (!IsAligned<4>(size)) {
+ size = RoundUp(size, 4);
+ }
return size;
}
@@ -705,11 +727,11 @@ inline MemberOffset Class::GetSlowPathFlagOffset() {
}
inline bool Class::GetSlowPathEnabled() {
- return GetField32(GetSlowPathFlagOffset());
+ return GetFieldBoolean(GetSlowPathFlagOffset());
}
inline void Class::SetSlowPath(bool enabled) {
- SetField32<false>(GetSlowPathFlagOffset(), enabled);
+ SetFieldBoolean<false>(GetSlowPathFlagOffset(), enabled);
}
inline void Class::InitializeClassVisitor::operator()(
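
The gap-filling loop above pulls smaller statics into the padding that 8-byte alignment of the 64-bit statics would otherwise waste. A standalone sketch of the same arithmetic (not ART code; the starting size and field counts are made-up example values):

#include <cstdint>
#include <cstdio>

// Mirrors the packing logic above: fill the alignment gap before 64-bit statics
// with 32-/16-/8-bit statics, then round the total up to 4 bytes.
uint32_t PackStatics(uint32_t size, uint32_t n8, uint32_t n16, uint32_t n32, uint32_t n64) {
  if ((size & 7u) != 0 && n64 > 0) {
    uint32_t gap = 8 - (size & 7u);
    size += gap;  // the gap becomes padding unless smaller fields can occupy it
    while (gap >= 4 && n32 != 0) { --n32; gap -= 4; }
    while (gap >= 2 && n16 != 0) { --n16; gap -= 2; }
    while (gap >= 1 && n8 != 0)  { --n8;  gap -= 1; }
  }
  size += n8 * 1 + n16 * 2 + n32 * 4 + n64 * 8;
  return (size + 3u) & ~3u;  // RoundUp(size, 4), as in the diff
}

int main() {
  // Start at 30 bytes with one static of each size: the 2-byte gap absorbs the 16-bit field.
  printf("%u\n", PackStatics(30, 1, 1, 1, 1));  // prints 48
  return 0;
}
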
diff --git a/runtime/mirror/class.h b/runtime/mirror/class.h
index 81fbcab96e..0d30bc68a3 100644
--- a/runtime/mirror/class.h
+++ b/runtime/mirror/class.h
@@ -502,6 +502,8 @@ class MANAGED Class FINAL : public Object {
// Compute how many bytes would be used by a class with the given elements.
static uint32_t ComputeClassSize(bool has_embedded_tables,
uint32_t num_vtable_entries,
+ uint32_t num_8bit_static_fields,
+ uint32_t num_16bit_static_fields,
uint32_t num_32bit_static_fields,
uint32_t num_64bit_static_fields,
uint32_t num_ref_static_fields);
@@ -510,12 +512,12 @@ class MANAGED Class FINAL : public Object {
static uint32_t ClassClassSize() {
// The number of vtable entries in java.lang.Class.
uint32_t vtable_entries = Object::kVTableLength + 64;
- return ComputeClassSize(true, vtable_entries, 0, 1, 0);
+ return ComputeClassSize(true, vtable_entries, 0, 0, 0, 1, 0);
}
// The size of a java.lang.Class representing a primitive such as int.class.
static uint32_t PrimitiveClassSize() {
- return ComputeClassSize(false, 0, 0, 0, 0);
+ return ComputeClassSize(false, 0, 0, 0, 0, 0, 0);
}
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
diff --git a/runtime/mirror/dex_cache-inl.h b/runtime/mirror/dex_cache-inl.h
index d3fcb550c6..288e88ecb2 100644
--- a/runtime/mirror/dex_cache-inl.h
+++ b/runtime/mirror/dex_cache-inl.h
@@ -28,7 +28,7 @@ namespace mirror {
inline uint32_t DexCache::ClassSize() {
uint32_t vtable_entries = Object::kVTableLength + 1;
- return Class::ComputeClassSize(true, vtable_entries, 0, 0, 0);
+ return Class::ComputeClassSize(true, vtable_entries, 0, 0, 0, 0, 0);
}
inline ArtMethod* DexCache::GetResolvedMethod(uint32_t method_idx)
diff --git a/runtime/mirror/object-inl.h b/runtime/mirror/object-inl.h
index 9dbfb56c79..8c1dc7d22c 100644
--- a/runtime/mirror/object-inl.h
+++ b/runtime/mirror/object-inl.h
@@ -37,7 +37,7 @@ namespace mirror {
inline uint32_t Object::ClassSize() {
uint32_t vtable_entries = kVTableLength;
- return Class::ComputeClassSize(true, vtable_entries, 0, 0, 0);
+ return Class::ComputeClassSize(true, vtable_entries, 0, 0, 0, 0, 0);
}
template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
@@ -408,17 +408,157 @@ inline size_t Object::SizeOf() {
}
template<VerifyObjectFlags kVerifyFlags, bool kIsVolatile>
-inline int32_t Object::GetField32(MemberOffset field_offset) {
+inline uint8_t Object::GetFieldBoolean(MemberOffset field_offset) {
if (kVerifyFlags & kVerifyThis) {
VerifyObject(this);
}
- const byte* raw_addr = reinterpret_cast<const byte*>(this) + field_offset.Int32Value();
- const int32_t* word_addr = reinterpret_cast<const int32_t*>(raw_addr);
- if (UNLIKELY(kIsVolatile)) {
- return reinterpret_cast<const Atomic<int32_t>*>(word_addr)->LoadSequentiallyConsistent();
- } else {
- return reinterpret_cast<const Atomic<int32_t>*>(word_addr)->LoadJavaData();
+ return GetField<uint8_t, kIsVolatile>(field_offset);
+}
+
+template<VerifyObjectFlags kVerifyFlags, bool kIsVolatile>
+inline int8_t Object::GetFieldByte(MemberOffset field_offset) {
+ if (kVerifyFlags & kVerifyThis) {
+ VerifyObject(this);
+ }
+ return GetField<int8_t, kIsVolatile>(field_offset);
+}
+
+template<VerifyObjectFlags kVerifyFlags>
+inline uint8_t Object::GetFieldBooleanVolatile(MemberOffset field_offset) {
+ return GetFieldBoolean<kVerifyFlags, true>(field_offset);
+}
+
+template<VerifyObjectFlags kVerifyFlags>
+inline int8_t Object::GetFieldByteVolatile(MemberOffset field_offset) {
+ return GetFieldByte<kVerifyFlags, true>(field_offset);
+}
+
+template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags,
+ bool kIsVolatile>
+inline void Object::SetFieldBoolean(MemberOffset field_offset, uint8_t new_value)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ if (kCheckTransaction) {
+ DCHECK_EQ(kTransactionActive, Runtime::Current()->IsActiveTransaction());
+ }
+ if (kTransactionActive) {
+ Runtime::Current()->RecordWriteFieldBoolean(this, field_offset,
+ GetFieldBoolean<kVerifyFlags, kIsVolatile>(field_offset),
+ kIsVolatile);
+ }
+ if (kVerifyFlags & kVerifyThis) {
+ VerifyObject(this);
+ }
+ SetField<uint8_t, kIsVolatile>(field_offset, new_value);
+}
+
+template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags,
+ bool kIsVolatile>
+inline void Object::SetFieldByte(MemberOffset field_offset, int8_t new_value)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ if (kCheckTransaction) {
+ DCHECK_EQ(kTransactionActive, Runtime::Current()->IsActiveTransaction());
+ }
+ if (kTransactionActive) {
+ Runtime::Current()->RecordWriteFieldByte(this, field_offset,
+ GetFieldByte<kVerifyFlags, kIsVolatile>(field_offset),
+ kIsVolatile);
+ }
+ if (kVerifyFlags & kVerifyThis) {
+ VerifyObject(this);
+ }
+ SetField<int8_t, kIsVolatile>(field_offset, new_value);
+}
+
+template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags>
+inline void Object::SetFieldBooleanVolatile(MemberOffset field_offset, uint8_t new_value) {
+ return SetFieldBoolean<kTransactionActive, kCheckTransaction, kVerifyFlags, true>(
+ field_offset, new_value);
+}
+
+template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags>
+inline void Object::SetFieldByteVolatile(MemberOffset field_offset, int8_t new_value) {
+ return SetFieldByte<kTransactionActive, kCheckTransaction, kVerifyFlags, true>(
+ field_offset, new_value);
+}
+
+template<VerifyObjectFlags kVerifyFlags, bool kIsVolatile>
+inline uint16_t Object::GetFieldChar(MemberOffset field_offset) {
+ if (kVerifyFlags & kVerifyThis) {
+ VerifyObject(this);
+ }
+ return GetField<uint16_t, kIsVolatile>(field_offset);
+}
+
+template<VerifyObjectFlags kVerifyFlags, bool kIsVolatile>
+inline int16_t Object::GetFieldShort(MemberOffset field_offset) {
+ if (kVerifyFlags & kVerifyThis) {
+ VerifyObject(this);
}
+ return GetField<int16_t, kIsVolatile>(field_offset);
+}
+
+template<VerifyObjectFlags kVerifyFlags>
+inline uint16_t Object::GetFieldCharVolatile(MemberOffset field_offset) {
+ return GetFieldChar<kVerifyFlags, true>(field_offset);
+}
+
+template<VerifyObjectFlags kVerifyFlags>
+inline int16_t Object::GetFieldShortVolatile(MemberOffset field_offset) {
+ return GetFieldShort<kVerifyFlags, true>(field_offset);
+}
+
+template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags,
+ bool kIsVolatile>
+inline void Object::SetFieldChar(MemberOffset field_offset, uint16_t new_value) {
+ if (kCheckTransaction) {
+ DCHECK_EQ(kTransactionActive, Runtime::Current()->IsActiveTransaction());
+ }
+ if (kTransactionActive) {
+ Runtime::Current()->RecordWriteFieldChar(this, field_offset,
+ GetFieldChar<kVerifyFlags, kIsVolatile>(field_offset),
+ kIsVolatile);
+ }
+ if (kVerifyFlags & kVerifyThis) {
+ VerifyObject(this);
+ }
+ SetField<uint16_t, kIsVolatile>(field_offset, new_value);
+}
+
+template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags,
+ bool kIsVolatile>
+inline void Object::SetFieldShort(MemberOffset field_offset, int16_t new_value) {
+ if (kCheckTransaction) {
+ DCHECK_EQ(kTransactionActive, Runtime::Current()->IsActiveTransaction());
+ }
+ if (kTransactionActive) {
+    Runtime::Current()->RecordWriteFieldShort(this, field_offset,
+                                              GetFieldShort<kVerifyFlags, kIsVolatile>(field_offset),
+                                              kIsVolatile);
+ }
+ if (kVerifyFlags & kVerifyThis) {
+ VerifyObject(this);
+ }
+ SetField<int16_t, kIsVolatile>(field_offset, new_value);
+}
+
+template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags>
+inline void Object::SetFieldCharVolatile(MemberOffset field_offset, uint16_t new_value) {
+ return SetFieldChar<kTransactionActive, kCheckTransaction, kVerifyFlags, true>(
+ field_offset, new_value);
+}
+
+template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags>
+inline void Object::SetFieldShortVolatile(MemberOffset field_offset, int16_t new_value) {
+ return SetFieldShort<kTransactionActive, kCheckTransaction, kVerifyFlags, true>(
+ field_offset, new_value);
+}
+
+template<VerifyObjectFlags kVerifyFlags, bool kIsVolatile>
+inline int32_t Object::GetField32(MemberOffset field_offset) {
+ if (kVerifyFlags & kVerifyThis) {
+ VerifyObject(this);
+ }
+ return GetField<int32_t, kIsVolatile>(field_offset);
}
template<VerifyObjectFlags kVerifyFlags>
@@ -440,13 +580,7 @@ inline void Object::SetField32(MemberOffset field_offset, int32_t new_value) {
if (kVerifyFlags & kVerifyThis) {
VerifyObject(this);
}
- byte* raw_addr = reinterpret_cast<byte*>(this) + field_offset.Int32Value();
- int32_t* word_addr = reinterpret_cast<int32_t*>(raw_addr);
- if (kIsVolatile) {
- reinterpret_cast<Atomic<int32_t>*>(word_addr)->StoreSequentiallyConsistent(new_value);
- } else {
- reinterpret_cast<Atomic<int32_t>*>(word_addr)->StoreJavaData(new_value);
- }
+ SetField<int32_t, kIsVolatile>(field_offset, new_value);
}
template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags>
@@ -515,13 +649,7 @@ inline int64_t Object::GetField64(MemberOffset field_offset) {
if (kVerifyFlags & kVerifyThis) {
VerifyObject(this);
}
- const byte* raw_addr = reinterpret_cast<const byte*>(this) + field_offset.Int32Value();
- const int64_t* addr = reinterpret_cast<const int64_t*>(raw_addr);
- if (kIsVolatile) {
- return reinterpret_cast<const Atomic<int64_t>*>(addr)->LoadSequentiallyConsistent();
- } else {
- return reinterpret_cast<const Atomic<int64_t>*>(addr)->LoadJavaData();
- }
+ return GetField<int64_t, kIsVolatile>(field_offset);
}
template<VerifyObjectFlags kVerifyFlags>
@@ -543,13 +671,7 @@ inline void Object::SetField64(MemberOffset field_offset, int64_t new_value) {
if (kVerifyFlags & kVerifyThis) {
VerifyObject(this);
}
- byte* raw_addr = reinterpret_cast<byte*>(this) + field_offset.Int32Value();
- int64_t* addr = reinterpret_cast<int64_t*>(raw_addr);
- if (kIsVolatile) {
- reinterpret_cast<Atomic<int64_t>*>(addr)->StoreSequentiallyConsistent(new_value);
- } else {
- reinterpret_cast<Atomic<int64_t>*>(addr)->StoreJavaData(new_value);
- }
+ SetField<int64_t, kIsVolatile>(field_offset, new_value);
}
template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags>
@@ -558,6 +680,28 @@ inline void Object::SetField64Volatile(MemberOffset field_offset, int64_t new_va
new_value);
}
+template<typename kSize, bool kIsVolatile>
+inline void Object::SetField(MemberOffset field_offset, kSize new_value) {
+ byte* raw_addr = reinterpret_cast<byte*>(this) + field_offset.Int32Value();
+ kSize* addr = reinterpret_cast<kSize*>(raw_addr);
+ if (kIsVolatile) {
+ reinterpret_cast<Atomic<kSize>*>(addr)->StoreSequentiallyConsistent(new_value);
+ } else {
+ reinterpret_cast<Atomic<kSize>*>(addr)->StoreJavaData(new_value);
+ }
+}
+
+template<typename kSize, bool kIsVolatile>
+inline kSize Object::GetField(MemberOffset field_offset) {
+ const byte* raw_addr = reinterpret_cast<const byte*>(this) + field_offset.Int32Value();
+ const kSize* addr = reinterpret_cast<const kSize*>(raw_addr);
+ if (kIsVolatile) {
+ return reinterpret_cast<const Atomic<kSize>*>(addr)->LoadSequentiallyConsistent();
+ } else {
+ return reinterpret_cast<const Atomic<kSize>*>(addr)->LoadJavaData();
+ }
+}
+
template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags>
inline bool Object::CasFieldWeakSequentiallyConsistent64(MemberOffset field_offset,
int64_t old_value, int64_t new_value) {
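
All of the typed getters and setters above funnel into the new private GetField<kSize, kIsVolatile>/SetField<kSize, kIsVolatile> templates. A minimal, self-contained sketch of the same pattern, using std::atomic in place of ART's Atomic<> wrapper and a plain copy in place of the relaxed "Java data" access (illustrative only, not ART code):

#include <atomic>
#include <cstddef>
#include <cstdint>
#include <cstring>

struct ObjectSketch {
  alignas(8) unsigned char storage[64];  // stand-in for the object body

  template <typename T, bool kIsVolatile>
  T GetField(size_t offset) const {
    const unsigned char* raw = storage + offset;
    if (kIsVolatile) {
      // Volatile Java fields use sequentially consistent atomic accesses.
      return reinterpret_cast<const std::atomic<T>*>(raw)->load(std::memory_order_seq_cst);
    }
    T value;  // non-volatile path; ART uses a relaxed atomic ("Java data") load here
    std::memcpy(&value, raw, sizeof(T));
    return value;
  }

  template <typename T, bool kIsVolatile>
  void SetField(size_t offset, T value) {
    unsigned char* raw = storage + offset;
    if (kIsVolatile) {
      reinterpret_cast<std::atomic<T>*>(raw)->store(value, std::memory_order_seq_cst);
    } else {
      std::memcpy(raw, &value, sizeof(T));
    }
  }
};

With this shape, GetFieldBoolean reduces to GetField<uint8_t, kIsVolatile>, GetFieldChar to GetField<uint16_t, kIsVolatile>, and so on, which is also how the diff rewrites GetField32 and GetField64.
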
diff --git a/runtime/mirror/object.h b/runtime/mirror/object.h
index a6b622719e..6cd230b533 100644
--- a/runtime/mirror/object.h
+++ b/runtime/mirror/object.h
@@ -19,6 +19,7 @@
#include "object_reference.h"
#include "offsets.h"
+#include "runtime.h"
#include "verify_object.h"
namespace art {
@@ -247,6 +248,78 @@ class MANAGED LOCKABLE Object {
HeapReference<Object>* GetFieldObjectReferenceAddr(MemberOffset field_offset);
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
+ ALWAYS_INLINE uint8_t GetFieldBoolean(MemberOffset field_offset)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
+ template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
+ ALWAYS_INLINE int8_t GetFieldByte(MemberOffset field_offset)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
+ template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
+ ALWAYS_INLINE uint8_t GetFieldBooleanVolatile(MemberOffset field_offset)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
+ template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
+ ALWAYS_INLINE int8_t GetFieldByteVolatile(MemberOffset field_offset)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
+ template<bool kTransactionActive, bool kCheckTransaction = true,
+ VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
+ ALWAYS_INLINE void SetFieldBoolean(MemberOffset field_offset, uint8_t new_value)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
+ template<bool kTransactionActive, bool kCheckTransaction = true,
+ VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
+ ALWAYS_INLINE void SetFieldByte(MemberOffset field_offset, int8_t new_value)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
+ template<bool kTransactionActive, bool kCheckTransaction = true,
+ VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
+ ALWAYS_INLINE void SetFieldBooleanVolatile(MemberOffset field_offset, uint8_t new_value)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
+ template<bool kTransactionActive, bool kCheckTransaction = true,
+ VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
+ ALWAYS_INLINE void SetFieldByteVolatile(MemberOffset field_offset, int8_t new_value)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
+ template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
+ ALWAYS_INLINE uint16_t GetFieldChar(MemberOffset field_offset)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
+ template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
+ ALWAYS_INLINE int16_t GetFieldShort(MemberOffset field_offset)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
+ template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
+ ALWAYS_INLINE uint16_t GetFieldCharVolatile(MemberOffset field_offset)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
+ template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
+ ALWAYS_INLINE int16_t GetFieldShortVolatile(MemberOffset field_offset)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
+ template<bool kTransactionActive, bool kCheckTransaction = true,
+ VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
+ ALWAYS_INLINE void SetFieldChar(MemberOffset field_offset, uint16_t new_value)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
+ template<bool kTransactionActive, bool kCheckTransaction = true,
+ VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
+ ALWAYS_INLINE void SetFieldShort(MemberOffset field_offset, int16_t new_value)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
+ template<bool kTransactionActive, bool kCheckTransaction = true,
+ VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
+ ALWAYS_INLINE void SetFieldCharVolatile(MemberOffset field_offset, uint16_t new_value)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
+ template<bool kTransactionActive, bool kCheckTransaction = true,
+ VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
+ ALWAYS_INLINE void SetFieldShortVolatile(MemberOffset field_offset, int16_t new_value)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
+ template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
ALWAYS_INLINE int32_t GetField32(MemberOffset field_offset)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
@@ -356,6 +429,13 @@ class MANAGED LOCKABLE Object {
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
private:
+ template<typename kSize, bool kIsVolatile>
+ ALWAYS_INLINE void SetField(MemberOffset field_offset, kSize new_value)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ template<typename kSize, bool kIsVolatile>
+ ALWAYS_INLINE kSize GetField(MemberOffset field_offset)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
// Verify the type correctness of stores to fields.
// TODO: This can cause thread suspension and isn't moving GC safe.
void CheckFieldAssignmentImpl(MemberOffset field_offset, Object* new_value)
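
The transaction-related template parameters are resolved at the call site. A hedged usage illustration, assuming the usual ART includes and with a made-up helper name and field offset:

// Hypothetical helper; the function name and the offset value 12 are invented for illustration.
void SetHypotheticalFlag(mirror::Object* obj) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  MemberOffset flag_offset(12);
  if (Runtime::Current()->IsActiveTransaction()) {
    obj->SetFieldBoolean<true>(flag_offset, 1u);   // old value is recorded so it can be rolled back
  } else {
    obj->SetFieldBoolean<false>(flag_offset, 1u);  // plain store, no transaction bookkeeping
  }
}
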
diff --git a/runtime/mirror/reference-inl.h b/runtime/mirror/reference-inl.h
index b353402602..d1d2a3af76 100644
--- a/runtime/mirror/reference-inl.h
+++ b/runtime/mirror/reference-inl.h
@@ -24,7 +24,7 @@ namespace mirror {
inline uint32_t Reference::ClassSize() {
uint32_t vtable_entries = Object::kVTableLength + 5;
- return Class::ComputeClassSize(false, vtable_entries, 2, 0, 0);
+ return Class::ComputeClassSize(false, vtable_entries, 2, 0, 0, 0, 0);
}
inline bool Reference::IsEnqueuable() {
diff --git a/runtime/mirror/string-inl.h b/runtime/mirror/string-inl.h
index 6736497593..f98407b224 100644
--- a/runtime/mirror/string-inl.h
+++ b/runtime/mirror/string-inl.h
@@ -29,7 +29,7 @@ namespace mirror {
inline uint32_t String::ClassSize() {
uint32_t vtable_entries = Object::kVTableLength + 51;
- return Class::ComputeClassSize(true, vtable_entries, 1, 1, 2);
+ return Class::ComputeClassSize(true, vtable_entries, 0, 1, 0, 1, 2);
}
inline CharArray* String::GetCharArray() {
diff --git a/runtime/oat.cc b/runtime/oat.cc
index ede108cd19..43173cad15 100644
--- a/runtime/oat.cc
+++ b/runtime/oat.cc
@@ -23,7 +23,7 @@
namespace art {
const uint8_t OatHeader::kOatMagic[] = { 'o', 'a', 't', '\n' };
-const uint8_t OatHeader::kOatVersion[] = { '0', '3', '9', '\0' };
+const uint8_t OatHeader::kOatVersion[] = { '0', '4', '0', '\0' };
static size_t ComputeOatHeaderSize(const SafeMap<std::string, std::string>* variable_data) {
size_t estimate = 0U;
diff --git a/runtime/primitive.h b/runtime/primitive.h
index a36e9cb31b..36ad662ff3 100644
--- a/runtime/primitive.h
+++ b/runtime/primitive.h
@@ -84,10 +84,6 @@ class Primitive {
}
}
- static size_t FieldSize(Type type) {
- return ComponentSize(type) <= 4 ? 4 : 8;
- }
-
static const char* Descriptor(Type type) {
switch (type) {
case kPrimBoolean:
diff --git a/runtime/runtime.cc b/runtime/runtime.cc
index 3e03c1a16b..3fc6ad53d2 100644
--- a/runtime/runtime.cc
+++ b/runtime/runtime.cc
@@ -1207,6 +1207,34 @@ void Runtime::ExitTransactionMode() {
preinitialization_transaction_ = nullptr;
}
+void Runtime::RecordWriteFieldBoolean(mirror::Object* obj, MemberOffset field_offset,
+ uint8_t value, bool is_volatile) const {
+ DCHECK(IsCompiler());
+ DCHECK(IsActiveTransaction());
+ preinitialization_transaction_->RecordWriteFieldBoolean(obj, field_offset, value, is_volatile);
+}
+
+void Runtime::RecordWriteFieldByte(mirror::Object* obj, MemberOffset field_offset,
+ int8_t value, bool is_volatile) const {
+ DCHECK(IsCompiler());
+ DCHECK(IsActiveTransaction());
+ preinitialization_transaction_->RecordWriteFieldByte(obj, field_offset, value, is_volatile);
+}
+
+void Runtime::RecordWriteFieldChar(mirror::Object* obj, MemberOffset field_offset,
+ uint16_t value, bool is_volatile) const {
+ DCHECK(IsCompiler());
+ DCHECK(IsActiveTransaction());
+ preinitialization_transaction_->RecordWriteFieldChar(obj, field_offset, value, is_volatile);
+}
+
+void Runtime::RecordWriteFieldShort(mirror::Object* obj, MemberOffset field_offset,
+ int16_t value, bool is_volatile) const {
+ DCHECK(IsCompiler());
+ DCHECK(IsActiveTransaction());
+ preinitialization_transaction_->RecordWriteFieldShort(obj, field_offset, value, is_volatile);
+}
+
void Runtime::RecordWriteField32(mirror::Object* obj, MemberOffset field_offset,
uint32_t value, bool is_volatile) const {
DCHECK(IsCompiler());
diff --git a/runtime/runtime.h b/runtime/runtime.h
index b0a88d5684..fc93d16d94 100644
--- a/runtime/runtime.h
+++ b/runtime/runtime.h
@@ -415,6 +415,14 @@ class Runtime {
}
void EnterTransactionMode(Transaction* transaction);
void ExitTransactionMode();
+ void RecordWriteFieldBoolean(mirror::Object* obj, MemberOffset field_offset, uint8_t value,
+ bool is_volatile) const;
+ void RecordWriteFieldByte(mirror::Object* obj, MemberOffset field_offset, int8_t value,
+ bool is_volatile) const;
+ void RecordWriteFieldChar(mirror::Object* obj, MemberOffset field_offset, uint16_t value,
+ bool is_volatile) const;
+ void RecordWriteFieldShort(mirror::Object* obj, MemberOffset field_offset, int16_t value,
+ bool is_volatile) const;
void RecordWriteField32(mirror::Object* obj, MemberOffset field_offset, uint32_t value,
bool is_volatile) const;
void RecordWriteField64(mirror::Object* obj, MemberOffset field_offset, uint64_t value,
diff --git a/runtime/thread.cc b/runtime/thread.cc
index 7ac685bd84..44b0ab4833 100644
--- a/runtime/thread.cc
+++ b/runtime/thread.cc
@@ -1860,12 +1860,24 @@ void Thread::DumpThreadOffset(std::ostream& os, uint32_t offset) {
QUICK_ENTRY_POINT_INFO(pInitializeTypeAndVerifyAccess)
QUICK_ENTRY_POINT_INFO(pInitializeType)
QUICK_ENTRY_POINT_INFO(pResolveString)
+ QUICK_ENTRY_POINT_INFO(pSet8Instance)
+ QUICK_ENTRY_POINT_INFO(pSet8Static)
+ QUICK_ENTRY_POINT_INFO(pSet16Instance)
+ QUICK_ENTRY_POINT_INFO(pSet16Static)
QUICK_ENTRY_POINT_INFO(pSet32Instance)
QUICK_ENTRY_POINT_INFO(pSet32Static)
QUICK_ENTRY_POINT_INFO(pSet64Instance)
QUICK_ENTRY_POINT_INFO(pSet64Static)
QUICK_ENTRY_POINT_INFO(pSetObjInstance)
QUICK_ENTRY_POINT_INFO(pSetObjStatic)
+ QUICK_ENTRY_POINT_INFO(pGetByteInstance)
+ QUICK_ENTRY_POINT_INFO(pGetBooleanInstance)
+ QUICK_ENTRY_POINT_INFO(pGetByteStatic)
+ QUICK_ENTRY_POINT_INFO(pGetBooleanStatic)
+ QUICK_ENTRY_POINT_INFO(pGetShortInstance)
+ QUICK_ENTRY_POINT_INFO(pGetCharInstance)
+ QUICK_ENTRY_POINT_INFO(pGetShortStatic)
+ QUICK_ENTRY_POINT_INFO(pGetCharStatic)
QUICK_ENTRY_POINT_INFO(pGet32Instance)
QUICK_ENTRY_POINT_INFO(pGet32Static)
QUICK_ENTRY_POINT_INFO(pGet64Instance)
diff --git a/runtime/transaction.cc b/runtime/transaction.cc
index cc02a8de74..0cfdfc57fc 100644
--- a/runtime/transaction.cc
+++ b/runtime/transaction.cc
@@ -57,6 +57,38 @@ Transaction::~Transaction() {
}
}
+void Transaction::RecordWriteFieldBoolean(mirror::Object* obj, MemberOffset field_offset,
+ uint8_t value, bool is_volatile) {
+ DCHECK(obj != nullptr);
+ MutexLock mu(Thread::Current(), log_lock_);
+ ObjectLog& object_log = object_logs_[obj];
+ object_log.LogBooleanValue(field_offset, value, is_volatile);
+}
+
+void Transaction::RecordWriteFieldByte(mirror::Object* obj, MemberOffset field_offset,
+ int8_t value, bool is_volatile) {
+ DCHECK(obj != nullptr);
+ MutexLock mu(Thread::Current(), log_lock_);
+ ObjectLog& object_log = object_logs_[obj];
+ object_log.LogByteValue(field_offset, value, is_volatile);
+}
+
+void Transaction::RecordWriteFieldChar(mirror::Object* obj, MemberOffset field_offset,
+ uint16_t value, bool is_volatile) {
+ DCHECK(obj != nullptr);
+ MutexLock mu(Thread::Current(), log_lock_);
+ ObjectLog& object_log = object_logs_[obj];
+ object_log.LogCharValue(field_offset, value, is_volatile);
+}
+
+void Transaction::RecordWriteFieldShort(mirror::Object* obj, MemberOffset field_offset,
+ int16_t value, bool is_volatile) {
+ DCHECK(obj != nullptr);
+ MutexLock mu(Thread::Current(), log_lock_);
+ ObjectLog& object_log = object_logs_[obj];
+ object_log.LogShortValue(field_offset, value, is_volatile);
+}
+
void Transaction::RecordWriteField32(mirror::Object* obj, MemberOffset field_offset, uint32_t value,
bool is_volatile) {
DCHECK(obj != nullptr);
@@ -223,35 +257,42 @@ void Transaction::VisitStringLogs(RootCallback* callback, void* arg) {
}
}
+void Transaction::ObjectLog::LogBooleanValue(MemberOffset offset, uint8_t value, bool is_volatile) {
+ LogValue(ObjectLog::kBoolean, offset, value, is_volatile);
+}
+
+void Transaction::ObjectLog::LogByteValue(MemberOffset offset, int8_t value, bool is_volatile) {
+ LogValue(ObjectLog::kByte, offset, value, is_volatile);
+}
+
+void Transaction::ObjectLog::LogCharValue(MemberOffset offset, uint16_t value, bool is_volatile) {
+ LogValue(ObjectLog::kChar, offset, value, is_volatile);
+}
+
+void Transaction::ObjectLog::LogShortValue(MemberOffset offset, int16_t value, bool is_volatile) {
+ LogValue(ObjectLog::kShort, offset, value, is_volatile);
+}
+
void Transaction::ObjectLog::Log32BitsValue(MemberOffset offset, uint32_t value, bool is_volatile) {
- auto it = field_values_.find(offset.Uint32Value());
- if (it == field_values_.end()) {
- ObjectLog::FieldValue field_value;
- field_value.value = value;
- field_value.is_volatile = is_volatile;
- field_value.kind = ObjectLog::k32Bits;
- field_values_.insert(std::make_pair(offset.Uint32Value(), field_value));
- }
+ LogValue(ObjectLog::k32Bits, offset, value, is_volatile);
}
void Transaction::ObjectLog::Log64BitsValue(MemberOffset offset, uint64_t value, bool is_volatile) {
- auto it = field_values_.find(offset.Uint32Value());
- if (it == field_values_.end()) {
- ObjectLog::FieldValue field_value;
- field_value.value = value;
- field_value.is_volatile = is_volatile;
- field_value.kind = ObjectLog::k64Bits;
- field_values_.insert(std::make_pair(offset.Uint32Value(), field_value));
- }
+ LogValue(ObjectLog::k64Bits, offset, value, is_volatile);
}
void Transaction::ObjectLog::LogReferenceValue(MemberOffset offset, mirror::Object* obj, bool is_volatile) {
+ LogValue(ObjectLog::kReference, offset, reinterpret_cast<uintptr_t>(obj), is_volatile);
+}
+
+void Transaction::ObjectLog::LogValue(ObjectLog::FieldValueKind kind,
+ MemberOffset offset, uint64_t value, bool is_volatile) {
auto it = field_values_.find(offset.Uint32Value());
if (it == field_values_.end()) {
ObjectLog::FieldValue field_value;
- field_value.value = reinterpret_cast<uintptr_t>(obj);
+ field_value.value = value;
field_value.is_volatile = is_volatile;
- field_value.kind = ObjectLog::kReference;
+ field_value.kind = kind;
field_values_.insert(std::make_pair(offset.Uint32Value(), field_value));
}
}
@@ -281,6 +322,42 @@ void Transaction::ObjectLog::UndoFieldWrite(mirror::Object* obj, MemberOffset fi
// we'd need to disable the check.
constexpr bool kCheckTransaction = true;
switch (field_value.kind) {
+ case kBoolean:
+ if (UNLIKELY(field_value.is_volatile)) {
+ obj->SetFieldBooleanVolatile<false, kCheckTransaction>(field_offset,
+ static_cast<bool>(field_value.value));
+ } else {
+ obj->SetFieldBoolean<false, kCheckTransaction>(field_offset,
+ static_cast<bool>(field_value.value));
+ }
+ break;
+ case kByte:
+ if (UNLIKELY(field_value.is_volatile)) {
+ obj->SetFieldByteVolatile<false, kCheckTransaction>(field_offset,
+ static_cast<int8_t>(field_value.value));
+ } else {
+ obj->SetFieldByte<false, kCheckTransaction>(field_offset,
+ static_cast<int8_t>(field_value.value));
+ }
+ break;
+ case kChar:
+ if (UNLIKELY(field_value.is_volatile)) {
+ obj->SetFieldCharVolatile<false, kCheckTransaction>(field_offset,
+ static_cast<uint16_t>(field_value.value));
+ } else {
+ obj->SetFieldChar<false, kCheckTransaction>(field_offset,
+ static_cast<uint16_t>(field_value.value));
+ }
+ break;
+ case kShort:
+ if (UNLIKELY(field_value.is_volatile)) {
+ obj->SetFieldShortVolatile<false, kCheckTransaction>(field_offset,
+ static_cast<int16_t>(field_value.value));
+ } else {
+ obj->SetFieldShort<false, kCheckTransaction>(field_offset,
+ static_cast<int16_t>(field_value.value));
+ }
+ break;
case k32Bits:
if (UNLIKELY(field_value.is_volatile)) {
obj->SetField32Volatile<false, kCheckTransaction>(field_offset,
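
The common LogValue helper introduced above records only the first write seen for a given offset (callers pass the field's value from before the store), so UndoFieldWrite restores the pre-transaction state no matter how many times the field was subsequently written. A simplified standalone sketch of that first-write-wins behaviour (not ART code):

#include <cstdint>
#include <map>

struct FieldValueSketch {
  uint64_t value;   // value the field held before the first transactional write
  bool is_volatile;
  int kind;         // stands in for ObjectLog::FieldValueKind
};

void LogValueSketch(std::map<uint32_t, FieldValueSketch>& log, int kind,
                    uint32_t offset, uint64_t old_value, bool is_volatile) {
  // First write wins: later writes to the same offset keep the original entry,
  // so undoing the log restores the field to its pre-transaction value.
  if (log.find(offset) == log.end()) {
    log.emplace(offset, FieldValueSketch{old_value, is_volatile, kind});
  }
}
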
diff --git a/runtime/transaction.h b/runtime/transaction.h
index 7859126764..63900493a5 100644
--- a/runtime/transaction.h
+++ b/runtime/transaction.h
@@ -41,6 +41,18 @@ class Transaction {
~Transaction();
// Record object field changes.
+ void RecordWriteFieldBoolean(mirror::Object* obj, MemberOffset field_offset, uint8_t value,
+ bool is_volatile)
+ LOCKS_EXCLUDED(log_lock_);
+ void RecordWriteFieldByte(mirror::Object* obj, MemberOffset field_offset, int8_t value,
+ bool is_volatile)
+ LOCKS_EXCLUDED(log_lock_);
+ void RecordWriteFieldChar(mirror::Object* obj, MemberOffset field_offset, uint16_t value,
+ bool is_volatile)
+ LOCKS_EXCLUDED(log_lock_);
+ void RecordWriteFieldShort(mirror::Object* obj, MemberOffset field_offset, int16_t value,
+ bool is_volatile)
+ LOCKS_EXCLUDED(log_lock_);
void RecordWriteField32(mirror::Object* obj, MemberOffset field_offset, uint32_t value,
bool is_volatile)
LOCKS_EXCLUDED(log_lock_);
@@ -82,6 +94,10 @@ class Transaction {
private:
class ObjectLog {
public:
+ void LogBooleanValue(MemberOffset offset, uint8_t value, bool is_volatile);
+ void LogByteValue(MemberOffset offset, int8_t value, bool is_volatile);
+ void LogCharValue(MemberOffset offset, uint16_t value, bool is_volatile);
+ void LogShortValue(MemberOffset offset, int16_t value, bool is_volatile);
void Log32BitsValue(MemberOffset offset, uint32_t value, bool is_volatile);
void Log64BitsValue(MemberOffset offset, uint64_t value, bool is_volatile);
void LogReferenceValue(MemberOffset offset, mirror::Object* obj, bool is_volatile);
@@ -95,6 +111,10 @@ class Transaction {
private:
enum FieldValueKind {
+ kBoolean,
+ kByte,
+ kChar,
+ kShort,
k32Bits,
k64Bits,
kReference
@@ -106,6 +126,7 @@ class Transaction {
bool is_volatile;
};
+ void LogValue(FieldValueKind kind, MemberOffset offset, uint64_t value, bool is_volatile);
void UndoFieldWrite(mirror::Object* obj, MemberOffset field_offset,
const FieldValue& field_value) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
diff --git a/runtime/verifier/method_verifier.cc b/runtime/verifier/method_verifier.cc
index c01c6925dd..6f9680f2c4 100644
--- a/runtime/verifier/method_verifier.cc
+++ b/runtime/verifier/method_verifier.cc
@@ -2629,6 +2629,18 @@ bool MethodVerifier::CodeFlowVerifyInstruction(uint32_t* start_guess) {
case Instruction::IPUT_QUICK:
VerifyIPutQuick(inst, reg_types_.Integer(), true);
break;
+ case Instruction::IPUT_BOOLEAN_QUICK:
+ VerifyIPutQuick(inst, reg_types_.Boolean(), true);
+ break;
+ case Instruction::IPUT_BYTE_QUICK:
+ VerifyIPutQuick(inst, reg_types_.Byte(), true);
+ break;
+ case Instruction::IPUT_CHAR_QUICK:
+ VerifyIPutQuick(inst, reg_types_.Char(), true);
+ break;
+ case Instruction::IPUT_SHORT_QUICK:
+ VerifyIPutQuick(inst, reg_types_.Short(), true);
+ break;
case Instruction::IPUT_WIDE_QUICK:
VerifyIPutQuick(inst, reg_types_.LongLo(), true);
break;
@@ -2661,10 +2673,6 @@ bool MethodVerifier::CodeFlowVerifyInstruction(uint32_t* start_guess) {
case Instruction::UNUSED_43:
case Instruction::UNUSED_79:
case Instruction::UNUSED_7A:
- case Instruction::UNUSED_EB:
- case Instruction::UNUSED_EC:
- case Instruction::UNUSED_ED:
- case Instruction::UNUSED_EE:
case Instruction::UNUSED_EF:
case Instruction::UNUSED_F0:
case Instruction::UNUSED_F1: