} UNSAFE_END  // closes the preceding UNSAFE_* definition (not visible in this chunk)

// This function is a leaf since if the source and destination are both in native memory
// the copy may potentially be very large, and we don't want to disable GC if we can avoid it.
// If either source or destination (or both) are on the heap, the function will enter VM using
// JVM_ENTRY_FROM_LEAF
UNSAFE_LEAF(void, Unsafe_CopySwapMemory0(JNIEnv *env, jobject unsafe, jobject srcObj, jlong srcOffset, jobject dstObj, jlong dstOffset, jlong size, jlong elemSize)) {
  // Narrow the jlong byte count and element size for the copy routine.
  size_t sz = (size_t)size;
  size_t esz = (size_t)elemSize;

  if (srcObj == NULL && dstObj == NULL) {
    // Both src & dst are in native memory
    // (the offsets are absolute native addresses in this case).
    address src = (address)srcOffset;
    address dst = (address)dstOffset;

    Copy::conjoint_swap(src, dst, sz, esz);
  } else {
    // At least one of src/dst are on heap, transition to VM to access raw pointers
    JVM_ENTRY_FROM_LEAF(env, void, Unsafe_CopySwapMemory0) {
      oop srcp = JNIHandles::resolve(srcObj);
      oop dstp = JNIHandles::resolve(dstObj);

      // Translate (base oop, offset) pairs into raw addresses.
      address src = (address)index_oop_from_field_offset_long(srcp, srcOffset);
      address dst = (address)index_oop_from_field_offset_long(dstp, dstOffset);

      Copy::conjoint_swap(src, dst, sz, esz);
    } JVM_END
  }
}
} UNSAFE_END

// Stores the reference x_h into obj at the given field offset, using
// whichever oop encoding (compressed or full-width) the VM is running with.
UNSAFE_ENTRY(void, Unsafe_PutObject(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject x_h)) {
  oop value = JNIHandles::resolve(x_h);
  oop base  = JNIHandles::resolve(obj);

  // Resolve the destination slot once, then dispatch on the encoding.
  void* dest = index_oop_from_field_offset_long(base, offset);
  if (!UseCompressedOops) {
    oop_store((oop*)dest, value);
  } else {
    oop_store((narrowOop*)dest, value);
  }
} UNSAFE_END
} UNSAFE_END

// Copies 'size' bytes from (srcObj, srcOffset) to (dstObj, dstOffset).
// A base may be a heap object or NULL (offset is then a native address).
UNSAFE_ENTRY(void, Unsafe_CopyMemory0(JNIEnv *env, jobject unsafe, jobject srcObj, jlong srcOffset, jobject dstObj, jlong dstOffset, jlong size)) {
  const size_t nbytes = (size_t)size;

  // Resolve each (base, offset) pair down to a raw address.
  void* from = index_oop_from_field_offset_long(JNIHandles::resolve(srcObj), srcOffset);
  void* to   = index_oop_from_field_offset_long(JNIHandles::resolve(dstObj), dstOffset);

  Copy::conjoint_memory_atomic(from, to, nbytes);
} UNSAFE_END
// These functions allow a null base pointer with an arbitrary address. // But if the base pointer is non-null, the offset should make some sense. // That is, it should be in the range [0, MAX_OBJECT_SIZE]. UNSAFE_ENTRY(jobject, Unsafe_GetObject(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) { oop p = JNIHandles::resolve(obj); oop v; if (UseCompressedOops) { narrowOop n = *(narrowOop*)index_oop_from_field_offset_long(p, offset); v = oopDesc::decode_heap_oop(n); } else { v = *(oop*)index_oop_from_field_offset_long(p, offset); } ensure_satb_referent_alive(p, offset, v); return JNIHandles::make_local(env, v); } UNSAFE_END
} UNSAFE_END

// Fills 'size' bytes starting at (obj, offset) with the byte 'value'.
UNSAFE_ENTRY(void, Unsafe_SetMemory0(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong size, jbyte value)) {
  // Resolve the (base, offset) pair to a raw start address, then fill.
  void* start = index_oop_from_field_offset_long(JNIHandles::resolve(obj), offset);
  Copy::fill_to_memory_atomic(start, (size_t)size, value);
} UNSAFE_END
// These functions allow a null base pointer with an arbitrary address. // But if the base pointer is non-null, the offset should make some sense. // That is, it should be in the range [0, MAX_OBJECT_SIZE]. UNSAFE_ENTRY(jobject, Unsafe_GetObject(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) { oop p = JNIHandles::resolve(obj); oop v; if (UseCompressedOops) { narrowOop n = *(narrowOop*)index_oop_from_field_offset_long(p, offset); v = oopDesc::decode_heap_oop(n); } else { v = *(oop*)index_oop_from_field_offset_long(p, offset); } jobject ret = JNIHandles::make_local(env, v); #if INCLUDE_ALL_GCS // We could be accessing the referent field in a reference // object. If G1 is enabled then we need to register non-null // referent with the SATB barrier. if (UseG1GC) { bool needs_barrier = false; if (ret != NULL) { if (offset == java_lang_ref_Reference::referent_offset && obj != NULL) { oop o = JNIHandles::resolve(obj); Klass* k = o->klass(); if (InstanceKlass::cast(k)->reference_type() != REF_NONE) { assert(InstanceKlass::cast(k)->is_subclass_of(SystemDictionary::Reference_klass()), "sanity"); needs_barrier = true; } } } if (needs_barrier) { oop referent = JNIHandles::resolve(ret); G1SATBCardTableModRefBS::enqueue(referent); } } #endif // INCLUDE_ALL_GCS return ret; } UNSAFE_END
} UNSAFE_END

// Volatile variant of Unsafe_PutObject: a release barrier before the
// store and a full fence after it give the store volatile semantics.
UNSAFE_ENTRY(void, Unsafe_PutObjectVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject x_h)) {
  oop x = JNIHandles::resolve(x_h);
  oop p = JNIHandles::resolve(obj);
  void* addr = index_oop_from_field_offset_long(p, offset);

  // Make prior writes visible before the reference store.
  OrderAccess::release();

  if (UseCompressedOops) {
    oop_store((narrowOop*)addr, x);
  } else {
    oop_store((oop*)addr, x);
  }

  // Full fence so the store is ordered with subsequent accesses.
  OrderAccess::fence();
} UNSAFE_END
} UNSAFE_END

// Volatile read of an object reference: the slot is read through a
// volatile lvalue and followed by an acquire barrier.
UNSAFE_ENTRY(jobject, Unsafe_GetObjectVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) {
  oop p = JNIHandles::resolve(obj);
  void* addr = index_oop_from_field_offset_long(p, offset);

  volatile oop v;

  if (UseCompressedOops) {
    volatile narrowOop n = *(volatile narrowOop*) addr;
    // NOTE(review): the (void)const_cast<oop&> wrapper appears to exist to
    // suppress compiler warnings about the volatile assignment expression
    // (same idiom as the other volatile getters in this file) — confirm.
    (void)const_cast<oop&>(v = oopDesc::decode_heap_oop(n));
  } else {
    (void)const_cast<oop&>(v = *(volatile oop*) addr);
  }

  // Acquire: later accesses may not be reordered before this read.
  OrderAccess::acquire();

  return JNIHandles::make_local(env, v);
} UNSAFE_END
} UNSAFE_END

// Volatile read of an object reference, with an extra leading fence on
// CPUs that are not multiple-copy atomic, plus the SATB referent barrier.
UNSAFE_ENTRY(jobject, Unsafe_GetObjectVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) {
  oop p = JNIHandles::resolve(obj);
  void* addr = index_oop_from_field_offset_long(p, offset);

  volatile oop v;

  // On non-multiple-copy-atomic platforms a full fence is required before
  // the volatile load (IRIW support).
  if (support_IRIW_for_not_multiple_copy_atomic_cpu) {
    OrderAccess::fence();
  }

  if (UseCompressedOops) {
    volatile narrowOop n = *(volatile narrowOop*) addr;
    // (void)const_cast<oop&> — volatile-assignment idiom used by the other
    // volatile getters in this file.
    (void)const_cast<oop&>(v = oopDesc::decode_heap_oop(n));
  } else {
    (void)const_cast<oop&>(v = *(volatile oop*) addr);
  }

  // If this read was of a Reference.referent field, register the value
  // with the SATB marking barrier so the collector keeps it alive.
  ensure_satb_referent_alive(p, offset, v);

  // Acquire: later accesses may not be reordered before this read.
  OrderAccess::acquire();

  return JNIHandles::make_local(env, v);
} UNSAFE_END
// Resolves and returns the address of the memory access void* addr() { return index_oop_from_field_offset_long(JNIHandles::resolve(_obj), _offset); }