Skip to content

Commit

Permalink
Merge pull request #2 from JornVernee/PR_async_close+ASYNC_EXCEPTION_CHECK
Browse files Browse the repository at this point in the history
  • Loading branch information
fisk authored Nov 24, 2023
2 parents 5d34ddb + 057a176 commit 0b91ac9
Show file tree
Hide file tree
Showing 3 changed files with 66 additions and 40 deletions.
71 changes: 39 additions & 32 deletions src/hotspot/share/prims/unsafe.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -70,17 +70,26 @@
( arrayOopDesc::header_size(T_DOUBLE) * HeapWordSize \
+ ((julong)max_jint * sizeof(double)) )

#define UNSAFE_ENTRY(result_type, header) \
JVM_ENTRY(static result_type, header)

#define UNSAFE_LEAF(result_type, header) \
JVM_LEAF(static result_type, header)

// Note that scoped accesses (cf. scopedMemoryAccess.cpp) can install
// an async handshake on the entry to an Unsafe method. When that happens,
// it is expected that we are not allowed to touch the underlying memory
// that might have gotten unmapped. Therefore, we check at the entry
// to unsafe functions, if we have such async exception conditions,
// and return immediately if that is the case.
#define UNSAFE_ENTRY(result_type, header) \
JVM_ENTRY(static result_type, header) if (JavaThread::current()->has_async_exception_condition()) {return (result_type)0;}

#define UNSAFE_LEAF(result_type, header) \
JVM_LEAF(static result_type, header) if (JavaThread::current()->has_async_exception_condition()) {return (result_type)0;}
// We also use NoSafepointVerifier to block potential safepoints.
// It would be problematic if an async exception handshake were installed later on
// during another safepoint in the function, but before the memory access happens,
// as the memory will be freed after the handshake is installed.
#define UNSAFE_ENTRY_SCOPED(result_type, header) \
JVM_ENTRY(static result_type, header) \
if (thread->has_async_exception_condition()) {return (result_type)0;} \
NoSafepointVerifier nsv;

#define UNSAFE_END JVM_END

Expand Down Expand Up @@ -284,11 +293,11 @@ UNSAFE_ENTRY(jobject, Unsafe_GetUncompressedObject(JNIEnv *env, jobject unsafe,

#define DEFINE_GETSETOOP(java_type, Type) \
\
UNSAFE_ENTRY(java_type, Unsafe_Get##Type(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) { \
UNSAFE_ENTRY_SCOPED(java_type, Unsafe_Get##Type(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) { \
return MemoryAccess<java_type>(thread, obj, offset).get(); \
} UNSAFE_END \
\
UNSAFE_ENTRY(void, Unsafe_Put##Type(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, java_type x)) { \
UNSAFE_ENTRY_SCOPED(void, Unsafe_Put##Type(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, java_type x)) { \
MemoryAccess<java_type>(thread, obj, offset).put(x); \
} UNSAFE_END \
\
Expand All @@ -307,11 +316,11 @@ DEFINE_GETSETOOP(jdouble, Double);

#define DEFINE_GETSETOOP_VOLATILE(java_type, Type) \
\
UNSAFE_ENTRY(java_type, Unsafe_Get##Type##Volatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) { \
UNSAFE_ENTRY_SCOPED(java_type, Unsafe_Get##Type##Volatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) { \
return MemoryAccess<java_type>(thread, obj, offset).get_volatile(); \
} UNSAFE_END \
\
UNSAFE_ENTRY(void, Unsafe_Put##Type##Volatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, java_type x)) { \
UNSAFE_ENTRY_SCOPED(void, Unsafe_Put##Type##Volatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, java_type x)) { \
MemoryAccess<java_type>(thread, obj, offset).put_volatile(x); \
} UNSAFE_END \
\
Expand Down Expand Up @@ -367,7 +376,7 @@ UNSAFE_LEAF(void, Unsafe_FreeMemory0(JNIEnv *env, jobject unsafe, jlong addr)) {
os::free(p);
} UNSAFE_END

UNSAFE_ENTRY(void, Unsafe_SetMemory0(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong size, jbyte value)) {
UNSAFE_ENTRY_SCOPED(void, Unsafe_SetMemory0(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong size, jbyte value)) {
size_t sz = (size_t)size;

oop base = JNIHandles::resolve(obj);
Expand All @@ -376,7 +385,7 @@ UNSAFE_ENTRY(void, Unsafe_SetMemory0(JNIEnv *env, jobject unsafe, jobject obj, j
Copy::fill_to_memory_atomic(p, sz, value);
} UNSAFE_END

UNSAFE_ENTRY(void, Unsafe_CopyMemory0(JNIEnv *env, jobject unsafe, jobject srcObj, jlong srcOffset, jobject dstObj, jlong dstOffset, jlong size)) {
UNSAFE_ENTRY_SCOPED(void, Unsafe_CopyMemory0(JNIEnv *env, jobject unsafe, jobject srcObj, jlong srcOffset, jobject dstObj, jlong dstOffset, jlong size)) {
size_t sz = (size_t)size;

oop srcp = JNIHandles::resolve(srcObj);
Expand All @@ -395,11 +404,7 @@ UNSAFE_ENTRY(void, Unsafe_CopyMemory0(JNIEnv *env, jobject unsafe, jobject srcOb
}
} UNSAFE_END

// This function is a leaf since if the source and destination are both in native memory
// the copy may potentially be very large, and we don't want to disable GC if we can avoid it.
// If either source or destination (or both) are on the heap, the function will enter VM using
// JVM_ENTRY_FROM_LEAF
UNSAFE_LEAF(void, Unsafe_CopySwapMemory0(JNIEnv *env, jobject unsafe, jobject srcObj, jlong srcOffset, jobject dstObj, jlong dstOffset, jlong size, jlong elemSize)) {
UNSAFE_ENTRY_SCOPED(void, Unsafe_CopySwapMemory0(JNIEnv *env, jobject unsafe, jobject srcObj, jlong srcOffset, jobject dstObj, jlong dstOffset, jlong size, jlong elemSize)) {
size_t sz = (size_t)size;
size_t esz = (size_t)elemSize;

Expand All @@ -409,25 +414,27 @@ UNSAFE_LEAF(void, Unsafe_CopySwapMemory0(JNIEnv *env, jobject unsafe, jobject sr
address dst = (address)dstOffset;

{
JavaThread* thread = JavaThread::thread_from_jni_environment(env);
GuardUnsafeAccess guard(thread);
// Transitioning to native state below checks NSV, but doesn't actually do a safepoint poll.
// So, this is safe to ignore, as no async exception handshake can actually be installed.
PauseNoSafepointVerifier pnsv(&nsv);
// Transition to native state. Since the source and destination are both in native memory
// the copy may potentially be very large, and we don't want to disable GC if we can avoid it.
ThreadToNativeFromVM ttn(thread);
Copy::conjoint_swap(src, dst, sz, esz);
}
} else {
// At least one of src/dst are on heap, transition to VM to access raw pointers
oop srcp = JNIHandles::resolve(srcObj);
oop dstp = JNIHandles::resolve(dstObj);

JVM_ENTRY_FROM_LEAF(env, void, Unsafe_CopySwapMemory0) {
oop srcp = JNIHandles::resolve(srcObj);
oop dstp = JNIHandles::resolve(dstObj);
address src = (address)index_oop_from_field_offset_long(srcp, srcOffset);
address dst = (address)index_oop_from_field_offset_long(dstp, dstOffset);

address src = (address)index_oop_from_field_offset_long(srcp, srcOffset);
address dst = (address)index_oop_from_field_offset_long(dstp, dstOffset);

{
GuardUnsafeAccess guard(thread);
Copy::conjoint_swap(src, dst, sz, esz);
}
} JVM_END
{
GuardUnsafeAccess guard(thread);
Copy::conjoint_swap(src, dst, sz, esz);
}
}
} UNSAFE_END

Expand Down Expand Up @@ -723,13 +730,13 @@ UNSAFE_ENTRY(jobject, Unsafe_CompareAndExchangeReference(JNIEnv *env, jobject un
return JNIHandles::make_local(THREAD, res);
} UNSAFE_END

UNSAFE_ENTRY(jint, Unsafe_CompareAndExchangeInt(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jint e, jint x)) {
// Atomic compare-and-exchange on a 32-bit slot: if the jint at (obj, offset)
// equals e, replace it with x; returns the value observed before the attempt
// (equal to e iff the exchange succeeded). Declared UNSAFE_ENTRY_SCOPED so the
// entry bails out early when an async exception condition is pending and runs
// under a NoSafepointVerifier — see the comment at the macro definition; this
// protects scoped memory accesses whose backing memory may be unmapped.
UNSAFE_ENTRY_SCOPED(jint, Unsafe_CompareAndExchangeInt(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jint e, jint x)) {
// Resolve the base object (null handle means an absolute/off-heap address),
// then form the raw address of the field from the byte offset.
oop p = JNIHandles::resolve(obj);
volatile jint* addr = (volatile jint*)index_oop_from_field_offset_long(p, offset);
return Atomic::cmpxchg(addr, e, x);
} UNSAFE_END

UNSAFE_ENTRY(jlong, Unsafe_CompareAndExchangeLong(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong e, jlong x)) {
UNSAFE_ENTRY_SCOPED(jlong, Unsafe_CompareAndExchangeLong(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong e, jlong x)) {
oop p = JNIHandles::resolve(obj);
volatile jlong* addr = (volatile jlong*)index_oop_from_field_offset_long(p, offset);
return Atomic::cmpxchg(addr, e, x);
Expand All @@ -744,13 +751,13 @@ UNSAFE_ENTRY(jboolean, Unsafe_CompareAndSetReference(JNIEnv *env, jobject unsafe
return ret == e;
} UNSAFE_END

UNSAFE_ENTRY(jboolean, Unsafe_CompareAndSetInt(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jint e, jint x)) {
// Atomic compare-and-set on a 32-bit slot: if the jint at (obj, offset)
// equals e, replace it with x; returns true iff the swap took effect
// (i.e. the previously observed value was e). Declared UNSAFE_ENTRY_SCOPED
// so the entry bails out early when an async exception condition is pending
// and runs under a NoSafepointVerifier — see the comment at the macro
// definition; this protects scoped memory whose backing may be unmapped.
UNSAFE_ENTRY_SCOPED(jboolean, Unsafe_CompareAndSetInt(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jint e, jint x)) {
// Resolve the base object (null handle means an absolute/off-heap address),
// then form the raw address of the field from the byte offset.
oop p = JNIHandles::resolve(obj);
volatile jint* addr = (volatile jint*)index_oop_from_field_offset_long(p, offset);
return Atomic::cmpxchg(addr, e, x) == e;
} UNSAFE_END

UNSAFE_ENTRY(jboolean, Unsafe_CompareAndSetLong(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong e, jlong x)) {
UNSAFE_ENTRY_SCOPED(jboolean, Unsafe_CompareAndSetLong(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong e, jlong x)) {
oop p = JNIHandles::resolve(obj);
volatile jlong* addr = (volatile jlong*)index_oop_from_field_offset_long(p, offset);
return Atomic::cmpxchg(addr, e, x) == e;
Expand Down
8 changes: 0 additions & 8 deletions src/hotspot/share/runtime/interfaceSupport.inline.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -406,14 +406,6 @@ extern "C" { \
VM_LEAF_BASE(result_type, header)


#define JVM_ENTRY_FROM_LEAF(env, result_type, header) \
{ { \
JavaThread* thread=JavaThread::thread_from_jni_environment(env); \
ThreadInVMfromNative __tiv(thread); \
debug_only(VMNativeEntryWrapper __vew;) \
VM_ENTRY_BASE_FROM_LEAF(result_type, header, thread)


#define JVM_END } }

#endif // SHARE_RUNTIME_INTERFACESUPPORT_INLINE_HPP
27 changes: 27 additions & 0 deletions test/jdk/java/foreign/TestHandshake.java
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@

import java.lang.foreign.Arena;
import java.lang.foreign.MemorySegment;
import java.lang.foreign.ValueLayout;
import java.lang.invoke.MethodHandles;
import java.lang.invoke.VarHandle;
import java.nio.ByteBuffer;
Expand All @@ -48,6 +49,7 @@
import org.testng.annotations.Test;

import static java.lang.foreign.ValueLayout.JAVA_BYTE;
import static java.lang.foreign.ValueLayout.JAVA_INT;
import static org.testng.Assert.*;

public class TestHandshake {
Expand Down Expand Up @@ -177,6 +179,30 @@ public void doAccess() {
}
}

static class SegmentSwappyCopyAccessor extends AbstractSegmentAccessor {

    MemorySegment first, second;
    ValueLayout sourceLayout, destLayout;
    long count;

    /**
     * Splits the segment into two halves and prepares a byte-swapping
     * element copy between them: little-endian ints read from the first
     * half, big-endian ints written to the second half. The element count
     * is clamped so neither slice is overrun.
     */
    SegmentSwappyCopyAccessor(int id, MemorySegment segment, Arena _unused) {
        super(id, segment);
        long half = segment.byteSize() / 2;
        first = segment.asSlice(0, half);
        second = segment.asSlice(half);
        sourceLayout = JAVA_INT.withOrder(ByteOrder.LITTLE_ENDIAN);
        destLayout = JAVA_INT.withOrder(ByteOrder.BIG_ENDIAN);
        long fitsInFirst = first.byteSize() / sourceLayout.byteSize();
        long fitsInSecond = second.byteSize() / destLayout.byteSize();
        count = Math.min(fitsInFirst, fitsInSecond);
    }

    @Override
    public void doAccess() {
        // Mismatched byte orders force the swapping copy path, so this
        // exercises it while the segment may be closed concurrently.
        MemorySegment.copy(first, sourceLayout, 0L, second, destLayout, 0L, count);
    }
}

static class SegmentFillAccessor extends AbstractSegmentAccessor {

SegmentFillAccessor(int id, MemorySegment segment, Arena _unused) {
Expand Down Expand Up @@ -264,6 +290,7 @@ static Object[][] accessors() {
return new Object[][] {
{ "SegmentAccessor", (AccessorFactory)SegmentAccessor::new },
{ "SegmentCopyAccessor", (AccessorFactory)SegmentCopyAccessor::new },
{ "SegmentSwappyCopyAccessor", (AccessorFactory)SegmentSwappyCopyAccessor::new },
{ "SegmentMismatchAccessor", (AccessorFactory)SegmentMismatchAccessor::new },
{ "SegmentFillAccessor", (AccessorFactory)SegmentFillAccessor::new },
{ "BufferAccessor", (AccessorFactory)BufferAccessor::new },
Expand Down

0 comments on commit 0b91ac9

Please sign in to comment.