8331573: Rename CollectedHeap::is_gc_active to be explicitly about STW GCs
Reviewed-by: stefank, zgu, tschatzl, gli
commit 1eec30a6c0 (parent e8a2d5669c)
Changed files (src/hotspot/share):
  gc/g1:      g1CollectedHeap.cpp, g1CollectedHeap.hpp, g1CollectedHeap.inline.hpp,
              g1FullGCScope.hpp, g1RemSet.cpp, g1VMOperations.cpp
  gc/parallel, gc/serial
  gc/shared:  collectedHeap.cpp, collectedHeap.hpp, isGCActiveMark.cpp,
              isGCActiveMark.hpp, memAllocator.cpp, vmStructs_gc.hpp
  gc/shenandoah, gc/x, gc/z
  jvmci, memory, oops, prims, runtime
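
The change is a mechanical rename that makes every call site explicit that the flag tracks stop-the-world (STW) pauses only. All hunks below follow this map, taken from the patch itself:

  CollectedHeap::_is_gc_active   ->  CollectedHeap::_is_stw_gc_active
  CollectedHeap::is_gc_active()  ->  CollectedHeap::is_stw_gc_active()
  Universe::is_gc_active()       ->  Universe::is_stw_gc_active()
  IsGCActiveMark                 ->  IsSTWGCActiveMark
  DisableIsGCActiveMark          ->  DisableIsSTWGCActiveMark

A few assert messages are reworded alongside ("during a GC" becomes "during a GC pause"), so the flag is no longer read as covering the concurrent phases of a concurrent collector.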

src/hotspot/share/gc/g1/g1CollectedHeap.cpp:

@@ -2399,7 +2399,7 @@ void G1CollectedHeap::expand_heap_after_young_collection(){
 
 bool G1CollectedHeap::do_collection_pause_at_safepoint() {
   assert_at_safepoint_on_vm_thread();
-  guarantee(!is_gc_active(), "collection is not reentrant");
+  guarantee(!is_stw_gc_active(), "collection is not reentrant");
 
   do_collection_pause_at_safepoint_helper();
   return true;

@@ -2469,7 +2469,7 @@ void G1CollectedHeap::flush_region_pin_cache() {
 void G1CollectedHeap::do_collection_pause_at_safepoint_helper() {
   ResourceMark rm;
 
-  IsGCActiveMark active_gc_mark;
+  IsSTWGCActiveMark active_gc_mark;
   GCIdMark gc_id_mark;
   SvcGCMarker sgcm(SvcGCMarker::MINOR);
 

src/hotspot/share/gc/g1/g1CollectedHeap.hpp:

@@ -749,7 +749,7 @@ private:
   // false if unable to do the collection due to the GC locker being
   // active, true otherwise.
   // precondition: at safepoint on VM thread
-  // precondition: !is_gc_active()
+  // precondition: !is_stw_gc_active()
   bool do_collection_pause_at_safepoint();
 
   // Helper for do_collection_pause_at_safepoint, containing the guts

src/hotspot/share/gc/g1/g1CollectedHeap.inline.hpp:

@@ -265,7 +265,7 @@ inline bool G1CollectedHeap::is_obj_dead(const oop obj, const HeapRegion* hr) const {
 
 inline void G1CollectedHeap::pin_object(JavaThread* thread, oop obj) {
   assert(obj != nullptr, "obj must not be null");
-  assert(!is_gc_active(), "must not pin objects during a GC");
+  assert(!is_stw_gc_active(), "must not pin objects during a GC pause");
   assert(obj->is_typeArray(), "must be typeArray");
 
   uint obj_region_idx = heap_region_containing(obj)->hrm_index();

@@ -274,7 +274,7 @@ inline void G1CollectedHeap::pin_object(JavaThread* thread, oop obj) {
 
 inline void G1CollectedHeap::unpin_object(JavaThread* thread, oop obj) {
   assert(obj != nullptr, "obj must not be null");
-  assert(!is_gc_active(), "must not unpin objects during a GC");
+  assert(!is_stw_gc_active(), "must not unpin objects during a GC pause");
 
   uint obj_region_idx = heap_region_containing(obj)->hrm_index();
   G1ThreadLocalData::pin_count_cache(thread).dec_count(obj_region_idx);

src/hotspot/share/gc/g1/g1FullGCScope.hpp:

@@ -52,7 +52,7 @@ class G1FullGCScope : public StackObj {
   SvcGCMarker _svc_marker;
   STWGCTimer _timer;
   G1FullGCTracer* _tracer;
-  IsGCActiveMark _active;
+  IsSTWGCActiveMark _active;
   G1FullGCJFRTracerMark _tracer_mark;
   ClearedAllSoftRefs _soft_refs;
   G1FullGCMonitoringScope _monitoring_scope;

src/hotspot/share/gc/g1/g1RemSet.cpp:

@@ -1564,7 +1564,7 @@ bool G1RemSet::clean_card_before_refine(CardValue** const card_ptr_addr) {
 
 void G1RemSet::refine_card_concurrently(CardValue* const card_ptr,
                                         const uint worker_id) {
-  assert(!_g1h->is_gc_active(), "Only call concurrently");
+  assert(!_g1h->is_stw_gc_active(), "Only call concurrently");
   check_card_ptr(card_ptr, _ct);
 
   // Construct the MemRegion representing the card.

src/hotspot/share/gc/g1/g1VMOperations.cpp:

@@ -146,7 +146,7 @@ void VM_G1PauseConcurrent::doit() {
 
   G1ConcGCMonitoringScope monitoring_scope(g1h->monitoring_support());
   SvcGCMarker sgcm(SvcGCMarker::CONCURRENT);
-  IsGCActiveMark x;
+  IsSTWGCActiveMark x;
 
   work();
 }

src/hotspot/share/gc/parallel/parallelScavengeHeap.cpp:

@@ -459,7 +459,7 @@ void ParallelScavengeHeap::do_full_collection(bool clear_all_soft_refs) {
 HeapWord* ParallelScavengeHeap::failed_mem_allocate(size_t size) {
   assert(SafepointSynchronize::is_at_safepoint(), "should be at safepoint");
   assert(Thread::current() == (Thread*)VMThread::vm_thread(), "should be in vm thread");
-  assert(!is_gc_active(), "not reentrant");
+  assert(!is_stw_gc_active(), "not reentrant");
   assert(!Heap_lock->owned_by_self(), "this thread should not own the Heap_lock");
 
   // We assume that allocation in eden will fail unless we collect.

src/hotspot/share/gc/parallel/psParallelCompact.cpp:

@@ -1269,9 +1269,9 @@ bool PSParallelCompact::invoke(bool maximum_heap_compaction) {
          "should be in vm thread");
 
   ParallelScavengeHeap* heap = ParallelScavengeHeap::heap();
-  assert(!heap->is_gc_active(), "not reentrant");
+  assert(!heap->is_stw_gc_active(), "not reentrant");
 
-  IsGCActiveMark mark;
+  IsSTWGCActiveMark mark;
 
   const bool clear_all_soft_refs =
     heap->soft_ref_policy()->should_clear_all_soft_refs();

@@ -1492,7 +1492,7 @@ private:
 public:
   PCAddThreadRootsMarkingTaskClosure(uint worker_id) : _worker_id(worker_id) { }
   void do_thread(Thread* thread) {
-    assert(ParallelScavengeHeap::heap()->is_gc_active(), "called outside gc");
+    assert(ParallelScavengeHeap::heap()->is_stw_gc_active(), "called outside gc");
 
     ResourceMark rm;
 

@@ -1509,7 +1509,7 @@ public:
 };
 
 void steal_marking_work(TaskTerminator& terminator, uint worker_id) {
-  assert(ParallelScavengeHeap::heap()->is_gc_active(), "called outside gc");
+  assert(ParallelScavengeHeap::heap()->is_stw_gc_active(), "called outside gc");
 
   ParCompactionManager* cm =
     ParCompactionManager::gc_thread_compaction_manager(worker_id);

@@ -1986,7 +1986,7 @@ void PSParallelCompact::write_block_fill_histogram()
 #endif // #ifdef ASSERT
 
 static void compaction_with_stealing_work(TaskTerminator* terminator, uint worker_id) {
-  assert(ParallelScavengeHeap::heap()->is_gc_active(), "called outside gc");
+  assert(ParallelScavengeHeap::heap()->is_stw_gc_active(), "called outside gc");
 
   ParCompactionManager* cm =
     ParCompactionManager::gc_thread_compaction_manager(worker_id);

src/hotspot/share/gc/parallel/psScavenge.cpp:

@@ -84,7 +84,7 @@ ParallelScavengeTracer PSScavenge::_gc_tracer;
 CollectorCounters* PSScavenge::_counters = nullptr;
 
 static void scavenge_roots_work(ParallelRootType::Value root_type, uint worker_id) {
-  assert(ParallelScavengeHeap::heap()->is_gc_active(), "called outside gc");
+  assert(ParallelScavengeHeap::heap()->is_stw_gc_active(), "called outside gc");
 
   PSPromotionManager* pm = PSPromotionManager::gc_thread_promotion_manager(worker_id);
   PSPromoteRootsClosure roots_to_old_closure(pm);

@@ -115,7 +115,7 @@ static void scavenge_roots_work(ParallelRootType::Value root_type, uint worker_id) {
 }
 
 static void steal_work(TaskTerminator& terminator, uint worker_id) {
-  assert(ParallelScavengeHeap::heap()->is_gc_active(), "called outside gc");
+  assert(ParallelScavengeHeap::heap()->is_stw_gc_active(), "called outside gc");
 
   PSPromotionManager* pm =
     PSPromotionManager::gc_thread_promotion_manager(worker_id);

@@ -232,11 +232,11 @@ public:
 bool PSScavenge::invoke() {
   assert(SafepointSynchronize::is_at_safepoint(), "should be at safepoint");
   assert(Thread::current() == (Thread*)VMThread::vm_thread(), "should be in vm thread");
-  assert(!ParallelScavengeHeap::heap()->is_gc_active(), "not reentrant");
+  assert(!ParallelScavengeHeap::heap()->is_stw_gc_active(), "not reentrant");
 
   ParallelScavengeHeap* const heap = ParallelScavengeHeap::heap();
   PSAdaptiveSizePolicy* policy = heap->size_policy();
-  IsGCActiveMark mark;
+  IsSTWGCActiveMark mark;
 
   const bool scavenge_done = PSScavenge::invoke_no_policy();
   const bool need_full_gc = !scavenge_done;

@@ -264,7 +264,7 @@ class PSThreadRootsTaskClosure : public ThreadClosure {
 public:
   PSThreadRootsTaskClosure(uint worker_id) : _worker_id(worker_id) { }
   virtual void do_thread(Thread* thread) {
-    assert(ParallelScavengeHeap::heap()->is_gc_active(), "called outside gc");
+    assert(ParallelScavengeHeap::heap()->is_stw_gc_active(), "called outside gc");
 
     PSPromotionManager* pm = PSPromotionManager::gc_thread_promotion_manager(_worker_id);
     PSScavengeRootsClosure roots_closure(pm);

src/hotspot/share/gc/serial/serialHeap.cpp:

@@ -484,7 +484,7 @@ void SerialHeap::do_collection(bool full,
   assert(my_thread->is_VM_thread(), "only VM thread");
   assert(Heap_lock->is_locked(),
          "the requesting thread should have the Heap_lock");
-  guarantee(!is_gc_active(), "collection is not reentrant");
+  guarantee(!is_stw_gc_active(), "collection is not reentrant");
 
   if (GCLocker::check_active_before_gc()) {
     return; // GC is disabled (e.g. JNI GetXXXCritical operation)

@@ -495,7 +495,7 @@ void SerialHeap::do_collection(bool full,
 
   ClearedAllSoftRefs casr(do_clear_all_soft_refs, soft_ref_policy());
 
-  IsGCActiveMark active_gc_mark;
+  IsSTWGCActiveMark active_gc_mark;
 
   bool complete = full && (max_generation == OldGen);
   bool old_collects_young = complete;

src/hotspot/share/gc/shared/collectedHeap.cpp:

@@ -243,7 +243,7 @@ CollectedHeap::CollectedHeap() :
   _capacity_at_last_gc(0),
   _used_at_last_gc(0),
   _soft_ref_policy(),
-  _is_gc_active(false),
+  _is_stw_gc_active(false),
   _last_whole_heap_examined_time_ns(os::javaTimeNanos()),
   _total_collections(0),
   _total_full_collections(0),

src/hotspot/share/gc/shared/collectedHeap.hpp:

@@ -90,8 +90,8 @@ public:
 class CollectedHeap : public CHeapObj<mtGC> {
   friend class VMStructs;
   friend class JVMCIVMStructs;
-  friend class IsGCActiveMark; // Block structured external access to _is_gc_active
-  friend class DisableIsGCActiveMark; // Disable current IsGCActiveMark
+  friend class IsSTWGCActiveMark; // Block structured external access to _is_stw_gc_active
+  friend class DisableIsSTWGCActiveMark; // Disable current IsSTWGCActiveMark
   friend class MemAllocator;
   friend class ParallelObjectIterator;
 

@@ -112,7 +112,7 @@ class CollectedHeap : public CHeapObj<mtGC> {
   // Not used by all GCs
   MemRegion _reserved;
 
-  bool _is_gc_active;
+  bool _is_stw_gc_active;
 
   // (Minimum) Alignment reserve for TLABs and PLABs.
   static size_t _lab_alignment_reserve;

@@ -374,10 +374,8 @@ protected:
   // allocated object.
   virtual bool requires_barriers(stackChunkOop obj) const = 0;
 
-  // Returns "true" iff there is a stop-world GC in progress. (I assume
-  // that it should answer "false" for the concurrent part of a concurrent
-  // collector -- dld).
-  bool is_gc_active() const { return _is_gc_active; }
+  // Returns "true" iff there is a stop-world GC in progress.
+  bool is_stw_gc_active() const { return _is_stw_gc_active; }
 
   // Total number of GC collections (started)
   unsigned int total_collections() const { return _total_collections; }
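
The rewritten accessor comment drops the old tentative parenthetical ("I assume ... -- dld") and states the contract directly: the flag covers only stop-the-world pauses. That distinction is what the rename is after; as a hypothetical timeline for a concurrent collector (illustrative only, not from the patch):

  // mutators running                  -> is_stw_gc_active() == false
  // STW pause (e.g. init-mark/remark) -> is_stw_gc_active() == true
  // concurrent mark/relocate phase    -> is_stw_gc_active() == false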

src/hotspot/share/gc/shared/isGCActiveMark.cpp:

@@ -29,28 +29,28 @@
 #include "utilities/debug.hpp"
 
 // This class provides a method for block structured setting of the
-// _is_gc_active state without requiring accessors in CollectedHeap
+// _is_stw_gc_active state without requiring accessors in CollectedHeap
 
-IsGCActiveMark::IsGCActiveMark() {
+IsSTWGCActiveMark::IsSTWGCActiveMark() {
   CollectedHeap* heap = Universe::heap();
-  assert(!heap->is_gc_active(), "Not reentrant");
-  heap->_is_gc_active = true;
+  assert(!heap->is_stw_gc_active(), "Not reentrant");
+  heap->_is_stw_gc_active = true;
 }
 
-IsGCActiveMark::~IsGCActiveMark() {
+IsSTWGCActiveMark::~IsSTWGCActiveMark() {
   CollectedHeap* heap = Universe::heap();
-  assert(heap->is_gc_active(), "Sanity");
-  heap->_is_gc_active = false;
+  assert(heap->is_stw_gc_active(), "Sanity");
+  heap->_is_stw_gc_active = false;
 }
 
-DisableIsGCActiveMark::DisableIsGCActiveMark() {
+DisableIsSTWGCActiveMark::DisableIsSTWGCActiveMark() {
   CollectedHeap* heap = Universe::heap();
-  assert(heap->is_gc_active(), "Not reentrant");
-  heap->_is_gc_active = false;
+  assert(heap->is_stw_gc_active(), "Not reentrant");
+  heap->_is_stw_gc_active = false;
 }
 
-DisableIsGCActiveMark::~DisableIsGCActiveMark() {
+DisableIsSTWGCActiveMark::~DisableIsSTWGCActiveMark() {
   CollectedHeap* heap = Universe::heap();
-  assert(!heap->is_gc_active(), "Sanity");
-  heap->_is_gc_active = true;
+  assert(!heap->is_stw_gc_active(), "Sanity");
+  heap->_is_stw_gc_active = true;
 }
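
Both marks are plain RAII stack objects over the same flag: the mark sets it for a scope, and the Disable mark temporarily clears it inside such a scope. A self-contained toy model of the protocol they enforce (a sketch for illustration only, not HotSpot code; Heap and g_heap stand in for CollectedHeap and Universe::heap()):

  #include <cassert>

  struct Heap {
    bool is_stw_gc_active = false;
  };

  Heap g_heap;  // stand-in for Universe::heap()

  class IsSTWGCActiveMark {
  public:
    IsSTWGCActiveMark()  { assert(!g_heap.is_stw_gc_active); g_heap.is_stw_gc_active = true;  }
    ~IsSTWGCActiveMark() { assert(g_heap.is_stw_gc_active);  g_heap.is_stw_gc_active = false; }
  };

  class DisableIsSTWGCActiveMark {
  public:
    DisableIsSTWGCActiveMark()  { assert(g_heap.is_stw_gc_active);  g_heap.is_stw_gc_active = false; }
    ~DisableIsSTWGCActiveMark() { assert(!g_heap.is_stw_gc_active); g_heap.is_stw_gc_active = true;  }
  };

  int main() {
    {
      IsSTWGCActiveMark pause;            // pause begins: flag set
      {
        DisableIsSTWGCActiveMark window;  // flag temporarily cleared
        assert(!g_heap.is_stw_gc_active);
      }                                   // flag restored
      assert(g_heap.is_stw_gc_active);
    }                                     // pause ends: flag cleared
    assert(!g_heap.is_stw_gc_active);
    return 0;
  }

The non-reentrancy asserts mirror the ones above: taking the mark twice, or disabling it outside a pause, trips immediately in debug builds.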

src/hotspot/share/gc/shared/isGCActiveMark.hpp:

@@ -28,18 +28,18 @@
 #include "memory/allocation.hpp"
 
 // This class provides a method for block structured setting of the
-// _is_gc_active state without requiring accessors in CollectedHeap
+// _is_stw_gc_active state without requiring accessors in CollectedHeap
 
-class IsGCActiveMark : public StackObj {
+class IsSTWGCActiveMark : public StackObj {
 public:
-  IsGCActiveMark();
-  ~IsGCActiveMark();
+  IsSTWGCActiveMark();
+  ~IsSTWGCActiveMark();
 };
 
-class DisableIsGCActiveMark : public StackObj {
+class DisableIsSTWGCActiveMark : public StackObj {
 public:
-  DisableIsGCActiveMark();
-  ~DisableIsGCActiveMark();
+  DisableIsSTWGCActiveMark();
+  ~DisableIsSTWGCActiveMark();
 };
 
 #endif // SHARE_GC_SHARED_ISGCACTIVEMARK_HPP

src/hotspot/share/gc/shared/memAllocator.cpp:

@@ -147,7 +147,7 @@ void MemAllocator::Allocation::verify_before() {
   JavaThread* THREAD = _thread; // For exception macros.
   assert(!HAS_PENDING_EXCEPTION, "Should not allocate with exception pending");
   debug_only(check_for_valid_allocation_state());
-  assert(!Universe::heap()->is_gc_active(), "Allocation during gc not allowed");
+  assert(!Universe::heap()->is_stw_gc_active(), "Allocation during GC pause not allowed");
 }
 
 #ifdef ASSERT

src/hotspot/share/gc/shared/vmStructs_gc.hpp:

@@ -92,7 +92,7 @@
   nonstatic_field(CardTableBarrierSet,  _card_table,        CardTable*)    \
                                                                            \
   nonstatic_field(CollectedHeap,        _reserved,          MemRegion)     \
-  nonstatic_field(CollectedHeap,        _is_gc_active,      bool)          \
+  nonstatic_field(CollectedHeap,        _is_stw_gc_active,  bool)          \
   nonstatic_field(CollectedHeap,        _total_collections, unsigned int)  \
                                                                            \
   nonstatic_field(ContiguousSpace,      _bottom,            HeapWord*)     \

src/hotspot/share/gc/shenandoah/shenandoahUtils.hpp:

@@ -132,7 +132,7 @@ private:
   ShenandoahHeap* const _heap;
   const GCIdMark _gc_id_mark;
   const SvcGCMarker _svc_gc_mark;
-  const IsGCActiveMark _is_gc_active_mark;
+  const IsSTWGCActiveMark _is_gc_active_mark;
   TraceMemoryManagerStats _trace_pause;
 
 public:

src/hotspot/share/gc/x/xDriver.cpp:

@@ -116,7 +116,7 @@ public:
 
     // Setup GC id and active marker
     GCIdMark gc_id_mark(_gc_id);
-    IsGCActiveMark gc_active_mark;
+    IsSTWGCActiveMark gc_active_mark;
 
     // Verify before operation
     XVerify::before_zoperation();

src/hotspot/share/gc/z/zDriver.cpp:

@@ -438,7 +438,7 @@ public:
   virtual void doit() {
     // Setup GC id and active marker
     GCIdMark gc_id_mark(_gc_id);
-    IsGCActiveMark gc_active_mark;
+    IsSTWGCActiveMark gc_active_mark;
 
     // Verify before operation
     ZVerify::before_zoperation();

src/hotspot/share/gc/z/zVerify.cpp:

@@ -484,7 +484,7 @@ void ZVerify::after_mark() {
   }
   if (ZVerifyObjects) {
     // Workaround OopMapCacheAlloc_lock reordering with the StackWatermark_lock
-    DisableIsGCActiveMark mark;
+    DisableIsSTWGCActiveMark mark;
 
     objects(false /* verify_weaks */);
     guarantee(zverify_broken_object == zaddress::null, "Verification failed");
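
This is the only DisableIsSTWGCActiveMark use visible in the patch. Judging from the comment and the method.cpp hunk further down, clearing the flag for the verification scope makes Method::mask_for take the non-GC branch (OopMapCache::compute_one_oop_map) instead of the OopMapCache path, avoiding OopMapCacheAlloc_lock in an order that would conflict with StackWatermark_lock.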

src/hotspot/share/jvmci/jvmciRuntime.cpp:

@@ -879,7 +879,7 @@ static OopStorage* object_handles() {
 }
 
 jlong JVMCIRuntime::make_oop_handle(const Handle& obj) {
-  assert(!Universe::heap()->is_gc_active(), "can't extend the root set during GC");
+  assert(!Universe::heap()->is_stw_gc_active(), "can't extend the root set during GC pause");
   assert(oopDesc::is_oop(obj()), "not an oop");
 
   oop* ptr = OopHandle(object_handles(), obj()).ptr_raw();

src/hotspot/share/memory/universe.cpp:

@@ -1342,8 +1342,8 @@ bool Universe::release_fullgc_alot_dummy() {
   return true;
 }
 
-bool Universe::is_gc_active() {
-  return heap()->is_gc_active();
+bool Universe::is_stw_gc_active() {
+  return heap()->is_stw_gc_active();
 }
 
 bool Universe::is_in_heap(const void* p) {

src/hotspot/share/memory/universe.hpp:

@@ -295,7 +295,7 @@ class Universe: AllStatic {
   // The particular choice of collected heap.
   static CollectedHeap* heap() { return _collectedHeap; }
 
-  DEBUG_ONLY(static bool is_gc_active();)
+  DEBUG_ONLY(static bool is_stw_gc_active();)
   DEBUG_ONLY(static bool is_in_heap(const void* p);)
   DEBUG_ONLY(static bool is_in_heap_or_null(const void* p) { return p == nullptr || is_in_heap(p); })
 
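
Universe::is_stw_gc_active() stays wrapped in DEBUG_ONLY, so it exists only in debug builds and is meant for assertions rather than product logic. A hypothetical use (illustrative, not from the patch):

  assert(!Universe::is_stw_gc_active(), "must not run inside a GC pause");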

src/hotspot/share/oops/method.cpp:

@@ -312,7 +312,7 @@ void Method::mask_for(int bci, InterpreterOopMap* mask) {
   methodHandle h_this(Thread::current(), this);
   // Only GC uses the OopMapCache during thread stack root scanning
   // any other uses generate an oopmap but do not save it in the cache.
-  if (Universe::heap()->is_gc_active()) {
+  if (Universe::heap()->is_stw_gc_active()) {
     method_holder()->mask_for(h_this, bci, mask);
   } else {
     OopMapCache::compute_one_oop_map(h_this, bci, mask);

src/hotspot/share/oops/oop.cpp:

@@ -226,6 +226,6 @@ bool oopDesc::size_might_change() {
   // the grey portion of an already copied array. This will cause the first
   // disjunct below to fail if the two comparands are computed across such
   // a concurrent change.
-  return Universe::heap()->is_gc_active() && is_objArray() && is_forwarded() && (UseParallelGC || UseG1GC);
+  return Universe::heap()->is_stw_gc_active() && is_objArray() && is_forwarded() && (UseParallelGC || UseG1GC);
 }
 #endif

src/hotspot/share/prims/forte.cpp:

@@ -602,7 +602,7 @@ void AsyncGetCallTrace(ASGCT_CallTrace *trace, jint depth, void* ucontext) {
     return;
   }
 
-  if (Universe::heap()->is_gc_active()) {
+  if (Universe::heap()->is_stw_gc_active()) {
     trace->num_frames = ticks_GC_active; // -2
     return;
   }
|
||||
}
|
||||
|
||||
jobject JNIHandles::make_global(Handle obj, AllocFailType alloc_failmode) {
|
||||
assert(!Universe::heap()->is_gc_active(), "can't extend the root set during GC");
|
||||
assert(!Universe::heap()->is_stw_gc_active(), "can't extend the root set during GC pause");
|
||||
assert(!current_thread_in_native(), "must not be in native");
|
||||
jobject res = nullptr;
|
||||
if (!obj.is_null()) {
|
||||
@ -105,7 +105,7 @@ jobject JNIHandles::make_global(Handle obj, AllocFailType alloc_failmode) {
|
||||
}
|
||||
|
||||
jweak JNIHandles::make_weak_global(Handle obj, AllocFailType alloc_failmode) {
|
||||
assert(!Universe::heap()->is_gc_active(), "can't extend the root set during GC");
|
||||
assert(!Universe::heap()->is_stw_gc_active(), "can't extend the root set during GC pause");
|
||||
assert(!current_thread_in_native(), "must not be in native");
|
||||
jweak res = nullptr;
|
||||
if (!obj.is_null()) {
|
||||
|