art/runtime/runtime.cc
void Runtime::VisitRoots(RootVisitor* visitor, VisitRootFlags flags) {
  VisitNonConcurrentRoots(visitor, flags);
  VisitConcurrentRoots(visitor, flags);
}

void Runtime::VisitNonConcurrentRoots(RootVisitor* visitor, VisitRootFlags flags) {
  VisitThreadRoots(visitor, flags);
  VisitNonThreadRoots(visitor);
}

void Runtime::VisitThreadRoots(RootVisitor* visitor, VisitRootFlags flags) {
  thread_list_->VisitRoots(visitor, flags);
}
art/runtime/thread_list.cc
void ThreadList::VisitRoots(RootVisitor* visitor, VisitRootFlags flags) const {
  MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
  for (const auto& thread : list_) {
    thread->VisitRoots(visitor, flags);
  }
}
art/runtime/thread.cc
void Thread::VisitRoots(RootVisitor* visitor, VisitRootFlags flags) {
  if ((flags & VisitRootFlags::kVisitRootFlagPrecise) != 0) {
    VisitRoots</* kPrecise */ true>(visitor);
  } else {
    VisitRoots</* kPrecise */ false>(visitor);
  }
}
// The RootType values passed to RootInfo below are defined in gc_root.h (the enum is shown later in this section).
template <bool kPrecise>
void Thread::VisitRoots(RootVisitor* visitor) {
  const pid_t thread_id = GetThreadId();
  visitor->VisitRootIfNonNull(&tlsPtr_.opeer, RootInfo(kRootThreadObject, thread_id));
  if (tlsPtr_.exception != nullptr && tlsPtr_.exception != GetDeoptimizationException()) {
    visitor->VisitRoot(reinterpret_cast<mirror::Object**>(&tlsPtr_.exception),
                       RootInfo(kRootNativeStack, thread_id));
  }
  if (tlsPtr_.async_exception != nullptr) {
    visitor->VisitRoot(reinterpret_cast<mirror::Object**>(&tlsPtr_.async_exception),
                       RootInfo(kRootNativeStack, thread_id));
  }
  visitor->VisitRootIfNonNull(&tlsPtr_.monitor_enter_object, RootInfo(kRootNativeStack, thread_id));
  tlsPtr_.jni_env->VisitJniLocalRoots(visitor, RootInfo(kRootJNILocal, thread_id));
  tlsPtr_.jni_env->VisitMonitorRoots(visitor, RootInfo(kRootJNIMonitor, thread_id));
  HandleScopeVisitRoots(visitor, thread_id);
  if (tlsPtr_.debug_invoke_req != nullptr) {
    tlsPtr_.debug_invoke_req->VisitRoots(visitor, RootInfo(kRootDebugger, thread_id));
  }
  ......
  // Visit roots on this thread's stack
  RuntimeContextType context;
  RootCallbackVisitor visitor_to_callback(visitor, thread_id);
  ReferenceMapVisitor<RootCallbackVisitor, kPrecise> mapper(this, &context, visitor_to_callback);
  mapper.template WalkStack<StackVisitor::CountTransitions::kNo>(false);
  for (instrumentation::InstrumentationStackFrame& frame : *GetInstrumentationStack()) {
    visitor->VisitRootIfNonNull(&frame.this_object_, RootInfo(kRootVMInternal, thread_id));
  }
}
art/runtime/runtime.cc
void Runtime::VisitNonThreadRoots(RootVisitor* visitor) {
  java_vm_->VisitRoots(visitor);
  sentinel_.VisitRootIfNonNull(visitor, RootInfo(kRootVMInternal));
  pre_allocated_OutOfMemoryError_.VisitRootIfNonNull(visitor, RootInfo(kRootVMInternal));
  pre_allocated_NoClassDefFoundError_.VisitRootIfNonNull(visitor, RootInfo(kRootVMInternal));
  verifier::MethodVerifier::VisitStaticRoots(visitor);
  VisitTransactionRoots(visitor);
}
void Runtime::VisitConcurrentRoots(RootVisitor* visitor, VisitRootFlags flags) {
  intern_table_->VisitRoots(visitor, flags);
  class_linker_->VisitRoots(visitor, flags);
  heap_->VisitAllocationRecords(visitor);
  if ((flags & kVisitRootFlagNewRoots) == 0) {
    // Guaranteed to have no new roots in the constant roots.
    VisitConstantRoots(visitor);
  }
  Dbg::VisitRoots(visitor);
}
art/runtime/gc_root.h
enum RootType {
  kRootUnknown = 0,
  kRootJNIGlobal,
  kRootJNILocal,
  kRootJavaFrame,
  kRootNativeStack,
  kRootStickyClass,       // class roots (mirror::Class objects, e.g. from the class tables)
  kRootThreadBlock,
  kRootMonitorUsed,
  kRootThreadObject,
  kRootInternedString,
  kRootFinalizing,        // used for HPROF's conversion to HprofHeapTag
  kRootDebugger,
  kRootReferenceCleanup,  // used for HPROF's conversion to HprofHeapTag
  kRootVMInternal,
  kRootJNIMonitor,
};
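When dumping or logging roots it helps to turn these enum values into readable labels. The helper below is only a hypothetical sketch; the function name and the chosen labels are mine, not part of ART:

// Hypothetical helper, not part of ART: map a RootType to a short label for logging.
static const char* RootTypeName(RootType type) {
  switch (type) {
    case kRootJNIGlobal:      return "JNI global";
    case kRootJNILocal:       return "JNI local";
    case kRootJavaFrame:      return "Java frame";
    case kRootNativeStack:    return "native stack";
    case kRootStickyClass:    return "class root";
    case kRootThreadObject:   return "thread object";
    case kRootInternedString: return "interned string";
    case kRootMonitorUsed:    return "monitor";
    case kRootJNIMonitor:     return "JNI monitor";
    default:                  return "other";
  }
}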
art/runtime/gc_root.h
// Single root version, not overridable.
ALWAYS_INLINE void VisitRootIfNonNull(mirror::Object** root, const RootInfo& info)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  if (*root != nullptr) {
    VisitRoot(root, info);
  }
}

// Single root version, not overridable.
ALWAYS_INLINE void VisitRoot(mirror::Object** root, const RootInfo& info)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  VisitRoots(&root, 1, info);
}
// Only visits roots one at a time, doesn't handle updating roots. Used when performance isn't critical.
class SingleRootVisitor : public RootVisitor {
 private:
  void VisitRoots(mirror::Object*** roots, size_t count, const RootInfo& info) OVERRIDE
      REQUIRES_SHARED(Locks::mutator_lock_) {
    for (size_t i = 0; i < count; ++i) {
      VisitRoot(*roots[i], info);
    }
  }
  ......
  // Subclasses implement this to receive roots one at a time.
  virtual void VisitRoot(mirror::Object* root, const RootInfo& info) = 0;
};
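SingleRootVisitor is the easiest hook for a diagnostic tool. The sketch below is only an illustration under assumptions: the class name is mine, it counts roots per RootType (assuming RootInfo::GetType() as the accessor), and it must run inside the runtime with threads suspended and the mutator lock held, driven through the Runtime::VisitRoots() entry point shown at the top of this section.

// Hypothetical sketch: count GC roots per RootType through SingleRootVisitor.
// Assumes it runs inside ART with threads suspended and the mutator lock held,
// and that RootInfo exposes its RootType via GetType().
class CountingRootVisitor : public SingleRootVisitor {
 public:
  void VisitRoot(mirror::Object* root, const RootInfo& info) override
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (root != nullptr) {
      ++counts_[info.GetType()];
    }
  }
  const std::map<RootType, size_t>& Counts() const { return counts_; }

 private:
  std::map<RootType, size_t> counts_;  // needs <map>
};

// Driven through the entry point shown at the beginning of this section:
//   CountingRootVisitor root_counter;
//   Runtime::Current()->VisitRoots(&root_counter, kVisitRootFlagAllRoots);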
art/runtime/jni_env_ext.h
void VisitJniLocalRoots(RootVisitor* visitor, const RootInfo& root_info)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  locals_.VisitRoots(visitor, root_info);
}
art/runtime/java_vm_ext.cc
void JavaVMExt::VisitRoots(RootVisitor* visitor) {
  Thread* self = Thread::Current();
  ReaderMutexLock mu(self, *Locks::jni_globals_lock_);
  globals_.VisitRoots(visitor, RootInfo(kRootJNIGlobal));
  // The weak_globals table is visited by the GC itself (because it mutates the table).
}
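The globals_ table visited here is where JNI global references live, which is why a forgotten NewGlobalRef keeps its target reachable as a kRootJNIGlobal root. A minimal illustration follows; the Java-side class and method names are hypothetical:

#include <jni.h>

// Hypothetical example: the object passed in is pinned by a JNI global reference.
// JavaVMExt::VisitRoots() will keep reporting it as a kRootJNIGlobal root, so the
// GC cannot reclaim it until DeleteGlobalRef() is called.
static jobject g_cached = nullptr;

extern "C" JNIEXPORT void JNICALL
Java_com_example_Cache_pin(JNIEnv* env, jclass, jobject obj) {
  g_cached = env->NewGlobalRef(obj);  // lands in the globals_ table shown above
}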
art/runtime/gc/heap-visit-objects-inl.h
template <typename Visitor>
inline void Heap::VisitObjectsPaused(Visitor&& visitor) {
  Thread* self = Thread::Current();
  Locks::mutator_lock_->AssertExclusiveHeld(self);
  VisitObjectsInternalRegionSpace(visitor);
  VisitObjectsInternal(visitor);
}
// Visit objects in the region spaces.
template <typename Visitor>
inline void Heap::VisitObjectsInternalRegionSpace(Visitor&& visitor) {
  region_space_->Walk(visitor);
}
// Visit objects in the other spaces.
template <typename Visitor>
inline void Heap::VisitObjectsInternal(Visitor&& visitor) {
  if (bump_pointer_space_ != nullptr) {
    // Visit objects in bump pointer space.
    bump_pointer_space_->Walk(visitor);
  }
  for (auto* it = allocation_stack_->Begin(), *end = allocation_stack_->End(); it < end; ++it) {
    mirror::Object* const obj = it->AsMirrorPtr();
    visitor(obj);
  }
  {
    ReaderMutexLock mu(Thread::Current(), *Locks::heap_bitmap_lock_);
    GetLiveBitmap()->Visit<Visitor>(visitor);
  }
}
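As the assertion in VisitObjectsPaused() shows, the caller must already hold the mutator lock exclusively, which in practice means stopping the world first. Below is a rough sketch of how a tool might walk every live object under those assumptions; the function name and the counting logic are mine, not ART's:

// Hypothetical sketch: stop the world, then visit every live object on the heap.
// Assumes it runs inside the runtime from a thread that is allowed to suspend others.
void CountInstancesOf(mirror::Class* klass) {
  size_t count = 0;
  auto visitor = [&](mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
    if (obj != nullptr && obj->GetClass() == klass) {
      ++count;
    }
  };
  ScopedSuspendAll ssa(__FUNCTION__);  // stop the world; mutator lock now held exclusively
  Runtime::Current()->GetHeap()->VisitObjectsPaused(visitor);
  LOG(INFO) << "instances found: " << count;
}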
art/runtime/gc/accounting/heap_bitmap-inl.h
template <typename Visitor>
inline void HeapBitmap::Visit(Visitor&& visitor) {
  for (const auto& bitmap : continuous_space_bitmaps_) {
    bitmap->VisitMarkedRange(bitmap->HeapBegin(), bitmap->HeapLimit(), visitor);
  }
  for (const auto& bitmap : large_object_bitmaps_) {
    bitmap->VisitMarkedRange(bitmap->HeapBegin(), bitmap->HeapLimit(), visitor);
  }
}
art/runtime/mirror/object-refvisitor-inl.h
template <bool kVisitNativeRoots,
          VerifyObjectFlags kVerifyFlags,
          ReadBarrierOption kReadBarrierOption,
          typename Visitor,
          typename JavaLangRefVisitor>
inline void Object::VisitReferences(const Visitor& visitor,
                                    const JavaLangRefVisitor& ref_visitor) {
  ObjPtr<Class> klass = GetClass<kVerifyFlags, kReadBarrierOption>();
  visitor(this, ClassOffset(), false);
  const uint32_t class_flags = klass->GetClassFlags<kVerifyNone>();
  if (LIKELY(class_flags == kClassFlagNormal)) {
    DCHECK((!klass->IsVariableSize<kVerifyFlags, kReadBarrierOption>()));
    VisitInstanceFieldsReferences<kVerifyFlags, kReadBarrierOption>(klass, visitor);
    ......