Merge pull request #44 from DataDog/rgs/fix-async-sample-mutex
fix async sample mutex
richardstartin authored Nov 15, 2023
2 parents 89ffb07 + ff7429f commit c578955
Showing 2 changed files with 47 additions and 70 deletions.
101 changes: 44 additions & 57 deletions ddprof-lib/src/main/cpp/profiler.cpp
@@ -341,58 +341,6 @@ int Profiler::convertNativeTrace(int native_frames, const void** callchain, ASGC
return depth;
}

-NOINLINE void Profiler::getJavaTraceAsyncRetryPopStub(void* ucontext, ASGCT_CallTrace* trace, int max_depth, CodeBlob* stub, StackFrame& frame) {
-  if (!(_safe_mode & POP_STUB) && frame.popStub((instruction_t*)stub->_start, stub->_name)
-      && isAddressInCode(frame.pc() -= ADJUST_RET)) {
-    VM::_asyncGetCallTrace(trace, max_depth, ucontext);
-  }
-}
-
-NOINLINE void Profiler::getJavaTraceAsyncRetryPopMethod(void* ucontext, ASGCT_CallTrace* trace, int max_depth, CodeBlob* stub, StackFrame& frame, NMethod* nmethod) {
-  if (!(_safe_mode & POP_METHOD) && frame.popMethod((instruction_t*)nmethod->entry())
-      && isAddressInCode(frame.pc() -= ADJUST_RET)) {
-    VM::_asyncGetCallTrace(trace, max_depth, ucontext);
-  }
-}
-
-NOINLINE void Profiler::getJavaTraceAsyncRetryMakeFrameWalkable(void* ucontext, ASGCT_CallTrace* trace, int max_depth, VMThread* vm_thread) {
-  uintptr_t& sp = vm_thread->lastJavaSP();
-  uintptr_t& pc = vm_thread->lastJavaPC();
-  if (sp != 0 && pc == 0) {
-    // We have the last Java frame anchor, but it is not marked as walkable.
-    // Make it walkable here
-    pc = ((uintptr_t*)sp)[-1];
-
-    NMethod* m = CodeHeap::findNMethod((const void*)pc);
-    if (m != NULL) {
-      // AGCT fails if the last Java frame is a Runtime Stub with an invalid _frame_complete_offset.
-      // In this case we patch _frame_complete_offset manually
-      if (!m->isNMethod() && m->frameSize() > 0 && m->frameCompleteOffset() == -1) {
-        m->setFrameCompleteOffset(0);
-      }
-      VM::_asyncGetCallTrace(trace, max_depth, ucontext);
-    } else if (findLibraryByAddress((const void*)pc) != NULL) {
-      VM::_asyncGetCallTrace(trace, max_depth, ucontext);
-    }
-
-    pc = 0;
-  }
-}
-
-NOINLINE void Profiler::getJavaTraceAsyncRetryInvalidRuntimeStubFrameCompleteOffset(void* ucontext, ASGCT_CallTrace* trace, int max_depth, VMThread* vm_thread) {
-  uintptr_t& sp = vm_thread->lastJavaSP();
-  uintptr_t& pc = vm_thread->lastJavaPC();
-  if (sp != 0 && pc != 0) {
-    // Similar to the above: last Java frame is set,
-    // but points to a Runtime Stub with an invalid _frame_complete_offset
-    NMethod* m = CodeHeap::findNMethod((const void*)pc);
-    if (m != NULL && !m->isNMethod() && m->frameSize() > 0 && m->frameCompleteOffset() == -1) {
-      m->setFrameCompleteOffset(0);
-      VM::_asyncGetCallTrace(trace, max_depth, ucontext);
-    }
-  }
-}
-
int Profiler::getJavaTraceAsync(void* ucontext, ASGCT_CallFrame* frames, int max_depth, StackContext* java_ctx, bool *truncated) {
// Workaround for JDK-8132510: it's not safe to call GetEnv() inside a signal handler
// since JDK 9, so we do it only for threads already registered in ThreadLocalStorage
@@ -488,7 +436,10 @@ int Profiler::getJavaTraceAsync(void* ucontext, ASGCT_CallFrame* frames, int max
if (_cstack != CSTACK_NO) {
max_depth -= makeFrame(trace.frames++, BCI_NATIVE_FRAME, stub->_name);
}
-        getJavaTraceAsyncRetryPopStub(ucontext, &trace, max_depth, stub, frame);
+        if (!(_safe_mode & POP_STUB) && frame.popStub((instruction_t*)stub->_start, stub->_name)
+            && isAddressInCode(frame.pc() -= ADJUST_RET)) {
+          VM::_asyncGetCallTrace(&trace, max_depth, ucontext);
+        }
} else if (VMStructs::hasMethodStructs()) {
NMethod* nmethod = CodeHeap::findNMethod((const void*)frame.pc());
if (nmethod != NULL && nmethod->isNMethod() && nmethod->isAlive()) {
@@ -498,19 +449,55 @@ int Profiler::getJavaTraceAsync(void* ucontext, ASGCT_CallFrame* frames, int max
if (method_id != NULL) {
max_depth -= makeFrame(trace.frames++, 0, method_id);
}
-          getJavaTraceAsyncRetryPopMethod(ucontext, &trace, max_depth, stub, frame, nmethod);
+          if (!(_safe_mode & POP_METHOD) && frame.popMethod((instruction_t*)nmethod->entry())
+              && isAddressInCode(frame.pc() -= ADJUST_RET)) {
+            VM::_asyncGetCallTrace(&trace, max_depth, ucontext);
+          }
}
} else if (nmethod != NULL) {
if (_cstack != CSTACK_NO) {
max_depth -= makeFrame(trace.frames++, BCI_NATIVE_FRAME, nmethod->name());
}
-        getJavaTraceAsyncRetryPopStub(ucontext, &trace, max_depth, stub, frame);
+        if (!(_safe_mode & POP_STUB) && frame.popStub(NULL, nmethod->name())
+            && isAddressInCode(frame.pc() -= ADJUST_RET)) {
+          VM::_asyncGetCallTrace(&trace, max_depth, ucontext);
+        }
}
}
} else if (trace.num_frames == ticks_unknown_not_Java && !(_safe_mode & LAST_JAVA_PC)) {
-    getJavaTraceAsyncRetryMakeFrameWalkable(ucontext, &trace, max_depth, vm_thread);
+    uintptr_t& sp = vm_thread->lastJavaSP();
+    uintptr_t& pc = vm_thread->lastJavaPC();
+    if (sp != 0 && pc == 0) {
+      // We have the last Java frame anchor, but it is not marked as walkable.
+      // Make it walkable here
+      pc = ((uintptr_t*)sp)[-1];
+
+      NMethod* m = CodeHeap::findNMethod((const void*)pc);
+      if (m != NULL) {
+        // AGCT fails if the last Java frame is a Runtime Stub with an invalid _frame_complete_offset.
+        // In this case we patch _frame_complete_offset manually
+        if (!m->isNMethod() && m->frameSize() > 0 && m->frameCompleteOffset() == -1) {
+          m->setFrameCompleteOffset(0);
+        }
+        VM::_asyncGetCallTrace(&trace, max_depth, ucontext);
+      } else if (findLibraryByAddress((const void*)pc) != NULL) {
+        VM::_asyncGetCallTrace(&trace, max_depth, ucontext);
+      }
+
+      pc = 0;
+    }
} else if (trace.num_frames == ticks_not_walkable_not_Java && !(_safe_mode & LAST_JAVA_PC)) {
-    getJavaTraceAsyncRetryInvalidRuntimeStubFrameCompleteOffset(ucontext, &trace, max_depth, vm_thread);
+    uintptr_t& sp = vm_thread->lastJavaSP();
+    uintptr_t& pc = vm_thread->lastJavaPC();
+    if (sp != 0 && pc != 0) {
+      // Similar to the above: last Java frame is set,
+      // but points to a Runtime Stub with an invalid _frame_complete_offset
+      NMethod* m = CodeHeap::findNMethod((const void*)pc);
+      if (m != NULL && !m->isNMethod() && m->frameSize() > 0 && m->frameCompleteOffset() == -1) {
+        m->setFrameCompleteOffset(0);
+        VM::_asyncGetCallTrace(&trace, max_depth, ucontext);
+      }
+    }
} else if (trace.num_frames == ticks_GC_active && !(_safe_mode & GC_TRACES)) {
if (vm_thread->lastJavaSP() == 0) {
// Do not add 'GC_active' for threads with no Java frames, e.g. Compiler threads
16 changes: 3 additions & 13 deletions ddprof-lib/src/main/cpp/profiler.h
@@ -36,8 +36,6 @@
#include "vmEntry.h"
#include "objectSampler.h"
#include "thread.h"
#include "vmStructs.h"
#include "stackFrame.h"

// avoid linking against newer symbols here for wide compatibility
#ifdef __GLIBC__
Expand All @@ -47,12 +45,6 @@
#endif
#endif

-#ifdef __clang__
-# define NOINLINE __attribute__((noinline))
-#else
-# define NOINLINE __attribute__((noinline,noclone))
-#endif
-
const int MAX_NATIVE_FRAMES = 128;
const int RESERVED_FRAMES = 4;

@@ -85,7 +77,9 @@ class AsyncSampleMutex {
}
AsyncSampleMutex(AsyncSampleMutex& other) = delete;
~AsyncSampleMutex() {
-      try_set(false);
+      if (_acquired) {
+        try_set(false);
+      }
}
bool acquired() {
return _acquired;
@@ -191,10 +185,6 @@ class Profiler {
Engine* selectWallEngine(Arguments& args);
Engine* selectAllocEngine(Arguments& args);
Error checkJvmCapabilities();
-    NOINLINE void getJavaTraceAsyncRetryPopStub(void* ucontext, ASGCT_CallTrace* trace, int max_depth, CodeBlob* stub, StackFrame& frame);
-    NOINLINE void getJavaTraceAsyncRetryPopMethod(void* ucontext, ASGCT_CallTrace* trace, int max_depth, CodeBlob* stub, StackFrame& frame, NMethod* nmethod);
-    NOINLINE void getJavaTraceAsyncRetryMakeFrameWalkable(void* ucontext, ASGCT_CallTrace* trace, int max_depth, VMThread* vm_thread);
-    NOINLINE void getJavaTraceAsyncRetryInvalidRuntimeStubFrameCompleteOffset(void* ucontext, ASGCT_CallTrace* trace, int max_depth, VMThread* vm_thread);

void lockAll();
void unlockAll();
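For context, a minimal standalone sketch of the guard semantics touched by this commit: the destructor clears the per-thread "sample in progress" flag only when this instance actually acquired it, so a guard that failed to acquire can no longer release a sample that is still active. The class name SampleGuard, the std::atomic flag, and the compare-and-swap try_set below are illustrative assumptions, not the library's actual code; the real AsyncSampleMutex lives in ddprof-lib/src/main/cpp/profiler.h.

```cpp
// Simplified illustration of the acquired-guarded release (assumed names).
#include <atomic>
#include <cassert>

class SampleGuard {
  std::atomic<bool>& _flag;   // per-thread "sample in progress" flag
  bool _acquired;

  // Flip the flag only if it currently holds the opposite value.
  bool try_set(bool value) {
    bool expected = !value;
    return _flag.compare_exchange_strong(expected, value);
  }

 public:
  explicit SampleGuard(std::atomic<bool>& flag)
      : _flag(flag), _acquired(try_set(true)) {}
  SampleGuard(const SampleGuard&) = delete;

  ~SampleGuard() {
    // Only the guard that set the flag may clear it; an unconditional
    // try_set(false) here would let a failed acquisition release a flag
    // owned by another, still-active sample.
    if (_acquired) {
      try_set(false);
    }
  }

  bool acquired() const { return _acquired; }
};

int main() {
  std::atomic<bool> in_sample{false};
  SampleGuard outer(in_sample);
  assert(outer.acquired());
  {
    SampleGuard inner(in_sample);   // nested attempt fails to acquire
    assert(!inner.acquired());
  }                                 // inner must NOT clear the flag here
  assert(in_sample.load());         // outer sample is still protected
  return 0;
}
```

In this sketch, the pre-fix behavior (an unconditional try_set(false) in the destructor) would clear in_sample when the inner guard goes out of scope, leaving the outer, still-running sample unprotected.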
