Upgrade async-profiler to v3 (#386)
SirYwell authored Nov 21, 2024
1 parent ddb0097 commit 368ba2f
Showing 20 changed files with 406 additions and 264 deletions.
4 changes: 2 additions & 2 deletions README.md
@@ -37,8 +37,8 @@ It is:
It works by sampling statistical data about the system's activity, and constructing a call graph based on this data. The call graph is then displayed in an online viewer for further analysis by the user.

There are two different profiler engines:
* Native `AsyncGetCallTrace` + `perf_events` - uses [async-profiler](https://github.com/jvm-profiling-tools/async-profiler) (*only available on Linux x86_64 systems*)
* Built-in Java `ThreadMXBean` - an improved version of the popular [WarmRoast profiler](https://github.com/sk89q/WarmRoast) by sk89q.
* Native/Async - uses the [async-profiler](https://github.com/async-profiler/async-profiler) library (*only available on Linux & macOS systems*)
* Java - uses `ThreadMXBean`, an improved version of the popular [WarmRoast profiler](https://github.com/sk89q/WarmRoast) by sk89q.
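
The built-in Java engine mentioned above is based on `ThreadMXBean`. As a rough illustration only (not spark's actual sampler code), a minimal sampler using that API could periodically dump all thread stacks and count the top frames:

```java
import java.lang.management.ManagementFactory;
import java.lang.management.ThreadInfo;
import java.lang.management.ThreadMXBean;
import java.util.HashMap;
import java.util.Map;

// Minimal sketch of ThreadMXBean-based sampling: periodically dump all thread
// stacks and count how often each top frame is observed. Class and variable
// names are illustrative, not spark's actual implementation.
public final class ThreadDumpSamplerSketch {
    public static void main(String[] args) throws InterruptedException {
        ThreadMXBean threadBean = ManagementFactory.getThreadMXBean();
        Map<String, Integer> topFrameCounts = new HashMap<>();

        for (int i = 0; i < 100; i++) {
            for (ThreadInfo info : threadBean.dumpAllThreads(false, false)) {
                StackTraceElement[] stack = info.getStackTrace();
                if (stack.length > 0) {
                    String frame = stack[0].getClassName() + "." + stack[0].getMethodName();
                    topFrameCounts.merge(frame, 1, Integer::sum);
                }
            }
            Thread.sleep(10); // sampling interval
        }

        topFrameCounts.forEach((frame, count) -> System.out.println(count + "\t" + frame));
    }
}
```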

### :zap: Memory Inspection

15 changes: 15 additions & 0 deletions build.gradle
@@ -21,6 +21,21 @@ subprojects {
patchVersion = determinePatchVersion()
pluginVersion = baseVersion + '.' + patchVersion
pluginDescription = 'spark is a performance profiling plugin/mod for Minecraft clients, servers and proxies.'

applyExcludes = { Jar jarTask ->
jarTask.exclude 'module-info.class'
jarTask.exclude 'META-INF/maven/**'
jarTask.exclude 'META-INF/proguard/**'
jarTask.exclude 'META-INF/LICENSE'
jarTask.exclude 'META-INF/NOTICE'
// protobuf
jarTask.exclude '**/*.proto'
jarTask.exclude '**/*.proto.bin'
// async-profiler
jarTask.exclude 'linux-arm64/**'
jarTask.exclude 'linux-x64/**'
jarTask.exclude 'macos/**'
}
}

tasks.withType(JavaCompile).configureEach {
6 changes: 1 addition & 5 deletions spark-bukkit/build.gradle
@@ -45,11 +45,7 @@ shadowJar {
relocate 'me.lucko.bytesocks.client', 'me.lucko.spark.lib.bytesocks'
relocate 'org.java_websocket', 'me.lucko.spark.lib.bytesocks.ws'

exclude 'module-info.class'
exclude 'META-INF/maven/**'
exclude 'META-INF/proguard/**'
exclude '**/*.proto'
exclude '**/*.proto.bin'
project.applyExcludes(delegate)
}

artifacts {
6 changes: 1 addition & 5 deletions spark-bungeecord/build.gradle
@@ -31,11 +31,7 @@ shadowJar {
relocate 'me.lucko.bytesocks.client', 'me.lucko.spark.lib.bytesocks'
relocate 'org.java_websocket', 'me.lucko.spark.lib.bytesocks.ws'

exclude 'module-info.class'
exclude 'META-INF/maven/**'
exclude 'META-INF/proguard/**'
exclude '**/*.proto'
exclude '**/*.proto.bin'
project.applyExcludes(delegate)
}

artifacts {
2 changes: 1 addition & 1 deletion spark-common/build.gradle
@@ -12,7 +12,7 @@ license {

dependencies {
api project(':spark-api')
implementation 'com.github.jvm-profiling-tools:async-profiler:v2.8.3'
implementation 'tools.profiler:async-profiler:3.0' // spark native version: 3cf733d
implementation 'org.ow2.asm:asm:9.7'
implementation 'net.bytebuddy:byte-buddy-agent:1.14.17'
implementation 'com.google.protobuf:protobuf-javalite:4.28.2'
@@ -144,18 +144,8 @@ private static AsyncProfiler load(SparkPlatform platform) throws Exception {
String arch = System.getProperty("os.arch").toLowerCase(Locale.ROOT);
String jvm = System.getProperty("java.vm.name");

// openj9 not supported by async-profiler at the moment
if (jvm.contains("OpenJ9")) {
throw new UnsupportedJvmException(jvm);
}

if (os.equals("linux") && arch.equals("amd64") && isLinuxMusl()) {
arch = "amd64-musl";
}

Table<String, String, String> supported = ImmutableTable.<String, String, String>builder()
.put("linux", "amd64", "linux/amd64")
.put("linux", "amd64-musl", "linux/amd64-musl")
.put("linux", "aarch64", "linux/aarch64")
.put("macosx", "amd64", "macos")
.put("macosx", "aarch64", "macos")
@@ -242,20 +232,4 @@ public NativeLoadingException(Throwable cause) {
super("A runtime error occurred whilst loading the native library", cause);
}
}

// Checks if the system is using musl instead of glibc
private static boolean isLinuxMusl() {
try {
InputStream stream = new ProcessBuilder("sh", "-c", "ldd `which ls`")
.start()
.getInputStream();

BufferedReader reader = new BufferedReader(new InputStreamReader(stream));
String output = reader.lines().collect(Collectors.joining());
return output.contains("musl"); // shrug
} catch (Throwable e) {
// ignore
return false;
}
}
}
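
The hunk above builds a Guava `ImmutableTable` keyed by OS and architecture to decide which bundled async-profiler binary to load. A simplified sketch of that lookup pattern follows; directory names, the resource file name, and the method name are illustrative assumptions (the actual paths differ between async-profiler versions), not spark's real API.

```java
import com.google.common.collect.ImmutableTable;
import com.google.common.collect.Table;
import java.util.Locale;

// Sketch of an os/arch lookup table resolving the directory of a bundled
// native library. Values and names here are illustrative only.
public final class LibraryPathSketch {
    private static final Table<String, String, String> SUPPORTED = ImmutableTable.<String, String, String>builder()
            .put("linux", "amd64", "linux/amd64")
            .put("linux", "aarch64", "linux/aarch64")
            .put("macosx", "amd64", "macos")
            .put("macosx", "aarch64", "macos")
            .build();

    public static String resolveLibraryPath() {
        // normalise "Mac OS X" -> "macosx", "Linux" -> "linux"
        String os = System.getProperty("os.name").toLowerCase(Locale.ROOT).replace(" ", "");
        String arch = System.getProperty("os.arch").toLowerCase(Locale.ROOT);

        String dir = SUPPORTED.get(os, arch);
        if (dir == null) {
            throw new UnsupportedOperationException("Unsupported os/arch: " + os + "/" + arch);
        }
        return dir + "/libasyncProfiler.so"; // assumed resource name
    }
}
```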
@@ -169,8 +169,12 @@ private void scheduleTimeout() {
}

this.scheduler.schedule(() -> {
stop(false);
this.future.complete(this);
try {
stop(false);
this.future.complete(this);
} catch (Exception e) {
this.future.completeExceptionally(e);
}
}, delay, TimeUnit.MILLISECONDS);
}

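The change above wraps the scheduled stop call in a try/catch so that a failure in `stop()` is surfaced through the `CompletableFuture` rather than silently lost. A standalone sketch of that pattern, with illustrative class and method names rather than spark's actual ones:

```java
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

// Sketch: a scheduled task that completes a CompletableFuture exceptionally
// if the work it wraps throws. Names are illustrative only.
final class TimeoutSketch {
    private final ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor();
    private final CompletableFuture<TimeoutSketch> future = new CompletableFuture<>();

    void scheduleTimeout(long delayMillis) {
        this.scheduler.schedule(() -> {
            try {
                stop(false);                 // may throw if stopping fails
                this.future.complete(this);  // normal completion
            } catch (Exception e) {
                this.future.completeExceptionally(e); // propagate the failure to waiters
            }
        }, delayMillis, TimeUnit.MILLISECONDS);
    }

    private void stop(boolean cancelled) throws Exception {
        // placeholder for the real stop logic
    }
}
```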
@@ -20,9 +20,11 @@

package me.lucko.spark.common.sampler.async;

import com.google.common.collect.ImmutableMap;
import me.lucko.spark.common.sampler.async.jfr.JfrReader;

import java.nio.charset.StandardCharsets;
import java.util.Map;

/**
* Represents a profile "segment".
@@ -83,7 +85,9 @@ public static ProfileSegment parseSegment(JfrReader reader, JfrReader.Event samp
String threadState = UNKNOWN_THREAD_STATE;
if (sample instanceof JfrReader.ExecutionSample) {
JfrReader.ExecutionSample executionSample = (JfrReader.ExecutionSample) sample;
threadState = reader.threadStates.get(executionSample.threadState);

Map<Integer, String> threadStateLookup = reader.enums.getOrDefault("jdk.types.ThreadState", ImmutableMap.of());
threadState = threadStateLookup.getOrDefault(executionSample.threadState, UNKNOWN_THREAD_STATE);
}

return new ProfileSegment(sample.tid, threadName, stack, value, threadState);
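The updated code above resolves the JFR thread-state id through the reader's enum table, falling back to a default when either the enum type or the constant is missing. A simplified sketch of that defensive two-level lookup; the map structure and names are simplified for illustration and are not the `JfrReader` API:

```java
import com.google.common.collect.ImmutableMap;
import java.util.Map;

// Sketch: resolve a JFR enum constant to its label, with a default for both a
// missing enum type and a missing constant. Simplified for illustration.
final class ThreadStateLookupSketch {
    static final String UNKNOWN_THREAD_STATE = "unknown"; // placeholder default

    static String threadState(Map<String, Map<Integer, String>> enums, int threadStateId) {
        Map<Integer, String> lookup = enums.getOrDefault("jdk.types.ThreadState", ImmutableMap.of());
        return lookup.getOrDefault(threadStateId, UNKNOWN_THREAD_STATE);
    }
}
```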
@@ -1,17 +1,6 @@
/*
* Copyright 2020 Andrei Pangin
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* Copyright The async-profiler authors
* SPDX-License-Identifier: Apache-2.0
*/

package me.lucko.spark.common.sampler.async.jfr;
@@ -37,9 +26,11 @@ public void clear() {
size = 0;
}

// spark start
public int size() {
return this.size;
}
// spark end

public void put(long key, T value) {
if (key == 0) {
(Diffs for the remaining changed files were not loaded.)
