author     Luck <git@lucko.me>    2022-02-22 21:09:32 +0000
committer  Luck <git@lucko.me>    2022-02-22 21:10:51 +0000
commit     530610afda0697536bb0d103d95f8e4ebf73b7c8 (patch)
tree       44dfe4790c0fa4969e184daea7b0a1545d828a6b /spark-common/src/main/java/me/lucko
parent     f8ae6c5e41d72e7e695c65fe77281c6dd87e1ec3 (diff)
Update async-profiler, add support for linux/aarch64 (#181)
Diffstat (limited to 'spark-common/src/main/java/me/lucko')
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java  18
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/JfrReader.java          18
2 files changed, 28 insertions, 8 deletions
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java
index d1c8393..d642a53 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java
@@ -20,8 +20,8 @@
package me.lucko.spark.common.sampler.async;
-import com.google.common.collect.ImmutableSetMultimap;
-import com.google.common.collect.Multimap;
+import com.google.common.collect.ImmutableTable;
+import com.google.common.collect.Table;
import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.util.TemporaryFiles;
@@ -108,18 +108,20 @@ public enum AsyncProfilerAccess {
String os = System.getProperty("os.name").toLowerCase(Locale.ROOT).replace(" ", "");
String arch = System.getProperty("os.arch").toLowerCase(Locale.ROOT);
- Multimap<String, String> supported = ImmutableSetMultimap.<String, String>builder()
- .put("linux", "amd64")
- .put("macosx", "amd64")
- .put("macosx", "aarch64")
+ Table<String, String, String> supported = ImmutableTable.<String, String, String>builder()
+ .put("linux", "amd64", "linux/amd64")
+ .put("linux", "aarch64", "linux/aarch64")
+ .put("macosx", "amd64", "macos")
+ .put("macosx", "aarch64", "macos")
.build();
- if (!supported.containsEntry(os, arch)) {
+ String libPath = supported.get(os, arch);
+ if (libPath == null) {
throw new UnsupportedSystemException(os, arch);
}
// extract the profiler binary from the spark jar file
- String resource = "spark/" + os + "/libasyncProfiler.so";
+ String resource = "spark/" + libPath + "/libasyncProfiler.so";
URL profilerResource = AsyncProfilerAccess.class.getClassLoader().getResource(resource);
if (profilerResource == null) {
throw new IllegalStateException("Could not find " + resource + " in spark jar file");
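
The change above swaps Guava's Multimap for a Table so that a single (os, arch) lookup also yields the directory of the bundled native library, rather than only answering whether the pair is supported. Below is a minimal standalone sketch of the same lookup pattern; the class name and print statements are illustrative only and are not part of spark.

import java.util.Locale;

import com.google.common.collect.ImmutableTable;
import com.google.common.collect.Table;

// Illustrative sketch of the Table-based lookup introduced above: the row key is the
// normalised OS name, the column key is the CPU architecture, and the value is the
// directory inside the spark jar that holds the matching libasyncProfiler.so.
public class SupportedPlatformLookup {

    private static final Table<String, String, String> SUPPORTED =
            ImmutableTable.<String, String, String>builder()
                    .put("linux", "amd64", "linux/amd64")
                    .put("linux", "aarch64", "linux/aarch64")
                    .put("macosx", "amd64", "macos")
                    .put("macosx", "aarch64", "macos")
                    .build();

    public static void main(String[] args) {
        String os = System.getProperty("os.name").toLowerCase(Locale.ROOT).replace(" ", "");
        String arch = System.getProperty("os.arch").toLowerCase(Locale.ROOT);

        // Table.get returns null for an unsupported (os, arch) pair, which replaces the
        // old Multimap#containsEntry check and resolves the resource path in one step.
        String libPath = SUPPORTED.get(os, arch);
        if (libPath == null) {
            System.err.println("Unsupported system: " + os + '/' + arch);
        } else {
            System.out.println("Would extract: spark/" + libPath + "/libasyncProfiler.so");
        }
    }
}

Note that both macOS architectures map to the same "macos" directory, because the macOS build of libasyncProfiler.so is shipped as a universal binary, while Linux needs a separate amd64 and aarch64 build.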
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/JfrReader.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/JfrReader.java
index a705f2d..e0cc4e9 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/JfrReader.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/JfrReader.java
@@ -59,6 +59,7 @@ public class JfrReader implements Closeable {
public final Dictionary<AsyncStackTraceElement> stackFrames = new Dictionary<>(); // spark
public final Map<Integer, String> frameTypes = new HashMap<>();
public final Map<Integer, String> threadStates = new HashMap<>();
+ public final Map<String, String> settings = new HashMap<>();
private int executionSample;
private int nativeMethodSample;
@@ -67,6 +68,8 @@ public class JfrReader implements Closeable {
private int allocationSample;
private int monitorEnter;
private int threadPark;
+ private int activeSetting;
+ private boolean activeSettingHasStack;
public JfrReader(Path path) throws IOException { // spark - Path instead of String
this.ch = FileChannel.open(path, StandardOpenOption.READ); // spark - Path instead of String
@@ -129,6 +132,8 @@ public class JfrReader implements Closeable {
if (cls == null || cls == ContendedLock.class) return (E) readContendedLock(false);
} else if (type == threadPark) {
if (cls == null || cls == ContendedLock.class) return (E) readContendedLock(true);
+ } else if (type == activeSetting) {
+ readActiveSetting();
}
if ((pos += size) <= buf.limit()) {
@@ -170,6 +175,17 @@ public class JfrReader implements Closeable {
return new ContendedLock(time, tid, stackTraceId, duration, classId);
}
+ private void readActiveSetting() {
+ long time = getVarlong();
+ long duration = getVarlong();
+ int tid = getVarint();
+ if (activeSettingHasStack) getVarint();
+ long id = getVarlong();
+ String name = getString();
+ String value = getString();
+ settings.put(name, value);
+ }
+
private boolean readChunk(int pos) throws IOException {
if (pos + CHUNK_HEADER_SIZE > buf.limit() || buf.getInt(pos) != CHUNK_SIGNATURE) {
throw new IOException("Not a valid JFR file");
@@ -424,6 +440,8 @@ public class JfrReader implements Closeable {
allocationSample = getTypeId("jdk.ObjectAllocationSample");
monitorEnter = getTypeId("jdk.JavaMonitorEnter");
threadPark = getTypeId("jdk.ThreadPark");
+ activeSetting = getTypeId("jdk.ActiveSetting");
+ activeSettingHasStack = activeSetting >= 0 && typesByName.get("jdk.ActiveSetting").field("stackTrace") != null;
}
private int getTypeId(String typeName) {
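
For context on what readActiveSetting() is parsing: jdk.ActiveSetting is a standard JFR event that records one recording setting as a name/value pair, and the new code above collects those pairs into the reader's settings map. The sketch below is illustrative only; it uses the JDK's own jdk.jfr.consumer API rather than spark's JfrReader to show the same name/value data being read from a recording.

import java.io.IOException;
import java.nio.file.Path;

import jdk.jfr.consumer.RecordedEvent;
import jdk.jfr.consumer.RecordingFile;

// Illustrative sketch (JDK jdk.jfr.consumer API, not spark's JfrReader): prints the
// name/value pairs carried by jdk.ActiveSetting events, i.e. the same data that the new
// readActiveSetting() method stores into the settings map.
public class PrintActiveSettings {

    public static void main(String[] args) throws IOException {
        Path recording = Path.of(args[0]); // path to a .jfr recording file

        for (RecordedEvent event : RecordingFile.readAllEvents(recording)) {
            if ("jdk.ActiveSetting".equals(event.getEventType().getName())) {
                System.out.println(event.getString("name") + " = " + event.getString("value"));
            }
        }
    }
}

The activeSettingHasStack flag mirrors this flexibility at the binary level: the parser checks the event type's metadata for a stackTrace field and only skips a stack trace id when the recording actually wrote one, so the same reader handles recordings produced with and without stack traces on that event.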