Diffstat (limited to 'spark-common/src/main/java/me/lucko/spark/common')
-rw-r--r-- | spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java | 24 +++++++++++++++++++++---
1 file changed, 21 insertions(+), 3 deletions(-)
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java
index 192275b..7dcb131 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java
@@ -114,13 +114,19 @@ public class AsyncProfilerAccess {
         if (this.setupException instanceof UnsupportedSystemException) {
             platform.getPlugin().log(Level.INFO, "The async-profiler engine is not supported for your os/arch (" +
                     this.setupException.getMessage() + "), so the built-in Java engine will be used instead.");
+        } else if (this.setupException instanceof UnsupportedJvmException) {
+            platform.getPlugin().log(Level.INFO, "The async-profiler engine is not supported for your JVM (" +
+                    this.setupException.getMessage() + "), so the built-in Java engine will be used instead.");
         } else if (this.setupException instanceof NativeLoadingException && this.setupException.getCause().getMessage().contains("libstdc++")) {
             platform.getPlugin().log(Level.WARNING, "Unable to initialise the async-profiler engine because libstdc++ is not installed.");
             platform.getPlugin().log(Level.WARNING, "Please see here for more information: https://spark.lucko.me/docs/misc/Using-async-profiler#install-libstdc");
         } else {
-            platform.getPlugin().log(Level.WARNING, "Unable to initialise the async-profiler engine: " + this.setupException.getMessage());
+            String error = this.setupException.getMessage();
+            if (this.setupException.getCause() != null) {
+                error += " (" + this.setupException.getCause().getMessage() + ")";
+            }
+            platform.getPlugin().log(Level.WARNING, "Unable to initialise the async-profiler engine: " + error);
             platform.getPlugin().log(Level.WARNING, "Please see here for more information: https://spark.lucko.me/docs/misc/Using-async-profiler");
-            this.setupException.printStackTrace();
         }
     }
@@ -140,6 +146,12 @@ public class AsyncProfilerAccess {
         // check compatibility
         String os = System.getProperty("os.name").toLowerCase(Locale.ROOT).replace(" ", "");
         String arch = System.getProperty("os.arch").toLowerCase(Locale.ROOT);
+        String jvm = System.getProperty("java.vm.name");
+
+        // openj9 not supported by async-profiler at the moment
+        if (jvm.contains("OpenJ9")) {
+            throw new UnsupportedJvmException(jvm);
+        }

         if (os.equals("linux") && arch.equals("amd64") && isLinuxMusl()) {
             arch = "amd64-musl";
@@ -159,7 +171,7 @@ public class AsyncProfilerAccess {
         }

         // extract the profiler binary from the spark jar file
-        String resource = "spark/" + libPath + "/libasyncProfiler.so";
+        String resource = "spark-native/" + libPath + "/libasyncProfiler.so";
         URL profilerResource = AsyncProfilerAccess.class.getClassLoader().getResource(resource);
         if (profilerResource == null) {
             throw new IllegalStateException("Could not find " + resource + " in spark jar file");
@@ -224,6 +236,12 @@ public class AsyncProfilerAccess {
         }
     }

+    private static final class UnsupportedJvmException extends UnsupportedOperationException {
+        public UnsupportedJvmException(String jvm) {
+            super(jvm);
+        }
+    }
+
     private static final class NativeLoadingException extends RuntimeException {
         public NativeLoadingException(Throwable cause) {
             super("A runtime error occurred whilst loading the native library", cause);
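
For context, a minimal standalone sketch of the two behaviours this patch introduces: rejecting OpenJ9 JVMs up front (async-profiler relies on HotSpot internals) and appending the cause message when logging a setup failure. The class and method names here (JvmCompatibilityCheckExample, checkJvmSupported, describe) are illustrative stand-ins, not spark's actual API.

public class JvmCompatibilityCheckExample {

    // Hypothetical stand-in for spark's private UnsupportedJvmException.
    static final class UnsupportedJvmException extends UnsupportedOperationException {
        UnsupportedJvmException(String jvm) {
            super(jvm);
        }
    }

    // Mirrors the patch's JVM check: read java.vm.name and refuse OpenJ9.
    static void checkJvmSupported() {
        String jvm = System.getProperty("java.vm.name");
        if (jvm != null && jvm.contains("OpenJ9")) {
            throw new UnsupportedJvmException(jvm);
        }
    }

    // Mirrors the patched warning message: include the cause message, if any,
    // instead of printing a full stack trace.
    static String describe(Throwable setupException) {
        String error = setupException.getMessage();
        if (setupException.getCause() != null) {
            error += " (" + setupException.getCause().getMessage() + ")";
        }
        return error;
    }

    public static void main(String[] args) {
        try {
            checkJvmSupported();
            System.out.println("JVM supported: " + System.getProperty("java.vm.name"));
        } catch (UnsupportedJvmException e) {
            System.out.println("Falling back to the built-in Java engine: " + e.getMessage());
        }

        Throwable failure = new RuntimeException("native load failed",
                new UnsatisfiedLinkError("libstdc++ not found"));
        System.out.println("Unable to initialise the async-profiler engine: " + describe(failure));
    }
}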