Diffstat (limited to 'spark-common/src/main/java/me/lucko')
11 files changed, 378 insertions, 32 deletions
diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
index 491b7d3..fdced75 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
@@ -44,6 +44,7 @@ import me.lucko.spark.common.monitor.tick.TickStatistics;
 import me.lucko.spark.common.tick.TickHook;
 import me.lucko.spark.common.tick.TickReporter;
 import me.lucko.spark.common.util.BytebinClient;
+import me.lucko.spark.common.util.ClassSourceLookup;
 
 import net.kyori.adventure.text.event.ClickEvent;
 
@@ -87,6 +88,7 @@ public class SparkPlatform {
     private final ActivityLog activityLog;
     private final TickHook tickHook;
     private final TickReporter tickReporter;
+    private final ClassSourceLookup classSourceLookup;
     private final TickStatistics tickStatistics;
     private Map<String, GarbageCollectorStatistics> startupGcStatistics = ImmutableMap.of();
     private long serverNormalOperationStartTime;
@@ -115,6 +117,7 @@ public class SparkPlatform {
 
         this.tickHook = plugin.createTickHook();
         this.tickReporter = plugin.createTickReporter();
+        this.classSourceLookup = plugin.createClassSourceLookup();
         this.tickStatistics = this.tickHook != null ? new TickStatistics() : null;
     }
 
@@ -175,6 +178,10 @@ public class SparkPlatform {
         return this.tickReporter;
     }
 
+    public ClassSourceLookup getClassSourceLookup() {
+        return this.classSourceLookup;
+    }
+
     public TickStatistics getTickStatistics() {
         return this.tickStatistics;
     }
diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java
index 216f23f..aa5112d 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java
@@ -26,6 +26,7 @@ import me.lucko.spark.common.platform.PlatformInfo;
 import me.lucko.spark.common.sampler.ThreadDumper;
 import me.lucko.spark.common.tick.TickHook;
 import me.lucko.spark.common.tick.TickReporter;
+import me.lucko.spark.common.util.ClassSourceLookup;
 
 import java.nio.file.Path;
 import java.util.stream.Stream;
@@ -102,6 +103,15 @@ public interface SparkPlugin {
     }
 
     /**
+     * Creates a class source lookup function.
+     *
+     * @return the class source lookup function
+     */
+    default ClassSourceLookup createClassSourceLookup() {
+        return ClassSourceLookup.NO_OP;
+    }
+
+    /**
      * Gets information for the platform.
      *
      * @return information about the platform
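
For illustration, a platform module can override the new SparkPlugin hook. A minimal sketch, under the assumption of a hypothetical platform class (the package and class names below are made up; ByCodeSource is one of the strategies added in ClassSourceLookup further down):

    package com.example.spark; // hypothetical platform module

    import me.lucko.spark.common.SparkPlugin;
    import me.lucko.spark.common.util.ClassSourceLookup;

    // Sketch of a platform plugin overriding the new hook; the remaining
    // SparkPlugin methods are omitted by keeping the class abstract.
    public abstract class ExampleSparkPlugin implements SparkPlugin {

        @Override
        public ClassSourceLookup createClassSourceLookup() {
            // Identify classes by the jar named in their ProtectionDomain's CodeSource.
            // Platforms with no better mapping simply inherit the NO_OP default.
            return new ClassSourceLookup.ByCodeSource();
        }
    }
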
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
index 856a182..ebf6372 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
@@ -299,7 +299,7 @@ public class SamplerModule implements CommandModule {
     }
 
     private void handleUpload(SparkPlatform platform, CommandResponseHandler resp, Sampler sampler, ThreadNodeOrder threadOrder, String comment, MergeMode mergeMode) {
-        byte[] output = sampler.formCompressedDataPayload(platform.getPlugin().getPlatformInfo(), resp.sender(), threadOrder, comment, mergeMode);
+        byte[] output = sampler.formCompressedDataPayload(new Sampler.ExportProps(platform.getPlugin().getPlatformInfo(), resp.sender(), threadOrder, comment, mergeMode, platform.getClassSourceLookup()));
         try {
             String key = SparkPlatform.BYTEBIN_CLIENT.postContent(output, SPARK_SAMPLER_MEDIA_TYPE).key();
             String url = SparkPlatform.VIEWER_URL + key;
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
index 5088ed7..bc08dfd 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
@@ -24,6 +24,7 @@ import me.lucko.spark.common.command.sender.CommandSender;
 import me.lucko.spark.common.platform.PlatformInfo;
 import me.lucko.spark.common.sampler.node.MergeMode;
 import me.lucko.spark.common.sampler.node.ThreadNode;
+import me.lucko.spark.common.util.ClassSourceLookup;
 import me.lucko.spark.proto.SparkProtos.SamplerData;
 
 import java.io.ByteArrayOutputStream;
@@ -71,16 +72,10 @@ public interface Sampler {
     CompletableFuture<? extends Sampler> getFuture();
 
     // Methods used to export the sampler data to the web viewer.
-    SamplerData toProto(
-            PlatformInfo platformInfo,
-            CommandSender creator,
-            Comparator<? super Map.Entry<String, ThreadNode>> outputOrder,
-            String comment,
-            MergeMode mergeMode
-    );
+    SamplerData toProto(ExportProps props);
 
-    default byte[] formCompressedDataPayload(PlatformInfo platformInfo, CommandSender creator, Comparator<? super Map.Entry<String, ThreadNode>> outputOrder, String comment, MergeMode mergeMode) {
-        SamplerData proto = toProto(platformInfo, creator, outputOrder, comment, mergeMode);
+    default byte[] formCompressedDataPayload(ExportProps props) {
+        SamplerData proto = toProto(props);
 
         ByteArrayOutputStream byteOut = new ByteArrayOutputStream();
         try (OutputStream out = new GZIPOutputStream(byteOut)) {
@@ -91,4 +86,22 @@ public interface Sampler {
         return byteOut.toByteArray();
     }
 
+    class ExportProps {
+        public final PlatformInfo platformInfo;
+        public final CommandSender creator;
+        public final Comparator<? super Map.Entry<String, ThreadNode>> outputOrder;
+        public final String comment;
+        public final MergeMode mergeMode;
+        public final ClassSourceLookup classSourceLookup;
+
+        public ExportProps(PlatformInfo platformInfo, CommandSender creator, Comparator<? super Map.Entry<String, ThreadNode>> outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup) {
+            this.platformInfo = platformInfo;
+            this.creator = creator;
+            this.outputOrder = outputOrder;
+            this.comment = comment;
+            this.mergeMode = mergeMode;
+            this.classSourceLookup = classSourceLookup;
+        }
+    }
+
 }
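
A rough sketch of how the new export call is assembled, mirroring the SamplerModule change above (the wrapping method and its parameters are assumed for illustration and are not part of this commit):

    // Builds the gzip-compressed protobuf payload that gets uploaded to the viewer.
    private static byte[] export(SparkPlatform platform, Sampler sampler, CommandSender sender,
                                 Comparator<? super Map.Entry<String, ThreadNode>> order,
                                 String comment, MergeMode mergeMode) {
        Sampler.ExportProps props = new Sampler.ExportProps(
                platform.getPlugin().getPlatformInfo(), // platform metadata for the viewer
                sender,                                 // who requested the export
                order,                                  // ordering of threads in the output
                comment,                                // optional user comment (may be null)
                mergeMode,                              // how method calls are merged
                platform.getClassSourceLookup()         // new: class -> plugin/jar resolver
        );
        return sampler.formCompressedDataPayload(props);
    }
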
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java
index c76274b..8d57a6d 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java
@@ -20,14 +20,12 @@
 package me.lucko.spark.common.sampler.async;
 
-import me.lucko.spark.common.command.sender.CommandSender;
-import me.lucko.spark.common.platform.PlatformInfo;
 import me.lucko.spark.common.sampler.Sampler;
 import me.lucko.spark.common.sampler.ThreadDumper;
 import me.lucko.spark.common.sampler.ThreadGrouper;
 import me.lucko.spark.common.sampler.async.jfr.JfrReader;
-import me.lucko.spark.common.sampler.node.MergeMode;
 import me.lucko.spark.common.sampler.node.ThreadNode;
+import me.lucko.spark.common.util.ClassSourceLookup;
 
 import me.lucko.spark.proto.SparkProtos;
 
 import one.profiler.AsyncProfiler;
@@ -37,7 +35,6 @@ import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.util.ArrayList;
-import java.util.Comparator;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.CompletableFuture;
@@ -144,17 +141,17 @@ public class AsyncSampler implements Sampler {
     }
 
     @Override
-    public SparkProtos.SamplerData toProto(PlatformInfo platformInfo, CommandSender creator, Comparator<? super Map.Entry<String, ThreadNode>> outputOrder, String comment, MergeMode mergeMode) {
+    public SparkProtos.SamplerData toProto(ExportProps props) {
         final SparkProtos.SamplerMetadata.Builder metadata = SparkProtos.SamplerMetadata.newBuilder()
-                .setPlatformMetadata(platformInfo.toData().toProto())
-                .setCreator(creator.toData().toProto())
+                .setPlatformMetadata(props.platformInfo.toData().toProto())
+                .setCreator(props.creator.toData().toProto())
                 .setStartTime(this.startTime)
                 .setInterval(this.interval)
                 .setThreadDumper(this.threadDumper.getMetadata())
                 .setDataAggregator(this.dataAggregator.getMetadata());
 
-        if (comment != null) {
-            metadata.setComment(comment);
+        if (props.comment != null) {
+            metadata.setComment(props.comment);
         }
 
         SparkProtos.SamplerData.Builder proto = SparkProtos.SamplerData.newBuilder();
@@ -163,10 +160,17 @@ public class AsyncSampler implements Sampler {
         aggregateOutput();
 
         List<Map.Entry<String, ThreadNode>> data = new ArrayList<>(this.dataAggregator.getData().entrySet());
-        data.sort(outputOrder);
+        data.sort(props.outputOrder);
+
+        ClassSourceLookup.Visitor classSourceVisitor = ClassSourceLookup.createVisitor(props.classSourceLookup);
 
         for (Map.Entry<String, ThreadNode> entry : data) {
-            proto.addThreads(entry.getValue().toProto(mergeMode));
+            proto.addThreads(entry.getValue().toProto(props.mergeMode));
+            classSourceVisitor.visit(entry.getValue());
+        }
+
+        if (classSourceVisitor.hasMappings()) {
+            proto.putAllClassSources(classSourceVisitor.getMapping());
         }
 
         return proto.build();
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java
index 5fe5add..23d38d8 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java
@@ -23,14 +23,12 @@
 package me.lucko.spark.common.sampler.java;
 
 import com.google.common.util.concurrent.ThreadFactoryBuilder;
 
-import me.lucko.spark.common.command.sender.CommandSender;
-import me.lucko.spark.common.platform.PlatformInfo;
 import me.lucko.spark.common.sampler.Sampler;
 import me.lucko.spark.common.sampler.ThreadDumper;
 import me.lucko.spark.common.sampler.ThreadGrouper;
-import me.lucko.spark.common.sampler.node.MergeMode;
 import me.lucko.spark.common.sampler.node.ThreadNode;
 import me.lucko.spark.common.tick.TickHook;
+import me.lucko.spark.common.util.ClassSourceLookup;
 import me.lucko.spark.proto.SparkProtos.SamplerData;
 import me.lucko.spark.proto.SparkProtos.SamplerMetadata;
@@ -38,7 +36,6 @@ import java.lang.management.ManagementFactory;
 import java.lang.management.ThreadInfo;
 import java.lang.management.ThreadMXBean;
 import java.util.ArrayList;
-import java.util.Comparator;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.CompletableFuture;
@@ -161,27 +158,34 @@ public class JavaSampler implements Sampler, Runnable {
     }
 
     @Override
-    public SamplerData toProto(PlatformInfo platformInfo, CommandSender creator, Comparator<? super Map.Entry<String, ThreadNode>> outputOrder, String comment, MergeMode mergeMode) {
+    public SamplerData toProto(ExportProps props) {
         final SamplerMetadata.Builder metadata = SamplerMetadata.newBuilder()
-                .setPlatformMetadata(platformInfo.toData().toProto())
-                .setCreator(creator.toData().toProto())
+                .setPlatformMetadata(props.platformInfo.toData().toProto())
+                .setCreator(props.creator.toData().toProto())
                 .setStartTime(this.startTime)
                 .setInterval(this.interval)
                 .setThreadDumper(this.threadDumper.getMetadata())
                 .setDataAggregator(this.dataAggregator.getMetadata());
 
-        if (comment != null) {
-            metadata.setComment(comment);
+        if (props.comment != null) {
+            metadata.setComment(props.comment);
         }
 
         SamplerData.Builder proto = SamplerData.newBuilder();
         proto.setMetadata(metadata.build());
 
         List<Map.Entry<String, ThreadNode>> data = new ArrayList<>(this.dataAggregator.getData().entrySet());
-        data.sort(outputOrder);
+        data.sort(props.outputOrder);
+
+        ClassSourceLookup.Visitor classSourceVisitor = ClassSourceLookup.createVisitor(props.classSourceLookup);
 
         for (Map.Entry<String, ThreadNode> entry : data) {
-            proto.addThreads(entry.getValue().toProto(mergeMode));
+            proto.addThreads(entry.getValue().toProto(props.mergeMode));
+            classSourceVisitor.visit(entry.getValue());
+        }
+
+        if (classSourceVisitor.hasMappings()) {
+            proto.putAllClassSources(classSourceVisitor.getMapping());
         }
 
         return proto.build();
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java
index 2ef06d3..73f7bd7 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java
@@ -24,6 +24,7 @@ package me.lucko.spark.common.sampler.node;
 import me.lucko.spark.common.sampler.async.AsyncStackTraceElement;
 
 import java.util.ArrayList;
+import java.util.Collection;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
@@ -56,6 +57,10 @@ public abstract class AbstractNode {
         return this.totalTime.longValue() / 1000d;
     }
 
+    public Collection<StackTraceNode> getChildren() {
+        return this.children.values();
+    }
+
     /**
      * Merge {@code other} into {@code this}.
      *
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java
index 4179464..efc7f81 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java
@@ -51,6 +51,10 @@ public final class StackTraceNode extends AbstractNode implements Comparable<Sta
         return this.description.methodName;
     }
 
+    public String getMethodDescription() {
+        return this.description.methodDescription;
+    }
+
     public int getLineNumber() {
         return this.description.lineNumber;
     }
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/ClassFinder.java b/spark-common/src/main/java/me/lucko/spark/common/util/ClassFinder.java
new file mode 100644
index 0000000..4481786
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/ClassFinder.java
@@ -0,0 +1,71 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.util;
+
+import net.bytebuddy.agent.ByteBuddyAgent;
+
+import org.checkerframework.checker.nullness.qual.Nullable;
+
+import java.lang.instrument.Instrumentation;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Uses {@link Instrumentation} to find a class reference for given class names.
+ *
+ * <p>This is necessary as we don't always have access to the classloader for a given class.</p>
+ */
+public class ClassFinder {
+
+    private final Map<String, Class<?>> classes = new HashMap<>();
+
+    public ClassFinder() {
+        Instrumentation instrumentation;
+        try {
+            instrumentation = ByteBuddyAgent.install();
+        } catch (Exception e) {
+            return;
+        }
+
+        // obtain and cache loaded classes
+        for (Class<?> loadedClass : instrumentation.getAllLoadedClasses()) {
+            this.classes.put(loadedClass.getName(), loadedClass);
+        }
+    }
+
+    public @Nullable Class<?> findClass(String className) {
+        // try instrumentation
+        Class<?> clazz = this.classes.get(className);
+        if (clazz != null) {
+            return clazz;
+        }
+
+        // try Class.forName
+        try {
+            return Class.forName(className);
+        } catch (Throwable e) {
+            // ignore
+        }
+
+        return null;
+    }
+
+}
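
A rough usage sketch of the new helper (the class name being looked up is a made-up example; byte-buddy-agent is assumed to be available at runtime, as the import above implies):

    ClassFinder finder = new ClassFinder();

    // Returns the already-loaded class if the instrumentation snapshot contains it,
    // otherwise falls back to Class.forName, otherwise null.
    Class<?> clazz = finder.findClass("com.example.SomePluginClass");
    if (clazz != null) {
        System.out.println(clazz.getName() + " via " + clazz.getClassLoader());
    }
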
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/ClassSourceLookup.java b/spark-common/src/main/java/me/lucko/spark/common/util/ClassSourceLookup.java
new file mode 100644
index 0000000..27e3ec6
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/ClassSourceLookup.java
@@ -0,0 +1,223 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.util;
+
+import me.lucko.spark.common.sampler.node.StackTraceNode;
+import me.lucko.spark.common.sampler.node.ThreadNode;
+
+import org.checkerframework.checker.nullness.qual.Nullable;
+
+import java.io.IOException;
+import java.net.URISyntaxException;
+import java.net.URL;
+import java.net.URLClassLoader;
+import java.nio.file.Paths;
+import java.security.CodeSource;
+import java.security.ProtectionDomain;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Objects;
+
+/**
+ * A function which defines the source of given {@link Class}es.
+ */
+public interface ClassSourceLookup {
+
+    /**
+     * Identify the given class.
+     *
+     * @param clazz the class
+     * @return the source of the class
+     */
+    @Nullable String identify(Class<?> clazz) throws Exception;
+
+    /**
+     * A no-operation {@link ClassSourceLookup}.
+     */
+    ClassSourceLookup NO_OP = new ClassSourceLookup() {
+        @Override
+        public @Nullable String identify(Class<?> clazz) {
+            return null;
+        }
+    };
+
+    /**
+     * A {@link ClassSourceLookup} which identifies classes based on their {@link ClassLoader}.
+     */
+    abstract class ByClassLoader implements ClassSourceLookup {
+
+        public abstract @Nullable String identify(ClassLoader loader) throws Exception;
+
+        @Override
+        public final @Nullable String identify(Class<?> clazz) throws Exception {
+            ClassLoader loader = clazz.getClassLoader();
+            while (loader != null) {
+                String source = identify(loader);
+                if (source != null) {
+                    return source;
+                }
+                loader = loader.getParent();
+            }
+            return null;
+        }
+    }
+
+    /**
+     * A {@link ClassSourceLookup} which identifies classes based on the first URL in a {@link URLClassLoader}.
+     */
+    class ByFirstUrlSource extends ByClassLoader {
+        @Override
+        public @Nullable String identify(ClassLoader loader) throws IOException, URISyntaxException {
+            if (loader instanceof URLClassLoader) {
+                URLClassLoader urlClassLoader = (URLClassLoader) loader;
+                URL[] urls = urlClassLoader.getURLs();
+                if (urls.length == 0) {
+                    return null;
+                }
+                return identifyUrl(urls[0]);
+            }
+            return null;
+        }
+    }
+
+    /**
+     * A {@link ClassSourceLookup} which identifies classes based on their {@link ProtectionDomain#getCodeSource()}.
+     */
+    class ByCodeSource implements ClassSourceLookup {
+        @Override
+        public @Nullable String identify(Class<?> clazz) throws URISyntaxException {
+            ProtectionDomain protectionDomain = clazz.getProtectionDomain();
+            if (protectionDomain == null) {
+                return null;
+            }
+            CodeSource codeSource = protectionDomain.getCodeSource();
+            if (codeSource == null) {
+                return null;
+            }
+
+            URL url = codeSource.getLocation();
+            return url == null ? null : identifyUrl(url);
+        }
+    }
+
+    /**
+     * Attempts to identify a jar file from a URL.
+     *
+     * @param url the url
+     * @return the name of the file
+     * @throws URISyntaxException thrown by {@link URL#toURI()}
+     */
+    static String identifyUrl(URL url) throws URISyntaxException {
+        if (url.getProtocol().equals("file")) {
+            String jarName = Paths.get(url.toURI()).getFileName().toString();
+            if (jarName.endsWith(".jar")) {
+                return jarName.substring(0, jarName.length() - 4);
+            }
+        }
+        return null;
+    }
+    interface Visitor {
+        void visit(ThreadNode node);
+
+        boolean hasMappings();
+
+        Map<String, String> getMapping();
+    }
+
+    static Visitor createVisitor(ClassSourceLookup lookup) {
+        if (lookup == ClassSourceLookup.NO_OP) {
+            return NoOpVistitor.INSTANCE; // don't bother!
+        }
+        return new VisitorImpl(lookup);
+    }
+
+    enum NoOpVistitor implements Visitor {
+        INSTANCE;
+
+        @Override
+        public void visit(ThreadNode node) {
+
+        }
+
+        @Override
+        public boolean hasMappings() {
+            return false;
+        }
+
+        @Override
+        public Map<String, String> getMapping() {
+            return Collections.emptyMap();
+        }
+    }
+
+    /**
+     * Visitor which scans {@link StackTraceNode}s and accumulates class identities.
+     */
+    class VisitorImpl implements Visitor {
+        private final ClassSourceLookup lookup;
+        private final ClassFinder classFinder = new ClassFinder();
+
+        // class name --> identifier (plugin name)
+        private final Map<String, String> map = new HashMap<>();
+
+        VisitorImpl(ClassSourceLookup lookup) {
+            this.lookup = lookup;
+        }
+
+        @Override
+        public void visit(ThreadNode node) {
+            for (StackTraceNode child : node.getChildren()) {
+                visitStackNode(child);
+            }
+        }
+
+        @Override
+        public boolean hasMappings() {
+            return !this.map.isEmpty();
+        }
+
+        @Override
+        public Map<String, String> getMapping() {
+            this.map.values().removeIf(Objects::isNull);
+            return this.map;
+        }
+
+        private void visitStackNode(StackTraceNode node) {
+            String className = node.getClassName();
+            if (!this.map.containsKey(className)) {
+                try {
+                    Class<?> clazz = this.classFinder.findClass(className);
+                    Objects.requireNonNull(clazz);
+                    this.map.put(className, this.lookup.identify(clazz));
+                } catch (Throwable e) {
+                    this.map.put(className, null);
+                }
+            }
+
+            // recursively
+            for (StackTraceNode child : node.getChildren()) {
+                visitStackNode(child);
+            }
+        }
+    }
+
+}
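
For a platform that tracks its own plugin class loaders, an implementation of the new interface might look roughly like this (a sketch only; ExamplePluginClassLoader and getPluginName() are hypothetical stand-ins for whatever loader type the platform actually provides):

    // ByClassLoader walks the loader hierarchy, so only the platform-specific
    // loader type needs to be recognised here.
    public class ExampleClassSourceLookup extends ClassSourceLookup.ByClassLoader {
        @Override
        public String identify(ClassLoader loader) {
            if (loader instanceof ExamplePluginClassLoader) {
                return ((ExamplePluginClassLoader) loader).getPluginName();
            }
            return null; // unknown loader: keep walking up to the parent
        }
    }
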
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/MethodDisambiguator.java b/spark-common/src/main/java/me/lucko/spark/common/util/MethodDisambiguator.java
index 2e113a9..a35bf08 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/util/MethodDisambiguator.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/MethodDisambiguator.java
@@ -48,6 +48,11 @@ public final class MethodDisambiguator {
     private final Map<String, ComputedClass> cache = new ConcurrentHashMap<>();
 
     public Optional<MethodDescription> disambiguate(StackTraceNode element) {
+        String desc = element.getMethodDescription();
+        if (desc != null) {
+            return Optional.of(new MethodDescription(element.getMethodName(), desc));
+        }
+
         return disambiguate(element.getClassName(), element.getMethodName(), element.getLineNumber());
     }
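
The MethodDisambiguator change above short-circuits when a frame already carries its JVM method descriptor; a rough sketch of the effect (node is assumed to be such a StackTraceNode, and MethodDisambiguator is assumed to have the no-arg constructor implied by the field initialiser shown above):

    MethodDisambiguator disambiguator = new MethodDisambiguator();

    // getMethodDescription() != null, so the descriptor recorded by the sampler is
    // reused directly to tell overloads apart - no bytecode parsing and no reliance
    // on line numbers is needed for this frame.
    boolean resolved = disambiguator.disambiguate(node).isPresent();
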