path: root/spark-common/src/main/java/me/lucko/spark/common/sampler
author     Luck <git@lucko.me>  2021-05-27 23:27:38 +0100
committer  Luck <git@lucko.me>  2021-05-30 21:51:37 +0100
commit     f5bb628319d57c8d1ed26e1673d9f781cc939f83 (patch)
tree       dabba3da9c6bc37447d6eacfd42eddb30e6fd2d2 /spark-common/src/main/java/me/lucko/spark/common/sampler
parent     767995e05d46b416292a713756782f939b16f61f (diff)
Extract class sources (plugin/mod names) and include in data payload
Diffstat (limited to 'spark-common/src/main/java/me/lucko/spark/common/sampler')
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java             | 31
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java  | 26
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java    | 26
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java   |  5
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java |  4
5 files changed, 61 insertions, 31 deletions
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
index 5088ed7..bc08dfd 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
@@ -24,6 +24,7 @@ import me.lucko.spark.common.command.sender.CommandSender;
import me.lucko.spark.common.platform.PlatformInfo;
import me.lucko.spark.common.sampler.node.MergeMode;
import me.lucko.spark.common.sampler.node.ThreadNode;
+import me.lucko.spark.common.util.ClassSourceLookup;
import me.lucko.spark.proto.SparkProtos.SamplerData;
import java.io.ByteArrayOutputStream;
@@ -71,16 +72,10 @@ public interface Sampler {
CompletableFuture<? extends Sampler> getFuture();
// Methods used to export the sampler data to the web viewer.
- SamplerData toProto(
- PlatformInfo platformInfo,
- CommandSender creator,
- Comparator<? super Map.Entry<String, ThreadNode>> outputOrder,
- String comment,
- MergeMode mergeMode
- );
+ SamplerData toProto(ExportProps props);
- default byte[] formCompressedDataPayload(PlatformInfo platformInfo, CommandSender creator, Comparator<? super Map.Entry<String, ThreadNode>> outputOrder, String comment, MergeMode mergeMode) {
- SamplerData proto = toProto(platformInfo, creator, outputOrder, comment, mergeMode);
+ default byte[] formCompressedDataPayload(ExportProps props) {
+ SamplerData proto = toProto(props);
ByteArrayOutputStream byteOut = new ByteArrayOutputStream();
try (OutputStream out = new GZIPOutputStream(byteOut)) {
@@ -91,4 +86,22 @@ public interface Sampler {
return byteOut.toByteArray();
}
+ class ExportProps {
+ public final PlatformInfo platformInfo;
+ public final CommandSender creator;
+ public final Comparator<? super Map.Entry<String, ThreadNode>> outputOrder;
+ public final String comment;
+ public final MergeMode mergeMode;
+ public final ClassSourceLookup classSourceLookup;
+
+ public ExportProps(PlatformInfo platformInfo, CommandSender creator, Comparator<? super Map.Entry<String, ThreadNode>> outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup) {
+ this.platformInfo = platformInfo;
+ this.creator = creator;
+ this.outputOrder = outputOrder;
+ this.comment = comment;
+ this.mergeMode = mergeMode;
+ this.classSourceLookup = classSourceLookup;
+ }
+ }
+
}
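
The Sampler.java change collapses the five separate toProto(...) arguments into a single ExportProps value object, so export call sites build one props instance and hand it to toProto or formCompressedDataPayload. A minimal caller sketch, assuming the usual objects (platformInfo, sender, mergeMode, classSourceLookup) are already in scope; the variable names are illustrative, not spark's own:

    Sampler.ExportProps props = new Sampler.ExportProps(
            platformInfo,                                    // PlatformInfo for the running platform
            sender,                                          // CommandSender who triggered the export
            Map.Entry.<String, ThreadNode>comparingByKey(),  // order threads by name
            null,                                            // optional comment, may be null
            mergeMode,                                       // MergeMode for identical frames
            classSourceLookup                                // new: resolves classes to plugin/mod names
    );
    byte[] payload = sampler.formCompressedDataPayload(props); // gzip-compressed SamplerData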
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java
index c76274b..8d57a6d 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java
@@ -20,14 +20,12 @@
package me.lucko.spark.common.sampler.async;
-import me.lucko.spark.common.command.sender.CommandSender;
-import me.lucko.spark.common.platform.PlatformInfo;
import me.lucko.spark.common.sampler.Sampler;
import me.lucko.spark.common.sampler.ThreadDumper;
import me.lucko.spark.common.sampler.ThreadGrouper;
import me.lucko.spark.common.sampler.async.jfr.JfrReader;
-import me.lucko.spark.common.sampler.node.MergeMode;
import me.lucko.spark.common.sampler.node.ThreadNode;
+import me.lucko.spark.common.util.ClassSourceLookup;
import me.lucko.spark.proto.SparkProtos;
import one.profiler.AsyncProfiler;
@@ -37,7 +35,6 @@ import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
-import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
@@ -144,17 +141,17 @@ public class AsyncSampler implements Sampler {
}
@Override
- public SparkProtos.SamplerData toProto(PlatformInfo platformInfo, CommandSender creator, Comparator<? super Map.Entry<String, ThreadNode>> outputOrder, String comment, MergeMode mergeMode) {
+ public SparkProtos.SamplerData toProto(ExportProps props) {
final SparkProtos.SamplerMetadata.Builder metadata = SparkProtos.SamplerMetadata.newBuilder()
- .setPlatformMetadata(platformInfo.toData().toProto())
- .setCreator(creator.toData().toProto())
+ .setPlatformMetadata(props.platformInfo.toData().toProto())
+ .setCreator(props.creator.toData().toProto())
.setStartTime(this.startTime)
.setInterval(this.interval)
.setThreadDumper(this.threadDumper.getMetadata())
.setDataAggregator(this.dataAggregator.getMetadata());
- if (comment != null) {
- metadata.setComment(comment);
+ if (props.comment != null) {
+ metadata.setComment(props.comment);
}
SparkProtos.SamplerData.Builder proto = SparkProtos.SamplerData.newBuilder();
@@ -163,10 +160,17 @@ public class AsyncSampler implements Sampler {
aggregateOutput();
List<Map.Entry<String, ThreadNode>> data = new ArrayList<>(this.dataAggregator.getData().entrySet());
- data.sort(outputOrder);
+ data.sort(props.outputOrder);
+
+ ClassSourceLookup.Visitor classSourceVisitor = ClassSourceLookup.createVisitor(props.classSourceLookup);
for (Map.Entry<String, ThreadNode> entry : data) {
- proto.addThreads(entry.getValue().toProto(mergeMode));
+ proto.addThreads(entry.getValue().toProto(props.mergeMode));
+ classSourceVisitor.visit(entry.getValue());
+ }
+
+ if (classSourceVisitor.hasMappings()) {
+ proto.putAllClassSources(classSourceVisitor.getMapping());
}
return proto.build();
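
Both samplers now create a ClassSourceLookup.Visitor, feed it every thread's call tree, and copy the resulting class-to-source mapping into the proto via putAllClassSources. The lookup implementation itself lives in me.lucko.spark.common.util and is outside this diffstat-limited view; as a rough, non-spark illustration, a lookup of this kind can resolve a class to the jar it was loaded from:

    // Illustrative only -- not spark's ClassSourceLookup. Maps a class to the file
    // name of the jar that provided it, or null if it has no code source (e.g. JDK classes).
    static String identifySource(Class<?> clazz) {
        java.security.CodeSource codeSource = clazz.getProtectionDomain().getCodeSource();
        if (codeSource == null || codeSource.getLocation() == null) {
            return null;
        }
        String path = codeSource.getLocation().getPath(); // e.g. .../plugins/SomePlugin.jar
        int slash = path.lastIndexOf('/');
        return slash == -1 ? path : path.substring(slash + 1);
    }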
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java
index 5fe5add..23d38d8 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java
@@ -23,14 +23,12 @@ package me.lucko.spark.common.sampler.java;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
-import me.lucko.spark.common.command.sender.CommandSender;
-import me.lucko.spark.common.platform.PlatformInfo;
import me.lucko.spark.common.sampler.Sampler;
import me.lucko.spark.common.sampler.ThreadDumper;
import me.lucko.spark.common.sampler.ThreadGrouper;
-import me.lucko.spark.common.sampler.node.MergeMode;
import me.lucko.spark.common.sampler.node.ThreadNode;
import me.lucko.spark.common.tick.TickHook;
+import me.lucko.spark.common.util.ClassSourceLookup;
import me.lucko.spark.proto.SparkProtos.SamplerData;
import me.lucko.spark.proto.SparkProtos.SamplerMetadata;
@@ -38,7 +36,6 @@ import java.lang.management.ManagementFactory;
import java.lang.management.ThreadInfo;
import java.lang.management.ThreadMXBean;
import java.util.ArrayList;
-import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
@@ -161,27 +158,34 @@ public class JavaSampler implements Sampler, Runnable {
}
@Override
- public SamplerData toProto(PlatformInfo platformInfo, CommandSender creator, Comparator<? super Map.Entry<String, ThreadNode>> outputOrder, String comment, MergeMode mergeMode) {
+ public SamplerData toProto(ExportProps props) {
final SamplerMetadata.Builder metadata = SamplerMetadata.newBuilder()
- .setPlatformMetadata(platformInfo.toData().toProto())
- .setCreator(creator.toData().toProto())
+ .setPlatformMetadata(props.platformInfo.toData().toProto())
+ .setCreator(props.creator.toData().toProto())
.setStartTime(this.startTime)
.setInterval(this.interval)
.setThreadDumper(this.threadDumper.getMetadata())
.setDataAggregator(this.dataAggregator.getMetadata());
- if (comment != null) {
- metadata.setComment(comment);
+ if (props.comment != null) {
+ metadata.setComment(props.comment);
}
SamplerData.Builder proto = SamplerData.newBuilder();
proto.setMetadata(metadata.build());
List<Map.Entry<String, ThreadNode>> data = new ArrayList<>(this.dataAggregator.getData().entrySet());
- data.sort(outputOrder);
+ data.sort(props.outputOrder);
+
+ ClassSourceLookup.Visitor classSourceVisitor = ClassSourceLookup.createVisitor(props.classSourceLookup);
for (Map.Entry<String, ThreadNode> entry : data) {
- proto.addThreads(entry.getValue().toProto(mergeMode));
+ proto.addThreads(entry.getValue().toProto(props.mergeMode));
+ classSourceVisitor.visit(entry.getValue());
+ }
+
+ if (classSourceVisitor.hasMappings()) {
+ proto.putAllClassSources(classSourceVisitor.getMapping());
}
return proto.build();
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java
index 2ef06d3..73f7bd7 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java
@@ -24,6 +24,7 @@ package me.lucko.spark.common.sampler.node;
import me.lucko.spark.common.sampler.async.AsyncStackTraceElement;
import java.util.ArrayList;
+import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
@@ -56,6 +57,10 @@ public abstract class AbstractNode {
return this.totalTime.longValue() / 1000d;
}
+ public Collection<StackTraceNode> getChildren() {
+ return this.children.values();
+ }
+
/**
* Merge {@code other} into {@code this}.
*
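
The new getChildren() accessor is the hook that lets the class-source visitor walk the merged call tree rather than only the root frames. A sketch of that traversal, assuming a hypothetical resolveSource(...) helper and a getClassName() accessor on StackTraceNode alongside the getMethodName() shown below (the real traversal is inside ClassSourceLookup.createVisitor and is not part of this diff):

    // Hypothetical recursive walk; records the first known source per class name.
    void visit(AbstractNode node, Map<String, String> classSources) {
        for (StackTraceNode child : node.getChildren()) {     // accessor added in this commit
            String source = resolveSource(child);             // assumed helper, e.g. backed by a ClassSourceLookup
            if (source != null) {
                classSources.putIfAbsent(child.getClassName(), source); // getClassName() assumed
            }
            visit(child, classSources);                        // recurse into deeper frames
        }
    }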
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java
index 4179464..efc7f81 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java
@@ -51,6 +51,10 @@ public final class StackTraceNode extends AbstractNode implements Comparable<Sta
return this.description.methodName;
}
+ public String getMethodDescription() {
+ return this.description.methodDescription;
+ }
+
public int getLineNumber() {
return this.description.lineNumber;
}
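
getMethodDescription() exposes the stored method description string next to the existing getMethodName() and getLineNumber() accessors, presumably so consumers of the class-source data can tell overloads of the same method name apart. Assuming the description follows the JVM method-descriptor format (not confirmed by this diff), a frame for void handle(String msg, int id) would read roughly:

    node.getMethodName();        // "handle"
    node.getMethodDescription(); // "(Ljava/lang/String;I)V"  -- assumed descriptor form
    node.getLineNumber();        // e.g. 42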