author     Luck <git@lucko.me>    2021-07-18 21:31:32 +0100
committer  Luck <git@lucko.me>    2021-07-18 21:31:32 +0100
commit     8204f693071ed7411300231cba8225b7846cc500 (patch)
tree       a6d6c4959b7280a3def22a43dbf7e2a3c6046bd9 /spark-common/src/main/java/me/lucko/spark/common/sampler
parent     c31c7033318509e53c790f6cd6618cb94dca3af7 (diff)
Add flag to save profile/heapsummary to files instead of uploading to bytebin
Diffstat (limited to 'spark-common/src/main/java/me/lucko/spark/common/sampler')
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java            | 36
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java | 20
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java   | 20
3 files changed, 25 insertions, 51 deletions
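The diff below covers only the sampler side of this change: the export helpers come off the Sampler interface so the command layer can decide what to do with the serialized data. As a rough illustration of the behaviour described by the commit message, here is a minimal sketch of that decision point; the flag, the file-name scheme and the uploader shape are assumptions for illustration and do not appear in this diff.

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.function.Function;

// Hedged sketch: the flag, the file-name scheme and the uploader shape are
// illustrative assumptions; none of these identifiers come from this diff.
public final class PayloadExport {
    private PayloadExport() {}

    public static String export(byte[] payload, boolean saveToFile, Function<byte[], String> bytebinUpload) throws IOException {
        if (saveToFile) {
            // behaviour the commit message adds: write the compressed payload to a local file
            Path file = Paths.get("spark-" + System.currentTimeMillis() + ".sparkprofile");
            Files.write(file, payload);
            return "saved to " + file;
        }
        // previous behaviour: upload to bytebin and share the returned key/URL
        return "uploaded: " + bytebinUpload.apply(payload);
    }
}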
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
index bc08dfd..b512bc1 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
@@ -27,13 +27,9 @@ import me.lucko.spark.common.sampler.node.ThreadNode;
import me.lucko.spark.common.util.ClassSourceLookup;
import me.lucko.spark.proto.SparkProtos.SamplerData;
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.io.OutputStream;
import java.util.Comparator;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
-import java.util.zip.GZIPOutputStream;
/**
* Abstract superinterface for all sampler implementations.
@@ -72,36 +68,6 @@ public interface Sampler {
CompletableFuture<? extends Sampler> getFuture();
// Methods used to export the sampler data to the web viewer.
- SamplerData toProto(ExportProps props);
-
- default byte[] formCompressedDataPayload(ExportProps props) {
- SamplerData proto = toProto(props);
-
- ByteArrayOutputStream byteOut = new ByteArrayOutputStream();
- try (OutputStream out = new GZIPOutputStream(byteOut)) {
- proto.writeTo(out);
- } catch (IOException e) {
- throw new RuntimeException(e);
- }
- return byteOut.toByteArray();
- }
-
- class ExportProps {
- public final PlatformInfo platformInfo;
- public final CommandSender creator;
- public final Comparator<? super Map.Entry<String, ThreadNode>> outputOrder;
- public final String comment;
- public final MergeMode mergeMode;
- public final ClassSourceLookup classSourceLookup;
-
- public ExportProps(PlatformInfo platformInfo, CommandSender creator, Comparator<? super Map.Entry<String, ThreadNode>> outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup) {
- this.platformInfo = platformInfo;
- this.creator = creator;
- this.outputOrder = outputOrder;
- this.comment = comment;
- this.mergeMode = mergeMode;
- this.classSourceLookup = classSourceLookup;
- }
- }
+ SamplerData toProto(PlatformInfo platformInfo, CommandSender creator, Comparator<? super Map.Entry<String, ThreadNode>> outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup);
}
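The deleted formCompressedDataPayload default method was the only serialization logic on the interface; with the flat toProto(...) signature, the gzip step presumably moves to whichever caller exports the data. A sketch of an equivalent helper at the call site, mirroring the removed body (the class name is illustrative):

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.zip.GZIPOutputStream;

import me.lucko.spark.proto.SparkProtos.SamplerData;

// Mirrors the deleted formCompressedDataPayload default method; assumed to live
// at the call site now that the interface only exposes toProto(...).
public final class SamplerPayloads {
    private SamplerPayloads() {}

    public static byte[] compress(SamplerData proto) {
        ByteArrayOutputStream byteOut = new ByteArrayOutputStream();
        try (OutputStream out = new GZIPOutputStream(byteOut)) {
            proto.writeTo(out); // protobuf messages serialize straight onto the stream
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
        return byteOut.toByteArray();
    }
}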
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java
index 8d57a6d..ca30df0 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java
@@ -20,10 +20,13 @@
package me.lucko.spark.common.sampler.async;
+import me.lucko.spark.common.command.sender.CommandSender;
+import me.lucko.spark.common.platform.PlatformInfo;
import me.lucko.spark.common.sampler.Sampler;
import me.lucko.spark.common.sampler.ThreadDumper;
import me.lucko.spark.common.sampler.ThreadGrouper;
import me.lucko.spark.common.sampler.async.jfr.JfrReader;
+import me.lucko.spark.common.sampler.node.MergeMode;
import me.lucko.spark.common.sampler.node.ThreadNode;
import me.lucko.spark.common.util.ClassSourceLookup;
import me.lucko.spark.proto.SparkProtos;
@@ -35,6 +38,7 @@ import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
+import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
@@ -141,17 +145,17 @@ public class AsyncSampler implements Sampler {
}
@Override
- public SparkProtos.SamplerData toProto(ExportProps props) {
+ public SparkProtos.SamplerData toProto(PlatformInfo platformInfo, CommandSender creator, Comparator<? super Map.Entry<String, ThreadNode>> outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup) {
final SparkProtos.SamplerMetadata.Builder metadata = SparkProtos.SamplerMetadata.newBuilder()
- .setPlatformMetadata(props.platformInfo.toData().toProto())
- .setCreator(props.creator.toData().toProto())
+ .setPlatformMetadata(platformInfo.toData().toProto())
+ .setCreator(creator.toData().toProto())
.setStartTime(this.startTime)
.setInterval(this.interval)
.setThreadDumper(this.threadDumper.getMetadata())
.setDataAggregator(this.dataAggregator.getMetadata());
- if (props.comment != null) {
- metadata.setComment(props.comment);
+ if (comment != null) {
+ metadata.setComment(comment);
}
SparkProtos.SamplerData.Builder proto = SparkProtos.SamplerData.newBuilder();
@@ -160,12 +164,12 @@ public class AsyncSampler implements Sampler {
aggregateOutput();
List<Map.Entry<String, ThreadNode>> data = new ArrayList<>(this.dataAggregator.getData().entrySet());
- data.sort(props.outputOrder);
+ data.sort(outputOrder);
- ClassSourceLookup.Visitor classSourceVisitor = ClassSourceLookup.createVisitor(props.classSourceLookup);
+ ClassSourceLookup.Visitor classSourceVisitor = ClassSourceLookup.createVisitor(classSourceLookup);
for (Map.Entry<String, ThreadNode> entry : data) {
- proto.addThreads(entry.getValue().toProto(props.mergeMode));
+ proto.addThreads(entry.getValue().toProto(mergeMode));
classSourceVisitor.visit(entry.getValue());
}
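The AsyncSampler change above is purely mechanical, and the JavaSampler change below is identical: the ExportProps bundle is unwrapped into six direct parameters. A hedged sketch of how a call site adapts, wrapped in an illustrative helper so the types line up (the class and method names are not from this diff):

import java.util.Comparator;
import java.util.Map;

import me.lucko.spark.common.command.sender.CommandSender;
import me.lucko.spark.common.platform.PlatformInfo;
import me.lucko.spark.common.sampler.Sampler;
import me.lucko.spark.common.sampler.node.MergeMode;
import me.lucko.spark.common.sampler.node.ThreadNode;
import me.lucko.spark.common.util.ClassSourceLookup;
import me.lucko.spark.proto.SparkProtos.SamplerData;

// Illustrative call site: the ExportProps bundle from before this commit is
// simply unwrapped into the six direct parameters of the new signature.
public final class ExportCallSite {
    private ExportCallSite() {}

    public static SamplerData export(Sampler sampler,
                                     PlatformInfo platformInfo,
                                     CommandSender creator,
                                     Comparator<? super Map.Entry<String, ThreadNode>> outputOrder,
                                     String comment,
                                     MergeMode mergeMode,
                                     ClassSourceLookup classSourceLookup) {
        // before: sampler.toProto(new Sampler.ExportProps(platformInfo, creator,
        //                 outputOrder, comment, mergeMode, classSourceLookup));
        return sampler.toProto(platformInfo, creator, outputOrder, comment, mergeMode, classSourceLookup);
    }
}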
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java
index 23d38d8..a7204b3 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java
@@ -23,9 +23,12 @@ package me.lucko.spark.common.sampler.java;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
+import me.lucko.spark.common.command.sender.CommandSender;
+import me.lucko.spark.common.platform.PlatformInfo;
import me.lucko.spark.common.sampler.Sampler;
import me.lucko.spark.common.sampler.ThreadDumper;
import me.lucko.spark.common.sampler.ThreadGrouper;
+import me.lucko.spark.common.sampler.node.MergeMode;
import me.lucko.spark.common.sampler.node.ThreadNode;
import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.common.util.ClassSourceLookup;
@@ -36,6 +39,7 @@ import java.lang.management.ManagementFactory;
import java.lang.management.ThreadInfo;
import java.lang.management.ThreadMXBean;
import java.util.ArrayList;
+import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
@@ -158,29 +162,29 @@ public class JavaSampler implements Sampler, Runnable {
}
@Override
- public SamplerData toProto(ExportProps props) {
+ public SamplerData toProto(PlatformInfo platformInfo, CommandSender creator, Comparator<? super Map.Entry<String, ThreadNode>> outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup) {
final SamplerMetadata.Builder metadata = SamplerMetadata.newBuilder()
- .setPlatformMetadata(props.platformInfo.toData().toProto())
- .setCreator(props.creator.toData().toProto())
+ .setPlatformMetadata(platformInfo.toData().toProto())
+ .setCreator(creator.toData().toProto())
.setStartTime(this.startTime)
.setInterval(this.interval)
.setThreadDumper(this.threadDumper.getMetadata())
.setDataAggregator(this.dataAggregator.getMetadata());
- if (props.comment != null) {
- metadata.setComment(props.comment);
+ if (comment != null) {
+ metadata.setComment(comment);
}
SamplerData.Builder proto = SamplerData.newBuilder();
proto.setMetadata(metadata.build());
List<Map.Entry<String, ThreadNode>> data = new ArrayList<>(this.dataAggregator.getData().entrySet());
- data.sort(props.outputOrder);
+ data.sort(outputOrder);
- ClassSourceLookup.Visitor classSourceVisitor = ClassSourceLookup.createVisitor(props.classSourceLookup);
+ ClassSourceLookup.Visitor classSourceVisitor = ClassSourceLookup.createVisitor(classSourceLookup);
for (Map.Entry<String, ThreadNode> entry : data) {
- proto.addThreads(entry.getValue().toProto(props.mergeMode));
+ proto.addThreads(entry.getValue().toProto(mergeMode));
classSourceVisitor.visit(entry.getValue());
}