author     Luck <git@lucko.me>    2021-12-28 22:31:20 +0000
committer  Luck <git@lucko.me>    2021-12-28 22:31:20 +0000
commit     7e96297d27f78b14354ee1391dc3808b240b19f7 (patch)
tree       52fbd4b7a228101b31c565609f65d4f5d659f1c7 /spark-common/src/main/java
parent     92669a73490e526459457f4dbd5fff116a628870 (diff)
Use try catch for system stat collection
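
The diff below wraps each statistics lookup in its own try/catch so that a failing provider no longer aborts building the rest of the proto metadata; the exception is logged and the corresponding field is simply left unset. A minimal sketch of the pattern, using the builder and provider calls that appear in the hunks below:

    try {
        // system statistics come from the platform's statistics provider; if this call
        // throws, the stack trace is printed and the field is left unset in the metadata
        metadata.setSystemStatistics(platform.getStatisticsProvider().getSystemStatistics());
    } catch (Exception e) {
        e.printStackTrace();
    }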
Diffstat (limited to 'spark-common/src/main/java')
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java    | 23
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java     | 57
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java  | 35
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java    | 35
4 files changed, 76 insertions(+), 74 deletions(-)
diff --git a/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java b/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java
index 7bb411d..c0980e7 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java
@@ -126,14 +126,23 @@ public final class HeapDumpSummary {
}
public HeapData toProto(SparkPlatform platform, CommandSender creator) {
- HeapData.Builder proto = HeapData.newBuilder();
- proto.setMetadata(HeapMetadata.newBuilder()
+ HeapMetadata.Builder metadata = HeapMetadata.newBuilder()
.setPlatformMetadata(platform.getPlugin().getPlatformInfo().toData().toProto())
- .setPlatformStatistics(platform.getStatisticsProvider().getPlatformStatistics(null))
- .setSystemStatistics(platform.getStatisticsProvider().getSystemStatistics())
- .setCreator(creator.toData().toProto())
- .build()
- );
+ .setCreator(creator.toData().toProto());
+ try {
+ metadata.setPlatformStatistics(platform.getStatisticsProvider().getPlatformStatistics(null));
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ metadata.setSystemStatistics(platform.getStatisticsProvider().getSystemStatistics());
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ HeapData.Builder proto = HeapData.newBuilder();
+ proto.setMetadata(metadata);
for (Entry entry : this.entries) {
proto.addEntries(entry.toProto());
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java
index 568b59d..34abdfa 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java
@@ -20,8 +20,19 @@
package me.lucko.spark.common.sampler;
+import me.lucko.spark.common.SparkPlatform;
+import me.lucko.spark.common.command.sender.CommandSender;
import me.lucko.spark.common.monitor.memory.GarbageCollectorStatistics;
-
+import me.lucko.spark.common.sampler.aggregator.DataAggregator;
+import me.lucko.spark.common.sampler.node.MergeMode;
+import me.lucko.spark.common.sampler.node.ThreadNode;
+import me.lucko.spark.common.util.ClassSourceLookup;
+import me.lucko.spark.proto.SparkSamplerProtos.SamplerData;
+import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata;
+
+import java.util.ArrayList;
+import java.util.Comparator;
+import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
@@ -79,4 +90,48 @@ public abstract class AbstractSampler implements Sampler {
protected Map<String, GarbageCollectorStatistics> getInitialGcStats() {
return this.initialGcStats;
}
+
+ protected void writeMetadataToProto(SamplerData.Builder proto, SparkPlatform platform, CommandSender creator, String comment, DataAggregator dataAggregator) {
+ SamplerMetadata.Builder metadata = SamplerMetadata.newBuilder()
+ .setPlatformMetadata(platform.getPlugin().getPlatformInfo().toData().toProto())
+ .setCreator(creator.toData().toProto())
+ .setStartTime(this.startTime)
+ .setInterval(this.interval)
+ .setThreadDumper(this.threadDumper.getMetadata())
+ .setDataAggregator(dataAggregator.getMetadata());
+
+ if (comment != null) {
+ metadata.setComment(comment);
+ }
+
+ try {
+ metadata.setPlatformStatistics(platform.getStatisticsProvider().getPlatformStatistics(getInitialGcStats()));
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ metadata.setSystemStatistics(platform.getStatisticsProvider().getSystemStatistics());
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ proto.setMetadata(metadata);
+ }
+
+ protected void writeDataToProto(SamplerData.Builder proto, DataAggregator dataAggregator, Comparator<? super Map.Entry<String, ThreadNode>> outputOrder, MergeMode mergeMode, ClassSourceLookup classSourceLookup) {
+ List<Map.Entry<String, ThreadNode>> data = new ArrayList<>(dataAggregator.getData().entrySet());
+ data.sort(outputOrder);
+
+ ClassSourceLookup.Visitor classSourceVisitor = ClassSourceLookup.createVisitor(classSourceLookup);
+
+ for (Map.Entry<String, ThreadNode> entry : data) {
+ proto.addThreads(entry.getValue().toProto(mergeMode));
+ classSourceVisitor.visit(entry.getValue());
+ }
+
+ if (classSourceVisitor.hasMappings()) {
+ proto.putAllClassSources(classSourceVisitor.getMapping());
+ }
+ }
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java
index db8802c..62325ae 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java
@@ -33,7 +33,6 @@ import me.lucko.spark.common.sampler.node.ThreadNode;
import me.lucko.spark.common.util.ClassSourceLookup;
import me.lucko.spark.common.util.TemporaryFiles;
import me.lucko.spark.proto.SparkSamplerProtos.SamplerData;
-import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata;
import one.profiler.AsyncProfiler;
@@ -41,7 +40,6 @@ import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
-import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
@@ -157,39 +155,10 @@ public class AsyncSampler extends AbstractSampler {
@Override
public SamplerData toProto(SparkPlatform platform, CommandSender creator, Comparator<? super Map.Entry<String, ThreadNode>> outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup) {
- final SamplerMetadata.Builder metadata = SamplerMetadata.newBuilder()
- .setPlatformMetadata(platform.getPlugin().getPlatformInfo().toData().toProto())
- .setPlatformStatistics(platform.getStatisticsProvider().getPlatformStatistics(getInitialGcStats()))
- .setSystemStatistics(platform.getStatisticsProvider().getSystemStatistics())
- .setCreator(creator.toData().toProto())
- .setStartTime(this.startTime)
- .setInterval(this.interval)
- .setThreadDumper(this.threadDumper.getMetadata())
- .setDataAggregator(this.dataAggregator.getMetadata());
-
- if (comment != null) {
- metadata.setComment(comment);
- }
-
SamplerData.Builder proto = SamplerData.newBuilder();
- proto.setMetadata(metadata.build());
-
+ writeMetadataToProto(proto, platform, creator, comment, this.dataAggregator);
aggregateOutput();
-
- List<Map.Entry<String, ThreadNode>> data = new ArrayList<>(this.dataAggregator.getData().entrySet());
- data.sort(outputOrder);
-
- ClassSourceLookup.Visitor classSourceVisitor = ClassSourceLookup.createVisitor(classSourceLookup);
-
- for (Map.Entry<String, ThreadNode> entry : data) {
- proto.addThreads(entry.getValue().toProto(mergeMode));
- classSourceVisitor.visit(entry.getValue());
- }
-
- if (classSourceVisitor.hasMappings()) {
- proto.putAllClassSources(classSourceVisitor.getMapping());
- }
-
+ writeDataToProto(proto, this.dataAggregator, outputOrder, mergeMode, classSourceLookup);
return proto.build();
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java
index c873f9f..d2959bd 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java
@@ -33,14 +33,11 @@ import me.lucko.spark.common.sampler.node.ThreadNode;
import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.common.util.ClassSourceLookup;
import me.lucko.spark.proto.SparkSamplerProtos.SamplerData;
-import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata;
import java.lang.management.ManagementFactory;
import java.lang.management.ThreadInfo;
import java.lang.management.ThreadMXBean;
-import java.util.ArrayList;
import java.util.Comparator;
-import java.util.List;
import java.util.Map;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
@@ -130,37 +127,9 @@ public class JavaSampler extends AbstractSampler implements Runnable {
@Override
public SamplerData toProto(SparkPlatform platform, CommandSender creator, Comparator<? super Map.Entry<String, ThreadNode>> outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup) {
- final SamplerMetadata.Builder metadata = SamplerMetadata.newBuilder()
- .setPlatformMetadata(platform.getPlugin().getPlatformInfo().toData().toProto())
- .setPlatformStatistics(platform.getStatisticsProvider().getPlatformStatistics(getInitialGcStats()))
- .setSystemStatistics(platform.getStatisticsProvider().getSystemStatistics())
- .setCreator(creator.toData().toProto())
- .setStartTime(this.startTime)
- .setInterval(this.interval)
- .setThreadDumper(this.threadDumper.getMetadata())
- .setDataAggregator(this.dataAggregator.getMetadata());
-
- if (comment != null) {
- metadata.setComment(comment);
- }
-
SamplerData.Builder proto = SamplerData.newBuilder();
- proto.setMetadata(metadata.build());
-
- List<Map.Entry<String, ThreadNode>> data = new ArrayList<>(this.dataAggregator.getData().entrySet());
- data.sort(outputOrder);
-
- ClassSourceLookup.Visitor classSourceVisitor = ClassSourceLookup.createVisitor(classSourceLookup);
-
- for (Map.Entry<String, ThreadNode> entry : data) {
- proto.addThreads(entry.getValue().toProto(mergeMode));
- classSourceVisitor.visit(entry.getValue());
- }
-
- if (classSourceVisitor.hasMappings()) {
- proto.putAllClassSources(classSourceVisitor.getMapping());
- }
-
+ writeMetadataToProto(proto, platform, creator, comment, this.dataAggregator);
+ writeDataToProto(proto, this.dataAggregator, outputOrder, mergeMode, classSourceLookup);
return proto.build();
}
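
Alongside the try/catch change, the commit lifts the proto-building logic that was duplicated in AsyncSampler and JavaSampler into two protected helpers on AbstractSampler (writeMetadataToProto and writeDataToProto), so both samplers reduce to the same sequence. A sketch of the resulting toProto shape, assembled from the hunks above (AsyncSampler additionally calls aggregateOutput() before writing the thread data):

    @Override
    public SamplerData toProto(SparkPlatform platform, CommandSender creator, Comparator<? super Map.Entry<String, ThreadNode>> outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup) {
        SamplerData.Builder proto = SamplerData.newBuilder();
        // metadata, including the now-guarded platform/system statistics
        writeMetadataToProto(proto, platform, creator, comment, this.dataAggregator);
        // sampled thread data plus any class source mappings
        writeDataToProto(proto, this.dataAggregator, outputOrder, mergeMode, classSourceLookup);
        return proto.build();
    }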