author    Luck <git@lucko.me>    2021-07-18 21:31:32 +0100
committer Luck <git@lucko.me>    2021-07-18 21:31:32 +0100
commit    8204f693071ed7411300231cba8225b7846cc500 (patch)
tree      a6d6c4959b7280a3def22a43dbf7e2a3c6046bd9 /spark-common/src/main/java/me/lucko/spark/common/command
parent    c31c7033318509e53c790f6cd6618cb94dca3af7 (diff)
Add flag to save profile/heapsummary to files instead of uploading to bytebin
Diffstat (limited to 'spark-common/src/main/java/me/lucko/spark/common/command')
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java  77
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java        75
2 files changed, 103 insertions(+), 49 deletions(-)
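
Both modules delegate output-file naming to a new SparkPlatform#resolveSaveFile(String, String) method. That method is added elsewhere in this commit (SparkPlatform.java is outside the path this diffstat is limited to), so it does not appear below; the following is only a sketch of what it presumably does, reconstructed from the directory-creation and timestamp logic the diff removes from heapDump():

// Assumed sketch of SparkPlatform#resolveSaveFile - not the committed implementation.
// Resolves a timestamped file such as <plugin-dir>/heap-2021-07-18_21.31.32.hprof.
public Path resolveSaveFile(String prefix, String extension) {
    Path pluginFolder = getPlugin().getPluginDirectory();
    try {
        Files.createDirectories(pluginFolder);
    } catch (IOException e) {
        // ignore, matching the removed code's behaviour; a later Files.write will fail loudly
    }
    return pluginFolder.resolve(prefix + "-"
            + DateTimeFormatter.ofPattern("yyyy-MM-dd_HH.mm.ss").format(LocalDateTime.now())
            + "." + extension);
}

With this change, heapsummary --save-to-file writes a .sparkheap file and profiler --stop --save-to-file writes a .sparkprofile file; both commands also fall back to saving when the bytebin upload fails with an IOException.
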
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java
index b0a3c31..8a7d781 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java
@@ -31,6 +31,7 @@ import me.lucko.spark.common.command.tabcomplete.TabCompleter;
import me.lucko.spark.common.heapdump.HeapDump;
import me.lucko.spark.common.heapdump.HeapDumpSummary;
import me.lucko.spark.common.util.FormatUtil;
+import me.lucko.spark.proto.SparkProtos;
import net.kyori.adventure.text.event.ClickEvent;
@@ -45,8 +46,6 @@ import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
-import java.time.LocalDateTime;
-import java.time.format.DateTimeFormatter;
import java.util.Iterator;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
@@ -67,9 +66,9 @@ public class HeapAnalysisModule implements CommandModule {
public void registerCommands(Consumer<Command> consumer) {
consumer.accept(Command.builder()
.aliases("heapsummary")
- .argumentUsage("run-gc-before", null)
+ .argumentUsage("save-to-file", null)
.executor(HeapAnalysisModule::heapSummary)
- .tabCompleter((platform, sender, arguments) -> TabCompleter.completeForOpts(arguments, "--run-gc-before"))
+ .tabCompleter((platform, sender, arguments) -> TabCompleter.completeForOpts(arguments, "--save-to-file", "--run-gc-before"))
.build()
);
@@ -99,35 +98,57 @@ public class HeapAnalysisModule implements CommandModule {
return;
}
- byte[] output = heapDump.formCompressedDataPayload(platform.getPlugin().getPlatformInfo(), sender);
- try {
- String key = SparkPlatform.BYTEBIN_CLIENT.postContent(output, SPARK_HEAP_MEDIA_TYPE).key();
- String url = SparkPlatform.VIEWER_URL + key;
-
- resp.broadcastPrefixed(text("Heap dump summmary output:", GOLD));
- resp.broadcast(text()
- .content(url)
- .color(GRAY)
- .clickEvent(ClickEvent.openUrl(url))
- .build()
- );
-
- platform.getActivityLog().addToLog(Activity.urlActivity(sender, System.currentTimeMillis(), "Heap dump summary", url));
- } catch (IOException e) {
- resp.broadcastPrefixed(text("An error occurred whilst uploading the data.", RED));
- e.printStackTrace();
+ SparkProtos.HeapData output = heapDump.toProto(platform.getPlugin().getPlatformInfo(), sender);
+
+ boolean saveToFile = false;
+ if (arguments.boolFlag("save-to-file")) {
+ saveToFile = true;
+ } else {
+ try {
+ String key = SparkPlatform.BYTEBIN_CLIENT.postContent(output, SPARK_HEAP_MEDIA_TYPE).key();
+ String url = SparkPlatform.VIEWER_URL + key;
+
+ resp.broadcastPrefixed(text("Heap dump summmary output:", GOLD));
+ resp.broadcast(text()
+ .content(url)
+ .color(GRAY)
+ .clickEvent(ClickEvent.openUrl(url))
+ .build()
+ );
+
+ platform.getActivityLog().addToLog(Activity.urlActivity(sender, System.currentTimeMillis(), "Heap dump summary", url));
+ } catch (IOException e) {
+ resp.broadcastPrefixed(text("An error occurred whilst uploading the data. Attempting to save to disk instead.", RED));
+ e.printStackTrace();
+ saveToFile = true;
+ }
+ }
+
+ if (saveToFile) {
+ Path file = platform.resolveSaveFile("heapsummary", "sparkheap");
+ try {
+ Files.write(file, output.toByteArray());
+
+ resp.broadcastPrefixed(text()
+ .content("Heap dump summary written to: ")
+ .color(GOLD)
+ .append(text(file.toString(), GRAY))
+ .build()
+ );
+ resp.broadcastPrefixed(text("You can read the heap dump summary file using the viewer web-app - " + SparkPlatform.VIEWER_URL, GRAY));
+
+ platform.getActivityLog().addToLog(Activity.fileActivity(sender, System.currentTimeMillis(), "Heap dump summary", file.toString()));
+ } catch (IOException e) {
+ resp.broadcastPrefixed(text("An error occurred whilst saving the data.", RED));
+ e.printStackTrace();
+ }
}
+
}
private static void heapDump(SparkPlatform platform, CommandSender sender, CommandResponseHandler resp, Arguments arguments) {
- Path pluginFolder = platform.getPlugin().getPluginDirectory();
- try {
- Files.createDirectories(pluginFolder);
- } catch (IOException e) {
- // ignore
- }
- Path file = pluginFolder.resolve("heap-" + DateTimeFormatter.ofPattern("yyyy-MM-dd_HH.mm.ss").format(LocalDateTime.now()) + (HeapDump.isOpenJ9() ? ".phd" : ".hprof"));
+ Path file = platform.resolveSaveFile("heap", HeapDump.isOpenJ9() ? "phd" : "hprof");
boolean liveOnly = !arguments.boolFlag("include-non-live");
if (arguments.boolFlag("run-gc-before")) {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
index ebf6372..b20d22f 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
@@ -40,12 +40,15 @@ import me.lucko.spark.common.sampler.async.AsyncSampler;
import me.lucko.spark.common.sampler.node.MergeMode;
import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.common.util.MethodDisambiguator;
+import me.lucko.spark.proto.SparkProtos;
import net.kyori.adventure.text.event.ClickEvent;
import okhttp3.MediaType;
import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@@ -93,6 +96,7 @@ public class SamplerModule implements CommandModule {
.argumentUsage("force-java-sampler", null)
.argumentUsage("stop --comment", "comment")
.argumentUsage("stop --order-by-time", null)
+ .argumentUsage("stop --save-to-file", null)
.executor(this::profiler)
.tabCompleter((platform, sender, arguments) -> {
if (arguments.contains("--info") || arguments.contains("--cancel")) {
@@ -100,7 +104,7 @@ public class SamplerModule implements CommandModule {
}
if (arguments.contains("--stop") || arguments.contains("--upload")) {
- return TabCompleter.completeForOpts(arguments, "--order-by-time", "--comment");
+ return TabCompleter.completeForOpts(arguments, "--order-by-time", "--comment", "--save-to-file");
}
List<String> opts = new ArrayList<>(Arrays.asList("--info", "--stop", "--cancel",
@@ -250,9 +254,10 @@ public class SamplerModule implements CommandModule {
String comment = Iterables.getFirst(arguments.stringFlag("comment"), null);
MethodDisambiguator methodDisambiguator = new MethodDisambiguator();
MergeMode mergeMode = arguments.boolFlag("separate-parent-calls") ? MergeMode.separateParentCalls(methodDisambiguator) : MergeMode.sameMethod(methodDisambiguator);
+ boolean saveToFile = arguments.boolFlag("save-to-file");
future.thenAcceptAsync(s -> {
resp.broadcastPrefixed(text("The active profiler has completed! Uploading results..."));
- handleUpload(platform, resp, s, threadOrder, comment, mergeMode);
+ handleUpload(platform, resp, s, threadOrder, comment, mergeMode, saveToFile);
});
}
}
@@ -293,29 +298,57 @@ public class SamplerModule implements CommandModule {
String comment = Iterables.getFirst(arguments.stringFlag("comment"), null);
MethodDisambiguator methodDisambiguator = new MethodDisambiguator();
MergeMode mergeMode = arguments.boolFlag("separate-parent-calls") ? MergeMode.separateParentCalls(methodDisambiguator) : MergeMode.sameMethod(methodDisambiguator);
- handleUpload(platform, resp, this.activeSampler, threadOrder, comment, mergeMode);
+ boolean saveToFile = arguments.boolFlag("save-to-file");
+ handleUpload(platform, resp, this.activeSampler, threadOrder, comment, mergeMode, saveToFile);
this.activeSampler = null;
}
}
- private void handleUpload(SparkPlatform platform, CommandResponseHandler resp, Sampler sampler, ThreadNodeOrder threadOrder, String comment, MergeMode mergeMode) {
- byte[] output = sampler.formCompressedDataPayload(new Sampler.ExportProps(platform.getPlugin().getPlatformInfo(), resp.sender(), threadOrder, comment, mergeMode, platform.getClassSourceLookup()));
- try {
- String key = SparkPlatform.BYTEBIN_CLIENT.postContent(output, SPARK_SAMPLER_MEDIA_TYPE).key();
- String url = SparkPlatform.VIEWER_URL + key;
-
- resp.broadcastPrefixed(text("Profiler results:", GOLD));
- resp.broadcast(text()
- .content(url)
- .color(GRAY)
- .clickEvent(ClickEvent.openUrl(url))
- .build()
- );
-
- platform.getActivityLog().addToLog(Activity.urlActivity(resp.sender(), System.currentTimeMillis(), "Profiler", url));
- } catch (IOException e) {
- resp.broadcastPrefixed(text("An error occurred whilst uploading the results.", RED));
- e.printStackTrace();
+ private void handleUpload(SparkPlatform platform, CommandResponseHandler resp, Sampler sampler, ThreadNodeOrder threadOrder, String comment, MergeMode mergeMode, boolean saveToFileFlag) {
+ SparkProtos.SamplerData output = sampler.toProto(platform.getPlugin().getPlatformInfo(), resp.sender(), threadOrder, comment, mergeMode, platform.getClassSourceLookup());
+
+ boolean saveToFile = false;
+ if (saveToFileFlag) {
+ saveToFile = true;
+ } else {
+ try {
+ String key = SparkPlatform.BYTEBIN_CLIENT.postContent(output, SPARK_SAMPLER_MEDIA_TYPE).key();
+ String url = SparkPlatform.VIEWER_URL + key;
+
+ resp.broadcastPrefixed(text("Profiler results:", GOLD));
+ resp.broadcast(text()
+ .content(url)
+ .color(GRAY)
+ .clickEvent(ClickEvent.openUrl(url))
+ .build()
+ );
+
+ platform.getActivityLog().addToLog(Activity.urlActivity(resp.sender(), System.currentTimeMillis(), "Profiler", url));
+ } catch (IOException e) {
+ resp.broadcastPrefixed(text("An error occurred whilst uploading the results. Attempting to save to disk instead.", RED));
+ e.printStackTrace();
+ saveToFile = true;
+ }
+ }
+
+ if (saveToFile) {
+ Path file = platform.resolveSaveFile("profile", "sparkprofile");
+ try {
+ Files.write(file, output.toByteArray());
+
+ resp.broadcastPrefixed(text()
+ .content("Profile written to: ")
+ .color(GOLD)
+ .append(text(file.toString(), GRAY))
+ .build()
+ );
+ resp.broadcastPrefixed(text("You can read the profile file using the viewer web-app - " + SparkPlatform.VIEWER_URL, GRAY));
+
+ platform.getActivityLog().addToLog(Activity.fileActivity(resp.sender(), System.currentTimeMillis(), "Profiler", file.toString()));
+ } catch (IOException e) {
+ resp.broadcastPrefixed(text("An error occurred whilst saving the data.", RED));
+ e.printStackTrace();
+ }
}
}
}
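
The saved .sparkprofile and .sparkheap files are the raw serialized protobuf messages (output.toByteArray() above), so besides being opened in the viewer web-app as the messages above suggest, they can be parsed back with the standard protobuf-generated parseFrom API. A minimal sketch; the file name here is hypothetical:

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

import me.lucko.spark.proto.SparkProtos;

public class ReadSavedProfile {
    public static void main(String[] args) throws IOException {
        // Hypothetical name - real files are timestamped by resolveSaveFile(..)
        Path file = Paths.get("profile-2021-07-18_21.31.32.sparkprofile");
        // The file is exactly the serialized SamplerData message written by Files.write above
        SparkProtos.SamplerData data = SparkProtos.SamplerData.parseFrom(Files.readAllBytes(file));
        System.out.println("parsed " + data.getSerializedSize() + " bytes of profiler data");
    }
}
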