From 8204f693071ed7411300231cba8225b7846cc500 Mon Sep 17 00:00:00 2001
From: Luck
Date: Sun, 18 Jul 2021 21:31:32 +0100
Subject: Add flag to save profile/heapsummary to files instead of uploading
 to bytebin

---
 .../java/me/lucko/spark/common/SparkPlatform.java  | 18 +++++
 .../common/command/modules/HeapAnalysisModule.java | 77 ++++++++++++++--------
 .../common/command/modules/SamplerModule.java      | 75 +++++++++++++++------
 .../spark/common/heapdump/HeapDumpSummary.java     | 16 +----
 .../me/lucko/spark/common/sampler/Sampler.java     | 36 +---------
 .../spark/common/sampler/async/AsyncSampler.java   | 20 +++---
 .../spark/common/sampler/java/JavaSampler.java     | 20 +++---
 .../me/lucko/spark/common/util/BytebinClient.java  | 15 +++++
 8 files changed, 162 insertions(+), 115 deletions(-)

(limited to 'spark-common/src/main/java/me/lucko/spark')

diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
index 4dfbd20..7cb5d29 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
@@ -50,6 +50,11 @@ import net.kyori.adventure.text.event.ClickEvent;
 
 import okhttp3.OkHttpClient;
 
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.time.LocalDateTime;
+import java.time.format.DateTimeFormatter;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
@@ -80,6 +85,8 @@ public class SparkPlatform {
     private static final OkHttpClient OK_HTTP_CLIENT = new OkHttpClient();
     /** The bytebin instance used by the platform */
     public static final BytebinClient BYTEBIN_CLIENT = new BytebinClient(OK_HTTP_CLIENT, "https://bytebin.lucko.me/", "spark-plugin");
+    /** The date time formatter instance used by the platform */
+    private static final DateTimeFormatter DATE_TIME_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd_HH.mm.ss");
 
     private final SparkPlugin plugin;
     private final List<CommandModule> commandModules;
@@ -199,6 +206,17 @@ public class SparkPlatform {
         return this.serverNormalOperationStartTime;
     }
 
+    public Path resolveSaveFile(String prefix, String extension) {
+        Path pluginFolder = this.plugin.getPluginDirectory();
+        try {
+            Files.createDirectories(pluginFolder);
+        } catch (IOException e) {
+            // ignore
+        }
+
+        return pluginFolder.resolve(prefix + "-" + DATE_TIME_FORMATTER.format(LocalDateTime.now()) + "." + extension);
+    }
+
     private List<Command> getAvailableCommands(CommandSender sender) {
         if (sender.hasPermission("spark")) {
             return this.commands;
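Note: resolveSaveFile() becomes the shared naming scheme for everything spark writes to disk. A standalone sketch of the names it produces (plain JDK; the plugin directory shown is illustrative, not taken from the patch):

    import java.nio.file.Path;
    import java.nio.file.Paths;
    import java.time.LocalDateTime;
    import java.time.format.DateTimeFormatter;

    public class SaveFileNameDemo {
        // Same pattern as the DATE_TIME_FORMATTER constant added above.
        private static final DateTimeFormatter FORMAT = DateTimeFormatter.ofPattern("yyyy-MM-dd_HH.mm.ss");

        public static void main(String[] args) {
            Path pluginFolder = Paths.get("plugins", "spark"); // illustrative location
            Path file = pluginFolder.resolve("profile" + "-" + FORMAT.format(LocalDateTime.now()) + "." + "sparkprofile");
            System.out.println(file); // e.g. plugins/spark/profile-2021-07-18_21.31.32.sparkprofile
        }
    }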
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java
index b0a3c31..8a7d781 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java
@@ -31,6 +31,7 @@ import me.lucko.spark.common.command.tabcomplete.TabCompleter;
 import me.lucko.spark.common.heapdump.HeapDump;
 import me.lucko.spark.common.heapdump.HeapDumpSummary;
 import me.lucko.spark.common.util.FormatUtil;
+import me.lucko.spark.proto.SparkProtos;
 
 import net.kyori.adventure.text.event.ClickEvent;
 
@@ -45,8 +46,6 @@ import java.io.InputStream;
 import java.io.OutputStream;
 import java.nio.file.Files;
 import java.nio.file.Path;
-import java.time.LocalDateTime;
-import java.time.format.DateTimeFormatter;
 import java.util.Iterator;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicLong;
@@ -67,9 +66,9 @@ public class HeapAnalysisModule implements CommandModule {
     public void registerCommands(Consumer<Command> consumer) {
         consumer.accept(Command.builder()
                 .aliases("heapsummary")
-                .argumentUsage("run-gc-before", null)
+                .argumentUsage("save-to-file", null)
                 .executor(HeapAnalysisModule::heapSummary)
-                .tabCompleter((platform, sender, arguments) -> TabCompleter.completeForOpts(arguments, "--run-gc-before"))
+                .tabCompleter((platform, sender, arguments) -> TabCompleter.completeForOpts(arguments, "--save-to-file", "--run-gc-before"))
                 .build()
         );
 
@@ -99,35 +98,57 @@
             return;
         }
 
-        byte[] output = heapDump.formCompressedDataPayload(platform.getPlugin().getPlatformInfo(), sender);
-        try {
-            String key = SparkPlatform.BYTEBIN_CLIENT.postContent(output, SPARK_HEAP_MEDIA_TYPE).key();
-            String url = SparkPlatform.VIEWER_URL + key;
-
-            resp.broadcastPrefixed(text("Heap dump summmary output:", GOLD));
-            resp.broadcast(text()
-                    .content(url)
-                    .color(GRAY)
-                    .clickEvent(ClickEvent.openUrl(url))
-                    .build()
-            );
-
-            platform.getActivityLog().addToLog(Activity.urlActivity(sender, System.currentTimeMillis(), "Heap dump summary", url));
-        } catch (IOException e) {
-            resp.broadcastPrefixed(text("An error occurred whilst uploading the data.", RED));
-            e.printStackTrace();
+        SparkProtos.HeapData output = heapDump.toProto(platform.getPlugin().getPlatformInfo(), sender);
+
+        boolean saveToFile = false;
+        if (arguments.boolFlag("save-to-file")) {
+            saveToFile = true;
+        } else {
+            try {
+                String key = SparkPlatform.BYTEBIN_CLIENT.postContent(output, SPARK_HEAP_MEDIA_TYPE).key();
+                String url = SparkPlatform.VIEWER_URL + key;
+
+                resp.broadcastPrefixed(text("Heap dump summary output:", GOLD));
+                resp.broadcast(text()
+                        .content(url)
+                        .color(GRAY)
+                        .clickEvent(ClickEvent.openUrl(url))
+                        .build()
+                );
+
+                platform.getActivityLog().addToLog(Activity.urlActivity(sender, System.currentTimeMillis(), "Heap dump summary", url));
+            } catch (IOException e) {
+                resp.broadcastPrefixed(text("An error occurred whilst uploading the data. Attempting to save to disk instead.", RED));
+                e.printStackTrace();
+                saveToFile = true;
+            }
+        }
+
+        if (saveToFile) {
+            Path file = platform.resolveSaveFile("heapsummary", "sparkheap");
+            try {
+                Files.write(file, output.toByteArray());
+
+                resp.broadcastPrefixed(text()
+                        .content("Heap dump summary written to: ")
+                        .color(GOLD)
+                        .append(text(file.toString(), GRAY))
+                        .build()
+                );
+                resp.broadcastPrefixed(text("You can read the heap dump summary file using the viewer web-app - " + SparkPlatform.VIEWER_URL, GRAY));
+
+                platform.getActivityLog().addToLog(Activity.fileActivity(sender, System.currentTimeMillis(), "Heap dump summary", file.toString()));
+            } catch (IOException e) {
+                resp.broadcastPrefixed(text("An error occurred whilst saving the data.", RED));
+                e.printStackTrace();
+            }
         }
+
     }
 
     private static void heapDump(SparkPlatform platform, CommandSender sender, CommandResponseHandler resp, Arguments arguments) {
-        Path pluginFolder = platform.getPlugin().getPluginDirectory();
-        try {
-            Files.createDirectories(pluginFolder);
-        } catch (IOException e) {
-            // ignore
-        }
+        Path file = platform.resolveSaveFile("heap", HeapDump.isOpenJ9() ? "phd" : "hprof");
 
-        Path file = pluginFolder.resolve("heap-" + DateTimeFormatter.ofPattern("yyyy-MM-dd_HH.mm.ss").format(LocalDateTime.now()) + (HeapDump.isOpenJ9() ? ".phd" : ".hprof"));
         boolean liveOnly = !arguments.boolFlag("include-non-live");
 
         if (arguments.boolFlag("run-gc-before")) {
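Note: the disk path writes the raw HeapData protobuf bytes (output.toByteArray()); only the bytebin upload is gzipped (see the BytebinClient change below). Under that assumption, a saved file can be read straight back with the generated protobuf API — a hedged sketch, with a hypothetical file name:

    import java.nio.file.Files;
    import java.nio.file.Paths;

    import me.lucko.spark.proto.SparkProtos;

    public class ReadHeapSummaryFile {
        public static void main(String[] args) throws Exception {
            // Hypothetical path; real names follow the resolveSaveFile() scheme above.
            byte[] bytes = Files.readAllBytes(Paths.get("plugins/spark/heapsummary-2021-07-18_21.31.32.sparkheap"));
            SparkProtos.HeapData data = SparkProtos.HeapData.parseFrom(bytes); // no gunzip needed
            System.out.println(data.getMetadata());
        }
    }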
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
index ebf6372..b20d22f 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
@@ -40,12 +40,15 @@ import me.lucko.spark.common.sampler.async.AsyncSampler;
 import me.lucko.spark.common.sampler.node.MergeMode;
 import me.lucko.spark.common.tick.TickHook;
 import me.lucko.spark.common.util.MethodDisambiguator;
+import me.lucko.spark.proto.SparkProtos;
 
 import net.kyori.adventure.text.event.ClickEvent;
 
 import okhttp3.MediaType;
 
 import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
@@ -93,6 +96,7 @@
                 .argumentUsage("force-java-sampler", null)
                 .argumentUsage("stop --comment", "comment")
                 .argumentUsage("stop --order-by-time", null)
+                .argumentUsage("stop --save-to-file", null)
                 .executor(this::profiler)
                 .tabCompleter((platform, sender, arguments) -> {
                     if (arguments.contains("--info") || arguments.contains("--cancel")) {
@@ -100,7 +104,7 @@
                     }
 
                     if (arguments.contains("--stop") || arguments.contains("--upload")) {
-                        return TabCompleter.completeForOpts(arguments, "--order-by-time", "--comment");
+                        return TabCompleter.completeForOpts(arguments, "--order-by-time", "--comment", "--save-to-file");
                     }
 
                     List<String> opts = new ArrayList<>(Arrays.asList("--info", "--stop", "--cancel",
@@ -250,9 +254,10 @@
             String comment = Iterables.getFirst(arguments.stringFlag("comment"), null);
             MethodDisambiguator methodDisambiguator = new MethodDisambiguator();
             MergeMode mergeMode = arguments.boolFlag("separate-parent-calls") ? MergeMode.separateParentCalls(methodDisambiguator) : MergeMode.sameMethod(methodDisambiguator);
+            boolean saveToFile = arguments.boolFlag("save-to-file");
             future.thenAcceptAsync(s -> {
                 resp.broadcastPrefixed(text("The active profiler has completed! Uploading results..."));
-                handleUpload(platform, resp, s, threadOrder, comment, mergeMode);
+                handleUpload(platform, resp, s, threadOrder, comment, mergeMode, saveToFile);
             });
         }
     }
@@ -293,29 +298,57 @@
             String comment = Iterables.getFirst(arguments.stringFlag("comment"), null);
             MethodDisambiguator methodDisambiguator = new MethodDisambiguator();
             MergeMode mergeMode = arguments.boolFlag("separate-parent-calls") ? MergeMode.separateParentCalls(methodDisambiguator) : MergeMode.sameMethod(methodDisambiguator);
-            handleUpload(platform, resp, this.activeSampler, threadOrder, comment, mergeMode);
+            boolean saveToFile = arguments.boolFlag("save-to-file");
+            handleUpload(platform, resp, this.activeSampler, threadOrder, comment, mergeMode, saveToFile);
             this.activeSampler = null;
         }
     }
 
-    private void handleUpload(SparkPlatform platform, CommandResponseHandler resp, Sampler sampler, ThreadNodeOrder threadOrder, String comment, MergeMode mergeMode) {
-        byte[] output = sampler.formCompressedDataPayload(new Sampler.ExportProps(platform.getPlugin().getPlatformInfo(), resp.sender(), threadOrder, comment, mergeMode, platform.getClassSourceLookup()));
-        try {
-            String key = SparkPlatform.BYTEBIN_CLIENT.postContent(output, SPARK_SAMPLER_MEDIA_TYPE).key();
-            String url = SparkPlatform.VIEWER_URL + key;
-
-            resp.broadcastPrefixed(text("Profiler results:", GOLD));
-            resp.broadcast(text()
-                    .content(url)
-                    .color(GRAY)
-                    .clickEvent(ClickEvent.openUrl(url))
-                    .build()
-            );
-
-            platform.getActivityLog().addToLog(Activity.urlActivity(resp.sender(), System.currentTimeMillis(), "Profiler", url));
-        } catch (IOException e) {
-            resp.broadcastPrefixed(text("An error occurred whilst uploading the results.", RED));
-            e.printStackTrace();
+    private void handleUpload(SparkPlatform platform, CommandResponseHandler resp, Sampler sampler, ThreadNodeOrder threadOrder, String comment, MergeMode mergeMode, boolean saveToFileFlag) {
+        SparkProtos.SamplerData output = sampler.toProto(platform.getPlugin().getPlatformInfo(), resp.sender(), threadOrder, comment, mergeMode, platform.getClassSourceLookup());
+
+        boolean saveToFile = false;
+        if (saveToFileFlag) {
+            saveToFile = true;
+        } else {
+            try {
+                String key = SparkPlatform.BYTEBIN_CLIENT.postContent(output, SPARK_SAMPLER_MEDIA_TYPE).key();
+                String url = SparkPlatform.VIEWER_URL + key;
+
+                resp.broadcastPrefixed(text("Profiler results:", GOLD));
+                resp.broadcast(text()
+                        .content(url)
+                        .color(GRAY)
+                        .clickEvent(ClickEvent.openUrl(url))
+                        .build()
+                );
+
+                platform.getActivityLog().addToLog(Activity.urlActivity(resp.sender(), System.currentTimeMillis(), "Profiler", url));
+            } catch (IOException e) {
+                resp.broadcastPrefixed(text("An error occurred whilst uploading the results. Attempting to save to disk instead.", RED));
+                e.printStackTrace();
+                saveToFile = true;
+            }
+        }
+
+        if (saveToFile) {
+            Path file = platform.resolveSaveFile("profile", "sparkprofile");
+            try {
+                Files.write(file, output.toByteArray());
+
+                resp.broadcastPrefixed(text()
+                        .content("Profile written to: ")
+                        .color(GOLD)
+                        .append(text(file.toString(), GRAY))
+                        .build()
+                );
+                resp.broadcastPrefixed(text("You can read the profile file using the viewer web-app - " + SparkPlatform.VIEWER_URL, GRAY));
+
+                platform.getActivityLog().addToLog(Activity.fileActivity(resp.sender(), System.currentTimeMillis(), "Profiler", file.toString()));
+            } catch (IOException e) {
+                resp.broadcastPrefixed(text("An error occurred whilst saving the data.", RED));
+                e.printStackTrace();
+            }
         }
     }
 }
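Note: handleUpload() now implements a small "upload, else save" policy: an explicit --save-to-file skips bytebin entirely, and an upload failure falls back to disk instead of discarding the results. The shape of that policy, reduced to its essentials (a sketch with hypothetical helper names, not spark API):

    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Path;

    public class UploadOrSave {
        // Hypothetical stand-in for the bytebin upload.
        interface Uploader { String upload(byte[] payload) throws IOException; }

        static void export(byte[] payload, boolean saveToFileFlag, Uploader uploader, Path file) throws IOException {
            boolean saveToFile = saveToFileFlag; // the explicit flag wins outright
            if (!saveToFile) {
                try {
                    System.out.println("Results: " + uploader.upload(payload));
                    return;
                } catch (IOException e) {
                    e.printStackTrace();
                    saveToFile = true; // network failure falls back to disk
                }
            }
            Files.write(file, payload); // raw bytes; no gzip on the disk path
            System.out.println("Written to: " + file);
        }
    }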
diff --git a/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java b/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java
index de0c5df..34fd6c4 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java
@@ -28,9 +28,6 @@ import me.lucko.spark.proto.SparkProtos.HeapEntry;
 
 import org.objectweb.asm.Type;
 
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.io.OutputStream;
 import java.lang.management.ManagementFactory;
 import java.util.Arrays;
 import java.util.List;
@@ -38,7 +35,6 @@ import java.util.Objects;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 import java.util.stream.Collectors;
-import java.util.zip.GZIPOutputStream;
 
 import javax.management.JMX;
 import javax.management.MBeanServer;
@@ -129,7 +125,7 @@ public final class HeapDumpSummary {
         this.entries = entries;
     }
 
-    private HeapData toProto(PlatformInfo platformInfo, CommandSender creator) {
+    public HeapData toProto(PlatformInfo platformInfo, CommandSender creator) {
         HeapData.Builder proto = HeapData.newBuilder();
         proto.setMetadata(SparkProtos.HeapMetadata.newBuilder()
                 .setPlatformMetadata(platformInfo.toData().toProto())
@@ -144,16 +140,6 @@ public final class HeapDumpSummary {
         return proto.build();
     }
 
-    public byte[] formCompressedDataPayload(PlatformInfo platformInfo, CommandSender creator) {
-        ByteArrayOutputStream byteOut = new ByteArrayOutputStream();
-        try (OutputStream out = new GZIPOutputStream(byteOut)) {
-            toProto(platformInfo, creator).writeTo(out);
-        } catch (IOException e) {
-            throw new RuntimeException(e);
-        }
-        return byteOut.toByteArray();
-    }
-
     public static final class Entry {
         private final int order;
         private final int instances;
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
index bc08dfd..b512bc1 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
@@ -27,13 +27,9 @@ import me.lucko.spark.common.sampler.node.ThreadNode;
 import me.lucko.spark.common.util.ClassSourceLookup;
 import me.lucko.spark.proto.SparkProtos.SamplerData;
 
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.io.OutputStream;
 import java.util.Comparator;
 import java.util.Map;
 import java.util.concurrent.CompletableFuture;
-import java.util.zip.GZIPOutputStream;
 
 /**
  * Abstract superinterface for all sampler implementations.
@@ -72,36 +68,6 @@
     CompletableFuture<? extends Sampler> getFuture();
 
     // Methods used to export the sampler data to the web viewer.
-    SamplerData toProto(ExportProps props);
-
-    default byte[] formCompressedDataPayload(ExportProps props) {
-        SamplerData proto = toProto(props);
-
-        ByteArrayOutputStream byteOut = new ByteArrayOutputStream();
-        try (OutputStream out = new GZIPOutputStream(byteOut)) {
-            proto.writeTo(out);
-        } catch (IOException e) {
-            throw new RuntimeException(e);
-        }
-        return byteOut.toByteArray();
-    }
-
-    class ExportProps {
-        public final PlatformInfo platformInfo;
-        public final CommandSender creator;
-        public final Comparator<? super Map.Entry<String, ThreadNode>> outputOrder;
-        public final String comment;
-        public final MergeMode mergeMode;
-        public final ClassSourceLookup classSourceLookup;
-
-        public ExportProps(PlatformInfo platformInfo, CommandSender creator, Comparator<? super Map.Entry<String, ThreadNode>> outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup) {
-            this.platformInfo = platformInfo;
-            this.creator = creator;
-            this.outputOrder = outputOrder;
-            this.comment = comment;
-            this.mergeMode = mergeMode;
-            this.classSourceLookup = classSourceLookup;
-        }
-    }
+    SamplerData toProto(PlatformInfo platformInfo, CommandSender creator, Comparator<? super Map.Entry<String, ThreadNode>> outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup);
 
 }
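Note: with ExportProps gone, the six export inputs are passed positionally and serialisation now ends at toProto(); byte-level concerns (gzip, upload) move to BytebinClient. A hedged call-site sketch — ThreadNodeOrder.BY_TIME is an assumption inferred from the --order-by-time flag handled in SamplerModule above:

    import me.lucko.spark.common.SparkPlatform;
    import me.lucko.spark.common.command.CommandResponseHandler;
    import me.lucko.spark.common.sampler.Sampler;
    import me.lucko.spark.common.sampler.ThreadNodeOrder;
    import me.lucko.spark.common.sampler.node.MergeMode;
    import me.lucko.spark.common.util.MethodDisambiguator;
    import me.lucko.spark.proto.SparkProtos;

    public class ExportCallSite {
        static SparkProtos.SamplerData export(SparkPlatform platform, CommandResponseHandler resp, Sampler sampler) {
            MethodDisambiguator disambiguator = new MethodDisambiguator();
            return sampler.toProto(
                    platform.getPlugin().getPlatformInfo(), // platform metadata
                    resp.sender(),                          // who requested the export
                    ThreadNodeOrder.BY_TIME,                // assumed enum constant
                    null,                                   // no --comment supplied
                    MergeMode.sameMethod(disambiguator),
                    platform.getClassSourceLookup());
        }
    }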
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java
index 8d57a6d..ca30df0 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java
@@ -20,10 +20,13 @@
 
 package me.lucko.spark.common.sampler.async;
 
+import me.lucko.spark.common.command.sender.CommandSender;
+import me.lucko.spark.common.platform.PlatformInfo;
 import me.lucko.spark.common.sampler.Sampler;
 import me.lucko.spark.common.sampler.ThreadDumper;
 import me.lucko.spark.common.sampler.ThreadGrouper;
 import me.lucko.spark.common.sampler.async.jfr.JfrReader;
+import me.lucko.spark.common.sampler.node.MergeMode;
 import me.lucko.spark.common.sampler.node.ThreadNode;
 import me.lucko.spark.common.util.ClassSourceLookup;
 import me.lucko.spark.proto.SparkProtos;
@@ -35,6 +38,7 @@ import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.util.ArrayList;
+import java.util.Comparator;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.CompletableFuture;
@@ -141,17 +145,17 @@
     }
 
     @Override
-    public SparkProtos.SamplerData toProto(ExportProps props) {
+    public SparkProtos.SamplerData toProto(PlatformInfo platformInfo, CommandSender creator, Comparator<? super Map.Entry<String, ThreadNode>> outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup) {
         final SparkProtos.SamplerMetadata.Builder metadata = SparkProtos.SamplerMetadata.newBuilder()
-                .setPlatformMetadata(props.platformInfo.toData().toProto())
-                .setCreator(props.creator.toData().toProto())
+                .setPlatformMetadata(platformInfo.toData().toProto())
+                .setCreator(creator.toData().toProto())
                 .setStartTime(this.startTime)
                 .setInterval(this.interval)
                 .setThreadDumper(this.threadDumper.getMetadata())
                 .setDataAggregator(this.dataAggregator.getMetadata());
 
-        if (props.comment != null) {
-            metadata.setComment(props.comment);
+        if (comment != null) {
+            metadata.setComment(comment);
         }
 
         SparkProtos.SamplerData.Builder proto = SparkProtos.SamplerData.newBuilder();
@@ -160,12 +164,12 @@
         aggregateOutput();
 
         List<Map.Entry<String, ThreadNode>> data = new ArrayList<>(this.dataAggregator.getData().entrySet());
-        data.sort(props.outputOrder);
+        data.sort(outputOrder);
 
-        ClassSourceLookup.Visitor classSourceVisitor = ClassSourceLookup.createVisitor(props.classSourceLookup);
+        ClassSourceLookup.Visitor classSourceVisitor = ClassSourceLookup.createVisitor(classSourceLookup);
 
         for (Map.Entry<String, ThreadNode> entry : data) {
-            proto.addThreads(entry.getValue().toProto(props.mergeMode));
+            proto.addThreads(entry.getValue().toProto(mergeMode));
             classSourceVisitor.visit(entry.getValue());
         }
 
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java
index 23d38d8..a7204b3 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java
@@ -23,9 +23,12 @@ package me.lucko.spark.common.sampler.java;
 
 import com.google.common.util.concurrent.ThreadFactoryBuilder;
 
+import me.lucko.spark.common.command.sender.CommandSender;
+import me.lucko.spark.common.platform.PlatformInfo;
 import me.lucko.spark.common.sampler.Sampler;
 import me.lucko.spark.common.sampler.ThreadDumper;
 import me.lucko.spark.common.sampler.ThreadGrouper;
+import me.lucko.spark.common.sampler.node.MergeMode;
 import me.lucko.spark.common.sampler.node.ThreadNode;
 import me.lucko.spark.common.tick.TickHook;
 import me.lucko.spark.common.util.ClassSourceLookup;
@@ -36,6 +39,7 @@ import java.lang.management.ManagementFactory;
 import java.lang.management.ThreadInfo;
 import java.lang.management.ThreadMXBean;
 import java.util.ArrayList;
+import java.util.Comparator;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.CompletableFuture;
@@ -158,29 +162,29 @@
     }
 
     @Override
-    public SamplerData toProto(ExportProps props) {
+    public SamplerData toProto(PlatformInfo platformInfo, CommandSender creator, Comparator<? super Map.Entry<String, ThreadNode>> outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup) {
         final SamplerMetadata.Builder metadata = SamplerMetadata.newBuilder()
-                .setPlatformMetadata(props.platformInfo.toData().toProto())
-                .setCreator(props.creator.toData().toProto())
+                .setPlatformMetadata(platformInfo.toData().toProto())
+                .setCreator(creator.toData().toProto())
                 .setStartTime(this.startTime)
                 .setInterval(this.interval)
                 .setThreadDumper(this.threadDumper.getMetadata())
                 .setDataAggregator(this.dataAggregator.getMetadata());
 
-        if (props.comment != null) {
-            metadata.setComment(props.comment);
+        if (comment != null) {
+            metadata.setComment(comment);
         }
 
         SamplerData.Builder proto = SamplerData.newBuilder();
         proto.setMetadata(metadata.build());
 
         List<Map.Entry<String, ThreadNode>> data = new ArrayList<>(this.dataAggregator.getData().entrySet());
-        data.sort(props.outputOrder);
+        data.sort(outputOrder);
 
-        ClassSourceLookup.Visitor classSourceVisitor = ClassSourceLookup.createVisitor(props.classSourceLookup);
+        ClassSourceLookup.Visitor classSourceVisitor = ClassSourceLookup.createVisitor(classSourceLookup);
 
         for (Map.Entry<String, ThreadNode> entry : data) {
-            proto.addThreads(entry.getValue().toProto(props.mergeMode));
+            proto.addThreads(entry.getValue().toProto(mergeMode));
             classSourceVisitor.visit(entry.getValue());
         }
 
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/BytebinClient.java b/spark-common/src/main/java/me/lucko/spark/common/util/BytebinClient.java
index 9202303..29ee5bb 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/util/BytebinClient.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/BytebinClient.java
@@ -25,13 +25,18 @@
 
 package me.lucko.spark.common.util;
 
+import com.google.protobuf.AbstractMessageLite;
+
 import okhttp3.MediaType;
 import okhttp3.OkHttpClient;
 import okhttp3.Request;
 import okhttp3.RequestBody;
 import okhttp3.Response;
 
+import java.io.ByteArrayOutputStream;
 import java.io.IOException;
+import java.io.OutputStream;
+import java.util.zip.GZIPOutputStream;
 
 /**
  * Utility for posting content to bytebin.
@@ -81,6 +86,16 @@ public class BytebinClient extends AbstractHttpClient {
         }
     }
 
+    public Content postContent(AbstractMessageLite<?, ?> proto, MediaType contentType) throws IOException {
+        ByteArrayOutputStream byteOut = new ByteArrayOutputStream();
+        try (OutputStream out = new GZIPOutputStream(byteOut)) {
+            proto.writeTo(out);
+        } catch (IOException e) {
+            throw new RuntimeException(e);
+        }
+        return postContent(byteOut.toByteArray(), contentType);
+    }
+
     public static final class Content {
         private final String key;
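Note: gzip compression is now BytebinClient's concern — the new overload compresses any protobuf message and posts the bytes, so callers stop handling byte[] payloads themselves. A usage sketch; the media-type value is an assumption (spark's SPARK_SAMPLER_MEDIA_TYPE constant is not shown in this patch):

    import java.io.IOException;

    import me.lucko.spark.common.SparkPlatform;
    import me.lucko.spark.common.util.BytebinClient;
    import me.lucko.spark.proto.SparkProtos;

    import okhttp3.MediaType;

    public class PostProtoExample {
        // Assumed value; spark keeps its own private constant for this.
        private static final MediaType SAMPLER_TYPE = MediaType.parse("application/x-spark-sampler");

        static String upload(SparkProtos.SamplerData data) throws IOException {
            BytebinClient.Content content = SparkPlatform.BYTEBIN_CLIENT.postContent(data, SAMPLER_TYPE);
            return SparkPlatform.VIEWER_URL + content.key();
        }
    }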