author     Luck <git@lucko.me>  2018-11-04 01:05:41 +0000
committer  Luck <git@lucko.me>  2018-11-04 01:05:41 +0000
commit     320d6a28b60873c8e8163b27ed1389978aed4ee6 (patch)
tree       ba130d567e58883458411d115a6eac1b8688220a /spark-common/src/main/java/me/lucko/spark/common/command
parent     9e4c0edc47707fbcad34305b3cd723b08f1ab4d6 (diff)
download   spark-320d6a28b60873c8e8163b27ed1389978aed4ee6.tar.gz
           spark-320d6a28b60873c8e8163b27ed1389978aed4ee6.tar.bz2
           spark-320d6a28b60873c8e8163b27ed1389978aed4ee6.zip
some misc refactoring
Diffstat (limited to 'spark-common/src/main/java/me/lucko/spark/common/command')
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapModule.java     8
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java  8
2 files changed, 10 insertions, 6 deletions
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapModule.java
index 8752443..318ce25 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapModule.java
@@ -23,13 +23,15 @@ package me.lucko.spark.common.command.modules;
import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.command.Command;
import me.lucko.spark.common.command.CommandModule;
-import me.lucko.spark.common.http.Bytebin;
import me.lucko.spark.memory.HeapDump;
+import okhttp3.MediaType;
+
import java.io.IOException;
import java.util.function.Consumer;
public class HeapModule<S> implements CommandModule<S> {
+ private static final MediaType JSON_TYPE = MediaType.parse("application/json; charset=utf-8");
@Override
public void registerCommands(Consumer<Command<S>> consumer) {
@@ -50,9 +52,9 @@ public class HeapModule<S> implements CommandModule<S> {
byte[] output = heapDump.formCompressedDataPayload();
try {
- String pasteId = Bytebin.postCompressedContent(output);
+ String key = SparkPlatform.BYTEBIN_CLIENT.postGzippedContent(output, JSON_TYPE);
platform.sendPrefixedMessage("&bHeap dump output:");
- platform.sendLink(SparkPlatform.VIEWER_URL + pasteId);
+ platform.sendLink(SparkPlatform.VIEWER_URL + key);
} catch (IOException e) {
platform.sendPrefixedMessage("&cAn error occurred whilst uploading the data.");
e.printStackTrace();
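
Note: the call site above replaces the old static Bytebin.postCompressedContent helper with an instance method on a shared client. For context, here is a minimal sketch of what a postGzippedContent method could look like on top of OkHttp; the class shape, the upload URL handling and the use of the Location response header for the returned key are assumptions made for illustration, not the project's actual implementation.

import okhttp3.MediaType;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.RequestBody;
import okhttp3.Response;

import java.io.IOException;

// Sketch only: names other than the OkHttp types are illustrative.
public class BytebinClientSketch {
    private final OkHttpClient okHttp = new OkHttpClient();
    private final String postUrl; // the bytebin "post" endpoint (assumed)

    public BytebinClientSketch(String postUrl) {
        this.postUrl = postUrl;
    }

    // Uploads content the caller has already gzipped and returns the key under
    // which it was stored (assumed to be sent back in the Location header).
    public String postGzippedContent(byte[] content, MediaType contentType) throws IOException {
        RequestBody body = RequestBody.create(contentType, content);
        Request request = new Request.Builder()
                .url(this.postUrl)
                .header("Content-Encoding", "gzip") // payload is pre-compressed by the caller
                .post(body)
                .build();

        try (Response response = this.okHttp.newCall(request).execute()) {
            if (!response.isSuccessful()) {
                throw new IOException("Unexpected response code: " + response.code());
            }
            String key = response.header("Location");
            if (key == null) {
                throw new IOException("No Location header in response");
            }
            return key;
        }
    }
}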
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
index 693ffd9..5fd8b5b 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
@@ -25,13 +25,14 @@ import me.lucko.spark.common.command.Command;
import me.lucko.spark.common.command.CommandModule;
import me.lucko.spark.common.command.tabcomplete.CompletionSupplier;
import me.lucko.spark.common.command.tabcomplete.TabCompleter;
-import me.lucko.spark.common.http.Bytebin;
import me.lucko.spark.sampler.Sampler;
import me.lucko.spark.sampler.SamplerBuilder;
import me.lucko.spark.sampler.ThreadDumper;
import me.lucko.spark.sampler.ThreadGrouper;
import me.lucko.spark.sampler.TickCounter;
+import okhttp3.MediaType;
+
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
@@ -42,6 +43,7 @@ import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;
public class SamplerModule<S> implements CommandModule<S> {
+ private static final MediaType JSON_TYPE = MediaType.parse("application/json; charset=utf-8");
/** Guards {@link #activeSampler} */
private final Object[] activeSamplerMutex = new Object[0];
@@ -236,9 +238,9 @@ public class SamplerModule<S> implements CommandModule<S> {
platform.runAsync(() -> {
byte[] output = sampler.formCompressedDataPayload();
try {
- String pasteId = Bytebin.postCompressedContent(output);
+ String key = SparkPlatform.BYTEBIN_CLIENT.postGzippedContent(output, JSON_TYPE);
platform.sendPrefixedMessage("&bSampling results:");
- platform.sendLink(SparkPlatform.VIEWER_URL + pasteId);
+ platform.sendLink(SparkPlatform.VIEWER_URL + key);
} catch (IOException e) {
platform.sendPrefixedMessage("&cAn error occurred whilst uploading the results.");
e.printStackTrace();
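
The byte[] handed to postGzippedContent in both modules comes from formCompressedDataPayload on the heap dump and sampler objects: a gzipped JSON document, which is why the upload is tagged with the application/json media type. A minimal sketch of producing such a payload follows, assuming the real classes do roughly this while streaming their own JSON output; the class and parameter names are illustrative.

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.util.zip.GZIPOutputStream;

final class CompressedPayloadSketch {
    // Gzips a JSON string into the kind of byte[] the modules pass to the client.
    static byte[] formCompressedDataPayload(String json) {
        ByteArrayOutputStream byteOut = new ByteArrayOutputStream();
        try (Writer writer = new OutputStreamWriter(new GZIPOutputStream(byteOut), StandardCharsets.UTF_8)) {
            writer.write(json); // the real classes write a full JSON document here
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
        // the gzip stream is finished when the try-with-resources block closes it
        return byteOut.toByteArray();
    }
}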