aboutsummaryrefslogtreecommitdiff
path: root/spark-common/src/main/java/me/lucko/spark/common/command
diff options
context:
space:
mode:
authorLuck <git@lucko.me>2018-10-15 18:07:29 +0100
committerLuck <git@lucko.me>2018-10-15 18:07:29 +0100
commit91775dd2ecc3f3e70dd422f68cf6d06e74db5d49 (patch)
treea598f6860667cb25b6d5c69301730015d4797f4b /spark-common/src/main/java/me/lucko/spark/common/command
parent648167064ad2064fc5ab77fb57b347253ac9d468 (diff)
downloadspark-91775dd2ecc3f3e70dd422f68cf6d06e74db5d49.tar.gz
spark-91775dd2ecc3f3e70dd422f68cf6d06e74db5d49.tar.bz2
spark-91775dd2ecc3f3e70dd422f68cf6d06e74db5d49.zip
Start work on commands refactoring
Long term goals are: - tab completion - auto generate usage/info messages
Diffstat (limited to 'spark-common/src/main/java/me/lucko/spark/common/command')
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/command/Arguments.java100
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/command/Command.java113
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/command/CommandModule.java29
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapModule.java69
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/command/modules/MonitoringModule.java80
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java236
6 files changed, 627 insertions, 0 deletions
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/Arguments.java b/spark-common/src/main/java/me/lucko/spark/common/command/Arguments.java
new file mode 100644
index 0000000..4189174
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/Arguments.java
@@ -0,0 +1,100 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.command;
+
+import com.google.common.collect.HashMultimap;
+import com.google.common.collect.SetMultimap;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Set;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+/**
+ * Parses command arguments of the form
+ * {@code --flag [value...] --other-flag ...} into a multimap of
+ * flag name to joined value string.
+ *
+ * <p>Each flag's trailing words are joined with a single space. A flag may
+ * be specified more than once; every occurrence is retained.</p>
+ */
+public class Arguments {
+    /** Matches a flag token, capturing the name without the "--" prefix */
+    private static final Pattern FLAG_REGEX = Pattern.compile("^--(.+)$");
+
+    private final List<String> rawArgs;
+    private final SetMultimap<String, String> parsedArgs;
+
+    /**
+     * Parses the given raw arguments.
+     *
+     * @param args the raw arguments, e.g. {"--timeout", "30", "--thread", "*"}
+     * @throws IllegalArgumentException if a value appears before any flag
+     */
+    public Arguments(String[] args) {
+        this.rawArgs = new ArrayList<>(Arrays.asList(args));
+        this.parsedArgs = HashMultimap.create();
+
+        String flag = null;
+        List<String> value = null;
+
+        for (int i = 0; i < this.rawArgs.size(); i++) {
+            String arg = this.rawArgs.get(i);
+
+            Matcher matcher = FLAG_REGEX.matcher(arg);
+            boolean matches = matcher.matches();
+
+            if (flag == null || matches) {
+                if (!matches) {
+                    throw new IllegalArgumentException("Expected flag at position " + i + " but got '" + arg + "' instead!");
+                }
+
+                // store existing value, if present
+                if (flag != null) {
+                    this.parsedArgs.put(flag, String.join(" ", value));
+                }
+
+                flag = matcher.group(1).toLowerCase();
+                value = new ArrayList<>();
+            } else {
+                // part of a value
+                value.add(arg);
+            }
+        }
+
+        // store remaining value, if present
+        if (flag != null) {
+            this.parsedArgs.put(flag, String.join(" ", value));
+        }
+    }
+
+    /** Returns the raw (unparsed) argument list. */
+    public List<String> raw() {
+        return this.rawArgs;
+    }
+
+    /**
+     * Returns the first value of the given flag parsed as a non-negative
+     * integer, or -1 if the flag was not specified.
+     *
+     * @param key the flag name (lower case)
+     * @return the absolute value of the parsed number, or -1 if undefined
+     * @throws IllegalArgumentException if the value is not a usable number
+     */
+    public int intFlag(String key) {
+        Iterator<String> it = this.parsedArgs.get(key).iterator();
+        if (it.hasNext()) {
+            try {
+                int value = Integer.parseInt(it.next());
+                // Math.abs(Integer.MIN_VALUE) overflows and stays negative,
+                // which would collide with the -1 "undefined" sentinel -
+                // treat it as invalid input instead.
+                if (value == Integer.MIN_VALUE) {
+                    throw new NumberFormatException("integer overflow");
+                }
+                return Math.abs(value);
+            } catch (NumberFormatException e) {
+                throw new IllegalArgumentException("Invalid input for '" + key + "' argument. Please specify a number!");
+            }
+        }
+        return -1; // undefined
+    }
+
+    /** Returns all values specified for the given flag (may be empty). */
+    public Set<String> stringFlag(String key) {
+        return this.parsedArgs.get(key);
+    }
+
+    /** Returns true if the given flag was specified at all. */
+    public boolean boolFlag(String key) {
+        return this.parsedArgs.containsKey(key);
+    }
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/Command.java b/spark-common/src/main/java/me/lucko/spark/common/command/Command.java
new file mode 100644
index 0000000..70dc7e8
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/Command.java
@@ -0,0 +1,113 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.command;
+
+import com.google.common.collect.ImmutableSet;
+
+import me.lucko.spark.common.SparkPlatform;
+
+import java.util.Collections;
+import java.util.List;
+import java.util.Objects;
+import java.util.Set;
+
+/**
+ * Represents a command registered by a {@link CommandModule}.
+ *
+ * @param <S> the sender type used by the platform
+ */
+public class Command<S> {
+
+    /** Creates a new builder for defining a command. */
+    public static <S> Builder<S> builder() {
+        return new Builder<>();
+    }
+
+    /** The names which this command can be executed by; immutable, never empty */
+    private final Set<String> aliases;
+    /** The action run when this command is executed */
+    private final Executor<S> executor;
+    /** Provides tab completion suggestions; never null (defaults to empty) */
+    private final TabCompleter<S> tabCompleter;
+
+    private Command(Set<String> aliases, Executor<S> executor, TabCompleter<S> tabCompleter) {
+        this.aliases = aliases;
+        this.executor = executor;
+        this.tabCompleter = tabCompleter;
+    }
+
+    /** Returns the aliases this command can be executed by. */
+    public Set<String> aliases() {
+        return this.aliases;
+    }
+
+    /** Returns this command's executor. */
+    public Executor<S> executor() {
+        return this.executor;
+    }
+
+    /** Returns this command's tab completer. */
+    public TabCompleter<S> tabCompleter() {
+        return this.tabCompleter;
+    }
+
+    /** Builder for {@link Command} instances. */
+    public static final class Builder<S> {
+        private ImmutableSet.Builder<String> aliases = ImmutableSet.builder();
+        private Executor<S> executor = null;
+        private TabCompleter<S> tabCompleter = null;
+
+        Builder() {
+
+        }
+
+        /** Adds aliases by which the command can be executed. */
+        public Builder<S> aliases(String... aliases) {
+            this.aliases.add(aliases);
+            return this;
+        }
+
+        /** Sets the executor. Required. */
+        public Builder<S> executor(Executor<S> executor) {
+            this.executor = Objects.requireNonNull(executor, "executor");
+            return this;
+        }
+
+        /** Sets the tab completer. Optional; defaults to an empty completer. */
+        public Builder<S> tabCompleter(TabCompleter<S> tabCompleter) {
+            this.tabCompleter = Objects.requireNonNull(tabCompleter, "tabCompleter");
+            return this;
+        }
+
+        /**
+         * Builds the command.
+         *
+         * @throws IllegalStateException if no aliases or no executor were defined
+         */
+        public Command<S> build() {
+            Set<String> aliases = this.aliases.build();
+            if (aliases.isEmpty()) {
+                throw new IllegalStateException("No aliases defined");
+            }
+            if (this.executor == null) {
+                throw new IllegalStateException("No defined executor");
+            }
+            if (this.tabCompleter == null) {
+                // fall back to a completer which suggests nothing
+                this.tabCompleter = TabCompleter.empty();
+            }
+            return new Command<>(aliases, this.executor, this.tabCompleter);
+        }
+    }
+
+    /** Performs the action of a command when it is executed. */
+    @FunctionalInterface
+    public interface Executor<S> {
+        void execute(SparkPlatform<S> platform, S sender, Arguments arguments);
+    }
+
+    /** Provides tab completion suggestions for a command. */
+    @FunctionalInterface
+    public interface TabCompleter<S> {
+        /** Returns a completer which always suggests nothing. */
+        static <S> TabCompleter<S> empty() {
+            return (platform, sender, arguments) -> Collections.emptyList();
+        }
+
+        List<String> completions(SparkPlatform<S> platform, S sender, List<String> arguments);
+    }
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/CommandModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/CommandModule.java
new file mode 100644
index 0000000..f195ef2
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/CommandModule.java
@@ -0,0 +1,29 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.command;
+
+import java.util.function.Consumer;
+
+/**
+ * A source of {@link Command}s to be registered with the platform.
+ *
+ * @param <S> the sender type used by the platform
+ */
+public interface CommandModule<S> {
+
+    /**
+     * Registers the commands provided by this module.
+     *
+     * @param consumer accepts each command to be registered
+     */
+    void registerCommands(Consumer<Command<S>> consumer);
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapModule.java
new file mode 100644
index 0000000..e586971
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapModule.java
@@ -0,0 +1,69 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.command.modules;
+
+import me.lucko.spark.common.SparkPlatform;
+import me.lucko.spark.common.command.Command;
+import me.lucko.spark.common.command.CommandModule;
+import me.lucko.spark.common.http.Bytebin;
+import me.lucko.spark.memory.HeapDump;
+
+import java.io.IOException;
+import java.util.function.Consumer;
+
+/**
+ * Command module providing the "heap"/"memory" command, which creates a
+ * heap dump summary and uploads it for viewing.
+ *
+ * @param <S> the sender type used by the platform
+ */
+public class HeapModule<S> implements CommandModule<S> {
+
+    @Override
+    public void registerCommands(Consumer<Command<S>> consumer) {
+        consumer.accept(Command.<S>builder()
+                .aliases("heap", "memory")
+                .executor((platform, sender, arguments) -> {
+                    // heap inspection and upload can be slow - run off-thread
+                    platform.runAsync(() -> {
+                        platform.sendPrefixedMessage("&7Creating a new heap dump, please wait...");
+
+                        HeapDump heapDump;
+                        try {
+                            heapDump = HeapDump.createNew();
+                        } catch (Exception e) {
+                            platform.sendPrefixedMessage("&cAn error occurred whilst inspecting the heap.");
+                            e.printStackTrace();
+                            return;
+                        }
+
+                        byte[] output = heapDump.formCompressedDataPayload();
+                        try {
+                            String pasteId = Bytebin.postCompressedContent(output);
+                            platform.sendPrefixedMessage("&bHeap dump output:");
+                            platform.sendLink(SparkPlatform.VIEWER_URL + pasteId);
+                        } catch (IOException e) {
+                            platform.sendPrefixedMessage("&cAn error occurred whilst uploading the data.");
+                            e.printStackTrace();
+                        }
+                    });
+                })
+                // no explicit tab completer: the builder falls back to
+                // TabCompleter.empty(), which returns an empty list rather
+                // than the null previously returned here
+                .build()
+        );
+    }
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/MonitoringModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/MonitoringModule.java
new file mode 100644
index 0000000..eafc567
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/MonitoringModule.java
@@ -0,0 +1,80 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.command.modules;
+
+import me.lucko.spark.common.SparkPlatform;
+import me.lucko.spark.common.command.Command;
+import me.lucko.spark.common.command.CommandModule;
+import me.lucko.spark.monitor.TickMonitor;
+import me.lucko.spark.sampler.TickCounter;
+
+import java.util.function.Consumer;
+
+/**
+ * Command module providing the "monitoring" command, which toggles a
+ * {@link TickMonitor} that reports ticks exceeding a percentage-change
+ * threshold.
+ *
+ * <p>NOTE(review): {@link #activeTickMonitor} is read and written without
+ * synchronization, unlike SamplerModule's mutex-guarded field - confirm
+ * commands are always executed on a single thread.</p>
+ *
+ * @param <S> the sender type used by the platform
+ */
+public class MonitoringModule<S> implements CommandModule<S> {
+
+    /** The tick monitor instance currently running, if any */
+    private ReportingTickMonitor activeTickMonitor = null;
+
+    @Override
+    public void registerCommands(Consumer<Command<S>> consumer) {
+        consumer.accept(Command.<S>builder()
+                .aliases("monitoring")
+                .executor((platform, sender, arguments) -> {
+                    if (this.activeTickMonitor == null) {
+
+                        // percentage-change threshold for reporting;
+                        // defaults to 100 when the flag is absent
+                        int threshold = arguments.intFlag("threshold");
+                        if (threshold == -1) {
+                            threshold = 100;
+                        }
+
+                        try {
+                            TickCounter tickCounter = platform.newTickCounter();
+                            this.activeTickMonitor = new ReportingTickMonitor(platform, tickCounter, threshold);
+                        } catch (UnsupportedOperationException e) {
+                            // the platform cannot provide a tick counter
+                            platform.sendPrefixedMessage(sender, "&cNot supported!");
+                        }
+                    } else {
+                        this.activeTickMonitor.close();
+                        this.activeTickMonitor = null;
+                        platform.sendPrefixedMessage("&7Tick monitor disabled.");
+                    }
+                })
+                // no explicit tab completer: the builder falls back to
+                // TabCompleter.empty(), which returns an empty list rather
+                // than the null previously returned here
+                .build()
+        );
+    }
+
+    /** A {@link TickMonitor} which reports its messages via the platform. */
+    private class ReportingTickMonitor extends TickMonitor {
+        private final SparkPlatform<S> platform;
+
+        ReportingTickMonitor(SparkPlatform<S> platform, TickCounter tickCounter, int percentageChangeThreshold) {
+            super(tickCounter, percentageChangeThreshold);
+            this.platform = platform;
+        }
+
+        @Override
+        protected void sendMessage(String message) {
+            this.platform.sendPrefixedMessage(message);
+        }
+    }
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
new file mode 100644
index 0000000..853aa5d
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
@@ -0,0 +1,236 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.command.modules;
+
+import me.lucko.spark.common.SparkPlatform;
+import me.lucko.spark.common.command.Command;
+import me.lucko.spark.common.command.CommandModule;
+import me.lucko.spark.common.http.Bytebin;
+import me.lucko.spark.sampler.Sampler;
+import me.lucko.spark.sampler.SamplerBuilder;
+import me.lucko.spark.sampler.ThreadDumper;
+import me.lucko.spark.sampler.ThreadGrouper;
+import me.lucko.spark.sampler.TickCounter;
+
+import java.io.IOException;
+import java.util.Set;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.TimeUnit;
+import java.util.function.Consumer;
+
+/**
+ * Command module providing the sampler (profiler) commands:
+ * "start", "info", "stop"/"upload"/"paste" and "cancel".
+ *
+ * @param <S> the sender type used by the platform
+ */
+public class SamplerModule<S> implements CommandModule<S> {
+
+    /** Guards {@link #activeSampler} */
+    private final Object[] activeSamplerMutex = new Object[0];
+    /** The WarmRoast instance currently running, if any */
+    private Sampler activeSampler = null;
+
+    @Override
+    public void registerCommands(Consumer<Command<S>> consumer) {
+        consumer.accept(Command.<S>builder()
+                .aliases("start")
+                .executor((platform, sender, arguments) -> {
+                    // optional --timeout: auto-stop after this many seconds
+                    int timeoutSeconds = arguments.intFlag("timeout");
+                    if (timeoutSeconds != -1 && timeoutSeconds <= 10) {
+                        platform.sendPrefixedMessage(sender, "&cThe specified timeout is not long enough for accurate results to be formed. Please choose a value greater than 10.");
+                        return;
+                    }
+
+                    if (timeoutSeconds != -1 && timeoutSeconds < 30) {
+                        platform.sendPrefixedMessage(sender, "&7The accuracy of the output will significantly improve when sampling is able to run for longer periods. Consider setting a timeout value over 30 seconds.");
+                    }
+
+                    // optional --interval: sampling interval in millis, default 4
+                    int intervalMillis = arguments.intFlag("interval");
+                    if (intervalMillis <= 0) {
+                        intervalMillis = 4;
+                    }
+
+                    // optional --thread: which threads to sample
+                    Set<String> threads = arguments.stringFlag("thread");
+                    ThreadDumper threadDumper;
+                    if (threads.isEmpty()) {
+                        // use the server thread
+                        threadDumper = platform.getDefaultThreadDumper();
+                    } else if (threads.contains("*")) {
+                        threadDumper = ThreadDumper.ALL;
+                    } else {
+                        threadDumper = new ThreadDumper.Specific(threads);
+                    }
+
+                    // --not-combined: group output by thread name, not pool
+                    ThreadGrouper threadGrouper;
+                    if (arguments.boolFlag("not-combined")) {
+                        threadGrouper = ThreadGrouper.BY_NAME;
+                    } else {
+                        threadGrouper = ThreadGrouper.BY_POOL;
+                    }
+
+                    // optional --only-ticks-over: requires a tick counter
+                    int ticksOver = arguments.intFlag("only-ticks-over");
+                    TickCounter tickCounter = null;
+                    if (ticksOver != -1) {
+                        try {
+                            tickCounter = platform.newTickCounter();
+                        } catch (UnsupportedOperationException e) {
+                            platform.sendPrefixedMessage(sender, "&cTick counting is not supported!");
+                            return;
+                        }
+                    }
+
+                    Sampler sampler;
+                    synchronized (this.activeSamplerMutex) {
+                        if (this.activeSampler != null) {
+                            platform.sendPrefixedMessage(sender, "&7An active sampler is already running.");
+                            return;
+                        }
+
+                        platform.sendPrefixedMessage("&7Initializing a new profiler, please wait...");
+
+                        SamplerBuilder builder = new SamplerBuilder();
+                        builder.threadDumper(threadDumper);
+                        builder.threadGrouper(threadGrouper);
+                        if (timeoutSeconds != -1) {
+                            builder.completeAfter(timeoutSeconds, TimeUnit.SECONDS);
+                        }
+                        builder.samplingInterval(intervalMillis);
+                        if (ticksOver != -1) {
+                            builder.ticksOver(ticksOver, tickCounter);
+                        }
+                        sampler = this.activeSampler = builder.start();
+
+                        platform.sendPrefixedMessage("&bProfiler now active!");
+                        if (timeoutSeconds == -1) {
+                            platform.sendPrefixedMessage("&7Use '/" + platform.getLabel() + " stop' to stop profiling and upload the results.");
+                        } else {
+                            platform.sendPrefixedMessage("&7The results will be automatically returned after the profiler has been running for " + timeoutSeconds + " seconds.");
+                        }
+                    }
+
+                    CompletableFuture<Sampler> future = sampler.getFuture();
+
+                    // send message if profiling fails
+                    future.whenCompleteAsync((s, throwable) -> {
+                        if (throwable != null) {
+                            platform.sendPrefixedMessage("&cSampling operation failed unexpectedly. Error: " + throwable.toString());
+                            throwable.printStackTrace();
+                        }
+                    });
+
+                    // set activeSampler to null when complete.
+                    future.whenCompleteAsync((s, throwable) -> {
+                        synchronized (this.activeSamplerMutex) {
+                            if (sampler == this.activeSampler) {
+                                this.activeSampler = null;
+                            }
+                        }
+                    });
+
+                    // await the result
+                    if (timeoutSeconds != -1) {
+                        future.thenAcceptAsync(s -> {
+                            platform.sendPrefixedMessage("&7The active sampling operation has completed! Uploading results...");
+                            handleUpload(platform, s);
+                        });
+                    }
+                })
+                // no explicit tab completer: the builder falls back to
+                // TabCompleter.empty(), which returns an empty list rather
+                // than the null previously returned here (likewise for the
+                // commands below)
+                .build()
+        );
+
+        consumer.accept(Command.<S>builder()
+                .aliases("info")
+                .executor((platform, sender, arguments) -> {
+                    synchronized (this.activeSamplerMutex) {
+                        if (this.activeSampler == null) {
+                            platform.sendPrefixedMessage(sender, "&7There isn't an active sampling task running.");
+                        } else {
+                            long timeout = this.activeSampler.getEndTime();
+                            if (timeout == -1) {
+                                platform.sendPrefixedMessage(sender, "&7There is an active sampler currently running, with no defined timeout.");
+                            } else {
+                                long timeoutDiff = (timeout - System.currentTimeMillis()) / 1000L;
+                                platform.sendPrefixedMessage(sender, "&7There is an active sampler currently running, due to timeout in " + timeoutDiff + " seconds.");
+                            }
+
+                            long runningTime = (System.currentTimeMillis() - this.activeSampler.getStartTime()) / 1000L;
+                            platform.sendPrefixedMessage(sender, "&7It has been sampling for " + runningTime + " seconds so far.");
+                        }
+                    }
+                })
+                .build()
+        );
+
+        consumer.accept(Command.<S>builder()
+                .aliases("stop", "upload", "paste")
+                .executor((platform, sender, arguments) -> {
+                    synchronized (this.activeSamplerMutex) {
+                        if (this.activeSampler == null) {
+                            platform.sendPrefixedMessage(sender, "&7There isn't an active sampling task running.");
+                        } else {
+                            this.activeSampler.cancel();
+                            platform.sendPrefixedMessage("&7The active sampling operation has been stopped! Uploading results...");
+                            handleUpload(platform, this.activeSampler);
+                            this.activeSampler = null;
+                        }
+                    }
+                })
+                .build()
+        );
+
+        consumer.accept(Command.<S>builder()
+                .aliases("cancel")
+                .executor((platform, sender, arguments) -> {
+                    synchronized (this.activeSamplerMutex) {
+                        if (this.activeSampler == null) {
+                            platform.sendPrefixedMessage(sender, "&7There isn't an active sampling task running.");
+                        } else {
+                            this.activeSampler.cancel();
+                            this.activeSampler = null;
+                            platform.sendPrefixedMessage("&bThe active sampling task has been cancelled.");
+                        }
+                    }
+                })
+                .build()
+        );
+    }
+
+    /**
+     * Uploads the sampler's output asynchronously and broadcasts a link
+     * to the results, reporting any upload failure.
+     */
+    private void handleUpload(SparkPlatform<S> platform, Sampler sampler) {
+        platform.runAsync(() -> {
+            byte[] output = sampler.formCompressedDataPayload();
+            try {
+                String pasteId = Bytebin.postCompressedContent(output);
+                platform.sendPrefixedMessage("&bSampling results:");
+                platform.sendLink(SparkPlatform.VIEWER_URL + pasteId);
+            } catch (IOException e) {
+                platform.sendPrefixedMessage("&cAn error occurred whilst uploading the results.");
+                e.printStackTrace();
+            }
+        });
+    }
+}