aboutsummaryrefslogtreecommitdiff
path: root/spark-common/src/main/java/me/lucko
diff options
context:
space:
mode:
authorLuck <git@lucko.me>2019-04-16 21:37:59 +0100
committerLuck <git@lucko.me>2019-04-16 21:37:59 +0100
commitecd4cec8545460a4fc4ca65b911c2503a00cd8e7 (patch)
tree62067383a1044abc3a09724e89c6e7c619e87ec0 /spark-common/src/main/java/me/lucko
parent8a61b404848ed8e3c27f06eb73239d37d4273240 (diff)
downloadspark-ecd4cec8545460a4fc4ca65b911c2503a00cd8e7.tar.gz
spark-ecd4cec8545460a4fc4ca65b911c2503a00cd8e7.tar.bz2
spark-ecd4cec8545460a4fc4ca65b911c2503a00cd8e7.zip
Lots of refactoring, add tps command
Diffstat (limited to 'spark-common/src/main/java/me/lucko')
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java105
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java49
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/command/Command.java3
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/command/CommandResponseHandler.java75
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/command/modules/MemoryModule.java120
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/command/modules/MonitoringModule.java51
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java79
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/command/modules/TickMonitoringModule.java36
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/memory/HeapDump.java (renamed from spark-common/src/main/java/me/lucko/spark/memory/HeapDump.java)9
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/memory/HeapDumpSummary.java (renamed from spark-common/src/main/java/me/lucko/spark/memory/HeapDumpSummary.java)12
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/monitor/gc/GarbageCollectionMonitor.java (renamed from spark-common/src/main/java/me/lucko/spark/monitor/GarbageCollectionMonitor.java)35
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TickMonitor.java (renamed from spark-common/src/main/java/me/lucko/spark/monitor/TickMonitor.java)26
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TpsCalculator.java170
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java (renamed from spark-common/src/main/java/me/lucko/spark/sampler/Sampler.java)11
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java (renamed from spark-common/src/main/java/me/lucko/spark/sampler/SamplerBuilder.java)2
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java (renamed from spark-common/src/main/java/me/lucko/spark/sampler/ThreadDumper.java)4
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java (renamed from spark-common/src/main/java/me/lucko/spark/sampler/ThreadGrouper.java)2
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/sampler/TickCounter.java (renamed from spark-common/src/main/java/me/lucko/spark/sampler/TickCounter.java)10
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/DataAggregator.java (renamed from spark-common/src/main/java/me/lucko/spark/sampler/aggregator/DataAggregator.java)4
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/SimpleDataAggregator.java (renamed from spark-common/src/main/java/me/lucko/spark/sampler/aggregator/SimpleDataAggregator.java)8
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/TickedDataAggregator.java (renamed from spark-common/src/main/java/me/lucko/spark/sampler/aggregator/TickedDataAggregator.java)10
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java (renamed from spark-common/src/main/java/me/lucko/spark/sampler/node/AbstractNode.java)2
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java (renamed from spark-common/src/main/java/me/lucko/spark/sampler/node/StackTraceNode.java)2
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java (renamed from spark-common/src/main/java/me/lucko/spark/sampler/node/ThreadNode.java)2
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/util/AbstractHttpClient.java45
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/util/BytebinClient.java149
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/util/ThreadFinder.java (renamed from spark-common/src/main/java/me/lucko/spark/util/ThreadFinder.java)2
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/util/TypeDescriptors.java (renamed from spark-common/src/main/java/me/lucko/spark/util/TypeDescriptors.java)2
-rw-r--r--spark-common/src/main/java/me/lucko/spark/util/BytebinClient.java93
29 files changed, 798 insertions, 320 deletions
diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
index ef21d1c..8eb4565 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
@@ -21,19 +21,20 @@
package me.lucko.spark.common;
import com.google.common.collect.ImmutableList;
-
import me.lucko.spark.common.command.Arguments;
import me.lucko.spark.common.command.Command;
+import me.lucko.spark.common.command.CommandResponseHandler;
import me.lucko.spark.common.command.modules.MemoryModule;
+import me.lucko.spark.common.command.modules.MonitoringModule;
import me.lucko.spark.common.command.modules.SamplerModule;
import me.lucko.spark.common.command.modules.TickMonitoringModule;
import me.lucko.spark.common.command.tabcomplete.CompletionSupplier;
import me.lucko.spark.common.command.tabcomplete.TabCompleter;
-import me.lucko.spark.sampler.ThreadDumper;
-import me.lucko.spark.sampler.TickCounter;
-import me.lucko.spark.util.BytebinClient;
+import me.lucko.spark.common.monitor.tick.TpsCalculator;
+import me.lucko.spark.common.sampler.TickCounter;
+import me.lucko.spark.common.util.BytebinClient;
+import okhttp3.OkHttpClient;
-import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@@ -41,52 +42,68 @@ import java.util.List;
import java.util.stream.Collectors;
/**
- * Abstract command handling class used by all platforms.
+ * Abstract spark implementation used by all platforms.
*
* @param <S> the sender (e.g. CommandSender) type used by the platform
*/
-public abstract class SparkPlatform<S> {
+public class SparkPlatform<S> {
/** The URL of the viewer frontend */
public static final String VIEWER_URL = "https://sparkprofiler.github.io/#";
+ /** The shared okhttp client */
+ private static final OkHttpClient OK_HTTP_CLIENT = new OkHttpClient();
/** The bytebin instance used by the platform */
- public static final BytebinClient BYTEBIN_CLIENT = new BytebinClient("https://bytebin.lucko.me/", "spark-plugin");
-
- /** The prefix used in all messages */
- private static final String PREFIX = "&8[&fspark&8] &7";
-
- private static <T> List<Command<T>> prepareCommands() {
- ImmutableList.Builder<Command<T>> builder = ImmutableList.builder();
- new SamplerModule<T>().registerCommands(builder::add);
- new TickMonitoringModule<T>().registerCommands(builder::add);
- new MemoryModule<T>().registerCommands(builder::add);
- return builder.build();
+ public static final BytebinClient BYTEBIN_CLIENT = new BytebinClient(OK_HTTP_CLIENT, "https://bytebin.lucko.me/", "spark-plugin");
+
+ private final List<Command<S>> commands;
+ private final SparkPlugin<S> plugin;
+
+ private final TickCounter tickCounter;
+ private final TpsCalculator tpsCalculator;
+
+ public SparkPlatform(SparkPlugin<S> plugin) {
+ this.plugin = plugin;
+
+ ImmutableList.Builder<Command<S>> commandsBuilder = ImmutableList.builder();
+ new SamplerModule<S>().registerCommands(commandsBuilder::add);
+ new MonitoringModule<S>().registerCommands(commandsBuilder::add);
+ new TickMonitoringModule<S>().registerCommands(commandsBuilder::add);
+ new MemoryModule<S>().registerCommands(commandsBuilder::add);
+ this.commands = commandsBuilder.build();
+
+ this.tickCounter = plugin.createTickCounter();
+ this.tpsCalculator = this.tickCounter != null ? new TpsCalculator() : null;
+ }
+
+ public void enable() {
+ if (this.tickCounter != null) {
+ this.tickCounter.addTickTask(this.tpsCalculator);
+ this.tickCounter.start();
+ }
+ }
+
+ public void disable() {
+ if (this.tickCounter != null) {
+ this.tickCounter.close();
+ }
+ }
+
+ public SparkPlugin<S> getPlugin() {
+ return this.plugin;
}
- private final List<Command<S>> commands = prepareCommands();
-
- // abstract methods implemented by each platform
- public abstract String getVersion();
- public abstract Path getPluginFolder();
- public abstract String getLabel();
- public abstract void sendMessage(S sender, String message);
- public abstract void sendMessage(String message);
- public abstract void sendLink(String url);
- public abstract void runAsync(Runnable r);
- public abstract ThreadDumper getDefaultThreadDumper();
- public abstract TickCounter newTickCounter();
-
- public void sendPrefixedMessage(S sender, String message) {
- sendMessage(sender, PREFIX + message);
+ public TickCounter getTickCounter() {
+ return this.tickCounter;
}
- public void sendPrefixedMessage(String message) {
- sendMessage(PREFIX + message);
+ public TpsCalculator getTpsCalculator() {
+ return this.tpsCalculator;
}
public void executeCommand(S sender, String[] args) {
+ CommandResponseHandler<S> resp = new CommandResponseHandler<>(this, sender);
if (args.length == 0) {
- sendUsage(sender);
+ sendUsage(resp);
return;
}
@@ -96,15 +113,15 @@ public abstract class SparkPlatform<S> {
for (Command<S> command : this.commands) {
if (command.aliases().contains(alias)) {
try {
- command.executor().execute(this, sender, new Arguments(rawArgs));
+ command.executor().execute(this, sender, resp, new Arguments(rawArgs));
} catch (IllegalArgumentException e) {
- sendMessage(sender, "&c" + e.getMessage());
+ resp.replyPrefixed("&c" + e.getMessage());
}
return;
}
}
- sendUsage(sender);
+ sendUsage(resp);
}
public List<String> tabCompleteCommand(S sender, String[] args) {
@@ -127,15 +144,15 @@ public abstract class SparkPlatform<S> {
return Collections.emptyList();
}
- private void sendUsage(S sender) {
- sendPrefixedMessage(sender, "&fspark &7v" + getVersion());
+ private void sendUsage(CommandResponseHandler<S> sender) {
+ sender.replyPrefixed("&fspark &7v" + getPlugin().getVersion());
for (Command<S> command : this.commands) {
- sendMessage(sender, "&b&l> &7/" + getLabel() + " " + command.aliases().get(0));
+ sender.reply("&b&l> &7/" + getPlugin().getLabel() + " " + command.aliases().get(0));
for (Command.ArgumentInfo arg : command.arguments()) {
if (arg.requiresParameter()) {
- sendMessage(sender, " &8[&7--" + arg.argumentName() + "&8 <" + arg.parameterDescription() + ">]");
+ sender.reply(" &8[&7--" + arg.argumentName() + "&8 <" + arg.parameterDescription() + ">]");
} else {
- sendMessage(sender, " &8[&7--" + arg.argumentName() + "]");
+ sender.reply(" &8[&7--" + arg.argumentName() + "]");
}
}
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java
new file mode 100644
index 0000000..7a3a353
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java
@@ -0,0 +1,49 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common;
+
+import me.lucko.spark.common.sampler.ThreadDumper;
+import me.lucko.spark.common.sampler.TickCounter;
+
+import java.nio.file.Path;
+import java.util.Set;
+
+public interface SparkPlugin<S> {
+
+ String getVersion();
+
+ Path getPluginFolder();
+
+ String getLabel();
+
+ Set<S> getSenders();
+
+ void sendMessage(S sender, String message);
+
+ void sendLink(S sender, String url);
+
+ void runAsync(Runnable r);
+
+ ThreadDumper getDefaultThreadDumper();
+
+ TickCounter createTickCounter();
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/Command.java b/spark-common/src/main/java/me/lucko/spark/common/command/Command.java
index fb440b1..c9f6551 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/Command.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/Command.java
@@ -21,7 +21,6 @@
package me.lucko.spark.common.command;
import com.google.common.collect.ImmutableList;
-
import me.lucko.spark.common.SparkPlatform;
import java.util.Collections;
@@ -109,7 +108,7 @@ public class Command<S> {
@FunctionalInterface
public interface Executor<S> {
- void execute(SparkPlatform<S> platform, S sender, Arguments arguments);
+ void execute(SparkPlatform<S> platform, S sender, CommandResponseHandler resp, Arguments arguments);
}
@FunctionalInterface
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/CommandResponseHandler.java b/spark-common/src/main/java/me/lucko/spark/common/command/CommandResponseHandler.java
new file mode 100644
index 0000000..a5a7391
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/CommandResponseHandler.java
@@ -0,0 +1,75 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.command;
+
+import me.lucko.spark.common.SparkPlatform;
+
+import java.util.Set;
+import java.util.function.Consumer;
+
+public class CommandResponseHandler<S> {
+
+ /** The prefix used in all messages */
+ private static final String PREFIX = "&8[&fspark&8] &7";
+
+ private final SparkPlatform<S> platform;
+ private final S sender;
+
+ public CommandResponseHandler(SparkPlatform<S> platform, S sender) {
+ this.platform = platform;
+ this.sender = sender;
+ }
+
+ public S sender() {
+ return this.sender;
+ }
+
+ public void allSenders(Consumer<? super S> action) {
+ Set<S> senders = this.platform.getPlugin().getSenders();
+ senders.add(this.sender);
+ senders.forEach(action);
+ }
+
+ public void reply(String message) {
+ this.platform.getPlugin().sendMessage(this.sender, message);
+ }
+
+ public void broadcast(String message) {
+ allSenders(sender -> this.platform.getPlugin().sendMessage(sender, message));
+ }
+
+ public void replyPrefixed(String message) {
+ this.platform.getPlugin().sendMessage(this.sender, PREFIX + message);
+ }
+
+ public void broadcastPrefixed(String message) {
+ allSenders(sender -> this.platform.getPlugin().sendMessage(sender, PREFIX + message));
+ }
+
+ public void replyLink(String link) {
+ this.platform.getPlugin().sendLink(this.sender, link);
+ }
+
+ public void broadcastLink(String link) {
+ allSenders(sender -> this.platform.getPlugin().sendLink(sender, link));
+ }
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/MemoryModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/MemoryModule.java
index 5f17d54..2cb2e07 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/MemoryModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/MemoryModule.java
@@ -24,9 +24,8 @@ import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.command.Command;
import me.lucko.spark.common.command.CommandModule;
import me.lucko.spark.common.command.tabcomplete.TabCompleter;
-import me.lucko.spark.memory.HeapDump;
-import me.lucko.spark.memory.HeapDumpSummary;
-
+import me.lucko.spark.common.memory.HeapDump;
+import me.lucko.spark.common.memory.HeapDumpSummary;
import okhttp3.MediaType;
import java.io.IOException;
@@ -44,34 +43,34 @@ public class MemoryModule<S> implements CommandModule<S> {
consumer.accept(Command.<S>builder()
.aliases("heapsummary")
.argumentUsage("run-gc-before", null)
- .executor((platform, sender, arguments) -> {
- platform.runAsync(() -> {
- if (arguments.boolFlag("run-gc-before")) {
- platform.sendPrefixedMessage("&7Running garbage collector...");
- System.gc();
- }
-
- platform.sendPrefixedMessage("&7Creating a new heap dump summary, please wait...");
-
- HeapDumpSummary heapDump;
- try {
- heapDump = HeapDumpSummary.createNew();
- } catch (Exception e) {
- platform.sendPrefixedMessage("&cAn error occurred whilst inspecting the heap.");
- e.printStackTrace();
- return;
- }
-
- byte[] output = heapDump.formCompressedDataPayload();
- try {
- String key = SparkPlatform.BYTEBIN_CLIENT.postGzippedContent(output, JSON_TYPE);
- platform.sendPrefixedMessage("&bHeap dump summmary output:");
- platform.sendLink(SparkPlatform.VIEWER_URL + key);
- } catch (IOException e) {
- platform.sendPrefixedMessage("&cAn error occurred whilst uploading the data.");
- e.printStackTrace();
- }
- });
+ .executor((platform, sender, resp, arguments) -> {
+ platform.getPlugin().runAsync(() -> {
+ if (arguments.boolFlag("run-gc-before")) {
+ resp.broadcastPrefixed("&7Running garbage collector...");
+ System.gc();
+ }
+
+ resp.broadcastPrefixed("&7Creating a new heap dump summary, please wait...");
+
+ HeapDumpSummary heapDump;
+ try {
+ heapDump = HeapDumpSummary.createNew();
+ } catch (Exception e) {
+ resp.broadcastPrefixed("&cAn error occurred whilst inspecting the heap.");
+ e.printStackTrace();
+ return;
+ }
+
+ byte[] output = heapDump.formCompressedDataPayload();
+ try {
+ String key = SparkPlatform.BYTEBIN_CLIENT.postContent(output, JSON_TYPE, false).key();
+ resp.broadcastPrefixed("&bHeap dump summmary output:");
+ resp.broadcastLink(SparkPlatform.VIEWER_URL + key);
+ } catch (IOException e) {
+ resp.broadcastPrefixed("&cAn error occurred whilst uploading the data.");
+ e.printStackTrace();
+ }
+ });
})
.tabCompleter((platform, sender, arguments) -> TabCompleter.completeForOpts(arguments, "--run-gc-before"))
.build()
@@ -81,35 +80,36 @@ public class MemoryModule<S> implements CommandModule<S> {
.aliases("heapdump")
.argumentUsage("run-gc-before", null)
.argumentUsage("include-non-live", null)
- .executor((platform, sender, arguments) -> {
- platform.runAsync(() -> {
- Path pluginFolder = platform.getPluginFolder();
- try {
- Files.createDirectories(pluginFolder);
- } catch (IOException e) {
- // ignore
- }
-
- Path file = pluginFolder.resolve("heap-" + DateTimeFormatter.ofPattern("yyyy-MM-dd_HH.mm.ss").format(LocalDateTime.now()) + (HeapDump.isOpenJ9() ? ".phd" : ".hprof"));
- boolean liveOnly = !arguments.boolFlag("include-non-live");
-
- if (arguments.boolFlag("run-gc-before")) {
- platform.sendPrefixedMessage("&7Running garbage collector...");
- System.gc();
- }
-
- platform.sendPrefixedMessage("&7Creating a new heap dump, please wait...");
-
- try {
- HeapDump.dumpHeap(file, liveOnly);
- } catch (Exception e) {
- platform.sendPrefixedMessage("&cAn error occurred whilst creating a heap dump.");
- e.printStackTrace();
- return;
- }
-
- platform.sendPrefixedMessage("&bHeap dump written to: " + file.toString());
- });
+ .executor((platform, sender, resp, arguments) -> {
+ // ignore
+ platform.getPlugin().runAsync(() -> {
+ Path pluginFolder = platform.getPlugin().getPluginFolder();
+ try {
+ Files.createDirectories(pluginFolder);
+ } catch (IOException e) {
+ // ignore
+ }
+
+ Path file = pluginFolder.resolve("heap-" + DateTimeFormatter.ofPattern("yyyy-MM-dd_HH.mm.ss").format(LocalDateTime.now()) + (HeapDump.isOpenJ9() ? ".phd" : ".hprof"));
+ boolean liveOnly = !arguments.boolFlag("include-non-live");
+
+ if (arguments.boolFlag("run-gc-before")) {
+ resp.broadcastPrefixed("&7Running garbage collector...");
+ System.gc();
+ }
+
+ resp.broadcastPrefixed("&7Creating a new heap dump, please wait...");
+
+ try {
+ HeapDump.dumpHeap(file, liveOnly);
+ } catch (Exception e) {
+ resp.broadcastPrefixed("&cAn error occurred whilst creating a heap dump.");
+ e.printStackTrace();
+ return;
+ }
+
+ resp.broadcastPrefixed("&bHeap dump written to: " + file.toString());
+ });
})
.tabCompleter((platform, sender, arguments) -> TabCompleter.completeForOpts(arguments, "--run-gc-before", "--include-non-live"))
.build()
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/MonitoringModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/MonitoringModule.java
new file mode 100644
index 0000000..b543e1d
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/MonitoringModule.java
@@ -0,0 +1,51 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.command.modules;
+
+import me.lucko.spark.common.command.Command;
+import me.lucko.spark.common.command.CommandModule;
+import me.lucko.spark.common.monitor.tick.TpsCalculator;
+
+import java.util.function.Consumer;
+
+public class MonitoringModule<S> implements CommandModule<S> {
+
+ @Override
+ public void registerCommands(Consumer<Command<S>> consumer) {
+ consumer.accept(Command.<S>builder()
+ .aliases("tps")
+ .executor((platform, sender, resp, arguments) -> {
+ TpsCalculator tpsCalculator = platform.getTpsCalculator();
+ if (tpsCalculator == null) {
+ resp.replyPrefixed("TPS data is not available.");
+ return;
+ }
+
+ String formattedTpsString = tpsCalculator.toFormattedString();
+ resp.replyPrefixed("TPS from last 5s, 10s, 1m, 5m, 15m");
+ resp.replyPrefixed(formattedTpsString);
+ })
+ .tabCompleter(Command.TabCompleter.empty())
+ .build()
+ );
+ }
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
index 9d00a96..a0f171c 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
@@ -23,14 +23,14 @@ package me.lucko.spark.common.command.modules;
import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.command.Command;
import me.lucko.spark.common.command.CommandModule;
+import me.lucko.spark.common.command.CommandResponseHandler;
import me.lucko.spark.common.command.tabcomplete.CompletionSupplier;
import me.lucko.spark.common.command.tabcomplete.TabCompleter;
-import me.lucko.spark.sampler.Sampler;
-import me.lucko.spark.sampler.SamplerBuilder;
-import me.lucko.spark.sampler.ThreadDumper;
-import me.lucko.spark.sampler.ThreadGrouper;
-import me.lucko.spark.sampler.TickCounter;
-
+import me.lucko.spark.common.sampler.Sampler;
+import me.lucko.spark.common.sampler.SamplerBuilder;
+import me.lucko.spark.common.sampler.ThreadDumper;
+import me.lucko.spark.common.sampler.ThreadGrouper;
+import me.lucko.spark.common.sampler.TickCounter;
import okhttp3.MediaType;
import java.io.IOException;
@@ -62,15 +62,15 @@ public class SamplerModule<S> implements CommandModule<S> {
.argumentUsage("interval", "interval millis")
.argumentUsage("only-ticks-over", "tick length millis")
.argumentUsage("include-line-numbers", null)
- .executor((platform, sender, arguments) -> {
+ .executor((platform, sender, resp, arguments) -> {
int timeoutSeconds = arguments.intFlag("timeout");
if (timeoutSeconds != -1 && timeoutSeconds <= 10) {
- platform.sendPrefixedMessage(sender, "&cThe specified timeout is not long enough for accurate results to be formed. Please choose a value greater than 10.");
+ resp.replyPrefixed("&cThe specified timeout is not long enough for accurate results to be formed. Please choose a value greater than 10.");
return;
}
if (timeoutSeconds != -1 && timeoutSeconds < 30) {
- platform.sendPrefixedMessage(sender, "&7The accuracy of the output will significantly improve when sampling is able to run for longer periods. Consider setting a timeout value over 30 seconds.");
+ resp.replyPrefixed("&7The accuracy of the output will significantly improve when sampling is able to run for longer periods. Consider setting a timeout value over 30 seconds.");
}
double intervalMillis = arguments.doubleFlag("interval");
@@ -84,7 +84,7 @@ public class SamplerModule<S> implements CommandModule<S> {
ThreadDumper threadDumper;
if (threads.isEmpty()) {
// use the server thread
- threadDumper = platform.getDefaultThreadDumper();
+ threadDumper = platform.getPlugin().getDefaultThreadDumper();
} else if (threads.contains("*")) {
threadDumper = ThreadDumper.ALL;
} else {
@@ -108,10 +108,9 @@ public class SamplerModule<S> implements CommandModule<S> {
int ticksOver = arguments.intFlag("only-ticks-over");
TickCounter tickCounter = null;
if (ticksOver != -1) {
- try {
- tickCounter = platform.newTickCounter();
- } catch (UnsupportedOperationException e) {
- platform.sendPrefixedMessage(sender, "&cTick counting is not supported!");
+ tickCounter = platform.getTickCounter();
+ if (tickCounter == null) {
+ resp.replyPrefixed("&cTick counting is not supported!");
return;
}
}
@@ -119,11 +118,11 @@ public class SamplerModule<S> implements CommandModule<S> {
Sampler sampler;
synchronized (this.activeSamplerMutex) {
if (this.activeSampler != null) {
- platform.sendPrefixedMessage(sender, "&7An active sampler is already running.");
+ resp.replyPrefixed("&7An active sampler is already running.");
return;
}
- platform.sendPrefixedMessage("&7Initializing a new profiler, please wait...");
+ resp.broadcastPrefixed("&7Initializing a new profiler, please wait...");
SamplerBuilder builder = new SamplerBuilder();
builder.threadDumper(threadDumper);
@@ -138,11 +137,11 @@ public class SamplerModule<S> implements CommandModule<S> {
}
sampler = this.activeSampler = builder.start();
- platform.sendPrefixedMessage("&bProfiler now active!");
+ resp.broadcastPrefixed("&bProfiler now active!");
if (timeoutSeconds == -1) {
- platform.sendPrefixedMessage("&7Use '/" + platform.getLabel() + " stop' to stop profiling and upload the results.");
+ resp.broadcastPrefixed("&7Use '/" + platform.getPlugin().getLabel() + " stop' to stop profiling and upload the results.");
} else {
- platform.sendPrefixedMessage("&7The results will be automatically returned after the profiler has been running for " + timeoutSeconds + " seconds.");
+ resp.broadcastPrefixed("&7The results will be automatically returned after the profiler has been running for " + timeoutSeconds + " seconds.");
}
}
@@ -151,7 +150,7 @@ public class SamplerModule<S> implements CommandModule<S> {
// send message if profiling fails
future.whenCompleteAsync((s, throwable) -> {
if (throwable != null) {
- platform.sendPrefixedMessage("&cSampling operation failed unexpectedly. Error: " + throwable.toString());
+ resp.broadcastPrefixed("&cSampling operation failed unexpectedly. Error: " + throwable.toString());
throwable.printStackTrace();
}
});
@@ -168,8 +167,8 @@ public class SamplerModule<S> implements CommandModule<S> {
// await the result
if (timeoutSeconds != -1) {
future.thenAcceptAsync(s -> {
- platform.sendPrefixedMessage("&7The active sampling operation has completed! Uploading results...");
- handleUpload(platform, s);
+ resp.broadcastPrefixed("&7The active sampling operation has completed! Uploading results...");
+ handleUpload(platform, resp, s);
});
}
})
@@ -188,21 +187,21 @@ public class SamplerModule<S> implements CommandModule<S> {
consumer.accept(Command.<S>builder()
.aliases("info")
- .executor((platform, sender, arguments) -> {
+ .executor((platform, sender, resp, arguments) -> {
synchronized (this.activeSamplerMutex) {
if (this.activeSampler == null) {
- platform.sendPrefixedMessage(sender, "&7There isn't an active sampling task running.");
+ resp.replyPrefixed("&7There isn't an active sampling task running.");
} else {
long timeout = this.activeSampler.getEndTime();
if (timeout == -1) {
- platform.sendPrefixedMessage(sender, "&7There is an active sampler currently running, with no defined timeout.");
+ resp.replyPrefixed("&7There is an active sampler currently running, with no defined timeout.");
} else {
long timeoutDiff = (timeout - System.currentTimeMillis()) / 1000L;
- platform.sendPrefixedMessage(sender, "&7There is an active sampler currently running, due to timeout in " + timeoutDiff + " seconds.");
+ resp.replyPrefixed("&7There is an active sampler currently running, due to timeout in " + timeoutDiff + " seconds.");
}
long runningTime = (System.currentTimeMillis() - this.activeSampler.getStartTime()) / 1000L;
- platform.sendPrefixedMessage(sender, "&7It has been sampling for " + runningTime + " seconds so far.");
+ resp.replyPrefixed("&7It has been sampling for " + runningTime + " seconds so far.");
}
}
})
@@ -211,14 +210,14 @@ public class SamplerModule<S> implements CommandModule<S> {
consumer.accept(Command.<S>builder()
.aliases("stop", "upload", "paste")
- .executor((platform, sender, arguments) -> {
+ .executor((platform, sender, resp, arguments) -> {
synchronized (this.activeSamplerMutex) {
if (this.activeSampler == null) {
- platform.sendPrefixedMessage(sender, "&7There isn't an active sampling task running.");
+ resp.replyPrefixed("&7There isn't an active sampling task running.");
} else {
this.activeSampler.cancel();
- platform.sendPrefixedMessage("&7The active sampling operation has been stopped! Uploading results...");
- handleUpload(platform, this.activeSampler);
+ resp.broadcastPrefixed("&7The active sampling operation has been stopped! Uploading results...");
+ handleUpload(platform, resp, this.activeSampler);
this.activeSampler = null;
}
}
@@ -228,14 +227,14 @@ public class SamplerModule<S> implements CommandModule<S> {
consumer.accept(Command.<S>builder()
.aliases("cancel")
- .executor((platform, sender, arguments) -> {
+ .executor((platform, sender, resp, arguments) -> {
synchronized (this.activeSamplerMutex) {
if (this.activeSampler == null) {
- platform.sendPrefixedMessage(sender, "&7There isn't an active sampling task running.");
+ resp.replyPrefixed("&7There isn't an active sampling task running.");
} else {
this.activeSampler.cancel();
this.activeSampler = null;
- platform.sendPrefixedMessage("&bThe active sampling task has been cancelled.");
+ resp.broadcastPrefixed("&bThe active sampling task has been cancelled.");
}
}
})
@@ -243,15 +242,15 @@ public class SamplerModule<S> implements CommandModule<S> {
);
}
- private void handleUpload(SparkPlatform<S> platform, Sampler sampler) {
- platform.runAsync(() -> {
+ private void handleUpload(SparkPlatform<S> platform, CommandResponseHandler<S> resp, Sampler sampler) {
+ platform.getPlugin().runAsync(() -> {
byte[] output = sampler.formCompressedDataPayload();
try {
- String key = SparkPlatform.BYTEBIN_CLIENT.postGzippedContent(output, JSON_TYPE);
- platform.sendPrefixedMessage("&bSampling results:");
- platform.sendLink(SparkPlatform.VIEWER_URL + key);
+ String key = SparkPlatform.BYTEBIN_CLIENT.postContent(output, JSON_TYPE, false).key();
+ resp.broadcastPrefixed("&bSampling results:");
+ resp.broadcastLink(SparkPlatform.VIEWER_URL + key);
} catch (IOException e) {
- platform.sendPrefixedMessage("&cAn error occurred whilst uploading the results.");
+ resp.broadcastPrefixed("&cAn error occurred whilst uploading the results.");
e.printStackTrace();
}
});
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/TickMonitoringModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/TickMonitoringModule.java
index 3ad8909..bea7a07 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/TickMonitoringModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/TickMonitoringModule.java
@@ -20,12 +20,12 @@
package me.lucko.spark.common.command.modules;
-import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.command.Command;
import me.lucko.spark.common.command.CommandModule;
+import me.lucko.spark.common.command.CommandResponseHandler;
import me.lucko.spark.common.command.tabcomplete.TabCompleter;
-import me.lucko.spark.monitor.TickMonitor;
-import me.lucko.spark.sampler.TickCounter;
+import me.lucko.spark.common.monitor.tick.TickMonitor;
+import me.lucko.spark.common.sampler.TickCounter;
import java.util.function.Consumer;
@@ -37,27 +37,29 @@ public class TickMonitoringModule<S> implements CommandModule<S> {
@Override
public void registerCommands(Consumer<Command<S>> consumer) {
consumer.accept(Command.<S>builder()
- .aliases("monitoring")
+ .aliases("tickmonitoring")
.argumentUsage("threshold", "percentage increase")
.argumentUsage("without-gc", null)
- .executor((platform, sender, arguments) -> {
- if (this.activeTickMonitor == null) {
+ .executor((platform, sender, resp, arguments) -> {
+ TickCounter tickCounter = platform.getTickCounter();
+ if (tickCounter == null) {
+ resp.replyPrefixed("&cNot supported!");
+ return;
+ }
+ if (this.activeTickMonitor == null) {
int threshold = arguments.intFlag("threshold");
if (threshold == -1) {
threshold = 100;
}
- try {
- TickCounter tickCounter = platform.newTickCounter();
- this.activeTickMonitor = new ReportingTickMonitor(platform, tickCounter, threshold, !arguments.boolFlag("without-gc"));
- } catch (UnsupportedOperationException e) {
- platform.sendPrefixedMessage(sender, "&cNot supported!");
- }
+ this.activeTickMonitor = new ReportingTickMonitor(resp, tickCounter, threshold, !arguments.boolFlag("without-gc"));
+ tickCounter.addTickTask(this.activeTickMonitor);
} else {
+ tickCounter.removeTickTask(this.activeTickMonitor);
this.activeTickMonitor.close();
this.activeTickMonitor = null;
- platform.sendPrefixedMessage("&7Tick monitor disabled.");
+ resp.broadcastPrefixed("&7Tick monitor disabled.");
}
})
.tabCompleter((platform, sender, arguments) -> TabCompleter.completeForOpts(arguments, "--threshold", "--without-gc"))
@@ -66,16 +68,16 @@ public class TickMonitoringModule<S> implements CommandModule<S> {
}
private class ReportingTickMonitor extends TickMonitor {
- private final SparkPlatform<S> platform;
+ private final CommandResponseHandler<S> resp;
- ReportingTickMonitor(SparkPlatform<S> platform, TickCounter tickCounter, int percentageChangeThreshold, boolean monitorGc) {
+ ReportingTickMonitor(CommandResponseHandler<S> resp, TickCounter tickCounter, int percentageChangeThreshold, boolean monitorGc) {
super(tickCounter, percentageChangeThreshold, monitorGc);
- this.platform = platform;
+ this.resp = resp;
}
@Override
protected void sendMessage(String message) {
- this.platform.sendPrefixedMessage(message);
+ this.resp.broadcastPrefixed(message);
}
}
}
diff --git a/spark-common/src/main/java/me/lucko/spark/memory/HeapDump.java b/spark-common/src/main/java/me/lucko/spark/common/memory/HeapDump.java
index 4017d64..6200c17 100644
--- a/spark-common/src/main/java/me/lucko/spark/memory/HeapDump.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/memory/HeapDump.java
@@ -18,17 +18,16 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-package me.lucko.spark.memory;
+package me.lucko.spark.common.memory;
+import javax.management.JMX;
+import javax.management.MBeanServer;
+import javax.management.ObjectName;
import java.io.IOException;
import java.lang.management.ManagementFactory;
import java.lang.reflect.Method;
import java.nio.file.Path;
-import javax.management.JMX;
-import javax.management.MBeanServer;
-import javax.management.ObjectName;
-
/**
* Utility for creating .hprof memory heap snapshots.
*/
diff --git a/spark-common/src/main/java/me/lucko/spark/memory/HeapDumpSummary.java b/spark-common/src/main/java/me/lucko/spark/common/memory/HeapDumpSummary.java
index 402b89e..f3f62db 100644
--- a/spark-common/src/main/java/me/lucko/spark/memory/HeapDumpSummary.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/memory/HeapDumpSummary.java
@@ -18,12 +18,14 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-package me.lucko.spark.memory;
+package me.lucko.spark.common.memory;
import com.google.gson.stream.JsonWriter;
+import me.lucko.spark.common.util.TypeDescriptors;
-import me.lucko.spark.util.TypeDescriptors;
-
+import javax.management.JMX;
+import javax.management.MBeanServer;
+import javax.management.ObjectName;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
@@ -38,10 +40,6 @@ import java.util.regex.Pattern;
import java.util.stream.Collectors;
import java.util.zip.GZIPOutputStream;
-import javax.management.JMX;
-import javax.management.MBeanServer;
-import javax.management.ObjectName;
-
/**
* Represents a "heap dump summary" from the VM.
*
diff --git a/spark-common/src/main/java/me/lucko/spark/monitor/GarbageCollectionMonitor.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/gc/GarbageCollectionMonitor.java
index 906ca07..93a5fd8 100644
--- a/spark-common/src/main/java/me/lucko/spark/monitor/GarbageCollectionMonitor.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/gc/GarbageCollectionMonitor.java
@@ -18,29 +18,26 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-package me.lucko.spark.monitor;
+package me.lucko.spark.common.monitor.gc;
import com.sun.management.GarbageCollectionNotificationInfo;
-import java.lang.management.GarbageCollectorMXBean;
-import java.lang.management.ManagementFactory;
-import java.util.ArrayList;
-import java.util.List;
-
import javax.management.ListenerNotFoundException;
import javax.management.Notification;
import javax.management.NotificationEmitter;
import javax.management.NotificationListener;
import javax.management.openmbean.CompositeData;
+import java.lang.management.GarbageCollectorMXBean;
+import java.lang.management.ManagementFactory;
+import java.util.ArrayList;
+import java.util.List;
public class GarbageCollectionMonitor implements NotificationListener, AutoCloseable {
- private final TickMonitor tickMonitor;
+ private final List<Listener> listeners = new ArrayList<>();
private final List<NotificationEmitter> emitters = new ArrayList<>();
- public GarbageCollectionMonitor(TickMonitor tickMonitor) {
- this.tickMonitor = tickMonitor;
-
+ public GarbageCollectionMonitor() {
List<GarbageCollectorMXBean> beans = ManagementFactory.getGarbageCollectorMXBeans();
for (GarbageCollectorMXBean bean : beans) {
if (!(bean instanceof NotificationEmitter)) {
@@ -53,6 +50,14 @@ public class GarbageCollectionMonitor implements NotificationListener, AutoClose
}
}
+ public void addListener(Listener listener) {
+ this.listeners.add(listener);
+ }
+
+ public void removeListener(Listener listener) {
+ this.listeners.remove(listener);
+ }
+
@Override
public void handleNotification(Notification notification, Object handback) {
if (!notification.getType().equals(GarbageCollectionNotificationInfo.GARBAGE_COLLECTION_NOTIFICATION)) {
@@ -60,7 +65,9 @@ public class GarbageCollectionMonitor implements NotificationListener, AutoClose
}
GarbageCollectionNotificationInfo data = GarbageCollectionNotificationInfo.from((CompositeData) notification.getUserData());
- this.tickMonitor.onGc(data);
+ for (Listener listener : this.listeners) {
+ listener.onGc(data);
+ }
}
@Override
@@ -73,5 +80,11 @@ public class GarbageCollectionMonitor implements NotificationListener, AutoClose
}
}
this.emitters.clear();
+ this.listeners.clear();
+ }
+
+ public interface Listener {
+ void onGc(GarbageCollectionNotificationInfo data);
}
+
}
diff --git a/spark-common/src/main/java/me/lucko/spark/monitor/TickMonitor.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TickMonitor.java
index abb6148..034e876 100644
--- a/spark-common/src/main/java/me/lucko/spark/monitor/TickMonitor.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TickMonitor.java
@@ -18,16 +18,16 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-package me.lucko.spark.monitor;
+package me.lucko.spark.common.monitor.tick;
import com.sun.management.GarbageCollectionNotificationInfo;
-
-import me.lucko.spark.sampler.TickCounter;
+import me.lucko.spark.common.monitor.gc.GarbageCollectionMonitor;
+import me.lucko.spark.common.sampler.TickCounter;
import java.text.DecimalFormat;
import java.util.DoubleSummaryStatistics;
-public abstract class TickMonitor implements Runnable, AutoCloseable {
+public abstract class TickMonitor implements TickCounter.TickTask, GarbageCollectionMonitor.Listener, AutoCloseable {
private static final DecimalFormat df = new DecimalFormat("#.##");
private final TickCounter tickCounter;
@@ -44,24 +44,25 @@ public abstract class TickMonitor implements Runnable, AutoCloseable {
this.tickCounter = tickCounter;
this.percentageChangeThreshold = percentageChangeThreshold;
- this.tickCounter.start();
- this.tickCounter.addTickTask(this);
-
- this.garbageCollectionMonitor = monitorGc ? new GarbageCollectionMonitor(this) : null;
+ if (monitorGc) {
+ this.garbageCollectionMonitor = new GarbageCollectionMonitor();
+ this.garbageCollectionMonitor.addListener(this);
+ } else {
+ this.garbageCollectionMonitor = null;
+ }
}
protected abstract void sendMessage(String message);
@Override
public void close() {
- this.tickCounter.close();
if (this.garbageCollectionMonitor != null) {
this.garbageCollectionMonitor.close();
}
}
@Override
- public void run() {
+ public void onTick(TickCounter counter) {
double now = ((double) System.nanoTime()) / 1000000d;
// init
@@ -110,13 +111,14 @@ public abstract class TickMonitor implements Runnable, AutoCloseable {
double percentageChange = (increase * 100d) / this.avg;
if (percentageChange > this.percentageChangeThreshold) {
- sendMessage("&7Tick &8#" + this.tickCounter.getCurrentTick() + " &7lasted &b" + df.format(diff) +
+ sendMessage("&7Tick &8#" + counter.getCurrentTick() + " &7lasted &b" + df.format(diff) +
"&7 ms. (&b" + df.format(percentageChange) + "% &7increase from average)");
}
}
}
- void onGc(GarbageCollectionNotificationInfo data) {
+ @Override
+ public void onGc(GarbageCollectionNotificationInfo data) {
if (this.state == State.SETUP) {
// set lastTickTime to zero so this tick won't be counted in the average
this.lastTickTime = 0;
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TpsCalculator.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TpsCalculator.java
new file mode 100644
index 0000000..2f3af3e
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TpsCalculator.java
@@ -0,0 +1,170 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.monitor.tick;
+
+import me.lucko.spark.common.sampler.TickCounter;
+
+import java.math.BigDecimal;
+import java.math.RoundingMode;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * Calculates the servers TPS (ticks per second) rate.
+ *
+ * <p>The code used to calculate the TPS is the same as the code used by the Minecraft server itself.
+ * This means that this class will output values the same as the /tps command.</p>
+ *
+ * <p>We calculate our own values instead of pulling them from the server for two reasons. Firstly,
+ * it's easier - pulling from the server requires reflection code on each of the platforms, we'd
+ * rather avoid that. Secondly, it allows us to generate rolling averages over a shorter period of
+ * time.</p>
+ */
+public class TpsCalculator implements TickCounter.TickTask {
+
+ private static final long SEC_IN_NANO = TimeUnit.SECONDS.toNanos(1);
+ private static final int TPS = 20;
+ private static final int SAMPLE_INTERVAL = 20;
+ private static final BigDecimal TPS_BASE = new BigDecimal(SEC_IN_NANO).multiply(new BigDecimal((long) SAMPLE_INTERVAL));
+
+ private final RollingAverage tps5S = new RollingAverage(5);
+ private final RollingAverage tps10S = new RollingAverage(10);
+ private final RollingAverage tps1M = new RollingAverage(60);
+ private final RollingAverage tps5M = new RollingAverage(60 * 5);
+ private final RollingAverage tps15M = new RollingAverage(60 * 15);
+
+ private final RollingAverage[] averages = new RollingAverage[]{
+ this.tps5S, this.tps10S, this.tps1M, this.tps5M, this.tps15M
+ };
+
+ private long last = 0;
+
+ // called every tick
+ @Override
+ public void onTick(TickCounter counter) {
+ if (counter.getCurrentTick() % SAMPLE_INTERVAL != 0) {
+ return;
+ }
+
+ long now = System.nanoTime();
+
+ if (this.last == 0) {
+ this.last = now;
+ return;
+ }
+
+ long diff = now - this.last;
+ BigDecimal currentTps = TPS_BASE.divide(new BigDecimal(diff), 30, RoundingMode.HALF_UP);
+
+ for (RollingAverage rollingAverage : this.averages) {
+ rollingAverage.add(currentTps, diff);
+ }
+
+ this.last = now;
+ }
+
+ public RollingAverage avg5Sec() {
+ return this.tps5S;
+ }
+
+ public RollingAverage avg10Sec() {
+ return this.tps10S;
+ }
+
+ public RollingAverage avg1Min() {
+ return this.tps1M;
+ }
+
+ public RollingAverage avg5Min() {
+ return this.tps5M;
+ }
+
+ public RollingAverage avg15Min() {
+ return this.tps15M;
+ }
+
+ public String toFormattedString() {
+ return formatTps(this.tps5S.getAverage()) + ", " +
+ formatTps(this.tps10S.getAverage()) + ", " +
+ formatTps(this.tps1M.getAverage()) + ", " +
+ formatTps(this.tps5M.getAverage()) + ", " +
+ formatTps(this.tps15M.getAverage());
+ }
+
+ public static String formatTps(double tps) {
+ StringBuilder sb = new StringBuilder();
+ if (tps > 18.0) {
+ sb.append("&a");
+ } else if (tps > 16.0) {
+ sb.append("&e");
+ } else {
+ sb.append("&c");
+ }
+ if (tps > 20.0) {
+ sb.append('*');
+ }
+ return sb.append(Math.min(Math.round(tps * 100.0) / 100.0, 20.0)).toString();
+ }
+
+ /**
+     * Rolling average calculator.
+ *
+ * <p>This code is taken from PaperMC/Paper, licensed under MIT.</p>
+ *
+ * @author aikar (PaperMC) https://github.com/PaperMC/Paper/blob/master/Spigot-Server-Patches/0021-Further-improve-server-tick-loop.patch
+ */
+ public static final class RollingAverage {
+ private final int size;
+ private long time;
+ private BigDecimal total;
+ private int index = 0;
+ private final BigDecimal[] samples;
+ private final long[] times;
+
+ RollingAverage(int size) {
+ this.size = size;
+ this.time = size * SEC_IN_NANO;
+ this.total = new BigDecimal((long) TPS).multiply(new BigDecimal(SEC_IN_NANO)).multiply(new BigDecimal((long) size));
+ this.samples = new BigDecimal[size];
+ this.times = new long[size];
+ for (int i = 0; i < size; i++) {
+ this.samples[i] = new BigDecimal((long) TPS);
+ this.times[i] = SEC_IN_NANO;
+ }
+ }
+
+ public void add(BigDecimal x, long t) {
+ this.time -= this.times[this.index];
+ this.total = this.total.subtract(this.samples[this.index].multiply(new BigDecimal(this.times[this.index])));
+ this.samples[this.index] = x;
+ this.times[this.index] = t;
+ this.time += t;
+ this.total = this.total.add(x.multiply(new BigDecimal(t)));
+ if (++this.index == this.size) {
+ this.index = 0;
+ }
+ }
+
+ public double getAverage() {
+ return this.total.divide(new BigDecimal(this.time), 30, RoundingMode.HALF_UP).doubleValue();
+ }
+ }
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/sampler/Sampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
index 6777770..d504247 100644
--- a/spark-common/src/main/java/me/lucko/spark/sampler/Sampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
@@ -19,15 +19,14 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-package me.lucko.spark.sampler;
+package me.lucko.spark.common.sampler;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import com.google.gson.stream.JsonWriter;
-
-import me.lucko.spark.sampler.aggregator.DataAggregator;
-import me.lucko.spark.sampler.aggregator.SimpleDataAggregator;
-import me.lucko.spark.sampler.aggregator.TickedDataAggregator;
-import me.lucko.spark.sampler.node.ThreadNode;
+import me.lucko.spark.common.sampler.aggregator.DataAggregator;
+import me.lucko.spark.common.sampler.aggregator.SimpleDataAggregator;
+import me.lucko.spark.common.sampler.aggregator.TickedDataAggregator;
+import me.lucko.spark.common.sampler.node.ThreadNode;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
diff --git a/spark-common/src/main/java/me/lucko/spark/sampler/SamplerBuilder.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java
index bf9dc04..4ce69df 100644
--- a/spark-common/src/main/java/me/lucko/spark/sampler/SamplerBuilder.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java
@@ -18,7 +18,7 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-package me.lucko.spark.sampler;
+package me.lucko.spark.common.sampler;
import java.util.concurrent.TimeUnit;
diff --git a/spark-common/src/main/java/me/lucko/spark/sampler/ThreadDumper.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java
index 5b68eaf..14938ac 100644
--- a/spark-common/src/main/java/me/lucko/spark/sampler/ThreadDumper.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java
@@ -19,9 +19,9 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-package me.lucko.spark.sampler;
+package me.lucko.spark.common.sampler;
-import me.lucko.spark.util.ThreadFinder;
+import me.lucko.spark.common.util.ThreadFinder;
import java.lang.management.ThreadInfo;
import java.lang.management.ThreadMXBean;
diff --git a/spark-common/src/main/java/me/lucko/spark/sampler/ThreadGrouper.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java
index 3f1be33..f53800a 100644
--- a/spark-common/src/main/java/me/lucko/spark/sampler/ThreadGrouper.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java
@@ -18,7 +18,7 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-package me.lucko.spark.sampler;
+package me.lucko.spark.common.sampler;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
diff --git a/spark-common/src/main/java/me/lucko/spark/sampler/TickCounter.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/TickCounter.java
index 059e420..aa839ba 100644
--- a/spark-common/src/main/java/me/lucko/spark/sampler/TickCounter.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/TickCounter.java
@@ -18,7 +18,7 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-package me.lucko.spark.sampler;
+package me.lucko.spark.common.sampler;
/**
* A hook with the game's "tick loop".
@@ -48,13 +48,17 @@ public interface TickCounter extends AutoCloseable {
*
* @param runnable the task
*/
- void addTickTask(Runnable runnable);
+ void addTickTask(TickTask runnable);
/**
* Removes a tick task
*
* @param runnable the task
*/
- void removeTickTask(Runnable runnable);
+ void removeTickTask(TickTask runnable);
+
+ interface TickTask {
+ void onTick(TickCounter counter);
+ }
}
diff --git a/spark-common/src/main/java/me/lucko/spark/sampler/aggregator/DataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/DataAggregator.java
index 0e38eb4..8c65c2d 100644
--- a/spark-common/src/main/java/me/lucko/spark/sampler/aggregator/DataAggregator.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/DataAggregator.java
@@ -18,9 +18,9 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-package me.lucko.spark.sampler.aggregator;
+package me.lucko.spark.common.sampler.aggregator;
-import me.lucko.spark.sampler.node.ThreadNode;
+import me.lucko.spark.common.sampler.node.ThreadNode;
import java.util.Map;
diff --git a/spark-common/src/main/java/me/lucko/spark/sampler/aggregator/SimpleDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/SimpleDataAggregator.java
index a72b47f..8fbd03f 100644
--- a/spark-common/src/main/java/me/lucko/spark/sampler/aggregator/SimpleDataAggregator.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/SimpleDataAggregator.java
@@ -18,11 +18,11 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-package me.lucko.spark.sampler.aggregator;
+package me.lucko.spark.common.sampler.aggregator;
-import me.lucko.spark.sampler.ThreadGrouper;
-import me.lucko.spark.sampler.node.AbstractNode;
-import me.lucko.spark.sampler.node.ThreadNode;
+import me.lucko.spark.common.sampler.ThreadGrouper;
+import me.lucko.spark.common.sampler.node.AbstractNode;
+import me.lucko.spark.common.sampler.node.ThreadNode;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
diff --git a/spark-common/src/main/java/me/lucko/spark/sampler/aggregator/TickedDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/TickedDataAggregator.java
index ef568c8..8f8124b 100644
--- a/spark-common/src/main/java/me/lucko/spark/sampler/aggregator/TickedDataAggregator.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/TickedDataAggregator.java
@@ -18,12 +18,12 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-package me.lucko.spark.sampler.aggregator;
+package me.lucko.spark.common.sampler.aggregator;
-import me.lucko.spark.sampler.ThreadGrouper;
-import me.lucko.spark.sampler.TickCounter;
-import me.lucko.spark.sampler.node.AbstractNode;
-import me.lucko.spark.sampler.node.ThreadNode;
+import me.lucko.spark.common.sampler.ThreadGrouper;
+import me.lucko.spark.common.sampler.TickCounter;
+import me.lucko.spark.common.sampler.node.AbstractNode;
+import me.lucko.spark.common.sampler.node.ThreadNode;
import java.util.ArrayList;
import java.util.List;
diff --git a/spark-common/src/main/java/me/lucko/spark/sampler/node/AbstractNode.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java
index 859014f..5cfc0f2 100644
--- a/spark-common/src/main/java/me/lucko/spark/sampler/node/AbstractNode.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java
@@ -19,7 +19,7 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-package me.lucko.spark.sampler.node;
+package me.lucko.spark.common.sampler.node;
import com.google.gson.stream.JsonWriter;
diff --git a/spark-common/src/main/java/me/lucko/spark/sampler/node/StackTraceNode.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java
index 8cbcd0f..c4e7ac4 100644
--- a/spark-common/src/main/java/me/lucko/spark/sampler/node/StackTraceNode.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java
@@ -19,7 +19,7 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-package me.lucko.spark.sampler.node;
+package me.lucko.spark.common.sampler.node;
import com.google.gson.stream.JsonWriter;
diff --git a/spark-common/src/main/java/me/lucko/spark/sampler/node/ThreadNode.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java
index 2acce21..4e8714c 100644
--- a/spark-common/src/main/java/me/lucko/spark/sampler/node/ThreadNode.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java
@@ -18,7 +18,7 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-package me.lucko.spark.sampler.node;
+package me.lucko.spark.common.sampler.node;
import com.google.gson.stream.JsonWriter;
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/AbstractHttpClient.java b/spark-common/src/main/java/me/lucko/spark/common/util/AbstractHttpClient.java
new file mode 100644
index 0000000..1ff169d
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/AbstractHttpClient.java
@@ -0,0 +1,45 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.util;
+
+import okhttp3.OkHttpClient;
+import okhttp3.Request;
+import okhttp3.Response;
+
+import java.io.IOException;
+
+public class AbstractHttpClient {
+
+ /** The http client */
+ protected final OkHttpClient okHttp;
+
+ public AbstractHttpClient(OkHttpClient okHttp) {
+ this.okHttp = okHttp;
+ }
+
+ protected Response makeHttpRequest(Request request) throws IOException {
+ Response response = this.okHttp.newCall(request).execute();
+ if (!response.isSuccessful()) {
+ throw new RuntimeException("Request was unsuccessful: " + response.code() + " - " + response.message());
+ }
+ return response;
+ }
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/BytebinClient.java b/spark-common/src/main/java/me/lucko/spark/common/util/BytebinClient.java
new file mode 100644
index 0000000..ff8f4e3
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/BytebinClient.java
@@ -0,0 +1,149 @@
+/*
+ * This file is part of bytebin, licensed under the MIT License.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+package me.lucko.spark.common.util;
+
+import okhttp3.MediaType;
+import okhttp3.OkHttpClient;
+import okhttp3.Request;
+import okhttp3.RequestBody;
+import okhttp3.Response;
+
+import java.io.IOException;
+
+/**
+ * Utility for posting content to bytebin.
+ */
+public class BytebinClient extends AbstractHttpClient {
+
+ /** The bytebin URL */
+ private final String url;
+ /** The client user agent */
+ private final String userAgent;
+
+ /**
+ * Creates a new bytebin instance
+ *
+ * @param url the bytebin url
+ * @param userAgent the client user agent string
+ */
+ public BytebinClient(OkHttpClient okHttpClient, String url, String userAgent) {
+ super(okHttpClient);
+ if (url.endsWith("/")) {
+ this.url = url;
+ } else {
+ this.url = url + "/";
+ }
+ this.userAgent = userAgent;
+ }
+
+ /**
+ * POSTs GZIP compressed content to bytebin.
+ *
+ * @param buf the compressed content
+ * @param contentType the type of the content
+ * @param allowModification if the paste should be modifiable
+ * @return the key of the resultant content
+ * @throws IOException if an error occurs
+ */
+ public Content postContent(byte[] buf, MediaType contentType, boolean allowModification) throws IOException {
+ RequestBody body = RequestBody.create(contentType, buf);
+
+ Request.Builder requestBuilder = new Request.Builder()
+ .url(this.url + "post")
+ .header("User-Agent", this.userAgent)
+ .header("Content-Encoding", "gzip");
+
+ if (allowModification) {
+ requestBuilder.header("Allow-Modification", "true");
+ }
+
+ Request request = requestBuilder.post(body).build();
+ try (Response response = makeHttpRequest(request)) {
+ String key = response.header("Location");
+ if (key == null) {
+ throw new IllegalStateException("Key not returned");
+ }
+
+ if (allowModification) {
+ String modificationKey = response.header("Modification-Key");
+ if (modificationKey == null) {
+ throw new IllegalStateException("Modification key not returned");
+ }
+ return new Content(key, modificationKey);
+ } else {
+ return new Content(key);
+ }
+ }
+ }
+
+ /**
+ * PUTs modified GZIP compressed content to bytebin in place of existing content.
+ *
+ * @param existingContent the existing content
+ * @param buf the compressed content to put
+ * @param contentType the type of the content
+ * @throws IOException if an error occurs
+ */
+ public void modifyContent(Content existingContent, byte[] buf, MediaType contentType) throws IOException {
+ if (!existingContent.modifiable) {
+ throw new IllegalArgumentException("Existing content is not modifiable");
+ }
+
+ RequestBody body = RequestBody.create(contentType, buf);
+
+ Request.Builder requestBuilder = new Request.Builder()
+ .url(this.url + existingContent.key())
+ .header("User-Agent", this.userAgent)
+ .header("Content-Encoding", "gzip")
+ .header("Modification-Key", existingContent.modificationKey);
+
+ Request request = requestBuilder.put(body).build();
+ makeHttpRequest(request).close();
+ }
+
+ public static final class Content {
+ private final String key;
+ private final boolean modifiable;
+ private final String modificationKey;
+
+ Content(String key) {
+ this.key = key;
+ this.modifiable = false;
+ this.modificationKey = null;
+ }
+
+ Content(String key, String modificationKey) {
+ this.key = key;
+ this.modifiable = true;
+ this.modificationKey = modificationKey;
+ }
+
+ public String key() {
+ return this.key;
+ }
+ }
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/util/ThreadFinder.java b/spark-common/src/main/java/me/lucko/spark/common/util/ThreadFinder.java
index 8ba7b10..cc0722a 100644
--- a/spark-common/src/main/java/me/lucko/spark/util/ThreadFinder.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/ThreadFinder.java
@@ -18,7 +18,7 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-package me.lucko.spark.util;
+package me.lucko.spark.common.util;
import java.util.Arrays;
import java.util.Objects;
diff --git a/spark-common/src/main/java/me/lucko/spark/util/TypeDescriptors.java b/spark-common/src/main/java/me/lucko/spark/common/util/TypeDescriptors.java
index 20dbe17..a232e77 100644
--- a/spark-common/src/main/java/me/lucko/spark/util/TypeDescriptors.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/TypeDescriptors.java
@@ -18,7 +18,7 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-package me.lucko.spark.util;
+package me.lucko.spark.common.util;
/**
* Utilities for working with Java type descriptors.
diff --git a/spark-common/src/main/java/me/lucko/spark/util/BytebinClient.java b/spark-common/src/main/java/me/lucko/spark/util/BytebinClient.java
deleted file mode 100644
index 00e1c69..0000000
--- a/spark-common/src/main/java/me/lucko/spark/util/BytebinClient.java
+++ /dev/null
@@ -1,93 +0,0 @@
-/*
- * This file is part of bytebin, licensed under the MIT License.
- *
- * Copyright (c) lucko (Luck) <luck@lucko.me>
- * Copyright (c) contributors
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in all
- * copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- * SOFTWARE.
- */
-package me.lucko.spark.util;
-
-import okhttp3.MediaType;
-import okhttp3.OkHttpClient;
-import okhttp3.Request;
-import okhttp3.RequestBody;
-import okhttp3.Response;
-
-import java.io.IOException;
-
-/**
- * Utility for posting content to bytebin.
- */
-public class BytebinClient {
-
- /** The bytebin URL */
- private final String url;
- /** The client user agent */
- private final String userAgent;
- /** The http client */
- protected final OkHttpClient okHttp;
-
- /**
- * Creates a new bytebin instance
- *
- * @param url the bytebin url
- * @param userAgent the client user agent string
- */
- public BytebinClient(String url, String userAgent) {
- if (url.endsWith("/")) {
- this.url = url + "post";
- } else {
- this.url = url + "/post";
- }
- this.userAgent = userAgent;
- this.okHttp = new OkHttpClient();
- }
-
- /**
- * Posts GZIP compressed content to bytebin.
- *
- * @param buf the compressed content
- * @param contentType the type of the content
- * @return the key of the resultant content
- * @throws IOException if an error occurs
- */
- public String postGzippedContent(byte[] buf, MediaType contentType) throws IOException {
- RequestBody body = RequestBody.create(contentType, buf);
-
- Request.Builder requestBuilder = new Request.Builder()
- .url(this.url)
- .header("User-Agent", this.userAgent)
- .header("Content-Encoding", "gzip")
- .post(body);
-
- Request request = requestBuilder.build();
- try (Response response = makeHttpRequest(request)) {
- return response.header("Location");
- }
- }
-
- protected Response makeHttpRequest(Request request) throws IOException {
- Response response = this.okHttp.newCall(request).execute();
- if (!response.isSuccessful()) {
- throw new RuntimeException("Request was unsuccessful: " + response.code() + " - " + response.message());
- }
- return response;
- }
-}