author     Luck <git@lucko.me>  2019-04-16 21:37:59 +0100
committer  Luck <git@lucko.me>  2019-04-16 21:37:59 +0100
commit     ecd4cec8545460a4fc4ca65b911c2503a00cd8e7 (patch)
tree       62067383a1044abc3a09724e89c6e7c619e87ec0 /spark-common/src/main/java/me/lucko/spark/common
parent     8a61b404848ed8e3c27f06eb73239d37d4273240 (diff)
Lots of refactoring, add tps command
Diffstat (limited to 'spark-common/src/main/java/me/lucko/spark/common')
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java  105
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java  49
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/command/Command.java  3
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/command/CommandResponseHandler.java  75
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/command/modules/MemoryModule.java  120
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/command/modules/MonitoringModule.java  51
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java  79
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/command/modules/TickMonitoringModule.java  36
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/memory/HeapDump.java  77
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/memory/HeapDumpSummary.java  171
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/monitor/gc/GarbageCollectionMonitor.java  90
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TickMonitor.java  143
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TpsCalculator.java  170
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java  200
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java  90
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java  126
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java  72
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/TickCounter.java  64
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/DataAggregator.java  54
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/SimpleDataAggregator.java  82
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/TickedDataAggregator.java  184
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java  143
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java  86
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java  44
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/util/AbstractHttpClient.java  45
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/util/BytebinClient.java  149
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/util/ThreadFinder.java  60
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/util/TypeDescriptors.java  81
28 files changed, 2486 insertions, 163 deletions
diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
index ef21d1c..8eb4565 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
@@ -21,19 +21,20 @@
package me.lucko.spark.common;
import com.google.common.collect.ImmutableList;
-
import me.lucko.spark.common.command.Arguments;
import me.lucko.spark.common.command.Command;
+import me.lucko.spark.common.command.CommandResponseHandler;
import me.lucko.spark.common.command.modules.MemoryModule;
+import me.lucko.spark.common.command.modules.MonitoringModule;
import me.lucko.spark.common.command.modules.SamplerModule;
import me.lucko.spark.common.command.modules.TickMonitoringModule;
import me.lucko.spark.common.command.tabcomplete.CompletionSupplier;
import me.lucko.spark.common.command.tabcomplete.TabCompleter;
-import me.lucko.spark.sampler.ThreadDumper;
-import me.lucko.spark.sampler.TickCounter;
-import me.lucko.spark.util.BytebinClient;
+import me.lucko.spark.common.monitor.tick.TpsCalculator;
+import me.lucko.spark.common.sampler.TickCounter;
+import me.lucko.spark.common.util.BytebinClient;
+import okhttp3.OkHttpClient;
-import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@@ -41,52 +42,68 @@ import java.util.List;
import java.util.stream.Collectors;
/**
- * Abstract command handling class used by all platforms.
+ * Abstract spark implementation used by all platforms.
*
* @param <S> the sender (e.g. CommandSender) type used by the platform
*/
-public abstract class SparkPlatform<S> {
+public class SparkPlatform<S> {
/** The URL of the viewer frontend */
public static final String VIEWER_URL = "https://sparkprofiler.github.io/#";
+ /** The shared okhttp client */
+ private static final OkHttpClient OK_HTTP_CLIENT = new OkHttpClient();
/** The bytebin instance used by the platform */
- public static final BytebinClient BYTEBIN_CLIENT = new BytebinClient("https://bytebin.lucko.me/", "spark-plugin");
-
- /** The prefix used in all messages */
- private static final String PREFIX = "&8[&fspark&8] &7";
-
- private static <T> List<Command<T>> prepareCommands() {
- ImmutableList.Builder<Command<T>> builder = ImmutableList.builder();
- new SamplerModule<T>().registerCommands(builder::add);
- new TickMonitoringModule<T>().registerCommands(builder::add);
- new MemoryModule<T>().registerCommands(builder::add);
- return builder.build();
+ public static final BytebinClient BYTEBIN_CLIENT = new BytebinClient(OK_HTTP_CLIENT, "https://bytebin.lucko.me/", "spark-plugin");
+
+ private final List<Command<S>> commands;
+ private final SparkPlugin<S> plugin;
+
+ private final TickCounter tickCounter;
+ private final TpsCalculator tpsCalculator;
+
+ public SparkPlatform(SparkPlugin<S> plugin) {
+ this.plugin = plugin;
+
+ ImmutableList.Builder<Command<S>> commandsBuilder = ImmutableList.builder();
+ new SamplerModule<S>().registerCommands(commandsBuilder::add);
+ new MonitoringModule<S>().registerCommands(commandsBuilder::add);
+ new TickMonitoringModule<S>().registerCommands(commandsBuilder::add);
+ new MemoryModule<S>().registerCommands(commandsBuilder::add);
+ this.commands = commandsBuilder.build();
+
+ this.tickCounter = plugin.createTickCounter();
+ this.tpsCalculator = this.tickCounter != null ? new TpsCalculator() : null;
+ }
+
+ public void enable() {
+ if (this.tickCounter != null) {
+ this.tickCounter.addTickTask(this.tpsCalculator);
+ this.tickCounter.start();
+ }
+ }
+
+ public void disable() {
+ if (this.tickCounter != null) {
+ this.tickCounter.close();
+ }
+ }
+
+ public SparkPlugin<S> getPlugin() {
+ return this.plugin;
}
- private final List<Command<S>> commands = prepareCommands();
-
- // abstract methods implemented by each platform
- public abstract String getVersion();
- public abstract Path getPluginFolder();
- public abstract String getLabel();
- public abstract void sendMessage(S sender, String message);
- public abstract void sendMessage(String message);
- public abstract void sendLink(String url);
- public abstract void runAsync(Runnable r);
- public abstract ThreadDumper getDefaultThreadDumper();
- public abstract TickCounter newTickCounter();
-
- public void sendPrefixedMessage(S sender, String message) {
- sendMessage(sender, PREFIX + message);
+ public TickCounter getTickCounter() {
+ return this.tickCounter;
}
- public void sendPrefixedMessage(String message) {
- sendMessage(PREFIX + message);
+ public TpsCalculator getTpsCalculator() {
+ return this.tpsCalculator;
}
public void executeCommand(S sender, String[] args) {
+ CommandResponseHandler<S> resp = new CommandResponseHandler<>(this, sender);
if (args.length == 0) {
- sendUsage(sender);
+ sendUsage(resp);
return;
}
@@ -96,15 +113,15 @@ public abstract class SparkPlatform<S> {
for (Command<S> command : this.commands) {
if (command.aliases().contains(alias)) {
try {
- command.executor().execute(this, sender, new Arguments(rawArgs));
+ command.executor().execute(this, sender, resp, new Arguments(rawArgs));
} catch (IllegalArgumentException e) {
- sendMessage(sender, "&c" + e.getMessage());
+ resp.replyPrefixed("&c" + e.getMessage());
}
return;
}
}
- sendUsage(sender);
+ sendUsage(resp);
}
public List<String> tabCompleteCommand(S sender, String[] args) {
@@ -127,15 +144,15 @@ public abstract class SparkPlatform<S> {
return Collections.emptyList();
}
- private void sendUsage(S sender) {
- sendPrefixedMessage(sender, "&fspark &7v" + getVersion());
+ private void sendUsage(CommandResponseHandler<S> sender) {
+ sender.replyPrefixed("&fspark &7v" + getPlugin().getVersion());
for (Command<S> command : this.commands) {
- sendMessage(sender, "&b&l> &7/" + getLabel() + " " + command.aliases().get(0));
+ sender.reply("&b&l> &7/" + getPlugin().getLabel() + " " + command.aliases().get(0));
for (Command.ArgumentInfo arg : command.arguments()) {
if (arg.requiresParameter()) {
- sendMessage(sender, " &8[&7--" + arg.argumentName() + "&8 <" + arg.parameterDescription() + ">]");
+ sender.reply(" &8[&7--" + arg.argumentName() + "&8 <" + arg.parameterDescription() + ">]");
} else {
- sendMessage(sender, " &8[&7--" + arg.argumentName() + "]");
+ sender.reply(" &8[&7--" + arg.argumentName() + "]");
}
}
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java
new file mode 100644
index 0000000..7a3a353
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java
@@ -0,0 +1,49 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common;
+
+import me.lucko.spark.common.sampler.ThreadDumper;
+import me.lucko.spark.common.sampler.TickCounter;
+
+import java.nio.file.Path;
+import java.util.Set;
+
+public interface SparkPlugin<S> {
+
+ String getVersion();
+
+ Path getPluginFolder();
+
+ String getLabel();
+
+ Set<S> getSenders();
+
+ void sendMessage(S sender, String message);
+
+ void sendLink(S sender, String url);
+
+ void runAsync(Runnable r);
+
+ ThreadDumper getDefaultThreadDumper();
+
+ TickCounter createTickCounter();
+
+}
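
The SparkPlugin interface extracts the abstract methods that previously lived on SparkPlatform: each platform module now implements it and passes itself into the SparkPlatform constructor. A minimal sketch of that wiring, assuming a hypothetical standalone platform where senders are plain strings (the package, class name and return values below are illustrative, not part of this commit):

    package me.lucko.spark.standalone; // hypothetical package

    import me.lucko.spark.common.SparkPlatform;
    import me.lucko.spark.common.SparkPlugin;
    import me.lucko.spark.common.sampler.ThreadDumper;
    import me.lucko.spark.common.sampler.TickCounter;

    import java.nio.file.Path;
    import java.nio.file.Paths;
    import java.util.HashSet;
    import java.util.Set;

    public class StandalonePlugin implements SparkPlugin<String> {

        public static void main(String[] args) {
            SparkPlatform<String> platform = new SparkPlatform<>(new StandalonePlugin());
            platform.enable();
            platform.executeCommand("console", new String[0]); // no args -> prints usage
            platform.disable();
        }

        @Override public String getVersion() { return "1.0.0"; }
        @Override public Path getPluginFolder() { return Paths.get("spark"); }
        @Override public String getLabel() { return "spark"; }

        // Must return a mutable set: CommandResponseHandler#allSenders adds the
        // invoking sender to it before broadcasting.
        @Override public Set<String> getSenders() { return new HashSet<>(); }

        @Override public void sendMessage(String sender, String message) {
            System.out.println("[" + sender + "] " + message);
        }
        @Override public void sendLink(String sender, String url) { sendMessage(sender, url); }
        @Override public void runAsync(Runnable r) { new Thread(r).start(); }
        @Override public ThreadDumper getDefaultThreadDumper() { return ThreadDumper.ALL; }

        // Returning null is allowed: SparkPlatform then skips the TpsCalculator, and
        // the tps/tickmonitoring commands report the feature as unavailable.
        @Override public TickCounter createTickCounter() { return null; }
    }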
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/Command.java b/spark-common/src/main/java/me/lucko/spark/common/command/Command.java
index fb440b1..c9f6551 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/Command.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/Command.java
@@ -21,7 +21,6 @@
package me.lucko.spark.common.command;
import com.google.common.collect.ImmutableList;
-
import me.lucko.spark.common.SparkPlatform;
import java.util.Collections;
@@ -109,7 +108,7 @@ public class Command<S> {
@FunctionalInterface
public interface Executor<S> {
- void execute(SparkPlatform<S> platform, S sender, Arguments arguments);
+ void execute(SparkPlatform<S> platform, S sender, CommandResponseHandler<S> resp, Arguments arguments);
}
@FunctionalInterface
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/CommandResponseHandler.java b/spark-common/src/main/java/me/lucko/spark/common/command/CommandResponseHandler.java
new file mode 100644
index 0000000..a5a7391
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/CommandResponseHandler.java
@@ -0,0 +1,75 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.command;
+
+import me.lucko.spark.common.SparkPlatform;
+
+import java.util.Set;
+import java.util.function.Consumer;
+
+public class CommandResponseHandler<S> {
+
+ /** The prefix used in all messages */
+ private static final String PREFIX = "&8[&fspark&8] &7";
+
+ private final SparkPlatform<S> platform;
+ private final S sender;
+
+ public CommandResponseHandler(SparkPlatform<S> platform, S sender) {
+ this.platform = platform;
+ this.sender = sender;
+ }
+
+ public S sender() {
+ return this.sender;
+ }
+
+ public void allSenders(Consumer<? super S> action) {
+ Set<S> senders = this.platform.getPlugin().getSenders();
+ senders.add(this.sender);
+ senders.forEach(action);
+ }
+
+ public void reply(String message) {
+ this.platform.getPlugin().sendMessage(this.sender, message);
+ }
+
+ public void broadcast(String message) {
+ allSenders(sender -> this.platform.getPlugin().sendMessage(sender, message));
+ }
+
+ public void replyPrefixed(String message) {
+ this.platform.getPlugin().sendMessage(this.sender, PREFIX + message);
+ }
+
+ public void broadcastPrefixed(String message) {
+ allSenders(sender -> this.platform.getPlugin().sendMessage(sender, PREFIX + message));
+ }
+
+ public void replyLink(String link) {
+ this.platform.getPlugin().sendLink(this.sender, link);
+ }
+
+ public void broadcastLink(String link) {
+ allSenders(sender -> this.platform.getPlugin().sendLink(sender, link));
+ }
+
+}
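
CommandResponseHandler bundles the invoking sender with the platform, so command code can choose between replying only to the invoker and broadcasting to every sender the plugin knows about. A short sketch, reusing the hypothetical StandalonePlugin from the previous example (assumed to be in the same package):

    import me.lucko.spark.common.SparkPlatform;
    import me.lucko.spark.common.command.CommandResponseHandler;

    public final class ResponseDemo {
        public static void main(String[] args) {
            SparkPlatform<String> platform = new SparkPlatform<>(new StandalonePlugin());
            CommandResponseHandler<String> resp = new CommandResponseHandler<>(platform, "console");

            resp.replyPrefixed("&7Seen only by the invoking sender.");
            resp.broadcastPrefixed("&7Seen by everyone in getSenders(), plus the invoker.");
        }
    }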
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/MemoryModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/MemoryModule.java
index 5f17d54..2cb2e07 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/MemoryModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/MemoryModule.java
@@ -24,9 +24,8 @@ import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.command.Command;
import me.lucko.spark.common.command.CommandModule;
import me.lucko.spark.common.command.tabcomplete.TabCompleter;
-import me.lucko.spark.memory.HeapDump;
-import me.lucko.spark.memory.HeapDumpSummary;
-
+import me.lucko.spark.common.memory.HeapDump;
+import me.lucko.spark.common.memory.HeapDumpSummary;
import okhttp3.MediaType;
import java.io.IOException;
@@ -44,34 +43,34 @@ public class MemoryModule<S> implements CommandModule<S> {
consumer.accept(Command.<S>builder()
.aliases("heapsummary")
.argumentUsage("run-gc-before", null)
- .executor((platform, sender, arguments) -> {
- platform.runAsync(() -> {
- if (arguments.boolFlag("run-gc-before")) {
- platform.sendPrefixedMessage("&7Running garbage collector...");
- System.gc();
- }
-
- platform.sendPrefixedMessage("&7Creating a new heap dump summary, please wait...");
-
- HeapDumpSummary heapDump;
- try {
- heapDump = HeapDumpSummary.createNew();
- } catch (Exception e) {
- platform.sendPrefixedMessage("&cAn error occurred whilst inspecting the heap.");
- e.printStackTrace();
- return;
- }
-
- byte[] output = heapDump.formCompressedDataPayload();
- try {
- String key = SparkPlatform.BYTEBIN_CLIENT.postGzippedContent(output, JSON_TYPE);
- platform.sendPrefixedMessage("&bHeap dump summmary output:");
- platform.sendLink(SparkPlatform.VIEWER_URL + key);
- } catch (IOException e) {
- platform.sendPrefixedMessage("&cAn error occurred whilst uploading the data.");
- e.printStackTrace();
- }
- });
+ .executor((platform, sender, resp, arguments) -> {
+ platform.getPlugin().runAsync(() -> {
+ if (arguments.boolFlag("run-gc-before")) {
+ resp.broadcastPrefixed("&7Running garbage collector...");
+ System.gc();
+ }
+
+ resp.broadcastPrefixed("&7Creating a new heap dump summary, please wait...");
+
+ HeapDumpSummary heapDump;
+ try {
+ heapDump = HeapDumpSummary.createNew();
+ } catch (Exception e) {
+ resp.broadcastPrefixed("&cAn error occurred whilst inspecting the heap.");
+ e.printStackTrace();
+ return;
+ }
+
+ byte[] output = heapDump.formCompressedDataPayload();
+ try {
+ String key = SparkPlatform.BYTEBIN_CLIENT.postContent(output, JSON_TYPE, false).key();
+ resp.broadcastPrefixed("&bHeap dump summmary output:");
+ resp.broadcastLink(SparkPlatform.VIEWER_URL + key);
+ } catch (IOException e) {
+ resp.broadcastPrefixed("&cAn error occurred whilst uploading the data.");
+ e.printStackTrace();
+ }
+ });
})
.tabCompleter((platform, sender, arguments) -> TabCompleter.completeForOpts(arguments, "--run-gc-before"))
.build()
@@ -81,35 +80,36 @@ public class MemoryModule<S> implements CommandModule<S> {
.aliases("heapdump")
.argumentUsage("run-gc-before", null)
.argumentUsage("include-non-live", null)
- .executor((platform, sender, arguments) -> {
- platform.runAsync(() -> {
- Path pluginFolder = platform.getPluginFolder();
- try {
- Files.createDirectories(pluginFolder);
- } catch (IOException e) {
- // ignore
- }
-
- Path file = pluginFolder.resolve("heap-" + DateTimeFormatter.ofPattern("yyyy-MM-dd_HH.mm.ss").format(LocalDateTime.now()) + (HeapDump.isOpenJ9() ? ".phd" : ".hprof"));
- boolean liveOnly = !arguments.boolFlag("include-non-live");
-
- if (arguments.boolFlag("run-gc-before")) {
- platform.sendPrefixedMessage("&7Running garbage collector...");
- System.gc();
- }
-
- platform.sendPrefixedMessage("&7Creating a new heap dump, please wait...");
-
- try {
- HeapDump.dumpHeap(file, liveOnly);
- } catch (Exception e) {
- platform.sendPrefixedMessage("&cAn error occurred whilst creating a heap dump.");
- e.printStackTrace();
- return;
- }
-
- platform.sendPrefixedMessage("&bHeap dump written to: " + file.toString());
- });
+ .executor((platform, sender, resp, arguments) -> {
+ // ignore
+ platform.getPlugin().runAsync(() -> {
+ Path pluginFolder = platform.getPlugin().getPluginFolder();
+ try {
+ Files.createDirectories(pluginFolder);
+ } catch (IOException e) {
+ // ignore
+ }
+
+ Path file = pluginFolder.resolve("heap-" + DateTimeFormatter.ofPattern("yyyy-MM-dd_HH.mm.ss").format(LocalDateTime.now()) + (HeapDump.isOpenJ9() ? ".phd" : ".hprof"));
+ boolean liveOnly = !arguments.boolFlag("include-non-live");
+
+ if (arguments.boolFlag("run-gc-before")) {
+ resp.broadcastPrefixed("&7Running garbage collector...");
+ System.gc();
+ }
+
+ resp.broadcastPrefixed("&7Creating a new heap dump, please wait...");
+
+ try {
+ HeapDump.dumpHeap(file, liveOnly);
+ } catch (Exception e) {
+ resp.broadcastPrefixed("&cAn error occurred whilst creating a heap dump.");
+ e.printStackTrace();
+ return;
+ }
+
+ resp.broadcastPrefixed("&bHeap dump written to: " + file.toString());
+ });
})
.tabCompleter((platform, sender, arguments) -> TabCompleter.completeForOpts(arguments, "--run-gc-before", "--include-non-live"))
.build()
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/MonitoringModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/MonitoringModule.java
new file mode 100644
index 0000000..b543e1d
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/MonitoringModule.java
@@ -0,0 +1,51 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.command.modules;
+
+import me.lucko.spark.common.command.Command;
+import me.lucko.spark.common.command.CommandModule;
+import me.lucko.spark.common.monitor.tick.TpsCalculator;
+
+import java.util.function.Consumer;
+
+public class MonitoringModule<S> implements CommandModule<S> {
+
+ @Override
+ public void registerCommands(Consumer<Command<S>> consumer) {
+ consumer.accept(Command.<S>builder()
+ .aliases("tps")
+ .executor((platform, sender, resp, arguments) -> {
+ TpsCalculator tpsCalculator = platform.getTpsCalculator();
+ if (tpsCalculator == null) {
+ resp.replyPrefixed("TPS data is not available.");
+ return;
+ }
+
+ String formattedTpsString = tpsCalculator.toFormattedString();
+ resp.replyPrefixed("TPS from last 5s, 10s, 1m, 5m, 15m");
+ resp.replyPrefixed(formattedTpsString);
+ })
+ .tabCompleter(Command.TabCompleter.empty())
+ .build()
+ );
+ }
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
index 9d00a96..a0f171c 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
@@ -23,14 +23,14 @@ package me.lucko.spark.common.command.modules;
import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.command.Command;
import me.lucko.spark.common.command.CommandModule;
+import me.lucko.spark.common.command.CommandResponseHandler;
import me.lucko.spark.common.command.tabcomplete.CompletionSupplier;
import me.lucko.spark.common.command.tabcomplete.TabCompleter;
-import me.lucko.spark.sampler.Sampler;
-import me.lucko.spark.sampler.SamplerBuilder;
-import me.lucko.spark.sampler.ThreadDumper;
-import me.lucko.spark.sampler.ThreadGrouper;
-import me.lucko.spark.sampler.TickCounter;
-
+import me.lucko.spark.common.sampler.Sampler;
+import me.lucko.spark.common.sampler.SamplerBuilder;
+import me.lucko.spark.common.sampler.ThreadDumper;
+import me.lucko.spark.common.sampler.ThreadGrouper;
+import me.lucko.spark.common.sampler.TickCounter;
import okhttp3.MediaType;
import java.io.IOException;
@@ -62,15 +62,15 @@ public class SamplerModule<S> implements CommandModule<S> {
.argumentUsage("interval", "interval millis")
.argumentUsage("only-ticks-over", "tick length millis")
.argumentUsage("include-line-numbers", null)
- .executor((platform, sender, arguments) -> {
+ .executor((platform, sender, resp, arguments) -> {
int timeoutSeconds = arguments.intFlag("timeout");
if (timeoutSeconds != -1 && timeoutSeconds <= 10) {
- platform.sendPrefixedMessage(sender, "&cThe specified timeout is not long enough for accurate results to be formed. Please choose a value greater than 10.");
+ resp.replyPrefixed("&cThe specified timeout is not long enough for accurate results to be formed. Please choose a value greater than 10.");
return;
}
if (timeoutSeconds != -1 && timeoutSeconds < 30) {
- platform.sendPrefixedMessage(sender, "&7The accuracy of the output will significantly improve when sampling is able to run for longer periods. Consider setting a timeout value over 30 seconds.");
+ resp.replyPrefixed("&7The accuracy of the output will significantly improve when sampling is able to run for longer periods. Consider setting a timeout value over 30 seconds.");
}
double intervalMillis = arguments.doubleFlag("interval");
@@ -84,7 +84,7 @@ public class SamplerModule<S> implements CommandModule<S> {
ThreadDumper threadDumper;
if (threads.isEmpty()) {
// use the server thread
- threadDumper = platform.getDefaultThreadDumper();
+ threadDumper = platform.getPlugin().getDefaultThreadDumper();
} else if (threads.contains("*")) {
threadDumper = ThreadDumper.ALL;
} else {
@@ -108,10 +108,9 @@ public class SamplerModule<S> implements CommandModule<S> {
int ticksOver = arguments.intFlag("only-ticks-over");
TickCounter tickCounter = null;
if (ticksOver != -1) {
- try {
- tickCounter = platform.newTickCounter();
- } catch (UnsupportedOperationException e) {
- platform.sendPrefixedMessage(sender, "&cTick counting is not supported!");
+ tickCounter = platform.getTickCounter();
+ if (tickCounter == null) {
+ resp.replyPrefixed("&cTick counting is not supported!");
return;
}
}
@@ -119,11 +118,11 @@ public class SamplerModule<S> implements CommandModule<S> {
Sampler sampler;
synchronized (this.activeSamplerMutex) {
if (this.activeSampler != null) {
- platform.sendPrefixedMessage(sender, "&7An active sampler is already running.");
+ resp.replyPrefixed("&7An active sampler is already running.");
return;
}
- platform.sendPrefixedMessage("&7Initializing a new profiler, please wait...");
+ resp.broadcastPrefixed("&7Initializing a new profiler, please wait...");
SamplerBuilder builder = new SamplerBuilder();
builder.threadDumper(threadDumper);
@@ -138,11 +137,11 @@ public class SamplerModule<S> implements CommandModule<S> {
}
sampler = this.activeSampler = builder.start();
- platform.sendPrefixedMessage("&bProfiler now active!");
+ resp.broadcastPrefixed("&bProfiler now active!");
if (timeoutSeconds == -1) {
- platform.sendPrefixedMessage("&7Use '/" + platform.getLabel() + " stop' to stop profiling and upload the results.");
+ resp.broadcastPrefixed("&7Use '/" + platform.getPlugin().getLabel() + " stop' to stop profiling and upload the results.");
} else {
- platform.sendPrefixedMessage("&7The results will be automatically returned after the profiler has been running for " + timeoutSeconds + " seconds.");
+ resp.broadcastPrefixed("&7The results will be automatically returned after the profiler has been running for " + timeoutSeconds + " seconds.");
}
}
@@ -151,7 +150,7 @@ public class SamplerModule<S> implements CommandModule<S> {
// send message if profiling fails
future.whenCompleteAsync((s, throwable) -> {
if (throwable != null) {
- platform.sendPrefixedMessage("&cSampling operation failed unexpectedly. Error: " + throwable.toString());
+ resp.broadcastPrefixed("&cSampling operation failed unexpectedly. Error: " + throwable.toString());
throwable.printStackTrace();
}
});
@@ -168,8 +167,8 @@ public class SamplerModule<S> implements CommandModule<S> {
// await the result
if (timeoutSeconds != -1) {
future.thenAcceptAsync(s -> {
- platform.sendPrefixedMessage("&7The active sampling operation has completed! Uploading results...");
- handleUpload(platform, s);
+ resp.broadcastPrefixed("&7The active sampling operation has completed! Uploading results...");
+ handleUpload(platform, resp, s);
});
}
})
@@ -188,21 +187,21 @@ public class SamplerModule<S> implements CommandModule<S> {
consumer.accept(Command.<S>builder()
.aliases("info")
- .executor((platform, sender, arguments) -> {
+ .executor((platform, sender, resp, arguments) -> {
synchronized (this.activeSamplerMutex) {
if (this.activeSampler == null) {
- platform.sendPrefixedMessage(sender, "&7There isn't an active sampling task running.");
+ resp.replyPrefixed("&7There isn't an active sampling task running.");
} else {
long timeout = this.activeSampler.getEndTime();
if (timeout == -1) {
- platform.sendPrefixedMessage(sender, "&7There is an active sampler currently running, with no defined timeout.");
+ resp.replyPrefixed("&7There is an active sampler currently running, with no defined timeout.");
} else {
long timeoutDiff = (timeout - System.currentTimeMillis()) / 1000L;
- platform.sendPrefixedMessage(sender, "&7There is an active sampler currently running, due to timeout in " + timeoutDiff + " seconds.");
+ resp.replyPrefixed("&7There is an active sampler currently running, due to timeout in " + timeoutDiff + " seconds.");
}
long runningTime = (System.currentTimeMillis() - this.activeSampler.getStartTime()) / 1000L;
- platform.sendPrefixedMessage(sender, "&7It has been sampling for " + runningTime + " seconds so far.");
+ resp.replyPrefixed("&7It has been sampling for " + runningTime + " seconds so far.");
}
}
})
@@ -211,14 +210,14 @@ public class SamplerModule<S> implements CommandModule<S> {
consumer.accept(Command.<S>builder()
.aliases("stop", "upload", "paste")
- .executor((platform, sender, arguments) -> {
+ .executor((platform, sender, resp, arguments) -> {
synchronized (this.activeSamplerMutex) {
if (this.activeSampler == null) {
- platform.sendPrefixedMessage(sender, "&7There isn't an active sampling task running.");
+ resp.replyPrefixed("&7There isn't an active sampling task running.");
} else {
this.activeSampler.cancel();
- platform.sendPrefixedMessage("&7The active sampling operation has been stopped! Uploading results...");
- handleUpload(platform, this.activeSampler);
+ resp.broadcastPrefixed("&7The active sampling operation has been stopped! Uploading results...");
+ handleUpload(platform, resp, this.activeSampler);
this.activeSampler = null;
}
}
@@ -228,14 +227,14 @@ public class SamplerModule<S> implements CommandModule<S> {
consumer.accept(Command.<S>builder()
.aliases("cancel")
- .executor((platform, sender, arguments) -> {
+ .executor((platform, sender, resp, arguments) -> {
synchronized (this.activeSamplerMutex) {
if (this.activeSampler == null) {
- platform.sendPrefixedMessage(sender, "&7There isn't an active sampling task running.");
+ resp.replyPrefixed("&7There isn't an active sampling task running.");
} else {
this.activeSampler.cancel();
this.activeSampler = null;
- platform.sendPrefixedMessage("&bThe active sampling task has been cancelled.");
+ resp.broadcastPrefixed("&bThe active sampling task has been cancelled.");
}
}
})
@@ -243,15 +242,15 @@ public class SamplerModule<S> implements CommandModule<S> {
);
}
- private void handleUpload(SparkPlatform<S> platform, Sampler sampler) {
- platform.runAsync(() -> {
+ private void handleUpload(SparkPlatform<S> platform, CommandResponseHandler<S> resp, Sampler sampler) {
+ platform.getPlugin().runAsync(() -> {
byte[] output = sampler.formCompressedDataPayload();
try {
- String key = SparkPlatform.BYTEBIN_CLIENT.postGzippedContent(output, JSON_TYPE);
- platform.sendPrefixedMessage("&bSampling results:");
- platform.sendLink(SparkPlatform.VIEWER_URL + key);
+ String key = SparkPlatform.BYTEBIN_CLIENT.postContent(output, JSON_TYPE, false).key();
+ resp.broadcastPrefixed("&bSampling results:");
+ resp.broadcastLink(SparkPlatform.VIEWER_URL + key);
} catch (IOException e) {
- platform.sendPrefixedMessage("&cAn error occurred whilst uploading the results.");
+ resp.broadcastPrefixed("&cAn error occurred whilst uploading the results.");
e.printStackTrace();
}
});
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/TickMonitoringModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/TickMonitoringModule.java
index 3ad8909..bea7a07 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/TickMonitoringModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/TickMonitoringModule.java
@@ -20,12 +20,12 @@
package me.lucko.spark.common.command.modules;
-import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.command.Command;
import me.lucko.spark.common.command.CommandModule;
+import me.lucko.spark.common.command.CommandResponseHandler;
import me.lucko.spark.common.command.tabcomplete.TabCompleter;
-import me.lucko.spark.monitor.TickMonitor;
-import me.lucko.spark.sampler.TickCounter;
+import me.lucko.spark.common.monitor.tick.TickMonitor;
+import me.lucko.spark.common.sampler.TickCounter;
import java.util.function.Consumer;
@@ -37,27 +37,29 @@ public class TickMonitoringModule<S> implements CommandModule<S> {
@Override
public void registerCommands(Consumer<Command<S>> consumer) {
consumer.accept(Command.<S>builder()
- .aliases("monitoring")
+ .aliases("tickmonitoring")
.argumentUsage("threshold", "percentage increase")
.argumentUsage("without-gc", null)
- .executor((platform, sender, arguments) -> {
- if (this.activeTickMonitor == null) {
+ .executor((platform, sender, resp, arguments) -> {
+ TickCounter tickCounter = platform.getTickCounter();
+ if (tickCounter == null) {
+ resp.replyPrefixed("&cNot supported!");
+ return;
+ }
+ if (this.activeTickMonitor == null) {
int threshold = arguments.intFlag("threshold");
if (threshold == -1) {
threshold = 100;
}
- try {
- TickCounter tickCounter = platform.newTickCounter();
- this.activeTickMonitor = new ReportingTickMonitor(platform, tickCounter, threshold, !arguments.boolFlag("without-gc"));
- } catch (UnsupportedOperationException e) {
- platform.sendPrefixedMessage(sender, "&cNot supported!");
- }
+ this.activeTickMonitor = new ReportingTickMonitor(resp, tickCounter, threshold, !arguments.boolFlag("without-gc"));
+ tickCounter.addTickTask(this.activeTickMonitor);
} else {
+ tickCounter.removeTickTask(this.activeTickMonitor);
this.activeTickMonitor.close();
this.activeTickMonitor = null;
- platform.sendPrefixedMessage("&7Tick monitor disabled.");
+ resp.broadcastPrefixed("&7Tick monitor disabled.");
}
})
.tabCompleter((platform, sender, arguments) -> TabCompleter.completeForOpts(arguments, "--threshold", "--without-gc"))
@@ -66,16 +68,16 @@ public class TickMonitoringModule<S> implements CommandModule<S> {
}
private class ReportingTickMonitor extends TickMonitor {
- private final SparkPlatform<S> platform;
+ private final CommandResponseHandler<S> resp;
- ReportingTickMonitor(SparkPlatform<S> platform, TickCounter tickCounter, int percentageChangeThreshold, boolean monitorGc) {
+ ReportingTickMonitor(CommandResponseHandler<S> resp, TickCounter tickCounter, int percentageChangeThreshold, boolean monitorGc) {
super(tickCounter, percentageChangeThreshold, monitorGc);
- this.platform = platform;
+ this.resp = resp;
}
@Override
protected void sendMessage(String message) {
- this.platform.sendPrefixedMessage(message);
+ this.resp.broadcastPrefixed(message);
}
}
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/memory/HeapDump.java b/spark-common/src/main/java/me/lucko/spark/common/memory/HeapDump.java
new file mode 100644
index 0000000..6200c17
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/memory/HeapDump.java
@@ -0,0 +1,77 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.memory;
+
+import javax.management.JMX;
+import javax.management.MBeanServer;
+import javax.management.ObjectName;
+import java.io.IOException;
+import java.lang.management.ManagementFactory;
+import java.lang.reflect.Method;
+import java.nio.file.Path;
+
+/**
+ * Utility for creating .hprof memory heap snapshots.
+ */
+public final class HeapDump {
+
+ private HeapDump() {}
+
+ /** The object name of the com.sun.management.HotSpotDiagnosticMXBean */
+ private static final String DIAGNOSTIC_BEAN = "com.sun.management:type=HotSpotDiagnostic";
+
+ /**
+ * Creates a heap dump at the given output path.
+ *
+ * @param outputPath the path to write the snapshot to
+ * @param live if true, dump only live objects, i.e. objects that are reachable from others
+ * @throws Exception if the heap dump could not be created
+ */
+ public static void dumpHeap(Path outputPath, boolean live) throws Exception {
+ String outputPathString = outputPath.toAbsolutePath().normalize().toString();
+
+ if (isOpenJ9()) {
+ Class<?> dumpClass = Class.forName("com.ibm.jvm.Dump");
+ Method heapDumpMethod = dumpClass.getMethod("heapDumpToFile", String.class);
+ heapDumpMethod.invoke(null, outputPathString);
+ } else {
+ MBeanServer beanServer = ManagementFactory.getPlatformMBeanServer();
+ ObjectName diagnosticBeanName = ObjectName.getInstance(DIAGNOSTIC_BEAN);
+
+ HotSpotDiagnosticMXBean proxy = JMX.newMXBeanProxy(beanServer, diagnosticBeanName, HotSpotDiagnosticMXBean.class);
+ proxy.dumpHeap(outputPathString, live);
+ }
+ }
+
+ public static boolean isOpenJ9() {
+ try {
+ Class.forName("com.ibm.jvm.Dump");
+ return true;
+ } catch (ClassNotFoundException e) {
+ return false;
+ }
+ }
+
+ public interface HotSpotDiagnosticMXBean {
+ void dumpHeap(String outputFile, boolean live) throws IOException;
+ }
+
+}
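
HeapDump picks between two mechanisms at runtime: on OpenJ9 it reflectively invokes com.ibm.jvm.Dump#heapDumpToFile, otherwise it drives the HotSpot diagnostic MXBean over JMX. A minimal sketch of calling it directly, outside the heapdump command (the output file name is illustrative; note that HotSpot refuses to overwrite an existing file):

    import me.lucko.spark.common.memory.HeapDump;

    import java.nio.file.Path;
    import java.nio.file.Paths;

    public final class HeapDumpDemo {
        public static void main(String[] args) throws Exception {
            // .phd on OpenJ9, .hprof on HotSpot, mirroring the heapdump command
            Path out = Paths.get("heap-demo" + (HeapDump.isOpenJ9() ? ".phd" : ".hprof"));
            HeapDump.dumpHeap(out, /* live = */ true); // dump only reachable objects
            System.out.println("written to " + out.toAbsolutePath());
        }
    }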
diff --git a/spark-common/src/main/java/me/lucko/spark/common/memory/HeapDumpSummary.java b/spark-common/src/main/java/me/lucko/spark/common/memory/HeapDumpSummary.java
new file mode 100644
index 0000000..f3f62db
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/memory/HeapDumpSummary.java
@@ -0,0 +1,171 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.memory;
+
+import com.google.gson.stream.JsonWriter;
+import me.lucko.spark.common.util.TypeDescriptors;
+
+import javax.management.JMX;
+import javax.management.MBeanServer;
+import javax.management.ObjectName;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.OutputStreamWriter;
+import java.io.Writer;
+import java.lang.management.ManagementFactory;
+import java.nio.charset.StandardCharsets;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Objects;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+import java.util.stream.Collectors;
+import java.util.zip.GZIPOutputStream;
+
+/**
+ * Represents a "heap dump summary" from the VM.
+ *
+ * <p>Contains a number of entries, corresponding to types of objects in the virtual machine
+ * and their recorded impact on memory usage.</p>
+ */
+public final class HeapDumpSummary {
+
+ /** The object name of the com.sun.management.DiagnosticCommandMBean */
+ private static final String DIAGNOSTIC_BEAN = "com.sun.management:type=DiagnosticCommand";
+ /** A regex pattern representing the expected format of the raw heap output */
+ private static final Pattern OUTPUT_FORMAT = Pattern.compile("^\\s*(\\d+):\\s*(\\d+)\\s*(\\d+)\\s*([^\\s]+).*$");
+
+ /**
+ * Obtains the raw heap data output from the DiagnosticCommandMBean.
+ *
+ * @return the raw output
+ * @throws Exception lots could go wrong!
+ */
+ private static String getRawHeapData() throws Exception {
+ MBeanServer beanServer = ManagementFactory.getPlatformMBeanServer();
+ ObjectName diagnosticBeanName = ObjectName.getInstance(DIAGNOSTIC_BEAN);
+
+ DiagnosticCommandMXBean proxy = JMX.newMXBeanProxy(beanServer, diagnosticBeanName, DiagnosticCommandMXBean.class);
+ return proxy.gcClassHistogram(new String[0]);
+ }
+
+ /**
+ * Creates a new heap dump based on the current VM.
+ *
+ * @return the created heap dump
+ * @throws RuntimeException if an error occurred whilst requesting a heap dump from the VM
+ */
+ public static HeapDumpSummary createNew() {
+ String rawOutput;
+ try {
+ rawOutput = getRawHeapData();
+ } catch (Exception e) {
+ throw new RuntimeException("Unable to get heap dump", e);
+ }
+
+ return new HeapDumpSummary(Arrays.stream(rawOutput.split("\n"))
+ .map(line -> {
+ Matcher matcher = OUTPUT_FORMAT.matcher(line);
+ if (!matcher.matches()) {
+ return null;
+ }
+
+ return new Entry(
+ Integer.parseInt(matcher.group(1)),
+ Integer.parseInt(matcher.group(2)),
+ Long.parseLong(matcher.group(3)),
+ TypeDescriptors.getJavaType(matcher.group(4))
+ );
+ })
+ .filter(Objects::nonNull)
+ .collect(Collectors.toList()));
+ }
+
+ /** The entries in this heap dump */
+ private final List<Entry> entries;
+
+ private HeapDumpSummary(List<Entry> entries) {
+ this.entries = entries;
+ }
+
+ private void writeOutput(JsonWriter writer) throws IOException {
+ writer.beginObject();
+ writer.name("type").value("heap");
+ writer.name("entries").beginArray();
+ for (Entry entry : this.entries) {
+ writer.beginObject();
+ writer.name("#").value(entry.getOrder());
+ writer.name("i").value(entry.getInstances());
+ writer.name("s").value(entry.getBytes());
+ writer.name("t").value(entry.getType());
+ writer.endObject();
+ }
+ writer.endArray();
+ writer.endObject();
+ }
+
+ public byte[] formCompressedDataPayload() {
+ ByteArrayOutputStream byteOut = new ByteArrayOutputStream();
+ try (Writer writer = new OutputStreamWriter(new GZIPOutputStream(byteOut), StandardCharsets.UTF_8)) {
+ try (JsonWriter jsonWriter = new JsonWriter(writer)) {
+ writeOutput(jsonWriter);
+ }
+ } catch (IOException e) {
+ throw new RuntimeException(e);
+ }
+ return byteOut.toByteArray();
+ }
+
+ public static final class Entry {
+ private final int order;
+ private final int instances;
+ private final long bytes;
+ private final String type;
+
+ Entry(int order, int instances, long bytes, String type) {
+ this.order = order;
+ this.instances = instances;
+ this.bytes = bytes;
+ this.type = type;
+ }
+
+ public int getOrder() {
+ return this.order;
+ }
+
+ public int getInstances() {
+ return this.instances;
+ }
+
+ public long getBytes() {
+ return this.bytes;
+ }
+
+ public String getType() {
+ return this.type;
+ }
+ }
+
+ public interface DiagnosticCommandMXBean {
+ String gcClassHistogram(String[] args);
+ }
+
+}
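
The OUTPUT_FORMAT pattern matches one line of the histogram returned by the DiagnosticCommandMBean's gcClassHistogram operation (the same data jmap -histo prints). A sketch with a made-up histogram line, showing how the four capture groups feed the Entry constructor; the fourth group is a JVM type descriptor, which TypeDescriptors.getJavaType turns into a readable name:

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public final class HistogramLineDemo {
        private static final Pattern OUTPUT_FORMAT =
                Pattern.compile("^\\s*(\\d+):\\s*(\\d+)\\s*(\\d+)\\s*([^\\s]+).*$");

        public static void main(String[] args) {
            String line = "   1:        250000       16000000  [C"; // illustrative values
            Matcher matcher = OUTPUT_FORMAT.matcher(line);
            if (matcher.matches()) {
                System.out.println("order     = " + matcher.group(1)); // 1
                System.out.println("instances = " + matcher.group(2)); // 250000
                System.out.println("bytes     = " + matcher.group(3)); // 16000000
                System.out.println("type      = " + matcher.group(4)); // [C, i.e. char[]
            }
        }
    }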
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/gc/GarbageCollectionMonitor.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/gc/GarbageCollectionMonitor.java
new file mode 100644
index 0000000..93a5fd8
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/gc/GarbageCollectionMonitor.java
@@ -0,0 +1,90 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.monitor.gc;
+
+import com.sun.management.GarbageCollectionNotificationInfo;
+
+import javax.management.ListenerNotFoundException;
+import javax.management.Notification;
+import javax.management.NotificationEmitter;
+import javax.management.NotificationListener;
+import javax.management.openmbean.CompositeData;
+import java.lang.management.GarbageCollectorMXBean;
+import java.lang.management.ManagementFactory;
+import java.util.ArrayList;
+import java.util.List;
+
+public class GarbageCollectionMonitor implements NotificationListener, AutoCloseable {
+
+ private final List<Listener> listeners = new ArrayList<>();
+ private final List<NotificationEmitter> emitters = new ArrayList<>();
+
+ public GarbageCollectionMonitor() {
+ List<GarbageCollectorMXBean> beans = ManagementFactory.getGarbageCollectorMXBeans();
+ for (GarbageCollectorMXBean bean : beans) {
+ if (!(bean instanceof NotificationEmitter)) {
+ continue;
+ }
+
+ NotificationEmitter notificationEmitter = (NotificationEmitter) bean;
+ notificationEmitter.addNotificationListener(this, null, null);
+ this.emitters.add(notificationEmitter);
+ }
+ }
+
+ public void addListener(Listener listener) {
+ this.listeners.add(listener);
+ }
+
+ public void removeListener(Listener listener) {
+ this.listeners.remove(listener);
+ }
+
+ @Override
+ public void handleNotification(Notification notification, Object handback) {
+ if (!notification.getType().equals(GarbageCollectionNotificationInfo.GARBAGE_COLLECTION_NOTIFICATION)) {
+ return;
+ }
+
+ GarbageCollectionNotificationInfo data = GarbageCollectionNotificationInfo.from((CompositeData) notification.getUserData());
+ for (Listener listener : this.listeners) {
+ listener.onGc(data);
+ }
+ }
+
+ @Override
+ public void close() {
+ for (NotificationEmitter e : this.emitters) {
+ try {
+ e.removeNotificationListener(this);
+ } catch (ListenerNotFoundException ex) {
+ ex.printStackTrace();
+ }
+ }
+ this.emitters.clear();
+ this.listeners.clear();
+ }
+
+ public interface Listener {
+ void onGc(GarbageCollectionNotificationInfo data);
+ }
+
+}
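
GarbageCollectionMonitor subscribes to every GarbageCollectorMXBean that is also a NotificationEmitter, and fans each GARBAGE_COLLECTION_NOTIFICATION out to registered listeners. A minimal sketch of standalone usage (the listener body is illustrative):

    import me.lucko.spark.common.monitor.gc.GarbageCollectionMonitor;

    public final class GcMonitorDemo {
        public static void main(String[] args) throws InterruptedException {
            GarbageCollectionMonitor monitor = new GarbageCollectionMonitor();
            monitor.addListener(data -> System.out.println(
                    data.getGcAction() + " took " + data.getGcInfo().getDuration() + "ms"));

            System.gc(); // only a hint; a notification usually, but not always, follows
            Thread.sleep(1000);
            monitor.close(); // deregisters from all emitters and clears listeners
        }
    }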
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TickMonitor.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TickMonitor.java
new file mode 100644
index 0000000..034e876
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TickMonitor.java
@@ -0,0 +1,143 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.monitor.tick;
+
+import com.sun.management.GarbageCollectionNotificationInfo;
+import me.lucko.spark.common.monitor.gc.GarbageCollectionMonitor;
+import me.lucko.spark.common.sampler.TickCounter;
+
+import java.text.DecimalFormat;
+import java.util.DoubleSummaryStatistics;
+
+public abstract class TickMonitor implements TickCounter.TickTask, GarbageCollectionMonitor.Listener, AutoCloseable {
+ private static final DecimalFormat df = new DecimalFormat("#.##");
+
+ private final TickCounter tickCounter;
+ private final GarbageCollectionMonitor garbageCollectionMonitor;
+ private final int percentageChangeThreshold;
+
+ // data
+ private volatile double lastTickTime = 0;
+ private State state = null;
+ private final DoubleSummaryStatistics averageTickTime = new DoubleSummaryStatistics();
+ private double avg;
+
+ public TickMonitor(TickCounter tickCounter, int percentageChangeThreshold, boolean monitorGc) {
+ this.tickCounter = tickCounter;
+ this.percentageChangeThreshold = percentageChangeThreshold;
+
+ if (monitorGc) {
+ this.garbageCollectionMonitor = new GarbageCollectionMonitor();
+ this.garbageCollectionMonitor.addListener(this);
+ } else {
+ this.garbageCollectionMonitor = null;
+ }
+ }
+
+ protected abstract void sendMessage(String message);
+
+ @Override
+ public void close() {
+ if (this.garbageCollectionMonitor != null) {
+ this.garbageCollectionMonitor.close();
+ }
+ }
+
+ @Override
+ public void onTick(TickCounter counter) {
+ double now = ((double) System.nanoTime()) / 1000000d;
+
+ // init
+ if (this.state == null) {
+ this.state = State.SETUP;
+ this.lastTickTime = now;
+ sendMessage("Tick monitor started. Before the monitor becomes fully active, the server's " +
+ "average tick rate will be calculated over a period of 120 ticks (approx 6 seconds).");
+ return;
+ }
+
+ // find the diff
+ double last = this.lastTickTime;
+ double diff = now - last;
+ boolean ignore = last == 0;
+ this.lastTickTime = now;
+
+ if (ignore) {
+ return;
+ }
+
+ // form averages
+ if (this.state == State.SETUP) {
+ this.averageTickTime.accept(diff);
+
+ // move onto the next state
+ if (this.averageTickTime.getCount() >= 120) {
+
+ sendMessage("&bAnalysis is now complete.");
+ sendMessage("&f> &7Max: " + df.format(this.averageTickTime.getMax()) + "ms");
+ sendMessage("&f> &7Min: " + df.format(this.averageTickTime.getMin()) + "ms");
+ sendMessage("&f> &7Avg: " + df.format(this.averageTickTime.getAverage()) + "ms");
+ sendMessage("Starting now, any ticks with >" + this.percentageChangeThreshold + "% increase in " +
+ "duration compared to the average will be reported.");
+
+ this.avg = this.averageTickTime.getAverage();
+ this.state = State.MONITORING;
+ }
+ }
+
+ if (this.state == State.MONITORING) {
+ double increase = diff - this.avg;
+ if (increase <= 0) {
+ return;
+ }
+
+ double percentageChange = (increase * 100d) / this.avg;
+ if (percentageChange > this.percentageChangeThreshold) {
+ sendMessage("&7Tick &8#" + counter.getCurrentTick() + " &7lasted &b" + df.format(diff) +
+ "&7 ms. (&b" + df.format(percentageChange) + "% &7increase from average)");
+ }
+ }
+ }
+
+ @Override
+ public void onGc(GarbageCollectionNotificationInfo data) {
+ if (this.state == State.SETUP) {
+ // set lastTickTime to zero so this tick won't be counted in the average
+ this.lastTickTime = 0;
+ return;
+ }
+
+ String gcType = data.getGcAction();
+ if (gcType.equals("end of minor GC")) {
+ gcType = "Young Gen GC";
+ } else if (gcType.equals("end of major GC")) {
+ gcType = "Old Gen GC";
+ }
+
+ sendMessage("&7Tick &8#" + this.tickCounter.getCurrentTick() + " &7included &4GC &7lasting &b" +
+ df.format(data.getGcInfo().getDuration()) + "&7 ms. (type = " + gcType + ")");
+ }
+
+ private enum State {
+ SETUP,
+ MONITORING
+ }
+}
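
To make the reporting rule concrete: with the default threshold of 100 and a setup-phase average of, say, 50 ms per tick, a later tick lasting 120 ms gives increase = 120 - 50 = 70 ms and percentageChange = 70 * 100 / 50 = 140, which exceeds 100 and is reported; a 95 ms tick (a 90% increase) stays silent. The GC listener also resets lastTickTime to zero during setup so that a tick stretched by a collection pause is not counted towards the baseline average.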
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TpsCalculator.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TpsCalculator.java
new file mode 100644
index 0000000..2f3af3e
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TpsCalculator.java
@@ -0,0 +1,170 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.monitor.tick;
+
+import me.lucko.spark.common.sampler.TickCounter;
+
+import java.math.BigDecimal;
+import java.math.RoundingMode;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * Calculates the server's TPS (ticks per second) rate.
+ *
+ * <p>The code used to calculate the TPS is the same as the code used by the Minecraft server itself.
+ * This means that this class will output the same values as the /tps command.</p>
+ *
+ * <p>We calculate our own values instead of pulling them from the server for two reasons. Firstly,
+ * it's easier - pulling from the server requires reflection code on each of the platforms, we'd
+ * rather avoid that. Secondly, it allows us to generate rolling averages over a shorter period of
+ * time.</p>
+ */
+public class TpsCalculator implements TickCounter.TickTask {
+
+ private static final long SEC_IN_NANO = TimeUnit.SECONDS.toNanos(1);
+ private static final int TPS = 20;
+ private static final int SAMPLE_INTERVAL = 20;
+ private static final BigDecimal TPS_BASE = new BigDecimal(SEC_IN_NANO).multiply(new BigDecimal((long) SAMPLE_INTERVAL));
+
+ private final RollingAverage tps5S = new RollingAverage(5);
+ private final RollingAverage tps10S = new RollingAverage(10);
+ private final RollingAverage tps1M = new RollingAverage(60);
+ private final RollingAverage tps5M = new RollingAverage(60 * 5);
+ private final RollingAverage tps15M = new RollingAverage(60 * 15);
+
+ private final RollingAverage[] averages = new RollingAverage[]{
+ this.tps5S, this.tps10S, this.tps1M, this.tps5M, this.tps15M
+ };
+
+ private long last = 0;
+
+ // called every tick
+ @Override
+ public void onTick(TickCounter counter) {
+ if (counter.getCurrentTick() % SAMPLE_INTERVAL != 0) {
+ return;
+ }
+
+ long now = System.nanoTime();
+
+ if (this.last == 0) {
+ this.last = now;
+ return;
+ }
+
+ long diff = now - this.last;
+ BigDecimal currentTps = TPS_BASE.divide(new BigDecimal(diff), 30, RoundingMode.HALF_UP);
+
+ for (RollingAverage rollingAverage : this.averages) {
+ rollingAverage.add(currentTps, diff);
+ }
+
+ this.last = now;
+ }
+
+ public RollingAverage avg5Sec() {
+ return this.tps5S;
+ }
+
+ public RollingAverage avg10Sec() {
+ return this.tps10S;
+ }
+
+ public RollingAverage avg1Min() {
+ return this.tps1M;
+ }
+
+ public RollingAverage avg5Min() {
+ return this.tps5M;
+ }
+
+ public RollingAverage avg15Min() {
+ return this.tps15M;
+ }
+
+ public String toFormattedString() {
+ return formatTps(this.tps5S.getAverage()) + ", " +
+ formatTps(this.tps10S.getAverage()) + ", " +
+ formatTps(this.tps1M.getAverage()) + ", " +
+ formatTps(this.tps5M.getAverage()) + ", " +
+ formatTps(this.tps15M.getAverage());
+ }
+
+ public static String formatTps(double tps) {
+ StringBuilder sb = new StringBuilder();
+ if (tps > 18.0) {
+ sb.append("&a");
+ } else if (tps > 16.0) {
+ sb.append("&e");
+ } else {
+ sb.append("&c");
+ }
+ if (tps > 20.0) {
+ sb.append('*');
+ }
+ return sb.append(Math.min(Math.round(tps * 100.0) / 100.0, 20.0)).toString();
+ }
+
+ /**
+ * Rolling average calculator.
+ *
+ * <p>This code is taken from PaperMC/Paper, licensed under MIT.</p>
+ *
+ * @author aikar (PaperMC) https://github.com/PaperMC/Paper/blob/master/Spigot-Server-Patches/0021-Further-improve-server-tick-loop.patch
+ */
+ public static final class RollingAverage {
+ private final int size;
+ private long time;
+ private BigDecimal total;
+ private int index = 0;
+ private final BigDecimal[] samples;
+ private final long[] times;
+
+ RollingAverage(int size) {
+ this.size = size;
+ this.time = size * SEC_IN_NANO;
+ this.total = new BigDecimal((long) TPS).multiply(new BigDecimal(SEC_IN_NANO)).multiply(new BigDecimal((long) size));
+ this.samples = new BigDecimal[size];
+ this.times = new long[size];
+ for (int i = 0; i < size; i++) {
+ this.samples[i] = new BigDecimal((long) TPS);
+ this.times[i] = SEC_IN_NANO;
+ }
+ }
+
+ public void add(BigDecimal x, long t) {
+ this.time -= this.times[this.index];
+ this.total = this.total.subtract(this.samples[this.index].multiply(new BigDecimal(this.times[this.index])));
+ this.samples[this.index] = x;
+ this.times[this.index] = t;
+ this.time += t;
+ this.total = this.total.add(x.multiply(new BigDecimal(t)));
+ if (++this.index == this.size) {
+ this.index = 0;
+ }
+ }
+
+ public double getAverage() {
+ return this.total.divide(new BigDecimal(this.time), 30, RoundingMode.HALF_UP).doubleValue();
+ }
+ }
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
new file mode 100644
index 0000000..d504247
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
@@ -0,0 +1,200 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (C) Albert Pham <http://www.sk89q.com>
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.sampler;
+
+import com.google.common.util.concurrent.ThreadFactoryBuilder;
+import com.google.gson.stream.JsonWriter;
+import me.lucko.spark.common.sampler.aggregator.DataAggregator;
+import me.lucko.spark.common.sampler.aggregator.SimpleDataAggregator;
+import me.lucko.spark.common.sampler.aggregator.TickedDataAggregator;
+import me.lucko.spark.common.sampler.node.ThreadNode;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.OutputStreamWriter;
+import java.io.Writer;
+import java.lang.management.ManagementFactory;
+import java.lang.management.ThreadInfo;
+import java.lang.management.ThreadMXBean;
+import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.ScheduledFuture;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.zip.GZIPOutputStream;
+
+/**
+ * Main sampler class.
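+ *
+ * <p>Instances are normally created using {@link SamplerBuilder}. A sketch of the
+ * lifecycle once started (names illustrative):</p>
+ * <pre>{@code
+ * sampler.getFuture().thenAccept(s -> {
+ *     // runs when the sampler completes, e.g. on reaching its end time
+ *     byte[] payload = s.formCompressedDataPayload();
+ * });
+ * }</pre>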
+ */
+public class Sampler implements Runnable {
+ private static final AtomicInteger THREAD_ID = new AtomicInteger(0);
+
+ /** The worker pool used to schedule the sampling task and insert stack nodes */
+ private final ScheduledExecutorService workerPool = Executors.newScheduledThreadPool(
+ 6, new ThreadFactoryBuilder().setNameFormat("spark-worker-" + THREAD_ID.getAndIncrement() + "-%d").build()
+ );
+
+ /** The main sampling task */
+ private ScheduledFuture<?> task;
+
+ /** The thread management interface for the current JVM */
+ private final ThreadMXBean threadBean = ManagementFactory.getThreadMXBean();
+ /** The instance used to generate thread information for use in sampling */
+ private final ThreadDumper threadDumper;
+ /** Responsible for aggregating and then outputting collected sampling data */
+ private final DataAggregator dataAggregator;
+
+ /** A future to encapsulate the completion of this sampler instance */
+ private final CompletableFuture<Sampler> future = new CompletableFuture<>();
+
+ /** The interval to wait between sampling, in microseconds */
+ private final int interval;
+ /** The time when sampling first began */
+ private long startTime = -1;
+ /** The unix timestamp (in millis) when this sampler should automatically complete. */
+ private final long endTime; // -1 for nothing
+
+ public Sampler(int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime, boolean includeLineNumbers) {
+ this.threadDumper = threadDumper;
+ this.dataAggregator = new SimpleDataAggregator(this.workerPool, threadGrouper, interval, includeLineNumbers);
+ this.interval = interval;
+ this.endTime = endTime;
+ }
+
+ public Sampler(int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime, boolean includeLineNumbers, TickCounter tickCounter, int tickLengthThreshold) {
+ this.threadDumper = threadDumper;
+ this.dataAggregator = new TickedDataAggregator(this.workerPool, tickCounter, threadGrouper, interval, includeLineNumbers, tickLengthThreshold);
+ this.interval = interval;
+ this.endTime = endTime;
+ }
+
+ /**
+ * Starts the sampler.
+ */
+ public void start() {
+ this.startTime = System.currentTimeMillis();
+ this.dataAggregator.start();
+ this.task = this.workerPool.scheduleAtFixedRate(this, 0, this.interval, TimeUnit.MICROSECONDS);
+ }
+
+ public long getStartTime() {
+ if (this.startTime == -1) {
+ throw new IllegalStateException("Not yet started");
+ }
+ return this.startTime;
+ }
+
+ public long getEndTime() {
+ return this.endTime;
+ }
+
+ public CompletableFuture<Sampler> getFuture() {
+ return this.future;
+ }
+
+ public void cancel() {
+ this.task.cancel(false);
+ }
+
+ @Override
+ public void run() {
+ // this is effectively synchronized - the worker pool will not allow this
+ // task to execute concurrently.
+ try {
+ if (this.endTime != -1 && this.endTime <= System.currentTimeMillis()) {
+ this.future.complete(this);
+ cancel();
+ return;
+ }
+
+ ThreadInfo[] threadDumps = this.threadDumper.dumpThreads(this.threadBean);
+ this.workerPool.execute(new InsertDataTask(this.dataAggregator, threadDumps));
+ } catch (Throwable t) {
+ this.future.completeExceptionally(t);
+ cancel();
+ }
+ }
+
+ private static final class InsertDataTask implements Runnable {
+ private final DataAggregator dataAggregator;
+ private final ThreadInfo[] threadDumps;
+
+ InsertDataTask(DataAggregator dataAggregator, ThreadInfo[] threadDumps) {
+ this.dataAggregator = dataAggregator;
+ this.threadDumps = threadDumps;
+ }
+
+ @Override
+ public void run() {
+ for (ThreadInfo threadInfo : this.threadDumps) {
+ String threadName = threadInfo.getThreadName();
+ StackTraceElement[] stack = threadInfo.getStackTrace();
+
+ if (threadName == null || stack == null) {
+ continue;
+ }
+
+ this.dataAggregator.insertData(threadName, stack);
+ }
+ }
+ }
+
+ private void writeOutput(JsonWriter writer) throws IOException {
+ writer.beginObject();
+
+ writer.name("type").value("sampler");
+ writer.name("threads").beginArray();
+
+ List<Map.Entry<String, ThreadNode>> data = new ArrayList<>(this.dataAggregator.getData().entrySet());
+ data.sort(Map.Entry.comparingByKey());
+
+ for (Map.Entry<String, ThreadNode> entry : data) {
+ writer.beginObject();
+ writer.name("threadName").value(entry.getKey());
+ writer.name("totalTime").value(entry.getValue().getTotalTime());
+ writer.name("rootNode");
+ entry.getValue().serializeTo(writer);
+ writer.endObject();
+ }
+
+ writer.endArray();
+ writer.endObject();
+ }
+
+ public byte[] formCompressedDataPayload() {
+ ByteArrayOutputStream byteOut = new ByteArrayOutputStream();
+ try (Writer writer = new OutputStreamWriter(new GZIPOutputStream(byteOut), StandardCharsets.UTF_8)) {
+ try (JsonWriter jsonWriter = new JsonWriter(writer)) {
+ writeOutput(jsonWriter);
+ }
+ } catch (IOException e) {
+ throw new RuntimeException(e);
+ }
+ return byteOut.toByteArray();
+ }
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java
new file mode 100644
index 0000000..4ce69df
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java
@@ -0,0 +1,90 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.sampler;
+
+import java.util.concurrent.TimeUnit;
+
+/**
+ * Builds {@link Sampler} instances.
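+ *
+ * <p>For example, a sketch of building and starting a sampler which runs for
+ * 30 seconds (values illustrative):</p>
+ * <pre>{@code
+ * Sampler sampler = new SamplerBuilder()
+ *         .samplingInterval(4) // milliseconds
+ *         .completeAfter(30, TimeUnit.SECONDS)
+ *         .threadDumper(ThreadDumper.ALL)
+ *         .threadGrouper(ThreadGrouper.BY_POOL)
+ *         .start();
+ * }</pre>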
+ */
+public class SamplerBuilder {
+
+ private double samplingInterval = 4; // milliseconds
+ private boolean includeLineNumbers = false;
+ private long timeout = -1;
+ private ThreadDumper threadDumper = ThreadDumper.ALL;
+ private ThreadGrouper threadGrouper = ThreadGrouper.BY_NAME;
+
+ private int ticksOver = -1;
+ private TickCounter tickCounter = null;
+
+ public SamplerBuilder() {
+ }
+
+ public SamplerBuilder samplingInterval(double samplingInterval) {
+ this.samplingInterval = samplingInterval;
+ return this;
+ }
+
+ public SamplerBuilder completeAfter(long timeout, TimeUnit unit) {
+ if (timeout <= 0) {
+ throw new IllegalArgumentException("timeout > 0");
+ }
+ this.timeout = System.currentTimeMillis() + unit.toMillis(timeout);
+ return this;
+ }
+
+ public SamplerBuilder threadDumper(ThreadDumper threadDumper) {
+ this.threadDumper = threadDumper;
+ return this;
+ }
+
+ public SamplerBuilder threadGrouper(ThreadGrouper threadGrouper) {
+ this.threadGrouper = threadGrouper;
+ return this;
+ }
+
+ public SamplerBuilder ticksOver(int ticksOver, TickCounter tickCounter) {
+ this.ticksOver = ticksOver;
+ this.tickCounter = tickCounter;
+ return this;
+ }
+
+ public SamplerBuilder includeLineNumbers(boolean includeLineNumbers) {
+ this.includeLineNumbers = includeLineNumbers;
+ return this;
+ }
+
+ public Sampler start() {
+ Sampler sampler;
+
+ int intervalMicros = (int) (this.samplingInterval * 1000d);
+ if (this.ticksOver != -1 && this.tickCounter != null) {
+ sampler = new Sampler(intervalMicros, this.threadDumper, this.threadGrouper, this.timeout, this.includeLineNumbers, this.tickCounter, this.ticksOver);
+ } else {
+ sampler = new Sampler(intervalMicros, this.threadDumper, this.threadGrouper, this.timeout, this.includeLineNumbers);
+ }
+
+ sampler.start();
+ return sampler;
+ }
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java
new file mode 100644
index 0000000..14938ac
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java
@@ -0,0 +1,126 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (C) Albert Pham <http://www.sk89q.com>
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.sampler;
+
+import me.lucko.spark.common.util.ThreadFinder;
+
+import java.lang.management.ThreadInfo;
+import java.lang.management.ThreadMXBean;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Set;
+import java.util.regex.Pattern;
+import java.util.regex.PatternSyntaxException;
+import java.util.stream.Collectors;
+
+/**
+ * Uses the {@link ThreadMXBean} to generate {@link ThreadInfo} instances for the threads being
+ * sampled.
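+ *
+ * <p>For example, a sketch of dumping a named thread (the thread name is
+ * illustrative):</p>
+ * <pre>{@code
+ * ThreadDumper dumper = new ThreadDumper.Regex(Collections.singleton("Server thread"));
+ * ThreadInfo[] dumps = dumper.dumpThreads(ManagementFactory.getThreadMXBean());
+ * }</pre>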
+ */
+@FunctionalInterface
+public interface ThreadDumper {
+
+ /**
+ * Generates {@link ThreadInfo} data for the sampled threads.
+ *
+ * @param threadBean the thread bean instance to obtain the data from
+ * @return an array of generated thread info instances
+ */
+ ThreadInfo[] dumpThreads(ThreadMXBean threadBean);
+
+ /**
+ * Implementation of {@link ThreadDumper} that generates data for all threads.
+ */
+ ThreadDumper ALL = threadBean -> threadBean.dumpAllThreads(false, false);
+
+ /**
+ * Implementation of {@link ThreadDumper} that generates data for a specific set of threads.
+ */
+ final class Specific implements ThreadDumper {
+ private final ThreadFinder threadFinder = new ThreadFinder();
+ private final long[] ids;
+
+ public Specific(long[] ids) {
+ this.ids = ids;
+ }
+
+ public Specific(Set<String> names) {
+ Set<String> namesLower = names.stream().map(String::toLowerCase).collect(Collectors.toSet());
+ this.ids = this.threadFinder.getThreads()
+ .filter(t -> namesLower.contains(t.getName().toLowerCase()))
+ .mapToLong(Thread::getId)
+ .toArray();
+ }
+
+ @Override
+ public ThreadInfo[] dumpThreads(ThreadMXBean threadBean) {
+ return threadBean.getThreadInfo(this.ids, Integer.MAX_VALUE);
+ }
+ }
+
+ /**
+ * Implementation of {@link ThreadDumper} that generates data for a set of threads matched by regex patterns.
+ */
+ final class Regex implements ThreadDumper {
+ private final ThreadFinder threadFinder = new ThreadFinder();
+ private final Set<Pattern> namePatterns;
+ private final Map<Long, Boolean> cache = new HashMap<>();
+
+ public Regex(Set<String> namePatterns) {
+ this.namePatterns = namePatterns.stream()
+ .map(regex -> {
+ try {
+ return Pattern.compile(regex);
+ } catch (PatternSyntaxException e) {
+ return null;
+ }
+ })
+ .filter(Objects::nonNull)
+ .collect(Collectors.toSet());
+ }
+
+ @Override
+ public ThreadInfo[] dumpThreads(ThreadMXBean threadBean) {
+ return this.threadFinder.getThreads()
+ .filter(thread -> {
+ Boolean result = this.cache.get(thread.getId());
+ if (result != null) {
+ return result;
+ }
+
+ for (Pattern pattern : this.namePatterns) {
+ if (pattern.matcher(thread.getName()).matches()) {
+ this.cache.put(thread.getId(), true);
+ return true;
+ }
+ }
+ this.cache.put(thread.getId(), false);
+ return false;
+ })
+ .map(thread -> threadBean.getThreadInfo(thread.getId()))
+ .filter(Objects::nonNull)
+ .toArray(ThreadInfo[]::new);
+ }
+ }
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java
new file mode 100644
index 0000000..f53800a
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java
@@ -0,0 +1,72 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.sampler;
+
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+/**
+ * Function for grouping threads together
+ */
+@FunctionalInterface
+public interface ThreadGrouper {
+
+ /**
+ * Gets the group for the given thread.
+ *
+ * @param threadName the name of the thread
+ * @return the group
+ */
+ String getGroup(String threadName);
+
+ /**
+ * Implementation of {@link ThreadGrouper} that just groups by thread name.
+ */
+ ThreadGrouper BY_NAME = threadName -> threadName;
+
+ /**
+ * Implementation of {@link ThreadGrouper} that attempts to group by the name of the pool
+ * the thread originated from.
+ *
+ * <p>The regex pattern used to match pools expects a digit at the end of the thread name,
+ * separated from the pool name with any of one or more of ' ', '-', or '#'.</p>
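+ *
+ * <p>For example (thread names illustrative), "worker-1" and "worker-2" both map
+ * to the same group:</p>
+ * <pre>{@code
+ * ThreadGrouper.BY_POOL.getGroup("worker-1"); // => "worker (Combined)"
+ * }</pre>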
+ */
+ ThreadGrouper BY_POOL = new ThreadGrouper() {
+ private final Pattern pattern = Pattern.compile("^(.*?)[-# ]+\\d+$");
+
+ @Override
+ public String getGroup(String threadName) {
+ Matcher matcher = this.pattern.matcher(threadName);
+ if (!matcher.matches()) {
+ return threadName;
+ }
+
+ return matcher.group(1).trim() + " (Combined)";
+ }
+ };
+
+ /**
+ * Implementation of {@link ThreadGrouper} which groups all threads as one, under
+ * the name "All".
+ */
+ ThreadGrouper AS_ONE = threadName -> "All";
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/TickCounter.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/TickCounter.java
new file mode 100644
index 0000000..aa839ba
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/TickCounter.java
@@ -0,0 +1,64 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.sampler;
+
+/**
+ * A hook into the game's "tick loop".
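+ *
+ * <p>A sketch of registering a task to be invoked each tick (names illustrative):</p>
+ * <pre>{@code
+ * tickCounter.addTickTask(counter -> {
+ *     int tick = counter.getCurrentTick();
+ *     // ... react to the tick
+ * });
+ * }</pre>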
+ */
+public interface TickCounter extends AutoCloseable {
+
+ /**
+ * Starts the counter
+ */
+ void start();
+
+ /**
+ * Stops the counter
+ */
+ @Override
+ void close();
+
+ /**
+ * Gets the current tick number
+ *
+ * @return the current tick
+ */
+ int getCurrentTick();
+
+ /**
+ * Adds a task to be called each time the tick increments
+ *
+ * @param runnable the task
+ */
+ void addTickTask(TickTask runnable);
+
+ /**
+ * Removes a tick task
+ *
+ * @param runnable the task
+ */
+ void removeTickTask(TickTask runnable);
+
+ interface TickTask {
+ void onTick(TickCounter counter);
+ }
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/DataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/DataAggregator.java
new file mode 100644
index 0000000..8c65c2d
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/DataAggregator.java
@@ -0,0 +1,54 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.sampler.aggregator;
+
+import me.lucko.spark.common.sampler.node.ThreadNode;
+
+import java.util.Map;
+
+/**
+ * Aggregates sampling data.
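+ *
+ * <p>A sketch of the expected call pattern (names illustrative):</p>
+ * <pre>{@code
+ * aggregator.start();
+ * aggregator.insertData(thread.getName(), thread.getStackTrace());
+ * // ... once sampling has finished
+ * Map<String, ThreadNode> data = aggregator.getData();
+ * }</pre>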
+ */
+public interface DataAggregator {
+
+ /**
+ * Called before the sampler begins to insert data
+ */
+ default void start() {
+
+ }
+
+ /**
+ * Forms the output data
+ *
+ * @return the output data
+ */
+ Map<String, ThreadNode> getData();
+
+ /**
+ * Inserts sampling data into this aggregator
+ *
+ * @param threadName the name of the thread
+ * @param stack the call stack
+ */
+ void insertData(String threadName, StackTraceElement[] stack);
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/SimpleDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/SimpleDataAggregator.java
new file mode 100644
index 0000000..8fbd03f
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/SimpleDataAggregator.java
@@ -0,0 +1,82 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.sampler.aggregator;
+
+import me.lucko.spark.common.sampler.ThreadGrouper;
+import me.lucko.spark.common.sampler.node.AbstractNode;
+import me.lucko.spark.common.sampler.node.ThreadNode;
+
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * Basic implementation of {@link DataAggregator}.
+ */
+public class SimpleDataAggregator implements DataAggregator {
+
+ /** A map of root stack nodes for each thread with sampling data */
+ private final Map<String, ThreadNode> threadData = new ConcurrentHashMap<>();
+
+ /** The worker pool used for sampling */
+ private final ExecutorService workerPool;
+
+ /** The instance used to group threads together */
+ private final ThreadGrouper threadGrouper;
+
+ /** The interval to wait between sampling, in microseconds */
+ private final int interval;
+
+ /** If line numbers should be included in the output */
+ private final boolean includeLineNumbers;
+
+ public SimpleDataAggregator(ExecutorService workerPool, ThreadGrouper threadGrouper, int interval, boolean includeLineNumbers) {
+ this.workerPool = workerPool;
+ this.threadGrouper = threadGrouper;
+ this.interval = interval;
+ this.includeLineNumbers = includeLineNumbers;
+ }
+
+ @Override
+ public void insertData(String threadName, StackTraceElement[] stack) {
+ try {
+ String group = this.threadGrouper.getGroup(threadName);
+ AbstractNode node = this.threadData.computeIfAbsent(group, ThreadNode::new);
+ node.log(stack, this.interval, this.includeLineNumbers);
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ @Override
+ public Map<String, ThreadNode> getData() {
+ // wait for all pending data to be inserted
+ this.workerPool.shutdown();
+ try {
+ this.workerPool.awaitTermination(15, TimeUnit.SECONDS);
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ }
+
+ return this.threadData;
+ }
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/TickedDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/TickedDataAggregator.java
new file mode 100644
index 0000000..8f8124b
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/TickedDataAggregator.java
@@ -0,0 +1,184 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.sampler.aggregator;
+
+import me.lucko.spark.common.sampler.ThreadGrouper;
+import me.lucko.spark.common.sampler.TickCounter;
+import me.lucko.spark.common.sampler.node.AbstractNode;
+import me.lucko.spark.common.sampler.node.ThreadNode;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * Implementation of {@link DataAggregator} which only includes sampling data from
+ * "ticks" which exceed a certain duration threshold.
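+ *
+ * <p>A sketch of how this is wired up (values illustrative): passing
+ * {@code ticksOver(100, tickCounter)} to the sampler builder means only ticks
+ * lasting longer than 100 milliseconds will contribute data to the output.</p>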
+ */
+public class TickedDataAggregator implements DataAggregator {
+
+ /** A map of root stack nodes for each thread with sampling data */
+ private final Map<String, ThreadNode> threadData = new ConcurrentHashMap<>();
+
+ /** The worker pool for inserting stack nodes */
+ private final ExecutorService workerPool;
+
+ /** Used to monitor the current "tick" of the server */
+ private final TickCounter tickCounter;
+
+ /** The instance used to group threads together */
+ private final ThreadGrouper threadGrouper;
+
+ /** The interval to wait between sampling, in microseconds */
+ private final int interval;
+
+ /** If line numbers should be included in the output */
+ private final boolean includeLineNumbers;
+
+ /** Tick durations under this threshold (measured in microseconds) will not be inserted */
+ private final long tickLengthThreshold;
+
+ /** The expected number of samples in each tick */
+ private final int expectedSize;
+
+ private final Object mutex = new Object();
+
+ // state
+ private int currentTick = -1;
+ private TickList currentData = new TickList(0);
+
+ public TickedDataAggregator(ExecutorService workerPool, TickCounter tickCounter, ThreadGrouper threadGrouper, int interval, boolean includeLineNumbers, int tickLengthThreshold) {
+ this.workerPool = workerPool;
+ this.tickCounter = tickCounter;
+ this.threadGrouper = threadGrouper;
+ this.interval = interval;
+ this.includeLineNumbers = includeLineNumbers;
+ this.tickLengthThreshold = TimeUnit.MILLISECONDS.toMicros(tickLengthThreshold);
+ // 50 millis in a tick, plus 10 extra samples so we have a bit of room to go over
+ double intervalMilliseconds = interval / 1000d;
+ this.expectedSize = (int) ((50 / intervalMilliseconds) + 10);
+ }
+
+ @Override
+ public void insertData(String threadName, StackTraceElement[] stack) {
+ synchronized (this.mutex) {
+ int tick = this.tickCounter.getCurrentTick();
+ if (this.currentTick != tick) {
+ pushCurrentTick();
+ this.currentTick = tick;
+ this.currentData = new TickList(this.expectedSize);
+ }
+
+ // form the queued data
+ QueuedThreadInfo queuedData = new QueuedThreadInfo(threadName, stack);
+ // insert it
+ this.currentData.addData(queuedData);
+ }
+ }
+
+ // guarded by 'mutex'
+ private void pushCurrentTick() {
+ TickList currentData = this.currentData;
+
+ // approximate how long the tick lasted
+ int tickLengthMicros = currentData.getList().size() * this.interval;
+
+ // don't push data below the threshold
+ if (tickLengthMicros < this.tickLengthThreshold) {
+ return;
+ }
+
+ this.workerPool.submit(currentData);
+ }
+
+ @Override
+ public void start() {
+ this.tickCounter.start();
+ }
+
+ @Override
+ public Map<String, ThreadNode> getData() {
+ // push the current tick
+ synchronized (this.mutex) {
+ pushCurrentTick();
+ }
+
+ // close the tick counter
+ this.tickCounter.close();
+
+ // wait for all pending data to be inserted
+ this.workerPool.shutdown();
+ try {
+ this.workerPool.awaitTermination(15, TimeUnit.SECONDS);
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ }
+
+ return this.threadData;
+ }
+
+ // called by TickList
+ void insertData(List<QueuedThreadInfo> dataList) {
+ for (QueuedThreadInfo data : dataList) {
+ try {
+ String group = this.threadGrouper.getGroup(data.threadName);
+ AbstractNode node = this.threadData.computeIfAbsent(group, ThreadNode::new);
+ node.log(data.stack, this.interval, this.includeLineNumbers);
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+ }
+
+ private final class TickList implements Runnable {
+ private final List<QueuedThreadInfo> list;
+
+ TickList(int expectedSize) {
+ this.list = new ArrayList<>(expectedSize);
+ }
+
+ @Override
+ public void run() {
+ insertData(this.list);
+ }
+
+ public List<QueuedThreadInfo> getList() {
+ return this.list;
+ }
+
+ public void addData(QueuedThreadInfo data) {
+ this.list.add(data);
+ }
+ }
+
+ private static final class QueuedThreadInfo {
+ private final String threadName;
+ private final StackTraceElement[] stack;
+
+ QueuedThreadInfo(String threadName, StackTraceElement[] stack) {
+ this.threadName = threadName;
+ this.stack = stack;
+ }
+ }
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java
new file mode 100644
index 0000000..5cfc0f2
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java
@@ -0,0 +1,143 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (C) Albert Pham <http://www.sk89q.com>
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.sampler.node;
+
+import com.google.gson.stream.JsonWriter;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.LongAdder;
+
+/**
+ * Encapsulates a timed node in the sampling stack.
+ */
+public abstract class AbstractNode {
+
+ private static final int MAX_STACK_DEPTH = 300;
+
+ /**
+ * A map of this node's children
+ */
+ private final Map<String, StackTraceNode> children = new ConcurrentHashMap<>();
+
+ /**
+ * The accumulated sample time for this node, measured in microseconds
+ */
+ private final LongAdder totalTime = new LongAdder();
+
+ /**
+ * Returns the total sample time for this node in milliseconds, clamped to a minimum of 1.
+ *
+ * @return the total time
+ */
+ public long getTotalTime() {
+ long millis = TimeUnit.MICROSECONDS.toMillis(this.totalTime.longValue());
+ if (millis == 0) {
+ return 1;
+ }
+ return millis;
+ }
+
+ private AbstractNode resolveChild(String className, String methodName, int lineNumber) {
+ return this.children.computeIfAbsent(
+ StackTraceNode.generateKey(className, methodName, lineNumber),
+ name -> new StackTraceNode(className, methodName, lineNumber)
+ );
+ }
+
+ public void log(StackTraceElement[] elements, long time, boolean includeLineNumbers) {
+ log(elements, 0, time, includeLineNumbers);
+ }
+
+ private void log(StackTraceElement[] elements, int offset, long time, boolean includeLineNumbers) {
+ this.totalTime.add(time);
+
+ if (offset >= MAX_STACK_DEPTH) {
+ return;
+ }
+
+ if (elements.length - offset == 0) {
+ return;
+ }
+
+ // the first element in the array is the top of the call stack, and the last is the root
+ // offset starts at 0.
+
+ // pointer is determined by subtracting the offset from the index of the last element
+ int pointer = (elements.length - 1) - offset;
+ StackTraceElement element = elements[pointer];
+
+ // the parent stack element is located at pointer+1.
+ // when the current offset is 0, we know the current pointer is at the last element in the
+ // array (the root) and therefore there is no parent.
+ StackTraceElement parent = offset == 0 ? null : elements[pointer + 1];
+
+ // get the line number of the parent element - the line which called "us"
+ int lineNumber = parent == null || !includeLineNumbers ? StackTraceNode.NULL_LINE_NUMBER : parent.getLineNumber();
+
+ // resolve a child element within the structure for the element at pointer
+ AbstractNode child = resolveChild(element.getClassName(), element.getMethodName(), lineNumber);
+ // call the log method on the found child, with an incremented offset.
+ child.log(elements, offset + 1, time, includeLineNumbers);
+ }
+
+ private Collection<? extends AbstractNode> getChildren() {
+ if (this.children.isEmpty()) {
+ return Collections.emptyList();
+ }
+
+ List<StackTraceNode> list = new ArrayList<>(this.children.values());
+ list.sort(null);
+ return list;
+ }
+
+ public void serializeTo(JsonWriter writer) throws IOException {
+ writer.beginObject();
+
+ // append metadata about this node
+ appendMetadata(writer);
+
+ // include the total time recorded for this node
+ writer.name("t").value(getTotalTime());
+
+ // append child nodes, if any are present
+ Collection<? extends AbstractNode> childNodes = getChildren();
+ if (!childNodes.isEmpty()) {
+ writer.name("c").beginArray();
+ for (AbstractNode child : childNodes) {
+ child.serializeTo(writer);
+ }
+ writer.endArray();
+ }
+
+ writer.endObject();
+ }
+
+ protected abstract void appendMetadata(JsonWriter writer) throws IOException;
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java
new file mode 100644
index 0000000..c4e7ac4
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java
@@ -0,0 +1,86 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (C) Albert Pham <http://www.sk89q.com>
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.sampler.node;
+
+import com.google.gson.stream.JsonWriter;
+
+import java.io.IOException;
+
+/**
+ * Represents a stack trace element within the {@link AbstractNode node} structure.
+ */
+public final class StackTraceNode extends AbstractNode implements Comparable<StackTraceNode> {
+
+ /**
+ * Magic number to denote a "not present" line number for a node.
+ */
+ public static final int NULL_LINE_NUMBER = -1;
+
+ /**
+ * Forms a key to represent the given node.
+ *
+ * @param className the name of the class
+ * @param methodName the name of the method
+ * @param lineNumber the line number of the parent method call
+ * @return the key
+ */
+ static String generateKey(String className, String methodName, int lineNumber) {
+ return className + "." + methodName + "." + lineNumber;
+ }
+
+ /** The name of the class */
+ private final String className;
+ /** The name of the method */
+ private final String methodName;
+ /** The line number of the invocation which created this node */
+ private final int lineNumber;
+
+ public StackTraceNode(String className, String methodName, int lineNumber) {
+ this.className = className;
+ this.methodName = methodName;
+ this.lineNumber = lineNumber;
+ }
+
+ @Override
+ protected void appendMetadata(JsonWriter writer) throws IOException {
+ writer.name("cl").value(this.className);
+ writer.name("m").value(this.methodName);
+ if (this.lineNumber >= 0) {
+ writer.name("ln").value(this.lineNumber);
+ }
+ }
+
+ private String key() {
+ return generateKey(this.className, this.methodName, this.lineNumber);
+ }
+
+ @Override
+ public int compareTo(StackTraceNode that) {
+ int i = -Long.compare(this.getTotalTime(), that.getTotalTime());
+ if (i != 0) {
+ return i;
+ }
+
+ return this.key().compareTo(that.key());
+ }
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java
new file mode 100644
index 0000000..4e8714c
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java
@@ -0,0 +1,44 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.sampler.node;
+
+import com.google.gson.stream.JsonWriter;
+
+import java.io.IOException;
+
+/**
+ * The root of a sampling stack for a given thread / thread group.
+ */
+public final class ThreadNode extends AbstractNode {
+
+ /**
+ * The name of this thread
+ */
+ private final String threadName;
+
+ public ThreadNode(String threadName) {
+ this.threadName = threadName;
+ }
+
+ @Override
+ protected void appendMetadata(JsonWriter writer) throws IOException {
+ writer.name("name").value(this.threadName);
+ }
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/AbstractHttpClient.java b/spark-common/src/main/java/me/lucko/spark/common/util/AbstractHttpClient.java
new file mode 100644
index 0000000..1ff169d
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/AbstractHttpClient.java
@@ -0,0 +1,45 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.util;
+
+import okhttp3.OkHttpClient;
+import okhttp3.Request;
+import okhttp3.Response;
+
+import java.io.IOException;
+
+public class AbstractHttpClient {
+
+ /** The http client */
+ protected final OkHttpClient okHttp;
+
+ public AbstractHttpClient(OkHttpClient okHttp) {
+ this.okHttp = okHttp;
+ }
+
+ protected Response makeHttpRequest(Request request) throws IOException {
+ Response response = this.okHttp.newCall(request).execute();
+ if (!response.isSuccessful()) {
+ throw new RuntimeException("Request was unsuccessful: " + response.code() + " - " + response.message());
+ }
+ return response;
+ }
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/BytebinClient.java b/spark-common/src/main/java/me/lucko/spark/common/util/BytebinClient.java
new file mode 100644
index 0000000..ff8f4e3
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/BytebinClient.java
@@ -0,0 +1,149 @@
+/*
+ * This file is part of bytebin, licensed under the MIT License.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+package me.lucko.spark.common.util;
+
+import okhttp3.MediaType;
+import okhttp3.OkHttpClient;
+import okhttp3.Request;
+import okhttp3.RequestBody;
+import okhttp3.Response;
+
+import java.io.IOException;
+
+/**
+ * Utility for posting content to bytebin.
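+ *
+ * <p>A sketch of posting a gzipped payload (the URL, user agent and payload are
+ * illustrative):</p>
+ * <pre>{@code
+ * BytebinClient bytebin = new BytebinClient(new OkHttpClient(), "https://bytebin.example.com/", "spark");
+ * Content content = bytebin.postContent(payload, MediaType.parse("application/json"), false);
+ * String key = content.key();
+ * }</pre>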
+ */
+public class BytebinClient extends AbstractHttpClient {
+
+ /** The bytebin URL */
+ private final String url;
+ /** The client user agent */
+ private final String userAgent;
+
+ /**
+ * Creates a new bytebin instance
+ *
+ * @param okHttpClient the http client to make requests with
+ * @param url the bytebin url
+ * @param userAgent the client user agent string
+ */
+ public BytebinClient(OkHttpClient okHttpClient, String url, String userAgent) {
+ super(okHttpClient);
+ if (url.endsWith("/")) {
+ this.url = url;
+ } else {
+ this.url = url + "/";
+ }
+ this.userAgent = userAgent;
+ }
+
+ /**
+ * POSTs GZIP compressed content to bytebin.
+ *
+ * @param buf the compressed content
+ * @param contentType the type of the content
+ * @param allowModification if the paste should be modifiable
+ * @return the key of the resultant content
+ * @throws IOException if an error occurs
+ */
+ public Content postContent(byte[] buf, MediaType contentType, boolean allowModification) throws IOException {
+ RequestBody body = RequestBody.create(contentType, buf);
+
+ Request.Builder requestBuilder = new Request.Builder()
+ .url(this.url + "post")
+ .header("User-Agent", this.userAgent)
+ .header("Content-Encoding", "gzip");
+
+ if (allowModification) {
+ requestBuilder.header("Allow-Modification", "true");
+ }
+
+ Request request = requestBuilder.post(body).build();
+ try (Response response = makeHttpRequest(request)) {
+ String key = response.header("Location");
+ if (key == null) {
+ throw new IllegalStateException("Key not returned");
+ }
+
+ if (allowModification) {
+ String modificationKey = response.header("Modification-Key");
+ if (modificationKey == null) {
+ throw new IllegalStateException("Modification key not returned");
+ }
+ return new Content(key, modificationKey);
+ } else {
+ return new Content(key);
+ }
+ }
+ }
+
+ /**
+ * PUTs modified GZIP compressed content to bytebin in place of existing content.
+ *
+ * @param existingContent the existing content
+ * @param buf the compressed content to put
+ * @param contentType the type of the content
+ * @throws IOException if an error occurs
+ */
+ public void modifyContent(Content existingContent, byte[] buf, MediaType contentType) throws IOException {
+ if (!existingContent.modifiable) {
+ throw new IllegalArgumentException("Existing content is not modifiable");
+ }
+
+ RequestBody body = RequestBody.create(contentType, buf);
+
+ Request.Builder requestBuilder = new Request.Builder()
+ .url(this.url + existingContent.key())
+ .header("User-Agent", this.userAgent)
+ .header("Content-Encoding", "gzip")
+ .header("Modification-Key", existingContent.modificationKey);
+
+ Request request = requestBuilder.put(body).build();
+ makeHttpRequest(request).close();
+ }
+
+ public static final class Content {
+ private final String key;
+ private final boolean modifiable;
+ private final String modificationKey;
+
+ Content(String key) {
+ this.key = key;
+ this.modifiable = false;
+ this.modificationKey = null;
+ }
+
+ Content(String key, String modificationKey) {
+ this.key = key;
+ this.modifiable = true;
+ this.modificationKey = modificationKey;
+ }
+
+ public String key() {
+ return this.key;
+ }
+ }
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/ThreadFinder.java b/spark-common/src/main/java/me/lucko/spark/common/util/ThreadFinder.java
new file mode 100644
index 0000000..cc0722a
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/ThreadFinder.java
@@ -0,0 +1,60 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.util;
+
+import java.util.Arrays;
+import java.util.Objects;
+import java.util.stream.Stream;
+
+/**
+ * Utility to find active threads.
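+ *
+ * <p>For example, a sketch which prints the names of all active threads:</p>
+ * <pre>{@code
+ * new ThreadFinder().getThreads()
+ *         .map(Thread::getName)
+ *         .forEach(System.out::println);
+ * }</pre>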
+ */
+public final class ThreadFinder {
+
+ private static final ThreadGroup ROOT_THREAD_GROUP;
+ static {
+ ThreadGroup rootGroup = Thread.currentThread().getThreadGroup();
+ ThreadGroup parentGroup;
+ while ((parentGroup = rootGroup.getParent()) != null) {
+ rootGroup = parentGroup;
+ }
+ ROOT_THREAD_GROUP = rootGroup;
+ }
+
+ // cache the approx active count at the time of construction.
+ // the usages of this class are likely to be somewhat short-lived, so it's good
+ // enough to just cache a value on init.
+ private final int approxActiveCount = ROOT_THREAD_GROUP.activeCount();
+
+ /**
+ * Gets a stream of all known active threads.
+ *
+ * @return a stream of threads
+ */
+ public Stream<Thread> getThreads() {
+ Thread[] threads = new Thread[this.approxActiveCount + 20]; // +20 to allow a bit of growth for newly created threads
+ while (ROOT_THREAD_GROUP.enumerate(threads, true) == threads.length) {
+ threads = new Thread[threads.length * 2];
+ }
+ return Arrays.stream(threads).filter(Objects::nonNull);
+ }
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/TypeDescriptors.java b/spark-common/src/main/java/me/lucko/spark/common/util/TypeDescriptors.java
new file mode 100644
index 0000000..a232e77
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/TypeDescriptors.java
@@ -0,0 +1,81 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.util;
+
+/**
+ * Utilities for working with Java type descriptors.
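+ *
+ * <p>For example:</p>
+ * <pre>{@code
+ * TypeDescriptors.getJavaType("I");                   // => "int"
+ * TypeDescriptors.getJavaType("[Ljava/lang/String;"); // => "java/lang/String[]"
+ * }</pre>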
+ */
+public final class TypeDescriptors {
+
+ /**
+ * Returns the Java type corresponding to the given type descriptor.
+ *
+ * @param typeDescriptor a type descriptor.
+ * @return the Java type corresponding to the given type descriptor.
+ */
+ public static String getJavaType(String typeDescriptor) {
+ return getJavaType(typeDescriptor.toCharArray(), 0);
+ }
+
+ private static String getJavaType(char[] buf, int offset) {
+ int len;
+ switch (buf[offset]) {
+ case 'V':
+ return "void";
+ case 'Z':
+ return "boolean";
+ case 'C':
+ return "char";
+ case 'B':
+ return "byte";
+ case 'S':
+ return "short";
+ case 'I':
+ return "int";
+ case 'F':
+ return "float";
+ case 'J':
+ return "long";
+ case 'D':
+ return "double";
+ case '[': // array
+ len = 1;
+ while (buf[offset + len] == '[') {
+ len++;
+ }
+
+ StringBuilder sb = new StringBuilder(getJavaType(buf, offset + len));
+ for (int i = len; i > 0; --i) {
+ sb.append("[]");
+ }
+ return sb.toString();
+ case 'L': // object
+ len = 1;
+ while (buf[offset + len] != ';') {
+ len++;
+ }
+ return new String(buf, offset + 1, len - 1);
+ default:
+ return new String(buf, offset, buf.length - offset);
+ }
+ }
+
+}