aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorLuck <git@lucko.me>2019-04-16 21:37:59 +0100
committerLuck <git@lucko.me>2019-04-16 21:37:59 +0100
commitecd4cec8545460a4fc4ca65b911c2503a00cd8e7 (patch)
tree62067383a1044abc3a09724e89c6e7c619e87ec0
parent8a61b404848ed8e3c27f06eb73239d37d4273240 (diff)
downloadspark-ecd4cec8545460a4fc4ca65b911c2503a00cd8e7.tar.gz
spark-ecd4cec8545460a4fc4ca65b911c2503a00cd8e7.tar.bz2
spark-ecd4cec8545460a4fc4ca65b911c2503a00cd8e7.zip
Lots of refactoring, add tps command
-rw-r--r--spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitTickCounter.java13
-rw-r--r--spark-bukkit/src/main/java/me/lucko/spark/bukkit/SparkBukkitPlugin.java129
-rw-r--r--spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/SparkBungeeCordPlugin.java126
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java105
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java49
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/command/Command.java3
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/command/CommandResponseHandler.java75
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/command/modules/MemoryModule.java120
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/command/modules/MonitoringModule.java51
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java79
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/command/modules/TickMonitoringModule.java36
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/memory/HeapDump.java (renamed from spark-common/src/main/java/me/lucko/spark/memory/HeapDump.java)9
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/memory/HeapDumpSummary.java (renamed from spark-common/src/main/java/me/lucko/spark/memory/HeapDumpSummary.java)12
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/monitor/gc/GarbageCollectionMonitor.java (renamed from spark-common/src/main/java/me/lucko/spark/monitor/GarbageCollectionMonitor.java)35
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TickMonitor.java (renamed from spark-common/src/main/java/me/lucko/spark/monitor/TickMonitor.java)26
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TpsCalculator.java170
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java (renamed from spark-common/src/main/java/me/lucko/spark/sampler/Sampler.java)11
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java (renamed from spark-common/src/main/java/me/lucko/spark/sampler/SamplerBuilder.java)2
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java (renamed from spark-common/src/main/java/me/lucko/spark/sampler/ThreadDumper.java)4
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java (renamed from spark-common/src/main/java/me/lucko/spark/sampler/ThreadGrouper.java)2
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/sampler/TickCounter.java (renamed from spark-common/src/main/java/me/lucko/spark/sampler/TickCounter.java)10
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/DataAggregator.java (renamed from spark-common/src/main/java/me/lucko/spark/sampler/aggregator/DataAggregator.java)4
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/SimpleDataAggregator.java (renamed from spark-common/src/main/java/me/lucko/spark/sampler/aggregator/SimpleDataAggregator.java)8
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/TickedDataAggregator.java (renamed from spark-common/src/main/java/me/lucko/spark/sampler/aggregator/TickedDataAggregator.java)10
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java (renamed from spark-common/src/main/java/me/lucko/spark/sampler/node/AbstractNode.java)2
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java (renamed from spark-common/src/main/java/me/lucko/spark/sampler/node/StackTraceNode.java)2
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java (renamed from spark-common/src/main/java/me/lucko/spark/sampler/node/ThreadNode.java)2
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/util/AbstractHttpClient.java45
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/util/BytebinClient.java149
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/util/ThreadFinder.java (renamed from spark-common/src/main/java/me/lucko/spark/util/ThreadFinder.java)2
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/util/TypeDescriptors.java (renamed from spark-common/src/main/java/me/lucko/spark/util/TypeDescriptors.java)2
-rw-r--r--spark-common/src/main/java/me/lucko/spark/util/BytebinClient.java93
-rw-r--r--spark-forge/src/main/java/me/lucko/spark/forge/ForgeClientSparkPlugin.java (renamed from spark-forge/src/main/java/me/lucko/spark/forge/ForgeClientSparkPlatform.java)18
-rw-r--r--spark-forge/src/main/java/me/lucko/spark/forge/ForgeServerSparkPlugin.java (renamed from spark-forge/src/main/java/me/lucko/spark/forge/ForgeServerSparkPlatform.java)28
-rw-r--r--spark-forge/src/main/java/me/lucko/spark/forge/ForgeSparkPlugin.java (renamed from spark-forge/src/main/java/me/lucko/spark/forge/ForgeSparkPlatform.java)52
-rw-r--r--spark-forge/src/main/java/me/lucko/spark/forge/ForgeTickCounter.java13
-rw-r--r--spark-forge/src/main/java/me/lucko/spark/forge/SparkForgeMod.java4
-rw-r--r--spark-sponge/src/main/java/me/lucko/spark/sponge/SparkSpongePlugin.java192
-rw-r--r--spark-sponge/src/main/java/me/lucko/spark/sponge/SpongeTickCounter.java18
-rw-r--r--spark-velocity/src/main/java/me/lucko/spark/velocity/SparkVelocityPlugin.java151
40 files changed, 1159 insertions, 703 deletions
diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitTickCounter.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitTickCounter.java
index 3b95399..44a69cb 100644
--- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitTickCounter.java
+++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitTickCounter.java
@@ -20,8 +20,7 @@
package me.lucko.spark.bukkit;
-import me.lucko.spark.sampler.TickCounter;
-
+import me.lucko.spark.common.sampler.TickCounter;
import org.bukkit.plugin.Plugin;
import org.bukkit.scheduler.BukkitTask;
@@ -32,7 +31,7 @@ public class BukkitTickCounter implements TickCounter, Runnable {
private final Plugin plugin;
private BukkitTask task;
- private final Set<Runnable> tasks = new HashSet<>();
+ private final Set<TickTask> tasks = new HashSet<>();
private int tick = 0;
public BukkitTickCounter(Plugin plugin) {
@@ -41,8 +40,8 @@ public class BukkitTickCounter implements TickCounter, Runnable {
@Override
public void run() {
- for (Runnable r : this.tasks) {
- r.run();
+ for (TickTask r : this.tasks) {
+ r.onTick(this);
}
this.tick++;
}
@@ -63,12 +62,12 @@ public class BukkitTickCounter implements TickCounter, Runnable {
}
@Override
- public void addTickTask(Runnable runnable) {
+ public void addTickTask(TickTask runnable) {
this.tasks.add(runnable);
}
@Override
- public void removeTickTask(Runnable runnable) {
+ public void removeTickTask(TickTask runnable) {
this.tasks.remove(runnable);
}
}
diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/SparkBukkitPlugin.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/SparkBukkitPlugin.java
index f1395c4..1d6160a 100644
--- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/SparkBukkitPlugin.java
+++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/SparkBukkitPlugin.java
@@ -21,100 +21,97 @@
package me.lucko.spark.bukkit;
import me.lucko.spark.common.SparkPlatform;
-import me.lucko.spark.sampler.ThreadDumper;
-import me.lucko.spark.sampler.TickCounter;
-
+import me.lucko.spark.common.SparkPlugin;
+import me.lucko.spark.common.sampler.ThreadDumper;
+import me.lucko.spark.common.sampler.TickCounter;
import org.bukkit.ChatColor;
import org.bukkit.command.Command;
import org.bukkit.command.CommandSender;
-import org.bukkit.entity.Player;
import org.bukkit.plugin.java.JavaPlugin;
import java.nio.file.Path;
import java.util.Collections;
+import java.util.HashSet;
import java.util.List;
+import java.util.Set;
-public class SparkBukkitPlugin extends JavaPlugin {
+public class SparkBukkitPlugin extends JavaPlugin implements SparkPlugin<CommandSender> {
- private final SparkPlatform<CommandSender> sparkPlatform = new SparkPlatform<CommandSender>() {
+ private final SparkPlatform<CommandSender> platform = new SparkPlatform<>(this);
- private String colorize(String message) {
- return ChatColor.translateAlternateColorCodes('&', message);
- }
+ @Override
+ public void onEnable() {
+ this.platform.enable();
+ }
- private void broadcast(String msg) {
- getServer().getConsoleSender().sendMessage(msg);
- for (Player player : getServer().getOnlinePlayers()) {
- if (player.hasPermission("spark")) {
- player.sendMessage(msg);
- }
- }
- }
+ @Override
+ public void onDisable() {
+ this.platform.disable();
+ }
- @Override
- public String getVersion() {
- return getDescription().getVersion();
+ @Override
+ public boolean onCommand(CommandSender sender, Command command, String label, String[] args) {
+ if (!sender.hasPermission("spark")) {
+ sender.sendMessage(ChatColor.RED + "You do not have permission to use this command.");
+ return true;
}
- @Override
- public Path getPluginFolder() {
- return getDataFolder().toPath();
- }
+ this.platform.executeCommand(sender, args);
+ return true;
+ }
- @Override
- public String getLabel() {
- return "spark";
+ @Override
+ public List<String> onTabComplete(CommandSender sender, Command command, String alias, String[] args) {
+ if (!sender.hasPermission("spark")) {
+ return Collections.emptyList();
}
+ return this.platform.tabCompleteCommand(sender, args);
+ }
- @Override
- public void sendMessage(CommandSender sender, String message) {
- sender.sendMessage(colorize(message));
- }
+ @Override
+ public String getVersion() {
+ return getDescription().getVersion();
+ }
- @Override
- public void sendMessage(String message) {
- String msg = colorize(message);
- broadcast(msg);
- }
+ @Override
+ public Path getPluginFolder() {
+ return getDataFolder().toPath();
+ }
- @Override
- public void sendLink(String url) {
- String msg = colorize("&7" + url);
- broadcast(msg);
- }
+ @Override
+ public String getLabel() {
+ return "spark";
+ }
- @Override
- public void runAsync(Runnable r) {
- getServer().getScheduler().runTaskAsynchronously(SparkBukkitPlugin.this, r);
- }
+ @Override
+ public Set<CommandSender> getSenders() {
+ Set<CommandSender> senders = new HashSet<>(getServer().getOnlinePlayers());
+ senders.add(getServer().getConsoleSender());
+ return senders;
+ }
- @Override
- public ThreadDumper getDefaultThreadDumper() {
- return new ThreadDumper.Specific(new long[]{Thread.currentThread().getId()});
- }
+ @Override
+ public void sendMessage(CommandSender sender, String message) {
+ sender.sendMessage(ChatColor.translateAlternateColorCodes('&', message));
+ }
- @Override
- public TickCounter newTickCounter() {
- return new BukkitTickCounter(SparkBukkitPlugin.this);
- }
- };
+ @Override
+ public void sendLink(CommandSender sender, String url) {
+ sendMessage(sender, "&7" + url);
+ }
@Override
- public boolean onCommand(CommandSender sender, Command command, String label, String[] args) {
- if (!sender.hasPermission("spark")) {
- sender.sendMessage(ChatColor.RED + "You do not have permission to use this command.");
- return true;
- }
+ public void runAsync(Runnable r) {
+ getServer().getScheduler().runTaskAsynchronously(SparkBukkitPlugin.this, r);
+ }
- this.sparkPlatform.executeCommand(sender, args);
- return true;
+ @Override
+ public ThreadDumper getDefaultThreadDumper() {
+ return new ThreadDumper.Specific(new long[]{Thread.currentThread().getId()});
}
@Override
- public List<String> onTabComplete(CommandSender sender, Command command, String alias, String[] args) {
- if (!sender.hasPermission("spark")) {
- return Collections.emptyList();
- }
- return this.sparkPlatform.tabCompleteCommand(sender, args);
+ public TickCounter createTickCounter() {
+ return new BukkitTickCounter(this);
}
}
diff --git a/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/SparkBungeeCordPlugin.java b/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/SparkBungeeCordPlugin.java
index 6d23683..5c39d34 100644
--- a/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/SparkBungeeCordPlugin.java
+++ b/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/SparkBungeeCordPlugin.java
@@ -21,95 +21,93 @@
package me.lucko.spark.bungeecord;
import me.lucko.spark.common.SparkPlatform;
-import me.lucko.spark.sampler.ThreadDumper;
-import me.lucko.spark.sampler.TickCounter;
-
+import me.lucko.spark.common.SparkPlugin;
+import me.lucko.spark.common.sampler.ThreadDumper;
+import me.lucko.spark.common.sampler.TickCounter;
import net.md_5.bungee.api.ChatColor;
import net.md_5.bungee.api.CommandSender;
-import net.md_5.bungee.api.chat.BaseComponent;
import net.md_5.bungee.api.chat.ClickEvent;
import net.md_5.bungee.api.chat.TextComponent;
-import net.md_5.bungee.api.connection.ProxiedPlayer;
import net.md_5.bungee.api.plugin.Command;
import net.md_5.bungee.api.plugin.Plugin;
import net.md_5.bungee.api.plugin.TabExecutor;
import java.nio.file.Path;
import java.util.Collections;
+import java.util.HashSet;
+import java.util.Set;
-public class SparkBungeeCordPlugin extends Plugin {
+public class SparkBungeeCordPlugin extends Plugin implements SparkPlugin<CommandSender> {
- private final SparkPlatform<CommandSender> sparkPlatform = new SparkPlatform<CommandSender>() {
- private BaseComponent[] colorize(String message) {
- return TextComponent.fromLegacyText(ChatColor.translateAlternateColorCodes('&', message));
- }
+ private final SparkPlatform<CommandSender> platform = new SparkPlatform<>(this);
- private void broadcast(BaseComponent... msg) {
- getProxy().getConsole().sendMessage(msg);
- for (ProxiedPlayer player : getProxy().getPlayers()) {
- if (player.hasPermission("spark")) {
- player.sendMessage(msg);
- }
- }
- }
+ @Override
+ public void onEnable() {
+ this.platform.enable();
+ getProxy().getPluginManager().registerCommand(this, new SparkCommand(this));
+ }
- @Override
- public String getVersion() {
- return getDescription().getVersion();
- }
+ @Override
+ public void onDisable() {
+ this.platform.disable();
+ }
- @Override
- public Path getPluginFolder() {
- return getDataFolder().toPath();
- }
+ @Override
+ public String getVersion() {
+ return getDescription().getVersion();
+ }
- @Override
- public String getLabel() {
- return "sparkb";
- }
+ @Override
+ public Path getPluginFolder() {
+ return getDataFolder().toPath();
+ }
- @Override
- public void sendMessage(CommandSender sender, String message) {
- sender.sendMessage(colorize(message));
- }
+ @Override
+ public String getLabel() {
+ return "sparkb";
+ }
- @Override
- public void sendMessage(String message) {
- broadcast(colorize(message));
- }
+ @Override
+ public Set<CommandSender> getSenders() {
+ Set<CommandSender> senders = new HashSet<>(getProxy().getPlayers());
+ senders.add(getProxy().getConsole());
+ return senders;
+ }
- @Override
- public void sendLink(String url) {
- TextComponent component = new TextComponent(url);
- component.setColor(ChatColor.GRAY);
- component.setClickEvent(new ClickEvent(ClickEvent.Action.OPEN_URL, url));
- broadcast(component);
- }
+ @Override
+ public void sendMessage(CommandSender sender, String message) {
+ sender.sendMessage(TextComponent.fromLegacyText(ChatColor.translateAlternateColorCodes('&', message)));
+ }
- @Override
- public void runAsync(Runnable r) {
- getProxy().getScheduler().runAsync(SparkBungeeCordPlugin.this, r);
- }
+ @Override
+ public void sendLink(CommandSender sender, String url) {
+ TextComponent component = new TextComponent(url);
+ component.setColor(ChatColor.GRAY);
+ component.setClickEvent(new ClickEvent(ClickEvent.Action.OPEN_URL, url));
+ sender.sendMessage(component);
+ }
- @Override
- public ThreadDumper getDefaultThreadDumper() {
- return ThreadDumper.ALL;
- }
+ @Override
+ public void runAsync(Runnable r) {
+ getProxy().getScheduler().runAsync(SparkBungeeCordPlugin.this, r);
+ }
- @Override
- public TickCounter newTickCounter() {
- throw new UnsupportedOperationException();
- }
- };
+ @Override
+ public ThreadDumper getDefaultThreadDumper() {
+ return ThreadDumper.ALL;
+ }
@Override
- public void onEnable() {
- getProxy().getPluginManager().registerCommand(this, new SparkCommand());
+ public TickCounter createTickCounter() {
+ return null;
}
- private final class SparkCommand extends Command implements TabExecutor {
- public SparkCommand() {
+ private static final class SparkCommand extends Command implements TabExecutor {
+ private final SparkBungeeCordPlugin plugin;
+
+ SparkCommand(SparkBungeeCordPlugin plugin) {
super("sparkb", null, "sparkbungee");
+ this.plugin = plugin;
}
@Override
@@ -121,7 +119,7 @@ public class SparkBungeeCordPlugin extends Plugin {
return;
}
- SparkBungeeCordPlugin.this.sparkPlatform.executeCommand(sender, args);
+ this.plugin.platform.executeCommand(sender, args);
}
@Override
@@ -129,7 +127,7 @@ public class SparkBungeeCordPlugin extends Plugin {
if (!sender.hasPermission("spark")) {
return Collections.emptyList();
}
- return SparkBungeeCordPlugin.this.sparkPlatform.tabCompleteCommand(sender, args);
+ return this.plugin.platform.tabCompleteCommand(sender, args);
}
}
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
index ef21d1c..8eb4565 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
@@ -21,19 +21,20 @@
package me.lucko.spark.common;
import com.google.common.collect.ImmutableList;
-
import me.lucko.spark.common.command.Arguments;
import me.lucko.spark.common.command.Command;
+import me.lucko.spark.common.command.CommandResponseHandler;
import me.lucko.spark.common.command.modules.MemoryModule;
+import me.lucko.spark.common.command.modules.MonitoringModule;
import me.lucko.spark.common.command.modules.SamplerModule;
import me.lucko.spark.common.command.modules.TickMonitoringModule;
import me.lucko.spark.common.command.tabcomplete.CompletionSupplier;
import me.lucko.spark.common.command.tabcomplete.TabCompleter;
-import me.lucko.spark.sampler.ThreadDumper;
-import me.lucko.spark.sampler.TickCounter;
-import me.lucko.spark.util.BytebinClient;
+import me.lucko.spark.common.monitor.tick.TpsCalculator;
+import me.lucko.spark.common.sampler.TickCounter;
+import me.lucko.spark.common.util.BytebinClient;
+import okhttp3.OkHttpClient;
-import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@@ -41,52 +42,68 @@ import java.util.List;
import java.util.stream.Collectors;
/**
- * Abstract command handling class used by all platforms.
+ * Abstract spark implementation used by all platforms.
*
* @param <S> the sender (e.g. CommandSender) type used by the platform
*/
-public abstract class SparkPlatform<S> {
+public class SparkPlatform<S> {
/** The URL of the viewer frontend */
public static final String VIEWER_URL = "https://sparkprofiler.github.io/#";
+ /** The shared okhttp client */
+ private static final OkHttpClient OK_HTTP_CLIENT = new OkHttpClient();
/** The bytebin instance used by the platform */
- public static final BytebinClient BYTEBIN_CLIENT = new BytebinClient("https://bytebin.lucko.me/", "spark-plugin");
-
- /** The prefix used in all messages */
- private static final String PREFIX = "&8[&fspark&8] &7";
-
- private static <T> List<Command<T>> prepareCommands() {
- ImmutableList.Builder<Command<T>> builder = ImmutableList.builder();
- new SamplerModule<T>().registerCommands(builder::add);
- new TickMonitoringModule<T>().registerCommands(builder::add);
- new MemoryModule<T>().registerCommands(builder::add);
- return builder.build();
+ public static final BytebinClient BYTEBIN_CLIENT = new BytebinClient(OK_HTTP_CLIENT, "https://bytebin.lucko.me/", "spark-plugin");
+
+ private final List<Command<S>> commands;
+ private final SparkPlugin<S> plugin;
+
+ private final TickCounter tickCounter;
+ private final TpsCalculator tpsCalculator;
+
+ public SparkPlatform(SparkPlugin<S> plugin) {
+ this.plugin = plugin;
+
+ ImmutableList.Builder<Command<S>> commandsBuilder = ImmutableList.builder();
+ new SamplerModule<S>().registerCommands(commandsBuilder::add);
+ new MonitoringModule<S>().registerCommands(commandsBuilder::add);
+ new TickMonitoringModule<S>().registerCommands(commandsBuilder::add);
+ new MemoryModule<S>().registerCommands(commandsBuilder::add);
+ this.commands = commandsBuilder.build();
+
+ this.tickCounter = plugin.createTickCounter();
+ this.tpsCalculator = this.tickCounter != null ? new TpsCalculator() : null;
+ }
+
+ public void enable() {
+ if (this.tickCounter != null) {
+ this.tickCounter.addTickTask(this.tpsCalculator);
+ this.tickCounter.start();
+ }
+ }
+
+ public void disable() {
+ if (this.tickCounter != null) {
+ this.tickCounter.close();
+ }
+ }
+
+ public SparkPlugin<S> getPlugin() {
+ return this.plugin;
}
- private final List<Command<S>> commands = prepareCommands();
-
- // abstract methods implemented by each platform
- public abstract String getVersion();
- public abstract Path getPluginFolder();
- public abstract String getLabel();
- public abstract void sendMessage(S sender, String message);
- public abstract void sendMessage(String message);
- public abstract void sendLink(String url);
- public abstract void runAsync(Runnable r);
- public abstract ThreadDumper getDefaultThreadDumper();
- public abstract TickCounter newTickCounter();
-
- public void sendPrefixedMessage(S sender, String message) {
- sendMessage(sender, PREFIX + message);
+ public TickCounter getTickCounter() {
+ return this.tickCounter;
}
- public void sendPrefixedMessage(String message) {
- sendMessage(PREFIX + message);
+ public TpsCalculator getTpsCalculator() {
+ return this.tpsCalculator;
}
public void executeCommand(S sender, String[] args) {
+ CommandResponseHandler<S> resp = new CommandResponseHandler<>(this, sender);
if (args.length == 0) {
- sendUsage(sender);
+ sendUsage(resp);
return;
}
@@ -96,15 +113,15 @@ public abstract class SparkPlatform<S> {
for (Command<S> command : this.commands) {
if (command.aliases().contains(alias)) {
try {
- command.executor().execute(this, sender, new Arguments(rawArgs));
+ command.executor().execute(this, sender, resp, new Arguments(rawArgs));
} catch (IllegalArgumentException e) {
- sendMessage(sender, "&c" + e.getMessage());
+ resp.replyPrefixed("&c" + e.getMessage());
}
return;
}
}
- sendUsage(sender);
+ sendUsage(resp);
}
public List<String> tabCompleteCommand(S sender, String[] args) {
@@ -127,15 +144,15 @@ public abstract class SparkPlatform<S> {
return Collections.emptyList();
}
- private void sendUsage(S sender) {
- sendPrefixedMessage(sender, "&fspark &7v" + getVersion());
+ private void sendUsage(CommandResponseHandler<S> sender) {
+ sender.replyPrefixed("&fspark &7v" + getPlugin().getVersion());
for (Command<S> command : this.commands) {
- sendMessage(sender, "&b&l> &7/" + getLabel() + " " + command.aliases().get(0));
+ sender.reply("&b&l> &7/" + getPlugin().getLabel() + " " + command.aliases().get(0));
for (Command.ArgumentInfo arg : command.arguments()) {
if (arg.requiresParameter()) {
- sendMessage(sender, " &8[&7--" + arg.argumentName() + "&8 <" + arg.parameterDescription() + ">]");
+ sender.reply(" &8[&7--" + arg.argumentName() + "&8 <" + arg.parameterDescription() + ">]");
} else {
- sendMessage(sender, " &8[&7--" + arg.argumentName() + "]");
+ sender.reply(" &8[&7--" + arg.argumentName() + "]");
}
}
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java
new file mode 100644
index 0000000..7a3a353
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java
@@ -0,0 +1,49 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common;
+
+import me.lucko.spark.common.sampler.ThreadDumper;
+import me.lucko.spark.common.sampler.TickCounter;
+
+import java.nio.file.Path;
+import java.util.Set;
+
+public interface SparkPlugin<S> {
+
+ String getVersion();
+
+ Path getPluginFolder();
+
+ String getLabel();
+
+ Set<S> getSenders();
+
+ void sendMessage(S sender, String message);
+
+ void sendLink(S sender, String url);
+
+ void runAsync(Runnable r);
+
+ ThreadDumper getDefaultThreadDumper();
+
+ TickCounter createTickCounter();
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/Command.java b/spark-common/src/main/java/me/lucko/spark/common/command/Command.java
index fb440b1..c9f6551 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/Command.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/Command.java
@@ -21,7 +21,6 @@
package me.lucko.spark.common.command;
import com.google.common.collect.ImmutableList;
-
import me.lucko.spark.common.SparkPlatform;
import java.util.Collections;
@@ -109,7 +108,7 @@ public class Command<S> {
@FunctionalInterface
public interface Executor<S> {
- void execute(SparkPlatform<S> platform, S sender, Arguments arguments);
+ void execute(SparkPlatform<S> platform, S sender, CommandResponseHandler resp, Arguments arguments);
}
@FunctionalInterface
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/CommandResponseHandler.java b/spark-common/src/main/java/me/lucko/spark/common/command/CommandResponseHandler.java
new file mode 100644
index 0000000..a5a7391
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/CommandResponseHandler.java
@@ -0,0 +1,75 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.command;
+
+import me.lucko.spark.common.SparkPlatform;
+
+import java.util.Set;
+import java.util.function.Consumer;
+
+public class CommandResponseHandler<S> {
+
+ /** The prefix used in all messages */
+ private static final String PREFIX = "&8[&fspark&8] &7";
+
+ private final SparkPlatform<S> platform;
+ private final S sender;
+
+ public CommandResponseHandler(SparkPlatform<S> platform, S sender) {
+ this.platform = platform;
+ this.sender = sender;
+ }
+
+ public S sender() {
+ return this.sender;
+ }
+
+ public void allSenders(Consumer<? super S> action) {
+ Set<S> senders = this.platform.getPlugin().getSenders();
+ senders.add(this.sender);
+ senders.forEach(action);
+ }
+
+ public void reply(String message) {
+ this.platform.getPlugin().sendMessage(this.sender, message);
+ }
+
+ public void broadcast(String message) {
+ allSenders(sender -> this.platform.getPlugin().sendMessage(sender, message));
+ }
+
+ public void replyPrefixed(String message) {
+ this.platform.getPlugin().sendMessage(this.sender, PREFIX + message);
+ }
+
+ public void broadcastPrefixed(String message) {
+ allSenders(sender -> this.platform.getPlugin().sendMessage(sender, PREFIX + message));
+ }
+
+ public void replyLink(String link) {
+ this.platform.getPlugin().sendLink(this.sender, link);
+ }
+
+ public void broadcastLink(String link) {
+ allSenders(sender -> this.platform.getPlugin().sendLink(sender, link));
+ }
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/MemoryModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/MemoryModule.java
index 5f17d54..2cb2e07 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/MemoryModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/MemoryModule.java
@@ -24,9 +24,8 @@ import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.command.Command;
import me.lucko.spark.common.command.CommandModule;
import me.lucko.spark.common.command.tabcomplete.TabCompleter;
-import me.lucko.spark.memory.HeapDump;
-import me.lucko.spark.memory.HeapDumpSummary;
-
+import me.lucko.spark.common.memory.HeapDump;
+import me.lucko.spark.common.memory.HeapDumpSummary;
import okhttp3.MediaType;
import java.io.IOException;
@@ -44,34 +43,34 @@ public class MemoryModule<S> implements CommandModule<S> {
consumer.accept(Command.<S>builder()
.aliases("heapsummary")
.argumentUsage("run-gc-before", null)
- .executor((platform, sender, arguments) -> {
- platform.runAsync(() -> {
- if (arguments.boolFlag("run-gc-before")) {
- platform.sendPrefixedMessage("&7Running garbage collector...");
- System.gc();
- }
-
- platform.sendPrefixedMessage("&7Creating a new heap dump summary, please wait...");
-
- HeapDumpSummary heapDump;
- try {
- heapDump = HeapDumpSummary.createNew();
- } catch (Exception e) {
- platform.sendPrefixedMessage("&cAn error occurred whilst inspecting the heap.");
- e.printStackTrace();
- return;
- }
-
- byte[] output = heapDump.formCompressedDataPayload();
- try {
- String key = SparkPlatform.BYTEBIN_CLIENT.postGzippedContent(output, JSON_TYPE);
- platform.sendPrefixedMessage("&bHeap dump summmary output:");
- platform.sendLink(SparkPlatform.VIEWER_URL + key);
- } catch (IOException e) {
- platform.sendPrefixedMessage("&cAn error occurred whilst uploading the data.");
- e.printStackTrace();
- }
- });
+ .executor((platform, sender, resp, arguments) -> {
+ platform.getPlugin().runAsync(() -> {
+ if (arguments.boolFlag("run-gc-before")) {
+ resp.broadcastPrefixed("&7Running garbage collector...");
+ System.gc();
+ }
+
+ resp.broadcastPrefixed("&7Creating a new heap dump summary, please wait...");
+
+ HeapDumpSummary heapDump;
+ try {
+ heapDump = HeapDumpSummary.createNew();
+ } catch (Exception e) {
+ resp.broadcastPrefixed("&cAn error occurred whilst inspecting the heap.");
+ e.printStackTrace();
+ return;
+ }
+
+ byte[] output = heapDump.formCompressedDataPayload();
+ try {
+ String key = SparkPlatform.BYTEBIN_CLIENT.postContent(output, JSON_TYPE, false).key();
+ resp.broadcastPrefixed("&bHeap dump summmary output:");
+ resp.broadcastLink(SparkPlatform.VIEWER_URL + key);
+ } catch (IOException e) {
+ resp.broadcastPrefixed("&cAn error occurred whilst uploading the data.");
+ e.printStackTrace();
+ }
+ });
})
.tabCompleter((platform, sender, arguments) -> TabCompleter.completeForOpts(arguments, "--run-gc-before"))
.build()
@@ -81,35 +80,36 @@ public class MemoryModule<S> implements CommandModule<S> {
.aliases("heapdump")
.argumentUsage("run-gc-before", null)
.argumentUsage("include-non-live", null)
- .executor((platform, sender, arguments) -> {
- platform.runAsync(() -> {
- Path pluginFolder = platform.getPluginFolder();
- try {
- Files.createDirectories(pluginFolder);
- } catch (IOException e) {
- // ignore
- }
-
- Path file = pluginFolder.resolve("heap-" + DateTimeFormatter.ofPattern("yyyy-MM-dd_HH.mm.ss").format(LocalDateTime.now()) + (HeapDump.isOpenJ9() ? ".phd" : ".hprof"));
- boolean liveOnly = !arguments.boolFlag("include-non-live");
-
- if (arguments.boolFlag("run-gc-before")) {
- platform.sendPrefixedMessage("&7Running garbage collector...");
- System.gc();
- }
-
- platform.sendPrefixedMessage("&7Creating a new heap dump, please wait...");
-
- try {
- HeapDump.dumpHeap(file, liveOnly);
- } catch (Exception e) {
- platform.sendPrefixedMessage("&cAn error occurred whilst creating a heap dump.");
- e.printStackTrace();
- return;
- }
-
- platform.sendPrefixedMessage("&bHeap dump written to: " + file.toString());
- });
+ .executor((platform, sender, resp, arguments) -> {
+
+ platform.getPlugin().runAsync(() -> {
+ Path pluginFolder = platform.getPlugin().getPluginFolder();
+ try {
+ Files.createDirectories(pluginFolder);
+ } catch (IOException e) {
+ // ignore
+ }
+
+ Path file = pluginFolder.resolve("heap-" + DateTimeFormatter.ofPattern("yyyy-MM-dd_HH.mm.ss").format(LocalDateTime.now()) + (HeapDump.isOpenJ9() ? ".phd" : ".hprof"));
+ boolean liveOnly = !arguments.boolFlag("include-non-live");
+
+ if (arguments.boolFlag("run-gc-before")) {
+ resp.broadcastPrefixed("&7Running garbage collector...");
+ System.gc();
+ }
+
+ resp.broadcastPrefixed("&7Creating a new heap dump, please wait...");
+
+ try {
+ HeapDump.dumpHeap(file, liveOnly);
+ } catch (Exception e) {
+ resp.broadcastPrefixed("&cAn error occurred whilst creating a heap dump.");
+ e.printStackTrace();
+ return;
+ }
+
+ resp.broadcastPrefixed("&bHeap dump written to: " + file.toString());
+ });
})
.tabCompleter((platform, sender, arguments) -> TabCompleter.completeForOpts(arguments, "--run-gc-before", "--include-non-live"))
.build()
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/MonitoringModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/MonitoringModule.java
new file mode 100644
index 0000000..b543e1d
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/MonitoringModule.java
@@ -0,0 +1,51 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.command.modules;
+
+import me.lucko.spark.common.command.Command;
+import me.lucko.spark.common.command.CommandModule;
+import me.lucko.spark.common.monitor.tick.TpsCalculator;
+
+import java.util.function.Consumer;
+
+public class MonitoringModule<S> implements CommandModule<S> {
+
+ @Override
+ public void registerCommands(Consumer<Command<S>> consumer) {
+ consumer.accept(Command.<S>builder()
+ .aliases("tps")
+ .executor((platform, sender, resp, arguments) -> {
+ TpsCalculator tpsCalculator = platform.getTpsCalculator();
+ if (tpsCalculator == null) {
+ resp.replyPrefixed("TPS data is not available.");
+ return;
+ }
+
+ String formattedTpsString = tpsCalculator.toFormattedString();
+ resp.replyPrefixed("TPS from last 5s, 10s, 1m, 5m, 15m");
+ resp.replyPrefixed(formattedTpsString);
+ })
+ .tabCompleter(Command.TabCompleter.empty())
+ .build()
+ );
+ }
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
index 9d00a96..a0f171c 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
@@ -23,14 +23,14 @@ package me.lucko.spark.common.command.modules;
import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.command.Command;
import me.lucko.spark.common.command.CommandModule;
+import me.lucko.spark.common.command.CommandResponseHandler;
import me.lucko.spark.common.command.tabcomplete.CompletionSupplier;
import me.lucko.spark.common.command.tabcomplete.TabCompleter;
-import me.lucko.spark.sampler.Sampler;
-import me.lucko.spark.sampler.SamplerBuilder;
-import me.lucko.spark.sampler.ThreadDumper;
-import me.lucko.spark.sampler.ThreadGrouper;
-import me.lucko.spark.sampler.TickCounter;
-
+import me.lucko.spark.common.sampler.Sampler;
+import me.lucko.spark.common.sampler.SamplerBuilder;
+import me.lucko.spark.common.sampler.ThreadDumper;
+import me.lucko.spark.common.sampler.ThreadGrouper;
+import me.lucko.spark.common.sampler.TickCounter;
import okhttp3.MediaType;
import java.io.IOException;
@@ -62,15 +62,15 @@ public class SamplerModule<S> implements CommandModule<S> {
.argumentUsage("interval", "interval millis")
.argumentUsage("only-ticks-over", "tick length millis")
.argumentUsage("include-line-numbers", null)
- .executor((platform, sender, arguments) -> {
+ .executor((platform, sender, resp, arguments) -> {
int timeoutSeconds = arguments.intFlag("timeout");
if (timeoutSeconds != -1 && timeoutSeconds <= 10) {
- platform.sendPrefixedMessage(sender, "&cThe specified timeout is not long enough for accurate results to be formed. Please choose a value greater than 10.");
+ resp.replyPrefixed("&cThe specified timeout is not long enough for accurate results to be formed. Please choose a value greater than 10.");
return;
}
if (timeoutSeconds != -1 && timeoutSeconds < 30) {
- platform.sendPrefixedMessage(sender, "&7The accuracy of the output will significantly improve when sampling is able to run for longer periods. Consider setting a timeout value over 30 seconds.");
+ resp.replyPrefixed("&7The accuracy of the output will significantly improve when sampling is able to run for longer periods. Consider setting a timeout value over 30 seconds.");
}
double intervalMillis = arguments.doubleFlag("interval");
@@ -84,7 +84,7 @@ public class SamplerModule<S> implements CommandModule<S> {
ThreadDumper threadDumper;
if (threads.isEmpty()) {
// use the server thread
- threadDumper = platform.getDefaultThreadDumper();
+ threadDumper = platform.getPlugin().getDefaultThreadDumper();
} else if (threads.contains("*")) {
threadDumper = ThreadDumper.ALL;
} else {
@@ -108,10 +108,9 @@ public class SamplerModule<S> implements CommandModule<S> {
int ticksOver = arguments.intFlag("only-ticks-over");
TickCounter tickCounter = null;
if (ticksOver != -1) {
- try {
- tickCounter = platform.newTickCounter();
- } catch (UnsupportedOperationException e) {
- platform.sendPrefixedMessage(sender, "&cTick counting is not supported!");
+ tickCounter = platform.getTickCounter();
+ if (tickCounter == null) {
+ resp.replyPrefixed("&cTick counting is not supported!");
return;
}
}
@@ -119,11 +118,11 @@ public class SamplerModule<S> implements CommandModule<S> {
Sampler sampler;
synchronized (this.activeSamplerMutex) {
if (this.activeSampler != null) {
- platform.sendPrefixedMessage(sender, "&7An active sampler is already running.");
+ resp.replyPrefixed("&7An active sampler is already running.");
return;
}
- platform.sendPrefixedMessage("&7Initializing a new profiler, please wait...");
+ resp.broadcastPrefixed("&7Initializing a new profiler, please wait...");
SamplerBuilder builder = new SamplerBuilder();
builder.threadDumper(threadDumper);
@@ -138,11 +137,11 @@ public class SamplerModule<S> implements CommandModule<S> {
}
sampler = this.activeSampler = builder.start();
- platform.sendPrefixedMessage("&bProfiler now active!");
+ resp.broadcastPrefixed("&bProfiler now active!");
if (timeoutSeconds == -1) {
- platform.sendPrefixedMessage("&7Use '/" + platform.getLabel() + " stop' to stop profiling and upload the results.");
+ resp.broadcastPrefixed("&7Use '/" + platform.getPlugin().getLabel() + " stop' to stop profiling and upload the results.");
} else {
- platform.sendPrefixedMessage("&7The results will be automatically returned after the profiler has been running for " + timeoutSeconds + " seconds.");
+ resp.broadcastPrefixed("&7The results will be automatically returned after the profiler has been running for " + timeoutSeconds + " seconds.");
}
}
@@ -151,7 +150,7 @@ public class SamplerModule<S> implements CommandModule<S> {
// send message if profiling fails
future.whenCompleteAsync((s, throwable) -> {
if (throwable != null) {
- platform.sendPrefixedMessage("&cSampling operation failed unexpectedly. Error: " + throwable.toString());
+ resp.broadcastPrefixed("&cSampling operation failed unexpectedly. Error: " + throwable.toString());
throwable.printStackTrace();
}
});
@@ -168,8 +167,8 @@ public class SamplerModule<S> implements CommandModule<S> {
// await the result
if (timeoutSeconds != -1) {
future.thenAcceptAsync(s -> {
- platform.sendPrefixedMessage("&7The active sampling operation has completed! Uploading results...");
- handleUpload(platform, s);
+ resp.broadcastPrefixed("&7The active sampling operation has completed! Uploading results...");
+ handleUpload(platform, resp, s);
});
}
})
@@ -188,21 +187,21 @@ public class SamplerModule<S> implements CommandModule<S> {
consumer.accept(Command.<S>builder()
.aliases("info")
- .executor((platform, sender, arguments) -> {
+ .executor((platform, sender, resp, arguments) -> {
synchronized (this.activeSamplerMutex) {
if (this.activeSampler == null) {
- platform.sendPrefixedMessage(sender, "&7There isn't an active sampling task running.");
+ resp.replyPrefixed("&7There isn't an active sampling task running.");
} else {
long timeout = this.activeSampler.getEndTime();
if (timeout == -1) {
- platform.sendPrefixedMessage(sender, "&7There is an active sampler currently running, with no defined timeout.");
+ resp.replyPrefixed("&7There is an active sampler currently running, with no defined timeout.");
} else {
long timeoutDiff = (timeout - System.currentTimeMillis()) / 1000L;
- platform.sendPrefixedMessage(sender, "&7There is an active sampler currently running, due to timeout in " + timeoutDiff + " seconds.");
+ resp.replyPrefixed("&7There is an active sampler currently running, due to timeout in " + timeoutDiff + " seconds.");
}
long runningTime = (System.currentTimeMillis() - this.activeSampler.getStartTime()) / 1000L;
- platform.sendPrefixedMessage(sender, "&7It has been sampling for " + runningTime + " seconds so far.");
+ resp.replyPrefixed("&7It has been sampling for " + runningTime + " seconds so far.");
}
}
})
@@ -211,14 +210,14 @@ public class SamplerModule<S> implements CommandModule<S> {
consumer.accept(Command.<S>builder()
.aliases("stop", "upload", "paste")
- .executor((platform, sender, arguments) -> {
+ .executor((platform, sender, resp, arguments) -> {
synchronized (this.activeSamplerMutex) {
if (this.activeSampler == null) {
- platform.sendPrefixedMessage(sender, "&7There isn't an active sampling task running.");
+ resp.replyPrefixed("&7There isn't an active sampling task running.");
} else {
this.activeSampler.cancel();
- platform.sendPrefixedMessage("&7The active sampling operation has been stopped! Uploading results...");
- handleUpload(platform, this.activeSampler);
+ resp.broadcastPrefixed("&7The active sampling operation has been stopped! Uploading results...");
+ handleUpload(platform, resp, this.activeSampler);
this.activeSampler = null;
}
}
@@ -228,14 +227,14 @@ public class SamplerModule<S> implements CommandModule<S> {
consumer.accept(Command.<S>builder()
.aliases("cancel")
- .executor((platform, sender, arguments) -> {
+ .executor((platform, sender, resp, arguments) -> {
synchronized (this.activeSamplerMutex) {
if (this.activeSampler == null) {
- platform.sendPrefixedMessage(sender, "&7There isn't an active sampling task running.");
+ resp.replyPrefixed("&7There isn't an active sampling task running.");
} else {
this.activeSampler.cancel();
this.activeSampler = null;
- platform.sendPrefixedMessage("&bThe active sampling task has been cancelled.");
+ resp.broadcastPrefixed("&bThe active sampling task has been cancelled.");
}
}
})
@@ -243,15 +242,15 @@ public class SamplerModule<S> implements CommandModule<S> {
);
}
- private void handleUpload(SparkPlatform<S> platform, Sampler sampler) {
- platform.runAsync(() -> {
+ private void handleUpload(SparkPlatform<S> platform, CommandResponseHandler<S> resp, Sampler sampler) {
+ platform.getPlugin().runAsync(() -> {
byte[] output = sampler.formCompressedDataPayload();
try {
- String key = SparkPlatform.BYTEBIN_CLIENT.postGzippedContent(output, JSON_TYPE);
- platform.sendPrefixedMessage("&bSampling results:");
- platform.sendLink(SparkPlatform.VIEWER_URL + key);
+ String key = SparkPlatform.BYTEBIN_CLIENT.postContent(output, JSON_TYPE, false).key();
+ resp.broadcastPrefixed("&bSampling results:");
+ resp.broadcastLink(SparkPlatform.VIEWER_URL + key);
} catch (IOException e) {
- platform.sendPrefixedMessage("&cAn error occurred whilst uploading the results.");
+ resp.broadcastPrefixed("&cAn error occurred whilst uploading the results.");
e.printStackTrace();
}
});
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/TickMonitoringModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/TickMonitoringModule.java
index 3ad8909..bea7a07 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/TickMonitoringModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/TickMonitoringModule.java
@@ -20,12 +20,12 @@
package me.lucko.spark.common.command.modules;
-import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.command.Command;
import me.lucko.spark.common.command.CommandModule;
+import me.lucko.spark.common.command.CommandResponseHandler;
import me.lucko.spark.common.command.tabcomplete.TabCompleter;
-import me.lucko.spark.monitor.TickMonitor;
-import me.lucko.spark.sampler.TickCounter;
+import me.lucko.spark.common.monitor.tick.TickMonitor;
+import me.lucko.spark.common.sampler.TickCounter;
import java.util.function.Consumer;
@@ -37,27 +37,29 @@ public class TickMonitoringModule<S> implements CommandModule<S> {
@Override
public void registerCommands(Consumer<Command<S>> consumer) {
consumer.accept(Command.<S>builder()
- .aliases("monitoring")
+ .aliases("tickmonitoring")
.argumentUsage("threshold", "percentage increase")
.argumentUsage("without-gc", null)
- .executor((platform, sender, arguments) -> {
- if (this.activeTickMonitor == null) {
+ .executor((platform, sender, resp, arguments) -> {
+ TickCounter tickCounter = platform.getTickCounter();
+ if (tickCounter == null) {
+ resp.replyPrefixed("&cNot supported!");
+ return;
+ }
+ if (this.activeTickMonitor == null) {
int threshold = arguments.intFlag("threshold");
if (threshold == -1) {
threshold = 100;
}
- try {
- TickCounter tickCounter = platform.newTickCounter();
- this.activeTickMonitor = new ReportingTickMonitor(platform, tickCounter, threshold, !arguments.boolFlag("without-gc"));
- } catch (UnsupportedOperationException e) {
- platform.sendPrefixedMessage(sender, "&cNot supported!");
- }
+ this.activeTickMonitor = new ReportingTickMonitor(resp, tickCounter, threshold, !arguments.boolFlag("without-gc"));
+ tickCounter.addTickTask(this.activeTickMonitor);
} else {
+ tickCounter.removeTickTask(this.activeTickMonitor);
this.activeTickMonitor.close();
this.activeTickMonitor = null;
- platform.sendPrefixedMessage("&7Tick monitor disabled.");
+ resp.broadcastPrefixed("&7Tick monitor disabled.");
}
})
.tabCompleter((platform, sender, arguments) -> TabCompleter.completeForOpts(arguments, "--threshold", "--without-gc"))
@@ -66,16 +68,16 @@ public class TickMonitoringModule<S> implements CommandModule<S> {
}
private class ReportingTickMonitor extends TickMonitor {
- private final SparkPlatform<S> platform;
+ private final CommandResponseHandler<S> resp;
- ReportingTickMonitor(SparkPlatform<S> platform, TickCounter tickCounter, int percentageChangeThreshold, boolean monitorGc) {
+ ReportingTickMonitor(CommandResponseHandler<S> resp, TickCounter tickCounter, int percentageChangeThreshold, boolean monitorGc) {
super(tickCounter, percentageChangeThreshold, monitorGc);
- this.platform = platform;
+ this.resp = resp;
}
@Override
protected void sendMessage(String message) {
- this.platform.sendPrefixedMessage(message);
+ this.resp.broadcastPrefixed(message);
}
}
}
diff --git a/spark-common/src/main/java/me/lucko/spark/memory/HeapDump.java b/spark-common/src/main/java/me/lucko/spark/common/memory/HeapDump.java
index 4017d64..6200c17 100644
--- a/spark-common/src/main/java/me/lucko/spark/memory/HeapDump.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/memory/HeapDump.java
@@ -18,17 +18,16 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-package me.lucko.spark.memory;
+package me.lucko.spark.common.memory;
+import javax.management.JMX;
+import javax.management.MBeanServer;
+import javax.management.ObjectName;
import java.io.IOException;
import java.lang.management.ManagementFactory;
import java.lang.reflect.Method;
import java.nio.file.Path;
-import javax.management.JMX;
-import javax.management.MBeanServer;
-import javax.management.ObjectName;
-
/**
* Utility for creating .hprof memory heap snapshots.
*/
diff --git a/spark-common/src/main/java/me/lucko/spark/memory/HeapDumpSummary.java b/spark-common/src/main/java/me/lucko/spark/common/memory/HeapDumpSummary.java
index 402b89e..f3f62db 100644
--- a/spark-common/src/main/java/me/lucko/spark/memory/HeapDumpSummary.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/memory/HeapDumpSummary.java
@@ -18,12 +18,14 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-package me.lucko.spark.memory;
+package me.lucko.spark.common.memory;
import com.google.gson.stream.JsonWriter;
+import me.lucko.spark.common.util.TypeDescriptors;
-import me.lucko.spark.util.TypeDescriptors;
-
+import javax.management.JMX;
+import javax.management.MBeanServer;
+import javax.management.ObjectName;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
@@ -38,10 +40,6 @@ import java.util.regex.Pattern;
import java.util.stream.Collectors;
import java.util.zip.GZIPOutputStream;
-import javax.management.JMX;
-import javax.management.MBeanServer;
-import javax.management.ObjectName;
-
/**
* Represents a "heap dump summary" from the VM.
*
diff --git a/spark-common/src/main/java/me/lucko/spark/monitor/GarbageCollectionMonitor.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/gc/GarbageCollectionMonitor.java
index 906ca07..93a5fd8 100644
--- a/spark-common/src/main/java/me/lucko/spark/monitor/GarbageCollectionMonitor.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/gc/GarbageCollectionMonitor.java
@@ -18,29 +18,26 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-package me.lucko.spark.monitor;
+package me.lucko.spark.common.monitor.gc;
import com.sun.management.GarbageCollectionNotificationInfo;
-import java.lang.management.GarbageCollectorMXBean;
-import java.lang.management.ManagementFactory;
-import java.util.ArrayList;
-import java.util.List;
-
import javax.management.ListenerNotFoundException;
import javax.management.Notification;
import javax.management.NotificationEmitter;
import javax.management.NotificationListener;
import javax.management.openmbean.CompositeData;
+import java.lang.management.GarbageCollectorMXBean;
+import java.lang.management.ManagementFactory;
+import java.util.ArrayList;
+import java.util.List;
public class GarbageCollectionMonitor implements NotificationListener, AutoCloseable {
- private final TickMonitor tickMonitor;
+ private final List<Listener> listeners = new ArrayList<>();
private final List<NotificationEmitter> emitters = new ArrayList<>();
- public GarbageCollectionMonitor(TickMonitor tickMonitor) {
- this.tickMonitor = tickMonitor;
-
+ public GarbageCollectionMonitor() {
List<GarbageCollectorMXBean> beans = ManagementFactory.getGarbageCollectorMXBeans();
for (GarbageCollectorMXBean bean : beans) {
if (!(bean instanceof NotificationEmitter)) {
@@ -53,6 +50,14 @@ public class GarbageCollectionMonitor implements NotificationListener, AutoClose
}
}
+ public void addListener(Listener listener) {
+ this.listeners.add(listener);
+ }
+
+ public void removeListener(Listener listener) {
+ this.listeners.remove(listener);
+ }
+
@Override
public void handleNotification(Notification notification, Object handback) {
if (!notification.getType().equals(GarbageCollectionNotificationInfo.GARBAGE_COLLECTION_NOTIFICATION)) {
@@ -60,7 +65,9 @@ public class GarbageCollectionMonitor implements NotificationListener, AutoClose
}
GarbageCollectionNotificationInfo data = GarbageCollectionNotificationInfo.from((CompositeData) notification.getUserData());
- this.tickMonitor.onGc(data);
+ for (Listener listener : this.listeners) {
+ listener.onGc(data);
+ }
}
@Override
@@ -73,5 +80,11 @@ public class GarbageCollectionMonitor implements NotificationListener, AutoClose
}
}
this.emitters.clear();
+ this.listeners.clear();
+ }
+
+ public interface Listener {
+ void onGc(GarbageCollectionNotificationInfo data);
}
+
}
diff --git a/spark-common/src/main/java/me/lucko/spark/monitor/TickMonitor.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TickMonitor.java
index abb6148..034e876 100644
--- a/spark-common/src/main/java/me/lucko/spark/monitor/TickMonitor.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TickMonitor.java
@@ -18,16 +18,16 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-package me.lucko.spark.monitor;
+package me.lucko.spark.common.monitor.tick;
import com.sun.management.GarbageCollectionNotificationInfo;
-
-import me.lucko.spark.sampler.TickCounter;
+import me.lucko.spark.common.monitor.gc.GarbageCollectionMonitor;
+import me.lucko.spark.common.sampler.TickCounter;
import java.text.DecimalFormat;
import java.util.DoubleSummaryStatistics;
-public abstract class TickMonitor implements Runnable, AutoCloseable {
+public abstract class TickMonitor implements TickCounter.TickTask, GarbageCollectionMonitor.Listener, AutoCloseable {
private static final DecimalFormat df = new DecimalFormat("#.##");
private final TickCounter tickCounter;
@@ -44,24 +44,25 @@ public abstract class TickMonitor implements Runnable, AutoCloseable {
this.tickCounter = tickCounter;
this.percentageChangeThreshold = percentageChangeThreshold;
- this.tickCounter.start();
- this.tickCounter.addTickTask(this);
-
- this.garbageCollectionMonitor = monitorGc ? new GarbageCollectionMonitor(this) : null;
+ if (monitorGc) {
+ this.garbageCollectionMonitor = new GarbageCollectionMonitor();
+ this.garbageCollectionMonitor.addListener(this);
+ } else {
+ this.garbageCollectionMonitor = null;
+ }
}
protected abstract void sendMessage(String message);
@Override
public void close() {
- this.tickCounter.close();
if (this.garbageCollectionMonitor != null) {
this.garbageCollectionMonitor.close();
}
}
@Override
- public void run() {
+ public void onTick(TickCounter counter) {
double now = ((double) System.nanoTime()) / 1000000d;
// init
@@ -110,13 +111,14 @@ public abstract class TickMonitor implements Runnable, AutoCloseable {
double percentageChange = (increase * 100d) / this.avg;
if (percentageChange > this.percentageChangeThreshold) {
- sendMessage("&7Tick &8#" + this.tickCounter.getCurrentTick() + " &7lasted &b" + df.format(diff) +
+ sendMessage("&7Tick &8#" + counter.getCurrentTick() + " &7lasted &b" + df.format(diff) +
"&7 ms. (&b" + df.format(percentageChange) + "% &7increase from average)");
}
}
}
- void onGc(GarbageCollectionNotificationInfo data) {
+ @Override
+ public void onGc(GarbageCollectionNotificationInfo data) {
if (this.state == State.SETUP) {
// set lastTickTime to zero so this tick won't be counted in the average
this.lastTickTime = 0;
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TpsCalculator.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TpsCalculator.java
new file mode 100644
index 0000000..2f3af3e
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TpsCalculator.java
@@ -0,0 +1,170 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.monitor.tick;
+
+import me.lucko.spark.common.sampler.TickCounter;
+
+import java.math.BigDecimal;
+import java.math.RoundingMode;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * Calculates the server's TPS (ticks per second) rate.
+ *
+ * <p>The code used to calculate the TPS is the same as the code used by the Minecraft server itself.
+ * This means that this class will output values the same as the /tps command.</p>
+ *
+ * <p>We calculate our own values instead of pulling them from the server for two reasons. Firstly,
+ * it's easier - pulling from the server requires reflection code on each of the platforms, we'd
+ * rather avoid that. Secondly, it allows us to generate rolling averages over a shorter period of
+ * time.</p>
+ */
+public class TpsCalculator implements TickCounter.TickTask {
+
+ private static final long SEC_IN_NANO = TimeUnit.SECONDS.toNanos(1);
+ private static final int TPS = 20;
+ private static final int SAMPLE_INTERVAL = 20;
+ private static final BigDecimal TPS_BASE = new BigDecimal(SEC_IN_NANO).multiply(new BigDecimal((long) SAMPLE_INTERVAL));
+
+ private final RollingAverage tps5S = new RollingAverage(5);
+ private final RollingAverage tps10S = new RollingAverage(10);
+ private final RollingAverage tps1M = new RollingAverage(60);
+ private final RollingAverage tps5M = new RollingAverage(60 * 5);
+ private final RollingAverage tps15M = new RollingAverage(60 * 15);
+
+ private final RollingAverage[] averages = new RollingAverage[]{
+ this.tps5S, this.tps10S, this.tps1M, this.tps5M, this.tps15M
+ };
+
+ private long last = 0;
+
+ // called every tick
+ @Override
+ public void onTick(TickCounter counter) {
+ if (counter.getCurrentTick() % SAMPLE_INTERVAL != 0) {
+ return;
+ }
+
+ long now = System.nanoTime();
+
+ if (this.last == 0) {
+ this.last = now;
+ return;
+ }
+
+ long diff = now - this.last;
+ BigDecimal currentTps = TPS_BASE.divide(new BigDecimal(diff), 30, RoundingMode.HALF_UP);
+
+ for (RollingAverage rollingAverage : this.averages) {
+ rollingAverage.add(currentTps, diff);
+ }
+
+ this.last = now;
+ }
+
+ public RollingAverage avg5Sec() {
+ return this.tps5S;
+ }
+
+ public RollingAverage avg10Sec() {
+ return this.tps10S;
+ }
+
+ public RollingAverage avg1Min() {
+ return this.tps1M;
+ }
+
+ public RollingAverage avg5Min() {
+ return this.tps5M;
+ }
+
+ public RollingAverage avg15Min() {
+ return this.tps15M;
+ }
+
+ public String toFormattedString() {
+ return formatTps(this.tps5S.getAverage()) + ", " +
+ formatTps(this.tps10S.getAverage()) + ", " +
+ formatTps(this.tps1M.getAverage()) + ", " +
+ formatTps(this.tps5M.getAverage()) + ", " +
+ formatTps(this.tps15M.getAverage());
+ }
+
+ public static String formatTps(double tps) {
+ StringBuilder sb = new StringBuilder();
+ if (tps > 18.0) {
+ sb.append("&a");
+ } else if (tps > 16.0) {
+ sb.append("&e");
+ } else {
+ sb.append("&c");
+ }
+ if (tps > 20.0) {
+ sb.append('*');
+ }
+ return sb.append(Math.min(Math.round(tps * 100.0) / 100.0, 20.0)).toString();
+ }
+
+ /**
+     * Rolling average calculator.
+ *
+ * <p>This code is taken from PaperMC/Paper, licensed under MIT.</p>
+ *
+ * @author aikar (PaperMC) https://github.com/PaperMC/Paper/blob/master/Spigot-Server-Patches/0021-Further-improve-server-tick-loop.patch
+ */
+ public static final class RollingAverage {
+ private final int size;
+ private long time;
+ private BigDecimal total;
+ private int index = 0;
+ private final BigDecimal[] samples;
+ private final long[] times;
+
+ RollingAverage(int size) {
+ this.size = size;
+ this.time = size * SEC_IN_NANO;
+ this.total = new BigDecimal((long) TPS).multiply(new BigDecimal(SEC_IN_NANO)).multiply(new BigDecimal((long) size));
+ this.samples = new BigDecimal[size];
+ this.times = new long[size];
+ for (int i = 0; i < size; i++) {
+ this.samples[i] = new BigDecimal((long) TPS);
+ this.times[i] = SEC_IN_NANO;
+ }
+ }
+
+ public void add(BigDecimal x, long t) {
+ this.time -= this.times[this.index];
+ this.total = this.total.subtract(this.samples[this.index].multiply(new BigDecimal(this.times[this.index])));
+ this.samples[this.index] = x;
+ this.times[this.index] = t;
+ this.time += t;
+ this.total = this.total.add(x.multiply(new BigDecimal(t)));
+ if (++this.index == this.size) {
+ this.index = 0;
+ }
+ }
+
+ public double getAverage() {
+ return this.total.divide(new BigDecimal(this.time), 30, RoundingMode.HALF_UP).doubleValue();
+ }
+ }
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/sampler/Sampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
index 6777770..d504247 100644
--- a/spark-common/src/main/java/me/lucko/spark/sampler/Sampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
@@ -19,15 +19,14 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-package me.lucko.spark.sampler;
+package me.lucko.spark.common.sampler;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import com.google.gson.stream.JsonWriter;
-
-import me.lucko.spark.sampler.aggregator.DataAggregator;
-import me.lucko.spark.sampler.aggregator.SimpleDataAggregator;
-import me.lucko.spark.sampler.aggregator.TickedDataAggregator;
-import me.lucko.spark.sampler.node.ThreadNode;
+import me.lucko.spark.common.sampler.aggregator.DataAggregator;
+import me.lucko.spark.common.sampler.aggregator.SimpleDataAggregator;
+import me.lucko.spark.common.sampler.aggregator.TickedDataAggregator;
+import me.lucko.spark.common.sampler.node.ThreadNode;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
diff --git a/spark-common/src/main/java/me/lucko/spark/sampler/SamplerBuilder.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java
index bf9dc04..4ce69df 100644
--- a/spark-common/src/main/java/me/lucko/spark/sampler/SamplerBuilder.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java
@@ -18,7 +18,7 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-package me.lucko.spark.sampler;
+package me.lucko.spark.common.sampler;
import java.util.concurrent.TimeUnit;
diff --git a/spark-common/src/main/java/me/lucko/spark/sampler/ThreadDumper.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java
index 5b68eaf..14938ac 100644
--- a/spark-common/src/main/java/me/lucko/spark/sampler/ThreadDumper.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java
@@ -19,9 +19,9 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-package me.lucko.spark.sampler;
+package me.lucko.spark.common.sampler;
-import me.lucko.spark.util.ThreadFinder;
+import me.lucko.spark.common.util.ThreadFinder;
import java.lang.management.ThreadInfo;
import java.lang.management.ThreadMXBean;
diff --git a/spark-common/src/main/java/me/lucko/spark/sampler/ThreadGrouper.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java
index 3f1be33..f53800a 100644
--- a/spark-common/src/main/java/me/lucko/spark/sampler/ThreadGrouper.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java
@@ -18,7 +18,7 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-package me.lucko.spark.sampler;
+package me.lucko.spark.common.sampler;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
diff --git a/spark-common/src/main/java/me/lucko/spark/sampler/TickCounter.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/TickCounter.java
index 059e420..aa839ba 100644
--- a/spark-common/src/main/java/me/lucko/spark/sampler/TickCounter.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/TickCounter.java
@@ -18,7 +18,7 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-package me.lucko.spark.sampler;
+package me.lucko.spark.common.sampler;
/**
* A hook with the game's "tick loop".
@@ -48,13 +48,17 @@ public interface TickCounter extends AutoCloseable {
*
* @param runnable the task
*/
- void addTickTask(Runnable runnable);
+ void addTickTask(TickTask runnable);
/**
* Removes a tick task
*
* @param runnable the task
*/
- void removeTickTask(Runnable runnable);
+ void removeTickTask(TickTask runnable);
+
+ interface TickTask {
+ void onTick(TickCounter counter);
+ }
}
diff --git a/spark-common/src/main/java/me/lucko/spark/sampler/aggregator/DataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/DataAggregator.java
index 0e38eb4..8c65c2d 100644
--- a/spark-common/src/main/java/me/lucko/spark/sampler/aggregator/DataAggregator.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/DataAggregator.java
@@ -18,9 +18,9 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-package me.lucko.spark.sampler.aggregator;
+package me.lucko.spark.common.sampler.aggregator;
-import me.lucko.spark.sampler.node.ThreadNode;
+import me.lucko.spark.common.sampler.node.ThreadNode;
import java.util.Map;
diff --git a/spark-common/src/main/java/me/lucko/spark/sampler/aggregator/SimpleDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/SimpleDataAggregator.java
index a72b47f..8fbd03f 100644
--- a/spark-common/src/main/java/me/lucko/spark/sampler/aggregator/SimpleDataAggregator.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/SimpleDataAggregator.java
@@ -18,11 +18,11 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-package me.lucko.spark.sampler.aggregator;
+package me.lucko.spark.common.sampler.aggregator;
-import me.lucko.spark.sampler.ThreadGrouper;
-import me.lucko.spark.sampler.node.AbstractNode;
-import me.lucko.spark.sampler.node.ThreadNode;
+import me.lucko.spark.common.sampler.ThreadGrouper;
+import me.lucko.spark.common.sampler.node.AbstractNode;
+import me.lucko.spark.common.sampler.node.ThreadNode;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
diff --git a/spark-common/src/main/java/me/lucko/spark/sampler/aggregator/TickedDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/TickedDataAggregator.java
index ef568c8..8f8124b 100644
--- a/spark-common/src/main/java/me/lucko/spark/sampler/aggregator/TickedDataAggregator.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/TickedDataAggregator.java
@@ -18,12 +18,12 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-package me.lucko.spark.sampler.aggregator;
+package me.lucko.spark.common.sampler.aggregator;
-import me.lucko.spark.sampler.ThreadGrouper;
-import me.lucko.spark.sampler.TickCounter;
-import me.lucko.spark.sampler.node.AbstractNode;
-import me.lucko.spark.sampler.node.ThreadNode;
+import me.lucko.spark.common.sampler.ThreadGrouper;
+import me.lucko.spark.common.sampler.TickCounter;
+import me.lucko.spark.common.sampler.node.AbstractNode;
+import me.lucko.spark.common.sampler.node.ThreadNode;
import java.util.ArrayList;
import java.util.List;
diff --git a/spark-common/src/main/java/me/lucko/spark/sampler/node/AbstractNode.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java
index 859014f..5cfc0f2 100644
--- a/spark-common/src/main/java/me/lucko/spark/sampler/node/AbstractNode.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java
@@ -19,7 +19,7 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-package me.lucko.spark.sampler.node;
+package me.lucko.spark.common.sampler.node;
import com.google.gson.stream.JsonWriter;
diff --git a/spark-common/src/main/java/me/lucko/spark/sampler/node/StackTraceNode.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java
index 8cbcd0f..c4e7ac4 100644
--- a/spark-common/src/main/java/me/lucko/spark/sampler/node/StackTraceNode.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java
@@ -19,7 +19,7 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-package me.lucko.spark.sampler.node;
+package me.lucko.spark.common.sampler.node;
import com.google.gson.stream.JsonWriter;
diff --git a/spark-common/src/main/java/me/lucko/spark/sampler/node/ThreadNode.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java
index 2acce21..4e8714c 100644
--- a/spark-common/src/main/java/me/lucko/spark/sampler/node/ThreadNode.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java
@@ -18,7 +18,7 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-package me.lucko.spark.sampler.node;
+package me.lucko.spark.common.sampler.node;
import com.google.gson.stream.JsonWriter;
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/AbstractHttpClient.java b/spark-common/src/main/java/me/lucko/spark/common/util/AbstractHttpClient.java
new file mode 100644
index 0000000..1ff169d
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/AbstractHttpClient.java
@@ -0,0 +1,45 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.util;
+
+import okhttp3.OkHttpClient;
+import okhttp3.Request;
+import okhttp3.Response;
+
+import java.io.IOException;
+
+public class AbstractHttpClient {
+
+ /** The http client */
+ protected final OkHttpClient okHttp;
+
+ public AbstractHttpClient(OkHttpClient okHttp) {
+ this.okHttp = okHttp;
+ }
+
+ protected Response makeHttpRequest(Request request) throws IOException {
+ Response response = this.okHttp.newCall(request).execute();
+ if (!response.isSuccessful()) {
+ throw new RuntimeException("Request was unsuccessful: " + response.code() + " - " + response.message());
+ }
+ return response;
+ }
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/BytebinClient.java b/spark-common/src/main/java/me/lucko/spark/common/util/BytebinClient.java
new file mode 100644
index 0000000..ff8f4e3
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/BytebinClient.java
@@ -0,0 +1,149 @@
+/*
+ * This file is part of bytebin, licensed under the MIT License.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+package me.lucko.spark.common.util;
+
+import okhttp3.MediaType;
+import okhttp3.OkHttpClient;
+import okhttp3.Request;
+import okhttp3.RequestBody;
+import okhttp3.Response;
+
+import java.io.IOException;
+
+/**
+ * Utility for posting content to bytebin.
+ */
+public class BytebinClient extends AbstractHttpClient {
+
+ /** The bytebin URL */
+ private final String url;
+ /** The client user agent */
+ private final String userAgent;
+
+ /**
+ * Creates a new bytebin instance
+ *
+ * @param url the bytebin url
+ * @param userAgent the client user agent string
+ */
+ public BytebinClient(OkHttpClient okHttpClient, String url, String userAgent) {
+ super(okHttpClient);
+ if (url.endsWith("/")) {
+ this.url = url;
+ } else {
+ this.url = url + "/";
+ }
+ this.userAgent = userAgent;
+ }
+
+ /**
+ * POSTs GZIP compressed content to bytebin.
+ *
+ * @param buf the compressed content
+ * @param contentType the type of the content
+ * @param allowModification if the paste should be modifiable
+ * @return the key of the resultant content
+ * @throws IOException if an error occurs
+ */
+ public Content postContent(byte[] buf, MediaType contentType, boolean allowModification) throws IOException {
+ RequestBody body = RequestBody.create(contentType, buf);
+
+ Request.Builder requestBuilder = new Request.Builder()
+ .url(this.url + "post")
+ .header("User-Agent", this.userAgent)
+ .header("Content-Encoding", "gzip");
+
+ if (allowModification) {
+ requestBuilder.header("Allow-Modification", "true");
+ }
+
+ Request request = requestBuilder.post(body).build();
+ try (Response response = makeHttpRequest(request)) {
+ String key = response.header("Location");
+ if (key == null) {
+ throw new IllegalStateException("Key not returned");
+ }
+
+ if (allowModification) {
+ String modificationKey = response.header("Modification-Key");
+ if (modificationKey == null) {
+ throw new IllegalStateException("Modification key not returned");
+ }
+ return new Content(key, modificationKey);
+ } else {
+ return new Content(key);
+ }
+ }
+ }
+
+ /**
+ * PUTs modified GZIP compressed content to bytebin in place of existing content.
+ *
+ * @param existingContent the existing content
+ * @param buf the compressed content to put
+ * @param contentType the type of the content
+ * @throws IOException if an error occurs
+ */
+ public void modifyContent(Content existingContent, byte[] buf, MediaType contentType) throws IOException {
+ if (!existingContent.modifiable) {
+ throw new IllegalArgumentException("Existing content is not modifiable");
+ }
+
+ RequestBody body = RequestBody.create(contentType, buf);
+
+ Request.Builder requestBuilder = new Request.Builder()
+ .url(this.url + existingContent.key())
+ .header("User-Agent", this.userAgent)
+ .header("Content-Encoding", "gzip")
+ .header("Modification-Key", existingContent.modificationKey);
+
+ Request request = requestBuilder.put(body).build();
+ makeHttpRequest(request).close();
+ }
+
+ public static final class Content {
+ private final String key;
+ private final boolean modifiable;
+ private final String modificationKey;
+
+ Content(String key) {
+ this.key = key;
+ this.modifiable = false;
+ this.modificationKey = null;
+ }
+
+ Content(String key, String modificationKey) {
+ this.key = key;
+ this.modifiable = true;
+ this.modificationKey = modificationKey;
+ }
+
+ public String key() {
+ return this.key;
+ }
+ }
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/util/ThreadFinder.java b/spark-common/src/main/java/me/lucko/spark/common/util/ThreadFinder.java
index 8ba7b10..cc0722a 100644
--- a/spark-common/src/main/java/me/lucko/spark/util/ThreadFinder.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/ThreadFinder.java
@@ -18,7 +18,7 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-package me.lucko.spark.util;
+package me.lucko.spark.common.util;
import java.util.Arrays;
import java.util.Objects;
diff --git a/spark-common/src/main/java/me/lucko/spark/util/TypeDescriptors.java b/spark-common/src/main/java/me/lucko/spark/common/util/TypeDescriptors.java
index 20dbe17..a232e77 100644
--- a/spark-common/src/main/java/me/lucko/spark/util/TypeDescriptors.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/TypeDescriptors.java
@@ -18,7 +18,7 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-package me.lucko.spark.util;
+package me.lucko.spark.common.util;
/**
* Utilities for working with Java type descriptors.
diff --git a/spark-common/src/main/java/me/lucko/spark/util/BytebinClient.java b/spark-common/src/main/java/me/lucko/spark/util/BytebinClient.java
deleted file mode 100644
index 00e1c69..0000000
--- a/spark-common/src/main/java/me/lucko/spark/util/BytebinClient.java
+++ /dev/null
@@ -1,93 +0,0 @@
-/*
- * This file is part of bytebin, licensed under the MIT License.
- *
- * Copyright (c) lucko (Luck) <luck@lucko.me>
- * Copyright (c) contributors
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in all
- * copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- * SOFTWARE.
- */
-package me.lucko.spark.util;
-
-import okhttp3.MediaType;
-import okhttp3.OkHttpClient;
-import okhttp3.Request;
-import okhttp3.RequestBody;
-import okhttp3.Response;
-
-import java.io.IOException;
-
-/**
- * Utility for posting content to bytebin.
- */
-public class BytebinClient {
-
- /** The bytebin URL */
- private final String url;
- /** The client user agent */
- private final String userAgent;
- /** The http client */
- protected final OkHttpClient okHttp;
-
- /**
- * Creates a new bytebin instance
- *
- * @param url the bytebin url
- * @param userAgent the client user agent string
- */
- public BytebinClient(String url, String userAgent) {
- if (url.endsWith("/")) {
- this.url = url + "post";
- } else {
- this.url = url + "/post";
- }
- this.userAgent = userAgent;
- this.okHttp = new OkHttpClient();
- }
-
- /**
- * Posts GZIP compressed content to bytebin.
- *
- * @param buf the compressed content
- * @param contentType the type of the content
- * @return the key of the resultant content
- * @throws IOException if an error occurs
- */
- public String postGzippedContent(byte[] buf, MediaType contentType) throws IOException {
- RequestBody body = RequestBody.create(contentType, buf);
-
- Request.Builder requestBuilder = new Request.Builder()
- .url(this.url)
- .header("User-Agent", this.userAgent)
- .header("Content-Encoding", "gzip")
- .post(body);
-
- Request request = requestBuilder.build();
- try (Response response = makeHttpRequest(request)) {
- return response.header("Location");
- }
- }
-
- protected Response makeHttpRequest(Request request) throws IOException {
- Response response = this.okHttp.newCall(request).execute();
- if (!response.isSuccessful()) {
- throw new RuntimeException("Request was unsuccessful: " + response.code() + " - " + response.message());
- }
- return response;
- }
-}
diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeClientSparkPlatform.java b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeClientSparkPlugin.java
index c383636..2c6c2fb 100644
--- a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeClientSparkPlatform.java
+++ b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeClientSparkPlugin.java
@@ -20,35 +20,35 @@
package me.lucko.spark.forge;
-import me.lucko.spark.sampler.TickCounter;
-
+import me.lucko.spark.common.sampler.TickCounter;
import net.minecraft.client.Minecraft;
import net.minecraft.command.ICommandSender;
import net.minecraft.server.MinecraftServer;
-import net.minecraft.util.text.ITextComponent;
import net.minecraftforge.client.ClientCommandHandler;
import net.minecraftforge.fml.common.gameevent.TickEvent;
import java.util.Collections;
+import java.util.HashSet;
import java.util.List;
+import java.util.Set;
-public class ForgeClientSparkPlatform extends ForgeSparkPlatform {
+public class ForgeClientSparkPlugin extends ForgeSparkPlugin {
public static void register(SparkForgeMod mod) {
- ClientCommandHandler.instance.registerCommand(new ForgeClientSparkPlatform(mod));
+ ClientCommandHandler.instance.registerCommand(new ForgeClientSparkPlugin(mod));
}
- public ForgeClientSparkPlatform(SparkForgeMod mod) {
+ public ForgeClientSparkPlugin(SparkForgeMod mod) {
super(mod);
}
@Override
- protected void broadcast(ITextComponent msg) {
- Minecraft.getMinecraft().player.sendMessage(msg);
+ public Set<ICommandSender> getSenders() {
+ return new HashSet<>(Collections.singleton(Minecraft.getMinecraft().player));
}
@Override
- public TickCounter newTickCounter() {
+ public TickCounter createTickCounter() {
return new ForgeTickCounter(TickEvent.Type.CLIENT);
}
diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeServerSparkPlatform.java b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeServerSparkPlugin.java
index a5c6c01..98740c0 100644
--- a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeServerSparkPlatform.java
+++ b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeServerSparkPlugin.java
@@ -20,38 +20,32 @@
package me.lucko.spark.forge;
-import me.lucko.spark.sampler.TickCounter;
-
+import me.lucko.spark.common.sampler.TickCounter;
import net.minecraft.command.ICommandSender;
-import net.minecraft.entity.player.EntityPlayerMP;
import net.minecraft.server.MinecraftServer;
-import net.minecraft.util.text.ITextComponent;
import net.minecraftforge.fml.common.FMLCommonHandler;
import net.minecraftforge.fml.common.gameevent.TickEvent;
import java.util.Collections;
+import java.util.HashSet;
import java.util.List;
+import java.util.Set;
-public class ForgeServerSparkPlatform extends ForgeSparkPlatform {
-
- public ForgeServerSparkPlatform(SparkForgeMod mod) {
+public class ForgeServerSparkPlugin extends ForgeSparkPlugin {
+ public ForgeServerSparkPlugin(SparkForgeMod mod) {
super(mod);
}
@Override
- protected void broadcast(ITextComponent msg) {
- FMLCommonHandler.instance().getMinecraftServerInstance().sendMessage(msg);
-
- List<EntityPlayerMP> players = FMLCommonHandler.instance().getMinecraftServerInstance().getPlayerList().getPlayers();
- for (EntityPlayerMP player : players) {
- if (player.canUseCommand(4, "spark")) {
- player.sendMessage(msg);
- }
- }
+ public Set<ICommandSender> getSenders() {
+ MinecraftServer mcServer = FMLCommonHandler.instance().getMinecraftServerInstance();
+ Set<ICommandSender> senders = new HashSet<>(mcServer.getPlayerList().getPlayers());
+ senders.add(mcServer);
+ return senders;
}
@Override
- public TickCounter newTickCounter() {
+ public TickCounter createTickCounter() {
return new ForgeTickCounter(TickEvent.Type.SERVER);
}
diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeSparkPlatform.java b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeSparkPlugin.java
index 1f4c173..609894d 100644
--- a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeSparkPlatform.java
+++ b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeSparkPlugin.java
@@ -21,10 +21,9 @@
package me.lucko.spark.forge;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
-
import me.lucko.spark.common.SparkPlatform;
-import me.lucko.spark.sampler.ThreadDumper;
-
+import me.lucko.spark.common.SparkPlugin;
+import me.lucko.spark.common.sampler.ThreadDumper;
import net.kyori.text.TextComponent;
import net.kyori.text.serializer.ComponentSerializers;
import net.minecraft.command.ICommand;
@@ -38,25 +37,27 @@ import net.minecraft.util.text.TextFormatting;
import net.minecraft.util.text.event.ClickEvent;
import net.minecraftforge.fml.common.Mod;
+import javax.annotation.Nullable;
import java.nio.file.Path;
import java.util.Collections;
import java.util.List;
-import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
-
-import javax.annotation.Nullable;
+import java.util.concurrent.ScheduledExecutorService;
@SuppressWarnings("NullableProblems")
-public abstract class ForgeSparkPlatform extends SparkPlatform<ICommandSender> implements ICommand {
+public abstract class ForgeSparkPlugin implements SparkPlugin<ICommandSender>, ICommand {
private final SparkForgeMod mod;
+ private final ScheduledExecutorService scheduler;
+ private final SparkPlatform<ICommandSender> platform;
- private final ExecutorService worker = Executors.newSingleThreadExecutor(
- new ThreadFactoryBuilder().setNameFormat("spark-forge-async-worker").build()
- );
-
- protected ForgeSparkPlatform(SparkForgeMod mod) {
+ protected ForgeSparkPlugin(SparkForgeMod mod) {
this.mod = mod;
+ this.scheduler = Executors.newSingleThreadScheduledExecutor(
+ new ThreadFactoryBuilder().setNameFormat("spark-forge-async-worker").build()
+ );
+ this.platform = new SparkPlatform<>(this);
+ this.platform.enable();
}
@Override
@@ -70,38 +71,27 @@ public abstract class ForgeSparkPlatform extends SparkPlatform<ICommandSender> i
}
@SuppressWarnings("deprecation")
- protected ITextComponent colorize(String message) {
- TextComponent component = ComponentSerializers.LEGACY.deserialize(message, '&');
- return ITextComponent.Serializer.jsonToComponent(ComponentSerializers.JSON.serialize(component));
- }
-
- protected abstract void broadcast(ITextComponent msg);
-
@Override
public void sendMessage(ICommandSender sender, String message) {
- sender.sendMessage(colorize(message));
- }
-
- @Override
- public void sendMessage(String message) {
- ITextComponent msg = colorize(message);
- broadcast(msg);
+ TextComponent component = ComponentSerializers.LEGACY.deserialize(message, '&');
+ ITextComponent mcComponent = ITextComponent.Serializer.jsonToComponent(ComponentSerializers.JSON.serialize(component));
+ sender.sendMessage(mcComponent);
}
@Override
- public void sendLink(String url) {
+ public void sendLink(ICommandSender sender, String url) {
TextComponentString msg = new TextComponentString(url);
Style style = msg.getStyle();
style.setColor(TextFormatting.GRAY);
style.setClickEvent(new ClickEvent(ClickEvent.Action.OPEN_URL, url));
msg.setStyle(style);
- broadcast(msg);
+ sender.sendMessage(msg);
}
@Override
public void runAsync(Runnable r) {
- this.worker.execute(r);
+ this.scheduler.execute(r);
}
@Override
@@ -128,7 +118,7 @@ public abstract class ForgeSparkPlatform extends SparkPlatform<ICommandSender> i
return;
}
- executeCommand(sender, args);
+ this.platform.executeCommand(sender, args);
}
@Override
@@ -136,7 +126,7 @@ public abstract class ForgeSparkPlatform extends SparkPlatform<ICommandSender> i
if (!checkPermission(server, sender)) {
return Collections.emptyList();
}
- return tabCompleteCommand(sender, args);
+ return this.platform.tabCompleteCommand(sender, args);
}
@Override
diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeTickCounter.java b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeTickCounter.java
index 17f595b..de7d846 100644
--- a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeTickCounter.java
+++ b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeTickCounter.java
@@ -20,8 +20,7 @@
package me.lucko.spark.forge;
-import me.lucko.spark.sampler.TickCounter;
-
+import me.lucko.spark.common.sampler.TickCounter;
import net.minecraftforge.common.MinecraftForge;
import net.minecraftforge.fml.common.eventhandler.SubscribeEvent;
import net.minecraftforge.fml.common.gameevent.TickEvent;
@@ -32,7 +31,7 @@ import java.util.Set;
public class ForgeTickCounter implements TickCounter {
private final TickEvent.Type type;
- private final Set<Runnable> tasks = new HashSet<>();
+ private final Set<TickTask> tasks = new HashSet<>();
private int tick = 0;
public ForgeTickCounter(TickEvent.Type type) {
@@ -49,8 +48,8 @@ public class ForgeTickCounter implements TickCounter {
return;
}
- for (Runnable r : this.tasks){
- r.run();
+ for (TickTask r : this.tasks){
+ r.onTick(this);
}
this.tick++;
}
@@ -71,12 +70,12 @@ public class ForgeTickCounter implements TickCounter {
}
@Override
- public void addTickTask(Runnable runnable) {
+ public void addTickTask(TickTask runnable) {
this.tasks.add(runnable);
}
@Override
- public void removeTickTask(Runnable runnable) {
+ public void removeTickTask(TickTask runnable) {
this.tasks.remove(runnable);
}
}
diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/SparkForgeMod.java b/spark-forge/src/main/java/me/lucko/spark/forge/SparkForgeMod.java
index aa4f379..32dfd77 100644
--- a/spark-forge/src/main/java/me/lucko/spark/forge/SparkForgeMod.java
+++ b/spark-forge/src/main/java/me/lucko/spark/forge/SparkForgeMod.java
@@ -48,13 +48,13 @@ public class SparkForgeMod {
@EventHandler
public void init(FMLInitializationEvent e) {
if (FMLCommonHandler.instance().getSide() == Side.CLIENT) {
- ForgeClientSparkPlatform.register(this);
+ ForgeClientSparkPlugin.register(this);
}
}
@EventHandler
public void serverInit(FMLServerStartingEvent e) {
- e.registerServerCommand(new ForgeServerSparkPlatform(this));
+ e.registerServerCommand(new ForgeServerSparkPlugin(this));
}
public Path getConfigDirectory() {
diff --git a/spark-sponge/src/main/java/me/lucko/spark/sponge/SparkSpongePlugin.java b/spark-sponge/src/main/java/me/lucko/spark/sponge/SparkSpongePlugin.java
index 2abaf3f..6d0c56a 100644
--- a/spark-sponge/src/main/java/me/lucko/spark/sponge/SparkSpongePlugin.java
+++ b/spark-sponge/src/main/java/me/lucko/spark/sponge/SparkSpongePlugin.java
@@ -21,19 +21,18 @@
package me.lucko.spark.sponge;
import com.google.inject.Inject;
-
import me.lucko.spark.common.SparkPlatform;
-import me.lucko.spark.sampler.ThreadDumper;
-import me.lucko.spark.sampler.TickCounter;
-
+import me.lucko.spark.common.SparkPlugin;
+import me.lucko.spark.common.sampler.ThreadDumper;
+import me.lucko.spark.common.sampler.TickCounter;
import org.spongepowered.api.Game;
import org.spongepowered.api.command.CommandCallable;
import org.spongepowered.api.command.CommandResult;
import org.spongepowered.api.command.CommandSource;
import org.spongepowered.api.config.ConfigDir;
-import org.spongepowered.api.entity.living.player.Player;
import org.spongepowered.api.event.Listener;
import org.spongepowered.api.event.game.state.GameStartedServerEvent;
+import org.spongepowered.api.event.game.state.GameStoppingServerEvent;
import org.spongepowered.api.plugin.Dependency;
import org.spongepowered.api.plugin.Plugin;
import org.spongepowered.api.scheduler.AsynchronousExecutor;
@@ -45,14 +44,15 @@ import org.spongepowered.api.text.serializer.TextSerializers;
import org.spongepowered.api.world.Location;
import org.spongepowered.api.world.World;
+import javax.annotation.Nullable;
import java.net.MalformedURLException;
import java.net.URL;
import java.nio.file.Path;
import java.util.Collections;
+import java.util.HashSet;
import java.util.List;
import java.util.Optional;
-
-import javax.annotation.Nullable;
+import java.util.Set;
@Plugin(
id = "spark",
@@ -65,80 +65,13 @@ import javax.annotation.Nullable;
@Dependency(id = "spongeapi")
}
)
-public class SparkSpongePlugin implements CommandCallable {
+public class SparkSpongePlugin implements SparkPlugin<CommandSource> {
private final Game game;
private final Path configDirectory;
private final SpongeExecutorService asyncExecutor;
- private final SparkPlatform<CommandSource> sparkPlatform = new SparkPlatform<CommandSource>() {
- private Text colorize(String message) {
- return TextSerializers.FORMATTING_CODE.deserialize(message);
- }
-
- private void broadcast(Text msg) {
- SparkSpongePlugin.this.game.getServer().getConsole().sendMessage(msg);
- for (Player player : SparkSpongePlugin.this.game.getServer().getOnlinePlayers()) {
- if (player.hasPermission("spark")) {
- player.sendMessage(msg);
- }
- }
- }
-
- @Override
- public String getVersion() {
- return SparkSpongePlugin.class.getAnnotation(Plugin.class).version();
- }
-
- @Override
- public Path getPluginFolder() {
- return SparkSpongePlugin.this.configDirectory;
- }
-
- @Override
- public String getLabel() {
- return "spark";
- }
-
- @Override
- public void sendMessage(CommandSource sender, String message) {
- sender.sendMessage(colorize(message));
- }
-
- @Override
- public void sendMessage(String message) {
- Text msg = colorize(message);
- broadcast(msg);
- }
-
- @Override
- public void sendLink(String url) {
- try {
- Text msg = Text.builder(url)
- .color(TextColors.GRAY)
- .onClick(TextActions.openUrl(new URL(url)))
- .build();
- broadcast(msg);
- } catch (MalformedURLException e) {
- e.printStackTrace();
- }
- }
-
- @Override
- public void runAsync(Runnable r) {
- SparkSpongePlugin.this.asyncExecutor.execute(r);
- }
-
- @Override
- public ThreadDumper getDefaultThreadDumper() {
- return new ThreadDumper.Specific(new long[]{Thread.currentThread().getId()});
- }
-
- @Override
- public TickCounter newTickCounter() {
- return new SpongeTickCounter(SparkSpongePlugin.this);
- }
- };
+ private final SparkPlatform<CommandSource> platform = new SparkPlatform<>(this);
@Inject
public SparkSpongePlugin(Game game, @ConfigDir(sharedRoot = false) Path configDirectory, @AsynchronousExecutor SpongeExecutorService asyncExecutor) {
@@ -148,43 +81,112 @@ public class SparkSpongePlugin implements CommandCallable {
}
@Listener
- public void onServerStart(GameStartedServerEvent event) {
- this.game.getCommandManager().register(this, this, "spark");
+ public void onEnable(GameStartedServerEvent event) {
+ this.platform.enable();
+ this.game.getCommandManager().register(this, new SparkCommand(this), "spark");
+ }
+
+ @Listener
+ public void onDisable(GameStoppingServerEvent event) {
+ this.platform.disable();
}
@Override
- public CommandResult process(CommandSource source, String arguments) {
- if (!testPermission(source)) {
- source.sendMessage(Text.builder("You do not have permission to use this command.").color(TextColors.RED).build());
- return CommandResult.empty();
- }
+ public String getVersion() {
+ return SparkSpongePlugin.class.getAnnotation(Plugin.class).version();
+ }
- this.sparkPlatform.executeCommand(source, arguments.split(" "));
- return CommandResult.empty();
+ @Override
+ public Path getPluginFolder() {
+ return this.configDirectory;
}
@Override
- public List<String> getSuggestions(CommandSource source, String arguments, @Nullable Location<World> targetPosition) {
- return Collections.emptyList();
+ public String getLabel() {
+ return "spark";
}
@Override
- public boolean testPermission(CommandSource source) {
- return source.hasPermission("spark");
+ public Set<CommandSource> getSenders() {
+ Set<CommandSource> senders = new HashSet<>(this.game.getServer().getOnlinePlayers());
+ senders.add(this.game.getServer().getConsole());
+ return senders;
}
@Override
- public Optional<Text> getShortDescription(CommandSource source) {
- return Optional.of(Text.of("Main spark plugin command"));
+ public void sendMessage(CommandSource sender, String message) {
+ sender.sendMessage(TextSerializers.FORMATTING_CODE.deserialize(message));
+ }
+
+ @Override
+ public void sendLink(CommandSource sender, String url) {
+ try {
+ Text msg = Text.builder(url)
+ .color(TextColors.GRAY)
+ .onClick(TextActions.openUrl(new URL(url)))
+ .build();
+ sender.sendMessage(msg);
+ } catch (MalformedURLException e) {
+ e.printStackTrace();
+ }
}
@Override
- public Optional<Text> getHelp(CommandSource source) {
- return Optional.of(Text.of("Run '/spark' to view usage."));
+ public void runAsync(Runnable r) {
+ this.asyncExecutor.execute(r);
}
@Override
- public Text getUsage(CommandSource source) {
- return Text.of("Run '/spark' to view usage.");
+ public ThreadDumper getDefaultThreadDumper() {
+ return new ThreadDumper.Specific(new long[]{Thread.currentThread().getId()});
+ }
+
+ @Override
+ public TickCounter createTickCounter() {
+ return new SpongeTickCounter(this);
+ }
+
+ private static final class SparkCommand implements CommandCallable {
+ private final SparkSpongePlugin plugin;
+
+ private SparkCommand(SparkSpongePlugin plugin) {
+ this.plugin = plugin;
+ }
+
+ @Override
+ public CommandResult process(CommandSource source, String arguments) {
+ if (!testPermission(source)) {
+ source.sendMessage(Text.builder("You do not have permission to use this command.").color(TextColors.RED).build());
+ return CommandResult.empty();
+ }
+
+ this.plugin.platform.executeCommand(source, arguments.split(" "));
+ return CommandResult.empty();
+ }
+
+ @Override
+ public List<String> getSuggestions(CommandSource source, String arguments, @Nullable Location<World> targetPosition) {
+ return Collections.emptyList();
+ }
+
+ @Override
+ public boolean testPermission(CommandSource source) {
+ return source.hasPermission("spark");
+ }
+
+ @Override
+ public Optional<Text> getShortDescription(CommandSource source) {
+ return Optional.of(Text.of("Main spark plugin command"));
+ }
+
+ @Override
+ public Optional<Text> getHelp(CommandSource source) {
+ return Optional.of(Text.of("Run '/spark' to view usage."));
+ }
+
+ @Override
+ public Text getUsage(CommandSource source) {
+ return Text.of("Run '/spark' to view usage.");
+ }
}
}
diff --git a/spark-sponge/src/main/java/me/lucko/spark/sponge/SpongeTickCounter.java b/spark-sponge/src/main/java/me/lucko/spark/sponge/SpongeTickCounter.java
index 14b30b8..187f301 100644
--- a/spark-sponge/src/main/java/me/lucko/spark/sponge/SpongeTickCounter.java
+++ b/spark-sponge/src/main/java/me/lucko/spark/sponge/SpongeTickCounter.java
@@ -20,18 +20,16 @@
package me.lucko.spark.sponge;
-import me.lucko.spark.sampler.TickCounter;
-
-import org.spongepowered.api.scheduler.Task;
+import me.lucko.spark.common.sampler.TickCounter;
import java.util.HashSet;
import java.util.Set;
public class SpongeTickCounter implements TickCounter, Runnable {
private final SparkSpongePlugin plugin;
- private Task task;
+ private org.spongepowered.api.scheduler.Task task;
- private final Set<Runnable> tasks = new HashSet<>();
+ private final Set<TickTask> tasks = new HashSet<>();
private int tick = 0;
public SpongeTickCounter(SparkSpongePlugin plugin) {
@@ -40,15 +38,15 @@ public class SpongeTickCounter implements TickCounter, Runnable {
@Override
public void run() {
- for (Runnable r : this.tasks){
- r.run();
+ for (TickTask r : this.tasks){
+ r.onTick(this);
}
this.tick++;
}
@Override
public void start() {
- this.task = Task.builder().intervalTicks(1).name("spark-ticker").execute(this).submit(this.plugin);
+ this.task = org.spongepowered.api.scheduler.Task.builder().intervalTicks(1).name("spark-ticker").execute(this).submit(this.plugin);
}
@Override
@@ -62,12 +60,12 @@ public class SpongeTickCounter implements TickCounter, Runnable {
}
@Override
- public void addTickTask(Runnable runnable) {
+ public void addTickTask(TickTask runnable) {
this.tasks.add(runnable);
}
@Override
- public void removeTickTask(Runnable runnable) {
+ public void removeTickTask(TickTask runnable) {
this.tasks.remove(runnable);
}
}
diff --git a/spark-velocity/src/main/java/me/lucko/spark/velocity/SparkVelocityPlugin.java b/spark-velocity/src/main/java/me/lucko/spark/velocity/SparkVelocityPlugin.java
index cf5ed79..fef48e7 100644
--- a/spark-velocity/src/main/java/me/lucko/spark/velocity/SparkVelocityPlugin.java
+++ b/spark-velocity/src/main/java/me/lucko/spark/velocity/SparkVelocityPlugin.java
@@ -21,26 +21,29 @@
package me.lucko.spark.velocity;
import com.google.inject.Inject;
+import com.velocitypowered.api.command.Command;
import com.velocitypowered.api.command.CommandSource;
import com.velocitypowered.api.event.PostOrder;
import com.velocitypowered.api.event.Subscribe;
import com.velocitypowered.api.event.proxy.ProxyInitializeEvent;
+import com.velocitypowered.api.event.proxy.ProxyShutdownEvent;
import com.velocitypowered.api.plugin.Plugin;
import com.velocitypowered.api.plugin.annotation.DataDirectory;
-import com.velocitypowered.api.proxy.Player;
import com.velocitypowered.api.proxy.ProxyServer;
-
import me.lucko.spark.common.SparkPlatform;
-import me.lucko.spark.sampler.ThreadDumper;
-import me.lucko.spark.sampler.TickCounter;
-
-import net.kyori.text.Component;
+import me.lucko.spark.common.SparkPlugin;
+import me.lucko.spark.common.sampler.ThreadDumper;
+import me.lucko.spark.common.sampler.TickCounter;
import net.kyori.text.TextComponent;
import net.kyori.text.event.ClickEvent;
import net.kyori.text.format.TextColor;
import net.kyori.text.serializer.ComponentSerializers;
+import org.checkerframework.checker.nullness.qual.NonNull;
+import org.checkerframework.checker.optional.qual.MaybePresent;
import java.nio.file.Path;
+import java.util.HashSet;
+import java.util.Set;
@Plugin(
id = "spark",
@@ -49,92 +52,90 @@ import java.nio.file.Path;
description = "@desc@",
authors = {"Luck", "sk89q"}
)
-public class SparkVelocityPlugin {
+public class SparkVelocityPlugin implements SparkPlugin<CommandSource>, Command {
+
+ private final SparkPlatform<CommandSource> platform = new SparkPlatform<>(this);
private final ProxyServer proxy;
private final Path configDirectory;
- private final SparkPlatform<CommandSource> sparkPlatform = new SparkPlatform<CommandSource>() {
- @SuppressWarnings("deprecation")
- private TextComponent colorize(String message) {
- return ComponentSerializers.LEGACY.deserialize(message, '&');
- }
+ @Inject
+ public SparkVelocityPlugin(ProxyServer proxy, @DataDirectory Path configDirectory) {
+ this.proxy = proxy;
+ this.configDirectory = configDirectory;
+ }
- private void broadcast(Component msg) {
- SparkVelocityPlugin.this.proxy.getConsoleCommandSource().sendMessage(msg);
- for (Player player : SparkVelocityPlugin.this.proxy.getAllPlayers()) {
- if (player.hasPermission("spark")) {
- player.sendMessage(msg);
- }
- }
- }
+ @Subscribe(order = PostOrder.FIRST)
+ public void onEnable(ProxyInitializeEvent e) {
+ this.platform.enable();
+ this.proxy.getCommandManager().register(this, "sparkv", "sparkvelocity");
+ }
- @Override
- public String getVersion() {
- return SparkVelocityPlugin.class.getAnnotation(Plugin.class).version();
- }
+ @Subscribe(order = PostOrder.LAST)
+ public void onDisable(ProxyShutdownEvent e) {
+ this.platform.disable();
+ }
- @Override
- public Path getPluginFolder() {
- return SparkVelocityPlugin.this.configDirectory;
+ @Override
+ public void execute(@MaybePresent CommandSource sender, @NonNull @MaybePresent String[] args) {
+ if (!sender.hasPermission("spark")) {
+ TextComponent msg = TextComponent.builder("You do not have permission to use this command.").color(TextColor.RED).build();
+ sender.sendMessage(msg);
+ return;
}
- @Override
- public String getLabel() {
- return "sparkv";
- }
+ this.platform.executeCommand(sender, args);
+ }
- @Override
- public void sendMessage(CommandSource sender, String message) {
- sender.sendMessage(colorize(message));
- }
+ @Override
+ public String getVersion() {
+ return SparkVelocityPlugin.class.getAnnotation(Plugin.class).version();
+ }
- @Override
- public void sendMessage(String message) {
- broadcast(colorize(message));
- }
+ @Override
+ public Path getPluginFolder() {
+ return this.configDirectory;
+ }
- @Override
- public void sendLink(String url) {
- TextComponent msg = TextComponent.builder(url)
- .color(TextColor.GRAY)
- .clickEvent(new ClickEvent(ClickEvent.Action.OPEN_URL, url))
- .build();
- broadcast(msg);
- }
+ @Override
+ public String getLabel() {
+ return "sparkv";
+ }
- @Override
- public void runAsync(Runnable r) {
- SparkVelocityPlugin.this.proxy.getScheduler().buildTask(SparkVelocityPlugin.this, r).schedule();
- }
+ @Override
+ public Set<CommandSource> getSenders() {
+ Set<CommandSource> senders = new HashSet<>(this.proxy.getAllPlayers());
+ senders.add(this.proxy.getConsoleCommandSource());
+ return senders;
+ }
- @Override
- public ThreadDumper getDefaultThreadDumper() {
- return ThreadDumper.ALL;
- }
+ @SuppressWarnings("deprecation")
+ @Override
+ public void sendMessage(CommandSource sender, String message) {
+ sender.sendMessage(ComponentSerializers.LEGACY.deserialize(message, '&'));
+ }
- @Override
- public TickCounter newTickCounter() {
- throw new UnsupportedOperationException();
- }
- };
+ @Override
+ public void sendLink(CommandSource sender, String url) {
+ TextComponent msg = TextComponent.builder(url)
+ .color(TextColor.GRAY)
+ .clickEvent(new ClickEvent(ClickEvent.Action.OPEN_URL, url))
+ .build();
+ sender.sendMessage(msg);
+ }
- @Inject
- public SparkVelocityPlugin(ProxyServer proxy, @DataDirectory Path configDirectory) {
- this.proxy = proxy;
- this.configDirectory = configDirectory;
+ @Override
+ public void runAsync(Runnable r) {
+ this.proxy.getScheduler().buildTask(this, r).schedule();
}
- @Subscribe(order = PostOrder.FIRST)
- public void onEnable(ProxyInitializeEvent e) {
- this.proxy.getCommandManager().register((sender, args) -> {
- if (!sender.hasPermission("spark")) {
- TextComponent msg = TextComponent.builder("You do not have permission to use this command.").color(TextColor.RED).build();
- sender.sendMessage(msg);
- return;
- }
-
- SparkVelocityPlugin.this.sparkPlatform.executeCommand(sender, args);
- }, "sparkv", "sparkvelocity");
+ @Override
+ public ThreadDumper getDefaultThreadDumper() {
+ return ThreadDumper.ALL;
+ }
+
+ @Override
+ public TickCounter createTickCounter() {
+ return null;
}
}