From ecd4cec8545460a4fc4ca65b911c2503a00cd8e7 Mon Sep 17 00:00:00 2001 From: Luck Date: Tue, 16 Apr 2019 21:37:59 +0100 Subject: Lots of refactoring, add tps command --- .../me/lucko/spark/bukkit/BukkitTickCounter.java | 13 +- .../me/lucko/spark/bukkit/SparkBukkitPlugin.java | 129 +++++++------ .../spark/bungeecord/SparkBungeeCordPlugin.java | 126 +++++++------ .../java/me/lucko/spark/common/SparkPlatform.java | 105 ++++++----- .../java/me/lucko/spark/common/SparkPlugin.java | 49 +++++ .../me/lucko/spark/common/command/Command.java | 3 +- .../common/command/CommandResponseHandler.java | 75 ++++++++ .../spark/common/command/modules/MemoryModule.java | 120 ++++++------ .../common/command/modules/MonitoringModule.java | 51 ++++++ .../common/command/modules/SamplerModule.java | 79 ++++---- .../command/modules/TickMonitoringModule.java | 36 ++-- .../me/lucko/spark/common/memory/HeapDump.java | 77 ++++++++ .../lucko/spark/common/memory/HeapDumpSummary.java | 171 ++++++++++++++++++ .../monitor/gc/GarbageCollectionMonitor.java | 90 +++++++++ .../spark/common/monitor/tick/TickMonitor.java | 143 +++++++++++++++ .../spark/common/monitor/tick/TpsCalculator.java | 170 +++++++++++++++++ .../me/lucko/spark/common/sampler/Sampler.java | 200 ++++++++++++++++++++ .../lucko/spark/common/sampler/SamplerBuilder.java | 90 +++++++++ .../lucko/spark/common/sampler/ThreadDumper.java | 126 +++++++++++++ .../lucko/spark/common/sampler/ThreadGrouper.java | 72 ++++++++ .../me/lucko/spark/common/sampler/TickCounter.java | 64 +++++++ .../common/sampler/aggregator/DataAggregator.java | 54 ++++++ .../sampler/aggregator/SimpleDataAggregator.java | 82 +++++++++ .../sampler/aggregator/TickedDataAggregator.java | 184 +++++++++++++++++++ .../spark/common/sampler/node/AbstractNode.java | 143 +++++++++++++++ .../spark/common/sampler/node/StackTraceNode.java | 86 +++++++++ .../spark/common/sampler/node/ThreadNode.java | 44 +++++ .../spark/common/util/AbstractHttpClient.java | 45 +++++ 
.../me/lucko/spark/common/util/BytebinClient.java | 149 +++++++++++++++ .../me/lucko/spark/common/util/ThreadFinder.java | 60 ++++++ .../lucko/spark/common/util/TypeDescriptors.java | 81 +++++++++ .../main/java/me/lucko/spark/memory/HeapDump.java | 78 -------- .../me/lucko/spark/memory/HeapDumpSummary.java | 173 ------------------ .../spark/monitor/GarbageCollectionMonitor.java | 77 -------- .../java/me/lucko/spark/monitor/TickMonitor.java | 141 --------------- .../main/java/me/lucko/spark/sampler/Sampler.java | 201 --------------------- .../me/lucko/spark/sampler/SamplerBuilder.java | 90 --------- .../java/me/lucko/spark/sampler/ThreadDumper.java | 126 ------------- .../java/me/lucko/spark/sampler/ThreadGrouper.java | 72 -------- .../java/me/lucko/spark/sampler/TickCounter.java | 60 ------ .../spark/sampler/aggregator/DataAggregator.java | 54 ------ .../sampler/aggregator/SimpleDataAggregator.java | 82 --------- .../sampler/aggregator/TickedDataAggregator.java | 184 ------------------- .../me/lucko/spark/sampler/node/AbstractNode.java | 143 --------------- .../lucko/spark/sampler/node/StackTraceNode.java | 86 --------- .../me/lucko/spark/sampler/node/ThreadNode.java | 44 ----- .../java/me/lucko/spark/util/BytebinClient.java | 93 ---------- .../java/me/lucko/spark/util/ThreadFinder.java | 60 ------ .../java/me/lucko/spark/util/TypeDescriptors.java | 81 --------- .../spark/forge/ForgeClientSparkPlatform.java | 74 -------- .../lucko/spark/forge/ForgeClientSparkPlugin.java | 74 ++++++++ .../spark/forge/ForgeServerSparkPlatform.java | 77 -------- .../lucko/spark/forge/ForgeServerSparkPlugin.java | 71 ++++++++ .../me/lucko/spark/forge/ForgeSparkPlatform.java | 151 ---------------- .../me/lucko/spark/forge/ForgeSparkPlugin.java | 141 +++++++++++++++ .../me/lucko/spark/forge/ForgeTickCounter.java | 13 +- .../java/me/lucko/spark/forge/SparkForgeMod.java | 4 +- .../me/lucko/spark/sponge/SparkSpongePlugin.java | 192 ++++++++++---------- 
.../me/lucko/spark/sponge/SpongeTickCounter.java | 18 +- .../lucko/spark/velocity/SparkVelocityPlugin.java | 151 ++++++++-------- 60 files changed, 3092 insertions(+), 2636 deletions(-) create mode 100644 spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java create mode 100644 spark-common/src/main/java/me/lucko/spark/common/command/CommandResponseHandler.java create mode 100644 spark-common/src/main/java/me/lucko/spark/common/command/modules/MonitoringModule.java create mode 100644 spark-common/src/main/java/me/lucko/spark/common/memory/HeapDump.java create mode 100644 spark-common/src/main/java/me/lucko/spark/common/memory/HeapDumpSummary.java create mode 100644 spark-common/src/main/java/me/lucko/spark/common/monitor/gc/GarbageCollectionMonitor.java create mode 100644 spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TickMonitor.java create mode 100644 spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TpsCalculator.java create mode 100644 spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java create mode 100644 spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java create mode 100644 spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java create mode 100644 spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java create mode 100644 spark-common/src/main/java/me/lucko/spark/common/sampler/TickCounter.java create mode 100644 spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/DataAggregator.java create mode 100644 spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/SimpleDataAggregator.java create mode 100644 spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/TickedDataAggregator.java create mode 100644 spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java create mode 100644 spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java create mode 100644 
spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java create mode 100644 spark-common/src/main/java/me/lucko/spark/common/util/AbstractHttpClient.java create mode 100644 spark-common/src/main/java/me/lucko/spark/common/util/BytebinClient.java create mode 100644 spark-common/src/main/java/me/lucko/spark/common/util/ThreadFinder.java create mode 100644 spark-common/src/main/java/me/lucko/spark/common/util/TypeDescriptors.java delete mode 100644 spark-common/src/main/java/me/lucko/spark/memory/HeapDump.java delete mode 100644 spark-common/src/main/java/me/lucko/spark/memory/HeapDumpSummary.java delete mode 100644 spark-common/src/main/java/me/lucko/spark/monitor/GarbageCollectionMonitor.java delete mode 100644 spark-common/src/main/java/me/lucko/spark/monitor/TickMonitor.java delete mode 100644 spark-common/src/main/java/me/lucko/spark/sampler/Sampler.java delete mode 100644 spark-common/src/main/java/me/lucko/spark/sampler/SamplerBuilder.java delete mode 100644 spark-common/src/main/java/me/lucko/spark/sampler/ThreadDumper.java delete mode 100644 spark-common/src/main/java/me/lucko/spark/sampler/ThreadGrouper.java delete mode 100644 spark-common/src/main/java/me/lucko/spark/sampler/TickCounter.java delete mode 100644 spark-common/src/main/java/me/lucko/spark/sampler/aggregator/DataAggregator.java delete mode 100644 spark-common/src/main/java/me/lucko/spark/sampler/aggregator/SimpleDataAggregator.java delete mode 100644 spark-common/src/main/java/me/lucko/spark/sampler/aggregator/TickedDataAggregator.java delete mode 100644 spark-common/src/main/java/me/lucko/spark/sampler/node/AbstractNode.java delete mode 100644 spark-common/src/main/java/me/lucko/spark/sampler/node/StackTraceNode.java delete mode 100644 spark-common/src/main/java/me/lucko/spark/sampler/node/ThreadNode.java delete mode 100644 spark-common/src/main/java/me/lucko/spark/util/BytebinClient.java delete mode 100644 spark-common/src/main/java/me/lucko/spark/util/ThreadFinder.java 
delete mode 100644 spark-common/src/main/java/me/lucko/spark/util/TypeDescriptors.java delete mode 100644 spark-forge/src/main/java/me/lucko/spark/forge/ForgeClientSparkPlatform.java create mode 100644 spark-forge/src/main/java/me/lucko/spark/forge/ForgeClientSparkPlugin.java delete mode 100644 spark-forge/src/main/java/me/lucko/spark/forge/ForgeServerSparkPlatform.java create mode 100644 spark-forge/src/main/java/me/lucko/spark/forge/ForgeServerSparkPlugin.java delete mode 100644 spark-forge/src/main/java/me/lucko/spark/forge/ForgeSparkPlatform.java create mode 100644 spark-forge/src/main/java/me/lucko/spark/forge/ForgeSparkPlugin.java diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitTickCounter.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitTickCounter.java index 3b95399..44a69cb 100644 --- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitTickCounter.java +++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitTickCounter.java @@ -20,8 +20,7 @@ package me.lucko.spark.bukkit; -import me.lucko.spark.sampler.TickCounter; - +import me.lucko.spark.common.sampler.TickCounter; import org.bukkit.plugin.Plugin; import org.bukkit.scheduler.BukkitTask; @@ -32,7 +31,7 @@ public class BukkitTickCounter implements TickCounter, Runnable { private final Plugin plugin; private BukkitTask task; - private final Set tasks = new HashSet<>(); + private final Set tasks = new HashSet<>(); private int tick = 0; public BukkitTickCounter(Plugin plugin) { @@ -41,8 +40,8 @@ public class BukkitTickCounter implements TickCounter, Runnable { @Override public void run() { - for (Runnable r : this.tasks) { - r.run(); + for (TickTask r : this.tasks) { + r.onTick(this); } this.tick++; } @@ -63,12 +62,12 @@ public class BukkitTickCounter implements TickCounter, Runnable { } @Override - public void addTickTask(Runnable runnable) { + public void addTickTask(TickTask runnable) { this.tasks.add(runnable); } @Override - public void removeTickTask(Runnable 
runnable) { + public void removeTickTask(TickTask runnable) { this.tasks.remove(runnable); } } diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/SparkBukkitPlugin.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/SparkBukkitPlugin.java index f1395c4..1d6160a 100644 --- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/SparkBukkitPlugin.java +++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/SparkBukkitPlugin.java @@ -21,100 +21,97 @@ package me.lucko.spark.bukkit; import me.lucko.spark.common.SparkPlatform; -import me.lucko.spark.sampler.ThreadDumper; -import me.lucko.spark.sampler.TickCounter; - +import me.lucko.spark.common.SparkPlugin; +import me.lucko.spark.common.sampler.ThreadDumper; +import me.lucko.spark.common.sampler.TickCounter; import org.bukkit.ChatColor; import org.bukkit.command.Command; import org.bukkit.command.CommandSender; -import org.bukkit.entity.Player; import org.bukkit.plugin.java.JavaPlugin; import java.nio.file.Path; import java.util.Collections; +import java.util.HashSet; import java.util.List; +import java.util.Set; -public class SparkBukkitPlugin extends JavaPlugin { +public class SparkBukkitPlugin extends JavaPlugin implements SparkPlugin { - private final SparkPlatform sparkPlatform = new SparkPlatform() { + private final SparkPlatform platform = new SparkPlatform<>(this); - private String colorize(String message) { - return ChatColor.translateAlternateColorCodes('&', message); - } + @Override + public void onEnable() { + this.platform.enable(); + } - private void broadcast(String msg) { - getServer().getConsoleSender().sendMessage(msg); - for (Player player : getServer().getOnlinePlayers()) { - if (player.hasPermission("spark")) { - player.sendMessage(msg); - } - } - } + @Override + public void onDisable() { + this.platform.disable(); + } - @Override - public String getVersion() { - return getDescription().getVersion(); + @Override + public boolean onCommand(CommandSender sender, Command command, String label, 
String[] args) { + if (!sender.hasPermission("spark")) { + sender.sendMessage(ChatColor.RED + "You do not have permission to use this command."); + return true; } - @Override - public Path getPluginFolder() { - return getDataFolder().toPath(); - } + this.platform.executeCommand(sender, args); + return true; + } - @Override - public String getLabel() { - return "spark"; + @Override + public List onTabComplete(CommandSender sender, Command command, String alias, String[] args) { + if (!sender.hasPermission("spark")) { + return Collections.emptyList(); } + return this.platform.tabCompleteCommand(sender, args); + } - @Override - public void sendMessage(CommandSender sender, String message) { - sender.sendMessage(colorize(message)); - } + @Override + public String getVersion() { + return getDescription().getVersion(); + } - @Override - public void sendMessage(String message) { - String msg = colorize(message); - broadcast(msg); - } + @Override + public Path getPluginFolder() { + return getDataFolder().toPath(); + } - @Override - public void sendLink(String url) { - String msg = colorize("&7" + url); - broadcast(msg); - } + @Override + public String getLabel() { + return "spark"; + } - @Override - public void runAsync(Runnable r) { - getServer().getScheduler().runTaskAsynchronously(SparkBukkitPlugin.this, r); - } + @Override + public Set getSenders() { + Set senders = new HashSet<>(getServer().getOnlinePlayers()); + senders.add(getServer().getConsoleSender()); + return senders; + } - @Override - public ThreadDumper getDefaultThreadDumper() { - return new ThreadDumper.Specific(new long[]{Thread.currentThread().getId()}); - } + @Override + public void sendMessage(CommandSender sender, String message) { + sender.sendMessage(ChatColor.translateAlternateColorCodes('&', message)); + } - @Override - public TickCounter newTickCounter() { - return new BukkitTickCounter(SparkBukkitPlugin.this); - } - }; + @Override + public void sendLink(CommandSender sender, String url) { + 
sendMessage(sender, "&7" + url); + } @Override - public boolean onCommand(CommandSender sender, Command command, String label, String[] args) { - if (!sender.hasPermission("spark")) { - sender.sendMessage(ChatColor.RED + "You do not have permission to use this command."); - return true; - } + public void runAsync(Runnable r) { + getServer().getScheduler().runTaskAsynchronously(SparkBukkitPlugin.this, r); + } - this.sparkPlatform.executeCommand(sender, args); - return true; + @Override + public ThreadDumper getDefaultThreadDumper() { + return new ThreadDumper.Specific(new long[]{Thread.currentThread().getId()}); } @Override - public List onTabComplete(CommandSender sender, Command command, String alias, String[] args) { - if (!sender.hasPermission("spark")) { - return Collections.emptyList(); - } - return this.sparkPlatform.tabCompleteCommand(sender, args); + public TickCounter createTickCounter() { + return new BukkitTickCounter(this); } } diff --git a/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/SparkBungeeCordPlugin.java b/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/SparkBungeeCordPlugin.java index 6d23683..5c39d34 100644 --- a/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/SparkBungeeCordPlugin.java +++ b/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/SparkBungeeCordPlugin.java @@ -21,95 +21,93 @@ package me.lucko.spark.bungeecord; import me.lucko.spark.common.SparkPlatform; -import me.lucko.spark.sampler.ThreadDumper; -import me.lucko.spark.sampler.TickCounter; - +import me.lucko.spark.common.SparkPlugin; +import me.lucko.spark.common.sampler.ThreadDumper; +import me.lucko.spark.common.sampler.TickCounter; import net.md_5.bungee.api.ChatColor; import net.md_5.bungee.api.CommandSender; -import net.md_5.bungee.api.chat.BaseComponent; import net.md_5.bungee.api.chat.ClickEvent; import net.md_5.bungee.api.chat.TextComponent; -import net.md_5.bungee.api.connection.ProxiedPlayer; import net.md_5.bungee.api.plugin.Command; 
import net.md_5.bungee.api.plugin.Plugin; import net.md_5.bungee.api.plugin.TabExecutor; import java.nio.file.Path; import java.util.Collections; +import java.util.HashSet; +import java.util.Set; -public class SparkBungeeCordPlugin extends Plugin { +public class SparkBungeeCordPlugin extends Plugin implements SparkPlugin { - private final SparkPlatform sparkPlatform = new SparkPlatform() { - private BaseComponent[] colorize(String message) { - return TextComponent.fromLegacyText(ChatColor.translateAlternateColorCodes('&', message)); - } + private final SparkPlatform platform = new SparkPlatform<>(this); - private void broadcast(BaseComponent... msg) { - getProxy().getConsole().sendMessage(msg); - for (ProxiedPlayer player : getProxy().getPlayers()) { - if (player.hasPermission("spark")) { - player.sendMessage(msg); - } - } - } + @Override + public void onEnable() { + this.platform.enable(); + getProxy().getPluginManager().registerCommand(this, new SparkCommand(this)); + } - @Override - public String getVersion() { - return getDescription().getVersion(); - } + @Override + public void onDisable() { + this.platform.disable(); + } - @Override - public Path getPluginFolder() { - return getDataFolder().toPath(); - } + @Override + public String getVersion() { + return getDescription().getVersion(); + } - @Override - public String getLabel() { - return "sparkb"; - } + @Override + public Path getPluginFolder() { + return getDataFolder().toPath(); + } - @Override - public void sendMessage(CommandSender sender, String message) { - sender.sendMessage(colorize(message)); - } + @Override + public String getLabel() { + return "sparkb"; + } - @Override - public void sendMessage(String message) { - broadcast(colorize(message)); - } + @Override + public Set getSenders() { + Set senders = new HashSet<>(getProxy().getPlayers()); + senders.add(getProxy().getConsole()); + return senders; + } - @Override - public void sendLink(String url) { - TextComponent component = new 
TextComponent(url); - component.setColor(ChatColor.GRAY); - component.setClickEvent(new ClickEvent(ClickEvent.Action.OPEN_URL, url)); - broadcast(component); - } + @Override + public void sendMessage(CommandSender sender, String message) { + sender.sendMessage(TextComponent.fromLegacyText(ChatColor.translateAlternateColorCodes('&', message))); + } - @Override - public void runAsync(Runnable r) { - getProxy().getScheduler().runAsync(SparkBungeeCordPlugin.this, r); - } + @Override + public void sendLink(CommandSender sender, String url) { + TextComponent component = new TextComponent(url); + component.setColor(ChatColor.GRAY); + component.setClickEvent(new ClickEvent(ClickEvent.Action.OPEN_URL, url)); + sender.sendMessage(component); + } - @Override - public ThreadDumper getDefaultThreadDumper() { - return ThreadDumper.ALL; - } + @Override + public void runAsync(Runnable r) { + getProxy().getScheduler().runAsync(SparkBungeeCordPlugin.this, r); + } - @Override - public TickCounter newTickCounter() { - throw new UnsupportedOperationException(); - } - }; + @Override + public ThreadDumper getDefaultThreadDumper() { + return ThreadDumper.ALL; + } @Override - public void onEnable() { - getProxy().getPluginManager().registerCommand(this, new SparkCommand()); + public TickCounter createTickCounter() { + return null; } - private final class SparkCommand extends Command implements TabExecutor { - public SparkCommand() { + private static final class SparkCommand extends Command implements TabExecutor { + private final SparkBungeeCordPlugin plugin; + + SparkCommand(SparkBungeeCordPlugin plugin) { super("sparkb", null, "sparkbungee"); + this.plugin = plugin; } @Override @@ -121,7 +119,7 @@ public class SparkBungeeCordPlugin extends Plugin { return; } - SparkBungeeCordPlugin.this.sparkPlatform.executeCommand(sender, args); + this.plugin.platform.executeCommand(sender, args); } @Override @@ -129,7 +127,7 @@ public class SparkBungeeCordPlugin extends Plugin { if 
(!sender.hasPermission("spark")) { return Collections.emptyList(); } - return SparkBungeeCordPlugin.this.sparkPlatform.tabCompleteCommand(sender, args); + return this.plugin.platform.tabCompleteCommand(sender, args); } } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java index ef21d1c..8eb4565 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java +++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java @@ -21,19 +21,20 @@ package me.lucko.spark.common; import com.google.common.collect.ImmutableList; - import me.lucko.spark.common.command.Arguments; import me.lucko.spark.common.command.Command; +import me.lucko.spark.common.command.CommandResponseHandler; import me.lucko.spark.common.command.modules.MemoryModule; +import me.lucko.spark.common.command.modules.MonitoringModule; import me.lucko.spark.common.command.modules.SamplerModule; import me.lucko.spark.common.command.modules.TickMonitoringModule; import me.lucko.spark.common.command.tabcomplete.CompletionSupplier; import me.lucko.spark.common.command.tabcomplete.TabCompleter; -import me.lucko.spark.sampler.ThreadDumper; -import me.lucko.spark.sampler.TickCounter; -import me.lucko.spark.util.BytebinClient; +import me.lucko.spark.common.monitor.tick.TpsCalculator; +import me.lucko.spark.common.sampler.TickCounter; +import me.lucko.spark.common.util.BytebinClient; +import okhttp3.OkHttpClient; -import java.nio.file.Path; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -41,52 +42,68 @@ import java.util.List; import java.util.stream.Collectors; /** - * Abstract command handling class used by all platforms. + * Abstract spark implementation used by all platforms. * * @param the sender (e.g. 
CommandSender) type used by the platform */ -public abstract class SparkPlatform { +public class SparkPlatform { /** The URL of the viewer frontend */ public static final String VIEWER_URL = "https://sparkprofiler.github.io/#"; + /** The shared okhttp client */ + private static final OkHttpClient OK_HTTP_CLIENT = new OkHttpClient(); /** The bytebin instance used by the platform */ - public static final BytebinClient BYTEBIN_CLIENT = new BytebinClient("https://bytebin.lucko.me/", "spark-plugin"); - - /** The prefix used in all messages */ - private static final String PREFIX = "&8[&fspark&8] &7"; - - private static List> prepareCommands() { - ImmutableList.Builder> builder = ImmutableList.builder(); - new SamplerModule().registerCommands(builder::add); - new TickMonitoringModule().registerCommands(builder::add); - new MemoryModule().registerCommands(builder::add); - return builder.build(); + public static final BytebinClient BYTEBIN_CLIENT = new BytebinClient(OK_HTTP_CLIENT, "https://bytebin.lucko.me/", "spark-plugin"); + + private final List> commands; + private final SparkPlugin plugin; + + private final TickCounter tickCounter; + private final TpsCalculator tpsCalculator; + + public SparkPlatform(SparkPlugin plugin) { + this.plugin = plugin; + + ImmutableList.Builder> commandsBuilder = ImmutableList.builder(); + new SamplerModule().registerCommands(commandsBuilder::add); + new MonitoringModule().registerCommands(commandsBuilder::add); + new TickMonitoringModule().registerCommands(commandsBuilder::add); + new MemoryModule().registerCommands(commandsBuilder::add); + this.commands = commandsBuilder.build(); + + this.tickCounter = plugin.createTickCounter(); + this.tpsCalculator = this.tickCounter != null ? 
new TpsCalculator() : null; + } + + public void enable() { + if (this.tickCounter != null) { + this.tickCounter.addTickTask(this.tpsCalculator); + this.tickCounter.start(); + } + } + + public void disable() { + if (this.tickCounter != null) { + this.tickCounter.close(); + } + } + + public SparkPlugin getPlugin() { + return this.plugin; } - private final List> commands = prepareCommands(); - - // abstract methods implemented by each platform - public abstract String getVersion(); - public abstract Path getPluginFolder(); - public abstract String getLabel(); - public abstract void sendMessage(S sender, String message); - public abstract void sendMessage(String message); - public abstract void sendLink(String url); - public abstract void runAsync(Runnable r); - public abstract ThreadDumper getDefaultThreadDumper(); - public abstract TickCounter newTickCounter(); - - public void sendPrefixedMessage(S sender, String message) { - sendMessage(sender, PREFIX + message); + public TickCounter getTickCounter() { + return this.tickCounter; } - public void sendPrefixedMessage(String message) { - sendMessage(PREFIX + message); + public TpsCalculator getTpsCalculator() { + return this.tpsCalculator; } public void executeCommand(S sender, String[] args) { + CommandResponseHandler resp = new CommandResponseHandler<>(this, sender); if (args.length == 0) { - sendUsage(sender); + sendUsage(resp); return; } @@ -96,15 +113,15 @@ public abstract class SparkPlatform { for (Command command : this.commands) { if (command.aliases().contains(alias)) { try { - command.executor().execute(this, sender, new Arguments(rawArgs)); + command.executor().execute(this, sender, resp, new Arguments(rawArgs)); } catch (IllegalArgumentException e) { - sendMessage(sender, "&c" + e.getMessage()); + resp.replyPrefixed("&c" + e.getMessage()); } return; } } - sendUsage(sender); + sendUsage(resp); } public List tabCompleteCommand(S sender, String[] args) { @@ -127,15 +144,15 @@ public abstract class SparkPlatform 
{ return Collections.emptyList(); } - private void sendUsage(S sender) { - sendPrefixedMessage(sender, "&fspark &7v" + getVersion()); + private void sendUsage(CommandResponseHandler sender) { + sender.replyPrefixed("&fspark &7v" + getPlugin().getVersion()); for (Command command : this.commands) { - sendMessage(sender, "&b&l> &7/" + getLabel() + " " + command.aliases().get(0)); + sender.reply("&b&l> &7/" + getPlugin().getLabel() + " " + command.aliases().get(0)); for (Command.ArgumentInfo arg : command.arguments()) { if (arg.requiresParameter()) { - sendMessage(sender, " &8[&7--" + arg.argumentName() + "&8 <" + arg.parameterDescription() + ">]"); + sender.reply(" &8[&7--" + arg.argumentName() + "&8 <" + arg.parameterDescription() + ">]"); } else { - sendMessage(sender, " &8[&7--" + arg.argumentName() + "]"); + sender.reply(" &8[&7--" + arg.argumentName() + "]"); } } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java new file mode 100644 index 0000000..7a3a353 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java @@ -0,0 +1,49 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +package me.lucko.spark.common; + +import me.lucko.spark.common.sampler.ThreadDumper; +import me.lucko.spark.common.sampler.TickCounter; + +import java.nio.file.Path; +import java.util.Set; + +public interface SparkPlugin { + + String getVersion(); + + Path getPluginFolder(); + + String getLabel(); + + Set getSenders(); + + void sendMessage(S sender, String message); + + void sendLink(S sender, String url); + + void runAsync(Runnable r); + + ThreadDumper getDefaultThreadDumper(); + + TickCounter createTickCounter(); + +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/Command.java b/spark-common/src/main/java/me/lucko/spark/common/command/Command.java index fb440b1..c9f6551 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/Command.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/Command.java @@ -21,7 +21,6 @@ package me.lucko.spark.common.command; import com.google.common.collect.ImmutableList; - import me.lucko.spark.common.SparkPlatform; import java.util.Collections; @@ -109,7 +108,7 @@ public class Command { @FunctionalInterface public interface Executor { - void execute(SparkPlatform platform, S sender, Arguments arguments); + void execute(SparkPlatform platform, S sender, CommandResponseHandler resp, Arguments arguments); } @FunctionalInterface diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/CommandResponseHandler.java b/spark-common/src/main/java/me/lucko/spark/common/command/CommandResponseHandler.java new file mode 100644 index 0000000..a5a7391 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/command/CommandResponseHandler.java @@ -0,0 +1,75 @@ +/* + * This file is part of spark. 
+ * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.command; + +import me.lucko.spark.common.SparkPlatform; + +import java.util.Set; +import java.util.function.Consumer; + +public class CommandResponseHandler { + + /** The prefix used in all messages */ + private static final String PREFIX = "&8[&fspark&8] &7"; + + private final SparkPlatform platform; + private final S sender; + + public CommandResponseHandler(SparkPlatform platform, S sender) { + this.platform = platform; + this.sender = sender; + } + + public S sender() { + return this.sender; + } + + public void allSenders(Consumer action) { + Set senders = this.platform.getPlugin().getSenders(); + senders.add(this.sender); + senders.forEach(action); + } + + public void reply(String message) { + this.platform.getPlugin().sendMessage(this.sender, message); + } + + public void broadcast(String message) { + allSenders(sender -> this.platform.getPlugin().sendMessage(sender, message)); + } + + public void replyPrefixed(String message) { + this.platform.getPlugin().sendMessage(this.sender, PREFIX + message); + } + + public void broadcastPrefixed(String message) { + allSenders(sender -> this.platform.getPlugin().sendMessage(sender, PREFIX + message)); + } + + public void replyLink(String link) { + this.platform.getPlugin().sendLink(this.sender, 
link); + } + + public void broadcastLink(String link) { + allSenders(sender -> this.platform.getPlugin().sendLink(sender, link)); + } + +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/MemoryModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/MemoryModule.java index 5f17d54..2cb2e07 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/MemoryModule.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/MemoryModule.java @@ -24,9 +24,8 @@ import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.command.Command; import me.lucko.spark.common.command.CommandModule; import me.lucko.spark.common.command.tabcomplete.TabCompleter; -import me.lucko.spark.memory.HeapDump; -import me.lucko.spark.memory.HeapDumpSummary; - +import me.lucko.spark.common.memory.HeapDump; +import me.lucko.spark.common.memory.HeapDumpSummary; import okhttp3.MediaType; import java.io.IOException; @@ -44,34 +43,34 @@ public class MemoryModule implements CommandModule { consumer.accept(Command.builder() .aliases("heapsummary") .argumentUsage("run-gc-before", null) - .executor((platform, sender, arguments) -> { - platform.runAsync(() -> { - if (arguments.boolFlag("run-gc-before")) { - platform.sendPrefixedMessage("&7Running garbage collector..."); - System.gc(); - } - - platform.sendPrefixedMessage("&7Creating a new heap dump summary, please wait..."); - - HeapDumpSummary heapDump; - try { - heapDump = HeapDumpSummary.createNew(); - } catch (Exception e) { - platform.sendPrefixedMessage("&cAn error occurred whilst inspecting the heap."); - e.printStackTrace(); - return; - } - - byte[] output = heapDump.formCompressedDataPayload(); - try { - String key = SparkPlatform.BYTEBIN_CLIENT.postGzippedContent(output, JSON_TYPE); - platform.sendPrefixedMessage("&bHeap dump summmary output:"); - platform.sendLink(SparkPlatform.VIEWER_URL + key); - } catch (IOException e) { - 
platform.sendPrefixedMessage("&cAn error occurred whilst uploading the data."); - e.printStackTrace(); - } - }); + .executor((platform, sender, resp, arguments) -> { + platform.getPlugin().runAsync(() -> { + if (arguments.boolFlag("run-gc-before")) { + resp.broadcastPrefixed("&7Running garbage collector..."); + System.gc(); + } + + resp.broadcastPrefixed("&7Creating a new heap dump summary, please wait..."); + + HeapDumpSummary heapDump; + try { + heapDump = HeapDumpSummary.createNew(); + } catch (Exception e) { + resp.broadcastPrefixed("&cAn error occurred whilst inspecting the heap."); + e.printStackTrace(); + return; + } + + byte[] output = heapDump.formCompressedDataPayload(); + try { + String key = SparkPlatform.BYTEBIN_CLIENT.postContent(output, JSON_TYPE, false).key(); + resp.broadcastPrefixed("&bHeap dump summmary output:"); + resp.broadcastLink(SparkPlatform.VIEWER_URL + key); + } catch (IOException e) { + resp.broadcastPrefixed("&cAn error occurred whilst uploading the data."); + e.printStackTrace(); + } + }); }) .tabCompleter((platform, sender, arguments) -> TabCompleter.completeForOpts(arguments, "--run-gc-before")) .build() @@ -81,35 +80,36 @@ public class MemoryModule implements CommandModule { .aliases("heapdump") .argumentUsage("run-gc-before", null) .argumentUsage("include-non-live", null) - .executor((platform, sender, arguments) -> { - platform.runAsync(() -> { - Path pluginFolder = platform.getPluginFolder(); - try { - Files.createDirectories(pluginFolder); - } catch (IOException e) { - // ignore - } - - Path file = pluginFolder.resolve("heap-" + DateTimeFormatter.ofPattern("yyyy-MM-dd_HH.mm.ss").format(LocalDateTime.now()) + (HeapDump.isOpenJ9() ? 
".phd" : ".hprof")); - boolean liveOnly = !arguments.boolFlag("include-non-live"); - - if (arguments.boolFlag("run-gc-before")) { - platform.sendPrefixedMessage("&7Running garbage collector..."); - System.gc(); - } - - platform.sendPrefixedMessage("&7Creating a new heap dump, please wait..."); - - try { - HeapDump.dumpHeap(file, liveOnly); - } catch (Exception e) { - platform.sendPrefixedMessage("&cAn error occurred whilst creating a heap dump."); - e.printStackTrace(); - return; - } - - platform.sendPrefixedMessage("&bHeap dump written to: " + file.toString()); - }); + .executor((platform, sender, resp, arguments) -> { + // ignore + platform.getPlugin().runAsync(() -> { + Path pluginFolder = platform.getPlugin().getPluginFolder(); + try { + Files.createDirectories(pluginFolder); + } catch (IOException e) { + // ignore + } + + Path file = pluginFolder.resolve("heap-" + DateTimeFormatter.ofPattern("yyyy-MM-dd_HH.mm.ss").format(LocalDateTime.now()) + (HeapDump.isOpenJ9() ? ".phd" : ".hprof")); + boolean liveOnly = !arguments.boolFlag("include-non-live"); + + if (arguments.boolFlag("run-gc-before")) { + resp.broadcastPrefixed("&7Running garbage collector..."); + System.gc(); + } + + resp.broadcastPrefixed("&7Creating a new heap dump, please wait..."); + + try { + HeapDump.dumpHeap(file, liveOnly); + } catch (Exception e) { + resp.broadcastPrefixed("&cAn error occurred whilst creating a heap dump."); + e.printStackTrace(); + return; + } + + resp.broadcastPrefixed("&bHeap dump written to: " + file.toString()); + }); }) .tabCompleter((platform, sender, arguments) -> TabCompleter.completeForOpts(arguments, "--run-gc-before", "--include-non-live")) .build() diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/MonitoringModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/MonitoringModule.java new file mode 100644 index 0000000..b543e1d --- /dev/null +++ 
b/spark-common/src/main/java/me/lucko/spark/common/command/modules/MonitoringModule.java @@ -0,0 +1,51 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. + */ + +package me.lucko.spark.common.command.modules; + +import me.lucko.spark.common.command.Command; +import me.lucko.spark.common.command.CommandModule; +import me.lucko.spark.common.monitor.tick.TpsCalculator; + +import java.util.function.Consumer; + +public class MonitoringModule<S> implements CommandModule<S> { + + @Override + public void registerCommands(Consumer<Command<S>> consumer) { + consumer.accept(Command.<S>builder() + .aliases("tps") + .executor((platform, sender, resp, arguments) -> { + TpsCalculator tpsCalculator = platform.getTpsCalculator(); + if (tpsCalculator == null) { + resp.replyPrefixed("TPS data is not available."); + return; + } + + String formattedTpsString = tpsCalculator.toFormattedString(); + resp.replyPrefixed("TPS from last 5s, 10s, 1m, 5m, 15m"); + resp.replyPrefixed(formattedTpsString); + }) + .tabCompleter(Command.TabCompleter.empty()) + .build() + ); + } + +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java index 9d00a96..a0f171c 100644 --- 
a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java @@ -23,14 +23,14 @@ package me.lucko.spark.common.command.modules; import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.command.Command; import me.lucko.spark.common.command.CommandModule; +import me.lucko.spark.common.command.CommandResponseHandler; import me.lucko.spark.common.command.tabcomplete.CompletionSupplier; import me.lucko.spark.common.command.tabcomplete.TabCompleter; -import me.lucko.spark.sampler.Sampler; -import me.lucko.spark.sampler.SamplerBuilder; -import me.lucko.spark.sampler.ThreadDumper; -import me.lucko.spark.sampler.ThreadGrouper; -import me.lucko.spark.sampler.TickCounter; - +import me.lucko.spark.common.sampler.Sampler; +import me.lucko.spark.common.sampler.SamplerBuilder; +import me.lucko.spark.common.sampler.ThreadDumper; +import me.lucko.spark.common.sampler.ThreadGrouper; +import me.lucko.spark.common.sampler.TickCounter; import okhttp3.MediaType; import java.io.IOException; @@ -62,15 +62,15 @@ public class SamplerModule implements CommandModule { .argumentUsage("interval", "interval millis") .argumentUsage("only-ticks-over", "tick length millis") .argumentUsage("include-line-numbers", null) - .executor((platform, sender, arguments) -> { + .executor((platform, sender, resp, arguments) -> { int timeoutSeconds = arguments.intFlag("timeout"); if (timeoutSeconds != -1 && timeoutSeconds <= 10) { - platform.sendPrefixedMessage(sender, "&cThe specified timeout is not long enough for accurate results to be formed. Please choose a value greater than 10."); + resp.replyPrefixed("&cThe specified timeout is not long enough for accurate results to be formed. 
Please choose a value greater than 10."); return; } if (timeoutSeconds != -1 && timeoutSeconds < 30) { - platform.sendPrefixedMessage(sender, "&7The accuracy of the output will significantly improve when sampling is able to run for longer periods. Consider setting a timeout value over 30 seconds."); + resp.replyPrefixed("&7The accuracy of the output will significantly improve when sampling is able to run for longer periods. Consider setting a timeout value over 30 seconds."); } double intervalMillis = arguments.doubleFlag("interval"); @@ -84,7 +84,7 @@ public class SamplerModule implements CommandModule { ThreadDumper threadDumper; if (threads.isEmpty()) { // use the server thread - threadDumper = platform.getDefaultThreadDumper(); + threadDumper = platform.getPlugin().getDefaultThreadDumper(); } else if (threads.contains("*")) { threadDumper = ThreadDumper.ALL; } else { @@ -108,10 +108,9 @@ public class SamplerModule implements CommandModule { int ticksOver = arguments.intFlag("only-ticks-over"); TickCounter tickCounter = null; if (ticksOver != -1) { - try { - tickCounter = platform.newTickCounter(); - } catch (UnsupportedOperationException e) { - platform.sendPrefixedMessage(sender, "&cTick counting is not supported!"); + tickCounter = platform.getTickCounter(); + if (tickCounter == null) { + resp.replyPrefixed("&cTick counting is not supported!"); return; } } @@ -119,11 +118,11 @@ public class SamplerModule implements CommandModule { Sampler sampler; synchronized (this.activeSamplerMutex) { if (this.activeSampler != null) { - platform.sendPrefixedMessage(sender, "&7An active sampler is already running."); + resp.replyPrefixed("&7An active sampler is already running."); return; } - platform.sendPrefixedMessage("&7Initializing a new profiler, please wait..."); + resp.broadcastPrefixed("&7Initializing a new profiler, please wait..."); SamplerBuilder builder = new SamplerBuilder(); builder.threadDumper(threadDumper); @@ -138,11 +137,11 @@ public class SamplerModule 
implements CommandModule { } sampler = this.activeSampler = builder.start(); - platform.sendPrefixedMessage("&bProfiler now active!"); + resp.broadcastPrefixed("&bProfiler now active!"); if (timeoutSeconds == -1) { - platform.sendPrefixedMessage("&7Use '/" + platform.getLabel() + " stop' to stop profiling and upload the results."); + resp.broadcastPrefixed("&7Use '/" + platform.getPlugin().getLabel() + " stop' to stop profiling and upload the results."); } else { - platform.sendPrefixedMessage("&7The results will be automatically returned after the profiler has been running for " + timeoutSeconds + " seconds."); + resp.broadcastPrefixed("&7The results will be automatically returned after the profiler has been running for " + timeoutSeconds + " seconds."); } } @@ -151,7 +150,7 @@ public class SamplerModule implements CommandModule { // send message if profiling fails future.whenCompleteAsync((s, throwable) -> { if (throwable != null) { - platform.sendPrefixedMessage("&cSampling operation failed unexpectedly. Error: " + throwable.toString()); + resp.broadcastPrefixed("&cSampling operation failed unexpectedly. Error: " + throwable.toString()); throwable.printStackTrace(); } }); @@ -168,8 +167,8 @@ public class SamplerModule implements CommandModule { // await the result if (timeoutSeconds != -1) { future.thenAcceptAsync(s -> { - platform.sendPrefixedMessage("&7The active sampling operation has completed! Uploading results..."); - handleUpload(platform, s); + resp.broadcastPrefixed("&7The active sampling operation has completed! 
Uploading results..."); + handleUpload(platform, resp, s); }); } }) @@ -188,21 +187,21 @@ public class SamplerModule implements CommandModule { consumer.accept(Command.builder() .aliases("info") - .executor((platform, sender, arguments) -> { + .executor((platform, sender, resp, arguments) -> { synchronized (this.activeSamplerMutex) { if (this.activeSampler == null) { - platform.sendPrefixedMessage(sender, "&7There isn't an active sampling task running."); + resp.replyPrefixed("&7There isn't an active sampling task running."); } else { long timeout = this.activeSampler.getEndTime(); if (timeout == -1) { - platform.sendPrefixedMessage(sender, "&7There is an active sampler currently running, with no defined timeout."); + resp.replyPrefixed("&7There is an active sampler currently running, with no defined timeout."); } else { long timeoutDiff = (timeout - System.currentTimeMillis()) / 1000L; - platform.sendPrefixedMessage(sender, "&7There is an active sampler currently running, due to timeout in " + timeoutDiff + " seconds."); + resp.replyPrefixed("&7There is an active sampler currently running, due to timeout in " + timeoutDiff + " seconds."); } long runningTime = (System.currentTimeMillis() - this.activeSampler.getStartTime()) / 1000L; - platform.sendPrefixedMessage(sender, "&7It has been sampling for " + runningTime + " seconds so far."); + resp.replyPrefixed("&7It has been sampling for " + runningTime + " seconds so far."); } } }) @@ -211,14 +210,14 @@ public class SamplerModule implements CommandModule { consumer.accept(Command.builder() .aliases("stop", "upload", "paste") - .executor((platform, sender, arguments) -> { + .executor((platform, sender, resp, arguments) -> { synchronized (this.activeSamplerMutex) { if (this.activeSampler == null) { - platform.sendPrefixedMessage(sender, "&7There isn't an active sampling task running."); + resp.replyPrefixed("&7There isn't an active sampling task running."); } else { this.activeSampler.cancel(); - 
platform.sendPrefixedMessage("&7The active sampling operation has been stopped! Uploading results..."); - handleUpload(platform, this.activeSampler); + resp.broadcastPrefixed("&7The active sampling operation has been stopped! Uploading results..."); + handleUpload(platform, resp, this.activeSampler); this.activeSampler = null; } } @@ -228,14 +227,14 @@ public class SamplerModule implements CommandModule { consumer.accept(Command.builder() .aliases("cancel") - .executor((platform, sender, arguments) -> { + .executor((platform, sender, resp, arguments) -> { synchronized (this.activeSamplerMutex) { if (this.activeSampler == null) { - platform.sendPrefixedMessage(sender, "&7There isn't an active sampling task running."); + resp.replyPrefixed("&7There isn't an active sampling task running."); } else { this.activeSampler.cancel(); this.activeSampler = null; - platform.sendPrefixedMessage("&bThe active sampling task has been cancelled."); + resp.broadcastPrefixed("&bThe active sampling task has been cancelled."); } } }) @@ -243,15 +242,15 @@ public class SamplerModule implements CommandModule { ); } - private void handleUpload(SparkPlatform platform, Sampler sampler) { - platform.runAsync(() -> { + private void handleUpload(SparkPlatform platform, CommandResponseHandler resp, Sampler sampler) { + platform.getPlugin().runAsync(() -> { byte[] output = sampler.formCompressedDataPayload(); try { - String key = SparkPlatform.BYTEBIN_CLIENT.postGzippedContent(output, JSON_TYPE); - platform.sendPrefixedMessage("&bSampling results:"); - platform.sendLink(SparkPlatform.VIEWER_URL + key); + String key = SparkPlatform.BYTEBIN_CLIENT.postContent(output, JSON_TYPE, false).key(); + resp.broadcastPrefixed("&bSampling results:"); + resp.broadcastLink(SparkPlatform.VIEWER_URL + key); } catch (IOException e) { - platform.sendPrefixedMessage("&cAn error occurred whilst uploading the results."); + resp.broadcastPrefixed("&cAn error occurred whilst uploading the results."); 
e.printStackTrace(); } }); diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/TickMonitoringModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/TickMonitoringModule.java index 3ad8909..bea7a07 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/TickMonitoringModule.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/TickMonitoringModule.java @@ -20,12 +20,12 @@ package me.lucko.spark.common.command.modules; -import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.command.Command; import me.lucko.spark.common.command.CommandModule; +import me.lucko.spark.common.command.CommandResponseHandler; import me.lucko.spark.common.command.tabcomplete.TabCompleter; -import me.lucko.spark.monitor.TickMonitor; -import me.lucko.spark.sampler.TickCounter; +import me.lucko.spark.common.monitor.tick.TickMonitor; +import me.lucko.spark.common.sampler.TickCounter; import java.util.function.Consumer; @@ -37,27 +37,29 @@ public class TickMonitoringModule implements CommandModule { @Override public void registerCommands(Consumer> consumer) { consumer.accept(Command.builder() - .aliases("monitoring") + .aliases("tickmonitoring") .argumentUsage("threshold", "percentage increase") .argumentUsage("without-gc", null) - .executor((platform, sender, arguments) -> { - if (this.activeTickMonitor == null) { + .executor((platform, sender, resp, arguments) -> { + TickCounter tickCounter = platform.getTickCounter(); + if (tickCounter == null) { + resp.replyPrefixed("&cNot supported!"); + return; + } + if (this.activeTickMonitor == null) { int threshold = arguments.intFlag("threshold"); if (threshold == -1) { threshold = 100; } - try { - TickCounter tickCounter = platform.newTickCounter(); - this.activeTickMonitor = new ReportingTickMonitor(platform, tickCounter, threshold, !arguments.boolFlag("without-gc")); - } catch (UnsupportedOperationException e) { - 
platform.sendPrefixedMessage(sender, "&cNot supported!"); - } + this.activeTickMonitor = new ReportingTickMonitor(resp, tickCounter, threshold, !arguments.boolFlag("without-gc")); + tickCounter.addTickTask(this.activeTickMonitor); } else { + tickCounter.removeTickTask(this.activeTickMonitor); this.activeTickMonitor.close(); this.activeTickMonitor = null; - platform.sendPrefixedMessage("&7Tick monitor disabled."); + resp.broadcastPrefixed("&7Tick monitor disabled."); } }) .tabCompleter((platform, sender, arguments) -> TabCompleter.completeForOpts(arguments, "--threshold", "--without-gc")) @@ -66,16 +68,16 @@ public class TickMonitoringModule implements CommandModule { } private class ReportingTickMonitor extends TickMonitor { - private final SparkPlatform platform; + private final CommandResponseHandler resp; - ReportingTickMonitor(SparkPlatform platform, TickCounter tickCounter, int percentageChangeThreshold, boolean monitorGc) { + ReportingTickMonitor(CommandResponseHandler resp, TickCounter tickCounter, int percentageChangeThreshold, boolean monitorGc) { super(tickCounter, percentageChangeThreshold, monitorGc); - this.platform = platform; + this.resp = resp; } @Override protected void sendMessage(String message) { - this.platform.sendPrefixedMessage(message); + this.resp.broadcastPrefixed(message); } } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/memory/HeapDump.java b/spark-common/src/main/java/me/lucko/spark/common/memory/HeapDump.java new file mode 100644 index 0000000..6200c17 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/memory/HeapDump.java @@ -0,0 +1,77 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. 
+ * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. + */ + +package me.lucko.spark.common.memory; + +import javax.management.JMX; +import javax.management.MBeanServer; +import javax.management.ObjectName; +import java.io.IOException; +import java.lang.management.ManagementFactory; +import java.lang.reflect.Method; +import java.nio.file.Path; + +/** + * Utility for creating .hprof memory heap snapshots. + */ +public final class HeapDump { + + private HeapDump() {} + + /** The object name of the com.sun.management.HotSpotDiagnosticMXBean */ + private static final String DIAGNOSTIC_BEAN = "com.sun.management:type=HotSpotDiagnostic"; + + /** + * Creates a heap dump at the given output path. + * + * @param outputPath the path to write the snapshot to + * @param live if true dump only live objects i.e. objects that are reachable from others + * @throws Exception catch all + */ + public static void dumpHeap(Path outputPath, boolean live) throws Exception { + String outputPathString = outputPath.toAbsolutePath().normalize().