16 files changed, 801 insertions, 477 deletions
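Summary: this commit replaces the monolithic CommandHandler with a SparkPlatform abstraction and a pluggable command system (Command, CommandModule, Arguments), moving the sampler, monitoring and heap-dump logic into separate modules. As a rough orientation before the per-file changes, the sketch below shows how a command would be registered under the new API. The ExampleModule class, its aliases and its messages are hypothetical and exist only for illustration; CommandModule, Command.builder(), Arguments and SparkPlatform are the types introduced further down in this diff. Note that modules are not discovered automatically: SparkPlatform#prepareCommands() lists them explicitly, so a real module would also have to be added there.

    package me.lucko.spark.common.command.modules;

    import me.lucko.spark.common.command.Command;
    import me.lucko.spark.common.command.CommandModule;

    import java.util.function.Consumer;

    // Hypothetical module, shown for illustration only - not part of this commit.
    public class ExampleModule<S> implements CommandModule<S> {

        @Override
        public void registerCommands(Consumer<Command<S>> consumer) {
            consumer.accept(Command.<S>builder()
                    .aliases("example", "demo")
                    .executor((platform, sender, arguments) -> {
                        // intFlag(...) returns -1 when the flag was not supplied
                        int value = arguments.intFlag("value");
                        if (value == -1) {
                            value = 10;
                        }
                        // heavy work is pushed off the calling thread, as the real modules do
                        int result = value;
                        platform.runAsync(() ->
                                platform.sendPrefixedMessage(sender, "&7Example ran with value " + result));
                    })
                    // omitting .tabCompleter(...) is fine: build() falls back to TabCompleter.empty()
                    .build()
            );
        }
    }
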
diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/SparkBukkitPlugin.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/SparkBukkitPlugin.java index a11eaf4..122ebf1 100644 --- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/SparkBukkitPlugin.java +++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/SparkBukkitPlugin.java @@ -20,7 +20,7 @@ package me.lucko.spark.bukkit; -import me.lucko.spark.common.CommandHandler; +import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.sampler.ThreadDumper; import me.lucko.spark.sampler.TickCounter; @@ -32,7 +32,7 @@ import org.bukkit.plugin.java.JavaPlugin; public class SparkBukkitPlugin extends JavaPlugin { - private final CommandHandler<CommandSender> commandHandler = new CommandHandler<CommandSender>() { + private final SparkPlatform<CommandSender> sparkPlatform = new SparkPlatform<CommandSender>() { private String colorize(String message) { return ChatColor.translateAlternateColorCodes('&', message); @@ -48,44 +48,44 @@ public class SparkBukkitPlugin extends JavaPlugin { } @Override - protected String getVersion() { + public String getVersion() { return SparkBukkitPlugin.this.getDescription().getVersion(); } @Override - protected String getLabel() { + public String getLabel() { return "spark"; } @Override - protected void sendMessage(CommandSender sender, String message) { + public void sendMessage(CommandSender sender, String message) { sender.sendMessage(colorize(message)); } @Override - protected void sendMessage(String message) { + public void sendMessage(String message) { String msg = colorize(message); broadcast(msg); } @Override - protected void sendLink(String url) { + public void sendLink(String url) { String msg = colorize("&7" + url); broadcast(msg); } @Override - protected void runAsync(Runnable r) { + public void runAsync(Runnable r) { getServer().getScheduler().runTaskAsynchronously(SparkBukkitPlugin.this, r); } @Override - protected ThreadDumper getDefaultThreadDumper() { + public ThreadDumper getDefaultThreadDumper() { return new ThreadDumper.Specific(new long[]{Thread.currentThread().getId()}); } @Override - protected TickCounter newTickCounter() { + public TickCounter newTickCounter() { return new BukkitTickCounter(SparkBukkitPlugin.this); } }; @@ -97,7 +97,7 @@ public class SparkBukkitPlugin extends JavaPlugin { return true; } - this.commandHandler.handleCommand(sender, args); + this.sparkPlatform.executeCommand(sender, args); return true; } } diff --git a/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/SparkBungeeCordPlugin.java b/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/SparkBungeeCordPlugin.java index d5a4ec4..4d54b42 100644 --- a/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/SparkBungeeCordPlugin.java +++ b/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/SparkBungeeCordPlugin.java @@ -20,7 +20,7 @@ package me.lucko.spark.bungeecord; -import me.lucko.spark.common.CommandHandler; +import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.sampler.ThreadDumper; import me.lucko.spark.sampler.TickCounter; @@ -35,7 +35,7 @@ import net.md_5.bungee.api.plugin.Plugin; public class SparkBungeeCordPlugin extends Plugin { - private final CommandHandler<CommandSender> commandHandler = new CommandHandler<CommandSender>() { + private final SparkPlatform<CommandSender> sparkPlatform = new SparkPlatform<CommandSender>() { private BaseComponent[] colorize(String message) { return TextComponent.fromLegacyText(ChatColor.translateAlternateColorCodes('&', message)); 
} @@ -50,27 +50,27 @@ public class SparkBungeeCordPlugin extends Plugin { } @Override - protected String getVersion() { + public String getVersion() { return SparkBungeeCordPlugin.this.getDescription().getVersion(); } @Override - protected String getLabel() { + public String getLabel() { return "sparkbungee"; } @Override - protected void sendMessage(CommandSender sender, String message) { + public void sendMessage(CommandSender sender, String message) { sender.sendMessage(colorize(message)); } @Override - protected void sendMessage(String message) { + public void sendMessage(String message) { broadcast(colorize(message)); } @Override - protected void sendLink(String url) { + public void sendLink(String url) { TextComponent component = new TextComponent(url); component.setColor(ChatColor.GRAY); component.setClickEvent(new ClickEvent(ClickEvent.Action.OPEN_URL, url)); @@ -78,17 +78,17 @@ public class SparkBungeeCordPlugin extends Plugin { } @Override - protected void runAsync(Runnable r) { + public void runAsync(Runnable r) { getProxy().getScheduler().runAsync(SparkBungeeCordPlugin.this, r); } @Override - protected ThreadDumper getDefaultThreadDumper() { + public ThreadDumper getDefaultThreadDumper() { return ThreadDumper.ALL; } @Override - protected TickCounter newTickCounter() { + public TickCounter newTickCounter() { throw new UnsupportedOperationException(); } }; @@ -105,7 +105,7 @@ public class SparkBungeeCordPlugin extends Plugin { return; } - SparkBungeeCordPlugin.this.commandHandler.handleCommand(sender, args); + SparkBungeeCordPlugin.this.sparkPlatform.executeCommand(sender, args); } }); } diff --git a/spark-common/src/main/java/me/lucko/spark/common/CommandHandler.java b/spark-common/src/main/java/me/lucko/spark/common/CommandHandler.java deleted file mode 100644 index 701dec1..0000000 --- a/spark-common/src/main/java/me/lucko/spark/common/CommandHandler.java +++ /dev/null @@ -1,418 +0,0 @@ -/* - * This file is part of spark. - * - * Copyright (c) lucko (Luck) <luck@lucko.me> - * Copyright (c) contributors - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see <http://www.gnu.org/licenses/>. 
- */ - -package me.lucko.spark.common; - -import com.google.common.collect.HashMultimap; -import com.google.common.collect.SetMultimap; -import com.google.common.collect.Sets; - -import me.lucko.spark.common.http.Bytebin; -import me.lucko.spark.memory.HeapDump; -import me.lucko.spark.monitor.TickMonitor; -import me.lucko.spark.sampler.Sampler; -import me.lucko.spark.sampler.SamplerBuilder; -import me.lucko.spark.sampler.ThreadDumper; -import me.lucko.spark.sampler.ThreadGrouper; -import me.lucko.spark.sampler.TickCounter; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Iterator; -import java.util.List; -import java.util.Set; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.TimeUnit; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -/** - * Abstract command handling class used by all platforms. - * - * @param <T> the sender (e.g. CommandSender) type used by the platform - */ -public abstract class CommandHandler<T> { - - /** The URL of the viewer frontend */ - private static final String VIEWER_URL = "https://sparkprofiler.github.io/?"; - /** The prefix used in all messages */ - private static final String PREFIX = "&8[&fspark&8] &7"; - - /** Guards {@link #activeSampler} */ - private final Object[] activeSamplerMutex = new Object[0]; - /** The WarmRoast instance currently running, if any */ - private Sampler activeSampler = null; - /** The tick monitor instance currently running, if any */ - private ReportingTickMonitor activeTickMonitor = null; - - - // abstract methods implemented by each platform - - protected abstract String getVersion(); - protected abstract String getLabel(); - protected abstract void sendMessage(T sender, String message); - protected abstract void sendMessage(String message); - protected abstract void sendLink(String url); - protected abstract void runAsync(Runnable r); - protected abstract ThreadDumper getDefaultThreadDumper(); - protected abstract TickCounter newTickCounter(); - - private void sendPrefixedMessage(T sender, String message) { - sendMessage(sender, PREFIX + message); - } - - private void sendPrefixedMessage(String message) { - sendMessage(PREFIX + message); - } - - public void handleCommand(T sender, String[] args) { - try { - if (args.length == 0) { - sendInfo(sender); - return; - } - - List<String> arguments = new ArrayList<>(Arrays.asList(args)); - switch (arguments.remove(0).toLowerCase()) { - case "start": - handleStart(sender, arguments); - break; - case "info": - handleInfo(sender); - break; - case "cancel": - handleCancel(sender); - break; - case "stop": - case "upload": - case "paste": - handleStop(sender); - break; - case "monitoring": - handleMonitoring(sender, arguments); - break; - case "heap": - case "memory": - handleHeap(sender); - break; - default: - sendInfo(sender); - break; - } - } catch (IllegalArgumentException e) { - sendMessage(sender, "&c" + e.getMessage()); - } - } - - private void sendInfo(T sender) { - sendPrefixedMessage(sender, "&fspark profiler &7v" + getVersion()); - sendMessage(sender, "&b&l> &7/" + getLabel() + " start"); - sendMessage(sender, " &8[&7--timeout&8 <timeout seconds>]"); - sendMessage(sender, " &8[&7--thread&8 <thread name>]"); - sendMessage(sender, " &8[&7--not-combined]"); - sendMessage(sender, " &8[&7--interval&8 <interval millis>]"); - sendMessage(sender, " &8[&7--only-ticks-over&8 <tick length millis>]"); - sendMessage(sender, "&b&l> &7/" + getLabel() + " info"); - sendMessage(sender, "&b&l> &7/" + 
getLabel() + " stop"); - sendMessage(sender, "&b&l> &7/" + getLabel() + " cancel"); - sendMessage(sender, "&b&l> &7/" + getLabel() + " monitoring"); - sendMessage(sender, " &8[&7--threshold&8 <percentage increase>]"); - } - - private void handleStart(T sender, List<String> args) { - SetMultimap<String, String> arguments = parseArguments(args); - - int timeoutSeconds = parseInt(arguments, "timeout", "d"); - if (timeoutSeconds != -1 && timeoutSeconds <= 10) { - sendPrefixedMessage(sender, "&cThe specified timeout is not long enough for accurate results to be formed. Please choose a value greater than 10."); - return; - } - - if (timeoutSeconds != -1 && timeoutSeconds < 30) { - sendPrefixedMessage(sender, "&7The accuracy of the output will significantly improve when sampling is able to run for longer periods. Consider setting a timeout value over 30 seconds."); - } - - int intervalMillis = parseInt(arguments, "interval", "i"); - if (intervalMillis <= 0) { - intervalMillis = 4; - } - - Set<String> threads = Sets.union(arguments.get("thread"), arguments.get("t")); - ThreadDumper threadDumper; - if (threads.isEmpty()) { - // use the server thread - threadDumper = getDefaultThreadDumper(); - } else if (threads.contains("*")) { - threadDumper = ThreadDumper.ALL; - } else { - threadDumper = new ThreadDumper.Specific(threads); - } - - ThreadGrouper threadGrouper; - if (arguments.containsKey("not-combined")) { - threadGrouper = ThreadGrouper.BY_NAME; - } else { - threadGrouper = ThreadGrouper.BY_POOL; - } - - int ticksOver = parseInt(arguments, "only-ticks-over", "o"); - TickCounter tickCounter = null; - if (ticksOver != -1) { - try { - tickCounter = newTickCounter(); - } catch (UnsupportedOperationException e) { - sendPrefixedMessage(sender, "&cTick counting is not supported!"); - return; - } - } - - Sampler sampler; - synchronized (this.activeSamplerMutex) { - if (this.activeSampler != null) { - sendPrefixedMessage(sender, "&7An active sampler is already running."); - return; - } - - sendPrefixedMessage("&7Initializing a new profiler, please wait..."); - - SamplerBuilder builder = new SamplerBuilder(); - builder.threadDumper(threadDumper); - builder.threadGrouper(threadGrouper); - if (timeoutSeconds != -1) { - builder.completeAfter(timeoutSeconds, TimeUnit.SECONDS); - } - builder.samplingInterval(intervalMillis); - if (ticksOver != -1) { - builder.ticksOver(ticksOver, tickCounter); - } - sampler = this.activeSampler = builder.start(); - - sendPrefixedMessage("&bProfiler now active!"); - if (timeoutSeconds == -1) { - sendPrefixedMessage("&7Use '/" + getLabel() + " stop' to stop profiling and upload the results."); - } else { - sendPrefixedMessage("&7The results will be automatically returned after the profiler has been running for " + timeoutSeconds + " seconds."); - } - } - - CompletableFuture<Sampler> future = sampler.getFuture(); - - // send message if profiling fails - future.whenCompleteAsync((s, throwable) -> { - if (throwable != null) { - sendPrefixedMessage("&cSampling operation failed unexpectedly. Error: " + throwable.toString()); - throwable.printStackTrace(); - } - }); - - // set activeSampler to null when complete. - future.whenCompleteAsync((s, throwable) -> { - synchronized (this.activeSamplerMutex) { - if (sampler == this.activeSampler) { - this.activeSampler = null; - } - } - }); - - // await the result - if (timeoutSeconds != -1) { - future.thenAcceptAsync(s -> { - sendPrefixedMessage("&7The active sampling operation has completed! 
Uploading results..."); - handleUpload(s); - }); - } - } - - private void handleInfo(T sender) { - synchronized (this.activeSamplerMutex) { - if (this.activeSampler == null) { - sendPrefixedMessage(sender, "&7There isn't an active sampling task running."); - } else { - long timeout = this.activeSampler.getEndTime(); - if (timeout == -1) { - sendPrefixedMessage(sender, "&7There is an active sampler currently running, with no defined timeout."); - } else { - long timeoutDiff = (timeout - System.currentTimeMillis()) / 1000L; - sendPrefixedMessage(sender, "&7There is an active sampler currently running, due to timeout in " + timeoutDiff + " seconds."); - } - - long runningTime = (System.currentTimeMillis() - this.activeSampler.getStartTime()) / 1000L; - sendPrefixedMessage(sender, "&7It has been sampling for " + runningTime + " seconds so far."); - } - } - } - - private void handleStop(T sender) { - synchronized (this.activeSamplerMutex) { - if (this.activeSampler == null) { - sendPrefixedMessage(sender, "&7There isn't an active sampling task running."); - } else { - this.activeSampler.cancel(); - sendPrefixedMessage("&7The active sampling operation has been stopped! Uploading results..."); - handleUpload(this.activeSampler); - this.activeSampler = null; - } - } - } - - private void handleCancel(T sender) { - synchronized (this.activeSamplerMutex) { - if (this.activeSampler == null) { - sendPrefixedMessage(sender, "&7There isn't an active sampling task running."); - } else { - this.activeSampler.cancel(); - this.activeSampler = null; - sendPrefixedMessage("&bThe active sampling task has been cancelled."); - } - } - } - - private void handleUpload(Sampler sampler) { - runAsync(() -> { - byte[] output = sampler.formCompressedDataPayload(); - try { - String pasteId = Bytebin.postCompressedContent(output); - sendPrefixedMessage("&bSampling results:"); - sendLink(VIEWER_URL + pasteId); - } catch (IOException e) { - sendPrefixedMessage("&cAn error occurred whilst uploading the results."); - e.printStackTrace(); - } - }); - } - - private void handleMonitoring(T sender, List<String> args) { - SetMultimap<String, String> arguments = parseArguments(args); - - if (this.activeTickMonitor == null) { - - int threshold = parseInt(arguments, "threshold", "t"); - if (threshold == -1) { - threshold = 100; - } - - try { - TickCounter tickCounter = newTickCounter(); - this.activeTickMonitor = new ReportingTickMonitor(tickCounter, threshold); - } catch (UnsupportedOperationException e) { - sendPrefixedMessage(sender, "&cNot supported!"); - } - } else { - this.activeTickMonitor.close(); - this.activeTickMonitor = null; - sendPrefixedMessage("&7Tick monitor disabled."); - } - } - - private void handleHeap(T sender) { - runAsync(() -> { - sendPrefixedMessage("&7Creating a new heap dump, please wait..."); - - HeapDump heapDump; - try { - heapDump = HeapDump.createNew(); - } catch (Exception e) { - sendPrefixedMessage("&cAn error occurred whilst inspecting the heap."); - e.printStackTrace(); - return; - } - - byte[] output = heapDump.formCompressedDataPayload(); - try { - String pasteId = Bytebin.postCompressedContent(output); - sendPrefixedMessage("&bHeap dump output:"); - sendLink(VIEWER_URL + pasteId); - } catch (IOException e) { - sendPrefixedMessage("&cAn error occurred whilst uploading the data."); - e.printStackTrace(); - } - }); - } - - private class ReportingTickMonitor extends TickMonitor { - ReportingTickMonitor(TickCounter tickCounter, int percentageChangeThreshold) { - super(tickCounter, 
percentageChangeThreshold); - } - - @Override - protected void sendMessage(String message) { - sendPrefixedMessage(message); - } - } - - private int parseInt(SetMultimap<String, String> arguments, String longArg, String shortArg) { - Iterator<String> it = Sets.union(arguments.get(longArg), arguments.get(shortArg)).iterator(); - if (it.hasNext()) { - try { - return Math.abs(Integer.parseInt(it.next())); - } catch (NumberFormatException e) { - throw new IllegalArgumentException("Invalid input for '" + longArg + "' argument. Please specify a number!"); - } - } - return -1; // undefined - } - - private static final Pattern FLAG_REGEX = Pattern.compile("--(.+)$|-([a-zA-z])$"); - - private static SetMultimap<String, String> parseArguments(List<String> args) { - SetMultimap<String, String> arguments = HashMultimap.create(); - - String flag = null; - List<String> value = null; - - for (int i = 0; i < args.size(); i++) { - String arg = args.get(i); - - Matcher matcher = FLAG_REGEX.matcher(arg); - boolean matches = matcher.matches(); - - if (flag == null || matches) { - if (!matches) { - throw new IllegalArgumentException("Expected flag at position " + i + " but got '" + arg + "' instead!"); - } - - String match = matcher.group(1); - if (match == null) { - match = matcher.group(2); - } - - // store existing value, if present - if (flag != null) { - arguments.put(flag, String.join(" ", value)); - } - - flag = match.toLowerCase(); - value = new ArrayList<>(); - } else { - // part of a value - value.add(arg); - } - } - - // store remaining value, if present - if (flag != null) { - arguments.put(flag, String.join(" ", value)); - } - - return arguments; - } - -} diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java new file mode 100644 index 0000000..57c205f --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java @@ -0,0 +1,115 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) <luck@lucko.me> + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. + */ + +package me.lucko.spark.common; + +import com.google.common.collect.ImmutableList; + +import me.lucko.spark.common.command.Arguments; +import me.lucko.spark.common.command.Command; +import me.lucko.spark.common.command.modules.HeapModule; +import me.lucko.spark.common.command.modules.MonitoringModule; +import me.lucko.spark.common.command.modules.SamplerModule; +import me.lucko.spark.sampler.ThreadDumper; +import me.lucko.spark.sampler.TickCounter; + +import java.util.List; + +/** + * Abstract command handling class used by all platforms. + * + * @param <S> the sender (e.g. 
CommandSender) type used by the platform + */ +public abstract class SparkPlatform<S> { + + /** The URL of the viewer frontend */ + public static final String VIEWER_URL = "https://sparkprofiler.github.io/?"; + /** The prefix used in all messages */ + private static final String PREFIX = "&8[&fspark&8] &7"; + + private static <T> List<Command<T>> prepareCommands() { + ImmutableList.Builder<Command<T>> builder = ImmutableList.builder(); + new SamplerModule<T>().registerCommands(builder::add); + new MonitoringModule<T>().registerCommands(builder::add); + new HeapModule<T>().registerCommands(builder::add); + return builder.build(); + } + + private final List<Command<S>> commands = prepareCommands(); + + // abstract methods implemented by each platform + + public abstract String getVersion(); + public abstract String getLabel(); + public abstract void sendMessage(S sender, String message); + public abstract void sendMessage(String message); + public abstract void sendLink(String url); + public abstract void runAsync(Runnable r); + public abstract ThreadDumper getDefaultThreadDumper(); + public abstract TickCounter newTickCounter(); + + public void sendPrefixedMessage(S sender, String message) { + sendMessage(sender, PREFIX + message); + } + + public void sendPrefixedMessage(String message) { + sendMessage(PREFIX + message); + } + + public void executeCommand(S sender, String[] args) { + if (args.length == 0) { + sendInfo(sender); + return; + } + + Arguments arguments = new Arguments(args); + String alias = arguments.raw().remove(0).toLowerCase(); + + for (Command<S> command : this.commands) { + if (command.aliases().contains(alias)) { + try { + command.executor().execute(this, sender, arguments); + } catch (IllegalArgumentException e) { + sendMessage(sender, "&c" + e.getMessage()); + } + return; + } + } + + sendInfo(sender); + } + + private void sendInfo(S sender) { + // todo automagically generate this + sendPrefixedMessage(sender, "&fspark &7v" + getVersion()); + sendMessage(sender, "&b&l> &7/spark start"); + sendMessage(sender, " &8[&7--timeout&8 <timeout seconds>]"); + sendMessage(sender, " &8[&7--thread&8 <thread name>]"); + sendMessage(sender, " &8[&7--not-combined]"); + sendMessage(sender, " &8[&7--interval&8 <interval millis>]"); + sendMessage(sender, " &8[&7--only-ticks-over&8 <tick length millis>]"); + sendMessage(sender, "&b&l> &7/spark info"); + sendMessage(sender, "&b&l> &7/spark stop"); + sendMessage(sender, "&b&l> &7/spark cancel"); + sendMessage(sender, "&b&l> &7/spark monitoring"); + sendMessage(sender, " &8[&7--threshold&8 <percentage increase>]"); + } + +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/Arguments.java b/spark-common/src/main/java/me/lucko/spark/common/command/Arguments.java new file mode 100644 index 0000000..4189174 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/command/Arguments.java @@ -0,0 +1,100 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) <luck@lucko.me> + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. + */ + +package me.lucko.spark.common.command; + +import com.google.common.collect.HashMultimap; +import com.google.common.collect.SetMultimap; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Iterator; +import java.util.List; +import java.util.Set; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +public class Arguments { + private static final Pattern FLAG_REGEX = Pattern.compile("^--(.+)$"); + + private final List<String> rawArgs; + private final SetMultimap<String, String> parsedArgs; + + public Arguments(String[] args) { + this.rawArgs = new ArrayList<>(Arrays.asList(args)); + this.parsedArgs = HashMultimap.create(); + + String flag = null; + List<String> value = null; + + for (int i = 0; i < this.rawArgs.size(); i++) { + String arg = this.rawArgs.get(i); + + Matcher matcher = FLAG_REGEX.matcher(arg); + boolean matches = matcher.matches(); + + if (flag == null || matches) { + if (!matches) { + throw new IllegalArgumentException("Expected flag at position " + i + " but got '" + arg + "' instead!"); + } + + // store existing value, if present + if (flag != null) { + this.parsedArgs.put(flag, String.join(" ", value)); + } + + flag = matcher.group(1).toLowerCase(); + value = new ArrayList<>(); + } else { + // part of a value + value.add(arg); + } + } + + // store remaining value, if present + if (flag != null) { + this.parsedArgs.put(flag, String.join(" ", value)); + } + } + + public List<String> raw() { + return this.rawArgs; + } + + public int intFlag(String key) { + Iterator<String> it = this.parsedArgs.get(key).iterator(); + if (it.hasNext()) { + try { + return Math.abs(Integer.parseInt(it.next())); + } catch (NumberFormatException e) { + throw new IllegalArgumentException("Invalid input for '" + key + "' argument. Please specify a number!"); + } + } + return -1; // undefined + } + + public Set<String> stringFlag(String key) { + return this.parsedArgs.get(key); + } + + public boolean boolFlag(String key) { + return this.parsedArgs.containsKey(key); + } +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/Command.java b/spark-common/src/main/java/me/lucko/spark/common/command/Command.java new file mode 100644 index 0000000..70dc7e8 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/command/Command.java @@ -0,0 +1,113 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) <luck@lucko.me> + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. 
+ */ + +package me.lucko.spark.common.command; + +import com.google.common.collect.ImmutableSet; + +import me.lucko.spark.common.SparkPlatform; + +import java.util.Collections; +import java.util.List; +import java.util.Objects; +import java.util.Set; + +public class Command<S> { + + public static <S> Builder<S> builder() { + return new Builder<>(); + } + + private final Set<String> aliases; + private final Executor<S> executor; + private final TabCompleter<S> tabCompleter; + + private Command(Set<String> aliases, Executor<S> executor, TabCompleter<S> tabCompleter) { + this.aliases = aliases; + this.executor = executor; + this.tabCompleter = tabCompleter; + } + + public Set<String> aliases() { + return this.aliases; + } + + public Executor<S> executor() { + return this.executor; + } + + public TabCompleter<S> tabCompleter() { + return this.tabCompleter; + } + + public static final class Builder<S> { + private ImmutableSet.Builder<String> aliases = ImmutableSet.builder(); + private Executor<S> executor = null; + private TabCompleter<S> tabCompleter = null; + + Builder() { + + } + + public Builder<S> aliases(String... aliases) { + this.aliases.add(aliases); + return this; + } + + public Builder<S> executor(Executor<S> executor) { + this.executor = Objects.requireNonNull(executor, "executor"); + return this; + } + + public Builder<S> tabCompleter(TabCompleter<S> tabCompleter) { + this.tabCompleter = Objects.requireNonNull(tabCompleter, "tabCompleter"); + return this; + } + + public Command<S> build() { + Set<String> aliases = this.aliases.build(); + if (aliases.isEmpty()) { + throw new IllegalStateException("No aliases defined"); + } + if (this.executor == null) { + throw new IllegalStateException("No defined executor"); + } + if (this.tabCompleter == null) { + this.tabCompleter = TabCompleter.empty(); + } + return new Command<>(aliases, this.executor, this.tabCompleter); + } + } + + @FunctionalInterface + public interface Executor<S> { + void execute(SparkPlatform<S> platform, S sender, Arguments arguments); + } + + @FunctionalInterface + public interface TabCompleter<S> { + static <S> TabCompleter<S> empty() { + return (platform, sender, arguments) -> Collections.emptyList(); + } + + List<String> completions(SparkPlatform<S> platform, S sender, List<String> arguments); + } + +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/CommandModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/CommandModule.java new file mode 100644 index 0000000..f195ef2 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/command/CommandModule.java @@ -0,0 +1,29 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) <luck@lucko.me> + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. 
+ */ + +package me.lucko.spark.common.command; + +import java.util.function.Consumer; + +public interface CommandModule<S> { + + void registerCommands(Consumer<Command<S>> consumer); + +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapModule.java new file mode 100644 index 0000000..e586971 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapModule.java @@ -0,0 +1,69 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) <luck@lucko.me> + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. + */ + +package me.lucko.spark.common.command.modules; + +import me.lucko.spark.common.SparkPlatform; +import me.lucko.spark.common.command.Command; +import me.lucko.spark.common.command.CommandModule; +import me.lucko.spark.common.http.Bytebin; +import me.lucko.spark.memory.HeapDump; + +import java.io.IOException; +import java.util.function.Consumer; + +public class HeapModule<S> implements CommandModule<S> { + + @Override + public void registerCommands(Consumer<Command<S>> consumer) { + consumer.accept(Command.<S>builder() + .aliases("heap", "memory") + .executor((platform, sender, arguments) -> { + platform.runAsync(() -> { + platform.sendPrefixedMessage("&7Creating a new heap dump, please wait..."); + + HeapDump heapDump; + try { + heapDump = HeapDump.createNew(); + } catch (Exception e) { + platform.sendPrefixedMessage("&cAn error occurred whilst inspecting the heap."); + e.printStackTrace(); + return; + } + + byte[] output = heapDump.formCompressedDataPayload(); + try { + String pasteId = Bytebin.postCompressedContent(output); + platform.sendPrefixedMessage("&bHeap dump output:"); + platform.sendLink(SparkPlatform.VIEWER_URL + pasteId); + } catch (IOException e) { + platform.sendPrefixedMessage("&cAn error occurred whilst uploading the data."); + e.printStackTrace(); + } + }); + }) + .tabCompleter((platform, sender, arguments) -> { + return null; + }) + .build() + ); + } + +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/MonitoringModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/MonitoringModule.java new file mode 100644 index 0000000..eafc567 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/MonitoringModule.java @@ -0,0 +1,80 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) <luck@lucko.me> + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. 
+ * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. + */ + +package me.lucko.spark.common.command.modules; + +import me.lucko.spark.common.SparkPlatform; +import me.lucko.spark.common.command.Command; +import me.lucko.spark.common.command.CommandModule; +import me.lucko.spark.monitor.TickMonitor; +import me.lucko.spark.sampler.TickCounter; + +import java.util.function.Consumer; + +public class MonitoringModule<S> implements CommandModule<S> { + + /** The tick monitor instance currently running, if any */ + private ReportingTickMonitor activeTickMonitor = null; + + @Override + public void registerCommands(Consumer<Command<S>> consumer) { + consumer.accept(Command.<S>builder() + .aliases("monitoring") + .executor((platform, sender, arguments) -> { + if (this.activeTickMonitor == null) { + + int threshold = arguments.intFlag("threshold"); + if (threshold == -1) { + threshold = 100; + } + + try { + TickCounter tickCounter = platform.newTickCounter(); + this.activeTickMonitor = new ReportingTickMonitor(platform, tickCounter, threshold); + } catch (UnsupportedOperationException e) { + platform.sendPrefixedMessage(sender, "&cNot supported!"); + } + } else { + this.activeTickMonitor.close(); + this.activeTickMonitor = null; + platform.sendPrefixedMessage("&7Tick monitor disabled."); + } + }) + .tabCompleter((platform, sender, arguments) -> { + return null; + }) + .build() + ); + } + + private class ReportingTickMonitor extends TickMonitor { + private final SparkPlatform<S> platform; + + ReportingTickMonitor(SparkPlatform<S> platform, TickCounter tickCounter, int percentageChangeThreshold) { + super(tickCounter, percentageChangeThreshold); + this.platform = platform; + } + + @Override + protected void sendMessage(String message) { + platform.sendPrefixedMessage(message); + } + } +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java new file mode 100644 index 0000000..853aa5d --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java @@ -0,0 +1,236 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) <luck@lucko.me> + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. 
+ */ + +package me.lucko.spark.common.command.modules; + +import me.lucko.spark.common.SparkPlatform; +import me.lucko.spark.common.command.Command; +import me.lucko.spark.common.command.CommandModule; +import me.lucko.spark.common.http.Bytebin; +import me.lucko.spark.sampler.Sampler; +import me.lucko.spark.sampler.SamplerBuilder; +import me.lucko.spark.sampler.ThreadDumper; +import me.lucko.spark.sampler.ThreadGrouper; +import me.lucko.spark.sampler.TickCounter; + +import java.io.IOException; +import java.util.Set; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.TimeUnit; +import java.util.function.Consumer; + +public class SamplerModule<S> implements CommandModule<S> { + + /** Guards {@link #activeSampler} */ + private final Object[] activeSamplerMutex = new Object[0]; + /** The WarmRoast instance currently running, if any */ + private Sampler activeSampler = null; + + @Override + public void registerCommands(Consumer<Command<S>> consumer) { + consumer.accept(Command.<S>builder() + .aliases("start") + .executor((platform, sender, arguments) -> { + int timeoutSeconds = arguments.intFlag("timeout"); + if (timeoutSeconds != -1 && timeoutSeconds <= 10) { + platform.sendPrefixedMessage(sender, "&cThe specified timeout is not long enough for accurate results to be formed. Please choose a value greater than 10."); + return; + } + + if (timeoutSeconds != -1 && timeoutSeconds < 30) { + platform.sendPrefixedMessage(sender, "&7The accuracy of the output will significantly improve when sampling is able to run for longer periods. Consider setting a timeout value over 30 seconds."); + } + + int intervalMillis = arguments.intFlag("interval"); + if (intervalMillis <= 0) { + intervalMillis = 4; + } + + Set<String> threads = arguments.stringFlag("thread"); + ThreadDumper threadDumper; + if (threads.isEmpty()) { + // use the server thread + threadDumper = platform.getDefaultThreadDumper(); + } else if (threads.contains("*")) { + threadDumper = ThreadDumper.ALL; + } else { + threadDumper = new ThreadDumper.Specific(threads); + } + + ThreadGrouper threadGrouper; + if (arguments.boolFlag("not-combined")) { + threadGrouper = ThreadGrouper.BY_NAME; + } else { + threadGrouper = ThreadGrouper.BY_POOL; + } + + int ticksOver = arguments.intFlag("only-ticks-over"); + TickCounter tickCounter = null; + if (ticksOver != -1) { + try { + tickCounter = platform.newTickCounter(); + } catch (UnsupportedOperationException e) { + platform.sendPrefixedMessage(sender, "&cTick counting is not supported!"); + return; + } + } + + Sampler sampler; + synchronized (this.activeSamplerMutex) { + if (this.activeSampler != null) { + platform.sendPrefixedMessage(sender, "&7An active sampler is already running."); + return; + } + + platform.sendPrefixedMessage("&7Initializing a new profiler, please wait..."); + + SamplerBuilder builder = new SamplerBuilder(); + builder.threadDumper(threadDumper); + builder.threadGrouper(threadGrouper); + if (timeoutSeconds != -1) { + builder.completeAfter(timeoutSeconds, TimeUnit.SECONDS); + } + builder.samplingInterval(intervalMillis); + if (ticksOver != -1) { + builder.ticksOver(ticksOver, tickCounter); + } + sampler = this.activeSampler = builder.start(); + + platform.sendPrefixedMessage("&bProfiler now active!"); + if (timeoutSeconds == -1) { + platform.sendPrefixedMessage("&7Use '/" + platform.getLabel() + " stop' to stop profiling and upload the results."); + } else { + platform.sendPrefixedMessage("&7The results will be automatically returned after the profiler has been 
running for " + timeoutSeconds + " seconds."); + } + } + + CompletableFuture<Sampler> future = sampler.getFuture(); + + // send message if profiling fails + future.whenCompleteAsync((s, throwable) -> { + if (throwable != null) { + platform.sendPrefixedMessage("&cSampling operation failed unexpectedly. Error: " + throwable.toString()); + throwable.printStackTrace(); + } + }); + + // set activeSampler to null when complete. + future.whenCompleteAsync((s, throwable) -> { + synchronized (this.activeSamplerMutex) { + if (sampler == this.activeSampler) { + this.activeSampler = null; + } + } + }); + + // await the result + if (timeoutSeconds != -1) { + future.thenAcceptAsync(s -> { + platform.sendPrefixedMessage("&7The active sampling operation has completed! Uploading results..."); + handleUpload(platform, s); + }); + } + }) + .tabCompleter((platform, sender, arguments) -> { + return null; + }) + .build() + ); + + consumer.accept(Command.<S>builder() + .aliases("info") + .executor((platform, sender, arguments) -> { + synchronized (this.activeSamplerMutex) { + if (this.activeSampler == null) { + platform.sendPrefixedMessage(sender, "&7There isn't an active sampling task running."); + } else { + long timeout = this.activeSampler.getEndTime(); + if (timeout == -1) { + platform.sendPrefixedMessage(sender, "&7There is an active sampler currently running, with no defined timeout."); + } else { + long timeoutDiff = (timeout - System.currentTimeMillis()) / 1000L; + platform.sendPrefixedMessage(sender, "&7There is an active sampler currently running, due to timeout in " + timeoutDiff + " seconds."); + } + + long runningTime = (System.currentTimeMillis() - this.activeSampler.getStartTime()) / 1000L; + platform.sendPrefixedMessage(sender, "&7It has been sampling for " + runningTime + " seconds so far."); + } + } + }) + .tabCompleter((platform, sender, arguments) -> { + return null; + }) + .build() + ); + + consumer.accept(Command.<S>builder() + .aliases("stop", "upload", "paste") + .executor((platform, sender, arguments) -> { + synchronized (this.activeSamplerMutex) { + if (this.activeSampler == null) { + platform.sendPrefixedMessage(sender, "&7There isn't an active sampling task running."); + } else { + this.activeSampler.cancel(); + platform.sendPrefixedMessage("&7The active sampling operation has been stopped! 
Uploading results..."); + handleUpload(platform, this.activeSampler); + this.activeSampler = null; + } + } + }) + .tabCompleter((platform, sender, arguments) -> { + return null; + }) + .build() + ); + + consumer.accept(Command.<S>builder() + .aliases("cancel") + .executor((platform, sender, arguments) -> { + synchronized (this.activeSamplerMutex) { + if (this.activeSampler == null) { + platform.sendPrefixedMessage(sender, "&7There isn't an active sampling task running."); + } else { + this.activeSampler.cancel(); + this.activeSampler = null; + platform.sendPrefixedMessage("&bThe active sampling task has been cancelled."); + } + } + }) + .tabCompleter((platform, sender, arguments) -> { + return null; + }) + .build() + ); + } + + private void handleUpload(SparkPlatform<S> platform, Sampler sampler) { + platform.runAsync(() -> { + byte[] output = sampler.formCompressedDataPayload(); + try { + String pasteId = Bytebin.postCompressedContent(output); + platform.sendPrefixedMessage("&bSampling results:"); + platform.sendLink(SparkPlatform.VIEWER_URL + pasteId); + } catch (IOException e) { + platform.sendPrefixedMessage("&cAn error occurred whilst uploading the results."); + e.printStackTrace(); + } + }); + } +} diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeClientCommandHandler.java b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeClientSparkPlatform.java index 7706b42..d073497 100644 --- a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeClientCommandHandler.java +++ b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeClientSparkPlatform.java @@ -32,10 +32,10 @@ import net.minecraftforge.fml.common.gameevent.TickEvent; import java.util.Collections; import java.util.List; -public class ForgeClientCommandHandler extends ForgeCommandHandler { +public class ForgeClientSparkPlatform extends ForgeSparkPlatform { public static void register() { - ClientCommandHandler.instance.registerCommand(new ForgeClientCommandHandler()); + ClientCommandHandler.instance.registerCommand(new ForgeClientSparkPlatform()); } @Override @@ -44,7 +44,7 @@ public class ForgeClientCommandHandler extends ForgeCommandHandler { } @Override - protected TickCounter newTickCounter() { + public TickCounter newTickCounter() { return new ForgeTickCounter(TickEvent.Type.CLIENT); } diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeServerCommandHandler.java b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeServerSparkPlatform.java index 7f119e0..6b64b95 100644 --- a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeServerCommandHandler.java +++ b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeServerSparkPlatform.java @@ -32,7 +32,7 @@ import net.minecraftforge.fml.common.gameevent.TickEvent; import java.util.Collections; import java.util.List; -public class ForgeServerCommandHandler extends ForgeCommandHandler { +public class ForgeServerSparkPlatform extends ForgeSparkPlatform { @Override protected void broadcast(ITextComponent msg) { @@ -47,7 +47,7 @@ public class ForgeServerCommandHandler extends ForgeCommandHandler { } @Override - protected TickCounter newTickCounter() { + public TickCounter newTickCounter() { return new ForgeTickCounter(TickEvent.Type.SERVER); } diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeCommandHandler.java b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeSparkPlatform.java index baf670d..542c782 100644 --- a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeCommandHandler.java +++ 
b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeSparkPlatform.java @@ -22,7 +22,7 @@ package me.lucko.spark.forge; import com.google.common.util.concurrent.ThreadFactoryBuilder; -import me.lucko.spark.common.CommandHandler; +import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.sampler.ThreadDumper; import net.kyori.text.TextComponent; @@ -47,7 +47,7 @@ import java.util.concurrent.Executors; import javax.annotation.Nullable; @SuppressWarnings("NullableProblems") -public abstract class ForgeCommandHandler extends CommandHandler<ICommandSender> implements ICommand { +public abstract class ForgeSparkPlatform extends SparkPlatform<ICommandSender> implements ICommand { private final ExecutorService worker = Executors.newSingleThreadExecutor( new ThreadFactoryBuilder().setNameFormat("spark-forge-async-worker").build() @@ -67,18 +67,18 @@ public abstract class ForgeCommandHandler extends CommandHandler<ICommandSender> protected abstract void broadcast(ITextComponent msg); @Override - protected void sendMessage(ICommandSender sender, String message) { + public void sendMessage(ICommandSender sender, String message) { sender.sendMessage(colorize(message)); } @Override - protected void sendMessage(String message) { + public void sendMessage(String message) { ITextComponent msg = colorize(message); broadcast(msg); } @Override - protected void sendLink(String url) { + public void sendLink(String url) { TextComponentString msg = new TextComponentString(url); Style style = msg.getStyle(); style.setColor(TextFormatting.GRAY); @@ -89,12 +89,12 @@ public abstract class ForgeCommandHandler extends CommandHandler<ICommandSender> } @Override - protected void runAsync(Runnable r) { + public void runAsync(Runnable r) { worker.execute(r); } @Override - protected ThreadDumper getDefaultThreadDumper() { + public ThreadDumper getDefaultThreadDumper() { return new ThreadDumper.Specific(new long[]{Thread.currentThread().getId()}); } @@ -117,7 +117,7 @@ public abstract class ForgeCommandHandler extends CommandHandler<ICommandSender> return; } - handleCommand(sender, args); + executeCommand(sender, args); } @Override diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/SparkForgeMod.java b/spark-forge/src/main/java/me/lucko/spark/forge/SparkForgeMod.java index fb62718..133ec09 100644 --- a/spark-forge/src/main/java/me/lucko/spark/forge/SparkForgeMod.java +++ b/spark-forge/src/main/java/me/lucko/spark/forge/SparkForgeMod.java @@ -38,13 +38,13 @@ public class SparkForgeMod { @EventHandler public void init(FMLInitializationEvent e) { if (FMLCommonHandler.instance().getSide() == Side.CLIENT) { - ForgeClientCommandHandler.register(); + ForgeClientSparkPlatform.register(); } } @EventHandler public void serverInit(FMLServerStartingEvent e) { - e.registerServerCommand(new ForgeServerCommandHandler()); + e.registerServerCommand(new ForgeServerSparkPlatform()); } } diff --git a/spark-sponge/src/main/java/me/lucko/spark/sponge/SparkSpongePlugin.java b/spark-sponge/src/main/java/me/lucko/spark/sponge/SparkSpongePlugin.java index 7dbd3c7..a9bc99f 100644 --- a/spark-sponge/src/main/java/me/lucko/spark/sponge/SparkSpongePlugin.java +++ b/spark-sponge/src/main/java/me/lucko/spark/sponge/SparkSpongePlugin.java @@ -22,7 +22,7 @@ package me.lucko.spark.sponge; import com.google.inject.Inject; -import me.lucko.spark.common.CommandHandler; +import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.sampler.ThreadDumper; import me.lucko.spark.sampler.TickCounter; @@ -61,7 +61,7 @@ import 
javax.annotation.Nullable; ) public class SparkSpongePlugin implements CommandCallable { - private final CommandHandler<CommandSource> commandHandler = new CommandHandler<CommandSource>() { + private final SparkPlatform<CommandSource> sparkPlatform = new SparkPlatform<CommandSource>() { private Text colorize(String message) { return TextSerializers.FORMATTING_CODE.deserialize(message); } @@ -76,28 +76,28 @@ public class SparkSpongePlugin implements CommandCallable { } @Override - protected String getVersion() { + public String getVersion() { return SparkSpongePlugin.class.getAnnotation(Plugin.class).version(); } @Override - protected String getLabel() { + public String getLabel() { return "spark"; } @Override - protected void sendMessage(CommandSource sender, String message) { + public void sendMessage(CommandSource sender, String message) { sender.sendMessage(colorize(message)); } @Override - protected void sendMessage(String message) { + public void sendMessage(String message) { Text msg = colorize(message); broadcast(msg); } @Override - protected void sendLink(String url) { + public void sendLink(String url) { try { Text msg = Text.builder(url) .color(TextColors.GRAY) @@ -110,17 +110,17 @@ public class SparkSpongePlugin implements CommandCallable { } @Override - protected void runAsync(Runnable r) { + public void runAsync(Runnable r) { asyncExecutor.execute(r); } @Override - protected ThreadDumper getDefaultThreadDumper() { + public ThreadDumper getDefaultThreadDumper() { return new ThreadDumper.Specific(new long[]{Thread.currentThread().getId()}); } @Override - protected TickCounter newTickCounter() { + public TickCounter newTickCounter() { return new SpongeTickCounter(SparkSpongePlugin.this); } }; @@ -144,7 +144,7 @@ public class SparkSpongePlugin implements CommandCallable { return CommandResult.empty(); } - commandHandler.handleCommand(source, arguments.split(" ")); + sparkPlatform.executeCommand(source, arguments.split(" ")); return CommandResult.empty(); } diff --git a/spark-velocity/src/main/java/me/lucko/spark/velocity/SparkVelocityPlugin.java b/spark-velocity/src/main/java/me/lucko/spark/velocity/SparkVelocityPlugin.java index 3d849c6..4cec138 100644 --- a/spark-velocity/src/main/java/me/lucko/spark/velocity/SparkVelocityPlugin.java +++ b/spark-velocity/src/main/java/me/lucko/spark/velocity/SparkVelocityPlugin.java @@ -29,7 +29,7 @@ import com.velocitypowered.api.plugin.Plugin; import com.velocitypowered.api.proxy.Player; import com.velocitypowered.api.proxy.ProxyServer; -import me.lucko.spark.common.CommandHandler; +import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.sampler.ThreadDumper; import me.lucko.spark.sampler.TickCounter; @@ -50,7 +50,7 @@ public class SparkVelocityPlugin { private final ProxyServer proxy; - private final CommandHandler<CommandSource> commandHandler = new CommandHandler<CommandSource>() { + private final SparkPlatform<CommandSource> sparkPlatform = new SparkPlatform<CommandSource>() { @SuppressWarnings("deprecation") private TextComponent colorize(String message) { return ComponentSerializers.LEGACY.deserialize(message, '&'); @@ -66,27 +66,27 @@ public class SparkVelocityPlugin { } @Override - protected String getVersion() { + public String getVersion() { return SparkVelocityPlugin.class.getAnnotation(Plugin.class).version(); } @Override - protected String getLabel() { + public String getLabel() { return "sparkvelocity"; } @Override - protected void sendMessage(CommandSource sender, String message) { + public void 
sendMessage(CommandSource sender, String message) { sender.sendMessage(colorize(message)); } @Override - protected void sendMessage(String message) { + public void sendMessage(String message) { broadcast(colorize(message)); } @Override - protected void sendLink(String url) { + public void sendLink(String url) { TextComponent msg = TextComponent.builder(url) .color(TextColor.GRAY) .clickEvent(new ClickEvent(ClickEvent.Action.OPEN_URL, url)) @@ -95,17 +95,17 @@ public class SparkVelocityPlugin { return; } @Override - protected void runAsync(Runnable r) { + public void runAsync(Runnable r) { SparkVelocityPlugin.this.proxy.getScheduler().buildTask(SparkVelocityPlugin.this, r).schedule(); } @Override - protected ThreadDumper getDefaultThreadDumper() { + public ThreadDumper getDefaultThreadDumper() { return ThreadDumper.ALL; } @Override - protected TickCounter newTickCounter() { + public TickCounter newTickCounter() { throw new UnsupportedOperationException(); } }; @@ -124,7 +124,7 @@ public class SparkVelocityPlugin { return; } - SparkVelocityPlugin.this.commandHandler.handleCommand(sender, args); + SparkVelocityPlugin.this.sparkPlatform.executeCommand(sender, args); }, "sparkvelocity", "vprofiler"); } }
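For reference, a short sketch of how the new Arguments parser behaves, based on the implementation added above. The input tokens are made up for illustration; the behaviour (long-form "--flag" tokens, multi-word values re-joined with spaces, -1 as the "not specified" sentinel) follows directly from the code in this diff. One behavioural difference from the old CommandHandler is worth noting: the old FLAG_REGEX also accepted single-letter short flags (-d, -i, -t, -o), while the new pattern only matches --long flags.

    import me.lucko.spark.common.command.Arguments;

    import java.util.Set;

    // Standalone demo of the flag parser; in practice the tokens come from the platform command handler.
    public class ArgumentsDemo {
        public static void main(String[] rawInput) {
            String[] args = {"--timeout", "60", "--thread", "Server", "thread", "--not-combined"};
            Arguments arguments = new Arguments(args);

            int timeout = arguments.intFlag("timeout");                // 60
            Set<String> threads = arguments.stringFlag("thread");      // ["Server thread"] - value tokens re-joined with spaces
            boolean notCombined = arguments.boolFlag("not-combined");  // true: the flag is present, no value required
            int interval = arguments.intFlag("interval");              // -1, meaning the flag was not specified

            System.out.println(timeout + " " + threads + " " + notCombined + " " + interval);
        }
    }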