-rw-r--r--  build.gradle                                                                          |   4
-rw-r--r--  spark-bukkit/src/main/java/me/lucko/spark/bukkit/CommandMapUtil.java                  | 165
-rw-r--r--  spark-bukkit/src/main/java/me/lucko/spark/bukkit/SparkBukkitPlugin.java               |  18
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java                   |   7
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/command/CommandResponseHandler.java  |   2
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java    | 182
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/command/modules/MemoryModule.java    |   8
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/command/modules/MonitoringModule.java |  51
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java   |   6
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDump.java (renamed from spark-common/src/main/java/me/lucko/spark/common/memory/HeapDump.java) |   2
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java (renamed from spark-common/src/main/java/me/lucko/spark/common/memory/HeapDumpSummary.java) |   2
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/monitor/memory/GarbageCollectionMonitor.java (renamed from spark-common/src/main/java/me/lucko/spark/common/monitor/gc/GarbageCollectionMonitor.java) |   2
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TickMonitor.java        |  10
13 files changed, 387 insertions, 72 deletions
diff --git a/build.gradle b/build.gradle
index f262c3f..c5130a1 100644
--- a/build.gradle
+++ b/build.gradle
@@ -1,6 +1,6 @@
allprojects {
group = 'me.lucko'
- version = '1.1-SNAPSHOT'
+ version = '1.3-SNAPSHOT'
}
subprojects {
@@ -8,7 +8,7 @@ subprojects {
apply plugin: 'maven'
ext {
- pluginVersion = '1.2.0'
+ pluginVersion = '1.3.0'
pluginDescription = 'spark is a performance profiling plugin based on sk89q\'s WarmRoast profiler'
}
diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/CommandMapUtil.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/CommandMapUtil.java
new file mode 100644
index 0000000..08d65df
--- /dev/null
+++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/CommandMapUtil.java
@@ -0,0 +1,165 @@
+/*
+ * This file is part of helper, licensed under the MIT License.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+package me.lucko.spark.bukkit;
+
+import com.google.common.base.Preconditions;
+import org.bukkit.Bukkit;
+import org.bukkit.command.Command;
+import org.bukkit.command.CommandExecutor;
+import org.bukkit.command.CommandMap;
+import org.bukkit.command.PluginCommand;
+import org.bukkit.command.SimpleCommandMap;
+import org.bukkit.command.TabCompleter;
+import org.bukkit.plugin.Plugin;
+import org.bukkit.plugin.SimplePluginManager;
+
+import java.lang.reflect.Constructor;
+import java.lang.reflect.Field;
+import java.util.Iterator;
+import java.util.Map;
+
+/**
+ * Utility for interacting with the server's {@link CommandMap} instance.
+ */
+public final class CommandMapUtil {
+
+ private static final Constructor<PluginCommand> COMMAND_CONSTRUCTOR;
+ private static final Field COMMAND_MAP_FIELD;
+ private static final Field KNOWN_COMMANDS_FIELD;
+
+ static {
+ Constructor<PluginCommand> commandConstructor;
+ try {
+ commandConstructor = PluginCommand.class.getDeclaredConstructor(String.class, Plugin.class);
+ commandConstructor.setAccessible(true);
+ } catch (NoSuchMethodException e) {
+ throw new RuntimeException(e);
+ }
+ COMMAND_CONSTRUCTOR = commandConstructor;
+
+ Field commandMapField;
+ try {
+ commandMapField = SimplePluginManager.class.getDeclaredField("commandMap");
+ commandMapField.setAccessible(true);
+ } catch (NoSuchFieldException e) {
+ throw new RuntimeException(e);
+ }
+ COMMAND_MAP_FIELD = commandMapField;
+
+ Field knownCommandsField;
+ try {
+ knownCommandsField = SimpleCommandMap.class.getDeclaredField("knownCommands");
+ knownCommandsField.setAccessible(true);
+ } catch (NoSuchFieldException e) {
+ throw new RuntimeException(e);
+ }
+ KNOWN_COMMANDS_FIELD = knownCommandsField;
+ }
+
+ private static CommandMap getCommandMap() {
+ try {
+ return (CommandMap) COMMAND_MAP_FIELD.get(Bukkit.getServer().getPluginManager());
+ } catch (Exception e) {
+ throw new RuntimeException("Could not get CommandMap", e);
+ }
+ }
+
+ private static Map<String, Command> getKnownCommandMap() {
+ try {
+ //noinspection unchecked
+ return (Map<String, Command>) KNOWN_COMMANDS_FIELD.get(getCommandMap());
+ } catch (Exception e) {
+ throw new RuntimeException("Could not get known commands map", e);
+ }
+ }
+
+ /**
+ * Registers a CommandExecutor with the server
+ *
+ * @param plugin the plugin instance
+ * @param command the command instance
+ * @param aliases the command aliases
+ * @param <T> the command executor class type
+ * @return the command executor
+ */
+ public static <T extends CommandExecutor> T registerCommand(Plugin plugin, T command, String... aliases) {
+ Preconditions.checkArgument(aliases.length != 0, "No aliases");
+ for (String alias : aliases) {
+ try {
+ PluginCommand cmd = COMMAND_CONSTRUCTOR.newInstance(alias, plugin);
+
+ getCommandMap().register(plugin.getDescription().getName(), cmd);
+ getKnownCommandMap().put(plugin.getDescription().getName().toLowerCase() + ":" + alias.toLowerCase(), cmd);
+ getKnownCommandMap().put(alias.toLowerCase(), cmd);
+ cmd.setLabel(alias.toLowerCase());
+
+ cmd.setExecutor(command);
+ if (command instanceof TabCompleter) {
+ cmd.setTabCompleter((TabCompleter) command);
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+ return command;
+ }
+
+ /**
+ * Unregisters a CommandExecutor with the server
+ *
+ * @param command the command instance
+ * @param <T> the command executor class type
+ * @return the command executor
+ */
+ public static <T extends CommandExecutor> T unregisterCommand(T command) {
+ CommandMap map = getCommandMap();
+ try {
+ //noinspection unchecked
+ Map<String, Command> knownCommands = (Map<String, Command>) KNOWN_COMMANDS_FIELD.get(map);
+
+ Iterator<Command> iterator = knownCommands.values().iterator();
+ while (iterator.hasNext()) {
+ Command cmd = iterator.next();
+ if (cmd instanceof PluginCommand) {
+ CommandExecutor executor = ((PluginCommand) cmd).getExecutor();
+ if (command == executor) {
+ cmd.unregister(map);
+ iterator.remove();
+ }
+ }
+ }
+ } catch (Exception e) {
+ throw new RuntimeException("Could not unregister command", e);
+ }
+
+ return command;
+ }
+
+ private CommandMapUtil() {
+ throw new UnsupportedOperationException("This class cannot be instantiated");
+ }
+
+}
\ No newline at end of file
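
For orientation only (not part of the patch): a minimal sketch of how the new CommandMapUtil helper is meant to be consumed from a Bukkit plugin. The plugin class, package, alias and reply text below are hypothetical; the real caller is SparkBukkitPlugin in the next file.

package me.lucko.spark.bukkit.example; // hypothetical package, for illustration only

import me.lucko.spark.bukkit.CommandMapUtil;
import org.bukkit.command.CommandExecutor;
import org.bukkit.plugin.java.JavaPlugin;

public class ExamplePlugin extends JavaPlugin {
    private CommandExecutor pingExecutor;

    @Override
    public void onEnable() {
        // CommandExecutor is a functional interface, so a lambda is sufficient here
        this.pingExecutor = (sender, command, label, args) -> {
            sender.sendMessage("pong");
            return true;
        };
        // registers the executor under "ping" and "<plugin-name>:ping" in the server's CommandMap
        CommandMapUtil.registerCommand(this, this.pingExecutor, "ping");
    }

    @Override
    public void onDisable() {
        // removes any PluginCommand that points at this executor from the command map again
        CommandMapUtil.unregisterCommand(this.pingExecutor);
    }
}
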
diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/SparkBukkitPlugin.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/SparkBukkitPlugin.java
index 1d6160a..1fcb571 100644
--- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/SparkBukkitPlugin.java
+++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/SparkBukkitPlugin.java
@@ -22,6 +22,8 @@ package me.lucko.spark.bukkit;
import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.SparkPlugin;
+import me.lucko.spark.common.command.CommandResponseHandler;
+import me.lucko.spark.common.monitor.tick.TpsCalculator;
import me.lucko.spark.common.sampler.ThreadDumper;
import me.lucko.spark.common.sampler.TickCounter;
import org.bukkit.ChatColor;
@@ -42,6 +44,22 @@ public class SparkBukkitPlugin extends JavaPlugin implements SparkPlugin<Command
@Override
public void onEnable() {
this.platform.enable();
+
+ // override Spigot's TPS command with our own.
+ if (getConfig().getBoolean("override-tps-command", true)) {
+ CommandMapUtil.registerCommand(this, (sender, command, label, args) -> {
+ if (!sender.hasPermission("spark") && !sender.hasPermission("spark.tps") && !sender.hasPermission("bukkit.command.tps")) {
+ sender.sendMessage(ChatColor.RED + "You do not have permission to use this command.");
+ return true;
+ }
+
+ CommandResponseHandler<CommandSender> resp = new CommandResponseHandler<>(this.platform, sender);
+ TpsCalculator tpsCalculator = this.platform.getTpsCalculator();
+ resp.replyPrefixed("TPS from last 5s, 10s, 1m, 5m, 15m:");
+ resp.replyPrefixed(" " + tpsCalculator.toFormattedString());
+ return true;
+ }, "tps");
+ }
}
@Override
diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
index 8eb4565..7a33c39 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
@@ -24,8 +24,8 @@ import com.google.common.collect.ImmutableList;
import me.lucko.spark.common.command.Arguments;
import me.lucko.spark.common.command.Command;
import me.lucko.spark.common.command.CommandResponseHandler;
+import me.lucko.spark.common.command.modules.HealthModule;
import me.lucko.spark.common.command.modules.MemoryModule;
-import me.lucko.spark.common.command.modules.MonitoringModule;
import me.lucko.spark.common.command.modules.SamplerModule;
import me.lucko.spark.common.command.modules.TickMonitoringModule;
import me.lucko.spark.common.command.tabcomplete.CompletionSupplier;
@@ -66,7 +66,7 @@ public class SparkPlatform<S> {
ImmutableList.Builder<Command<S>> commandsBuilder = ImmutableList.builder();
new SamplerModule<S>().registerCommands(commandsBuilder::add);
- new MonitoringModule<S>().registerCommands(commandsBuilder::add);
+ new HealthModule<S>().registerCommands(commandsBuilder::add);
new TickMonitoringModule<S>().registerCommands(commandsBuilder::add);
new MemoryModule<S>().registerCommands(commandsBuilder::add);
this.commands = commandsBuilder.build();
@@ -115,6 +115,7 @@ public class SparkPlatform<S> {
try {
command.executor().execute(this, sender, resp, new Arguments(rawArgs));
} catch (IllegalArgumentException e) {
+ e.printStackTrace();
resp.replyPrefixed("&c" + e.getMessage());
}
return;
@@ -147,7 +148,7 @@ public class SparkPlatform<S> {
private void sendUsage(CommandResponseHandler<S> sender) {
sender.replyPrefixed("&fspark &7v" + getPlugin().getVersion());
for (Command<S> command : this.commands) {
- sender.reply("&b&l> &7/" + getPlugin().getLabel() + " " + command.aliases().get(0));
+ sender.reply("&6&l> &7/" + getPlugin().getLabel() + " " + command.aliases().get(0));
for (Command.ArgumentInfo arg : command.arguments()) {
if (arg.requiresParameter()) {
sender.reply(" &8[&7--" + arg.argumentName() + "&8 <" + arg.parameterDescription() + ">]");
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/CommandResponseHandler.java b/spark-common/src/main/java/me/lucko/spark/common/command/CommandResponseHandler.java
index a5a7391..0df2950 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/CommandResponseHandler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/CommandResponseHandler.java
@@ -28,7 +28,7 @@ import java.util.function.Consumer;
public class CommandResponseHandler<S> {
/** The prefix used in all messages */
- private static final String PREFIX = "&8[&fspark&8] &7";
+ private static final String PREFIX = "&8[&e&l⚡&8] &7";
private final SparkPlatform<S> platform;
private final S sender;
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java
new file mode 100644
index 0000000..8e4178a
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java
@@ -0,0 +1,182 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.command.modules;
+
+import com.google.common.base.Strings;
+import me.lucko.spark.common.command.Command;
+import me.lucko.spark.common.command.CommandModule;
+import me.lucko.spark.common.command.tabcomplete.TabCompleter;
+import me.lucko.spark.common.monitor.tick.TpsCalculator;
+
+import java.lang.management.ManagementFactory;
+import java.lang.management.MemoryMXBean;
+import java.lang.management.MemoryPoolMXBean;
+import java.lang.management.MemoryType;
+import java.lang.management.MemoryUsage;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.function.Consumer;
+
+public class HealthModule<S> implements CommandModule<S> {
+
+ @Override
+ public void registerCommands(Consumer<Command<S>> consumer) {
+ consumer.accept(Command.<S>builder()
+ .aliases("tps")
+ .executor((platform, sender, resp, arguments) -> {
+ TpsCalculator tpsCalculator = platform.getTpsCalculator();
+ if (tpsCalculator != null) {
+ resp.replyPrefixed("TPS from last 5s, 10s, 1m, 5m, 15m:");
+ resp.replyPrefixed(" " + tpsCalculator.toFormattedString());
+ } else {
+ resp.replyPrefixed("Not supported!");
+ }
+ })
+ .tabCompleter(Command.TabCompleter.empty())
+ .build()
+ );
+
+ consumer.accept(Command.<S>builder()
+ .aliases("healthreport", "health")
+ .argumentUsage("memory", null)
+ .executor((platform, sender, resp, arguments) -> {
+ resp.replyPrefixed("&7Generating server health report...");
+ platform.getPlugin().runAsync(() -> {
+ List<String> report = new ArrayList<>(15);
+ report.add("");
+
+ TpsCalculator tpsCalculator = platform.getTpsCalculator();
+ if (tpsCalculator != null) {
+ report.add("&8&l>&6 TPS from last 5s, 10s, 1m, 5m, 15m:");
+ report.add(" " + tpsCalculator.toFormattedString());
+ report.add("");
+ }
+
+ MemoryMXBean memoryMXBean = ManagementFactory.getMemoryMXBean();
+
+ MemoryUsage heapUsage = memoryMXBean.getHeapMemoryUsage();
+ report.add("&8&l>&6 Memory usage: ");
+ report.add(" &f" + formatBytes(heapUsage.getUsed()) + " &7/ &f" + formatBytes(heapUsage.getMax()) +
+ " &7(&a" + percent(heapUsage.getUsed(), heapUsage.getMax()) + "&7)");
+ report.add(" " + generateMemoryUsageDiagram(heapUsage, 40));
+ report.add("");
+
+ if (arguments.boolFlag("memory")) {
+ MemoryUsage nonHeapUsage = memoryMXBean.getNonHeapMemoryUsage();
+ report.add("&8&l>&6 Non-heap memory usage: ");
+ report.add(" &f" + formatBytes(nonHeapUsage.getUsed()));
+ report.add("");
+
+ List<MemoryPoolMXBean> memoryPoolMXBeans = ManagementFactory.getMemoryPoolMXBeans();
+ for (MemoryPoolMXBean memoryPool : memoryPoolMXBeans) {
+ if (memoryPool.getType() != MemoryType.HEAP) {
+ continue;
+ }
+
+ MemoryUsage usage = memoryPool.getUsage();
+ MemoryUsage collectionUsage = memoryPool.getCollectionUsage();
+
+ if (usage.getMax() == -1) {
+ usage = new MemoryUsage(usage.getInit(), usage.getUsed(), usage.getCommitted(), usage.getCommitted());
+ }
+
+ report.add("&8&l>&6 " + memoryPool.getName() + " pool usage: ");
+ report.add(" &f" + formatBytes(usage.getUsed()) + " &7/ &f" + formatBytes(usage.getMax()) +
+ " &7(&a" + percent(usage.getUsed(), usage.getMax()) + "&7)");
+ report.add(" " + generateMemoryPoolDiagram(usage, collectionUsage,40));
+
+
+ if (collectionUsage != null) {
+ report.add(" &c- &7Usage at last GC: &f" + formatBytes(collectionUsage.getUsed()));
+ }
+ report.add("");
+ }
+ }
+
+ report.forEach(resp::reply);
+ });
+ })
+ .tabCompleter((platform, sender, arguments) -> TabCompleter.completeForOpts(arguments, "--memory"))
+ .build()
+ );
+ }
+
+ private static String percent(double value, double max) {
+ double percent = (value * 100d) / max;
+ return (int) percent + "%";
+ }
+
+ private static String generateMemoryUsageDiagram(MemoryUsage usage, int length) {
+ double used = usage.getUsed();
+ double committed = usage.getCommitted();
+ double max = usage.getMax();
+
+ int usedChars = (int) ((used * length) / max);
+ int committedChars = (int) ((committed * length) / max);
+
+ String line = "&7" + Strings.repeat("/", usedChars);
+ if (committedChars > usedChars) {
+ line += Strings.repeat(" ", (committedChars - usedChars) - 1) + "&e|";
+ }
+ if (length > committedChars) {
+ line += Strings.repeat(" ", (length - committedChars));
+ }
+
+ return "&8[" + line + "&8]";
+ }
+
+ private static String generateMemoryPoolDiagram(MemoryUsage usage, MemoryUsage collectionUsage, int length) {
+ double used = usage.getUsed();
+ double collectionUsed = used;
+ if (collectionUsage != null) {
+ collectionUsed = collectionUsage.getUsed();
+ }
+ double committed = usage.getCommitted();
+ double max = usage.getMax();
+
+ int usedChars = (int) ((used * length) / max);
+ int collectionUsedChars = (int) ((collectionUsed * length) / max);
+ int committedChars = (int) ((committed * length) / max);
+
+ String line = "&7" + Strings.repeat("/", collectionUsedChars);
+ if (usedChars > collectionUsedChars) {
+ line += "&c|&7" + Strings.repeat("/", (usedChars - collectionUsedChars) - 1);
+ }
+ if (committedChars > usedChars) {
+ line += Strings.repeat(" ", (committedChars - usedChars) - 1) + "&e|";
+ }
+ if (length > committedChars) {
+ line += Strings.repeat(" ", (length - committedChars));
+ }
+
+ return "&8[" + line + "&8]";
+ }
+
+ private static String formatBytes(long bytes) {
+ if (bytes == 0) {
+ return "0 bytes";
+ }
+ String[] sizes = new String[]{"bytes", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"};
+ int sizeIndex = (int) (Math.log(bytes) / Math.log(1024));
+ return String.format("%.1f", bytes / Math.pow(1024, sizeIndex)) + " " + sizes[sizeIndex];
+ }
+
+}
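
As a rough, illustrative worked example of the arithmetic performed by the helpers above (the values are made up; percent, formatBytes and generateMemoryUsageDiagram are private to HealthModule, so this standalone sketch only mirrors their formulas):

// Standalone sketch mirroring HealthModule's formatting maths for one sample heap reading.
public final class HealthReportMathExample {
    public static void main(String[] args) {
        long used = 512L * 1024 * 1024;       // 512 MB in use
        long max = 2L * 1024 * 1024 * 1024;   // 2 GB max heap

        // percent(used, max): (512 MB * 100) / 2 GB = 25.0 -> "25%"
        System.out.println((int) ((used * 100d) / max) + "%");

        // formatBytes(used): log(used)/log(1024) = 2.9 -> sizeIndex 2 -> "512.0 MB"
        String[] sizes = {"bytes", "KB", "MB", "GB", "TB"};
        int sizeIndex = (int) (Math.log(used) / Math.log(1024));
        System.out.println(String.format("%.1f", used / Math.pow(1024, sizeIndex)) + " " + sizes[sizeIndex]);

        // generateMemoryUsageDiagram with length 40: (used * 40) / max = 10 '/' characters
        System.out.println((int) ((used * 40d) / max) + " of 40 bar characters filled");
    }
}
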
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/MemoryModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/MemoryModule.java
index 2cb2e07..583b1b6 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/MemoryModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/MemoryModule.java
@@ -24,8 +24,8 @@ import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.command.Command;
import me.lucko.spark.common.command.CommandModule;
import me.lucko.spark.common.command.tabcomplete.TabCompleter;
-import me.lucko.spark.common.memory.HeapDump;
-import me.lucko.spark.common.memory.HeapDumpSummary;
+import me.lucko.spark.common.heapdump.HeapDump;
+import me.lucko.spark.common.heapdump.HeapDumpSummary;
import okhttp3.MediaType;
import java.io.IOException;
@@ -64,7 +64,7 @@ public class MemoryModule<S> implements CommandModule<S> {
byte[] output = heapDump.formCompressedDataPayload();
try {
String key = SparkPlatform.BYTEBIN_CLIENT.postContent(output, JSON_TYPE, false).key();
- resp.broadcastPrefixed("&bHeap dump summmary output:");
+ resp.broadcastPrefixed("&6Heap dump summmary output:");
resp.broadcastLink(SparkPlatform.VIEWER_URL + key);
} catch (IOException e) {
resp.broadcastPrefixed("&cAn error occurred whilst uploading the data.");
@@ -108,7 +108,7 @@ public class MemoryModule<S> implements CommandModule<S> {
return;
}
- resp.broadcastPrefixed("&bHeap dump written to: " + file.toString());
+ resp.broadcastPrefixed("&6Heap dump written to: " + file.toString());
});
})
.tabCompleter((platform, sender, arguments) -> TabCompleter.completeForOpts(arguments, "--run-gc-before", "--include-non-live"))
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/MonitoringModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/MonitoringModule.java
deleted file mode 100644
index b543e1d..0000000
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/MonitoringModule.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * This file is part of spark.
- *
- * Copyright (c) lucko (Luck) <luck@lucko.me>
- * Copyright (c) contributors
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-package me.lucko.spark.common.command.modules;
-
-import me.lucko.spark.common.command.Command;
-import me.lucko.spark.common.command.CommandModule;
-import me.lucko.spark.common.monitor.tick.TpsCalculator;
-
-import java.util.function.Consumer;
-
-public class MonitoringModule<S> implements CommandModule<S> {
-
- @Override
- public void registerCommands(Consumer<Command<S>> consumer) {
- consumer.accept(Command.<S>builder()
- .aliases("tps")
- .executor((platform, sender, resp, arguments) -> {
- TpsCalculator tpsCalculator = platform.getTpsCalculator();
- if (tpsCalculator == null) {
- resp.replyPrefixed("TPS data is not available.");
- return;
- }
-
- String formattedTpsString = tpsCalculator.toFormattedString();
- resp.replyPrefixed("TPS from last 5s, 10s, 1m, 5m, 15m");
- resp.replyPrefixed(formattedTpsString);
- })
- .tabCompleter(Command.TabCompleter.empty())
- .build()
- );
- }
-
-}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
index a0f171c..99f8007 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
@@ -137,7 +137,7 @@ public class SamplerModule<S> implements CommandModule<S> {
}
sampler = this.activeSampler = builder.start();
- resp.broadcastPrefixed("&bProfiler now active!");
+ resp.broadcastPrefixed("&6Profiler now active!");
if (timeoutSeconds == -1) {
resp.broadcastPrefixed("&7Use '/" + platform.getPlugin().getLabel() + " stop' to stop profiling and upload the results.");
} else {
@@ -234,7 +234,7 @@ public class SamplerModule<S> implements CommandModule<S> {
} else {
this.activeSampler.cancel();
this.activeSampler = null;
- resp.broadcastPrefixed("&bThe active sampling task has been cancelled.");
+ resp.broadcastPrefixed("&6The active sampling task has been cancelled.");
}
}
})
@@ -247,7 +247,7 @@ public class SamplerModule<S> implements CommandModule<S> {
byte[] output = sampler.formCompressedDataPayload();
try {
String key = SparkPlatform.BYTEBIN_CLIENT.postContent(output, JSON_TYPE, false).key();
- resp.broadcastPrefixed("&bSampling results:");
+ resp.broadcastPrefixed("&6Sampling results:");
resp.broadcastLink(SparkPlatform.VIEWER_URL + key);
} catch (IOException e) {
resp.broadcastPrefixed("&cAn error occurred whilst uploading the results.");
diff --git a/spark-common/src/main/java/me/lucko/spark/common/memory/HeapDump.java b/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDump.java
index 6200c17..189f89e 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/memory/HeapDump.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDump.java
@@ -18,7 +18,7 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-package me.lucko.spark.common.memory;
+package me.lucko.spark.common.heapdump;
import javax.management.JMX;
import javax.management.MBeanServer;
diff --git a/spark-common/src/main/java/me/lucko/spark/common/memory/HeapDumpSummary.java b/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java
index f3f62db..c95e9ab 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/memory/HeapDumpSummary.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java
@@ -18,7 +18,7 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-package me.lucko.spark.common.memory;
+package me.lucko.spark.common.heapdump;
import com.google.gson.stream.JsonWriter;
import me.lucko.spark.common.util.TypeDescriptors;
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/gc/GarbageCollectionMonitor.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/GarbageCollectionMonitor.java
index 93a5fd8..d750b1a 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/monitor/gc/GarbageCollectionMonitor.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/GarbageCollectionMonitor.java
@@ -18,7 +18,7 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-package me.lucko.spark.common.monitor.gc;
+package me.lucko.spark.common.monitor.memory;
import com.sun.management.GarbageCollectionNotificationInfo;
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TickMonitor.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TickMonitor.java
index 034e876..5693df6 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TickMonitor.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TickMonitor.java
@@ -21,7 +21,7 @@
package me.lucko.spark.common.monitor.tick;
import com.sun.management.GarbageCollectionNotificationInfo;
-import me.lucko.spark.common.monitor.gc.GarbageCollectionMonitor;
+import me.lucko.spark.common.monitor.memory.GarbageCollectionMonitor;
import me.lucko.spark.common.sampler.TickCounter;
import java.text.DecimalFormat;
@@ -91,7 +91,7 @@ public abstract class TickMonitor implements TickCounter.TickTask, GarbageCollec
// move onto the next state
if (this.averageTickTime.getCount() >= 120) {
- sendMessage("&bAnalysis is now complete.");
+ sendMessage("&6Analysis is now complete.");
sendMessage("&f> &7Max: " + df.format(this.averageTickTime.getMax()) + "ms");
sendMessage("&f> &7Min: " + df.format(this.averageTickTime.getMin()) + "ms");
sendMessage("&f> &7Avg: " + df.format(this.averageTickTime.getAverage()) + "ms");
@@ -111,8 +111,8 @@ public abstract class TickMonitor implements TickCounter.TickTask, GarbageCollec
double percentageChange = (increase * 100d) / this.avg;
if (percentageChange > this.percentageChangeThreshold) {
- sendMessage("&7Tick &8#" + counter.getCurrentTick() + " &7lasted &b" + df.format(diff) +
- "&7 ms. (&b" + df.format(percentageChange) + "% &7increase from average)");
+ sendMessage("&7Tick &8#" + counter.getCurrentTick() + " &7lasted &6" + df.format(diff) +
+ "&7 ms. (&6" + df.format(percentageChange) + "% &7increase from average)");
}
}
}
@@ -132,7 +132,7 @@ public abstract class TickMonitor implements TickCounter.TickTask, GarbageCollec
gcType = "Old Gen GC";
}
- sendMessage("&7Tick &8#" + this.tickCounter.getCurrentTick() + " &7included &4GC &7lasting &b" +
+ sendMessage("&7Tick &8#" + this.tickCounter.getCurrentTick() + " &7included &4GC &7lasting &6" +
df.format(data.getGcInfo().getDuration()) + "&7 ms. (type = " + gcType + ")");
}