author     Luck <git@lucko.me>  2019-04-17 00:58:28 +0100
committer  Luck <git@lucko.me>  2019-04-17 00:58:28 +0100
commit     394c59d375811e4b1e0f23a528ef85d8c4d0e5a0 (patch)
tree       2e9486f564a2a518fc938dfc0d5b58ae067789f1 /spark-common/src
parent     ecd4cec8545460a4fc4ca65b911c2503a00cd8e7 (diff)
download   spark-394c59d375811e4b1e0f23a528ef85d8c4d0e5a0.tar.gz
           spark-394c59d375811e4b1e0f23a528ef85d8c4d0e5a0.tar.bz2
           spark-394c59d375811e4b1e0f23a528ef85d8c4d0e5a0.zip
Add /spark health command to report tps / memory usage stats
Diffstat (limited to 'spark-common/src')
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java | 7
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/command/CommandResponseHandler.java | 2
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java | 182
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/command/modules/MemoryModule.java | 8
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/command/modules/MonitoringModule.java | 51
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java | 6
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDump.java (renamed from spark-common/src/main/java/me/lucko/spark/common/memory/HeapDump.java) | 2
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java (renamed from spark-common/src/main/java/me/lucko/spark/common/memory/HeapDumpSummary.java) | 2
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/monitor/memory/GarbageCollectionMonitor.java (renamed from spark-common/src/main/java/me/lucko/spark/common/monitor/gc/GarbageCollectionMonitor.java) | 2
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TickMonitor.java | 10
10 files changed, 202 insertions, 70 deletions
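
For reference, the commands affected by this commit, taken from the aliases and the --memory flag registered in HealthModule below (the "spark" label is the usual default and may differ per platform):

  /spark tps                 # TPS readout over the 5s, 10s, 1m, 5m, 15m windows
  /spark health              # TPS plus heap memory usage report
  /spark health --memory     # additionally prints non-heap and per-pool usage
  /spark healthreport        # alias for /spark health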
diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
index 8eb4565..7a33c39 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
@@ -24,8 +24,8 @@ import com.google.common.collect.ImmutableList;
import me.lucko.spark.common.command.Arguments;
import me.lucko.spark.common.command.Command;
import me.lucko.spark.common.command.CommandResponseHandler;
+import me.lucko.spark.common.command.modules.HealthModule;
import me.lucko.spark.common.command.modules.MemoryModule;
-import me.lucko.spark.common.command.modules.MonitoringModule;
import me.lucko.spark.common.command.modules.SamplerModule;
import me.lucko.spark.common.command.modules.TickMonitoringModule;
import me.lucko.spark.common.command.tabcomplete.CompletionSupplier;
@@ -66,7 +66,7 @@ public class SparkPlatform<S> {
ImmutableList.Builder<Command<S>> commandsBuilder = ImmutableList.builder();
new SamplerModule<S>().registerCommands(commandsBuilder::add);
- new MonitoringModule<S>().registerCommands(commandsBuilder::add);
+ new HealthModule<S>().registerCommands(commandsBuilder::add);
new TickMonitoringModule<S>().registerCommands(commandsBuilder::add);
new MemoryModule<S>().registerCommands(commandsBuilder::add);
this.commands = commandsBuilder.build();
@@ -115,6 +115,7 @@ public class SparkPlatform<S> {
try {
command.executor().execute(this, sender, resp, new Arguments(rawArgs));
} catch (IllegalArgumentException e) {
+ e.printStackTrace();
resp.replyPrefixed("&c" + e.getMessage());
}
return;
@@ -147,7 +148,7 @@ public class SparkPlatform<S> {
private void sendUsage(CommandResponseHandler<S> sender) {
sender.replyPrefixed("&fspark &7v" + getPlugin().getVersion());
for (Command<S> command : this.commands) {
- sender.reply("&b&l> &7/" + getPlugin().getLabel() + " " + command.aliases().get(0));
+ sender.reply("&6&l> &7/" + getPlugin().getLabel() + " " + command.aliases().get(0));
for (Command.ArgumentInfo arg : command.arguments()) {
if (arg.requiresParameter()) {
sender.reply(" &8[&7--" + arg.argumentName() + "&8 <" + arg.parameterDescription() + ">]");
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/CommandResponseHandler.java b/spark-common/src/main/java/me/lucko/spark/common/command/CommandResponseHandler.java
index a5a7391..0df2950 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/CommandResponseHandler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/CommandResponseHandler.java
@@ -28,7 +28,7 @@ import java.util.function.Consumer;
public class CommandResponseHandler<S> {
/** The prefix used in all messages */
- private static final String PREFIX = "&8[&fspark&8] &7";
+ private static final String PREFIX = "&8[&e&l⚡&8] &7";
private final SparkPlatform<S> platform;
private final S sender;
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java
new file mode 100644
index 0000000..8e4178a
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java
@@ -0,0 +1,182 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.command.modules;
+
+import com.google.common.base.Strings;
+import me.lucko.spark.common.command.Command;
+import me.lucko.spark.common.command.CommandModule;
+import me.lucko.spark.common.command.tabcomplete.TabCompleter;
+import me.lucko.spark.common.monitor.tick.TpsCalculator;
+
+import java.lang.management.ManagementFactory;
+import java.lang.management.MemoryMXBean;
+import java.lang.management.MemoryPoolMXBean;
+import java.lang.management.MemoryType;
+import java.lang.management.MemoryUsage;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.function.Consumer;
+
+public class HealthModule<S> implements CommandModule<S> {
+
+ @Override
+ public void registerCommands(Consumer<Command<S>> consumer) {
+ consumer.accept(Command.<S>builder()
+ .aliases("tps")
+ .executor((platform, sender, resp, arguments) -> {
+ TpsCalculator tpsCalculator = platform.getTpsCalculator();
+ if (tpsCalculator != null) {
+ resp.replyPrefixed("TPS from last 5s, 10s, 1m, 5m, 15m:");
+ resp.replyPrefixed(" " + tpsCalculator.toFormattedString());
+ } else {
+ resp.replyPrefixed("Not supported!");
+ }
+ })
+ .tabCompleter(Command.TabCompleter.empty())
+ .build()
+ );
+
+ consumer.accept(Command.<S>builder()
+ .aliases("healthreport", "health")
+ .argumentUsage("memory", null)
+ .executor((platform, sender, resp, arguments) -> {
+ resp.replyPrefixed("&7Generating server health report...");
+ platform.getPlugin().runAsync(() -> {
+ List<String> report = new ArrayList<>(15);
+ report.add("");
+
+ TpsCalculator tpsCalculator = platform.getTpsCalculator();
+ if (tpsCalculator != null) {
+ report.add("&8&l>&6 TPS from last 5s, 10s, 1m, 5m, 15m:");
+ report.add(" " + tpsCalculator.toFormattedString());
+ report.add("");
+ }
+
+ MemoryMXBean memoryMXBean = ManagementFactory.getMemoryMXBean();
+
+ MemoryUsage heapUsage = memoryMXBean.getHeapMemoryUsage();
+ report.add("&8&l>&6 Memory usage: ");
+ report.add(" &f" + formatBytes(heapUsage.getUsed()) + " &7/ &f" + formatBytes(heapUsage.getMax()) +
+ " &7(&a" + percent(heapUsage.getUsed(), heapUsage.getMax()) + "&7)");
+ report.add(" " + generateMemoryUsageDiagram(heapUsage, 40));
+ report.add("");
+
+ if (arguments.boolFlag("memory")) {
+ MemoryUsage nonHeapUsage = memoryMXBean.getNonHeapMemoryUsage();
+ report.add("&8&l>&6 Non-heap memory usage: ");
+ report.add(" &f" + formatBytes(nonHeapUsage.getUsed()));
+ report.add("");
+
+ List<MemoryPoolMXBean> memoryPoolMXBeans = ManagementFactory.getMemoryPoolMXBeans();
+ for (MemoryPoolMXBean memoryPool : memoryPoolMXBeans) {
+ if (memoryPool.getType() != MemoryType.HEAP) {
+ continue;
+ }
+
+ MemoryUsage usage = memoryPool.getUsage();
+ MemoryUsage collectionUsage = memoryPool.getCollectionUsage();
+
+ if (usage.getMax() == -1) {
+ usage = new MemoryUsage(usage.getInit(), usage.getUsed(), usage.getCommitted(), usage.getCommitted());
+ }
+
+ report.add("&8&l>&6 " + memoryPool.getName() + " pool usage: ");
+ report.add(" &f" + formatBytes(usage.getUsed()) + " &7/ &f" + formatBytes(usage.getMax()) +
+ " &7(&a" + percent(usage.getUsed(), usage.getMax()) + "&7)");
+ report.add(" " + generateMemoryPoolDiagram(usage, collectionUsage,40));
+
+
+ if (collectionUsage != null) {
+ report.add(" &c- &7Usage at last GC: &f" + formatBytes(collectionUsage.getUsed()));
+ }
+ report.add("");
+ }
+ }
+
+ report.forEach(resp::reply);
+ });
+ })
+ .tabCompleter((platform, sender, arguments) -> TabCompleter.completeForOpts(arguments, "--memory"))
+ .build()
+ );
+ }
+
+ private static String percent(double value, double max) {
+ double percent = (value * 100d) / max;
+ return (int) percent + "%";
+ }
+
+ private static String generateMemoryUsageDiagram(MemoryUsage usage, int length) {
+ double used = usage.getUsed();
+ double committed = usage.getCommitted();
+ double max = usage.getMax();
+
+ int usedChars = (int) ((used * length) / max);
+ int committedChars = (int) ((committed * length) / max);
+
+ String line = "&7" + Strings.repeat("/", usedChars);
+ if (committedChars > usedChars) {
+ line += Strings.repeat(" ", (committedChars - usedChars) - 1) + "&e|";
+ }
+ if (length > committedChars) {
+ line += Strings.repeat(" ", (length - committedChars));
+ }
+
+ return "&8[" + line + "&8]";
+ }
+
+ private static String generateMemoryPoolDiagram(MemoryUsage usage, MemoryUsage collectionUsage, int length) {
+ double used = usage.getUsed();
+ double collectionUsed = used;
+ if (collectionUsage != null) {
+ collectionUsed = collectionUsage.getUsed();
+ }
+ double committed = usage.getCommitted();
+ double max = usage.getMax();
+
+ int usedChars = (int) ((used * length) / max);
+ int collectionUsedChars = (int) ((collectionUsed * length) / max);
+ int committedChars = (int) ((committed * length) / max);
+
+ String line = "&7" + Strings.repeat("/", collectionUsedChars);
+ if (usedChars > collectionUsedChars) {
+ line += "&c|&7" + Strings.repeat("/", (usedChars - collectionUsedChars) - 1);
+ }
+ if (committedChars > usedChars) {
+ line += Strings.repeat(" ", (committedChars - usedChars) - 1) + "&e|";
+ }
+ if (length > committedChars) {
+ line += Strings.repeat(" ", (length - committedChars));
+ }
+
+ return "&8[" + line + "&8]";
+ }
+
+ private static String formatBytes(long bytes) {
+ if (bytes == 0) {
+ return "0 bytes";
+ }
+ String[] sizes = new String[]{"bytes", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"};
+ int sizeIndex = (int) (Math.log(bytes) / Math.log(1024));
+ return String.format("%.1f", bytes / Math.pow(1024, sizeIndex)) + " " + sizes[sizeIndex];
+ }
+
+}
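
The report formatting above is self-contained enough to try outside the plugin. Below is a minimal sketch mirroring the percent, generateMemoryUsageDiagram and formatBytes helpers from HealthModule; the HealthBarDemo class and its main method are illustrative additions, not part of the commit, colour codes are dropped, and Guava's Strings.repeat is replaced with String#repeat (Java 11+).

import java.lang.management.ManagementFactory;
import java.lang.management.MemoryUsage;

public class HealthBarDemo {

    public static void main(String[] args) {
        // Assumes -Xmx is set so heap getMax() is not -1 (the command only handles an
        // undefined max for memory pools; this demo does not).
        MemoryUsage heap = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage();
        System.out.println(formatBytes(heap.getUsed()) + " / " + formatBytes(heap.getMax())
                + " (" + percent(heap.getUsed(), heap.getMax()) + ")");
        System.out.println(diagram(heap, 40));
    }

    // Mirrors HealthModule#percent: integer-truncated percentage.
    static String percent(double value, double max) {
        return (int) ((value * 100d) / max) + "%";
    }

    // Mirrors HealthModule#generateMemoryUsageDiagram without colour codes:
    // '/' marks used memory, '|' marks the committed boundary, spaces pad to 'length'.
    static String diagram(MemoryUsage usage, int length) {
        double used = usage.getUsed();
        double committed = usage.getCommitted();
        double max = usage.getMax();

        int usedChars = (int) ((used * length) / max);
        int committedChars = (int) ((committed * length) / max);

        StringBuilder line = new StringBuilder("/".repeat(usedChars));
        if (committedChars > usedChars) {
            line.append(" ".repeat(committedChars - usedChars - 1)).append('|');
        }
        if (length > committedChars) {
            line.append(" ".repeat(length - committedChars));
        }
        return "[" + line + "]";
    }

    // Mirrors HealthModule#formatBytes: unit chosen by log base 1024.
    static String formatBytes(long bytes) {
        if (bytes == 0) {
            return "0 bytes";
        }
        String[] sizes = {"bytes", "KB", "MB", "GB", "TB"};
        int sizeIndex = (int) (Math.log(bytes) / Math.log(1024));
        return String.format("%.1f %s", bytes / Math.pow(1024, sizeIndex), sizes[sizeIndex]);
    }
}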
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/MemoryModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/MemoryModule.java
index 2cb2e07..583b1b6 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/MemoryModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/MemoryModule.java
@@ -24,8 +24,8 @@ import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.command.Command;
import me.lucko.spark.common.command.CommandModule;
import me.lucko.spark.common.command.tabcomplete.TabCompleter;
-import me.lucko.spark.common.memory.HeapDump;
-import me.lucko.spark.common.memory.HeapDumpSummary;
+import me.lucko.spark.common.heapdump.HeapDump;
+import me.lucko.spark.common.heapdump.HeapDumpSummary;
import okhttp3.MediaType;
import java.io.IOException;
@@ -64,7 +64,7 @@ public class MemoryModule<S> implements CommandModule<S> {
byte[] output = heapDump.formCompressedDataPayload();
try {
String key = SparkPlatform.BYTEBIN_CLIENT.postContent(output, JSON_TYPE, false).key();
- resp.broadcastPrefixed("&bHeap dump summmary output:");
+ resp.broadcastPrefixed("&6Heap dump summmary output:");
resp.broadcastLink(SparkPlatform.VIEWER_URL + key);
} catch (IOException e) {
resp.broadcastPrefixed("&cAn error occurred whilst uploading the data.");
@@ -108,7 +108,7 @@ public class MemoryModule<S> implements CommandModule<S> {
return;
}
- resp.broadcastPrefixed("&bHeap dump written to: " + file.toString());
+ resp.broadcastPrefixed("&6Heap dump written to: " + file.toString());
});
})
.tabCompleter((platform, sender, arguments) -> TabCompleter.completeForOpts(arguments, "--run-gc-before", "--include-non-live"))
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/MonitoringModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/MonitoringModule.java
deleted file mode 100644
index b543e1d..0000000
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/MonitoringModule.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * This file is part of spark.
- *
- * Copyright (c) lucko (Luck) <luck@lucko.me>
- * Copyright (c) contributors
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-package me.lucko.spark.common.command.modules;
-
-import me.lucko.spark.common.command.Command;
-import me.lucko.spark.common.command.CommandModule;
-import me.lucko.spark.common.monitor.tick.TpsCalculator;
-
-import java.util.function.Consumer;
-
-public class MonitoringModule<S> implements CommandModule<S> {
-
- @Override
- public void registerCommands(Consumer<Command<S>> consumer) {
- consumer.accept(Command.<S>builder()
- .aliases("tps")
- .executor((platform, sender, resp, arguments) -> {
- TpsCalculator tpsCalculator = platform.getTpsCalculator();
- if (tpsCalculator == null) {
- resp.replyPrefixed("TPS data is not available.");
- return;
- }
-
- String formattedTpsString = tpsCalculator.toFormattedString();
- resp.replyPrefixed("TPS from last 5s, 10s, 1m, 5m, 15m");
- resp.replyPrefixed(formattedTpsString);
- })
- .tabCompleter(Command.TabCompleter.empty())
- .build()
- );
- }
-
-}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
index a0f171c..99f8007 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
@@ -137,7 +137,7 @@ public class SamplerModule<S> implements CommandModule<S> {
}
sampler = this.activeSampler = builder.start();
- resp.broadcastPrefixed("&bProfiler now active!");
+ resp.broadcastPrefixed("&6Profiler now active!");
if (timeoutSeconds == -1) {
resp.broadcastPrefixed("&7Use '/" + platform.getPlugin().getLabel() + " stop' to stop profiling and upload the results.");
} else {
@@ -234,7 +234,7 @@ public class SamplerModule<S> implements CommandModule<S> {
} else {
this.activeSampler.cancel();
this.activeSampler = null;
- resp.broadcastPrefixed("&bThe active sampling task has been cancelled.");
+ resp.broadcastPrefixed("&6The active sampling task has been cancelled.");
}
}
})
@@ -247,7 +247,7 @@ public class SamplerModule<S> implements CommandModule<S> {
byte[] output = sampler.formCompressedDataPayload();
try {
String key = SparkPlatform.BYTEBIN_CLIENT.postContent(output, JSON_TYPE, false).key();
- resp.broadcastPrefixed("&bSampling results:");
+ resp.broadcastPrefixed("&6Sampling results:");
resp.broadcastLink(SparkPlatform.VIEWER_URL + key);
} catch (IOException e) {
resp.broadcastPrefixed("&cAn error occurred whilst uploading the results.");
diff --git a/spark-common/src/main/java/me/lucko/spark/common/memory/HeapDump.java b/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDump.java
index 6200c17..189f89e 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/memory/HeapDump.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDump.java
@@ -18,7 +18,7 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-package me.lucko.spark.common.memory;
+package me.lucko.spark.common.heapdump;
import javax.management.JMX;
import javax.management.MBeanServer;
diff --git a/spark-common/src/main/java/me/lucko/spark/common/memory/HeapDumpSummary.java b/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java
index f3f62db..c95e9ab 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/memory/HeapDumpSummary.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java
@@ -18,7 +18,7 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-package me.lucko.spark.common.memory;
+package me.lucko.spark.common.heapdump;
import com.google.gson.stream.JsonWriter;
import me.lucko.spark.common.util.TypeDescriptors;
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/gc/GarbageCollectionMonitor.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/GarbageCollectionMonitor.java
index 93a5fd8..d750b1a 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/monitor/gc/GarbageCollectionMonitor.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/GarbageCollectionMonitor.java
@@ -18,7 +18,7 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-package me.lucko.spark.common.monitor.gc;
+package me.lucko.spark.common.monitor.memory;
import com.sun.management.GarbageCollectionNotificationInfo;
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TickMonitor.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TickMonitor.java
index 034e876..5693df6 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TickMonitor.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TickMonitor.java
@@ -21,7 +21,7 @@
package me.lucko.spark.common.monitor.tick;
import com.sun.management.GarbageCollectionNotificationInfo;
-import me.lucko.spark.common.monitor.gc.GarbageCollectionMonitor;
+import me.lucko.spark.common.monitor.memory.GarbageCollectionMonitor;
import me.lucko.spark.common.sampler.TickCounter;
import java.text.DecimalFormat;
@@ -91,7 +91,7 @@ public abstract class TickMonitor implements TickCounter.TickTask, GarbageCollec
// move onto the next state
if (this.averageTickTime.getCount() >= 120) {
- sendMessage("&bAnalysis is now complete.");
+ sendMessage("&6Analysis is now complete.");
sendMessage("&f> &7Max: " + df.format(this.averageTickTime.getMax()) + "ms");
sendMessage("&f> &7Min: " + df.format(this.averageTickTime.getMin()) + "ms");
sendMessage("&f> &7Avg: " + df.format(this.averageTickTime.getAverage()) + "ms");
@@ -111,8 +111,8 @@ public abstract class TickMonitor implements TickCounter.TickTask, GarbageCollec
double percentageChange = (increase * 100d) / this.avg;
if (percentageChange > this.percentageChangeThreshold) {
- sendMessage("&7Tick &8#" + counter.getCurrentTick() + " &7lasted &b" + df.format(diff) +
- "&7 ms. (&b" + df.format(percentageChange) + "% &7increase from average)");
+ sendMessage("&7Tick &8#" + counter.getCurrentTick() + " &7lasted &6" + df.format(diff) +
+ "&7 ms. (&6" + df.format(percentageChange) + "% &7increase from average)");
}
}
}
@@ -132,7 +132,7 @@ public abstract class TickMonitor implements TickCounter.TickTask, GarbageCollec
gcType = "Old Gen GC";
}
- sendMessage("&7Tick &8#" + this.tickCounter.getCurrentTick() + " &7included &4GC &7lasting &b" +
+ sendMessage("&7Tick &8#" + this.tickCounter.getCurrentTick() + " &7included &4GC &7lasting &6" +
df.format(data.getGcInfo().getDuration()) + "&7 ms. (type = " + gcType + ")");
}