From 394c59d375811e4b1e0f23a528ef85d8c4d0e5a0 Mon Sep 17 00:00:00 2001 From: Luck Date: Wed, 17 Apr 2019 00:58:28 +0100 Subject: Add /spark health command to report tps / memory usage stats --- .../java/me/lucko/spark/common/SparkPlatform.java | 7 +- .../common/command/CommandResponseHandler.java | 2 +- .../spark/common/command/modules/HealthModule.java | 182 +++++++++++++++++++++ .../spark/common/command/modules/MemoryModule.java | 8 +- .../common/command/modules/MonitoringModule.java | 51 ------ .../common/command/modules/SamplerModule.java | 6 +- .../me/lucko/spark/common/heapdump/HeapDump.java | 77 +++++++++ .../spark/common/heapdump/HeapDumpSummary.java | 171 +++++++++++++++++++ .../me/lucko/spark/common/memory/HeapDump.java | 77 --------- .../lucko/spark/common/memory/HeapDumpSummary.java | 171 ------------------- .../monitor/gc/GarbageCollectionMonitor.java | 90 ---------- .../monitor/memory/GarbageCollectionMonitor.java | 90 ++++++++++ .../spark/common/monitor/tick/TickMonitor.java | 10 +- 13 files changed, 537 insertions(+), 405 deletions(-) create mode 100644 spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java delete mode 100644 spark-common/src/main/java/me/lucko/spark/common/command/modules/MonitoringModule.java create mode 100644 spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDump.java create mode 100644 spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java delete mode 100644 spark-common/src/main/java/me/lucko/spark/common/memory/HeapDump.java delete mode 100644 spark-common/src/main/java/me/lucko/spark/common/memory/HeapDumpSummary.java delete mode 100644 spark-common/src/main/java/me/lucko/spark/common/monitor/gc/GarbageCollectionMonitor.java create mode 100644 spark-common/src/main/java/me/lucko/spark/common/monitor/memory/GarbageCollectionMonitor.java (limited to 'spark-common/src/main/java') diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java 
b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java index 8eb4565..7a33c39 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java +++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java @@ -24,8 +24,8 @@ import com.google.common.collect.ImmutableList; import me.lucko.spark.common.command.Arguments; import me.lucko.spark.common.command.Command; import me.lucko.spark.common.command.CommandResponseHandler; +import me.lucko.spark.common.command.modules.HealthModule; import me.lucko.spark.common.command.modules.MemoryModule; -import me.lucko.spark.common.command.modules.MonitoringModule; import me.lucko.spark.common.command.modules.SamplerModule; import me.lucko.spark.common.command.modules.TickMonitoringModule; import me.lucko.spark.common.command.tabcomplete.CompletionSupplier; @@ -66,7 +66,7 @@ public class SparkPlatform { ImmutableList.Builder> commandsBuilder = ImmutableList.builder(); new SamplerModule().registerCommands(commandsBuilder::add); - new MonitoringModule().registerCommands(commandsBuilder::add); + new HealthModule().registerCommands(commandsBuilder::add); new TickMonitoringModule().registerCommands(commandsBuilder::add); new MemoryModule().registerCommands(commandsBuilder::add); this.commands = commandsBuilder.build(); @@ -115,6 +115,7 @@ public class SparkPlatform { try { command.executor().execute(this, sender, resp, new Arguments(rawArgs)); } catch (IllegalArgumentException e) { + e.printStackTrace(); resp.replyPrefixed("&c" + e.getMessage()); } return; @@ -147,7 +148,7 @@ public class SparkPlatform { private void sendUsage(CommandResponseHandler sender) { sender.replyPrefixed("&fspark &7v" + getPlugin().getVersion()); for (Command command : this.commands) { - sender.reply("&b&l> &7/" + getPlugin().getLabel() + " " + command.aliases().get(0)); + sender.reply("&6&l> &7/" + getPlugin().getLabel() + " " + command.aliases().get(0)); for (Command.ArgumentInfo arg : command.arguments()) { if 
(arg.requiresParameter()) { sender.reply(" &8[&7--" + arg.argumentName() + "&8 <" + arg.parameterDescription() + ">]"); diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/CommandResponseHandler.java b/spark-common/src/main/java/me/lucko/spark/common/command/CommandResponseHandler.java index a5a7391..0df2950 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/CommandResponseHandler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/CommandResponseHandler.java @@ -28,7 +28,7 @@ import java.util.function.Consumer; public class CommandResponseHandler { /** The prefix used in all messages */ - private static final String PREFIX = "&8[&fspark&8] &7"; + private static final String PREFIX = "&8[&e&l⚡&8] &7"; private final SparkPlatform platform; private final S sender; diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java new file mode 100644 index 0000000..8e4178a --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java @@ -0,0 +1,182 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +package me.lucko.spark.common.command.modules; + +import com.google.common.base.Strings; +import me.lucko.spark.common.command.Command; +import me.lucko.spark.common.command.CommandModule; +import me.lucko.spark.common.command.tabcomplete.TabCompleter; +import me.lucko.spark.common.monitor.tick.TpsCalculator; + +import java.lang.management.ManagementFactory; +import java.lang.management.MemoryMXBean; +import java.lang.management.MemoryPoolMXBean; +import java.lang.management.MemoryType; +import java.lang.management.MemoryUsage; +import java.util.ArrayList; +import java.util.List; +import java.util.function.Consumer; + +public class HealthModule implements CommandModule { + + @Override + public void registerCommands(Consumer> consumer) { + consumer.accept(Command.builder() + .aliases("tps") + .executor((platform, sender, resp, arguments) -> { + TpsCalculator tpsCalculator = platform.getTpsCalculator(); + if (tpsCalculator != null) { + resp.replyPrefixed("TPS from last 5s, 10s, 1m, 5m, 15m:"); + resp.replyPrefixed(" " + tpsCalculator.toFormattedString()); + } else { + resp.replyPrefixed("Not supported!"); + } + }) + .tabCompleter(Command.TabCompleter.empty()) + .build() + ); + + consumer.accept(Command.builder() + .aliases("healthreport", "health") + .argumentUsage("memory", null) + .executor((platform, sender, resp, arguments) -> { + resp.replyPrefixed("&7Generating server health report..."); + platform.getPlugin().runAsync(() -> { + List report = new ArrayList<>(15); + report.add(""); + + TpsCalculator tpsCalculator = platform.getTpsCalculator(); + if (tpsCalculator != null) { + report.add("&8&l>&6 TPS from last 5s, 10s, 1m, 5m, 15m:"); + report.add(" " + tpsCalculator.toFormattedString()); + report.add(""); + } + + MemoryMXBean memoryMXBean = ManagementFactory.getMemoryMXBean(); + + MemoryUsage heapUsage = memoryMXBean.getHeapMemoryUsage(); + report.add("&8&l>&6 Memory usage: "); + report.add(" &f" + formatBytes(heapUsage.getUsed()) + " &7/ &f" + 
formatBytes(heapUsage.getMax()) + + " &7(&a" + percent(heapUsage.getUsed(), heapUsage.getMax()) + "&7)"); + report.add(" " + generateMemoryUsageDiagram(heapUsage, 40)); + report.add(""); + + if (arguments.boolFlag("memory")) { + MemoryUsage nonHeapUsage = memoryMXBean.getNonHeapMemoryUsage(); + report.add("&8&l>&6 Non-heap memory usage: "); + report.add(" &f" + formatBytes(nonHeapUsage.getUsed())); + report.add(""); + + List memoryPoolMXBeans = ManagementFactory.getMemoryPoolMXBeans(); + for (MemoryPoolMXBean memoryPool : memoryPoolMXBeans) { + if (memoryPool.getType() != MemoryType.HEAP) { + continue; + } + + MemoryUsage usage = memoryPool.getUsage(); + MemoryUsage collectionUsage = memoryPool.getCollectionUsage(); + + if (usage.getMax() == -1) { + usage = new MemoryUsage(usage.getInit(), usage.getUsed(), usage.getCommitted(), usage.getCommitted()); + } + + report.add("&8&l>&6 " + memoryPool.getName() + " pool usage: "); + report.add(" &f" + formatBytes(usage.getUsed()) + " &7/ &f" + formatBytes(usage.getMax()) + + " &7(&a" + percent(usage.getUsed(), usage.getMax()) + "&7)"); + report.add(" " + generateMemoryPoolDiagram(usage, collectionUsage,40)); + + + if (collectionUsage != null) { + report.add(" &c- &7Usage at last GC: &f" + formatBytes(collectionUsage.getUsed())); + } + report.add(""); + } + } + + report.forEach(resp::reply); + }); + }) + .tabCompleter((platform, sender, arguments) -> TabCompleter.completeForOpts(arguments, "--memory")) + .build() + ); + } + + private static String percent(double value, double max) { + double percent = (value * 100d) / max; + return (int) percent + "%"; + } + + private static String generateMemoryUsageDiagram(MemoryUsage usage, int length) { + double used = usage.getUsed(); + double committed = usage.getCommitted(); + double max = usage.getMax(); + + int usedChars = (int) ((used * length) / max); + int committedChars = (int) ((committed * length) / max); + + String line = "&7" + Strings.repeat("/", usedChars); + if 
(committedChars > usedChars) { + line += Strings.repeat(" ", (committedChars - usedChars) - 1) + "&e|"; + } + if (length > committedChars) { + line += Strings.repeat(" ", (length - committedChars)); + } + + return "&8[" + line + "&8]"; + } + + private static String generateMemoryPoolDiagram(MemoryUsage usage, MemoryUsage collectionUsage, int length) { + double used = usage.getUsed(); + double collectionUsed = used; + if (collectionUsage != null) { + collectionUsed = collectionUsage.getUsed(); + } + double committed = usage.getCommitted(); + double max = usage.getMax(); + + int usedChars = (int) ((used * length) / max); + int collectionUsedChars = (int) ((collectionUsed * length) / max); + int committedChars = (int) ((committed * length) / max); + + String line = "&7" + Strings.repeat("/", collectionUsedChars); + if (usedChars > collectionUsedChars) { + line += "&c|&7" + Strings.repeat("/", (usedChars - collectionUsedChars) - 1); + } + if (committedChars > usedChars) { + line += Strings.repeat(" ", (committedChars - usedChars) - 1) + "&e|"; + } + if (length > committedChars) { + line += Strings.repeat(" ", (length - committedChars)); + } + + return "&8[" + line + "&8]"; + } + + private static String formatBytes(long bytes) { + if (bytes == 0) { + return "0 bytes"; + } + String[] sizes = new String[]{"bytes", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"}; + int sizeIndex = (int) (Math.log(bytes) / Math.log(1024)); + return String.format("%.1f", bytes / Math.pow(1024, sizeIndex)) + " " + sizes[sizeIndex]; + } + +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/MemoryModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/MemoryModule.java index 2cb2e07..583b1b6 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/MemoryModule.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/MemoryModule.java @@ -24,8 +24,8 @@ import me.lucko.spark.common.SparkPlatform; import 
me.lucko.spark.common.command.Command; import me.lucko.spark.common.command.CommandModule; import me.lucko.spark.common.command.tabcomplete.TabCompleter; -import me.lucko.spark.common.memory.HeapDump; -import me.lucko.spark.common.memory.HeapDumpSummary; +import me.lucko.spark.common.heapdump.HeapDump; +import me.lucko.spark.common.heapdump.HeapDumpSummary; import okhttp3.MediaType; import java.io.IOException; @@ -64,7 +64,7 @@ public class MemoryModule implements CommandModule { byte[] output = heapDump.formCompressedDataPayload(); try { String key = SparkPlatform.BYTEBIN_CLIENT.postContent(output, JSON_TYPE, false).key(); - resp.broadcastPrefixed("&bHeap dump summmary output:"); + resp.broadcastPrefixed("&6Heap dump summmary output:"); resp.broadcastLink(SparkPlatform.VIEWER_URL + key); } catch (IOException e) { resp.broadcastPrefixed("&cAn error occurred whilst uploading the data."); @@ -108,7 +108,7 @@ public class MemoryModule implements CommandModule { return; } - resp.broadcastPrefixed("&bHeap dump written to: " + file.toString()); + resp.broadcastPrefixed("&6Heap dump written to: " + file.toString()); }); }) .tabCompleter((platform, sender, arguments) -> TabCompleter.completeForOpts(arguments, "--run-gc-before", "--include-non-live")) diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/MonitoringModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/MonitoringModule.java deleted file mode 100644 index b543e1d..0000000 --- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/MonitoringModule.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * This file is part of spark. - * - * Copyright (c) lucko (Luck) - * Copyright (c) contributors - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. 
- * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - */ - -package me.lucko.spark.common.command.modules; - -import me.lucko.spark.common.command.Command; -import me.lucko.spark.common.command.CommandModule; -import me.lucko.spark.common.monitor.tick.TpsCalculator; - -import java.util.function.Consumer; - -public class MonitoringModule implements CommandModule { - - @Override - public void registerCommands(Consumer> consumer) { - consumer.accept(Command.builder() - .aliases("tps") - .executor((platform, sender, resp, arguments) -> { - TpsCalculator tpsCalculator = platform.getTpsCalculator(); - if (tpsCalculator == null) { - resp.replyPrefixed("TPS data is not available."); - return; - } - - String formattedTpsString = tpsCalculator.toFormattedString(); - resp.replyPrefixed("TPS from last 5s, 10s, 1m, 5m, 15m"); - resp.replyPrefixed(formattedTpsString); - }) - .tabCompleter(Command.TabCompleter.empty()) - .build() - ); - } - -} diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java index a0f171c..99f8007 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java @@ -137,7 +137,7 @@ public class SamplerModule implements CommandModule { } sampler = this.activeSampler = builder.start(); - resp.broadcastPrefixed("&bProfiler now active!"); + resp.broadcastPrefixed("&6Profiler now active!"); if (timeoutSeconds == -1) { resp.broadcastPrefixed("&7Use '/" + platform.getPlugin().getLabel() + " stop' to stop 
profiling and upload the results."); } else { @@ -234,7 +234,7 @@ public class SamplerModule implements CommandModule { } else { this.activeSampler.cancel(); this.activeSampler = null; - resp.broadcastPrefixed("&bThe active sampling task has been cancelled."); + resp.broadcastPrefixed("&6The active sampling task has been cancelled."); } } }) @@ -247,7 +247,7 @@ public class SamplerModule implements CommandModule { byte[] output = sampler.formCompressedDataPayload(); try { String key = SparkPlatform.BYTEBIN_CLIENT.postContent(output, JSON_TYPE, false).key(); - resp.broadcastPrefixed("&bSampling results:"); + resp.broadcastPrefixed("&6Sampling results:"); resp.broadcastLink(SparkPlatform.VIEWER_URL + key); } catch (IOException e) { resp.broadcastPrefixed("&cAn error occurred whilst uploading the results."); diff --git a/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDump.java b/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDump.java new file mode 100644 index 0000000..189f89e --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDump.java @@ -0,0 +1,77 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
/**
 * Utility for creating .hprof memory heap snapshots of the running VM.
 */
public final class HeapDump {

    /** The object name of the com.sun.management.HotSpotDiagnosticMXBean */
    private static final String DIAGNOSTIC_BEAN = "com.sun.management:type=HotSpotDiagnostic";

    private HeapDump() {}

    /**
     * Creates a heap dump at the given output path.
     *
     * @param outputPath the path to write the snapshot to
     * @param live if true dump only live objects i.e. objects that are reachable from others
     * @throws Exception catch all
     */
    public static void dumpHeap(Path outputPath, boolean live) throws Exception {
        String path = outputPath.toAbsolutePath().normalize().toString();

        if (isOpenJ9()) {
            // OpenJ9 has no HotSpotDiagnostic bean - invoke the IBM dump API reflectively
            Class<?> dumpClass = Class.forName("com.ibm.jvm.Dump");
            Method heapDumpToFile = dumpClass.getMethod("heapDumpToFile", String.class);
            heapDumpToFile.invoke(null, path);
        } else {
            MBeanServer server = ManagementFactory.getPlatformMBeanServer();
            ObjectName beanName = ObjectName.getInstance(DIAGNOSTIC_BEAN);
            HotSpotDiagnosticMXBean bean = JMX.newMXBeanProxy(server, beanName, HotSpotDiagnosticMXBean.class);
            bean.dumpHeap(path, live);
        }
    }

    /**
     * Gets if the running VM is an OpenJ9 (IBM J9) implementation, detected via
     * the presence of the com.ibm.jvm.Dump class.
     *
     * @return true if running on OpenJ9
     */
    public static boolean isOpenJ9() {
        try {
            Class.forName("com.ibm.jvm.Dump");
            return true;
        } catch (ClassNotFoundException absent) {
            return false;
        }
    }

    /** Proxy interface for the HotSpot diagnostic MX bean. */
    public interface HotSpotDiagnosticMXBean {
        void dumpHeap(String outputFile, boolean live) throws IOException;
    }

}
100644 index 0000000..c95e9ab --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java @@ -0,0 +1,171 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.heapdump; + +import com.google.gson.stream.JsonWriter; +import me.lucko.spark.common.util.TypeDescriptors; + +import javax.management.JMX; +import javax.management.MBeanServer; +import javax.management.ObjectName; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.OutputStreamWriter; +import java.io.Writer; +import java.lang.management.ManagementFactory; +import java.nio.charset.StandardCharsets; +import java.util.Arrays; +import java.util.List; +import java.util.Objects; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.stream.Collectors; +import java.util.zip.GZIPOutputStream; + +/** + * Represents a "heap dump summary" from the VM. + * + *

Contains a number of entries, corresponding to types of objects in the virtual machine + * and their recorded impact on memory usage.

+ */ +public final class HeapDumpSummary { + + /** The object name of the com.sun.management.DiagnosticCommandMBean */ + private static final String DIAGNOSTIC_BEAN = "com.sun.management:type=DiagnosticCommand"; + /** A regex pattern representing the expected format of the raw heap output */ + private static final Pattern OUTPUT_FORMAT = Pattern.compile("^\\s*(\\d+):\\s*(\\d+)\\s*(\\d+)\\s*([^\\s]+).*$"); + + /** + * Obtains the raw heap data output from the DiagnosticCommandMBean. + * + * @return the raw output + * @throws Exception lots could go wrong! + */ + private static String getRawHeapData() throws Exception { + MBeanServer beanServer = ManagementFactory.getPlatformMBeanServer(); + ObjectName diagnosticBeanName = ObjectName.getInstance(DIAGNOSTIC_BEAN); + + DiagnosticCommandMXBean proxy = JMX.newMXBeanProxy(beanServer, diagnosticBeanName, DiagnosticCommandMXBean.class); + return proxy.gcClassHistogram(new String[0]); + } + + /** + * Creates a new heap dump based on the current VM. 
+ * + * @return the created heap dump + * @throws RuntimeException if an error occurred whilst requesting a heap dump from the VM + */ + public static HeapDumpSummary createNew() { + String rawOutput; + try { + rawOutput = getRawHeapData(); + } catch (Exception e) { + throw new RuntimeException("Unable to get heap dump", e); + } + + return new HeapDumpSummary(Arrays.stream(rawOutput.split("\n")) + .map(line -> { + Matcher matcher = OUTPUT_FORMAT.matcher(line); + if (!matcher.matches()) { + return null; + } + + return new Entry( + Integer.parseInt(matcher.group(1)), + Integer.parseInt(matcher.group(2)), + Long.parseLong(matcher.group(3)), + TypeDescriptors.getJavaType(matcher.group(4)) + ); + }) + .filter(Objects::nonNull) + .collect(Collectors.toList())); + } + + /** The entries in this heap dump */ + private final List entries; + + private HeapDumpSummary(List entries) { + this.entries = entries; + } + + private void writeOutput(JsonWriter writer) throws IOException { + writer.beginObject(); + writer.name("type").value("heap"); + writer.name("entries").beginArray(); + for (Entry entry : this.entries) { + writer.beginObject(); + writer.name("#").value(entry.getOrder()); + writer.name("i").value(entry.getInstances()); + writer.name("s").value(entry.getBytes()); + writer.name("t").value(entry.getType()); + writer.endObject(); + } + writer.endArray(); + writer.endObject(); + } + + public byte[] formCompressedDataPayload() { + ByteArrayOutputStream byteOut = new ByteArrayOutputStream(); + try (Writer writer = new OutputStreamWriter(new GZIPOutputStream(byteOut), StandardCharsets.UTF_8)) { + try (JsonWriter jsonWriter = new JsonWriter(writer)) { + writeOutput(jsonWriter); + } + } catch (IOException e) { + throw new RuntimeException(e); + } + return byteOut.toByteArray(); + } + + public static final class Entry { + private final int order; + private final int instances; + private final long bytes; + private final String type; + + Entry(int order, int instances, long 
bytes, String type) { + this.order = order; + this.instances = instances; + this.bytes = bytes; + this.type = type; + } + + public int getOrder() { + return this.order; + } + + public int getInstances() { + return this.instances; + } + + public long getBytes() { + return this.bytes; + } + + public String getType() { + return this.type; + } + } + + public interface DiagnosticCommandMXBean { + String gcClassHistogram(String[] args); + } + +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/memory/HeapDump.java b/spark-common/src/main/java/me/lucko/spark/common/memory/HeapDump.java deleted file mode 100644 index 6200c17..0000000 --- a/spark-common/src/main/java/me/lucko/spark/common/memory/HeapDump.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * This file is part of spark. - * - * Copyright (c) lucko (Luck) - * Copyright (c) contributors - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - */ - -package me.lucko.spark.common.memory; - -import javax.management.JMX; -import javax.management.MBeanServer; -import javax.management.ObjectName; -import java.io.IOException; -import java.lang.management.ManagementFactory; -import java.lang.reflect.Method; -import java.nio.file.Path; - -/** - * Utility for creating .hprof memory heap snapshots. 
- */ -public final class HeapDump { - - private HeapDump() {} - - /** The object name of the com.sun.management.HotSpotDiagnosticMXBean */ - private static final String DIAGNOSTIC_BEAN = "com.sun.management:type=HotSpotDiagnostic"; - - /** - * Creates a heap dump at the given output path. - * - * @param outputPath the path to write the snapshot to - * @param live if true dump only live objects i.e. objects that are reachable from others - * @throws Exception catch all - */ - public static void dumpHeap(Path outputPath, boolean live) throws Exception { - String outputPathString = outputPath.toAbsolutePath().normalize().toString(); - - if (isOpenJ9()) { - Class dumpClass = Class.forName("com.ibm.jvm.Dump"); - Method heapDumpMethod = dumpClass.getMethod("heapDumpToFile", String.class); - heapDumpMethod.invoke(null, outputPathString); - } else { - MBeanServer beanServer = ManagementFactory.getPlatformMBeanServer(); - ObjectName diagnosticBeanName = ObjectName.getInstance(DIAGNOSTIC_BEAN); - - HotSpotDiagnosticMXBean proxy = JMX.newMXBeanProxy(beanServer, diagnosticBeanName, HotSpotDiagnosticMXBean.class); - proxy.dumpHeap(outputPathString, live); - } - } - - public static boolean isOpenJ9() { - try { - Class.forName("com.ibm.jvm.Dump"); - return true; - } catch (ClassNotFoundException e) { - return false; - } - } - - public interface HotSpotDiagnosticMXBean { - void dumpHeap(String outputFile, boolean live) throws IOException; - } - -} diff --git a/spark-common/src/main/java/me/lucko/spark/common/memory/HeapDumpSummary.java b/spark-common/src/main/java/me/lucko/spark/common/memory/HeapDumpSummary.java deleted file mode 100644 index f3f62db..0000000 --- a/spark-common/src/main/java/me/lucko/spark/common/memory/HeapDumpSummary.java +++ /dev/null @@ -1,171 +0,0 @@ -/* - * This file is part of spark. 
- * - * Copyright (c) lucko (Luck) - * Copyright (c) contributors - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - */ - -package me.lucko.spark.common.memory; - -import com.google.gson.stream.JsonWriter; -import me.lucko.spark.common.util.TypeDescriptors; - -import javax.management.JMX; -import javax.management.MBeanServer; -import javax.management.ObjectName; -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.OutputStreamWriter; -import java.io.Writer; -import java.lang.management.ManagementFactory; -import java.nio.charset.StandardCharsets; -import java.util.Arrays; -import java.util.List; -import java.util.Objects; -import java.util.regex.Matcher; -import java.util.regex.Pattern; -import java.util.stream.Collectors; -import java.util.zip.GZIPOutputStream; - -/** - * Represents a "heap dump summary" from the VM. - * - *

Contains a number of entries, corresponding to types of objects in the virtual machine - * and their recorded impact on memory usage.

- */ -public final class HeapDumpSummary { - - /** The object name of the com.sun.management.DiagnosticCommandMBean */ - private static final String DIAGNOSTIC_BEAN = "com.sun.management:type=DiagnosticCommand"; - /** A regex pattern representing the expected format of the raw heap output */ - private static final Pattern OUTPUT_FORMAT = Pattern.compile("^\\s*(\\d+):\\s*(\\d+)\\s*(\\d+)\\s*([^\\s]+).*$"); - - /** - * Obtains the raw heap data output from the DiagnosticCommandMBean. - * - * @return the raw output - * @throws Exception lots could go wrong! - */ - private static String getRawHeapData() throws Exception { - MBeanServer beanServer = ManagementFactory.getPlatformMBeanServer(); - ObjectName diagnosticBeanName = ObjectName.getInstance(DIAGNOSTIC_BEAN); - - DiagnosticCommandMXBean proxy = JMX.newMXBeanProxy(beanServer, diagnosticBeanName, DiagnosticCommandMXBean.class); - return proxy.gcClassHistogram(new String[0]); - } - - /** - * Creates a new heap dump based on the current VM. 
- * - * @return the created heap dump - * @throws RuntimeException if an error occurred whilst requesting a heap dump from the VM - */ - public static HeapDumpSummary createNew() { - String rawOutput; - try { - rawOutput = getRawHeapData(); - } catch (Exception e) { - throw new RuntimeException("Unable to get heap dump", e); - } - - return new HeapDumpSummary(Arrays.stream(rawOutput.split("\n")) - .map(line -> { - Matcher matcher = OUTPUT_FORMAT.matcher(line); - if (!matcher.matches()) { - return null; - } - - return new Entry( - Integer.parseInt(matcher.group(1)), - Integer.parseInt(matcher.group(2)), - Long.parseLong(matcher.group(3)), - TypeDescriptors.getJavaType(matcher.group(4)) - ); - }) - .filter(Objects::nonNull) - .collect(Collectors.toList())); - } - - /** The entries in this heap dump */ - private final List entries; - - private HeapDumpSummary(List entries) { - this.entries = entries; - } - - private void writeOutput(JsonWriter writer) throws IOException { - writer.beginObject(); - writer.name("type").value("heap"); - writer.name("entries").beginArray(); - for (Entry entry : this.entries) { - writer.beginObject(); - writer.name("#").value(entry.getOrder()); - writer.name("i").value(entry.getInstances()); - writer.name("s").value(entry.getBytes()); - writer.name("t").value(entry.getType()); - writer.endObject(); - } - writer.endArray(); - writer.endObject(); - } - - public byte[] formCompressedDataPayload() { - ByteArrayOutputStream byteOut = new ByteArrayOutputStream(); - try (Writer writer = new OutputStreamWriter(new GZIPOutputStream(byteOut), StandardCharsets.UTF_8)) { - try (JsonWriter jsonWriter = new JsonWriter(writer)) { - writeOutput(jsonWriter); - } - } catch (IOException e) { - throw new RuntimeException(e); - } - return byteOut.toByteArray(); - } - - public static final class Entry { - private final int order; - private final int instances; - private final long bytes; - private final String type; - - Entry(int order, int instances, long 
bytes, String type) { - this.order = order; - this.instances = instances; - this.bytes = bytes; - this.type = type; - } - - public int getOrder() { - return this.order; - } - - public int getInstances() { - return this.instances; - } - - public long getBytes() { - return this.bytes; - } - - public String getType() { - return this.type; - } - } - - public interface DiagnosticCommandMXBean { - String gcClassHistogram(String[] args); - } - -} diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/gc/GarbageCollectionMonitor.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/gc/GarbageCollectionMonitor.java deleted file mode 100644 index 93a5fd8..0000000 --- a/spark-common/src/main/java/me/lucko/spark/common/monitor/gc/GarbageCollectionMonitor.java +++ /dev/null @@ -1,90 +0,0 @@ -/* - * This file is part of spark. - * - * Copyright (c) lucko (Luck) - * Copyright (c) contributors - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
- */ - -package me.lucko.spark.common.monitor.gc; - -import com.sun.management.GarbageCollectionNotificationInfo; - -import javax.management.ListenerNotFoundException; -import javax.management.Notification; -import javax.management.NotificationEmitter; -import javax.management.NotificationListener; -import javax.management.openmbean.CompositeData; -import java.lang.management.GarbageCollectorMXBean; -import java.lang.management.ManagementFactory; -import java.util.ArrayList; -import java.util.List; - -public class GarbageCollectionMonitor implements NotificationListener, AutoCloseable { - - private final List listeners = new ArrayList<>(); - private final List emitters = new ArrayList<>(); - - public GarbageCollectionMonitor() { - List beans = ManagementFactory.getGarbageCollectorMXBeans(); - for (GarbageCollectorMXBean bean : beans) { - if (!(bean instanceof NotificationEmitter)) { - continue; - } - - NotificationEmitter notificationEmitter = (NotificationEmitter) bean; - notificationEmitter.addNotificationListener(this, null, null); - this.emitters.add(notificationEmitter); - } - } - - public void addListener(Listener listener) { - this.listeners.add(listener); - } - - public void removeListener(Listener listener) { - this.listeners.remove(listener); - } - - @Override - public void handleNotification(Notification notification, Object handback) { - if (!notification.getType().equals(GarbageCollectionNotificationInfo.GARBAGE_COLLECTION_NOTIFICATION)) { - return; - } - - GarbageCollectionNotificationInfo data = GarbageCollectionNotificationInfo.from((CompositeData) notification.getUserData()); - for (Listener listener : this.listeners) { - listener.onGc(data); - } - } - - @Override - public void close() { - for (NotificationEmitter e : this.emitters) { - try { - e.removeNotificationListener(this); - } catch (ListenerNotFoundException ex) { - ex.printStackTrace(); - } - } - this.emitters.clear(); - this.listeners.clear(); - } - - public interface Listener { - void 
onGc(GarbageCollectionNotificationInfo data); - } - -} diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/GarbageCollectionMonitor.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/GarbageCollectionMonitor.java new file mode 100644 index 0000000..d750b1a --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/GarbageCollectionMonitor.java @@ -0,0 +1,90 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +package me.lucko.spark.common.monitor.memory; + +import com.sun.management.GarbageCollectionNotificationInfo; + +import javax.management.ListenerNotFoundException; +import javax.management.Notification; +import javax.management.NotificationEmitter; +import javax.management.NotificationListener; +import javax.management.openmbean.CompositeData; +import java.lang.management.GarbageCollectorMXBean; +import java.lang.management.ManagementFactory; +import java.util.ArrayList; +import java.util.List; + +public class GarbageCollectionMonitor implements NotificationListener, AutoCloseable { + + private final List listeners = new ArrayList<>(); + private final List emitters = new ArrayList<>(); + + public GarbageCollectionMonitor() { + List beans = ManagementFactory.getGarbageCollectorMXBeans(); + for (GarbageCollectorMXBean bean : beans) { + if (!(bean instanceof NotificationEmitter)) { + continue; + } + + NotificationEmitter notificationEmitter = (NotificationEmitter) bean; + notificationEmitter.addNotificationListener(this, null, null); + this.emitters.add(notificationEmitter); + } + } + + public void addListener(Listener listener) { + this.listeners.add(listener); + } + + public void removeListener(Listener listener) { + this.listeners.remove(listener); + } + + @Override + public void handleNotification(Notification notification, Object handback) { + if (!notification.getType().equals(GarbageCollectionNotificationInfo.GARBAGE_COLLECTION_NOTIFICATION)) { + return; + } + + GarbageCollectionNotificationInfo data = GarbageCollectionNotificationInfo.from((CompositeData) notification.getUserData()); + for (Listener listener : this.listeners) { + listener.onGc(data); + } + } + + @Override + public void close() { + for (NotificationEmitter e : this.emitters) { + try { + e.removeNotificationListener(this); + } catch (ListenerNotFoundException ex) { + ex.printStackTrace(); + } + } + this.emitters.clear(); + this.listeners.clear(); + } + + public interface Listener { + 
void onGc(GarbageCollectionNotificationInfo data); + } + +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TickMonitor.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TickMonitor.java index 034e876..5693df6 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TickMonitor.java +++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TickMonitor.java @@ -21,7 +21,7 @@ package me.lucko.spark.common.monitor.tick; import com.sun.management.GarbageCollectionNotificationInfo; -import me.lucko.spark.common.monitor.gc.GarbageCollectionMonitor; +import me.lucko.spark.common.monitor.memory.GarbageCollectionMonitor; import me.lucko.spark.common.sampler.TickCounter; import java.text.DecimalFormat; @@ -91,7 +91,7 @@ public abstract class TickMonitor implements TickCounter.TickTask, GarbageCollec // move onto the next state if (this.averageTickTime.getCount() >= 120) { - sendMessage("&bAnalysis is now complete."); + sendMessage("&6Analysis is now complete."); sendMessage("&f> &7Max: " + df.format(this.averageTickTime.getMax()) + "ms"); sendMessage("&f> &7Min: " + df.format(this.averageTickTime.getMin()) + "ms"); sendMessage("&f> &7Avg: " + df.format(this.averageTickTime.getAverage()) + "ms"); @@ -111,8 +111,8 @@ public abstract class TickMonitor implements TickCounter.TickTask, GarbageCollec double percentageChange = (increase * 100d) / this.avg; if (percentageChange > this.percentageChangeThreshold) { - sendMessage("&7Tick &8#" + counter.getCurrentTick() + " &7lasted &b" + df.format(diff) + - "&7 ms. (&b" + df.format(percentageChange) + "% &7increase from average)"); + sendMessage("&7Tick &8#" + counter.getCurrentTick() + " &7lasted &6" + df.format(diff) + + "&7 ms. 
(&6" + df.format(percentageChange) + "% &7increase from average)"); } } } @@ -132,7 +132,7 @@ public abstract class TickMonitor implements TickCounter.TickTask, GarbageCollec gcType = "Old Gen GC"; } - sendMessage("&7Tick &8#" + this.tickCounter.getCurrentTick() + " &7included &4GC &7lasting &b" + + sendMessage("&7Tick &8#" + this.tickCounter.getCurrentTick() + " &7included &4GC &7lasting &6" + df.format(data.getGcInfo().getDuration()) + "&7 ms. (type = " + gcType + ")"); } -- cgit