author    Luck <git@lucko.me>  2019-04-17 00:58:28 +0100
committer Luck <git@lucko.me>  2019-04-17 00:58:28 +0100
commit    394c59d375811e4b1e0f23a528ef85d8c4d0e5a0 (patch)
tree      2e9486f564a2a518fc938dfc0d5b58ae067789f1 /spark-common/src/main/java/me/lucko/spark/common/heapdump
parent    ecd4cec8545460a4fc4ca65b911c2503a00cd8e7 (diff)
Add /spark health command to report tps / memory usage stats
Diffstat (limited to 'spark-common/src/main/java/me/lucko/spark/common/heapdump')
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDump.java          77
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java   171
2 files changed, 248 insertions(+), 0 deletions(-)
diff --git a/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDump.java b/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDump.java
new file mode 100644
index 0000000..189f89e
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDump.java
@@ -0,0 +1,77 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.heapdump;
+
+import javax.management.JMX;
+import javax.management.MBeanServer;
+import javax.management.ObjectName;
+import java.io.IOException;
+import java.lang.management.ManagementFactory;
+import java.lang.reflect.Method;
+import java.nio.file.Path;
+
+/**
+ * Utility for creating .hprof memory heap snapshots.
+ */
+public final class HeapDump {
+
+    private HeapDump() {}
+
+    /** The object name of the com.sun.management.HotSpotDiagnosticMXBean */
+    private static final String DIAGNOSTIC_BEAN = "com.sun.management:type=HotSpotDiagnostic";
+
+    /**
+     * Creates a heap dump at the given output path.
+     *
+     * @param outputPath the path to write the snapshot to
+     * @param live if true, dump only live objects, i.e. objects that are reachable from others
+     * @throws Exception if the heap dump could not be created
+     */
+    public static void dumpHeap(Path outputPath, boolean live) throws Exception {
+        String outputPathString = outputPath.toAbsolutePath().normalize().toString();
+
+        if (isOpenJ9()) {
+            Class<?> dumpClass = Class.forName("com.ibm.jvm.Dump");
+            Method heapDumpMethod = dumpClass.getMethod("heapDumpToFile", String.class);
+            heapDumpMethod.invoke(null, outputPathString);
+        } else {
+            MBeanServer beanServer = ManagementFactory.getPlatformMBeanServer();
+            ObjectName diagnosticBeanName = ObjectName.getInstance(DIAGNOSTIC_BEAN);
+
+            HotSpotDiagnosticMXBean proxy = JMX.newMXBeanProxy(beanServer, diagnosticBeanName, HotSpotDiagnosticMXBean.class);
+            proxy.dumpHeap(outputPathString, live);
+        }
+    }
+
+    public static boolean isOpenJ9() {
+        try {
+            Class.forName("com.ibm.jvm.Dump");
+            return true;
+        } catch (ClassNotFoundException e) {
+            return false;
+        }
+    }
+
+    public interface HotSpotDiagnosticMXBean {
+        void dumpHeap(String outputFile, boolean live) throws IOException;
+    }
+
+}
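
For context, a minimal sketch of how this utility can be invoked (not part of the patch; the class name and output file name below are illustrative):

import me.lucko.spark.common.heapdump.HeapDump;

import java.nio.file.Path;
import java.nio.file.Paths;

public class HeapDumpUsageSketch {
    public static void main(String[] args) throws Exception {
        // Write a .hprof snapshot to the working directory.
        Path output = Paths.get("spark-heap-dump.hprof");

        // 'true' dumps only live objects (those still reachable), which keeps the file smaller.
        HeapDump.dumpHeap(output, true);
    }
}
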
diff --git a/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java b/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java
new file mode 100644
index 0000000..c95e9ab
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java
@@ -0,0 +1,171 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.heapdump;
+
+import com.google.gson.stream.JsonWriter;
+import me.lucko.spark.common.util.TypeDescriptors;
+
+import javax.management.JMX;
+import javax.management.MBeanServer;
+import javax.management.ObjectName;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.OutputStreamWriter;
+import java.io.Writer;
+import java.lang.management.ManagementFactory;
+import java.nio.charset.StandardCharsets;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Objects;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+import java.util.stream.Collectors;
+import java.util.zip.GZIPOutputStream;
+
+/**
+ * Represents a "heap dump summary" from the VM.
+ *
+ * <p>Contains a number of entries, corresponding to types of objects in the virtual machine
+ * and their recorded impact on memory usage.</p>
+ */
+public final class HeapDumpSummary {
+
+    /** The object name of the com.sun.management.DiagnosticCommandMBean */
+    private static final String DIAGNOSTIC_BEAN = "com.sun.management:type=DiagnosticCommand";
+    /** A regex pattern matching lines of the raw class histogram output, e.g. "   1:     34567    1234567  [C" */
+    private static final Pattern OUTPUT_FORMAT = Pattern.compile("^\\s*(\\d+):\\s*(\\d+)\\s*(\\d+)\\s*([^\\s]+).*$");
+
+    /**
+     * Obtains the raw heap data output from the DiagnosticCommandMBean.
+     *
+     * @return the raw output
+     * @throws Exception if the DiagnosticCommandMBean could not be queried
+     */
+    private static String getRawHeapData() throws Exception {
+        MBeanServer beanServer = ManagementFactory.getPlatformMBeanServer();
+        ObjectName diagnosticBeanName = ObjectName.getInstance(DIAGNOSTIC_BEAN);
+
+        DiagnosticCommandMXBean proxy = JMX.newMXBeanProxy(beanServer, diagnosticBeanName, DiagnosticCommandMXBean.class);
+        return proxy.gcClassHistogram(new String[0]);
+    }
+
+    /**
+     * Creates a new heap dump based on the current VM.
+     *
+     * @return the created heap dump
+     * @throws RuntimeException if an error occurred whilst requesting a heap dump from the VM
+     */
+    public static HeapDumpSummary createNew() {
+        String rawOutput;
+        try {
+            rawOutput = getRawHeapData();
+        } catch (Exception e) {
+            throw new RuntimeException("Unable to get heap dump", e);
+        }
+
+        return new HeapDumpSummary(Arrays.stream(rawOutput.split("\n"))
+                .map(line -> {
+                    Matcher matcher = OUTPUT_FORMAT.matcher(line);
+                    if (!matcher.matches()) {
+                        return null;
+                    }
+
+                    return new Entry(
+                            Integer.parseInt(matcher.group(1)),
+                            Integer.parseInt(matcher.group(2)),
+                            Long.parseLong(matcher.group(3)),
+                            TypeDescriptors.getJavaType(matcher.group(4))
+                    );
+                })
+                .filter(Objects::nonNull)
+                .collect(Collectors.toList()));
+    }
+
+    /** The entries in this heap dump */
+    private final List<Entry> entries;
+
+    private HeapDumpSummary(List<Entry> entries) {
+        this.entries = entries;
+    }
+
+    private void writeOutput(JsonWriter writer) throws IOException {
+        writer.beginObject();
+        writer.name("type").value("heap");
+        writer.name("entries").beginArray();
+        for (Entry entry : this.entries) {
+            writer.beginObject();
+            writer.name("#").value(entry.getOrder());
+            writer.name("i").value(entry.getInstances());
+            writer.name("s").value(entry.getBytes());
+            writer.name("t").value(entry.getType());
+            writer.endObject();
+        }
+        writer.endArray();
+        writer.endObject();
+    }
+
+    public byte[] formCompressedDataPayload() {
+        ByteArrayOutputStream byteOut = new ByteArrayOutputStream();
+        try (Writer writer = new OutputStreamWriter(new GZIPOutputStream(byteOut), StandardCharsets.UTF_8)) {
+            try (JsonWriter jsonWriter = new JsonWriter(writer)) {
+                writeOutput(jsonWriter);
+            }
+        } catch (IOException e) {
+            throw new RuntimeException(e);
+        }
+        return byteOut.toByteArray();
+    }
+
+    public static final class Entry {
+        private final int order;
+        private final int instances;
+        private final long bytes;
+        private final String type;
+
+        Entry(int order, int instances, long bytes, String type) {
+            this.order = order;
+            this.instances = instances;
+            this.bytes = bytes;
+            this.type = type;
+        }
+
+        public int getOrder() {
+            return this.order;
+        }
+
+        public int getInstances() {
+            return this.instances;
+        }
+
+        public long getBytes() {
+            return this.bytes;
+        }
+
+        public String getType() {
+            return this.type;
+        }
+    }
+
+    public interface DiagnosticCommandMXBean {
+        String gcClassHistogram(String[] args);
+    }
+
+}
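
For context, a sketch of how the summary can be produced and consumed (not part of the patch; writing the payload to a local file is illustrative, as the plugin uploads this gzipped JSON for viewing rather than saving it locally):

import me.lucko.spark.common.heapdump.HeapDumpSummary;

import java.nio.file.Files;
import java.nio.file.Paths;

public class HeapDumpSummaryUsageSketch {
    public static void main(String[] args) throws Exception {
        // Query the DiagnosticCommandMBean for a class histogram of the running VM.
        HeapDumpSummary summary = HeapDumpSummary.createNew();

        // Serialise the entries to JSON of the form
        // {"type":"heap","entries":[{"#":..,"i":..,"s":..,"t":..}, ...]} and gzip the result.
        byte[] payload = summary.formCompressedDataPayload();

        Files.write(Paths.get("heap-summary.json.gz"), payload);
    }
}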