| author | Hannes Greule <SirYwell@users.noreply.github.com> | 2024-11-21 10:15:32 +0100 |
|---|---|---|
| committer | GitHub <noreply@github.com> | 2024-11-21 09:15:32 +0000 |
| commit | 368ba2fdcf38f256812d558ea81f62bd6f7a43cc (patch) | |
| tree | 09ec0342135744c8b750354680d0df112bab2f94 | |
| parent | ddb0097142a0ef2635f53695b0b92fda855adb4a (diff) | |
| download | spark-368ba2fdcf38f256812d558ea81f62bd6f7a43cc.tar.gz spark-368ba2fdcf38f256812d558ea81f62bd6f7a43cc.tar.bz2 spark-368ba2fdcf38f256812d558ea81f62bd6f7a43cc.zip | |
Upgrade async-profiler to v3 (#386)
20 files changed, 406 insertions, 264 deletions
```diff
@@ -37,8 +37,8 @@ It is:
 It works by sampling statistical data about the systems activity, and constructing a call graph based on this data. The call graph is then displayed in an online viewer for further analysis by the user.
 
 There are two different profiler engines:
-* Native `AsyncGetCallTrace` + `perf_events` - uses [async-profiler](https://github.com/jvm-profiling-tools/async-profiler) (*only available on Linux x86_64 systems*)
-* Built-in Java `ThreadMXBean` - an improved version of the popular [WarmRoast profiler](https://github.com/sk89q/WarmRoast) by sk89q.
+* Native/Async - uses the [async-profiler](https://github.com/async-profiler/async-profiler) library (*only available on Linux & macOS systems*)
+* Java - uses `ThreadMXBean`, an improved version of the popular [WarmRoast profiler](https://github.com/sk89q/WarmRoast) by sk89q.
 
 ### :zap: Memory Inspection
 
diff --git a/build.gradle b/build.gradle
index 5d15ff2..cc6d1d2 100644
--- a/build.gradle
+++ b/build.gradle
@@ -21,6 +21,21 @@ subprojects {
         patchVersion = determinePatchVersion()
         pluginVersion = baseVersion + '.' + patchVersion
         pluginDescription = 'spark is a performance profiling plugin/mod for Minecraft clients, servers and proxies.'
+
+        applyExcludes = { Jar jarTask ->
+            jarTask.exclude 'module-info.class'
+            jarTask.exclude 'META-INF/maven/**'
+            jarTask.exclude 'META-INF/proguard/**'
+            jarTask.exclude 'META-INF/LICENSE'
+            jarTask.exclude 'META-INF/NOTICE'
+            // protobuf
+            jarTask.exclude '**/*.proto'
+            jarTask.exclude '**/*.proto.bin'
+            // async-profiler
+            jarTask.exclude 'linux-arm64/**'
+            jarTask.exclude 'linux-x64/**'
+            jarTask.exclude 'macos/**'
+        }
     }
 
     tasks.withType(JavaCompile).configureEach {
diff --git a/spark-bukkit/build.gradle b/spark-bukkit/build.gradle
index da46dc3..aace9b0 100644
--- a/spark-bukkit/build.gradle
+++ b/spark-bukkit/build.gradle
@@ -45,11 +45,7 @@ shadowJar {
     relocate 'me.lucko.bytesocks.client', 'me.lucko.spark.lib.bytesocks'
     relocate 'org.java_websocket', 'me.lucko.spark.lib.bytesocks.ws'
 
-    exclude 'module-info.class'
-    exclude 'META-INF/maven/**'
-    exclude 'META-INF/proguard/**'
-    exclude '**/*.proto'
-    exclude '**/*.proto.bin'
+    project.applyExcludes(delegate)
 }
 
 artifacts {
diff --git a/spark-bungeecord/build.gradle b/spark-bungeecord/build.gradle
index 8438d5e..95da3ae 100644
--- a/spark-bungeecord/build.gradle
+++ b/spark-bungeecord/build.gradle
@@ -31,11 +31,7 @@ shadowJar {
     relocate 'me.lucko.bytesocks.client', 'me.lucko.spark.lib.bytesocks'
     relocate 'org.java_websocket', 'me.lucko.spark.lib.bytesocks.ws'
 
-    exclude 'module-info.class'
-    exclude 'META-INF/maven/**'
-    exclude 'META-INF/proguard/**'
-    exclude '**/*.proto'
-    exclude '**/*.proto.bin'
+    project.applyExcludes(delegate)
 }
 
 artifacts {
diff --git a/spark-common/build.gradle b/spark-common/build.gradle
index ba3d2b4..f444c34 100644
--- a/spark-common/build.gradle
+++ b/spark-common/build.gradle
@@ -12,7 +12,7 @@ license {
 
 dependencies {
     api project(':spark-api')
-    implementation 'com.github.jvm-profiling-tools:async-profiler:v2.8.3'
+    implementation 'tools.profiler:async-profiler:3.0' // spark native version: 3cf733d
     implementation 'org.ow2.asm:asm:9.7'
     implementation 'net.bytebuddy:byte-buddy-agent:1.14.17'
     implementation 'com.google.protobuf:protobuf-javalite:4.28.2'
```
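async-profiler 3.0 is published to Maven Central under the `tools.profiler` group, replacing the old JitPack-style `com.github.jvm-profiling-tools` coordinate. The trailing comment suggests spark pairs the Java API with its own native build (`3cf733d`), which would explain why the platform directories bundled in the upstream jar (`linux-arm64/**`, `linux-x64/**`, `macos/**`) are stripped by the new shared excludes. For orientation, a minimal sketch of driving the profiler through the artifact's `one.profiler` API (the library path is a placeholder; the command strings follow async-profiler's documented `start`/`stop` syntax):

```java
import one.profiler.AsyncProfiler;

public class ProfilerSmokeTest {
    public static void main(String[] args) throws Exception {
        // Load a specific native library; the path here is hypothetical.
        AsyncProfiler profiler = AsyncProfiler.getInstance("/path/to/libasyncProfiler.so");

        profiler.execute("start,event=cpu,interval=10ms");
        // ... run the workload to be profiled ...
        profiler.execute("stop,file=profile.html"); // writes a flame graph
    }
}
```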
```diff
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java
index 84aaa95..02645ff 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java
@@ -144,18 +144,8 @@ public class AsyncProfilerAccess {
         String arch = System.getProperty("os.arch").toLowerCase(Locale.ROOT);
         String jvm = System.getProperty("java.vm.name");
 
-        // openj9 not supported by async-profiler at the moment
-        if (jvm.contains("OpenJ9")) {
-            throw new UnsupportedJvmException(jvm);
-        }
-
-        if (os.equals("linux") && arch.equals("amd64") && isLinuxMusl()) {
-            arch = "amd64-musl";
-        }
-
         Table<String, String, String> supported = ImmutableTable.<String, String, String>builder()
                 .put("linux", "amd64", "linux/amd64")
-                .put("linux", "amd64-musl", "linux/amd64-musl")
                 .put("linux", "aarch64", "linux/aarch64")
                 .put("macosx", "amd64", "macos")
                 .put("macosx", "aarch64", "macos")
@@ -242,20 +232,4 @@ public class AsyncProfilerAccess {
             super("A runtime error occurred whilst loading the native library", cause);
         }
     }
-
-    // Checks if the system is using musl instead of glibc
-    private static boolean isLinuxMusl() {
-        try {
-            InputStream stream = new ProcessBuilder("sh", "-c", "ldd `which ls`")
-                    .start()
-                    .getInputStream();
-
-            BufferedReader reader = new BufferedReader(new InputStreamReader(stream));
-            String output = reader.lines().collect(Collectors.joining());
-            return output.contains("musl"); // shrug
-        } catch (Throwable e) {
-            // ignore
-            return false;
-        }
-    }
 }
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java
index 172e29e..22699dd 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java
@@ -169,8 +169,12 @@ public class AsyncSampler extends AbstractSampler {
         }
 
         this.scheduler.schedule(() -> {
-            stop(false);
-            this.future.complete(this);
+            try {
+                stop(false);
+                this.future.complete(this);
+            } catch (Exception e) {
+                this.future.completeExceptionally(e);
+            }
         }, delay, TimeUnit.MILLISECONDS);
     }
```
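The AsyncSampler change closes a hang: if `stop(false)` threw inside the scheduled task, the exception disappeared into the scheduler and `this.future` stayed incomplete forever, so anything awaiting the sampler never woke up. The same pattern in isolation (a self-contained sketch, not spark's actual wiring):

```java
import java.util.concurrent.Callable;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

final class ScheduledCompletionDemo {
    /** Completes the returned future even when the delayed task fails. */
    static <T> CompletableFuture<T> completeAfter(
            ScheduledExecutorService scheduler, Callable<T> task, long delayMs) {
        CompletableFuture<T> future = new CompletableFuture<>();
        scheduler.schedule(() -> {
            try {
                future.complete(task.call());
            } catch (Exception e) {
                // Without this branch the failure is swallowed by the
                // executor and waiters block indefinitely.
                future.completeExceptionally(e);
            }
        }, delayMs, TimeUnit.MILLISECONDS);
        return future;
    }
}
```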
```diff
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/ProfileSegment.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/ProfileSegment.java
index 3cef7d3..169abc8 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/ProfileSegment.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/ProfileSegment.java
@@ -20,9 +20,11 @@
 
 package me.lucko.spark.common.sampler.async;
 
+import com.google.common.collect.ImmutableMap;
 import me.lucko.spark.common.sampler.async.jfr.JfrReader;
 
 import java.nio.charset.StandardCharsets;
+import java.util.Map;
 
 /**
  * Represents a profile "segment".
@@ -83,7 +85,9 @@ public class ProfileSegment {
         String threadState = UNKNOWN_THREAD_STATE;
         if (sample instanceof JfrReader.ExecutionSample) {
             JfrReader.ExecutionSample executionSample = (JfrReader.ExecutionSample) sample;
-            threadState = reader.threadStates.get(executionSample.threadState);
+
+            Map<Integer, String> threadStateLookup = reader.enums.getOrDefault("jdk.types.ThreadState", ImmutableMap.of());
+            threadState = threadStateLookup.getOrDefault(executionSample.threadState, UNKNOWN_THREAD_STATE);
         }
 
         return new ProfileSegment(sample.tid, threadName, stack, value, threadState);
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/Dictionary.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/Dictionary.java
index 60f6543..4a327f2 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/Dictionary.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/Dictionary.java
@@ -1,17 +1,6 @@
 /*
- * Copyright 2020 Andrei Pangin
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
+ * Copyright The async-profiler authors
+ * SPDX-License-Identifier: Apache-2.0
  */
 
 package me.lucko.spark.common.sampler.async.jfr;
@@ -37,9 +26,11 @@ public class Dictionary<T> {
         size = 0;
     }
 
+    // spark start
     public int size() {
         return this.size;
     }
+    // spark end
 
     public void put(long key, T value) {
         if (key == 0) {
```
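The remainder of the commit rebases spark's vendored copy of async-profiler's JFR parser onto the v3 sources, re-applying the local modifications (marked with `// spark` comments). As the ProfileSegment hunk above shows, thread states now come from the reader's generic `enums` table instead of a dedicated `threadStates` map, with `getOrDefault` guarding against missing entries. A minimal consumer of the vendored reader might look like the following sketch (it assumes the generic `readAllEvents(Class)` overload present upstream; `profile.jfr` is a placeholder path):

```java
import java.nio.file.Paths;
import java.util.List;

public class ReadSamples {
    public static void main(String[] args) throws Exception {
        // JfrReader implements Closeable, so try-with-resources applies.
        try (JfrReader reader = new JfrReader(Paths.get("profile.jfr"))) {
            List<JfrReader.ExecutionSample> samples =
                    reader.readAllEvents(JfrReader.ExecutionSample.class);
            System.out.println("execution samples: " + samples.size());
        }
    }
}
```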
```diff
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/JfrReader.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/JfrReader.java
index ea4985e..5bb8a30 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/JfrReader.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/JfrReader.java
@@ -1,17 +1,6 @@
 /*
- * Copyright 2020 Andrei Pangin
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
+ * Copyright The async-profiler authors
+ * SPDX-License-Identifier: Apache-2.0
  */
 
 package me.lucko.spark.common.sampler.async.jfr;
@@ -20,10 +9,14 @@
 import me.lucko.spark.common.sampler.async.AsyncStackTraceElement;
 import java.io.Closeable;
 import java.io.IOException;
+import java.lang.reflect.Constructor;
+import java.lang.reflect.Field;
 import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
 import java.nio.channels.FileChannel;
 import java.nio.charset.StandardCharsets;
 import java.nio.file.Path;
+import java.nio.file.Paths;
 import java.nio.file.StandardOpenOption;
 import java.util.ArrayList;
 import java.util.Collections;
```
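The field changes below replace the single public `incomplete` flag with a small state machine (`STATE_NEW_CHUNK`/`STATE_READING`/`STATE_EOF`/`STATE_INCOMPLETE`) and track per-chunk timing alongside the file-wide minima and maxima, which is what enables chunk-by-chunk processing. A sketch of how a caller could walk chunks with the new surface (assuming `readEvent(Class)` is public, as it is upstream):

```java
import java.io.IOException;

final class ChunkWalk {
    static void walk(JfrReader reader) throws IOException {
        // With stopAtNewChunk set, readEvent() returns null at each chunk
        // boundary; hasMoreChunks() then parses the next chunk header and
        // returns false only at end of file.
        reader.stopAtNewChunk = true;
        do {
            JfrReader.ExecutionSample s;
            while ((s = reader.readEvent(JfrReader.ExecutionSample.class)) != null) {
                // ... aggregate s within the current chunk ...
            }
        } while (reader.hasMoreChunks());

        if (reader.incomplete()) {
            // the recording was truncated mid-chunk
        }
    }
}
```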
```diff
@@ -39,41 +32,55 @@ public class JfrReader implements Closeable {
     private static final int CHUNK_HEADER_SIZE = 68;
     private static final int CHUNK_SIGNATURE = 0x464c5200;
 
+    private static final byte STATE_NEW_CHUNK = 0;
+    private static final byte STATE_READING = 1;
+    private static final byte STATE_EOF = 2;
+    private static final byte STATE_INCOMPLETE = 3;
+
     private final FileChannel ch;
     private ByteBuffer buf;
+    private final long fileSize;
     private long filePosition;
+    private byte state;
 
-    public boolean incomplete;
     public long startNanos = Long.MAX_VALUE;
     public long endNanos = Long.MIN_VALUE;
     public long startTicks = Long.MAX_VALUE;
+    public long chunkStartNanos;
+    public long chunkEndNanos;
+    public long chunkStartTicks;
     public long ticksPerSec;
+    public boolean stopAtNewChunk;
 
     public final Dictionary<JfrClass> types = new Dictionary<>();
     public final Map<String, JfrClass> typesByName = new HashMap<>();
-    public final Map<Long, String> threads = new HashMap<>(); // spark
+    public final Map<Long, String> threads = new HashMap<>(); // spark - convert to map
     public final Dictionary<ClassRef> classes = new Dictionary<>();
+    public final Dictionary<String> strings = new Dictionary<>();
     public final Dictionary<byte[]> symbols = new Dictionary<>();
     public final Dictionary<MethodRef> methods = new Dictionary<>();
     public final Dictionary<StackTrace> stackTraces = new Dictionary<>();
-    public final Dictionary<AsyncStackTraceElement> stackFrames = new Dictionary<>(); // spark
-    public final Map<Integer, String> frameTypes = new HashMap<>();
-    public final Map<Integer, String> threadStates = new HashMap<>();
+    public final Dictionary<AsyncStackTraceElement> stackFrames = new Dictionary<>(); // spark - add field
     public final Map<String, String> settings = new HashMap<>();
+    public final Map<String, Map<Integer, String>> enums = new HashMap<>();
+
+    private final Dictionary<Constructor<? extends Event>> customEvents = new Dictionary<>();
 
     private int executionSample;
     private int nativeMethodSample;
+    private int wallClockSample;
     private int allocationInNewTLAB;
     private int allocationOutsideTLAB;
     private int allocationSample;
+    private int liveObject;
     private int monitorEnter;
     private int threadPark;
     private int activeSetting;
-    private boolean activeSettingHasStack;
 
     public JfrReader(Path path) throws IOException { // spark - Path instead of String
         this.ch = FileChannel.open(path, StandardOpenOption.READ); // spark - Path instead of String
         this.buf = ByteBuffer.allocateDirect(BUFFER_SIZE);
+        this.fileSize = ch.size();
 
         buf.flip();
         ensureBytes(CHUNK_HEADER_SIZE);
@@ -82,15 +89,52 @@ public class JfrReader implements Closeable {
         }
     }
 
+    public JfrReader(ByteBuffer buf) throws IOException {
+        this.ch = null;
+        this.buf = buf;
+        this.fileSize = buf.limit();
+
+        buf.order(ByteOrder.BIG_ENDIAN);
+        if (!readChunk(0)) {
+            throw new IOException("Incomplete JFR file");
+        }
+    }
+
     @Override
     public void close() throws IOException {
-        ch.close();
+        if (ch != null) {
+            ch.close();
+        }
+    }
+
+    public boolean eof() {
+        return state >= STATE_EOF;
+    }
+
+    public boolean incomplete() {
+        return state == STATE_INCOMPLETE;
     }
 
     public long durationNanos() {
         return endNanos - startNanos;
     }
 
+    public <E extends Event> void registerEvent(String name, Class<E> eventClass) {
+        JfrClass type = typesByName.get(name);
+        if (type != null) {
+            try {
+                customEvents.put(type.id, eventClass.getConstructor(JfrReader.class));
+            } catch (NoSuchMethodException e) {
+                throw new IllegalArgumentException("No suitable constructor found");
+            }
+        }
+    }
+
+    // Similar to eof(), but parses the next chunk header
+    public boolean hasMoreChunks() throws IOException {
+        return state == STATE_NEW_CHUNK ? readChunk(buf.position()) : state == STATE_READING;
+    }
+
     public List<Event> readAllEvents() throws IOException {
         return readAllEvents(null);
     }
```
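`registerEvent` above is v3's extension point: it resolves a JFR type name against the chunk metadata and stores a constructor taking the reader, which `readEvent` (below) invokes reflectively for otherwise-unmatched type ids before seeking past the payload. A hypothetical custom event, assuming the base `Event` constructor takes `(time, tid, stackTraceId)` as it does upstream:

```java
// Hypothetical event class for a type named "profiler.MySample"; the payload
// is decoded field-by-field in declaration order via the now-public getters.
public static class MySample extends Event {
    public final long value;

    public MySample(JfrReader jfr) {
        // Java evaluates arguments left to right, so these calls consume
        // time, tid and stackTraceId in stream order.
        super(jfr.getVarlong(), jfr.getVarint(), jfr.getVarint());
        this.value = jfr.getVarlong();
    }
}

// Registration is a no-op unless the chunk metadata declares the type:
// reader.registerEvent("profiler.MySample", MySample.class);
```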
```diff
@@ -116,41 +160,58 @@ public class JfrReader implements Closeable {
             int type = getVarint();
 
             if (type == 'L' && buf.getInt(pos) == CHUNK_SIGNATURE) {
-                if (readChunk(pos)) {
+                if (state != STATE_NEW_CHUNK && stopAtNewChunk) {
+                    buf.position(pos);
+                    state = STATE_NEW_CHUNK;
+                } else if (readChunk(pos)) {
                     continue;
                 }
-                break;
+                return null;
             }
 
             if (type == executionSample || type == nativeMethodSample) {
-                if (cls == null || cls == ExecutionSample.class) return (E) readExecutionSample();
+                if (cls == null || cls == ExecutionSample.class) return (E) readExecutionSample(false);
+            } else if (type == wallClockSample) {
+                if (cls == null || cls == ExecutionSample.class) return (E) readExecutionSample(true);
             } else if (type == allocationInNewTLAB) {
                 if (cls == null || cls == AllocationSample.class) return (E) readAllocationSample(true);
             } else if (type == allocationOutsideTLAB || type == allocationSample) {
                 if (cls == null || cls == AllocationSample.class) return (E) readAllocationSample(false);
+            } else if (type == liveObject) {
+                if (cls == null || cls == LiveObject.class) return (E) readLiveObject();
             } else if (type == monitorEnter) {
                 if (cls == null || cls == ContendedLock.class) return (E) readContendedLock(false);
             } else if (type == threadPark) {
                 if (cls == null || cls == ContendedLock.class) return (E) readContendedLock(true);
             } else if (type == activeSetting) {
                 readActiveSetting();
-            }
-
-            if ((pos += size) <= buf.limit()) {
-                buf.position(pos);
             } else {
-                seek(filePosition + pos);
+                Constructor<? extends Event> customEvent = customEvents.get(type);
+                if (customEvent != null && (cls == null || cls == customEvent.getDeclaringClass())) {
+                    try {
+                        return (E) customEvent.newInstance(this);
+                    } catch (ReflectiveOperationException e) {
+                        throw new IllegalStateException(e);
+                    } finally {
+                        seek(filePosition + pos + size);
+                    }
+                }
             }
+
+            seek(filePosition + pos + size);
         }
+
+        state = STATE_EOF;
         return null;
     }
 
-    private ExecutionSample readExecutionSample() {
+    private ExecutionSample readExecutionSample(boolean hasSamples) {
         long time = getVarlong();
         int tid = getVarint();
         int stackTraceId = getVarint();
         int threadState = getVarint();
-        return new ExecutionSample(time, tid, stackTraceId, threadState);
+        int samples = hasSamples ? getVarint() : 1;
+        return new ExecutionSample(time, tid, stackTraceId, threadState, samples);
     }
 
     private AllocationSample readAllocationSample(boolean tlab) {
@@ -163,6 +224,16 @@ public class JfrReader implements Closeable {
         return new AllocationSample(time, tid, stackTraceId, classId, allocationSize, tlabSize);
     }
 
+    private LiveObject readLiveObject() {
+        long time = getVarlong();
+        int tid = getVarint();
+        int stackTraceId = getVarint();
+        int classId = getVarint();
+        long allocationSize = getVarlong();
+        long allocatimeTime = getVarlong();
+        return new LiveObject(time, tid, stackTraceId, classId, allocationSize, allocatimeTime);
+    }
+
     private ContendedLock readContendedLock(boolean hasTimeout) {
         long time = getVarlong();
         long duration = getVarlong();
@@ -176,11 +247,12 @@ public class JfrReader implements Closeable {
     }
 
     private void readActiveSetting() {
-        long time = getVarlong();
-        long duration = getVarlong();
-        int tid = getVarint();
-        if (activeSettingHasStack) getVarint();
-        long id = getVarlong();
+        for (JfrField field : typesByName.get("jdk.ActiveSetting").fields) {
+            getVarlong();
+            if ("id".equals(field.name)) {
+                break;
+            }
+        }
         String name = getString();
         String value = getString();
         settings.put(name, value);
```
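Two event-model changes land here: v3's wall-clock engine emits `profiler.WallClockSample` events in which a single record can stand for several collapsed samples (hence the extra `samples` varint), and `readActiveSetting` now walks the type's declared fields up to `id` instead of hardcoding a layout that shifted between JFR versions. Aggregation therefore has to weight each stack by the sample count; a sketch (assuming `ExecutionSample` exposes public `stackTraceId` and `samples` fields, per the constructor above):

```java
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

final class SampleWeights {
    /** CPU samples carry samples == 1; wall-clock samples may carry more. */
    static Map<Integer, Long> weightByStack(JfrReader reader) throws IOException {
        Map<Integer, Long> weights = new HashMap<>();
        for (JfrReader.ExecutionSample s :
                reader.readAllEvents(JfrReader.ExecutionSample.class)) {
            weights.merge(s.stackTraceId, (long) s.samples, Long::sum);
        }
        return weights;
    }
}
```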
```diff
@@ -196,27 +268,38 @@ public class JfrReader implements Closeable {
             throw new IOException("Unsupported JFR version: " + (version >>> 16) + "." + (version & 0xffff));
         }
 
+        long chunkStart = filePosition + pos;
+        long chunkSize = buf.getLong(pos + 8);
+        if (chunkStart + chunkSize > fileSize) {
+            state = STATE_INCOMPLETE;
+            return false;
+        }
+
         long cpOffset = buf.getLong(pos + 16);
         long metaOffset = buf.getLong(pos + 24);
         if (cpOffset == 0 || metaOffset == 0) {
-            incomplete = true;
+            state = STATE_INCOMPLETE;
             return false;
         }
 
-        startNanos = Math.min(startNanos, buf.getLong(pos + 32));
-        endNanos = Math.max(endNanos, buf.getLong(pos + 32) + buf.getLong(pos + 40));
-        startTicks = Math.min(startTicks, buf.getLong(pos + 48));
+        chunkStartNanos = buf.getLong(pos + 32);
+        chunkEndNanos = buf.getLong(pos + 32) + buf.getLong(pos + 40);
+        chunkStartTicks = buf.getLong(pos + 48);
         ticksPerSec = buf.getLong(pos + 56);
 
+        startNanos = Math.min(startNanos, chunkStartNanos);
+        endNanos = Math.max(endNanos, chunkEndNanos);
+        startTicks = Math.min(startTicks, chunkStartTicks);
+
         types.clear();
         typesByName.clear();
 
-        long chunkStart = filePosition + pos;
         readMeta(chunkStart + metaOffset);
         readConstantPool(chunkStart + cpOffset);
         cacheEventTypes();
 
         seek(chunkStart + CHUNK_HEADER_SIZE);
+        state = STATE_READING;
         return true;
     }
 
@@ -224,7 +307,8 @@ public class JfrReader implements Closeable {
         seek(metaOffset);
         ensureBytes(5);
 
-        ensureBytes(getVarint() - buf.position());
+        int posBeforeSize = buf.position();
+        ensureBytes(getVarint() - (buf.position() - posBeforeSize));
         getVarint();
         getVarlong();
         getVarlong();
@@ -277,7 +361,8 @@ public class JfrReader implements Closeable {
         seek(cpOffset);
         ensureBytes(5);
 
-        ensureBytes(getVarint() - buf.position());
+        int posBeforeSize = buf.position();
+        ensureBytes(getVarint() - (buf.position() - posBeforeSize));
         getVarint();
         getVarlong();
         getVarlong();
@@ -298,10 +383,13 @@ public class JfrReader implements Closeable {
                 buf.position(buf.position() + (CHUNK_HEADER_SIZE + 3));
                 break;
             case "java.lang.Thread":
-                readThreads(type.field("group") != null);
+                readThreads(type.fields.size());
                 break;
             case "java.lang.Class":
-                readClasses(type.field("hidden") != null);
+                readClasses(type.fields.size());
+                break;
+            case "java.lang.String":
+                readStrings();
                 break;
             case "jdk.types.Symbol":
                 readSymbols();
```
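`readChunk` above now records per-chunk timing (`chunkStartNanos`, `chunkEndNanos`, `chunkStartTicks`) and treats a chunk whose declared size runs past the end of the file as incomplete, rather than only checking for zeroed constant pool/metadata offsets. Restating the big-endian header layout implied by the offsets it reads as a standalone sketch (the field and class names are mine; the 4-byte `FLR\0` signature and version word precede these fields, per the checks above):

```java
import java.nio.ByteBuffer;

// Chunk header fields at the offsets used by readChunk.
final class ChunkHeader {
    final long size;          // pos + 8:  total chunk size in bytes
    final long cpOffset;      // pos + 16: constant pool offset (0 while still being written)
    final long metaOffset;    // pos + 24: metadata offset (0 while still being written)
    final long startNanos;    // pos + 32: wall-clock time at chunk start
    final long durationNanos; // pos + 40
    final long startTicks;    // pos + 48
    final long ticksPerSec;   // pos + 56: tick frequency for converting ticks to time

    ChunkHeader(ByteBuffer buf, int pos) {
        this.size = buf.getLong(pos + 8);
        this.cpOffset = buf.getLong(pos + 16);
        this.metaOffset = buf.getLong(pos + 24);
        this.startNanos = buf.getLong(pos + 32);
        this.durationNanos = buf.getLong(pos + 40);
        this.startTicks = buf.getLong(pos + 48);
        this.ticksPerSec = buf.getLong(pos + 56);
    }
}
```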
```diff
@@ -312,31 +400,29 @@ public class JfrReader implements Closeable {
             case "jdk.types.StackTrace":
                 readStackTraces();
                 break;
-            case "jdk.types.FrameType":
-                readMap(frameTypes);
-                break;
-            case "jdk.types.ThreadState":
-                readMap(threadStates);
-                break;
             default:
-                readOtherConstants(type.fields);
+                if (type.simpleType && type.fields.size() == 1) {
+                    readEnumValues(type.name);
+                } else {
+                    readOtherConstants(type.fields);
+                }
         }
     }
 
-    private void readThreads(boolean hasGroup) {
-        int count = getVarint(); //threads.preallocate(getVarint());
+    private void readThreads(int fieldCount) {
+        int count = getVarint(); // spark - don't preallocate
         for (int i = 0; i < count; i++) {
             long id = getVarlong();
             String osName = getString();
             int osThreadId = getVarint();
             String javaName = getString();
             long javaThreadId = getVarlong();
-            if (hasGroup) getVarlong();
+            readFields(fieldCount - 4);
             threads.put(id, javaName != null ? javaName : osName);
         }
     }
 
-    private void readClasses(boolean hasHidden) {
+    private void readClasses(int fieldCount) {
         int count = classes.preallocate(getVarint());
         for (int i = 0; i < count; i++) {
             long id = getVarlong();
@@ -344,7 +430,7 @@ public class JfrReader implements Closeable {
             long name = getVarlong();
             long pkg = getVarlong();
             int modifiers = getVarint();
-            if (hasHidden) getVarint();
+            readFields(fieldCount - 4);
             classes.put(id, new ClassRef(name));
         }
     }
@@ -360,7 +446,7 @@ public class JfrReader implements Closeable {
             int hidden = getVarint();
             methods.put(id, new MethodRef(cls, name, sig));
         }
-        stackFrames.preallocate(count); // spark
+        stackFrames.preallocate(count); // spark - preallocate frames size
     }
 
     private void readStackTraces() {
@@ -388,6 +474,13 @@ public class JfrReader implements Closeable {
         return new StackTrace(methods, types, locations);
     }
 
+    private void readStrings() {
+        int count = strings.preallocate(getVarint());
+        for (int i = 0; i < count; i++) {
+            strings.put(getVarlong(), getString());
+        }
+    }
+
     private void readSymbols() {
         int count = symbols.preallocate(getVarint());
         for (int i = 0; i < count; i++) {
@@ -399,11 +492,13 @@ public class JfrReader implements Closeable {
         }
     }
 
-    private void readMap(Map<Integer, String> map) {
+    private void readEnumValues(String typeName) {
+        HashMap<Integer, String> map = new HashMap<>();
         int count = getVarint();
         for (int i = 0; i < count; i++) {
-            map.put(getVarint(), getString());
+            map.put((int) getVarlong(), getString());
         }
+        enums.put(typeName, map);
     }
 
     private void readOtherConstants(List<JfrField> fields) {
@@ -432,16 +527,28 @@ public class JfrReader implements Closeable {
         }
     }
 
+    private void readFields(int count) {
+        while (count-- > 0) {
+            getVarlong();
+        }
+    }
+
     private void cacheEventTypes() {
         executionSample = getTypeId("jdk.ExecutionSample");
         nativeMethodSample = getTypeId("jdk.NativeMethodSample");
+        wallClockSample = getTypeId("profiler.WallClockSample");
         allocationInNewTLAB = getTypeId("jdk.ObjectAllocationInNewTLAB");
         allocationOutsideTLAB = getTypeId("jdk.ObjectAllocationOutsideTLAB");
         allocationSample = getTypeId("jdk.ObjectAllocationSample");
+        liveObject = getTypeId("profiler.LiveObject");
         monitorEnter = getTypeId("jdk.JavaMonitorEnter");
         threadPark = getTypeId("jdk.ThreadPark");
         activeSetting = getTypeId("jdk.ActiveSetting");
-        activeSettingHasStack = activeSetting >= 0 && typesByName.get("jdk.ActiveSetting").field("stackTrace") != null;
+
+        registerEvent("jdk.CPULoad", CPULoad.class);
+        registerEvent("jdk.GCHeapSummary", GCHeapSummary.class);
+        registerEvent("jdk.ObjectCount", ObjectCount.class);
+        registerEvent("jdk.ObjectCountAfterGC", ObjectCount.class);
     }
 
     private int getTypeId(String typeName) {
```
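`cacheEventTypes` above now also registers reader-decoded events (`jdk.CPULoad`, `jdk.GCHeapSummary`, `jdk.ObjectCount`, `jdk.ObjectCountAfterGC`) through the same `registerEvent` machinery; the event classes themselves live elsewhere in this file and are not part of the visible hunks. A sketch of consuming one of them (the field names follow upstream async-profiler's `CPULoad` and are an assumption here):

```java
import java.io.IOException;

final class CpuLoadDump {
    static void dump(JfrReader reader) throws IOException {
        // CPULoad is assumed to sit alongside the reader, as upstream.
        for (CPULoad e : reader.readAllEvents(CPULoad.class)) {
            System.out.printf("jvmUser=%.2f jvmSystem=%.2f machineTotal=%.2f%n",
                    e.jvmUser, e.jvmSystem, e.machineTotal);
        }
    }
}
```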
```diff
@@ -449,7 +556,23 @@ public class JfrReader implements Closeable {
         return type != null ? type.id : -1;
     }
 
-    private int getVarint() {
+    public int getEnumKey(String typeName, String value) {
+        Map<Integer, String> enumValues = enums.get(typeName);
+        if (enumValues != null) {
+            for (Map.Entry<Integer, String> entry : enumValues.entrySet()) {
+                if (value.equals(entry.getValue())) {
+                    return entry.getKey();
+                }
+            }
+        }
+        return -1;
+    }
+
+    public String getEnumValue(String typeName, int key) {
+        return enums.get(typeName).get(key);
+    }
+
+    public int getVarint() {
         int result = 0;
         for (int shift = 0; ; shift += 7) {
             byte b = buf.get();
@@ -460,7 +583,7 @@ public class JfrReader implements Closeable {
         }
     }
 
-    private long getVarlong() {
+    public long getVarlong() {
         long result = 0;
         for (int shift = 0; shift < 56; shift += 7) {
             byte b = buf.get();
@@ -472,12 +595,22 @@ public class JfrReader implements Closeable {
         return result | (buf.get() & 0xffL) << 56;
     }
 
-    private String getString() {
+    public float getFloat() {
+        return buf.getFloat();
+    }
+
+    public double getDouble() {
+        return buf.getDouble();
+    }
+
+    public String getString() {
         switch (buf.get()) {
             case 0:
                 return null;
             case 1:
                 return "";
+            case 2:
+                return strings.get(getVarlong());
             case 3:
                 return new String(getBytes(), StandardCharsets.UTF_8);
             case 4: {
@@ -494,16 +627,21 @@ public class JfrReader implements Closeable {
         }
     }
 
-    private byte[] getBytes() {
+    public byte[] getBytes() {
         byte[] bytes = new byte[getVarint()];
         buf.get(bytes);
         return bytes;
     }
 
     private void seek(long pos) throws IOException {
-        filePosition = pos;
-        ch.position(pos);
-        buf.rewind().flip();
+        long bufPosition = pos - filePosition;
+        if (bufPosition >= 0 && bufPosition <= buf.limit()) {
+            buf.position((int) bufPosition);
+        } else {
+            filePosition = pos;
+            ch.position(pos);
+            buf.rewind().flip();
+        }
     }
 
     private boolean ensureBytes(int needed) throws IOException {
@@ -511,6 +649,10 @@ public class JfrReader implements Closeable {
             return true;
         }
 
+        if (ch == null) {
+            return false;
+        }
+
         filePosition += buf.position();
 
         if (buf.capacity() < needed) {
@@ -544,11 +686,13 @@ public class JfrReader implements Closeable {
 
     static class JfrClass extends Element {
         final int id;
+        final boolean simpleType;
         final String name;
         final List<JfrField> fields;
 
         JfrClass(Map<String, String> attributes) {
             this.id = Integer.parseInt(attributes.get("id"));
+            this.simpleType = "true".equals(attributes.get("simpleType"));
             this.name = attributes.get("name");
             this.fields = new ArrayList<>(2);
         }
@@ -560,7 +704,7 @@ public class JfrReader implements Closeable {
         }
     }
 
-    JfrField field(String name) {
+    public JfrField field(String name) {
```
