From bfbbcb3e68e019da4657ef0da22b889de656ae3f Mon Sep 17 00:00:00 2001
From: Luck
Date: Tue, 28 Dec 2021 18:12:33 +0000
Subject: Include platform and system statistics in profiler viewer payload
---
.../java/me/lucko/spark/common/SparkPlatform.java | 7 +
.../spark/common/api/GarbageCollectorInfo.java | 6 +-
.../spark/common/command/modules/HealthModule.java | 35 ++--
.../common/command/modules/HeapAnalysisModule.java | 85 +---------
.../common/command/modules/SamplerModule.java | 4 +-
.../spark/common/heapdump/HeapDumpSummary.java | 16 +-
.../lucko/spark/common/monitor/disk/DiskUsage.java | 63 ++++++++
.../monitor/memory/GarbageCollectorStatistics.java | 10 ++
.../spark/common/monitor/memory/MemoryInfo.java | 74 +++++++++
.../spark/common/monitor/tick/TickStatistics.java | 10 +-
.../common/platform/AbstractPlatformInfo.java | 17 --
.../lucko/spark/common/platform/PlatformInfo.java | 38 +----
.../platform/PlatformStatisticsProvider.java | 165 +++++++++++++++++++
.../spark/common/sampler/AbstractSampler.java | 14 ++
.../me/lucko/spark/common/sampler/Sampler.java | 6 +-
.../lucko/spark/common/sampler/ThreadDumper.java | 2 +-
.../lucko/spark/common/sampler/ThreadGrouper.java | 2 +-
.../common/sampler/aggregator/DataAggregator.java | 2 +-
.../common/sampler/async/AsyncDataAggregator.java | 2 +-
.../common/sampler/async/AsyncProfilerAccess.java | 2 +-
.../spark/common/sampler/async/AsyncSampler.java | 16 +-
.../spark/common/sampler/java/JavaSampler.java | 12 +-
.../common/sampler/java/SimpleDataAggregator.java | 2 +-
.../common/sampler/java/TickedDataAggregator.java | 2 +-
.../spark/common/sampler/node/StackTraceNode.java | 6 +-
.../spark/common/sampler/node/ThreadNode.java | 6 +-
.../me/lucko/spark/common/util/Compression.java | 100 ++++++++++++
spark-common/src/main/proto/spark/spark.proto | 176 +++++++++++----------
spark-common/src/main/proto/spark/spark_heap.proto | 27 ++++
.../src/main/proto/spark/spark_sampler.proto | 71 +++++++++
.../src/main/resources/linux/libasyncProfiler.so | Bin 398099 -> 0 bytes
.../src/main/resources/macosx/libasyncProfiler.so | Bin 599568 -> 0 bytes
.../main/resources/spark/linux/libasyncProfiler.so | Bin 0 -> 398099 bytes
.../resources/spark/macosx/libasyncProfiler.so | Bin 0 -> 599568 bytes
34 files changed, 706 insertions(+), 272 deletions(-)
create mode 100644 spark-common/src/main/java/me/lucko/spark/common/monitor/disk/DiskUsage.java
create mode 100644 spark-common/src/main/java/me/lucko/spark/common/monitor/memory/MemoryInfo.java
delete mode 100644 spark-common/src/main/java/me/lucko/spark/common/platform/AbstractPlatformInfo.java
create mode 100644 spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java
create mode 100644 spark-common/src/main/java/me/lucko/spark/common/util/Compression.java
create mode 100644 spark-common/src/main/proto/spark/spark_heap.proto
create mode 100644 spark-common/src/main/proto/spark/spark_sampler.proto
delete mode 100755 spark-common/src/main/resources/linux/libasyncProfiler.so
delete mode 100755 spark-common/src/main/resources/macosx/libasyncProfiler.so
create mode 100755 spark-common/src/main/resources/spark/linux/libasyncProfiler.so
create mode 100755 spark-common/src/main/resources/spark/macosx/libasyncProfiler.so
diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
index 57f8732..a721adc 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
@@ -41,6 +41,7 @@ import me.lucko.spark.common.command.tabcomplete.TabCompleter;
import me.lucko.spark.common.monitor.cpu.CpuMonitor;
import me.lucko.spark.common.monitor.memory.GarbageCollectorStatistics;
import me.lucko.spark.common.monitor.tick.TickStatistics;
+import me.lucko.spark.common.platform.PlatformStatisticsProvider;
import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.common.tick.TickReporter;
import me.lucko.spark.common.util.BytebinClient;
@@ -96,6 +97,7 @@ public class SparkPlatform {
private final TickHook tickHook;
private final TickReporter tickReporter;
private final TickStatistics tickStatistics;
+ private final PlatformStatisticsProvider statisticsProvider;
private Map<String, GarbageCollectorStatistics> startupGcStatistics = ImmutableMap.of();
private long serverNormalOperationStartTime;
private final AtomicBoolean enabled = new AtomicBoolean(false);
@@ -132,6 +134,7 @@ public class SparkPlatform {
this.tickHook = plugin.createTickHook();
this.tickReporter = plugin.createTickReporter();
this.tickStatistics = this.tickHook != null ? new TickStatistics() : null;
+ this.statisticsProvider = new PlatformStatisticsProvider(this);
}
public void enable() {
@@ -214,6 +217,10 @@ public class SparkPlatform {
return this.tickReporter;
}
+ public PlatformStatisticsProvider getStatisticsProvider() {
+ return this.statisticsProvider;
+ }
+
public ClassSourceLookup createClassSourceLookup() {
return this.plugin.createClassSourceLookup();
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/api/GarbageCollectorInfo.java b/spark-common/src/main/java/me/lucko/spark/common/api/GarbageCollectorInfo.java
index 8d289aa..fc14c67 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/api/GarbageCollectorInfo.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/api/GarbageCollectorInfo.java
@@ -36,10 +36,8 @@ public class GarbageCollectorInfo implements GarbageCollector {
this.name = name;
this.totalCollections = stats.getCollectionCount();
this.totalTime = stats.getCollectionTime();
-
- double totalTimeDouble = this.totalTime;
- this.averageTime = this.totalCollections == 0 ? 0 : totalTimeDouble / this.totalCollections;
- this.averageFrequency = this.totalCollections == 0 ? 0 : (long) ((serverUptime - totalTimeDouble) / this.totalCollections);
+ this.averageTime = stats.getAverageCollectionTime();
+ this.averageFrequency = stats.getAverageCollectionFrequency(serverUptime);
}
@Override
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java
index 51fa905..b036d21 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java
@@ -30,6 +30,7 @@ import me.lucko.spark.common.command.CommandResponseHandler;
import me.lucko.spark.common.command.sender.CommandSender;
import me.lucko.spark.common.command.tabcomplete.TabCompleter;
import me.lucko.spark.common.monitor.cpu.CpuMonitor;
+import me.lucko.spark.common.monitor.disk.DiskUsage;
import me.lucko.spark.common.monitor.tick.TickStatistics;
import me.lucko.spark.common.util.FormatUtil;
import me.lucko.spark.common.util.RollingAverage;
@@ -38,15 +39,11 @@ import net.kyori.adventure.text.Component;
import net.kyori.adventure.text.TextComponent;
import net.kyori.adventure.text.format.TextColor;
-import java.io.IOException;
import java.lang.management.ManagementFactory;
import java.lang.management.MemoryMXBean;
import java.lang.management.MemoryPoolMXBean;
import java.lang.management.MemoryType;
import java.lang.management.MemoryUsage;
-import java.nio.file.FileStore;
-import java.nio.file.Files;
-import java.nio.file.Paths;
import java.util.LinkedList;
import java.util.List;
import java.util.function.Consumer;
@@ -65,8 +62,6 @@ import static net.kyori.adventure.text.format.TextDecoration.BOLD;
public class HealthModule implements CommandModule {
- private static final double MSPT_95_PERCENTILE = 0.95d;
-
@Override
public void registerCommands(Consumer<Command> consumer) {
consumer.accept(Command.builder()
@@ -150,11 +145,7 @@ public class HealthModule implements CommandModule {
addDetailedMemoryStats(report, memoryMXBean);
}
- try {
- addDiskStats(report);
- } catch (IOException e) {
- e.printStackTrace();
- }
+ addDiskStats(report);
resp.reply(report);
}
@@ -309,10 +300,14 @@ public class HealthModule implements CommandModule {
}
}
- private static void addDiskStats(List<Component> report) throws IOException {
- FileStore fileStore = Files.getFileStore(Paths.get("."));
- long totalSpace = fileStore.getTotalSpace();
- long usedSpace = totalSpace - fileStore.getUsableSpace();
+ private static void addDiskStats(List<Component> report) {
+ long total = DiskUsage.getTotal();
+ long used = DiskUsage.getUsed();
+
+ if (total == 0 || used == 0) {
+ return;
+ }
+
report.add(text()
.append(text(">", DARK_GRAY, BOLD))
.append(space())
@@ -321,18 +316,18 @@ public class HealthModule implements CommandModule {
);
report.add(text()
.content(" ")
- .append(text(FormatUtil.formatBytes(usedSpace), WHITE))
+ .append(text(FormatUtil.formatBytes(used), WHITE))
.append(space())
.append(text("/", GRAY))
.append(space())
- .append(text(FormatUtil.formatBytes(totalSpace), WHITE))
+ .append(text(FormatUtil.formatBytes(total), WHITE))
.append(text(" "))
.append(text("(", GRAY))
- .append(text(FormatUtil.percent(usedSpace, totalSpace), GREEN))
+ .append(text(FormatUtil.percent(used, total), GREEN))
.append(text(")", GRAY))
.build()
);
- report.add(text().content(" ").append(generateDiskUsageDiagram(usedSpace, totalSpace, 40)).build());
+ report.add(text().content(" ").append(generateDiskUsageDiagram(used, total, 40)).build());
report.add(empty());
}
@@ -355,7 +350,7 @@ public class HealthModule implements CommandModule {
.append(text('/', GRAY))
.append(formatTickDuration(average.median()))
.append(text('/', GRAY))
- .append(formatTickDuration(average.percentile(MSPT_95_PERCENTILE)))
+ .append(formatTickDuration(average.percentile95th()))
.append(text('/', GRAY))
.append(formatTickDuration(average.max()))
.build();
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java
index 70f6c3c..491ec1e 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java
@@ -30,20 +30,15 @@ import me.lucko.spark.common.command.sender.CommandSender;
import me.lucko.spark.common.command.tabcomplete.TabCompleter;
import me.lucko.spark.common.heapdump.HeapDump;
import me.lucko.spark.common.heapdump.HeapDumpSummary;
+import me.lucko.spark.common.util.Compression;
import me.lucko.spark.common.util.FormatUtil;
-import me.lucko.spark.proto.SparkProtos;
+import me.lucko.spark.proto.SparkHeapProtos;
import net.kyori.adventure.text.event.ClickEvent;
-import org.tukaani.xz.LZMA2Options;
-import org.tukaani.xz.LZMAOutputStream;
-import org.tukaani.xz.XZOutputStream;
-
import okhttp3.MediaType;
import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Iterator;
@@ -51,7 +46,6 @@ import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Consumer;
import java.util.function.LongConsumer;
-import java.util.zip.GZIPOutputStream;
import static net.kyori.adventure.text.Component.text;
import static net.kyori.adventure.text.format.NamedTextColor.GOLD;
@@ -98,7 +92,7 @@ public class HeapAnalysisModule implements CommandModule {
return;
}
- SparkProtos.HeapData output = heapDump.toProto(platform.getPlugin().getPlatformInfo(), sender);
+ SparkHeapProtos.HeapData output = heapDump.toProto(platform, sender);
boolean saveToFile = false;
if (arguments.boolFlag("save-to-file")) {
@@ -175,11 +169,11 @@ public class HeapAnalysisModule implements CommandModule {
platform.getActivityLog().addToLog(Activity.fileActivity(sender, System.currentTimeMillis(), "Heap dump", file.toString()));
- CompressionMethod compressionMethod = null;
+ Compression compressionMethod = null;
Iterator<String> compressArgs = arguments.stringFlag("compress").iterator();
if (compressArgs.hasNext()) {
try {
- compressionMethod = CompressionMethod.valueOf(compressArgs.next().toUpperCase());
+ compressionMethod = Compression.valueOf(compressArgs.next().toUpperCase());
} catch (IllegalArgumentException e) {
// ignore
}
@@ -194,7 +188,7 @@ public class HeapAnalysisModule implements CommandModule {
}
}
- private static void heapDumpCompress(SparkPlatform platform, CommandResponseHandler resp, Path file, CompressionMethod method) throws IOException {
+ private static void heapDumpCompress(SparkPlatform platform, CommandResponseHandler resp, Path file, Compression method) throws IOException {
resp.broadcastPrefixed(text("Compressing heap dump, please wait..."));
long size = Files.size(file);
@@ -244,71 +238,4 @@ public class HeapAnalysisModule implements CommandModule {
);
}
- public enum CompressionMethod {
- GZIP {
- @Override
- public Path compress(Path file, LongConsumer progressHandler) throws IOException {
- Path compressedFile = file.getParent().resolve(file.getFileName().toString() + ".gz");
- try (InputStream in = Files.newInputStream(file)) {
- try (OutputStream out = Files.newOutputStream(compressedFile)) {
- try (GZIPOutputStream compressionOut = new GZIPOutputStream(out, 1024 * 64)) {
- copy(in, compressionOut, progressHandler);
- }
- }
- }
- return compressedFile;
- }
- },
- XZ {
- @Override
- public Path compress(Path file, LongConsumer progressHandler) throws IOException {
- Path compressedFile = file.getParent().resolve(file.getFileName().toString() + ".xz");
- try (InputStream in = Files.newInputStream(file)) {
- try (OutputStream out = Files.newOutputStream(compressedFile)) {
- try (XZOutputStream compressionOut = new XZOutputStream(out, new LZMA2Options())) {
- copy(in, compressionOut, progressHandler);
- }
- }
- }
- return compressedFile;
- }
- },
- LZMA {
- @Override
- public Path compress(Path file, LongConsumer progressHandler) throws IOException {
- Path compressedFile = file.getParent().resolve(file.getFileName().toString() + ".lzma");
- try (InputStream in = Files.newInputStream(file)) {
- try (OutputStream out = Files.newOutputStream(compressedFile)) {
- try (LZMAOutputStream compressionOut = new LZMAOutputStream(out, new LZMA2Options(), true)) {
- copy(in, compressionOut, progressHandler);
- }
- }
- }
- return compressedFile;
- }
- };
-
- public abstract Path compress(Path file, LongConsumer progressHandler) throws IOException;
-
- private static long copy(InputStream from, OutputStream to, LongConsumer progress) throws IOException {
- byte[] buf = new byte[1024 * 64];
- long total = 0;
- long iterations = 0;
- while (true) {
- int r = from.read(buf);
- if (r == -1) {
- break;
- }
- to.write(buf, 0, r);
- total += r;
-
- // report progress every 5MB
- if (iterations++ % ((1024 / 64) * 5) == 0) {
- progress.accept(total);
- }
- }
- return total;
- }
- }
-
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
index 2dd07c9..26f20e7 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
@@ -40,7 +40,7 @@ import me.lucko.spark.common.sampler.async.AsyncSampler;
import me.lucko.spark.common.sampler.node.MergeMode;
import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.common.util.MethodDisambiguator;
-import me.lucko.spark.proto.SparkProtos;
+import me.lucko.spark.proto.SparkSamplerProtos;
import net.kyori.adventure.text.event.ClickEvent;
@@ -305,7 +305,7 @@ public class SamplerModule implements CommandModule {
}
private void handleUpload(SparkPlatform platform, CommandResponseHandler resp, Sampler sampler, ThreadNodeOrder threadOrder, String comment, MergeMode mergeMode, boolean saveToFileFlag) {
- SparkProtos.SamplerData output = sampler.toProto(platform.getPlugin().getPlatformInfo(), resp.sender(), threadOrder, comment, mergeMode, platform.createClassSourceLookup());
+ SparkSamplerProtos.SamplerData output = sampler.toProto(platform, resp.sender(), threadOrder, comment, mergeMode, platform.createClassSourceLookup());
boolean saveToFile = false;
if (saveToFileFlag) {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java b/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java
index 34fd6c4..7bb411d 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java
@@ -20,11 +20,11 @@
package me.lucko.spark.common.heapdump;
+import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.command.sender.CommandSender;
-import me.lucko.spark.common.platform.PlatformInfo;
-import me.lucko.spark.proto.SparkProtos;
-import me.lucko.spark.proto.SparkProtos.HeapData;
-import me.lucko.spark.proto.SparkProtos.HeapEntry;
+import me.lucko.spark.proto.SparkHeapProtos.HeapData;
+import me.lucko.spark.proto.SparkHeapProtos.HeapEntry;
+import me.lucko.spark.proto.SparkHeapProtos.HeapMetadata;
import org.objectweb.asm.Type;
@@ -125,10 +125,12 @@ public final class HeapDumpSummary {
this.entries = entries;
}
- public HeapData toProto(PlatformInfo platformInfo, CommandSender creator) {
+ public HeapData toProto(SparkPlatform platform, CommandSender creator) {
HeapData.Builder proto = HeapData.newBuilder();
- proto.setMetadata(SparkProtos.HeapMetadata.newBuilder()
- .setPlatformMetadata(platformInfo.toData().toProto())
+ proto.setMetadata(HeapMetadata.newBuilder()
+ .setPlatformMetadata(platform.getPlugin().getPlatformInfo().toData().toProto())
+ .setPlatformStatistics(platform.getStatisticsProvider().getPlatformStatistics(null))
+ .setSystemStatistics(platform.getStatisticsProvider().getSystemStatistics())
.setCreator(creator.toData().toProto())
.build()
);
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/disk/DiskUsage.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/disk/DiskUsage.java
new file mode 100644
index 0000000..4450fcd
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/disk/DiskUsage.java
@@ -0,0 +1,63 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck)
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.monitor.disk;
+
+import java.io.IOException;
+import java.nio.file.FileStore;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+
+/**
+ * Exposes the system disk usage.
+ */
+public enum DiskUsage {
+ ;
+
+ private static final FileStore FILE_STORE;
+
+ static {
+ FileStore fileStore = null;
+ try {
+ fileStore = Files.getFileStore(Paths.get("."));
+ } catch (IOException e) {
+ // ignore
+ }
+ FILE_STORE = fileStore;
+ }
+
+ public static long getUsed() {
+ try {
+ long total = FILE_STORE.getTotalSpace();
+ return total - FILE_STORE.getUsableSpace();
+ } catch (IOException e) {
+ return 0;
+ }
+ }
+
+ public static long getTotal() {
+ try {
+ return FILE_STORE.getTotalSpace();
+ } catch (IOException e) {
+ return 0;
+ }
+ }
+
+}
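Aside: the new DiskUsage helper resolves the FileStore for the working directory once, in a static initializer, and its getters fall back to 0 when the store reports an IOException. A minimal usage sketch (hypothetical caller, mirroring the zero-guard HealthModule applies above):

import me.lucko.spark.common.monitor.disk.DiskUsage;

public class DiskUsageExample {
    public static void main(String[] args) {
        long total = DiskUsage.getTotal(); // bytes, 0 if the lookup failed
        long used = DiskUsage.getUsed();
        if (total == 0 || used == 0) {
            System.out.println("disk statistics unavailable");
            return;
        }
        // prints e.g. "212.4 / 500.0 GB (42.5%)"
        System.out.printf("%.1f / %.1f GB (%.1f%%)%n",
                used / 1e9, total / 1e9, 100.0 * used / total);
    }
}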
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/GarbageCollectorStatistics.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/GarbageCollectorStatistics.java
index c831ea1..cfd12a1 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/GarbageCollectorStatistics.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/GarbageCollectorStatistics.java
@@ -74,6 +74,8 @@ public class GarbageCollectorStatistics {
this(bean.getCollectionCount(), bean.getCollectionTime());
}
+ // all times in milliseconds
+
public long getCollectionCount() {
return this.collectionCount;
}
@@ -82,6 +84,14 @@ public class GarbageCollectorStatistics {
return this.collectionTime;
}
+ public double getAverageCollectionTime() {
+ return this.collectionCount == 0 ? 0 : (double) this.collectionTime / this.collectionCount;
+ }
+
+ public long getAverageCollectionFrequency(long serverUptime) {
+ return this.collectionCount == 0 ? 0 : (long) ((serverUptime - (double) this.collectionTime) / this.collectionCount);
+ }
+
public GarbageCollectorStatistics subtract(GarbageCollectorStatistics other) {
if (other == ZERO || (other.collectionCount == 0 && other.collectionTime == 0)) {
return this;
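Aside: the two new getters centralise arithmetic that GarbageCollectorInfo previously inlined (see the hunk above). A worked sketch with assumed numbers:

// Worked example of the two averages, with made-up inputs: 50 collections
// totalling 1200ms over 600,000ms (10 minutes) of uptime.
public class GcAverages {
    public static void main(String[] args) {
        long collectionCount = 50;
        long collectionTime = 1200;   // ms spent collecting
        long serverUptime = 600_000;  // ms

        // getAverageCollectionTime(): 1200 / 50 = 24.0 ms per collection
        double avgTime = (double) collectionTime / collectionCount;

        // getAverageCollectionFrequency(uptime): non-GC time divided by the
        // collection count = (600000 - 1200) / 50 = 11976 ms between collections
        long avgFrequency = (long) ((serverUptime - (double) collectionTime) / collectionCount);

        System.out.println(avgTime + " ms avg, one collection every " + avgFrequency + " ms");
    }
}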
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/MemoryInfo.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/MemoryInfo.java
new file mode 100644
index 0000000..4ed9b1c
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/MemoryInfo.java
@@ -0,0 +1,74 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck)
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.monitor.memory;
+
+import java.lang.management.ManagementFactory;
+
+import javax.management.JMX;
+import javax.management.MBeanServer;
+import javax.management.ObjectName;
+
+public enum MemoryInfo {
+ ;
+
+ /** The object name of the com.sun.management.OperatingSystemMXBean */
+ private static final String OPERATING_SYSTEM_BEAN = "java.lang:type=OperatingSystem";
+ /** The OperatingSystemMXBean instance */
+ private static final OperatingSystemMXBean BEAN;
+
+ static {
+ try {
+ MBeanServer beanServer = ManagementFactory.getPlatformMBeanServer();
+ ObjectName diagnosticBeanName = ObjectName.getInstance(OPERATING_SYSTEM_BEAN);
+ BEAN = JMX.newMXBeanProxy(beanServer, diagnosticBeanName, OperatingSystemMXBean.class);
+ } catch (Exception e) {
+ throw new UnsupportedOperationException("OperatingSystemMXBean is not supported by the system", e);
+ }
+ }
+
+ public static long getUsedSwap() {
+ return BEAN.getTotalSwapSpaceSize() - BEAN.getFreeSwapSpaceSize();
+ }
+
+ public static long getTotalSwap() {
+ return BEAN.getTotalSwapSpaceSize();
+ }
+
+ public static long getUsedPhysicalMemory() {
+ return BEAN.getTotalPhysicalMemorySize() - BEAN.getFreePhysicalMemorySize();
+ }
+
+ public static long getTotalPhysicalMemory() {
+ return BEAN.getTotalPhysicalMemorySize();
+ }
+
+ public static long getTotalVirtualMemory() {
+ return BEAN.getCommittedVirtualMemorySize();
+ }
+
+ public interface OperatingSystemMXBean {
+ long getCommittedVirtualMemorySize();
+ long getTotalSwapSpaceSize();
+ long getFreeSwapSpaceSize();
+ long getFreePhysicalMemorySize();
+ long getTotalPhysicalMemorySize();
+ }
+}
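Aside: MemoryInfo reaches the com.sun.management bean attributes through a JMX proxy against a locally declared interface, so spark never compiles against the non-standard com.sun.management classes. A minimal usage sketch (assumes a JVM whose java.lang:type=OperatingSystem bean exposes these attributes; the static initializer above throws otherwise):

import me.lucko.spark.common.monitor.memory.MemoryInfo;

public class MemoryInfoExample {
    public static void main(String[] args) {
        System.out.printf("physical: %d / %d bytes%n",
                MemoryInfo.getUsedPhysicalMemory(), MemoryInfo.getTotalPhysicalMemory());
        System.out.printf("swap:     %d / %d bytes%n",
                MemoryInfo.getUsedSwap(), MemoryInfo.getTotalSwap());
        System.out.printf("virtual (committed): %d bytes%n",
                MemoryInfo.getTotalVirtualMemory());
    }
}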
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TickStatistics.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TickStatistics.java
index 31b58e9..bd2b834 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TickStatistics.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TickStatistics.java
@@ -56,7 +56,8 @@ public class TickStatistics implements TickHook.Callback, TickReporter.Callback
private boolean durationSupported = false;
private final RollingAverage tickDuration10Sec = new RollingAverage(TPS * 10);
private final RollingAverage tickDuration1Min = new RollingAverage(TPS * 60);
- private final RollingAverage[] tickDurationAverages = {this.tickDuration10Sec, this.tickDuration1Min};
+ private final RollingAverage tickDuration5Min = new RollingAverage(TPS * 60 * 5);
+ private final RollingAverage[] tickDurationAverages = {this.tickDuration10Sec, this.tickDuration1Min, this.tickDuration5Min};
private long last = 0;
@@ -131,6 +132,13 @@ public class TickStatistics implements TickHook.Callback, TickReporter.Callback
return this.tickDuration1Min;
}
+ public RollingAverage duration5Min() {
+ if (!this.durationSupported) {
+ return null;
+ }
+ return this.tickDuration5Min;
+ }
+
/**
* Rolling average calculator.
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/AbstractPlatformInfo.java b/spark-common/src/main/java/me/lucko/spark/common/platform/AbstractPlatformInfo.java
deleted file mode 100644
index 645d5b2..0000000
--- a/spark-common/src/main/java/me/lucko/spark/common/platform/AbstractPlatformInfo.java
+++ /dev/null
@@ -1,17 +0,0 @@
-package me.lucko.spark.common.platform;
-
-import java.lang.management.ManagementFactory;
-import java.lang.management.MemoryUsage;
-
-public abstract class AbstractPlatformInfo implements PlatformInfo {
-
- @Override
- public int getNCpus() {
- return Runtime.getRuntime().availableProcessors();
- }
-
- @Override
- public MemoryUsage getHeapUsage() {
- return ManagementFactory.getMemoryMXBean().getHeapMemoryUsage();
- }
-}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformInfo.java b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformInfo.java
index 80fb85f..eb1b25d 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformInfo.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformInfo.java
@@ -20,13 +20,12 @@
package me.lucko.spark.common.platform;
-import me.lucko.spark.proto.SparkProtos;
import me.lucko.spark.proto.SparkProtos.PlatformMetadata;
-import java.lang.management.MemoryUsage;
-
public interface PlatformInfo {
+ int DATA_VERSION = 1;
+
Type getType();
String getName();
@@ -35,18 +34,13 @@ public interface PlatformInfo {
String getMinecraftVersion();
- int getNCpus();
-
- MemoryUsage getHeapUsage();
-
default int getSparkVersion() {
// does not necessarily correspond to the plugin/mod version
- // this is like a data version I suppose
- return 1;
+ return DATA_VERSION;
}
default Data toData() {
- return new Data(getType(), getName(), getVersion(), getMinecraftVersion(), getNCpus(), getHeapUsage(), getSparkVersion());
+ return new Data(getType(), getName(), getVersion(), getMinecraftVersion(), getSparkVersion());
}
enum Type {
@@ -70,17 +64,13 @@ public interface PlatformInfo {
private final String name;
private final String version;
private final String minecraftVersion;
- private final int nCpus;
- private final MemoryUsage heapUsage;
private final int sparkVersion;
- public Data(Type type, String name, String version, String minecraftVersion, int nCpus, MemoryUsage heapUsage, int sparkVersion) {
+ public Data(Type type, String name, String version, String minecraftVersion, int sparkVersion) {
this.type = type;
this.name = name;
this.version = version;
this.minecraftVersion = minecraftVersion;
- this.nCpus = nCpus;
- this.heapUsage = heapUsage;
this.sparkVersion = sparkVersion;
}
@@ -100,33 +90,15 @@ public interface PlatformInfo {
return this.minecraftVersion;
}
- public int getNCpus() {
- return this.nCpus;
- }
-
- public MemoryUsage getHeapUsage() {
- return this.heapUsage;
- }
-
public int getSparkVersion() {
return this.sparkVersion;
}
- public SparkProtos.MemoryUsage getHeapUsageProto() {
- return SparkProtos.MemoryUsage.newBuilder()
- .setUsed(this.heapUsage.getUsed())
- .setCommitted(this.heapUsage.getCommitted())
- .setMax(this.heapUsage.getMax())
- .build();
- }
-
public PlatformMetadata toProto() {
PlatformMetadata.Builder proto = PlatformMetadata.newBuilder()
.setType(this.type.toProto())
.setName(this.name)
.setVersion(this.version)
- .setNCpus(this.nCpus)
- .setHeapUsage(getHeapUsageProto())
.setSparkVersion(this.sparkVersion);
if (this.minecraftVersion != null) {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java
new file mode 100644
index 0000000..5608fcc
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java
@@ -0,0 +1,165 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck)
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform;
+
+import me.lucko.spark.common.SparkPlatform;
+import me.lucko.spark.common.monitor.cpu.CpuMonitor;
+import me.lucko.spark.common.monitor.disk.DiskUsage;
+import me.lucko.spark.common.monitor.memory.GarbageCollectorStatistics;
+import me.lucko.spark.common.monitor.memory.MemoryInfo;
+import me.lucko.spark.common.monitor.tick.TickStatistics;
+import me.lucko.spark.common.util.RollingAverage;
+import me.lucko.spark.proto.SparkProtos.PlatformStatistics;
+import me.lucko.spark.proto.SparkProtos.SystemStatistics;
+
+import java.lang.management.ManagementFactory;
+import java.lang.management.MemoryUsage;
+import java.util.Map;
+
+public class PlatformStatisticsProvider {
+ private final SparkPlatform platform;
+
+ public PlatformStatisticsProvider(SparkPlatform platform) {
+ this.platform = platform;
+ }
+
+ public SystemStatistics getSystemStatistics() {
+ SystemStatistics.Builder builder = SystemStatistics.newBuilder()
+ .setCpu(SystemStatistics.Cpu.newBuilder()
+ .setThreads(Runtime.getRuntime().availableProcessors())
+ .setProcessUsage(SystemStatistics.Cpu.Usage.newBuilder()
+ .setLast1M(CpuMonitor.processLoad1MinAvg())
+ .setLast15M(CpuMonitor.processLoad15MinAvg())
+ .build()
+ )
+ .setSystemUsage(SystemStatistics.Cpu.Usage.newBuilder()
+ .setLast1M(CpuMonitor.systemLoad1MinAvg())
+ .setLast15M(CpuMonitor.systemLoad15MinAvg())
+ .build()
+ )
+ .build()
+ )
+ .setMemory(SystemStatistics.Memory.newBuilder()
+ .setPhysical(SystemStatistics.Memory.MemoryPool.newBuilder()
+ .setUsed(MemoryInfo.getUsedPhysicalMemory())
+ .setTotal(MemoryInfo.getTotalPhysicalMemory())
+ .build()
+ )
+ .setSwap(SystemStatistics.Memory.MemoryPool.newBuilder()
+ .setUsed(MemoryInfo.getUsedSwap())
+ .setTotal(MemoryInfo.getTotalSwap())
+ .build()
+ )
+ .build()
+ )
+ .setDisk(SystemStatistics.Disk.newBuilder()
+ .setTotal(DiskUsage.getTotal())
+ .setUsed(DiskUsage.getUsed())
+ .build()
+ )
+ .setOs(SystemStatistics.Os.newBuilder()
+ .setArch(System.getProperty("os.arch"))
+ .setName(System.getProperty("os.name"))
+ .setVersion(System.getProperty("os.version"))
+ .build()
+ )
+ .setJava(SystemStatistics.Java.newBuilder()
+ .setVendor(System.getProperty("java.vendor"))
+ .setVersion(System.getProperty("java.version"))
+ .setVendorVersion(System.getProperty("java.vendor.version"))
+ .build()
+ );
+
+ long uptime = ManagementFactory.getRuntimeMXBean().getUptime();
+ builder.setUptime(uptime);
+
+ Map<String, GarbageCollectorStatistics> gcStats = GarbageCollectorStatistics.pollStats();
+ gcStats.forEach((name, statistics) -> builder.putGc(
+ name,
+ SystemStatistics.Gc.newBuilder()
+ .setTotal(statistics.getCollectionCount())
+ .setAvgTime(statistics.getAverageCollectionTime())
+ .setAvgFrequency(statistics.getAverageCollectionFrequency(uptime))
+ .build()
+ ));
+
+ return builder.build();
+ }
+
+ public PlatformStatistics getPlatformStatistics(Map<String, GarbageCollectorStatistics> startingGcStatistics) {
+ PlatformStatistics.Builder builder = PlatformStatistics.newBuilder();
+
+ MemoryUsage memoryUsage = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage();
+ builder.setMemory(PlatformStatistics.Memory.newBuilder()
+ .setHeap(PlatformStatistics.Memory.MemoryPool.newBuilder()
+ .setUsed(memoryUsage.getUsed())
+ .setTotal(memoryUsage.getCommitted())
+ .build()
+ )
+ .build()
+ );
+
+ long uptime = System.currentTimeMillis() - this.platform.getServerNormalOperationStartTime();
+ builder.setUptime(uptime);
+
+ if (startingGcStatistics != null) {
+ Map<String, GarbageCollectorStatistics> gcStats = GarbageCollectorStatistics.pollStatsSubtractInitial(startingGcStatistics);
+ gcStats.forEach((name, statistics) -> builder.putGc(
+ name,
+ PlatformStatistics.Gc.newBuilder()
+ .setTotal(statistics.getCollectionCount())
+ .setAvgTime(statistics.getAverageCollectionTime())
+ .setAvgFrequency(statistics.getAverageCollectionFrequency(uptime))
+ .build()
+ ));
+ }
+
+ TickStatistics tickStatistics = this.platform.getTickStatistics();
+ if (tickStatistics != null) {
+ builder.setTps(PlatformStatistics.Tps.newBuilder()
+ .setLast1M(tickStatistics.tps1Min())
+ .setLast5M(tickStatistics.tps5Min())
+ .setLast15M(tickStatistics.tps15Min())
+ .build()
+ );
+ if (tickStatistics.isDurationSupported()) {
+ builder.setMspt(PlatformStatistics.Mspt.newBuilder()
+ .setLast1M(msptValues(tickStatistics.duration1Min()))
+ .setLast5M(msptValues(tickStatistics.duration5Min()))
+ .build()
+ );
+ }
+ }
+
+ return builder.build();
+ }
+
+ private static PlatformStatistics.Mspt.Values msptValues(RollingAverage rollingAverage) {
+ return PlatformStatistics.Mspt.Values.newBuilder()
+ .setMean(rollingAverage.mean())
+ .setMax(rollingAverage.max())
+ .setMin(rollingAverage.min())
+ .setMedian(rollingAverage.median())
+ .setPercentile95(rollingAverage.percentile95th())
+ .build();
+ }
+
+}
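Aside: the provider composes the monitor classes above into the two new protobuf messages. A sketch of how it is consumed (mirrors the sampler and heap dump wiring elsewhere in this patch; `platform` is assumed to be a live SparkPlatform instance):

import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.proto.SparkProtos.PlatformStatistics;
import me.lucko.spark.proto.SparkProtos.SystemStatistics;

class StatisticsWiring {
    static void collect(SparkPlatform platform) {
        SystemStatistics system = platform.getStatisticsProvider().getSystemStatistics();
        // heap dumps pass null (no baseline to subtract); samplers pass the
        // snapshot recorded by recordInitialGcStats() when profiling began
        PlatformStatistics stats = platform.getStatisticsProvider().getPlatformStatistics(null);
        System.out.println(system.getOs().getName() + ", uptime " + stats.getUptime() + "ms");
    }
}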
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java
index bae93b1..568b59d 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java
@@ -20,6 +20,9 @@
package me.lucko.spark.common.sampler;
+import me.lucko.spark.common.monitor.memory.GarbageCollectorStatistics;
+
+import java.util.Map;
import java.util.concurrent.CompletableFuture;
/**
@@ -42,6 +45,9 @@ public abstract class AbstractSampler implements Sampler {
/** A future to encapsulate the completion of this sampler instance */
protected final CompletableFuture<Sampler> future = new CompletableFuture<>();
+ /** The garbage collector statistics when profiling started */
+ protected Map<String, GarbageCollectorStatistics> initialGcStats;
+
protected AbstractSampler(int interval, ThreadDumper threadDumper, long endTime) {
this.interval = interval;
this.threadDumper = threadDumper;
@@ -65,4 +71,12 @@ public abstract class AbstractSampler implements Sampler {
public CompletableFuture<Sampler> getFuture() {
return this.future;
}
+
+ protected void recordInitialGcStats() {
+ this.initialGcStats = GarbageCollectorStatistics.pollStats();
+ }
+
+ protected Map<String, GarbageCollectorStatistics> getInitialGcStats() {
+ return this.initialGcStats;
+ }
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
index b71aaee..d27b2fc 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
@@ -20,12 +20,12 @@
package me.lucko.spark.common.sampler;
+import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.command.sender.CommandSender;
-import me.lucko.spark.common.platform.PlatformInfo;
import me.lucko.spark.common.sampler.node.MergeMode;
import me.lucko.spark.common.sampler.node.ThreadNode;
import me.lucko.spark.common.util.ClassSourceLookup;
-import me.lucko.spark.proto.SparkProtos.SamplerData;
+import me.lucko.spark.proto.SparkSamplerProtos.SamplerData;
import java.util.Comparator;
import java.util.Map;
@@ -68,6 +68,6 @@ public interface Sampler {
CompletableFuture<Sampler> getFuture();
// Methods used to export the sampler data to the web viewer.
- SamplerData toProto(PlatformInfo platformInfo, CommandSender creator, Comparator<? super Map.Entry<String, ThreadNode>> outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup);
+ SamplerData toProto(SparkPlatform platform, CommandSender creator, Comparator<? super Map.Entry<String, ThreadNode>> outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup);
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java
index e99114a..5cc41b9 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java
@@ -22,7 +22,7 @@
package me.lucko.spark.common.sampler;
import me.lucko.spark.common.util.ThreadFinder;
-import me.lucko.spark.proto.SparkProtos.SamplerMetadata;
+import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata;
import java.lang.management.ThreadInfo;
import java.lang.management.ThreadMXBean;
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java
index e63ebc8..225f768 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java
@@ -20,7 +20,7 @@
package me.lucko.spark.common.sampler;
-import me.lucko.spark.proto.SparkProtos.SamplerMetadata;
+import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/DataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/DataAggregator.java
index 8b90639..3b1d349 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/DataAggregator.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/DataAggregator.java
@@ -21,7 +21,7 @@
package me.lucko.spark.common.sampler.aggregator;
import me.lucko.spark.common.sampler.node.ThreadNode;
-import me.lucko.spark.proto.SparkProtos.SamplerMetadata;
+import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata;
import java.util.Map;
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncDataAggregator.java
index 594d56e..3de3943 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncDataAggregator.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncDataAggregator.java
@@ -24,7 +24,7 @@ import me.lucko.spark.common.sampler.ThreadGrouper;
import me.lucko.spark.common.sampler.aggregator.AbstractDataAggregator;
import me.lucko.spark.common.sampler.node.StackTraceNode;
import me.lucko.spark.common.sampler.node.ThreadNode;
-import me.lucko.spark.proto.SparkProtos.SamplerMetadata;
+import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata;
/**
* Data aggregator for {@link AsyncSampler}.
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java
index f1d7209..06db795 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java
@@ -102,7 +102,7 @@ public enum AsyncProfilerAccess {
}
// extract the profiler binary from the spark jar file
- String resource = os + "/libasyncProfiler.so";
+ String resource = "spark/" + os + "/libasyncProfiler.so";
URL profilerResource = AsyncProfilerAccess.class.getClassLoader().getResource(resource);
if (profilerResource == null) {
throw new IllegalStateException("Could not find " + resource + " in spark jar file");
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java
index 1837cbc..db8802c 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java
@@ -22,8 +22,8 @@ package me.lucko.spark.common.sampler.async;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
+import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.command.sender.CommandSender;
-import me.lucko.spark.common.platform.PlatformInfo;
import me.lucko.spark.common.sampler.AbstractSampler;
import me.lucko.spark.common.sampler.ThreadDumper;
import me.lucko.spark.common.sampler.ThreadGrouper;
@@ -32,7 +32,8 @@ import me.lucko.spark.common.sampler.node.MergeMode;
import me.lucko.spark.common.sampler.node.ThreadNode;
import me.lucko.spark.common.util.ClassSourceLookup;
import me.lucko.spark.common.util.TemporaryFiles;
-import me.lucko.spark.proto.SparkProtos;
+import me.lucko.spark.proto.SparkSamplerProtos.SamplerData;
+import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata;
import one.profiler.AsyncProfiler;
@@ -117,6 +118,7 @@ public class AsyncSampler extends AbstractSampler {
}
}
+ recordInitialGcStats();
scheduleTimeout();
}
@@ -154,9 +156,11 @@ public class AsyncSampler extends AbstractSampler {
}
@Override
- public SparkProtos.SamplerData toProto(PlatformInfo platformInfo, CommandSender creator, Comparator<? super Map.Entry<String, ThreadNode>> outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup) {
- final SparkProtos.SamplerMetadata.Builder metadata = SparkProtos.SamplerMetadata.newBuilder()
- .setPlatformMetadata(platformInfo.toData().toProto())
+ public SamplerData toProto(SparkPlatform platform, CommandSender creator, Comparator<? super Map.Entry<String, ThreadNode>> outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup) {
+ final SamplerMetadata.Builder metadata = SamplerMetadata.newBuilder()
+ .setPlatformMetadata(platform.getPlugin().getPlatformInfo().toData().toProto())
+ .setPlatformStatistics(platform.getStatisticsProvider().getPlatformStatistics(getInitialGcStats()))
+ .setSystemStatistics(platform.getStatisticsProvider().getSystemStatistics())
.setCreator(creator.toData().toProto())
.setStartTime(this.startTime)
.setInterval(this.interval)
@@ -167,7 +171,7 @@ public class AsyncSampler extends AbstractSampler {
metadata.setComment(comment);
}
- SparkProtos.SamplerData.Builder proto = SparkProtos.SamplerData.newBuilder();
+ SamplerData.Builder proto = SamplerData.newBuilder();
proto.setMetadata(metadata.build());
aggregateOutput();
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java
index 02d5f01..c873f9f 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java
@@ -23,8 +23,8 @@ package me.lucko.spark.common.sampler.java;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
+import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.command.sender.CommandSender;
-import me.lucko.spark.common.platform.PlatformInfo;
import me.lucko.spark.common.sampler.AbstractSampler;
import me.lucko.spark.common.sampler.ThreadDumper;
import me.lucko.spark.common.sampler.ThreadGrouper;
@@ -32,8 +32,8 @@ import me.lucko.spark.common.sampler.node.MergeMode;
import me.lucko.spark.common.sampler.node.ThreadNode;
import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.common.util.ClassSourceLookup;
-import me.lucko.spark.proto.SparkProtos.SamplerData;
-import me.lucko.spark.proto.SparkProtos.SamplerMetadata;
+import me.lucko.spark.proto.SparkSamplerProtos.SamplerData;
+import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata;
import java.lang.management.ManagementFactory;
import java.lang.management.ThreadInfo;
@@ -129,9 +129,11 @@ public class JavaSampler extends AbstractSampler implements Runnable {
}
@Override
- public SamplerData toProto(PlatformInfo platformInfo, CommandSender creator, Comparator<? super Map.Entry<String, ThreadNode>> outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup) {
+ public SamplerData toProto(SparkPlatform platform, CommandSender creator, Comparator<? super Map.Entry<String, ThreadNode>> outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup) {
final SamplerMetadata.Builder metadata = SamplerMetadata.newBuilder()
- .setPlatformMetadata(platformInfo.toData().toProto())
+ .setPlatformMetadata(platform.getPlugin().getPlatformInfo().toData().toProto())
+ .setPlatformStatistics(platform.getStatisticsProvider().getPlatformStatistics(getInitialGcStats()))
+ .setSystemStatistics(platform.getStatisticsProvider().getSystemStatistics())
.setCreator(creator.toData().toProto())
.setStartTime(this.startTime)
.setInterval(this.interval)
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/SimpleDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/SimpleDataAggregator.java
index e7113a1..39e21aa 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/SimpleDataAggregator.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/SimpleDataAggregator.java
@@ -22,7 +22,7 @@ package me.lucko.spark.common.sampler.java;
import me.lucko.spark.common.sampler.ThreadGrouper;
import me.lucko.spark.common.sampler.aggregator.DataAggregator;
-import me.lucko.spark.proto.SparkProtos.SamplerMetadata;
+import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata;
import java.lang.management.ThreadInfo;
import java.util.concurrent.ExecutorService;
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java
index 018a3b8..ac34d01 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java
@@ -24,7 +24,7 @@ import me.lucko.spark.common.sampler.ThreadGrouper;
import me.lucko.spark.common.sampler.aggregator.DataAggregator;
import me.lucko.spark.common.sampler.node.ThreadNode;
import me.lucko.spark.common.tick.TickHook;
-import me.lucko.spark.proto.SparkProtos.SamplerMetadata;
+import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata;
import java.lang.management.ThreadInfo;
import java.util.ArrayList;
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java
index f935fb2..54217be 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java
@@ -22,7 +22,7 @@
package me.lucko.spark.common.sampler.node;
import me.lucko.spark.common.util.MethodDisambiguator;
-import me.lucko.spark.proto.SparkProtos;
+import me.lucko.spark.proto.SparkSamplerProtos;
import org.checkerframework.checker.nullness.qual.Nullable;
@@ -65,8 +65,8 @@ public final class StackTraceNode extends AbstractNode implements Comparable<StackTraceNode> {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/Compression.java b/spark-common/src/main/java/me/lucko/spark/common/util/Compression.java
new file mode 100644
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/Compression.java
@@ -0,0 +1,100 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck)
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.util;
+
+import org.tukaani.xz.LZMA2Options;
+import org.tukaani.xz.LZMAOutputStream;
+import org.tukaani.xz.XZOutputStream;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.function.LongConsumer;
+import java.util.zip.GZIPOutputStream;
+
+public enum Compression {
+ GZIP {
+ @Override
+ public Path compress(Path file, LongConsumer progressHandler) throws IOException {
+ Path compressedFile = file.getParent().resolve(file.getFileName().toString() + ".gz");
+ try (InputStream in = Files.newInputStream(file)) {
+ try (OutputStream out = Files.newOutputStream(compressedFile)) {
+ try (GZIPOutputStream compressionOut = new GZIPOutputStream(out, 1024 * 64)) {
+ copy(in, compressionOut, progressHandler);
+ }
+ }
+ }
+ return compressedFile;
+ }
+ },
+ XZ {
+ @Override
+ public Path compress(Path file, LongConsumer progressHandler) throws IOException {
+ Path compressedFile = file.getParent().resolve(file.getFileName().toString() + ".xz");
+ try (InputStream in = Files.newInputStream(file)) {
+ try (OutputStream out = Files.newOutputStream(compressedFile)) {
+ try (XZOutputStream compressionOut = new XZOutputStream(out, new LZMA2Options())) {
+ copy(in, compressionOut, progressHandler);
+ }
+ }
+ }
+ return compressedFile;
+ }
+ },
+ LZMA {
+ @Override
+ public Path compress(Path file, LongConsumer progressHandler) throws IOException {
+ Path compressedFile = file.getParent().resolve(file.getFileName().toString() + ".lzma");
+ try (InputStream in = Files.newInputStream(file)) {
+ try (OutputStream out = Files.newOutputStream(compressedFile)) {
+ try (LZMAOutputStream compressionOut = new LZMAOutputStream(out, new LZMA2Options(), true)) {
+ copy(in, compressionOut, progressHandler);
+ }
+ }
+ }
+ return compressedFile;
+ }
+ };
+
+ public abstract Path compress(Path file, LongConsumer progressHandler) throws IOException;
+
+ private static long copy(InputStream from, OutputStream to, LongConsumer progress) throws IOException {
+ byte[] buf = new byte[1024 * 64];
+ long total = 0;
+ long iterations = 0;
+ while (true) {
+ int r = from.read(buf);
+ if (r == -1) {
+ break;
+ }
+ to.write(buf, 0, r);
+ total += r;
+
+ // report progress every 5MB
+ if (iterations++ % ((1024 / 64) * 5) == 0) {
+ progress.accept(total);
+ }
+ }
+ return total;
+ }
+}
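Aside: Compression is the old HeapAnalysisModule.CompressionMethod enum hoisted into a shared utility, behaviourally unchanged. A usage sketch with a hypothetical file path, driven the same way heapDumpCompress drives it:

import me.lucko.spark.common.util.Compression;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;

public class CompressionExample {
    public static void main(String[] args) throws IOException {
        Path heapDump = Paths.get("heapdump.hprof"); // hypothetical input file
        // the progress callback receives total bytes read so far,
        // reported roughly every 5MB of input
        Path compressed = Compression.GZIP.compress(heapDump,
                readSoFar -> System.out.println(readSoFar + " bytes processed"));
        System.out.println("wrote " + compressed); // heapdump.hprof.gz
    }
}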
diff --git a/spark-common/src/main/proto/spark/spark.proto b/spark-common/src/main/proto/spark/spark.proto
index 4305a51..b4f9efb 100644
--- a/spark-common/src/main/proto/spark/spark.proto
+++ b/spark-common/src/main/proto/spark/spark.proto
@@ -5,34 +5,17 @@ package spark;
option java_package = "me.lucko.spark.proto";
option java_outer_classname = "SparkProtos";
-message CommandSenderMetadata {
- Type type = 1;
- string name = 2;
- string unique_id = 3;
-
- enum Type {
- OTHER = 0;
- PLAYER = 1;
- }
-}
-
-message MemoryUsage {
- int64 used = 1;
- int64 committed = 2;
- int64 max = 3;
-}
-
message PlatformMetadata {
Type type = 1;
string name = 2;
string version = 3;
string minecraft_version = 4; // optional
-
- int32 n_cpus = 5;
- MemoryUsage heapUsage = 6;
-
int32 spark_version = 7;
+ // replaced
+ reserved 5, 6;
+ reserved "n_cpus", "heap_usage";
+
enum Type {
SERVER = 0;
CLIENT = 1;
@@ -40,80 +23,109 @@ message PlatformMetadata {
}
}
-message HeapData {
- HeapMetadata metadata = 1;
- repeated HeapEntry entries = 2;
-}
+message SystemStatistics {
+ Cpu cpu = 1;
+ Memory memory = 2;
+ map<string, Gc> gc = 3;
+ Disk disk = 4;
+ Os os = 5;
+ Java java = 6;
+ int64 uptime = 7;
+
+ message Cpu {
+ int32 threads = 1;
+ Usage process_usage = 2;
+ Usage system_usage = 3;
+
+ message Usage {
+ double last1m = 1;
+ double last15m = 2;
+ }
+ }
-message HeapMetadata {
- CommandSenderMetadata creator = 1;
- PlatformMetadata platform_metadata = 2;
-}
+ message Memory {
+ MemoryPool physical = 1;
+ MemoryPool swap = 2;
-message HeapEntry {
- int32 order = 1;
- int32 instances = 2;
- int64 size = 3;
- string type = 4;
-}
+ message MemoryPool {
+ int64 used = 1;
+ int64 total = 2;
+ }
+ }
-message SamplerData {
- SamplerMetadata metadata = 1;
- repeated ThreadNode threads = 2;
- map<string, string> class_sources = 3; // optional
+ message Gc {
+ int64 total = 1;
+ double avg_time = 2;
+ double avg_frequency = 3;
+ }
+
+ message Disk {
+ int64 used = 1;
+ int64 total = 2;
+ }
+
+ message Os {
+ string arch = 1;
+ string name = 2;
+ string version = 3;
+ }
+
+ message Java {
+ string vendor = 1;
+ string version = 2;
+ string vendor_version = 3;
+ }
}
-message SamplerMetadata {
- CommandSenderMetadata creator = 1;
- int64 start_time = 2;
- int32 interval = 3;
- ThreadDumper thread_dumper = 4;
- DataAggregator data_aggregator = 5;
- string comment = 6;
- PlatformMetadata platform_metadata = 7;
-
- message ThreadDumper {
- Type type = 1;
- repeated int64 ids = 2; // optional
- repeated string patterns = 3; // optional
-
- enum Type {
- ALL = 0;
- SPECIFIC = 1;
- REGEX = 2;
+message PlatformStatistics {
+ Memory memory = 1;
+ map<string, Gc> gc = 2;
+ int64 uptime = 3;
+ Tps tps = 4; // optional
+ Mspt mspt = 5; // optional
+
+ message Memory {
+ MemoryPool heap = 1;
+
+ message MemoryPool {
+ int64 used = 1;
+ int64 total = 2;
}
}
- message DataAggregator {
- Type type = 1;
- ThreadGrouper thread_grouper = 2;
- int64 tick_length_threshold = 3; // optional
+ message Gc {
+ int64 total = 1;
+ double avg_time = 2;
+ double avg_frequency = 3;
+ }
- enum Type {
- SIMPLE = 0;
- TICKED = 1;
- }
+ message Tps {
+ double last1m = 1;
+ double last5m = 2;
+ double last15m = 3;
+ }
+
+ message Mspt {
+ Values last1m = 1;
+ Values last5m = 2;
- enum ThreadGrouper {
- BY_NAME = 0;
- BY_POOL = 1;
- AS_ONE = 2;
+ message Values {
+ double mean = 1;
+ double max = 2;
+ double min = 3;
+ double median = 4;
+ double percentile95 = 5;
}
}
}
-message StackTraceNode {
- double time = 1;
- repeated StackTraceNode children = 2;
- string class_name = 3;
- string method_name = 4;
- int32 parent_line_number = 5; // optional
- int32 line_number = 6; // optional
- string method_desc = 7; // optional
-}
+message CommandSenderMetadata {
+ Type type = 1;
+ string name = 2;
+ string unique_id = 3;
-message ThreadNode {
- string name = 1;
- double time = 2;
- repeated StackTraceNode children = 3;
+ enum Type {
+ OTHER = 0;
+ PLAYER = 1;
+ }
}
diff --git a/spark-common/src/main/proto/spark/spark_heap.proto b/spark-common/src/main/proto/spark/spark_heap.proto
new file mode 100644
index 0000000..59f2b85
--- /dev/null
+++ b/spark-common/src/main/proto/spark/spark_heap.proto
@@ -0,0 +1,27 @@
+syntax = "proto3";
+
+package spark;
+
+import "spark/spark.proto";
+
+option java_package = "me.lucko.spark.proto";
+option java_outer_classname = "SparkHeapProtos";
+
+message HeapData {
+ HeapMetadata metadata = 1;
+ repeated HeapEntry entries = 2;
+}
+
+message HeapMetadata {
+ CommandSenderMetadata creator = 1;
+ PlatformMetadata platform_metadata = 2;
+ PlatformStatistics platform_statistics = 3;
+ SystemStatistics system_statistics = 4;
+}
+
+message HeapEntry {
+ int32 order = 1;
+ int32 instances = 2;
+ int64 size = 3;
+ string type = 4;
+}
diff --git a/spark-common/src/main/proto/spark/spark_sampler.proto b/spark-common/src/main/proto/spark/spark_sampler.proto
new file mode 100644
index 0000000..c7cecc3
--- /dev/null
+++ b/spark-common/src/main/proto/spark/spark_sampler.proto
@@ -0,0 +1,71 @@
+syntax = "proto3";
+
+package spark;
+
+import "spark/spark.proto";
+
+option java_package = "me.lucko.spark.proto";
+option java_outer_classname = "SparkSamplerProtos";
+
+message SamplerData {
+ SamplerMetadata metadata = 1;
+ repeated ThreadNode threads = 2;
+ map<string, string> class_sources = 3; // optional
+}
+
+message SamplerMetadata {
+ CommandSenderMetadata creator = 1;
+ int64 start_time = 2;
+ int32 interval = 3;
+ ThreadDumper thread_dumper = 4;
+ DataAggregator data_aggregator = 5;
+ string comment = 6;
+ PlatformMetadata platform_metadata = 7;
+ PlatformStatistics platform_statistics = 8;
+ SystemStatistics system_statistics = 9;
+
+ message ThreadDumper {
+ Type type = 1;
+ repeated int64 ids = 2; // optional
+ repeated string patterns = 3; // optional
+
+ enum Type {
+ ALL = 0;
+ SPECIFIC = 1;
+ REGEX = 2;
+ }
+ }
+
+ message DataAggregator {
+ Type type = 1;
+ ThreadGrouper thread_grouper = 2;
+ int64 tick_length_threshold = 3; // optional
+
+ enum Type {
+ SIMPLE = 0;
+ TICKED = 1;
+ }
+
+ enum ThreadGrouper {
+ BY_NAME = 0;
+ BY_POOL = 1;
+ AS_ONE = 2;
+ }
+ }
+}
+
+message ThreadNode {
+ string name = 1;
+ double time = 2;
+ repeated StackTraceNode children = 3;
+}
+
+message StackTraceNode {
+ double time = 1;
+ repeated StackTraceNode children = 2;
+ string class_name = 3;
+ string method_name = 4;
+ int32 parent_line_number = 5; // optional
+ int32 line_number = 6; // optional
+ string method_desc = 7; // optional
+}
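A short reader sketch for the sampler payload; it assumes an already-decompressed SamplerData stream (viewer payloads are normally compressed before upload) and walks the recursive ThreadNode/StackTraceNode tree:

    import me.lucko.spark.proto.SparkSamplerProtos.SamplerData;
    import me.lucko.spark.proto.SparkSamplerProtos.StackTraceNode;
    import me.lucko.spark.proto.SparkSamplerProtos.ThreadNode;

    import java.io.InputStream;
    import java.nio.file.Files;
    import java.nio.file.Paths;

    public class SamplerDataReader {
        public static void main(String[] args) throws Exception {
            try (InputStream in = Files.newInputStream(Paths.get(args[0]))) {
                SamplerData data = SamplerData.parseFrom(in);
                for (ThreadNode thread : data.getThreadsList()) {
                    System.out.println(thread.getName() + " (" + thread.getTime() + "ms)");
                    for (StackTraceNode child : thread.getChildrenList()) {
                        print(child, 1);
                    }
                }
            }
        }

        // depth-first walk over the call tree
        private static void print(StackTraceNode node, int depth) {
            StringBuilder indent = new StringBuilder();
            for (int i = 0; i < depth; i++) {
                indent.append("  ");
            }
            System.out.println(indent + node.getClassName() + "." + node.getMethodName()
                    + " (" + node.getTime() + "ms)");
            for (StackTraceNode child : node.getChildrenList()) {
                print(child, depth + 1);
            }
        }
    }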
diff --git a/spark-common/src/main/resources/linux/libasyncProfiler.so b/spark-common/src/main/resources/linux/libasyncProfiler.so
deleted file mode 100755
index ddee900..0000000
Binary files a/spark-common/src/main/resources/linux/libasyncProfiler.so and /dev/null differ
diff --git a/spark-common/src/main/resources/macosx/libasyncProfiler.so b/spark-common/src/main/resources/macosx/libasyncProfiler.so
deleted file mode 100755
index 75daf6e..0000000
Binary files a/spark-common/src/main/resources/macosx/libasyncProfiler.so and /dev/null differ
diff --git a/spark-common/src/main/resources/spark/linux/libasyncProfiler.so b/spark-common/src/main/resources/spark/linux/libasyncProfiler.so
new file mode 100755
index 0000000..ddee900
Binary files /dev/null and b/spark-common/src/main/resources/spark/linux/libasyncProfiler.so differ
diff --git a/spark-common/src/main/resources/spark/macosx/libasyncProfiler.so b/spark-common/src/main/resources/spark/macosx/libasyncProfiler.so
new file mode 100755
index 0000000..75daf6e
Binary files /dev/null and b/spark-common/src/main/resources/spark/macosx/libasyncProfiler.so differ
--
cgit
From 92669a73490e526459457f4dbd5fff116a628870 Mon Sep 17 00:00:00 2001
From: Luck
Date: Tue, 28 Dec 2021 20:06:02 +0000
Subject: Fix java vendor system property lookup NPE (#152)
---
.../me/lucko/spark/common/platform/PlatformStatisticsProvider.java | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
(limited to 'spark-common/src')
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java
index 5608fcc..a9d83b2 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java
@@ -82,9 +82,9 @@ public class PlatformStatisticsProvider {
.build()
)
.setJava(SystemStatistics.Java.newBuilder()
- .setVendor(System.getProperty("java.vendor"))
- .setVersion(System.getProperty("java.version"))
- .setVendorVersion(System.getProperty("java.vendor.version"))
+ .setVendor(System.getProperty("java.vendor", "unknown"))
+ .setVersion(System.getProperty("java.version", "unknown"))
+ .setVendorVersion(System.getProperty("java.vendor.version", "unknown"))
.build()
);
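The root cause, as a sketch: the one-argument System.getProperty returns null for properties that are not set, and java.vendor.version only exists on JDK 10+ (JEP 322), while protobuf builder setters reject null inputs. The two-argument overload never returns null:

    public class VendorLookup {
        public static void main(String[] args) {
            // May be null on JDK 8/9, where the property does not exist:
            String unsafe = System.getProperty("java.vendor.version");
            // Never null; falls back to the supplied default:
            String safe = System.getProperty("java.vendor.version", "unknown");
            System.out.println(unsafe + " / " + safe);
        }
    }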
--
cgit
From 7e96297d27f78b14354ee1391dc3808b240b19f7 Mon Sep 17 00:00:00 2001
From: Luck
Date: Tue, 28 Dec 2021 22:31:20 +0000
Subject: Use try catch for system stat collection
---
.../spark/common/heapdump/HeapDumpSummary.java | 23 ++++++---
.../spark/common/sampler/AbstractSampler.java | 57 +++++++++++++++++++++-
.../spark/common/sampler/async/AsyncSampler.java | 35 +------------
.../spark/common/sampler/java/JavaSampler.java | 35 +------------
4 files changed, 76 insertions(+), 74 deletions(-)
(limited to 'spark-common/src')
diff --git a/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java b/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java
index 7bb411d..c0980e7 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java
@@ -126,14 +126,23 @@ public final class HeapDumpSummary {
}
public HeapData toProto(SparkPlatform platform, CommandSender creator) {
- HeapData.Builder proto = HeapData.newBuilder();
- proto.setMetadata(HeapMetadata.newBuilder()
+ HeapMetadata.Builder metadata = HeapMetadata.newBuilder()
.setPlatformMetadata(platform.getPlugin().getPlatformInfo().toData().toProto())
- .setPlatformStatistics(platform.getStatisticsProvider().getPlatformStatistics(null))
- .setSystemStatistics(platform.getStatisticsProvider().getSystemStatistics())
- .setCreator(creator.toData().toProto())
- .build()
- );
+ .setCreator(creator.toData().toProto());
+ try {
+ metadata.setPlatformStatistics(platform.getStatisticsProvider().getPlatformStatistics(null));
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ metadata.setSystemStatistics(platform.getStatisticsProvider().getSystemStatistics());
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ HeapData.Builder proto = HeapData.newBuilder();
+ proto.setMetadata(metadata);
for (Entry entry : this.entries) {
proto.addEntries(entry.toProto());
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java
index 568b59d..34abdfa 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java
@@ -20,8 +20,19 @@
package me.lucko.spark.common.sampler;
+import me.lucko.spark.common.SparkPlatform;
+import me.lucko.spark.common.command.sender.CommandSender;
import me.lucko.spark.common.monitor.memory.GarbageCollectorStatistics;
-
+import me.lucko.spark.common.sampler.aggregator.DataAggregator;
+import me.lucko.spark.common.sampler.node.MergeMode;
+import me.lucko.spark.common.sampler.node.ThreadNode;
+import me.lucko.spark.common.util.ClassSourceLookup;
+import me.lucko.spark.proto.SparkSamplerProtos.SamplerData;
+import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata;
+
+import java.util.ArrayList;
+import java.util.Comparator;
+import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
@@ -79,4 +90,48 @@ public abstract class AbstractSampler implements Sampler {
protected Map<String, GarbageCollectorStatistics> getInitialGcStats() {
return this.initialGcStats;
}
+
+ protected void writeMetadataToProto(SamplerData.Builder proto, SparkPlatform platform, CommandSender creator, String comment, DataAggregator dataAggregator) {
+ SamplerMetadata.Builder metadata = SamplerMetadata.newBuilder()
+ .setPlatformMetadata(platform.getPlugin().getPlatformInfo().toData().toProto())
+ .setCreator(creator.toData().toProto())
+ .setStartTime(this.startTime)
+ .setInterval(this.interval)
+ .setThreadDumper(this.threadDumper.getMetadata())
+ .setDataAggregator(dataAggregator.getMetadata());
+
+ if (comment != null) {
+ metadata.setComment(comment);
+ }
+
+ try {
+ metadata.setPlatformStatistics(platform.getStatisticsProvider().getPlatformStatistics(getInitialGcStats()));
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ metadata.setSystemStatistics(platform.getStatisticsProvider().getSystemStatistics());
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ proto.setMetadata(metadata);
+ }
+
+ protected void writeDataToProto(SamplerData.Builder proto, DataAggregator dataAggregator, Comparator<? super Map.Entry<String, ThreadNode>> outputOrder, MergeMode mergeMode, ClassSourceLookup classSourceLookup) {
+ List<Map.Entry<String, ThreadNode>> data = new ArrayList<>(dataAggregator.getData().entrySet());
+ data.sort(outputOrder);
+
+ ClassSourceLookup.Visitor classSourceVisitor = ClassSourceLookup.createVisitor(classSourceLookup);
+
+ for (Map.Entry<String, ThreadNode> entry : data) {
+ proto.addThreads(entry.getValue().toProto(mergeMode));
+ classSourceVisitor.visit(entry.getValue());
+ }
+
+ if (classSourceVisitor.hasMappings()) {
+ proto.putAllClassSources(classSourceVisitor.getMapping());
+ }
+ }
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java
index db8802c..62325ae 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java
@@ -33,7 +33,6 @@ import me.lucko.spark.common.sampler.node.ThreadNode;
import me.lucko.spark.common.util.ClassSourceLookup;
import me.lucko.spark.common.util.TemporaryFiles;
import me.lucko.spark.proto.SparkSamplerProtos.SamplerData;
-import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata;
import one.profiler.AsyncProfiler;
@@ -41,7 +40,6 @@ import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
-import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
@@ -157,39 +155,10 @@ public class AsyncSampler extends AbstractSampler {
@Override
public SamplerData toProto(SparkPlatform platform, CommandSender creator, Comparator<? super Map.Entry<String, ThreadNode>> outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup) {
- final SamplerMetadata.Builder metadata = SamplerMetadata.newBuilder()
- .setPlatformMetadata(platform.getPlugin().getPlatformInfo().toData().toProto())
- .setPlatformStatistics(platform.getStatisticsProvider().getPlatformStatistics(getInitialGcStats()))
- .setSystemStatistics(platform.getStatisticsProvider().getSystemStatistics())
- .setCreator(creator.toData().toProto())
- .setStartTime(this.startTime)
- .setInterval(this.interval)
- .setThreadDumper(this.threadDumper.getMetadata())
- .setDataAggregator(this.dataAggregator.getMetadata());
-
- if (comment != null) {
- metadata.setComment(comment);
- }
-
SamplerData.Builder proto = SamplerData.newBuilder();
- proto.setMetadata(metadata.build());
-
+ writeMetadataToProto(proto, platform, creator, comment, this.dataAggregator);
aggregateOutput();
-
- List<Map.Entry<String, ThreadNode>> data = new ArrayList<>(this.dataAggregator.getData().entrySet());
- data.sort(outputOrder);
-
- ClassSourceLookup.Visitor classSourceVisitor = ClassSourceLookup.createVisitor(classSourceLookup);
-
- for (Map.Entry<String, ThreadNode> entry : data) {
- proto.addThreads(entry.getValue().toProto(mergeMode));
- classSourceVisitor.visit(entry.getValue());
- }
-
- if (classSourceVisitor.hasMappings()) {
- proto.putAllClassSources(classSourceVisitor.getMapping());
- }
-
+ writeDataToProto(proto, this.dataAggregator, outputOrder, mergeMode, classSourceLookup);
return proto.build();
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java
index c873f9f..d2959bd 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java
@@ -33,14 +33,11 @@ import me.lucko.spark.common.sampler.node.ThreadNode;
import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.common.util.ClassSourceLookup;
import me.lucko.spark.proto.SparkSamplerProtos.SamplerData;
-import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata;
import java.lang.management.ManagementFactory;
import java.lang.management.ThreadInfo;
import java.lang.management.ThreadMXBean;
-import java.util.ArrayList;
import java.util.Comparator;
-import java.util.List;
import java.util.Map;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
@@ -130,37 +127,9 @@ public class JavaSampler extends AbstractSampler implements Runnable {
@Override
public SamplerData toProto(SparkPlatform platform, CommandSender creator, Comparator<? super Map.Entry<String, ThreadNode>> outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup) {
- final SamplerMetadata.Builder metadata = SamplerMetadata.newBuilder()
- .setPlatformMetadata(platform.getPlugin().getPlatformInfo().toData().toProto())
- .setPlatformStatistics(platform.getStatisticsProvider().getPlatformStatistics(getInitialGcStats()))
- .setSystemStatistics(platform.getStatisticsProvider().getSystemStatistics())
- .setCreator(creator.toData().toProto())
- .setStartTime(this.startTime)
- .setInterval(this.interval)
- .setThreadDumper(this.threadDumper.getMetadata())
- .setDataAggregator(this.dataAggregator.getMetadata());
-
- if (comment != null) {
- metadata.setComment(comment);
- }
-
SamplerData.Builder proto = SamplerData.newBuilder();
- proto.setMetadata(metadata.build());
-
- List<Map.Entry<String, ThreadNode>> data = new ArrayList<>(this.dataAggregator.getData().entrySet());
- data.sort(outputOrder);
-
- ClassSourceLookup.Visitor classSourceVisitor = ClassSourceLookup.createVisitor(classSourceLookup);
-
- for (Map.Entry<String, ThreadNode> entry : data) {
- proto.addThreads(entry.getValue().toProto(mergeMode));
- classSourceVisitor.visit(entry.getValue());
- }
-
- if (classSourceVisitor.hasMappings()) {
- proto.putAllClassSources(classSourceVisitor.getMapping());
- }
-
+ writeMetadataToProto(proto, platform, creator, comment, this.dataAggregator);
+ writeDataToProto(proto, this.dataAggregator, outputOrder, mergeMode, classSourceLookup);
return proto.build();
}
--
cgit
From 1dd973f7317734d47dcb9879070daee76ca4b6b7 Mon Sep 17 00:00:00 2001
From: Luck
Date: Tue, 28 Dec 2021 22:31:49 +0000
Subject: Add timeout thread to detect stuck commands
---
.../java/me/lucko/spark/common/SparkPlatform.java | 49 ++++++++++++++++++++++
.../spark/common/util/SparkThreadFactory.java | 42 +++++++++++++++++++
.../spark/fabric/plugin/FabricSparkPlugin.java | 8 +---
.../lucko/spark/forge/plugin/ForgeSparkPlugin.java | 8 +---
4 files changed, 95 insertions(+), 12 deletions(-)
create mode 100644 spark-common/src/main/java/me/lucko/spark/common/util/SparkThreadFactory.java
(limited to 'spark-common/src')
diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
index a721adc..53454aa 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
@@ -64,7 +64,9 @@ import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.ReentrantLock;
+import java.util.logging.Level;
import java.util.stream.Collectors;
import static net.kyori.adventure.text.Component.space;
@@ -262,12 +264,59 @@ public class SparkPlatform {
}
public void executeCommand(CommandSender sender, String[] args) {
+ AtomicReference<Thread> executorThread = new AtomicReference<>();
+ AtomicReference<Thread> timeoutThread = new AtomicReference<>();
+ AtomicBoolean completed = new AtomicBoolean(false);
+
+ // execute the command
this.plugin.executeAsync(() -> {
+ executorThread.set(Thread.currentThread());
this.commandExecuteLock.lock();
try {
executeCommand0(sender, args);
} finally {
this.commandExecuteLock.unlock();
+ executorThread.set(null);
+ completed.set(true);
+
+ Thread timeout = timeoutThread.get();
+ if (timeout != null) {
+ timeout.interrupt();
+ }
+ }
+ });
+
+ // schedule a task to detect timeouts
+ this.plugin.executeAsync(() -> {
+ timeoutThread.set(Thread.currentThread());
+ try {
+ for (int i = 1; i <= 3; i++) {
+ try {
+ Thread.sleep(5000);
+ } catch (InterruptedException e) {
+ // ignore
+ }
+
+ if (completed.get()) {
+ return;
+ }
+
+ Thread executor = executorThread.get();
+ if (executor == null) {
+ getPlugin().log(Level.WARNING, "A command execution has not completed after " +
+ (i * 5) + " seconds but there is no executor present. Perhaps the executor shutdown?");
+
+ } else {
+ String stackTrace = Arrays.stream(executor.getStackTrace())
+ .map(el -> " " + el.toString())
+ .collect(Collectors.joining("\n"));
+
+ getPlugin().log(Level.WARNING, "A command execution has not completed after " +
+ (i * 5) + " seconds, it might be stuck. Trace: \n" + stackTrace);
+ }
+ }
+ } finally {
+ timeoutThread.set(null);
}
});
}
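The watchdog pattern above, reduced to a standalone sketch (names are illustrative; spark runs both tasks on its async executor rather than raw threads):

    import java.util.Arrays;
    import java.util.concurrent.atomic.AtomicBoolean;
    import java.util.concurrent.atomic.AtomicReference;
    import java.util.stream.Collectors;

    public class WatchdogSketch {
        public static void watch(Runnable work) {
            AtomicReference<Thread> worker = new AtomicReference<>();
            AtomicBoolean done = new AtomicBoolean(false);

            Thread workThread = new Thread(() -> {
                worker.set(Thread.currentThread());
                try {
                    work.run();
                } finally {
                    done.set(true);
                }
            });
            workThread.start();

            new Thread(() -> {
                try {
                    Thread.sleep(5000);
                } catch (InterruptedException e) {
                    return;
                }
                Thread t = worker.get();
                if (!done.get() && t != null) {
                    // same diagnostic as above: dump the stuck worker's stack
                    String trace = Arrays.stream(t.getStackTrace())
                            .map(el -> "  " + el)
                            .collect(Collectors.joining("\n"));
                    System.err.println("work still running after 5s:\n" + trace);
                }
            }).start();
        }
    }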
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/SparkThreadFactory.java b/spark-common/src/main/java/me/lucko/spark/common/util/SparkThreadFactory.java
new file mode 100644
index 0000000..9c7309d
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/SparkThreadFactory.java
@@ -0,0 +1,42 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck)
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.util;
+
+import java.util.concurrent.ThreadFactory;
+import java.util.concurrent.atomic.AtomicInteger;
+
+public class SparkThreadFactory implements ThreadFactory {
+ private static final AtomicInteger poolNumber = new AtomicInteger(1);
+ private final AtomicInteger threadNumber = new AtomicInteger(1);
+ private final String namePrefix;
+
+ public SparkThreadFactory() {
+ this.namePrefix = "spark-worker-pool-" +
+ poolNumber.getAndIncrement() +
+ "-thread-";
+ }
+
+ public Thread newThread(Runnable r) {
+ Thread t = new Thread(r, this.namePrefix + this.threadNumber.getAndIncrement());
+ t.setDaemon(true);
+ return t;
+ }
+}
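Usage is as in the Fabric/Forge changes below; a minimal sketch:

    import me.lucko.spark.common.util.SparkThreadFactory;

    import java.util.concurrent.Executors;
    import java.util.concurrent.ScheduledExecutorService;
    import java.util.concurrent.TimeUnit;

    public class FactoryUsage {
        public static void main(String[] args) throws InterruptedException {
            // Daemon threads named "spark-worker-pool-1-thread-1", "-thread-2", ...
            ScheduledExecutorService scheduler =
                    Executors.newScheduledThreadPool(4, new SparkThreadFactory());
            scheduler.execute(() -> System.out.println(Thread.currentThread().getName()));
            scheduler.shutdown();
            scheduler.awaitTermination(5, TimeUnit.SECONDS);
        }
    }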
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricSparkPlugin.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricSparkPlugin.java
index 4bcfce4..7b0af11 100644
--- a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricSparkPlugin.java
+++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricSparkPlugin.java
@@ -35,6 +35,7 @@ import me.lucko.spark.common.SparkPlugin;
import me.lucko.spark.common.command.sender.CommandSender;
import me.lucko.spark.common.sampler.ThreadDumper;
import me.lucko.spark.common.util.ClassSourceLookup;
+import me.lucko.spark.common.util.SparkThreadFactory;
import me.lucko.spark.fabric.FabricClassSourceLookup;
import me.lucko.spark.fabric.FabricSparkMod;
@@ -60,12 +61,7 @@ public abstract class FabricSparkPlugin implements SparkPlugin {
protected FabricSparkPlugin(FabricSparkMod mod) {
this.mod = mod;
this.logger = LogManager.getLogger("spark");
- this.scheduler = Executors.newSingleThreadScheduledExecutor(r -> {
- Thread thread = Executors.defaultThreadFactory().newThread(r);
- thread.setName("spark-fabric-async-worker");
- thread.setDaemon(true);
- return thread;
- });
+ this.scheduler = Executors.newScheduledThreadPool(4, new SparkThreadFactory());
this.platform = new SparkPlatform(this);
}
diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeSparkPlugin.java b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeSparkPlugin.java
index f8b7559..7805935 100644
--- a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeSparkPlugin.java
+++ b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeSparkPlugin.java
@@ -35,6 +35,7 @@ import me.lucko.spark.common.SparkPlugin;
import me.lucko.spark.common.command.sender.CommandSender;
import me.lucko.spark.common.sampler.ThreadDumper;
import me.lucko.spark.common.util.ClassSourceLookup;
+import me.lucko.spark.common.util.SparkThreadFactory;
import me.lucko.spark.forge.ForgeClassSourceLookup;
import me.lucko.spark.forge.ForgeSparkMod;
@@ -79,12 +80,7 @@ public abstract class ForgeSparkPlugin implements SparkPlugin {
protected ForgeSparkPlugin(ForgeSparkMod mod) {
this.mod = mod;
this.logger = LogManager.getLogger("spark");
- this.scheduler = Executors.newSingleThreadScheduledExecutor(r -> {
- Thread thread = Executors.defaultThreadFactory().newThread(r);
- thread.setName("spark-forge-async-worker");
- thread.setDaemon(true);
- return thread;
- });
+ this.scheduler = Executors.newScheduledThreadPool(4, new SparkThreadFactory());
this.platform = new SparkPlatform(this);
}
--
cgit
From ae8725a7dd07e58ac9fcccec0d545b46ec347d9c Mon Sep 17 00:00:00 2001
From: Luck
Date: Wed, 29 Dec 2021 23:31:10 +0000
Subject: Fallback to itimer profiler mode inside containers
---
.../common/sampler/async/AsyncProfilerAccess.java | 54 +++++++++++++++++++---
.../spark/common/sampler/async/AsyncSampler.java | 2 +-
2 files changed, 48 insertions(+), 8 deletions(-)
(limited to 'spark-common/src')
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java
index 06db795..3dfbbbf 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java
@@ -45,22 +45,31 @@ public enum AsyncProfilerAccess {
/** An instance of the async-profiler Java API. */
private final AsyncProfiler profiler;
+ /** The event to use for profiling */
+ private final ProfilingEvent profilingEvent;
+
/** If profiler is null, contains the reason why setup failed */
private final Exception setupException;
AsyncProfilerAccess() {
AsyncProfiler profiler;
+ ProfilingEvent profilingEvent = null;
Exception setupException = null;
try {
profiler = load();
- ensureCpuEventSupported(profiler);
+ if (isEventSupported(profiler, ProfilingEvent.CPU, false)) {
+ profilingEvent = ProfilingEvent.CPU;
+ } else if (isEventSupported(profiler, ProfilingEvent.ITIMER, true)) {
+ profilingEvent = ProfilingEvent.ITIMER;
+ }
} catch (Exception e) {
profiler = null;
setupException = e;
}
this.profiler = profiler;
+ this.profilingEvent = profilingEvent;
this.setupException = setupException;
}
@@ -71,11 +80,18 @@ public enum AsyncProfilerAccess {
return this.profiler;
}
+ public ProfilingEvent getProfilingEvent() {
+ return this.profilingEvent;
+ }
+
public boolean checkSupported(SparkPlatform platform) {
if (this.setupException != null) {
if (this.setupException instanceof UnsupportedSystemException) {
platform.getPlugin().log(Level.INFO, "The async-profiler engine is not supported for your os/arch (" +
this.setupException.getMessage() + "), so the built-in Java engine will be used instead.");
+ } else if (this.setupException instanceof NativeLoadingException && this.setupException.getCause().getMessage().contains("libstdc++")) {
+ platform.getPlugin().log(Level.WARNING, "Unable to initialise the async-profiler engine because libstdc++ is not installed.");
+ platform.getPlugin().log(Level.WARNING, "Please see here for more information: https://spark.lucko.me/docs/misc/Using-async-profiler#install-libstdc");
} else {
platform.getPlugin().log(Level.WARNING, "Unable to initialise the async-profiler engine: " + this.setupException.getMessage());
platform.getPlugin().log(Level.WARNING, "Please see here for more information: https://spark.lucko.me/docs/misc/Using-async-profiler");
@@ -118,7 +134,7 @@ public enum AsyncProfilerAccess {
try {
return AsyncProfiler.getInstance(extractPath.toAbsolutePath().toString());
} catch (UnsatisfiedLinkError e) {
- throw new RuntimeException("A runtime error occurred whilst loading the native library", e);
+ throw new NativeLoadingException(e);
}
}
@@ -126,12 +142,30 @@ public enum AsyncProfilerAccess {
* Checks the {@code profiler} to ensure the CPU event is supported.
*
* @param profiler the profiler instance
- * @throws Exception if the event is not supported
+ * @return if the event is supported
*/
- private static void ensureCpuEventSupported(AsyncProfiler profiler) throws Exception {
- String resp = profiler.execute("check,event=cpu").trim();
- if (!resp.equalsIgnoreCase("ok")) {
- throw new UnsupportedOperationException("CPU event is not supported");
+ private static boolean isEventSupported(AsyncProfiler profiler, ProfilingEvent event, boolean throwException) {
+ try {
+ String resp = profiler.execute("check,event=" + event).trim();
+ if (resp.equalsIgnoreCase("ok")) {
+ return true;
+ } else if (throwException) {
+ throw new IllegalArgumentException(resp);
+ }
+ } catch (Exception e) {
+ if (throwException) {
+ throw new RuntimeException("Event " + event + " is not supported", e);
+ }
+ }
+ return false;
+ }
+
+ enum ProfilingEvent {
+ CPU, ITIMER;
+
+ @Override
+ public String toString() {
+ return name().toLowerCase(Locale.ROOT);
}
}
@@ -140,4 +174,10 @@ public enum AsyncProfilerAccess {
super(os + '/' + arch);
}
}
+
+ private static final class NativeLoadingException extends RuntimeException {
+ public NativeLoadingException(Throwable cause) {
+ super("A runtime error occurred whilst loading the native library", cause);
+ }
+ }
}
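The capability probe above boils down to async-profiler's check command; a sketch of the same idea in isolation (AsyncProfiler.execute is part of the async-profiler Java API; the event ids are plain strings such as "cpu" and "itimer"):

    import one.profiler.AsyncProfiler;

    public class EventProbe {
        // Returns true if the profiler reports the event as usable.
        public static boolean supports(AsyncProfiler profiler, String event) {
            try {
                return profiler.execute("check,event=" + event).trim().equalsIgnoreCase("ok");
            } catch (Exception e) {
                return false; // e.g. perf_events is restricted inside a container
            }
        }
    }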
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java
index 62325ae..5d587a0 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java
@@ -99,7 +99,7 @@ public class AsyncSampler extends AbstractSampler {
throw new RuntimeException("Unable to create temporary output file", e);
}
- String command = "start,event=cpu,interval=" + this.interval + "us,threads,jfr,file=" + this.outputFile.toString();
+ String command = "start,event=" + AsyncProfilerAccess.INSTANCE.getProfilingEvent() + ",interval=" + this.interval + "us,threads,jfr,file=" + this.outputFile.toString();
if (this.threadDumper instanceof ThreadDumper.Specific) {
command += ",filter";
}
--
cgit
From 22da90504795bf79e9acff893212b2c7c1de3392 Mon Sep 17 00:00:00 2001
From: Luck
Date: Wed, 29 Dec 2021 23:34:23 +0000
Subject: Increment data version
---
.../src/main/java/me/lucko/spark/common/platform/PlatformInfo.java | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
(limited to 'spark-common/src')
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformInfo.java b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformInfo.java
index eb1b25d..082389d 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformInfo.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformInfo.java
@@ -24,7 +24,7 @@ import me.lucko.spark.proto.SparkProtos.PlatformMetadata;
public interface PlatformInfo {
- int DATA_VERSION = 1;
+ int DATA_VERSION = 2;
Type getType();
--
cgit
From 2003a5c8dffa52d1bd8b923a0f899141ae816fbd Mon Sep 17 00:00:00 2001
From: Luck
Date: Thu, 30 Dec 2021 12:03:05 +0000
Subject: Include JVM startup args
---
.../me/lucko/spark/common/platform/PlatformStatisticsProvider.java | 6 +++++-
spark-common/src/main/proto/spark/spark.proto | 1 +
2 files changed, 6 insertions(+), 1 deletion(-)
(limited to 'spark-common/src')
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java
index a9d83b2..a16c643 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java
@@ -32,6 +32,7 @@ import me.lucko.spark.proto.SparkProtos.SystemStatistics;
import java.lang.management.ManagementFactory;
import java.lang.management.MemoryUsage;
+import java.lang.management.RuntimeMXBean;
import java.util.Map;
public class PlatformStatisticsProvider {
@@ -42,6 +43,8 @@ public class PlatformStatisticsProvider {
}
public SystemStatistics getSystemStatistics() {
+ RuntimeMXBean runtimeBean = ManagementFactory.getRuntimeMXBean();
+
SystemStatistics.Builder builder = SystemStatistics.newBuilder()
.setCpu(SystemStatistics.Cpu.newBuilder()
.setThreads(Runtime.getRuntime().availableProcessors())
@@ -85,10 +88,11 @@ public class PlatformStatisticsProvider {
.setVendor(System.getProperty("java.vendor", "unknown"))
.setVersion(System.getProperty("java.version", "unknown"))
.setVendorVersion(System.getProperty("java.vendor.version", "unknown"))
+ .setVmArgs(String.join(" ", runtimeBean.getInputArguments()))
.build()
);
- long uptime = ManagementFactory.getRuntimeMXBean().getUptime();
+ long uptime = runtimeBean.getUptime();
builder.setUptime(uptime);
Map<String, GarbageCollectorStatistics> gcStats = GarbageCollectorStatistics.pollStats();
diff --git a/spark-common/src/main/proto/spark/spark.proto b/spark-common/src/main/proto/spark/spark.proto
index b4f9efb..678df3a 100644
--- a/spark-common/src/main/proto/spark/spark.proto
+++ b/spark-common/src/main/proto/spark/spark.proto
@@ -74,6 +74,7 @@ message SystemStatistics {
string vendor = 1;
string version = 2;
string vendor_version = 3;
+ string vm_args = 4;
}
}
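For illustration, this is what the new vm_args field ends up carrying (getInputArguments returns only the JVM flags, not the classpath or main-class arguments):

    import java.lang.management.ManagementFactory;

    public class VmArgsExample {
        public static void main(String[] args) {
            String vmArgs = String.join(" ",
                    ManagementFactory.getRuntimeMXBean().getInputArguments());
            System.out.println(vmArgs); // e.g. "-Xms2G -Xmx4G -XX:+UseG1GC"
        }
    }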
--
cgit
From 732110084a44e5d18dee5b6744cc44c9cdd41fb9 Mon Sep 17 00:00:00 2001
From: Luck
Date: Thu, 30 Dec 2021 12:54:30 +0000
Subject: Include the name of the CPU model in stats for Linux systems
---
.../me/lucko/spark/common/monitor/cpu/CpuInfo.java | 69 ++++++++++++++++++++++
.../platform/PlatformStatisticsProvider.java | 2 +
spark-common/src/main/proto/spark/spark.proto | 1 +
3 files changed, 72 insertions(+)
create mode 100644 spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuInfo.java
(limited to 'spark-common/src')
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuInfo.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuInfo.java
new file mode 100644
index 0000000..a179904
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuInfo.java
@@ -0,0 +1,69 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck)
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.monitor.cpu;
+
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.regex.Pattern;
+
+/**
+ * Small utility to query the CPU model on Linux systems.
+ */
+public enum CpuInfo {
+ ;
+
+ private static final Pattern SPACE_COLON_SPACE_PATTERN = Pattern.compile("\\s+:\\s");
+
+ /**
+ * Queries the CPU model.
+ *
+ * @return the cpu model
+ */
+ public static String queryCpuModel() {
+ List<String> cpuInfo = readFile("/proc/cpuinfo");
+ for (String line : cpuInfo) {
+ String[] splitLine = SPACE_COLON_SPACE_PATTERN.split(line);
+
+ if (splitLine[0].equals("model name") || splitLine[0].equals("Processor")) {
+ return splitLine[1];
+ }
+ }
+ return "";
+ }
+
+ private static List<String> readFile(String file) {
+ Path path = Paths.get(file);
+ if (Files.isReadable(path)) {
+ try {
+ return Files.readAllLines(path, StandardCharsets.UTF_8);
+ } catch (IOException e) {
+ // ignore
+ }
+ }
+ return new ArrayList<>();
+ }
+
+}
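On a typical Linux host, /proc/cpuinfo contains a line like "model name : Intel(R) Xeon(R) CPU E5-2680 v4 @ 2.40GHz", which the pattern above splits on the "\s+:\s" separator. A usage sketch:

    import me.lucko.spark.common.monitor.cpu.CpuInfo;

    public class CpuInfoUsage {
        public static void main(String[] args) {
            String model = CpuInfo.queryCpuModel(); // "" on non-Linux systems
            System.out.println(model.isEmpty() ? "cpu model unknown" : model);
        }
    }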
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java
index a16c643..fce45ec 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java
@@ -21,6 +21,7 @@
package me.lucko.spark.common.platform;
import me.lucko.spark.common.SparkPlatform;
+import me.lucko.spark.common.monitor.cpu.CpuInfo;
import me.lucko.spark.common.monitor.cpu.CpuMonitor;
import me.lucko.spark.common.monitor.disk.DiskUsage;
import me.lucko.spark.common.monitor.memory.GarbageCollectorStatistics;
@@ -58,6 +59,7 @@ public class PlatformStatisticsProvider {
.setLast15M(CpuMonitor.systemLoad15MinAvg())
.build()
)
+ .setModelName(CpuInfo.queryCpuModel())
.build()
)
.setMemory(SystemStatistics.Memory.newBuilder()
diff --git a/spark-common/src/main/proto/spark/spark.proto b/spark-common/src/main/proto/spark/spark.proto
index 678df3a..520f19e 100644
--- a/spark-common/src/main/proto/spark/spark.proto
+++ b/spark-common/src/main/proto/spark/spark.proto
@@ -36,6 +36,7 @@ message SystemStatistics {
int32 threads = 1;
Usage process_usage = 2;
Usage system_usage = 3;
+ string modelName = 4; // optional
message Usage {
double last1m = 1;
--
cgit
From b41bc1f191b8b02cb9f048858a18b299474bb5b6 Mon Sep 17 00:00:00 2001
From: Luck
Date: Thu, 30 Dec 2021 13:03:51 +0000
Subject: Bump version
---
build.gradle | 2 +-
spark-common/src/main/proto/spark/spark.proto | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
(limited to 'spark-common/src')
diff --git a/build.gradle b/build.gradle
index 4acf08d..0133b4f 100644
--- a/build.gradle
+++ b/build.gradle
@@ -13,7 +13,7 @@ subprojects {
apply plugin: 'java-library'
ext {
- pluginVersion = '1.7.0'
+ pluginVersion = '1.7.1'
pluginDescription = 'spark is a performance profiling plugin/mod for Minecraft clients, servers and proxies.'
}
diff --git a/spark-common/src/main/proto/spark/spark.proto b/spark-common/src/main/proto/spark/spark.proto
index 520f19e..ae04cd7 100644
--- a/spark-common/src/main/proto/spark/spark.proto
+++ b/spark-common/src/main/proto/spark/spark.proto
@@ -36,7 +36,7 @@ message SystemStatistics {
int32 threads = 1;
Usage process_usage = 2;
Usage system_usage = 3;
- string modelName = 4; // optional
+ string model_name = 4; // optional
message Usage {
double last1m = 1;
--
cgit
From 03bfa12bffa4f3116bf8ddac8f6bd528210651c9 Mon Sep 17 00:00:00 2001
From: Luck
Date: Thu, 30 Dec 2021 21:34:18 +0000
Subject: Use wallclock sampling instead of itimer
---
build.gradle | 2 +-
.../spark/common/sampler/async/AsyncProfilerAccess.java | 16 ++++++++++++----
2 files changed, 13 insertions(+), 5 deletions(-)
(limited to 'spark-common/src')
diff --git a/build.gradle b/build.gradle
index 0133b4f..fd6813f 100644
--- a/build.gradle
+++ b/build.gradle
@@ -13,7 +13,7 @@ subprojects {
apply plugin: 'java-library'
ext {
- pluginVersion = '1.7.1'
+ pluginVersion = '1.7.2'
pluginDescription = 'spark is a performance profiling plugin/mod for Minecraft clients, servers and proxies.'
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java
index 3dfbbbf..d1c8393 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java
@@ -27,6 +27,7 @@ import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.util.TemporaryFiles;
import one.profiler.AsyncProfiler;
+import one.profiler.Events;
import java.io.InputStream;
import java.net.URL;
@@ -60,8 +61,8 @@ public enum AsyncProfilerAccess {
profiler = load();
if (isEventSupported(profiler, ProfilingEvent.CPU, false)) {
profilingEvent = ProfilingEvent.CPU;
- } else if (isEventSupported(profiler, ProfilingEvent.ITIMER, true)) {
- profilingEvent = ProfilingEvent.ITIMER;
+ } else if (isEventSupported(profiler, ProfilingEvent.WALL, true)) {
+ profilingEvent = ProfilingEvent.WALL;
}
} catch (Exception e) {
profiler = null;
@@ -161,11 +162,18 @@ public enum AsyncProfilerAccess {
}
enum ProfilingEvent {
- CPU, ITIMER;
+ CPU(Events.CPU),
+ WALL(Events.WALL);
+
+ private final String id;
+
+ ProfilingEvent(String id) {
+ this.id = id;
+ }
@Override
public String toString() {
- return name().toLowerCase(Locale.ROOT);
+ return this.id;
}
}
--
cgit
From 077590bf3af2c3e27d05f138b3023e86e56fe34a Mon Sep 17 00:00:00 2001
From: Luck
Date: Sun, 9 Jan 2022 17:11:22 +0000
Subject: Use thread safe collections in TickHook and TickReporter (#155)
---
.../src/main/java/me/lucko/spark/common/tick/AbstractTickHook.java | 4 ++--
.../main/java/me/lucko/spark/common/tick/AbstractTickReporter.java | 4 ++--
2 files changed, 4 insertions(+), 4 deletions(-)
(limited to 'spark-common/src')
diff --git a/spark-common/src/main/java/me/lucko/spark/common/tick/AbstractTickHook.java b/spark-common/src/main/java/me/lucko/spark/common/tick/AbstractTickHook.java
index a6e8745..2a31e0d 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/tick/AbstractTickHook.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/tick/AbstractTickHook.java
@@ -20,12 +20,12 @@
package me.lucko.spark.common.tick;
-import java.util.HashSet;
import java.util.Set;
+import java.util.concurrent.CopyOnWriteArraySet;
public abstract class AbstractTickHook implements TickHook {
- private final Set<Callback> tasks = new HashSet<>();
+ private final Set<Callback> tasks = new CopyOnWriteArraySet<>();
private int tick = 0;
protected void onTick() {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/tick/AbstractTickReporter.java b/spark-common/src/main/java/me/lucko/spark/common/tick/AbstractTickReporter.java
index 74a814d..431a641 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/tick/AbstractTickReporter.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/tick/AbstractTickReporter.java
@@ -20,11 +20,11 @@
package me.lucko.spark.common.tick;
-import java.util.HashSet;
import java.util.Set;
+import java.util.concurrent.CopyOnWriteArraySet;
public abstract class AbstractTickReporter implements TickReporter {
- private final Set<Callback> tasks = new HashSet<>();
+ private final Set<Callback> tasks = new CopyOnWriteArraySet<>();
protected void onTick(double duration) {
for (Callback r : this.tasks) {
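A sketch of the hazard this removes: mutating a plain HashSet while a callback unregisters itself mid-iteration risks a ConcurrentModificationException from the fail-fast iterator, whereas CopyOnWriteArraySet iterates over an immutable snapshot (names here are illustrative):

    import java.util.Set;
    import java.util.concurrent.CopyOnWriteArraySet;

    public class SnapshotIterationDemo {
        public static void main(String[] args) {
            Set<Runnable> tasks = new CopyOnWriteArraySet<>();
            tasks.add(new Runnable() {
                @Override
                public void run() {
                    tasks.remove(this); // safe: iteration below uses a snapshot
                }
            });
            for (Runnable r : tasks) {
                r.run(); // a HashSet's fail-fast iterator may throw here
            }
        }
    }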
--
cgit
From d2716da1dc7f61aa45c0058e9a8fd65aa858f3c8 Mon Sep 17 00:00:00 2001
From: Luck
Date: Thu, 20 Jan 2022 20:22:02 +0000
Subject: Add ping statistics and command
---
.../spark/bukkit/BukkitPlayerPingProvider.java | 57 +++++
.../me/lucko/spark/bukkit/BukkitSparkPlugin.java | 10 +
.../placeholder/SparkPlaceholderProvider.java | 54 ++---
.../bungeecord/BungeeCordPlayerPingProvider.java | 47 ++++
.../spark/bungeecord/BungeeCordSparkPlugin.java | 6 +
.../java/me/lucko/spark/common/SparkPlatform.java | 17 ++
.../java/me/lucko/spark/common/SparkPlugin.java | 12 ++
.../spark/common/command/modules/HealthModule.java | 239 ++++++++-------------
.../spark/common/monitor/MonitoringExecutor.java | 16 ++
.../lucko/spark/common/monitor/cpu/CpuMonitor.java | 12 +-
.../spark/common/monitor/ping/PingStatistics.java | 148 +++++++++++++
.../spark/common/monitor/ping/PingSummary.java | 81 +++++++
.../common/monitor/ping/PlayerPingProvider.java | 40 ++++
.../platform/PlatformStatisticsProvider.java | 17 +-
.../me/lucko/spark/common/util/RollingAverage.java | 6 +
.../spark/common/util/StatisticFormatter.java | 187 ++++++++++++++++
spark-common/src/main/proto/spark/spark.proto | 25 ++-
.../spark/fabric/FabricPlayerPingProvider.java | 47 ++++
.../placeholder/SparkFabricPlaceholderApi.java | 36 ++--
.../fabric/plugin/FabricServerSparkPlugin.java | 7 +
.../lucko/spark/forge/ForgePlayerPingProvider.java | 47 ++++
.../spark/forge/plugin/ForgeServerSparkPlugin.java | 7 +
.../spark/nukkit/NukkitPlayerPingProvider.java | 47 ++++
.../me/lucko/spark/nukkit/NukkitSparkPlugin.java | 6 +
.../spark/sponge/Sponge7PlayerPingProvider.java | 47 ++++
.../me/lucko/spark/sponge/Sponge7SparkPlugin.java | 10 +
.../spark/sponge/Sponge8PlayerPingProvider.java | 47 ++++
.../me/lucko/spark/sponge/Sponge8SparkPlugin.java | 10 +
.../spark/velocity/VelocityPlayerPingProvider.java | 46 ++++
.../lucko/spark/velocity/VelocitySparkPlugin.java | 6 +
.../velocity/Velocity4PlayerPingProvider.java | 46 ++++
.../lucko/spark/velocity/Velocity4SparkPlugin.java | 6 +
.../spark/waterdog/WaterdogPlayerPingProvider.java | 47 ++++
.../lucko/spark/waterdog/WaterdogSparkPlugin.java | 6 +
34 files changed, 1220 insertions(+), 222 deletions(-)
create mode 100644 spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitPlayerPingProvider.java
create mode 100644 spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordPlayerPingProvider.java
create mode 100644 spark-common/src/main/java/me/lucko/spark/common/monitor/MonitoringExecutor.java
create mode 100644 spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PingStatistics.java
create mode 100644 spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PingSummary.java
create mode 100644 spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PlayerPingProvider.java
create mode 100644 spark-common/src/main/java/me/lucko/spark/common/util/StatisticFormatter.java
create mode 100644 spark-fabric/src/main/java/me/lucko/spark/fabric/FabricPlayerPingProvider.java
create mode 100644 spark-forge/src/main/java/me/lucko/spark/forge/ForgePlayerPingProvider.java
create mode 100644 spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitPlayerPingProvider.java
create mode 100644 spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7PlayerPingProvider.java
create mode 100644 spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8PlayerPingProvider.java
create mode 100644 spark-velocity/src/main/java/me/lucko/spark/velocity/VelocityPlayerPingProvider.java
create mode 100644 spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4PlayerPingProvider.java
create mode 100644 spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogPlayerPingProvider.java
(limited to 'spark-common/src')
diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitPlayerPingProvider.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitPlayerPingProvider.java
new file mode 100644
index 0000000..2cf58cf
--- /dev/null
+++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitPlayerPingProvider.java
@@ -0,0 +1,57 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck)
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.bukkit;
+
+import com.google.common.collect.ImmutableMap;
+
+import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
+
+import org.bukkit.Server;
+import org.bukkit.entity.Player;
+
+import java.util.Map;
+
+public class BukkitPlayerPingProvider implements PlayerPingProvider {
+
+ public static boolean isSupported() {
+ try {
+ Player.Spigot.class.getMethod("getPing");
+ return true;
+ } catch (Exception e) {
+ return false;
+ }
+ }
+
+ private final Server server;
+
+ public BukkitPlayerPingProvider(Server server) {
+ this.server = server;
+ }
+
+ @Override
+ public Map<String, Integer> poll() {
+ ImmutableMap.Builder<String, Integer> builder = ImmutableMap.builder();
+ for (Player player : this.server.getOnlinePlayers()) {
+ builder.put(player.getName(), player.spigot().getPing());
+ }
+ return builder.build();
+ }
+}
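The provider contract (player name mapped to ping in milliseconds) is small enough that platforms without a native ping API can stub it; a hypothetical sketch:

    import me.lucko.spark.common.monitor.ping.PlayerPingProvider;

    import java.util.Collections;
    import java.util.Map;

    public class FixedPingProvider implements PlayerPingProvider {
        @Override
        public Map<String, Integer> poll() {
            // fabricated value, for illustration only
            return Collections.singletonMap("ExamplePlayer", 42);
        }
    }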
diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java
index f81a176..6929a4d 100644
--- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java
+++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java
@@ -25,6 +25,7 @@ import me.lucko.spark.bukkit.placeholder.SparkMVdWPlaceholders;
import me.lucko.spark.bukkit.placeholder.SparkPlaceholderApi;
import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.SparkPlugin;
+import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
import me.lucko.spark.common.platform.PlatformInfo;
import me.lucko.spark.common.sampler.ThreadDumper;
import me.lucko.spark.common.tick.TickHook;
@@ -171,6 +172,15 @@ public class BukkitSparkPlugin extends JavaPlugin implements SparkPlugin {
return new BukkitClassSourceLookup();
}
+ @Override
+ public PlayerPingProvider createPlayerPingProvider() {
+ if (BukkitPlayerPingProvider.isSupported()) {
+ return new BukkitPlayerPingProvider(getServer());
+ } else {
+ return null;
+ }
+ }
+
@Override
public PlatformInfo getPlatformInfo() {
return new BukkitPlatformInfo(getServer());
diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkPlaceholderProvider.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkPlaceholderProvider.java
index 6e14bdb..96c9e93 100644
--- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkPlaceholderProvider.java
+++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkPlaceholderProvider.java
@@ -21,9 +21,9 @@
package me.lucko.spark.bukkit.placeholder;
import me.lucko.spark.common.SparkPlatform;
-import me.lucko.spark.common.command.modules.HealthModule;
import me.lucko.spark.common.monitor.cpu.CpuMonitor;
import me.lucko.spark.common.monitor.tick.TickStatistics;
+import me.lucko.spark.common.util.StatisticFormatter;
import net.kyori.adventure.text.Component;
import net.kyori.adventure.text.TextComponent;
@@ -42,22 +42,22 @@ enum SparkPlaceholderProvider {
switch (placeholder) {
case "tps":
return Component.text()
- .append(HealthModule.formatTps(tickStatistics.tps5Sec())).append(Component.text(", "))
- .append(HealthModule.formatTps(tickStatistics.tps10Sec())).append(Component.text(", "))
- .append(HealthModule.formatTps(tickStatistics.tps1Min())).append(Component.text(", "))
- .append(HealthModule.formatTps(tickStatistics.tps5Min())).append(Component.text(", "))
- .append(HealthModule.formatTps(tickStatistics.tps15Min()))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps5Sec())).append(Component.text(", "))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps10Sec())).append(Component.text(", "))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps1Min())).append(Component.text(", "))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps5Min())).append(Component.text(", "))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps15Min()))
.build();
case "tps_5s":
- return HealthModule.formatTps(tickStatistics.tps5Sec());
+ return StatisticFormatter.formatTps(tickStatistics.tps5Sec());
case "tps_10s":
- return HealthModule.formatTps(tickStatistics.tps10Sec());
+ return StatisticFormatter.formatTps(tickStatistics.tps10Sec());
case "tps_1m":
- return HealthModule.formatTps(tickStatistics.tps1Min());
+ return StatisticFormatter.formatTps(tickStatistics.tps1Min());
case "tps_5m":
- return HealthModule.formatTps(tickStatistics.tps5Min());
+ return StatisticFormatter.formatTps(tickStatistics.tps5Min());
case "tps_15m":
- return HealthModule.formatTps(tickStatistics.tps15Min());
+ return StatisticFormatter.formatTps(tickStatistics.tps15Min());
}
}
@@ -70,13 +70,13 @@ enum SparkPlaceholderProvider {
switch (placeholder) {
case "tickduration":
return Component.text()
- .append(HealthModule.formatTickDurations(tickStatistics.duration10Sec())).append(Component.text("; "))
- .append(HealthModule.formatTickDurations(tickStatistics.duration1Min()))
+ .append(StatisticFormatter.formatTickDurations(tickStatistics.duration10Sec())).append(Component.text("; "))
+ .append(StatisticFormatter.formatTickDurations(tickStatistics.duration1Min()))
.build();
case "tickduration_10s":
- return HealthModule.formatTickDurations(tickStatistics.duration10Sec());
+ return StatisticFormatter.formatTickDurations(tickStatistics.duration10Sec());
case "tickduration_1m":
- return HealthModule.formatTickDurations(tickStatistics.duration1Min());
+ return StatisticFormatter.formatTickDurations(tickStatistics.duration1Min());
}
}
@@ -84,28 +84,28 @@ enum SparkPlaceholderProvider {
switch (placeholder) {
case "cpu_system":
return Component.text()
- .append(HealthModule.formatCpuUsage(CpuMonitor.systemLoad10SecAvg())).append(Component.text(", "))
- .append(HealthModule.formatCpuUsage(CpuMonitor.systemLoad1MinAvg())).append(Component.text(", "))
- .append(HealthModule.formatCpuUsage(CpuMonitor.systemLoad15MinAvg()))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad10SecAvg())).append(Component.text(", "))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad1MinAvg())).append(Component.text(", "))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad15MinAvg()))
.build();
case "cpu_system_10s":
- return HealthModule.formatCpuUsage(CpuMonitor.systemLoad10SecAvg());
+ return StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad10SecAvg());
case "cpu_system_1m":
- return HealthModule.formatCpuUsage(CpuMonitor.systemLoad1MinAvg());
+ return StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad1MinAvg());
case "cpu_system_15m":
- return HealthModule.formatCpuUsage(CpuMonitor.systemLoad15MinAvg());
+ return StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad15MinAvg());
case "cpu_process":
return Component.text()
- .append(HealthModule.formatCpuUsage(CpuMonitor.processLoad10SecAvg())).append(Component.text(", "))
- .append(HealthModule.formatCpuUsage(CpuMonitor.processLoad1MinAvg())).append(Component.text(", "))
- .append(HealthModule.formatCpuUsage(CpuMonitor.processLoad15MinAvg()))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad10SecAvg())).append(Component.text(", "))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad1MinAvg())).append(Component.text(", "))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad15MinAvg()))
.build();
case "cpu_process_10s":
- return HealthModule.formatCpuUsage(CpuMonitor.processLoad10SecAvg());
+ return StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad10SecAvg());
case "cpu_process_1m":
- return HealthModule.formatCpuUsage(CpuMonitor.processLoad1MinAvg());
+ return StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad1MinAvg());
case "cpu_process_15m":
- return HealthModule.formatCpuUsage(CpuMonitor.processLoad15MinAvg());
+ return StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad15MinAvg());
}
}
diff --git a/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordPlayerPingProvider.java b/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordPlayerPingProvider.java
new file mode 100644
index 0000000..37955a3
--- /dev/null
+++ b/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordPlayerPingProvider.java
@@ -0,0 +1,47 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck)
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.bungeecord;
+
+import com.google.common.collect.ImmutableMap;
+
+import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
+
+import net.md_5.bungee.api.ProxyServer;
+import net.md_5.bungee.api.connection.ProxiedPlayer;
+
+import java.util.Map;
+
+public class BungeeCordPlayerPingProvider implements PlayerPingProvider {
+ private final ProxyServer proxy;
+
+ public BungeeCordPlayerPingProvider(ProxyServer proxy) {
+ this.proxy = proxy;
+ }
+
+ @Override
+ public Map<String, Integer> poll() {
+ ImmutableMap.Builder<String, Integer> builder = ImmutableMap.builder();
+ for (ProxiedPlayer player : this.proxy.getPlayers()) {
+ builder.put(player.getName(), player.getPing());
+ }
+ return builder.build();
+ }
+}
diff --git a/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordSparkPlugin.java b/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordSparkPlugin.java
index ebdfe18..e259adc 100644
--- a/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordSparkPlugin.java
+++ b/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordSparkPlugin.java
@@ -22,6 +22,7 @@ package me.lucko.spark.bungeecord;
import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.SparkPlugin;
+import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
import me.lucko.spark.common.platform.PlatformInfo;
import me.lucko.spark.common.util.ClassSourceLookup;
@@ -90,6 +91,11 @@ public class BungeeCordSparkPlugin extends Plugin implements SparkPlugin {
return new BungeeCordClassSourceLookup();
}
+ @Override
+ public PlayerPingProvider createPlayerPingProvider() {
+ return new BungeeCordPlayerPingProvider(getProxy());
+ }
+
@Override
public PlatformInfo getPlatformInfo() {
return new BungeeCordPlatformInfo(getProxy());
diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
index 53454aa..c93b876 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
@@ -40,6 +40,8 @@ import me.lucko.spark.common.command.tabcomplete.CompletionSupplier;
import me.lucko.spark.common.command.tabcomplete.TabCompleter;
import me.lucko.spark.common.monitor.cpu.CpuMonitor;
import me.lucko.spark.common.monitor.memory.GarbageCollectorStatistics;
+import me.lucko.spark.common.monitor.ping.PingStatistics;
+import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
import me.lucko.spark.common.monitor.tick.TickStatistics;
import me.lucko.spark.common.platform.PlatformStatisticsProvider;
import me.lucko.spark.common.tick.TickHook;
@@ -99,6 +101,7 @@ public class SparkPlatform {
private final TickHook tickHook;
private final TickReporter tickReporter;
private final TickStatistics tickStatistics;
+ private final PingStatistics pingStatistics;
private final PlatformStatisticsProvider statisticsProvider;
private Map<String, GarbageCollectorStatistics> startupGcStatistics = ImmutableMap.of();
private long serverNormalOperationStartTime;
@@ -136,6 +139,10 @@ public class SparkPlatform {
this.tickHook = plugin.createTickHook();
this.tickReporter = plugin.createTickReporter();
this.tickStatistics = this.tickHook != null ? new TickStatistics() : null;
+
+ PlayerPingProvider pingProvider = plugin.createPlayerPingProvider();
+ this.pingStatistics = pingProvider != null ? new PingStatistics(pingProvider) : null;
+
this.statisticsProvider = new PlatformStatisticsProvider(this);
}
@@ -152,6 +159,9 @@ public class SparkPlatform {
this.tickReporter.addCallback(this.tickStatistics);
this.tickReporter.start();
}
+ if (this.pingStatistics != null) {
+ this.pingStatistics.start();
+ }
CpuMonitor.ensureMonitoring();
// poll startup GC statistics after plugins & the world have loaded
@@ -172,6 +182,9 @@ public class SparkPlatform {
if (this.tickReporter != null) {
this.tickReporter.close();
}
+ if (this.pingStatistics != null) {
+ this.pingStatistics.close();
+ }
for (CommandModule module : this.commandModules) {
module.close();
@@ -231,6 +244,10 @@ public class SparkPlatform {
return this.tickStatistics;
}
+ public PingStatistics getPingStatistics() {
+ return this.pingStatistics;
+ }
+
public Map<String, GarbageCollectorStatistics> getStartupGcStatistics() {
return this.startupGcStatistics;
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java
index f312916..5feb172 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java
@@ -22,6 +22,7 @@ package me.lucko.spark.common;
import me.lucko.spark.api.Spark;
import me.lucko.spark.common.command.sender.CommandSender;
+import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
import me.lucko.spark.common.platform.PlatformInfo;
import me.lucko.spark.common.sampler.ThreadDumper;
import me.lucko.spark.common.tick.TickHook;
@@ -120,6 +121,17 @@ public interface SparkPlugin {
return ClassSourceLookup.NO_OP;
}
+ /**
+ * Creates a player ping provider function.
+ *
+ * <p>Returns {@code null} if the platform does not support querying player pings.</p>
+ *
+ * @return the player ping provider function
+ */
+ default PlayerPingProvider createPlayerPingProvider() {
+ return null;
+ }
+
/**
* Gets information for the platform.
*
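
A platform module opts in simply by overriding this new hook; a minimal sketch of an override, with an illustrative hard-coded map standing in for a real platform API (the names and values below are not part of the patch):

    @Override
    public PlayerPingProvider createPlayerPingProvider() {
        // PlayerPingProvider is a @FunctionalInterface, so a lambda suffices.
        return () -> ImmutableMap.of("Notch", 23, "jeb_", 48); // name -> ping (ms)
    }
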
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java
index b036d21..ea4f140 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java
@@ -20,8 +20,6 @@
package me.lucko.spark.common.command.modules;
-import com.google.common.base.Strings;
-
import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.command.Arguments;
import me.lucko.spark.common.command.Command;
@@ -31,13 +29,14 @@ import me.lucko.spark.common.command.sender.CommandSender;
import me.lucko.spark.common.command.tabcomplete.TabCompleter;
import me.lucko.spark.common.monitor.cpu.CpuMonitor;
import me.lucko.spark.common.monitor.disk.DiskUsage;
+import me.lucko.spark.common.monitor.ping.PingStatistics;
+import me.lucko.spark.common.monitor.ping.PingSummary;
import me.lucko.spark.common.monitor.tick.TickStatistics;
import me.lucko.spark.common.util.FormatUtil;
import me.lucko.spark.common.util.RollingAverage;
+import me.lucko.spark.common.util.StatisticFormatter;
import net.kyori.adventure.text.Component;
-import net.kyori.adventure.text.TextComponent;
-import net.kyori.adventure.text.format.TextColor;
import java.lang.management.ManagementFactory;
import java.lang.management.MemoryMXBean;
@@ -46,6 +45,7 @@ import java.lang.management.MemoryType;
import java.lang.management.MemoryUsage;
import java.util.LinkedList;
import java.util.List;
+import java.util.Set;
import java.util.function.Consumer;
import static net.kyori.adventure.text.Component.empty;
@@ -57,7 +57,6 @@ import static net.kyori.adventure.text.format.NamedTextColor.GRAY;
import static net.kyori.adventure.text.format.NamedTextColor.GREEN;
import static net.kyori.adventure.text.format.NamedTextColor.RED;
import static net.kyori.adventure.text.format.NamedTextColor.WHITE;
-import static net.kyori.adventure.text.format.NamedTextColor.YELLOW;
import static net.kyori.adventure.text.format.TextDecoration.BOLD;
public class HealthModule implements CommandModule {
@@ -71,6 +70,14 @@ public class HealthModule implements CommandModule {
.build()
);
+ consumer.accept(Command.builder()
+ .aliases("ping")
+ .argumentUsage("player", "username")
+ .executor(HealthModule::ping)
+ .tabCompleter((platform, sender, arguments) -> TabCompleter.completeForOpts(arguments, "--player"))
+ .build()
+ );
+
consumer.accept(Command.builder()
.aliases("healthreport", "health", "ht")
.argumentUsage("memory", null)
@@ -86,11 +93,11 @@ public class HealthModule implements CommandModule {
resp.replyPrefixed(text("TPS from last 5s, 10s, 1m, 5m, 15m:"));
resp.replyPrefixed(text()
.content(" ")
- .append(formatTps(tickStatistics.tps5Sec())).append(text(", "))
- .append(formatTps(tickStatistics.tps10Sec())).append(text(", "))
- .append(formatTps(tickStatistics.tps1Min())).append(text(", "))
- .append(formatTps(tickStatistics.tps5Min())).append(text(", "))
- .append(formatTps(tickStatistics.tps15Min()))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps5Sec())).append(text(", "))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps10Sec())).append(text(", "))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps1Min())).append(text(", "))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps5Min())).append(text(", "))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps15Min()))
.build()
);
resp.replyPrefixed(empty());
@@ -99,8 +106,8 @@ public class HealthModule implements CommandModule {
resp.replyPrefixed(text("Tick durations (min/med/95%ile/max ms) from last 10s, 1m:"));
resp.replyPrefixed(text()
.content(" ")
- .append(formatTickDurations(tickStatistics.duration10Sec())).append(text("; "))
- .append(formatTickDurations(tickStatistics.duration1Min()))
+ .append(StatisticFormatter.formatTickDurations(tickStatistics.duration10Sec())).append(text("; "))
+ .append(StatisticFormatter.formatTickDurations(tickStatistics.duration1Min()))
.build()
);
resp.replyPrefixed(empty());
@@ -110,22 +117,67 @@ public class HealthModule implements CommandModule {
resp.replyPrefixed(text("CPU usage from last 10s, 1m, 15m:"));
resp.replyPrefixed(text()
.content(" ")
- .append(formatCpuUsage(CpuMonitor.systemLoad10SecAvg())).append(text(", "))
- .append(formatCpuUsage(CpuMonitor.systemLoad1MinAvg())).append(text(", "))
- .append(formatCpuUsage(CpuMonitor.systemLoad15MinAvg()))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad10SecAvg())).append(text(", "))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad1MinAvg())).append(text(", "))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad15MinAvg()))
.append(text(" (system)", DARK_GRAY))
.build()
);
resp.replyPrefixed(text()
.content(" ")
- .append(formatCpuUsage(CpuMonitor.processLoad10SecAvg())).append(text(", "))
- .append(formatCpuUsage(CpuMonitor.processLoad1MinAvg())).append(text(", "))
- .append(formatCpuUsage(CpuMonitor.processLoad15MinAvg()))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad10SecAvg())).append(text(", "))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad1MinAvg())).append(text(", "))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad15MinAvg()))
.append(text(" (process)", DARK_GRAY))
.build()
);
}
+ private static void ping(SparkPlatform platform, CommandSender sender, CommandResponseHandler resp, Arguments arguments) {
+ PingStatistics pingStatistics = platform.getPingStatistics();
+ if (pingStatistics == null) {
+ resp.replyPrefixed(text("Ping data is not available on this platform."));
+ return;
+ }
+
+ // lookup for specific player
+ Set<String> players = arguments.stringFlag("player");
+ if (!players.isEmpty()) {
+ for (String player : players) {
+ PingStatistics.PlayerPing playerPing = pingStatistics.query(player);
+ if (playerPing == null) {
+ resp.replyPrefixed(text("Ping data is not available for '" + player + "'."));
+ } else {
+ resp.replyPrefixed(text()
+ .content("Player ")
+ .append(text(playerPing.name(), WHITE))
+ .append(text(" has "))
+ .append(StatisticFormatter.formatPingRtt(playerPing.ping()))
+ .append(text(" ms ping."))
+ .build()
+ );
+ }
+ }
+ return;
+ }
+
+ PingSummary summary = pingStatistics.currentSummary();
+ RollingAverage average = pingStatistics.getPingAverage();
+
+ if (summary.total() == 0 && average.getSamples() == 0) {
+ resp.replyPrefixed(text("There is not enough data to show ping averages yet. Please try again later."));
+ return;
+ }
+
+ resp.replyPrefixed(text("Average Pings (min/med/95%ile/max ms) from now, last 15m:"));
+ resp.replyPrefixed(text()
+ .content(" ")
+ .append(StatisticFormatter.formatPingRtts(summary.min(), summary.median(), summary.percentile95th(), summary.max())).append(text("; "))
+ .append(StatisticFormatter.formatPingRtts(average.min(), average.median(), average.percentile95th(), average.max()))
+ .build()
+ );
+ }
+
private static void healthReport(SparkPlatform platform, CommandSender sender, CommandResponseHandler resp, Arguments arguments) {
resp.replyPrefixed(text("Generating server health report..."));
List<Component> report = new LinkedList<>();
@@ -159,11 +211,11 @@ public class HealthModule implements CommandModule {
);
report.add(text()
.content(" ")
- .append(formatTps(tickStatistics.tps5Sec())).append(text(", "))
- .append(formatTps(tickStatistics.tps10Sec())).append(text(", "))
- .append(formatTps(tickStatistics.tps1Min())).append(text(", "))
- .append(formatTps(tickStatistics.tps5Min())).append(text(", "))
- .append(formatTps(tickStatistics.tps15Min()))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps5Sec())).append(text(", "))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps10Sec())).append(text(", "))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps1Min())).append(text(", "))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps5Min())).append(text(", "))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps15Min()))
.build()
);
report.add(empty());
@@ -177,8 +229,8 @@ public class HealthModule implements CommandModule {
);
report.add(text()
.content(" ")
- .append(formatTickDurations(tickStatistics.duration10Sec())).append(text("; "))
- .append(formatTickDurations(tickStatistics.duration1Min()))
+ .append(StatisticFormatter.formatTickDurations(tickStatistics.duration10Sec())).append(text("; "))
+ .append(StatisticFormatter.formatTickDurations(tickStatistics.duration1Min()))
.build()
);
report.add(empty());
@@ -194,17 +246,17 @@ public class HealthModule implements CommandModule {
);
report.add(text()
.content(" ")
- .append(formatCpuUsage(CpuMonitor.systemLoad10SecAvg())).append(text(", "))
- .append(formatCpuUsage(CpuMonitor.systemLoad1MinAvg())).append(text(", "))
- .append(formatCpuUsage(CpuMonitor.systemLoad15MinAvg()))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad10SecAvg())).append(text(", "))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad1MinAvg())).append(text(", "))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad15MinAvg()))
.append(text(" (system)", DARK_GRAY))
.build()
);
report.add(text()
.content(" ")
- .append(formatCpuUsage(CpuMonitor.processLoad10SecAvg())).append(text(", "))
- .append(formatCpuUsage(CpuMonitor.processLoad1MinAvg())).append(text(", "))
- .append(formatCpuUsage(CpuMonitor.processLoad15MinAvg()))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad10SecAvg())).append(text(", "))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad1MinAvg())).append(text(", "))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad15MinAvg()))
.append(text(" (process)", DARK_GRAY))
.build()
);
@@ -232,7 +284,7 @@ public class HealthModule implements CommandModule {
.append(text(")", GRAY))
.build()
);
- report.add(text().content(" ").append(generateMemoryUsageDiagram(heapUsage, 40)).build());
+ report.add(text().content(" ").append(StatisticFormatter.generateMemoryUsageDiagram(heapUsage, 60)).build());
report.add(empty());
}
@@ -283,7 +335,7 @@ public class HealthModule implements CommandModule {
.append(text(")", GRAY))
.build()
);
- report.add(text().content(" ").append(generateMemoryPoolDiagram(usage, collectionUsage, 40)).build());
+ report.add(text().content(" ").append(StatisticFormatter.generateMemoryPoolDiagram(usage, collectionUsage, 60)).build());
if (collectionUsage != null) {
report.add(text()
@@ -327,127 +379,8 @@ public class HealthModule implements CommandModule {
.append(text(")", GRAY))
.build()
);
- report.add(text().content(" ").append(generateDiskUsageDiagram(used, total, 40)).build());
+ report.add(text().content(" ").append(StatisticFormatter.generateDiskUsageDiagram(used, total, 60)).build());
report.add(empty());
}
- public static TextComponent formatTps(double tps) {
- TextColor color;
- if (tps > 18.0) {
- color = GREEN;
- } else if (tps > 16.0) {
- color = YELLOW;
- } else {
- color = RED;
- }
-
- return text((tps > 20.0 ? "*" : "") + Math.min(Math.round(tps * 100.0) / 100.0, 20.0), color);
- }
-
- public static TextComponent formatTickDurations(RollingAverage average) {
- return text()
- .append(formatTickDuration(average.min()))
- .append(text('/', GRAY))
- .append(formatTickDuration(average.median()))
- .append(text('/', GRAY))
- .append(formatTickDuration(average.percentile95th()))
- .append(text('/', GRAY))
- .append(formatTickDuration(average.max()))
- .build();
- }
-
- public static TextComponent formatTickDuration(double duration) {
- TextColor color;
- if (duration >= 50d) {
- color = RED;
- } else if (duration >= 40d) {
- color = YELLOW;
- } else {
- color = GREEN;
- }
-
- return text(String.format("%.1f", duration), color);
- }
-
- public static TextComponent formatCpuUsage(double usage) {
- TextColor color;
- if (usage > 0.9) {
- color = RED;
- } else if (usage > 0.65) {
- color = YELLOW;
- } else {
- color = GREEN;
- }
-
- return text(FormatUtil.percent(usage, 1d), color);
- }
-
- private static TextComponent generateMemoryUsageDiagram(MemoryUsage usage, int length) {
- double used = usage.getUsed();
- double committed = usage.getCommitted();
- double max = usage.getMax();
-
- int usedChars = (int) ((used * length) / max);
- int committedChars = (int) ((committed * length) / max);
-
- TextComponent.Builder line = text().content(Strings.repeat("/", usedChars)).color(GRAY);
- if (committedChars > usedChars) {
- line.append(text(Strings.repeat(" ", (committedChars - usedChars) - 1)));
- line.append(text("|", YELLOW));
- }
- if (length > committedChars) {
- line.append(text(Strings.repeat(" ", (length - committedChars))));
- }
-
- return text()
- .append(text("[", DARK_GRAY))
- .append(line.build())
- .append(text("]", DARK_GRAY))
- .build();
- }
-
- private static TextComponent generateMemoryPoolDiagram(MemoryUsage usage, MemoryUsage collectionUsage, int length) {
- double used = usage.getUsed();
- double collectionUsed = used;
- if (collectionUsage != null) {
- collectionUsed = collectionUsage.getUsed();
- }
- double committed = usage.getCommitted();
- double max = usage.getMax();
-
- int usedChars = (int) ((used * length) / max);
- int collectionUsedChars = (int) ((collectionUsed * length) / max);
- int committedChars = (int) ((committed * length) / max);
-
- TextComponent.Builder line = text().content(Strings.repeat("/", collectionUsedChars)).color(GRAY);
-
- if (usedChars > collectionUsedChars) {
- line.append(text("|", RED));
- line.append(text(Strings.repeat("/", (usedChars - collectionUsedChars) - 1), GRAY));
- }
- if (committedChars > usedChars) {
- line.append(text(Strings.repeat(" ", (committedChars - usedChars) - 1)));
- line.append(text("|", YELLOW));
- }
- if (length > committedChars) {
- line.append(text(Strings.repeat(" ", (length - committedChars))));
- }
-
- return text()
- .append(text("[", DARK_GRAY))
- .append(line.build())
- .append(text("]", DARK_GRAY))
- .build();
- }
-
- private static TextComponent generateDiskUsageDiagram(double used, double max, int length) {
- int usedChars = (int) ((used * length) / max);
- String line = Strings.repeat("/", usedChars) + Strings.repeat(" ", length - usedChars);
- return text()
- .append(text("[", DARK_GRAY))
- .append(text(line, GRAY))
- .append(text("]", DARK_GRAY))
- .build();
- }
-
}
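
The new executor backs a `/spark ping` command: with no flags it prints the min/med/95%ile/max summary for the current poll alongside the 15-minute rolling window, while `--player <name>` queries a single player (case-insensitively). An illustrative interaction, with made-up values:

    /spark ping
    Average Pings (min/med/95%ile/max ms) from now, last 15m:
      2/24/78/180; 3/25/80/175

    /spark ping --player Notch
    Player Notch has 23 ms ping.
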
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/MonitoringExecutor.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/MonitoringExecutor.java
new file mode 100644
index 0000000..779dbbf
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/MonitoringExecutor.java
@@ -0,0 +1,16 @@
+package me.lucko.spark.common.monitor;
+
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+
+public enum MonitoringExecutor {
+ ;
+
+ /** The executor used to monitor & calculate rolling averages. */
+ public static final ScheduledExecutorService INSTANCE = Executors.newSingleThreadScheduledExecutor(r -> {
+ Thread thread = Executors.defaultThreadFactory().newThread(r);
+ thread.setName("spark-monitor");
+ thread.setDaemon(true);
+ return thread;
+ });
+}
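
Consolidating onto one shared daemon thread means CpuMonitor and PingStatistics no longer each spin up their own scheduler. A sketch of scheduling another periodic sampler on it (the task body is illustrative):

    import java.util.concurrent.TimeUnit;

    // Runs once per second on the shared "spark-monitor" daemon thread.
    Runnable sampler = () -> {
        // sample a value and feed it into a RollingAverage
    };
    MonitoringExecutor.INSTANCE.scheduleAtFixedRate(sampler, 1, 1, TimeUnit.SECONDS);
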
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuMonitor.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuMonitor.java
index 43e1f90..b4ab831 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuMonitor.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuMonitor.java
@@ -20,12 +20,11 @@
package me.lucko.spark.common.monitor.cpu;
+import me.lucko.spark.common.monitor.MonitoringExecutor;
import me.lucko.spark.common.util.RollingAverage;
import java.lang.management.ManagementFactory;
import java.math.BigDecimal;
-import java.util.concurrent.Executors;
-import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import javax.management.JMX;
@@ -42,13 +41,6 @@ public enum CpuMonitor {
private static final String OPERATING_SYSTEM_BEAN = "java.lang:type=OperatingSystem";
/** The OperatingSystemMXBean instance */
private static final OperatingSystemMXBean BEAN;
- /** The executor used to monitor & calculate rolling averages. */
- private static final ScheduledExecutorService EXECUTOR = Executors.newSingleThreadScheduledExecutor(r -> {
- Thread thread = Executors.defaultThreadFactory().newThread(r);
- thread.setName("spark-cpu-monitor");
- thread.setDaemon(true);
- return thread;
- });
// Rolling averages for system/process data
private static final RollingAverage SYSTEM_AVERAGE_10_SEC = new RollingAverage(10);
@@ -68,7 +60,7 @@ public enum CpuMonitor {
}
// schedule rolling average calculations.
- EXECUTOR.scheduleAtFixedRate(new RollingAverageCollectionTask(), 1, 1, TimeUnit.SECONDS);
+ MonitoringExecutor.INSTANCE.scheduleAtFixedRate(new RollingAverageCollectionTask(), 1, 1, TimeUnit.SECONDS);
}
/**
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PingStatistics.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PingStatistics.java
new file mode 100644
index 0000000..8b5b5b3
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PingStatistics.java
@@ -0,0 +1,148 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck)
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.monitor.ping;
+
+import me.lucko.spark.common.monitor.MonitoringExecutor;
+import me.lucko.spark.common.util.RollingAverage;
+
+import org.checkerframework.checker.nullness.qual.Nullable;
+
+import java.math.BigDecimal;
+import java.util.Map;
+import java.util.concurrent.ScheduledFuture;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * Provides statistics for player ping RTT to the server.
+ */
+public final class PingStatistics implements Runnable, AutoCloseable {
+ private static final int WINDOW_SIZE_SECONDS = (int) TimeUnit.MINUTES.toSeconds(15);
+ private static final int QUERY_RATE_SECONDS = 10;
+
+ /** The platform function that provides player ping times */
+ private final PlayerPingProvider provider;
+ /** Rolling average of the median ping across all players */
+ private final RollingAverage rollingAverage = new RollingAverage(WINDOW_SIZE_SECONDS / QUERY_RATE_SECONDS);
+
+ /** The scheduler task that polls pings and calculates the rolling average */
+ private ScheduledFuture<?> future;
+
+ public PingStatistics(PlayerPingProvider provider) {
+ this.provider = provider;
+ }
+
+ /**
+ * Starts the statistics monitor
+ */
+ public void start() {
+ if (this.future != null) {
+ throw new IllegalStateException();
+ }
+ this.future = MonitoringExecutor.INSTANCE.scheduleAtFixedRate(this, QUERY_RATE_SECONDS, QUERY_RATE_SECONDS, TimeUnit.SECONDS);
+ }
+
+ @Override
+ public void close() {
+ if (this.future != null) {
+ this.future.cancel(false);
+ this.future = null;
+ }
+ }
+
+ @Override
+ public void run() {
+ PingSummary summary = currentSummary();
+ if (summary.total() == 0) {
+ return;
+ }
+
+ this.rollingAverage.add(BigDecimal.valueOf(summary.median()));
+ }
+
+ /**
+ * Gets the ping rolling average.
+ *
+ * @return the rolling average
+ */
+ public RollingAverage getPingAverage() {
+ return this.rollingAverage;
+ }
+
+ /**
+ * Queries a summary of current player pings.
+ *
+ * @return a summary of current pings
+ */
+ public PingSummary currentSummary() {
+ Map<String, Integer> results = this.provider.poll();
+ int[] values = results.values().stream().filter(ping -> ping > 0).mapToInt(i -> i).toArray();
+ return values.length == 0
+ ? new PingSummary(new int[]{0})
+ : new PingSummary(values);
+ }
+
+ /**
+ * Queries the ping of a given player.
+ *
+ * @param playerName the name of the player
+ * @return the ping, if available
+ */
+ public @Nullable PlayerPing query(String playerName) {
+ Map<String, Integer> results = this.provider.poll();
+
+ // try exact match
+ Integer result = results.get(playerName);
+ if (result != null) {
+ return new PlayerPing(playerName, result);
+ }
+
+ // try case-insensitive match
+ for (Map.Entry<String, Integer> entry : results.entrySet()) {
+ if (entry.getKey().equalsIgnoreCase(playerName)) {
+ return new PlayerPing(
+ entry.getKey(),
+ entry.getValue()
+ );
+ }
+ }
+
+ return null;
+ }
+
+ public static final class PlayerPing {
+ private final String name;
+ private final int ping;
+
+ PlayerPing(String name, int ping) {
+ this.name = name;
+ this.ping = ping;
+ }
+
+ public String name() {
+ return this.name;
+ }
+
+ public int ping() {
+ return this.ping;
+ }
+ }
+
+}
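
The intended lifecycle is: construct with a platform provider, start() to begin the 10-second polling task, read summaries on demand, and close() on shutdown. A sketch, with a lambda standing in for a real platform hook:

    PlayerPingProvider provider = () -> ImmutableMap.of("Notch", 23, "jeb_", 48);
    PingStatistics statistics = new PingStatistics(provider);
    statistics.start(); // polls every 10s; medians feed the 15m rolling average

    PingSummary summary = statistics.currentSummary();
    summary.median(); // 48.0 here: nearest-rank median of the sorted [23, 48]

    statistics.close(); // cancels the scheduled task
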
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PingSummary.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PingSummary.java
new file mode 100644
index 0000000..024d27d
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PingSummary.java
@@ -0,0 +1,81 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck)
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.monitor.ping;
+
+import java.util.Arrays;
+
+public final class PingSummary {
+
+ private final int[] values;
+ private final int total;
+ private final int max;
+ private final int min;
+ private final double mean;
+
+ public PingSummary(int[] values) {
+ Arrays.sort(values);
+ this.values = values;
+
+ int total = 0;
+ for (int value : values) {
+ total += value;
+ }
+ this.total = total;
+
+ this.mean = (double) total / values.length;
+ this.max = values[values.length - 1];
+ this.min = values[0];
+ }
+
+ public int total() {
+ return this.total;
+ }
+
+ public double mean() {
+ return this.mean;
+ }
+
+ public int max() {
+ return this.max;
+ }
+
+ public int min() {
+ return this.min;
+ }
+
+ public int percentile(double percentile) {
+ if (percentile < 0 || percentile > 1) {
+ throw new IllegalArgumentException("Invalid percentile " + percentile);
+ }
+
+ int rank = (int) Math.ceil(percentile * (this.values.length - 1));
+ return this.values[rank];
+ }
+
+ public double median() {
+ return percentile(0.50d);
+ }
+
+ public double percentile95th() {
+ return percentile(0.95d);
+ }
+
+}
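
percentile() uses the nearest-rank method on the array sorted by the constructor, so results are deliberately coarse for small sample counts. A worked sketch:

    // the constructor sorts the values to [5, 20, 30, 40]
    PingSummary summary = new PingSummary(new int[]{30, 5, 40, 20});
    summary.min();            // 5
    summary.max();            // 40
    summary.mean();           // 23.75  (total 95 / 4 samples)
    summary.median();         // 30.0   rank = ceil(0.50 * 3) = 2 -> values[2]
    summary.percentile95th(); // 40.0   rank = ceil(0.95 * 3) = 3 -> values[3]
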
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PlayerPingProvider.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PlayerPingProvider.java
new file mode 100644
index 0000000..7576573
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PlayerPingProvider.java
@@ -0,0 +1,40 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck)
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.monitor.ping;
+
+import java.util.Map;
+
+/**
+ * Provides information about player ping RTT.
+ */
+@FunctionalInterface
+public interface PlayerPingProvider {
+
+ /**
+ * Poll current player pings in milliseconds.
+ *
+ * <p>The map keys are player names and the values are the ping values.</p>
+ *
+ * @return a map of player pings
+ */
+ Map<String, Integer> poll();
+
+}
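
Because the interface is a @FunctionalInterface with a single poll() method, tests and simple platforms can supply it as a lambda; the snapshot it returns is consumed immediately, so an immutable map is a natural fit:

    PlayerPingProvider provider = () -> ImmutableMap.of("Notch", 23); // name -> ping (ms)
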
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java
index fce45ec..5b8d1d4 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java
@@ -26,6 +26,7 @@ import me.lucko.spark.common.monitor.cpu.CpuMonitor;
import me.lucko.spark.common.monitor.disk.DiskUsage;
import me.lucko.spark.common.monitor.memory.GarbageCollectorStatistics;
import me.lucko.spark.common.monitor.memory.MemoryInfo;
+import me.lucko.spark.common.monitor.ping.PingStatistics;
import me.lucko.spark.common.monitor.tick.TickStatistics;
import me.lucko.spark.common.util.RollingAverage;
import me.lucko.spark.proto.SparkProtos.PlatformStatistics;
@@ -148,18 +149,26 @@ public class PlatformStatisticsProvider {
);
if (tickStatistics.isDurationSupported()) {
builder.setMspt(PlatformStatistics.Mspt.newBuilder()
- .setLast1M(msptValues(tickStatistics.duration1Min()))
- .setLast5M(msptValues(tickStatistics.duration5Min()))
+ .setLast1M(rollingAverageValues(tickStatistics.duration1Min()))
+ .setLast5M(rollingAverageValues(tickStatistics.duration5Min()))
.build()
);
}
}
+ PingStatistics pingStatistics = this.platform.getPingStatistics();
+ if (pingStatistics != null && pingStatistics.getPingAverage().getSamples() != 0) {
+ builder.setPing(PlatformStatistics.Ping.newBuilder()
+ .setLast15M(rollingAverageValues(pingStatistics.getPingAverage()))
+ .build()
+ );
+ }
+
return builder.build();
}
- private static PlatformStatistics.Mspt.Values msptValues(RollingAverage rollingAverage) {
- return PlatformStatistics.Mspt.Values.newBuilder()
+ private static PlatformStatistics.RollingAverageValues rollingAverageValues(RollingAverage rollingAverage) {
+ return PlatformStatistics.RollingAverageValues.newBuilder()
.setMean(rollingAverage.mean())
.setMax(rollingAverage.max())
.setMin(rollingAverage.min())
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/RollingAverage.java b/spark-common/src/main/java/me/lucko/spark/common/util/RollingAverage.java
index 87c41a4..57dfdff 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/util/RollingAverage.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/RollingAverage.java
@@ -39,6 +39,12 @@ public class RollingAverage implements DoubleAverageInfo {
this.samples = new ArrayDeque<>(this.windowSize + 1);
}
+ public int getSamples() {
+ synchronized (this) {
+ return this.samples.size();
+ }
+ }
+
public void add(BigDecimal num) {
synchronized (this) {
this.total = this.total.add(num);
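
The new getSamples() accessor lets callers distinguish "no data yet" from a genuine zero; both the ping command and the statistics provider gate on it, along the lines of:

    if (average.getSamples() == 0) {
        // window is empty: omit the Ping message / report "not enough data"
    }
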
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/StatisticFormatter.java b/spark-common/src/main/java/me/lucko/spark/common/util/StatisticFormatter.java
new file mode 100644
index 0000000..9a2850e
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/StatisticFormatter.java
@@ -0,0 +1,187 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck)
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see .
+ */
+
+package me.lucko.spark.common.util;
+
+import com.google.common.base.Strings;
+
+import net.kyori.adventure.text.Component;
+import net.kyori.adventure.text.TextComponent;
+import net.kyori.adventure.text.format.TextColor;
+
+import java.lang.management.MemoryUsage;
+
+import static net.kyori.adventure.text.Component.text;
+import static net.kyori.adventure.text.format.NamedTextColor.DARK_GRAY;
+import static net.kyori.adventure.text.format.NamedTextColor.GRAY;
+import static net.kyori.adventure.text.format.NamedTextColor.GREEN;
+import static net.kyori.adventure.text.format.NamedTextColor.RED;
+import static net.kyori.adventure.text.format.NamedTextColor.YELLOW;
+
+public enum StatisticFormatter {
+ ;
+
+ private static final String BAR_CHARACTER = "|";
+
+ public static TextComponent formatTps(double tps) {
+ TextColor color;
+ if (tps > 18.0) {
+ color = GREEN;
+ } else if (tps > 16.0) {
+ color = YELLOW;
+ } else {
+ color = RED;
+ }
+
+ return text((tps > 20.0 ? "*" : "") + Math.min(Math.round(tps * 100.0) / 100.0, 20.0), color);
+ }
+
+ public static TextComponent formatTickDurations(RollingAverage average) {
+ return text()
+ .append(formatTickDuration(average.min()))
+ .append(text('/', GRAY))
+ .append(formatTickDuration(average.median()))
+ .append(text('/', GRAY))
+ .append(formatTickDuration(average.percentile95th()))
+ .append(text('/', GRAY))
+ .append(formatTickDuration(average.max()))
+ .build();
+ }
+
+ public static TextComponent formatTickDuration(double duration) {
+ TextColor color;
+ if (duration >= 50d) {
+ color = RED;
+ } else if (duration >= 40d) {
+ color = YELLOW;
+ } else {
+ color = GREEN;
+ }
+
+ return text(String.format("%.1f", duration), color);
+ }
+
+ public static TextComponent formatCpuUsage(double usage) {
+ TextColor color;
+ if (usage > 0.9) {
+ color = RED;
+ } else if (usage > 0.65) {
+ color = YELLOW;
+ } else {
+ color = GREEN;
+ }
+
+ return text(FormatUtil.percent(usage, 1d), color);
+ }
+
+ public static TextComponent formatPingRtts(double min, double median, double percentile95th, double max) {
+ return text()
+ .append(formatPingRtt(min))
+ .append(text('/', GRAY))
+ .append(formatPingRtt(median))
+ .append(text('/', GRAY))
+ .append(formatPingRtt(percentile95th))
+ .append(text('/', GRAY))
+ .append(formatPingRtt(max))
+ .build();
+ }
+
+ public static TextComponent formatPingRtt(double ping) {
+ TextColor color;
+ if (ping >= 200) {
+ color = RED;
+ } else if (ping >= 100) {
+ color = YELLOW;
+ } else {
+ color = GREEN;
+ }
+
+ return text((int) Math.ceil(ping), color);
+ }
+
+ public static TextComponent generateMemoryUsageDiagram(MemoryUsage usage, int length) {
+ double used = usage.getUsed();
+ double committed = usage.getCommitted();
+ double max = usage.getMax();
+
+ int usedChars = (int) ((used * length) / max);
+ int committedChars = (int) ((committed * length) / max);
+
+ TextComponent.Builder line = text().content(Strings.repeat(BAR_CHARACTER, usedChars)).color(YELLOW);
+ if (committedChars > usedChars) {
+ line.append(text(Strings.repeat(BAR_CHARACTER, (committedChars - usedChars) - 1), GRAY));
+ line.append(Component.text(BAR_CHARACTER, RED));
+ }
+ if (length > committedChars) {
+ line.append(text(Strings.repeat(BAR_CHARACTER, (length - committedChars)), GRAY));
+ }
+
+ return text()
+ .append(text("[", DARK_GRAY))
+ .append(line.build())
+ .append(text("]", DARK_GRAY))
+ .build();
+ }
+
+ public static TextComponent generateMemoryPoolDiagram(MemoryUsage usage, MemoryUsage collectionUsage, int length) {
+ double used = usage.getUsed();
+ double collectionUsed = used;
+ if (collectionUsage != null) {
+ collectionUsed = collectionUsage.getUsed();
+ }
+ double committed = usage.getCommitted();
+ double max = usage.getMax();
+
+ int usedChars = (int) ((used * length) / max);
+ int collectionUsedChars = (int) ((collectionUsed * length) / max);
+ int committedChars = (int) ((committed * length) / max);
+
+ TextComponent.Builder line = text().content(Strings.repeat(BAR_CHARACTER, collectionUsedChars)).color(YELLOW);
+
+ if (usedChars > collectionUsedChars) {
+ line.append(Component.text(BAR_CHARACTER, RED));
+ line.append(text(Strings.repeat(BAR_CHARACTER, (usedChars - collectionUsedChars) - 1), YELLOW));
+ }
+ if (committedChars > usedChars) {
+ line.append(text(Strings.repeat(BAR_CHARACTER, (committedChars - usedChars) - 1), GRAY));
+ line.append(Component.text(BAR_CHARACTER, YELLOW));
+ }
+ if (length > committedChars) {
+ line.append(text(Strings.repeat(BAR_CHARACTER, (length - committedChars)), GRAY));
+ }
+
+ return text()
+ .append(text("[", DARK_GRAY))
+ .append(line.build())
+ .append(text("]", DARK_GRAY))
+ .build();
+ }
+
+ public static TextComponent generateDiskUsageDiagram(double used, double max, int length) {
+ int usedChars = (int) ((used * length) / max);
+ int freeChars = length - usedChars;
+ return text()
+ .append(text("[", DARK_GRAY))
+ .append(text(Strings.repeat(BAR_CHARACTER, usedChars), YELLOW))
+ .append(text(Strings.repeat(BAR_CHARACTER, freeChars), GRAY))
+ .append(text("]", DARK_GRAY))
+ .build();
+ }
+}
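
A sketch exercising the relocated formatter directly; the MemoryUsage values are illustrative (java.lang.management.MemoryUsage takes init/used/committed/max, here in arbitrary units):

    import java.lang.management.MemoryUsage;

    StatisticFormatter.formatTps(19.8);    // green text: tps > 18.0
    StatisticFormatter.formatPingRtt(150); // yellow text: 100 <= ping < 200

    MemoryUsage heap = new MemoryUsage(0, 512, 768, 1024);
    StatisticFormatter.generateMemoryUsageDiagram(heap, 60);
    // 30 yellow bars (used), 14 gray + 1 red bar (committed boundary), 15 gray (free)
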
diff --git a/spark-common/src/main/proto/spark/spark.proto b/spark-common/src/main/proto/spark/spark.proto
index ae04cd7..d800762 100644
--- a/spark-common/src/main/proto/spark/spark.proto
+++ b/spark-common/src/main/proto/spark/spark.proto
@@ -85,6 +85,7 @@ message PlatformStatistics {
int64 uptime = 3;
Tps tps = 4; // optional
Mspt mspt = 5; // optional
+ Ping ping = 6; // optional
message Memory {
MemoryPool heap = 1;
@@ -108,16 +109,20 @@ message PlatformStatistics {
}
message Mspt {
- Values last1m = 1;
- Values last5m = 2;
-
- message Values {
- double mean = 1;
- double max = 2;
- double min = 3;
- double median = 4;
- double percentile95 = 5;
- }
+ RollingAverageValues last1m = 1;
+ RollingAverageValues last5m = 2;
+ }
+
+ message Ping {
+ RollingAverageValues last15m = 1;
+ }
+
+ message RollingAverageValues {
+ double mean = 1;
+ double max = 2;
+ double min = 3;
+ double median = 4;
+ double percentile95 = 5;
}
}
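
Renaming Mspt.Values to a shared RollingAverageValues keeps the same field numbers and types, so existing payloads remain wire-compatible, and the new Ping field is simply absent when no provider exists. A hedged sketch of reading it from a decoded payload via the generated Java classes:

    PlatformStatistics stats = /* decoded from a payload */;
    if (stats.hasPing()) {
        double medianPing = stats.getPing().getLast15M().getMedian(); // ms
    }
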
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricPlayerPingProvider.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricPlayerPingProvider.java
new file mode 100644
index 0000000..bae6c41
--- /dev/null
+++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricPlayerPingProvider.java
@@ -0,0 +1,47 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck)
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.fabric;
+
+import com.google.common.collect.ImmutableMap;
+
+import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
+
+import net.minecraft.server.MinecraftServer;
+import net.minecraft.server.network.ServerPlayerEntity;
+
+import java.util.Map;
+
+public class FabricPlayerPingProvider implements PlayerPingProvider {
+ private final MinecraftServer server;
+
+ public FabricPlayerPingProvider(MinecraftServer server) {
+ this.server = server;
+ }
+
+ @Override
+ public Map<String, Integer> poll() {
+ ImmutableMap.Builder<String, Integer> builder = ImmutableMap.builder();
+ for (ServerPlayerEntity player : this.server.getPlayerManager().getPlayerList()) {
+ builder.put(player.getGameProfile().getName(), player.pingMilliseconds);
+ }
+ return builder.build();
+ }
+}
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/placeholder/SparkFabricPlaceholderApi.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/placeholder/SparkFabricPlaceholderApi.java
index 9171cbb..b9cff691 100644
--- a/spark-fabric/src/main/java/me/lucko/spark/fabric/placeholder/SparkFabricPlaceholderApi.java
+++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/placeholder/SparkFabricPlaceholderApi.java
@@ -4,10 +4,10 @@ import eu.pb4.placeholders.PlaceholderAPI;
import eu.pb4.placeholders.PlaceholderResult;
import me.lucko.spark.common.SparkPlatform;
-import me.lucko.spark.common.command.modules.HealthModule;
import me.lucko.spark.common.monitor.cpu.CpuMonitor;
import me.lucko.spark.common.monitor.tick.TickStatistics;
import me.lucko.spark.common.util.RollingAverage;
+import me.lucko.spark.common.util.StatisticFormatter;
import net.kyori.adventure.text.Component;
import net.kyori.adventure.text.serializer.gson.GsonComponentSerializer;
@@ -47,16 +47,16 @@ public class SparkFabricPlaceholderApi {
if (tps == null) {
return PlaceholderResult.invalid("Invalid argument");
} else {
- return PlaceholderResult.value(toText(HealthModule.formatTps(tps)));
+ return PlaceholderResult.value(toText(StatisticFormatter.formatTps(tps)));
}
} else {
return PlaceholderResult.value(toText(
Component.text()
- .append(HealthModule.formatTps(tickStatistics.tps5Sec())).append(Component.text(", "))
- .append(HealthModule.formatTps(tickStatistics.tps10Sec())).append(Component.text(", "))
- .append(HealthModule.formatTps(tickStatistics.tps1Min())).append(Component.text(", "))
- .append(HealthModule.formatTps(tickStatistics.tps5Min())).append(Component.text(", "))
- .append(HealthModule.formatTps(tickStatistics.tps15Min()))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps5Sec())).append(Component.text(", "))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps10Sec())).append(Component.text(", "))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps1Min())).append(Component.text(", "))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps5Min())).append(Component.text(", "))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps15Min()))
.build()
));
}
@@ -84,13 +84,13 @@ public class SparkFabricPlaceholderApi {
if (duration == null) {
return PlaceholderResult.invalid("Invalid argument");
} else {
- return PlaceholderResult.value(toText(HealthModule.formatTickDurations(duration)));
+ return PlaceholderResult.value(toText(StatisticFormatter.formatTickDurations(duration)));
}
} else {
return PlaceholderResult.value(toText(
Component.text()
- .append(HealthModule.formatTickDurations(tickStatistics.duration10Sec())).append(Component.text("; "))
- .append(HealthModule.formatTickDurations(tickStatistics.duration1Min()))
+ .append(StatisticFormatter.formatTickDurations(tickStatistics.duration10Sec())).append(Component.text("; "))
+ .append(StatisticFormatter.formatTickDurations(tickStatistics.duration1Min()))
.build()
));
}
@@ -115,14 +115,14 @@ public class SparkFabricPlaceholderApi {
if (usage == null) {
return PlaceholderResult.invalid("Invalid argument");
} else {
- return PlaceholderResult.value(toText(HealthModule.formatCpuUsage(usage)));
+ return PlaceholderResult.value(toText(StatisticFormatter.formatCpuUsage(usage)));
}
} else {
return PlaceholderResult.value(toText(
Component.text()
- .append(HealthModule.formatCpuUsage(CpuMonitor.systemLoad10SecAvg())).append(Component.text(", "))
- .append(HealthModule.formatCpuUsage(CpuMonitor.systemLoad1MinAvg())).append(Component.text(", "))
- .append(HealthModule.formatCpuUsage(CpuMonitor.systemLoad15MinAvg()))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad10SecAvg())).append(Component.text(", "))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad1MinAvg())).append(Component.text(", "))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad15MinAvg()))
.build()
));
}
@@ -147,14 +147,14 @@ public class SparkFabricPlaceholderApi {
if (usage == null) {
return PlaceholderResult.invalid("Invalid argument");
} else {
- return PlaceholderResult.value(toText(HealthModule.formatCpuUsage(usage)));
+ return PlaceholderResult.value(toText(StatisticFormatter.formatCpuUsage(usage)));
}
} else {
return PlaceholderResult.value(toText(
Component.text()
- .append(HealthModule.formatCpuUsage(CpuMonitor.processLoad10SecAvg())).append(Component.text(", "))
- .append(HealthModule.formatCpuUsage(CpuMonitor.processLoad1MinAvg())).append(Component.text(", "))
- .append(HealthModule.formatCpuUsage(CpuMonitor.processLoad15MinAvg()))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad10SecAvg())).append(Component.text(", "))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad1MinAvg())).append(Component.text(", "))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad15MinAvg()))
.build()
));
}
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java
index 617564a..6dc5483 100644
--- a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java
+++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java
@@ -29,11 +29,13 @@ import com.mojang.brigadier.suggestion.Suggestions;
import com.mojang.brigadier.suggestion.SuggestionsBuilder;
import me.lucko.fabric.api.permissions.v0.Permissions;
+import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
import me.lucko.spark.common.platform.PlatformInfo;
import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.common.tick.TickReporter;
import me.lucko.spark.fabric.FabricCommandSender;
import me.lucko.spark.fabric.FabricPlatformInfo;
+import me.lucko.spark.fabric.FabricPlayerPingProvider;
import me.lucko.spark.fabric.FabricSparkMod;
import me.lucko.spark.fabric.FabricTickHook;
import me.lucko.spark.fabric.FabricTickReporter;
@@ -140,6 +142,11 @@ public class FabricServerSparkPlugin extends FabricSparkPlugin implements Comman
return new FabricTickReporter.Server();
}
+ @Override
+ public PlayerPingProvider createPlayerPingProvider() {
+ return new FabricPlayerPingProvider(this.server);
+ }
+
@Override
public PlatformInfo getPlatformInfo() {
return new FabricPlatformInfo(PlatformInfo.Type.SERVER);
diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/ForgePlayerPingProvider.java b/spark-forge/src/main/java/me/lucko/spark/forge/ForgePlayerPingProvider.java
new file mode 100644
index 0000000..f31cc5b
--- /dev/null
+++ b/spark-forge/src/main/java/me/lucko/spark/forge/ForgePlayerPingProvider.java
@@ -0,0 +1,47 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck)
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.forge;
+
+import com.google.common.collect.ImmutableMap;
+
+import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
+
+import net.minecraft.server.MinecraftServer;
+import net.minecraft.server.level.ServerPlayer;
+
+import java.util.Map;
+
+public class ForgePlayerPingProvider implements PlayerPingProvider {
+ private final MinecraftServer server;
+
+ public ForgePlayerPingProvider(MinecraftServer server) {
+ this.server = server;
+ }
+
+ @Override
+ public Map<String, Integer> poll() {
+ ImmutableMap.Builder<String, Integer> builder = ImmutableMap.builder();
+ for (ServerPlayer player : this.server.getPlayerList().getPlayers()) {
+ builder.put(player.getGameProfile().getName(), player.latency);
+ }
+ return builder.build();
+ }
+}
diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java
index c03bb05..bbfb8a5 100644
--- a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java
+++ b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java
@@ -29,11 +29,13 @@ import com.mojang.brigadier.suggestion.SuggestionProvider;
import com.mojang.brigadier.suggestion.Suggestions;
import com.mojang.brigadier.suggestion.SuggestionsBuilder;
+import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
import me.lucko.spark.common.platform.PlatformInfo;
import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.common.tick.TickReporter;
import me.lucko.spark.forge.ForgeCommandSender;
import me.lucko.spark.forge.ForgePlatformInfo;
+import me.lucko.spark.forge.ForgePlayerPingProvider;
import me.lucko.spark.forge.ForgeSparkMod;
import me.lucko.spark.forge.ForgeTickHook;
import me.lucko.spark.forge.ForgeTickReporter;
@@ -200,6 +202,11 @@ public class ForgeServerSparkPlugin extends ForgeSparkPlugin implements Command<
return new ForgeTickReporter(TickEvent.Type.SERVER);
}
+ @Override
+ public PlayerPingProvider createPlayerPingProvider() {
+ return new ForgePlayerPingProvider(this.server);
+ }
+
@Override
public PlatformInfo getPlatformInfo() {
return new ForgePlatformInfo(PlatformInfo.Type.SERVER);
diff --git a/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitPlayerPingProvider.java b/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitPlayerPingProvider.java
new file mode 100644
index 0000000..fc25d7c
--- /dev/null
+++ b/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitPlayerPingProvider.java
@@ -0,0 +1,47 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck)
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.nukkit;
+
+import com.google.common.collect.ImmutableMap;
+
+import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
+
+import cn.nukkit.Player;
+import cn.nukkit.Server;
+
+import java.util.Map;
+
+public class NukkitPlayerPingProvider implements PlayerPingProvider {
+ private final Server server;
+
+ public NukkitPlayerPingProvider(Server server) {
+ this.server = server;
+ }
+
+ @Override
+ public Map<String, Integer> poll() {
+ ImmutableMap.Builder<String, Integer> builder = ImmutableMap.builder();
+ for (Player player : this.server.getOnlinePlayers().values()) {
+ builder.put(player.getName(), player.getPing());
+ }
+ return builder.build();
+ }
+}
diff --git a/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitSparkPlugin.java b/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitSparkPlugin.java
index c9e099d..18132c3 100644
--- a/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitSparkPlugin.java
+++ b/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitSparkPlugin.java
@@ -23,6 +23,7 @@ package me.lucko.spark.nukkit;
import me.lucko.spark.api.Spark;
import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.SparkPlugin;
+import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
import me.lucko.spark.common.platform.PlatformInfo;
import me.lucko.spark.common.util.ClassSourceLookup;
@@ -107,6 +108,11 @@ public class NukkitSparkPlugin extends PluginBase implements SparkPlugin {
return new NukkitClassSourceLookup();
}
+ @Override
+ public PlayerPingProvider createPlayerPingProvider() {
+ return new NukkitPlayerPingProvider(getServer());
+ }
+
@Override
public PlatformInfo getPlatformInfo() {
return new NukkitPlatformInfo(getServer());
diff --git a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7PlayerPingProvider.java b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7PlayerPingProvider.java
new file mode 100644
index 0000000..8f4c15f
--- /dev/null
+++ b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7PlayerPingProvider.java
@@ -0,0 +1,47 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck)
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.sponge;
+
+import com.google.common.collect.ImmutableMap;
+
+import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
+
+import org.spongepowered.api.Server;
+import org.spongepowered.api.entity.living.player.Player;
+
+import java.util.Map;
+
+public class Sponge7PlayerPingProvider implements PlayerPingProvider {
+ private final Server server;
+
+ public Sponge7PlayerPingProvider(Server server) {
+ this.server = server;
+ }
+
+ @Override
+ public Map<String, Integer> poll() {
+ ImmutableMap.Builder<String, Integer> builder = ImmutableMap.builder();
+ for (Player player : this.server.getOnlinePlayers()) {
+ builder.put(player.getName(), player.getConnection().getLatency());
+ }
+ return builder.build();
+ }
+}
diff --git a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7SparkPlugin.java b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7SparkPlugin.java
index 7ac317b..1aed04a 100644
--- a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7SparkPlugin.java
+++ b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7SparkPlugin.java
@@ -25,6 +25,7 @@ import com.google.inject.Inject;
import me.lucko.spark.api.Spark;
import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.SparkPlugin;
+import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
import me.lucko.spark.common.platform.PlatformInfo;
import me.lucko.spark.common.sampler.ThreadDumper;
import me.lucko.spark.common.tick.TickHook;
@@ -144,6 +145,15 @@ public class Sponge7SparkPlugin implements SparkPlugin {
return new Sponge7TickHook(this);
}
+ @Override
+ public PlayerPingProvider createPlayerPingProvider() {
+ if (this.game.isServerAvailable()) {
+ return new Sponge7PlayerPingProvider(this.game.getServer());
+ } else {
+ return null;
+ }
+ }
+
@Override
public PlatformInfo getPlatformInfo() {
return new Sponge7PlatformInfo(this.game);
diff --git a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8PlayerPingProvider.java b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8PlayerPingProvider.java
new file mode 100644
index 0000000..2bcaf6a
--- /dev/null
+++ b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8PlayerPingProvider.java
@@ -0,0 +1,47 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck)
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.sponge;
+
+import com.google.common.collect.ImmutableMap;
+
+import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
+
+import org.spongepowered.api.Server;
+import org.spongepowered.api.entity.living.player.server.ServerPlayer;
+
+import java.util.Map;
+
+public class Sponge8PlayerPingProvider implements PlayerPingProvider {
+ private final Server server;
+
+ public Sponge8PlayerPingProvider(Server server) {
+ this.server = server;
+ }
+
+ @Override
+ public Map<String, Integer> poll() {
+ ImmutableMap.Builder<String, Integer> builder = ImmutableMap.builder();
+ for (ServerPlayer player : this.server.onlinePlayers()) {
+ builder.put(player.name(), player.connection().latency());
+ }
+ return builder.build();
+ }
+}
diff --git a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8SparkPlugin.java b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8SparkPlugin.java
index 0195b24..6eb2674 100644
--- a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8SparkPlugin.java
+++ b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8SparkPlugin.java
@@ -25,6 +25,7 @@ import com.google.inject.Inject;
import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.SparkPlugin;
import me.lucko.spark.common.command.sender.CommandSender;
+import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
import me.lucko.spark.common.platform.PlatformInfo;
import me.lucko.spark.common.sampler.ThreadDumper;
import me.lucko.spark.common.tick.TickHook;
@@ -146,6 +147,15 @@ public class Sponge8SparkPlugin implements SparkPlugin {
return new Sponge8TickHook(this.pluginContainer, this.game);
}
+ @Override
+ public PlayerPingProvider createPlayerPingProvider() {
+ if (this.game.isServerAvailable()) {
+ return new Sponge8PlayerPingProvider(this.game.server());
+ } else {
+ return null;
+ }
+ }
+
@Override
public PlatformInfo getPlatformInfo() {
return new Sponge8PlatformInfo(this.game);
diff --git a/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocityPlayerPingProvider.java b/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocityPlayerPingProvider.java
new file mode 100644
index 0000000..382ea22
--- /dev/null
+++ b/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocityPlayerPingProvider.java
@@ -0,0 +1,46 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck)
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.velocity;
+
+import com.google.common.collect.ImmutableMap;
+import com.velocitypowered.api.proxy.Player;
+import com.velocitypowered.api.proxy.ProxyServer;
+
+import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
+
+import java.util.Map;
+
+public class VelocityPlayerPingProvider implements PlayerPingProvider {
+ private final ProxyServer proxy;
+
+ public VelocityPlayerPingProvider(ProxyServer proxy) {
+ this.proxy = proxy;
+ }
+
+ @Override
+ public Map<String, Integer> poll() {
+ ImmutableMap.Builder<String, Integer> builder = ImmutableMap.builder();
+ for (Player player : this.proxy.getAllPlayers()) {
+ builder.put(player.getUsername(), (int) player.getPing());
+ }
+ return builder.build();
+ }
+}
diff --git a/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocitySparkPlugin.java b/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocitySparkPlugin.java
index 698aab0..7d9ced8 100644
--- a/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocitySparkPlugin.java
+++ b/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocitySparkPlugin.java
@@ -32,6 +32,7 @@ import com.velocitypowered.api.proxy.ProxyServer;
import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.SparkPlugin;
+import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
import me.lucko.spark.common.platform.PlatformInfo;
import me.lucko.spark.common.util.ClassSourceLookup;
@@ -132,6 +133,11 @@ public class VelocitySparkPlugin implements SparkPlugin, SimpleCommand {
return new VelocityClassSourceLookup(this.proxy.getPluginManager());
}
+ @Override
+ public PlayerPingProvider createPlayerPingProvider() {
+ return new VelocityPlayerPingProvider(this.proxy);
+ }
+
@Override
public PlatformInfo getPlatformInfo() {
return new VelocityPlatformInfo(this.proxy);
diff --git a/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4PlayerPingProvider.java b/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4PlayerPingProvider.java
new file mode 100644
index 0000000..18f36a7
--- /dev/null
+++ b/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4PlayerPingProvider.java
@@ -0,0 +1,46 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck)
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.velocity;
+
+import com.google.common.collect.ImmutableMap;
+import com.velocitypowered.api.proxy.ProxyServer;
+import com.velocitypowered.api.proxy.connection.Player;
+
+import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
+
+import java.util.Map;
+
+public class Velocity4PlayerPingProvider implements PlayerPingProvider {
+ private final ProxyServer proxy;
+
+ public Velocity4PlayerPingProvider(ProxyServer proxy) {
+ this.proxy = proxy;
+ }
+
+ @Override
+ public Map<String, Integer> poll() {
+ ImmutableMap.Builder<String, Integer> builder = ImmutableMap.builder();
+ for (Player player : this.proxy.connectedPlayers()) {
+ builder.put(player.username(), (int) player.ping());
+ }
+ return builder.build();
+ }
+}
diff --git a/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4SparkPlugin.java b/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4SparkPlugin.java
index e1acd57..0c57689 100644
--- a/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4SparkPlugin.java
+++ b/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4SparkPlugin.java
@@ -32,6 +32,7 @@ import com.velocitypowered.api.proxy.ProxyServer;
import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.SparkPlugin;
+import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
import me.lucko.spark.common.platform.PlatformInfo;
import me.lucko.spark.common.util.ClassSourceLookup;
@@ -132,6 +133,11 @@ public class Velocity4SparkPlugin implements SparkPlugin, SimpleCommand {
return new Velocity4ClassSourceLookup(this.proxy.pluginManager());
}
+ @Override
+ public PlayerPingProvider createPlayerPingProvider() {
+ return new Velocity4PlayerPingProvider(this.proxy);
+ }
+
@Override
public PlatformInfo getPlatformInfo() {
return new Velocity4PlatformInfo(this.proxy);
diff --git a/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogPlayerPingProvider.java b/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogPlayerPingProvider.java
new file mode 100644
index 0000000..b22325c
--- /dev/null
+++ b/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogPlayerPingProvider.java
@@ -0,0 +1,47 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck)
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.waterdog;
+
+import com.google.common.collect.ImmutableMap;
+
+import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
+
+import dev.waterdog.waterdogpe.ProxyServer;
+import dev.waterdog.waterdogpe.player.ProxiedPlayer;
+
+import java.util.Map;
+
+public class WaterdogPlayerPingProvider implements PlayerPingProvider {
+ private final ProxyServer proxy;
+
+ public WaterdogPlayerPingProvider(ProxyServer proxy) {
+ this.proxy = proxy;
+ }
+
+ @Override
+ public Map<String, Integer> poll() {
+ ImmutableMap.Builder<String, Integer> builder = ImmutableMap.builder();
+ for (ProxiedPlayer player : this.proxy.getPlayers().values()) {
+ builder.put(player.getName(), (int) player.getPing());
+ }
+ return builder.build();
+ }
+}
diff --git a/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogSparkPlugin.java b/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogSparkPlugin.java
index fd2f031..07b153a 100644
--- a/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogSparkPlugin.java
+++ b/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogSparkPlugin.java
@@ -22,6 +22,7 @@ package me.lucko.spark.waterdog;
import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.SparkPlugin;
+import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
import me.lucko.spark.common.platform.PlatformInfo;
import me.lucko.spark.common.util.ClassSourceLookup;
@@ -99,6 +100,11 @@ public class WaterdogSparkPlugin extends Plugin implements SparkPlugin {
return new WaterdogClassSourceLookup(getProxy());
}
+ @Override
+ public PlayerPingProvider createPlayerPingProvider() {
+ return new WaterdogPlayerPingProvider(getProxy());
+ }
+
@Override
public PlatformInfo getPlatformInfo() {
return new WaterdogPlatformInfo();
--
cgit
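Taken together, these patches give every platform the same small contract: poll() returns a snapshot mapping player name to ping in milliseconds. The following self-contained sketch is not spark's actual monitoring code; the nested interface merely mirrors the shape used in the diffs above, and shows how a polled snapshot might be consumed:

    import java.util.Map;

    public class PingSummaryDemo {
        // Mirrors the shape of me.lucko.spark.common.monitor.ping.PlayerPingProvider.
        interface PlayerPingProvider {
            Map<String, Integer> poll();
        }

        public static void main(String[] args) {
            // Stand-in provider returning fixed values for the demo.
            PlayerPingProvider provider = () -> Map.of("Alice", 23, "Bob", 61);

            Map<String, Integer> pings = provider.poll();
            double average = pings.values().stream()
                    .mapToInt(Integer::intValue)
                    .average()
                    .orElse(0);
            System.out.printf("%d players, average ping %.1f ms%n", pings.size(), average);
        }
    }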
From bd0827f199e12a6b31e92e12fb33c549e2788ef8 Mon Sep 17 00:00:00 2001
From: Luck
Date: Thu, 20 Jan 2022 22:49:34 +0000
Subject: Expose some server configuration values in the viewer
---
.../spark/bukkit/BukkitServerConfigProvider.java | 78 ++++++++++++++
.../me/lucko/spark/bukkit/BukkitSparkPlugin.java | 6 ++
.../java/me/lucko/spark/common/SparkPlugin.java | 10 ++
.../serverconfig/AbstractServerConfigProvider.java | 113 +++++++++++++++++++++
.../serverconfig/ServerConfigProvider.java | 59 +++++++++++
.../spark/common/sampler/AbstractSampler.java | 8 ++
.../src/main/proto/spark/spark_sampler.proto | 1 +
7 files changed, 275 insertions(+)
create mode 100644 spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java
create mode 100644 spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java
create mode 100644 spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java
(limited to 'spark-common/src')
diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java
new file mode 100644
index 0000000..b1d2a60
--- /dev/null
+++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java
@@ -0,0 +1,78 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck)
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.bukkit;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+import com.google.gson.JsonElement;
+import com.google.gson.JsonSerializer;
+
+import me.lucko.spark.common.platform.serverconfig.AbstractServerConfigProvider;
+
+import org.bukkit.configuration.MemorySection;
+import org.bukkit.configuration.file.YamlConfiguration;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.util.List;
+import java.util.Map;
+
+public class BukkitServerConfigProvider extends AbstractServerConfigProvider<BukkitServerConfigProvider.FileType> {
+ private static final Gson GSON = new GsonBuilder()
+ .registerTypeAdapter(MemorySection.class, (JsonSerializer<MemorySection>) (obj, type, ctx) -> ctx.serialize(obj.getValues(false)))
+ .create();
+
+ private static final Map<String, FileType> FILES = ImmutableMap.of(
+ "bukkit.yml", FileType.YAML,
+ "spigot.yml", FileType.YAML,
+ "paper.yml", FileType.YAML
+ );
+
+ // todo: make configurable?
+ private static final List<String> HIDDEN_PATHS = ImmutableList.of(
+ "database",
+ "settings.bungeecord-addresses",
+ "settings.velocity-support.secret"
+ );
+
+ public BukkitServerConfigProvider() {
+ super(FILES, HIDDEN_PATHS);
+ }
+
+ @Override
+ protected JsonElement load(String path, FileType type) throws IOException {
+ try (BufferedReader reader = Files.newBufferedReader(Paths.get(path), StandardCharsets.UTF_8)) {
+ YamlConfiguration config = YamlConfiguration.loadConfiguration(reader);
+ Map<String, Object> values = config.getValues(false);
+ return GSON.toJsonTree(values);
+ }
+ }
+
+ enum FileType {
+ YAML
+ }
+
+}
diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java
index 6929a4d..9727277 100644
--- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java
+++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java
@@ -27,6 +27,7 @@ import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.SparkPlugin;
import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
import me.lucko.spark.common.platform.PlatformInfo;
+import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider;
import me.lucko.spark.common.sampler.ThreadDumper;
import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.common.tick.TickReporter;
@@ -181,6 +182,11 @@ public class BukkitSparkPlugin extends JavaPlugin implements SparkPlugin {
}
}
+ @Override
+ public ServerConfigProvider createServerConfigProvider() {
+ return new BukkitServerConfigProvider();
+ }
+
@Override
public PlatformInfo getPlatformInfo() {
return new BukkitPlatformInfo(getServer());
diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java
index 5feb172..b817df1 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java
@@ -24,6 +24,7 @@ import me.lucko.spark.api.Spark;
import me.lucko.spark.common.command.sender.CommandSender;
import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
import me.lucko.spark.common.platform.PlatformInfo;
+import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider;
import me.lucko.spark.common.sampler.ThreadDumper;
import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.common.tick.TickReporter;
@@ -132,6 +133,15 @@ public interface SparkPlugin {
return null;
}
+ /**
+ * Creates a server config provider.
+ *
+ * @return the server config provider function
+ */
+ default ServerConfigProvider createServerConfigProvider() {
+ return ServerConfigProvider.NO_OP;
+ }
+
/**
* Gets information for the platform.
*
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java
new file mode 100644
index 0000000..98db960
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java
@@ -0,0 +1,113 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck)
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform.serverconfig;
+
+import com.google.common.collect.ImmutableMap;
+import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Deque;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Abstract implementation of {@link ServerConfigProvider}.
+ *
+ * This implementation is able to delete hidden paths from
+ * the configurations before they are sent to the viewer.
+ *
+ * @param <T> the file type
+ */
+public abstract class AbstractServerConfigProvider<T extends Enum<T>> implements ServerConfigProvider {
+ private final Map<String, T> files;
+ private final List<String> hiddenPaths;
+
+ protected AbstractServerConfigProvider(Map<String, T> files, List<String> hiddenPaths) {
+ this.files = files;
+ this.hiddenPaths = hiddenPaths;
+ }
+
+ @Override
+ public final Map<String, JsonElement> loadServerConfigurations() {
+ ImmutableMap.Builder<String, JsonElement> builder = ImmutableMap.builder();
+
+ this.files.forEach((path, type) -> {
+ try {
+ JsonElement json = load(path, type);
+ delete(json, this.hiddenPaths);
+ builder.put(path, json);
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ });
+
+ return builder.build();
+ }
+
+ /**
+ * Loads a file from the system.
+ *
+ * @param path the name of the file to load
+ * @param type the type of the file
+ * @return the loaded file
+ * @throws IOException if an error occurs performing i/o
+ */
+ protected abstract JsonElement load(String path, T type) throws IOException;
+
+ /**
+ * Deletes the given paths from the json element.
+ *
+ * @param json the json element
+ * @param paths the paths to delete
+ */
+ private static void delete(JsonElement json, List<String> paths) {
+ for (String path : paths) {
+ Deque<String> pathDeque = new LinkedList<>(Arrays.asList(path.split("\\.")));
+ delete(json, pathDeque);
+ }
+ }
+
+ private static void delete(JsonElement json, Deque<String> path) {
+ if (path.isEmpty()) {
+ return;
+ }
+ if (!json.isJsonObject()) {
+ return;
+ }
+
+ JsonObject jsonObject = json.getAsJsonObject();
+ String member = path.removeFirst();
+
+ if (!jsonObject.has(member)) {
+ return;
+ }
+
+ if (path.isEmpty()) {
+ jsonObject.remove(member);
+ } else {
+ delete(jsonObject.get(member), path);
+ }
+ }
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java
new file mode 100644
index 0000000..1fc2391
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java
@@ -0,0 +1,59 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck)
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform.serverconfig;
+
+import com.google.gson.JsonElement;
+
+import java.util.Collections;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+/**
+ * Function to export server configuration files for access within the spark viewer.
+ */
+@FunctionalInterface
+public interface ServerConfigProvider {
+
+ /**
+ * Loads a map of the server configuration files.
+ *
+ * The key is the name of the file and the value is a
+ * {@link JsonElement} of the contents.
+ *
+ * @return the exported server configurations
+ */
+ Map<String, JsonElement> loadServerConfigurations();
+
+ default Map<String, String> exportServerConfigurations() {
+ return loadServerConfigurations().entrySet()
+ .stream()
+ .collect(Collectors.toMap(
+ Map.Entry::getKey,
+ e -> e.getValue().toString()
+ ));
+ }
+
+ /**
+ * A no-op implementation
+ */
+ ServerConfigProvider NO_OP = Collections::emptyMap;
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java
index 34abdfa..9ae82e8 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java
@@ -23,6 +23,7 @@ package me.lucko.spark.common.sampler;
import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.command.sender.CommandSender;
import me.lucko.spark.common.monitor.memory.GarbageCollectorStatistics;
+import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider;
import me.lucko.spark.common.sampler.aggregator.DataAggregator;
import me.lucko.spark.common.sampler.node.MergeMode;
import me.lucko.spark.common.sampler.node.ThreadNode;
@@ -116,6 +117,13 @@ public abstract class AbstractSampler implements Sampler {
e.printStackTrace();
}
+ try {
+ ServerConfigProvider serverConfigProvider = platform.getPlugin().createServerConfigProvider();
+ metadata.putAllServerConfigurations(serverConfigProvider.exportServerConfigurations());
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+
proto.setMetadata(metadata);
}
diff --git a/spark-common/src/main/proto/spark/spark_sampler.proto b/spark-common/src/main/proto/spark/spark_sampler.proto
index c7cecc3..48e2a87 100644
--- a/spark-common/src/main/proto/spark/spark_sampler.proto
+++ b/spark-common/src/main/proto/spark/spark_sampler.proto
@@ -23,6 +23,7 @@ message SamplerMetadata {
PlatformMetadata platform_metadata = 7;
PlatformStatistics platform_statistics = 8;
SystemStatistics system_statistics = 9;
+ map<string, string> server_configurations = 10;
message ThreadDumper {
Type type = 1;
--
cgit
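The delete() recursion above is easiest to see with a concrete input. This self-contained sketch (assuming Gson on the classpath) reproduces its observable behaviour iteratively rather than with a Deque; it is an illustration, not the provider code itself:

    import com.google.gson.JsonObject;
    import com.google.gson.JsonParser;

    public class HiddenPathDemo {
        public static void main(String[] args) {
            JsonObject config = JsonParser.parseString(
                    "{\"settings\":{\"velocity-support\":{\"secret\":\"abc\",\"enabled\":true}}}"
            ).getAsJsonObject();

            // Walk "settings.velocity-support.secret" segment by segment...
            String[] path = "settings.velocity-support.secret".split("\\.");
            JsonObject node = config;
            for (int i = 0; i < path.length - 1; i++) {
                node = node.getAsJsonObject(path[i]);
            }
            node.remove(path[path.length - 1]); // ...and delete the final member

            // Prints {"settings":{"velocity-support":{"enabled":true}}}
            System.out.println(config);
        }
    }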
From fe9b39b500ee2633db18ea14774daf01e2946824 Mon Sep 17 00:00:00 2001
From: Manuel Kasten
Date: Fri, 21 Jan 2022 21:09:46 +0100
Subject: Add disableResponseBroadcast config option (#158)
---
.../src/main/java/me/lucko/spark/common/SparkPlatform.java | 7 +++++++
.../lucko/spark/common/command/CommandResponseHandler.java | 14 +++++++++++---
2 files changed, 18 insertions(+), 3 deletions(-)
(limited to 'spark-common/src')
diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
index c93b876..a087fc9 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
@@ -94,6 +94,7 @@ public class SparkPlatform {
private final String viewerUrl;
private final OkHttpClient httpClient;
private final BytebinClient bytebinClient;
+ private final boolean disableResponseBroadcast;
private final List<CommandModule> commandModules;
private final List<Command> commands;
private final ReentrantLock commandExecuteLock = new ReentrantLock(true);
@@ -118,6 +119,8 @@ public class SparkPlatform {
this.httpClient = new OkHttpClient();
this.bytebinClient = new BytebinClient(this.httpClient, bytebinUrl, "spark-plugin");
+ this.disableResponseBroadcast = this.configuration.getBoolean("disableResponseBroadcast", false);
+
this.commandModules = ImmutableList.of(
new SamplerModule(),
new HealthModule(),
@@ -216,6 +219,10 @@ public class SparkPlatform {
return this.bytebinClient;
}
+ public boolean shouldBroadcastResponse() {
+ return !this.disableResponseBroadcast;
+ }
+
public List<Command> getCommands() {
return this.commands;
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/CommandResponseHandler.java b/spark-common/src/main/java/me/lucko/spark/common/command/CommandResponseHandler.java
index a9e2229..d1481bd 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/CommandResponseHandler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/CommandResponseHandler.java
@@ -88,12 +88,20 @@ public class CommandResponseHandler {
}
public void broadcast(Component message) {
- allSenders(sender -> sender.sendMessage(message));
+ if (this.platform.shouldBroadcastResponse()) {
+ allSenders(sender -> sender.sendMessage(message));
+ } else {
+ reply(message);
+ }
}
public void broadcast(Iterable<Component> message) {
- Component joinedMsg = Component.join(JoinConfiguration.separator(Component.newline()), message);
- allSenders(sender -> sender.sendMessage(joinedMsg));
+ if (this.platform.shouldBroadcastResponse()) {
+ Component joinedMsg = Component.join(JoinConfiguration.separator(Component.newline()), message);
+ allSenders(sender -> sender.sendMessage(joinedMsg));
+ } else {
+ reply(message);
+ }
}
public void replyPrefixed(Component message) {
--
cgit
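The practical effect: with disableResponseBroadcast set to true in spark's configuration (read via configuration.getBoolean("disableResponseBroadcast", false) as shown above), broadcast() degrades to reply(), so command feedback reaches only the invoking sender rather than everyone allSenders covers. The default of false preserves the previous broadcast behaviour.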
From 1642af9bacc88c20614234296fd596932b14d8df Mon Sep 17 00:00:00 2001
From: Ben Kerllenevich
Date: Fri, 21 Jan 2022 15:53:51 -0500
Subject: BukkitServerConfigProvider improvements (#159)
---
.../spark/bukkit/BukkitServerConfigProvider.java | 103 +++++++++++++++++----
.../serverconfig/AbstractServerConfigProvider.java | 14 +--
2 files changed, 95 insertions(+), 22 deletions(-)
(limited to 'spark-common/src')
diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java
index b1d2a60..5fdd178 100644
--- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java
+++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java
@@ -20,8 +20,8 @@
package me.lucko.spark.bukkit;
-import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.ImmutableSet;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonElement;
@@ -32,31 +32,31 @@ import me.lucko.spark.common.platform.serverconfig.AbstractServerConfigProvider;
import org.bukkit.configuration.MemorySection;
import org.bukkit.configuration.file.YamlConfiguration;
+import co.aikar.timings.TimingsManager;
+
import java.io.BufferedReader;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
+import java.nio.file.Path;
import java.nio.file.Paths;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import java.util.Properties;
public class BukkitServerConfigProvider extends AbstractServerConfigProvider<BukkitServerConfigProvider.FileType> {
private static final Gson GSON = new GsonBuilder()
.registerTypeAdapter(MemorySection.class, (JsonSerializer<MemorySection>) (obj, type, ctx) -> ctx.serialize(obj.getValues(false)))
.create();
- private static final Map<String, FileType> FILES = ImmutableMap.of(
- "bukkit.yml", FileType.YAML,
- "spigot.yml", FileType.YAML,
- "paper.yml", FileType.YAML
- );
-
- // todo: make configurable?
- private static final List<String> HIDDEN_PATHS = ImmutableList.of(
- "database",
- "settings.bungeecord-addresses",
- "settings.velocity-support.secret"
- );
+ /** A map of provided files and their type */
+ private static final Map<String, FileType> FILES;
+ /** A collection of paths to be excluded from the files */
+ private static final Collection<String> HIDDEN_PATHS;
public BukkitServerConfigProvider() {
super(FILES, HIDDEN_PATHS);
@@ -64,15 +64,86 @@ public class BukkitServerConfigProvider extends AbstractServerConfigProvider<BukkitServerConfigProvider.FileType> {
@Override
protected JsonElement load(String path, FileType type) throws IOException {
- try (BufferedReader reader = Files.newBufferedReader(Paths.get(path), StandardCharsets.UTF_8)) {
- YamlConfiguration config = YamlConfiguration.loadConfiguration(reader);
- Map<String, Object> values = config.getValues(false);
+ Path filePath = Paths.get(path);
+ if (!Files.exists(filePath)) {
+ return null;
+ }
+
+ try (BufferedReader reader = Files.newBufferedReader(filePath, StandardCharsets.UTF_8)) {
+ Map<String, Object> values;
+
+ if (type == FileType.PROPERTIES) {
+ Properties properties = new Properties();
+ properties.load(reader);
+
+ values = new HashMap<>();
+ properties.forEach((k, v) -> {
+ String key = k.toString();
+ String value = v.toString();
+
+ if ("true".equals(value) || "false".equals(value)) {
+ values.put(key, Boolean.parseBoolean(value));
+ } else if (value.matches("\\d+")) {
+ values.put(key, Integer.parseInt(value));
+ } else {
+ values.put(key, value);
+ }
+ });
+ } else if (type == FileType.YAML) {
+ YamlConfiguration config = YamlConfiguration.loadConfiguration(reader);
+ values = config.getValues(false);
+ } else {
+ throw new IllegalArgumentException("Unknown file type: " + type);
+ }
+
return GSON.toJsonTree(values);
}
}
enum FileType {
+ PROPERTIES,
YAML
}
+ static {
+ ImmutableMap.Builder<String, FileType> files = ImmutableMap.builder()
+ .put("server.properties", FileType.PROPERTIES)
+ .put("bukkit.yml", FileType.YAML)
+ .put("spigot.yml", FileType.YAML)
+ .put("paper.yml", FileType.YAML)
+ .put("purpur.yml", FileType.YAML);
+
+ for (String config : getSystemPropertyList("spark.serverconfigs.extra")) {
+ files.put(config, FileType.YAML);
+ }
+
+ ImmutableSet.Builder<String> hiddenPaths = ImmutableSet.builder()
+ .add("database")
+ .add("settings.bungeecord-addresses")
+ .add("settings.velocity-support.secret")
+ .add("server-ip")
+ .add("motd")
+ .add("resource-pack")
+ .addAll(getTimingsHiddenConfigs())
+ .addAll(getSystemPropertyList("spark.serverconfigs.hiddenpaths"));
+
+ FILES = files.build();
+ HIDDEN_PATHS = hiddenPaths.build();
+ }
+
+ private static List<String> getSystemPropertyList(String property) {
+ String value = System.getProperty(property);
+ return value == null
+ ? Collections.emptyList()
+ : Arrays.asList(value.split(","));
+ }
+
+ private static List<String> getTimingsHiddenConfigs() {
+ try {
+ return TimingsManager.hiddenConfigs;
+ } catch (NoClassDefFoundError e) {
+ return Collections.emptyList();
+ }
+ }
+
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java
index 98db960..1e6f1b4 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java
@@ -26,9 +26,9 @@ import com.google.gson.JsonObject;
import java.io.IOException;
import java.util.Arrays;
+import java.util.Collection;
import java.util.Deque;
import java.util.LinkedList;
-import java.util.List;
import java.util.Map;
/**
@@ -41,9 +41,9 @@ import java.util.Map;
*/
public abstract class AbstractServerConfigProvider<T extends Enum<T>> implements ServerConfigProvider {
private final Map<String, T> files;
- private final List<String> hiddenPaths;
+ private final Collection<String> hiddenPaths;
- protected AbstractServerConfigProvider(Map<String, T> files, List<String> hiddenPaths) {
+ protected AbstractServerConfigProvider(Map<String, T> files, Collection<String> hiddenPaths) {
this.files = files;
this.hiddenPaths = hiddenPaths;
}
@@ -55,8 +55,10 @@ public abstract class AbstractServerConfigProvider<T extends Enum<T>> implements
this.files.forEach((path, type) -> {
try {
JsonElement json = load(path, type);
- delete(json, this.hiddenPaths);
- builder.put(path, json);
+ if (json != null) {
+ delete(json, this.hiddenPaths);
+ builder.put(path, json);
+ }
} catch (Exception e) {
e.printStackTrace();
}
@@ -81,7 +83,7 @@ public abstract class AbstractServerConfigProvider<T extends Enum<T>> implements
* @param json the json element
* @param paths the paths to delete
*/
- private static void delete(JsonElement json, List<String> paths) {
+ private static void delete(JsonElement json, Collection<String> paths) {
for (String path : paths) {
Deque<String> pathDeque = new LinkedList<>(Arrays.asList(path.split("\\.")));
delete(json, pathDeque);
--
cgit
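Note the two system properties introduced in the static initialiser: spark.serverconfigs.extra adds further YAML files to the exported set, and spark.serverconfigs.hiddenpaths appends to the hidden-path list. Both are comma-separated, so a launch flag such as -Dspark.serverconfigs.extra=pufferfish.yml (file name illustrative, not from the patch) is enough to pick up a fork's config file.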
From ef490d2c00af0c8d549c111654619ee57b1b9d87 Mon Sep 17 00:00:00 2001
From: Luck
Date: Fri, 28 Jan 2022 18:23:43 +0000
Subject: Change textual diagram characters
---
.../spark/common/util/StatisticFormatter.java | 27 +++++++++++-----------
1 file changed, 14 insertions(+), 13 deletions(-)
(limited to 'spark-common/src')
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/StatisticFormatter.java b/spark-common/src/main/java/me/lucko/spark/common/util/StatisticFormatter.java
index 9a2850e..24e49fa 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/util/StatisticFormatter.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/StatisticFormatter.java
@@ -38,7 +38,8 @@ import static net.kyori.adventure.text.format.NamedTextColor.YELLOW;
public enum StatisticFormatter {
;
- private static final String BAR_CHARACTER = "|";
+ private static final String BAR_TRUE_CHARACTER = "┃";
+ private static final String BAR_FALSE_CHARACTER = "╻";
public static TextComponent formatTps(double tps) {
TextColor color;
@@ -124,13 +125,13 @@ public enum StatisticFormatter {
int usedChars = (int) ((used * length) / max);
int committedChars = (int) ((committed * length) / max);
- TextComponent.Builder line = text().content(Strings.repeat(BAR_CHARACTER, usedChars)).color(YELLOW);
+ TextComponent.Builder line = text().content(Strings.repeat(BAR_TRUE_CHARACTER, usedChars)).color(YELLOW);
if (committedChars > usedChars) {
- line.append(text(Strings.repeat(BAR_CHARACTER, (committedChars - usedChars) - 1), GRAY));
- line.append(Component.text(BAR_CHARACTER, RED));
+ line.append(text(Strings.repeat(BAR_FALSE_CHARACTER, (committedChars - usedChars) - 1), GRAY));
+ line.append(Component.text(BAR_FALSE_CHARACTER, RED));
}
if (length > committedChars) {
- line.append(text(Strings.repeat(BAR_CHARACTER, (length - committedChars)), GRAY));
+ line.append(text(Strings.repeat(BAR_FALSE_CHARACTER, (length - committedChars)), GRAY));
}
return text()
@@ -153,18 +154,18 @@ public enum StatisticFormatter {
int collectionUsedChars = (int) ((collectionUsed * length) / max);
int committedChars = (int) ((committed * length) / max);
- TextComponent.Builder line = text().content(Strings.repeat(BAR_CHARACTER, collectionUsedChars)).color(YELLOW);
+ TextComponent.Builder line = text().content(Strings.repeat(BAR_TRUE_CHARACTER, collectionUsedChars)).color(YELLOW);
if (usedChars > collectionUsedChars) {
- line.append(Component.text(BAR_CHARACTER, RED));
- line.append(text(Strings.repeat(BAR_CHARACTER, (usedChars - collectionUsedChars) - 1), YELLOW));
+ line.append(Component.text(BAR_TRUE_CHARACTER, RED));
+ line.append(text(Strings.repeat(BAR_TRUE_CHARACTER, (usedChars - collectionUsedChars) - 1), YELLOW));
}
if (committedChars > usedChars) {
- line.append(text(Strings.repeat(BAR_CHARACTER, (committedChars - usedChars) - 1), GRAY));
- line.append(Component.text(BAR_CHARACTER, YELLOW));
+ line.append(text(Strings.repeat(BAR_FALSE_CHARACTER, (committedChars - usedChars) - 1), GRAY));
+ line.append(Component.text(BAR_FALSE_CHARACTER, YELLOW));
}
if (length > committedChars) {
- line.append(text(Strings.repeat(BAR_CHARACTER, (length - committedChars)), GRAY));
+ line.append(text(Strings.repeat(BAR_FALSE_CHARACTER, (length - committedChars)), GRAY));
}
return text()
@@ -179,8 +180,8 @@ public enum StatisticFormatter {
int freeChars = length - usedChars;
return text()
.append(text("[", DARK_GRAY))
- .append(text(Strings.repeat(BAR_CHARACTER, usedChars), YELLOW))
- .append(text(Strings.repeat(BAR_CHARACTER, freeChars), GRAY))
+ .append(text(Strings.repeat(BAR_TRUE_CHARACTER, usedChars), YELLOW))
+ .append(text(Strings.repeat(BAR_FALSE_CHARACTER, freeChars), GRAY))
.append(text("]", DARK_GRAY))
.build();
}
--
cgit
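The character swap is cosmetic, but the surrounding arithmetic deserves a worked example. This standalone sketch renders the same proportions the formatter computes, using String.repeat in place of Guava's Strings.repeat and dropping the colour components and the single boundary character:

    public class BarDemo {
        public static void main(String[] args) {
            long used = 512, committed = 768, max = 1024; // e.g. MB
            int length = 40;

            int usedChars = (int) ((used * length) / max);           // 20
            int committedChars = (int) ((committed * length) / max); // 30

            String bar = "┃".repeat(usedChars)                   // used portion
                    + "╻".repeat(committedChars - usedChars)     // committed but unused
                    + "╻".repeat(length - committedChars);       // uncommitted
            System.out.println("[" + bar + "]");
        }
    }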
From d35216ec6cfe39cc95ffe7bae4b57638e8cdd77b Mon Sep 17 00:00:00 2001
From: Luck
Date: Sun, 30 Jan 2022 10:57:54 +0000
Subject: Fix NPE in DiskUsage util (#166)
---
.../main/java/me/lucko/spark/common/monitor/disk/DiskUsage.java | 8 ++++++++
1 file changed, 8 insertions(+)
(limited to 'spark-common/src')
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/disk/DiskUsage.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/disk/DiskUsage.java
index 4450fcd..7a4ada4 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/monitor/disk/DiskUsage.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/disk/DiskUsage.java
@@ -44,6 +44,10 @@ public enum DiskUsage {
}
public static long getUsed() {
+ if (FILE_STORE == null) {
+ return 0;
+ }
+
try {
long total = FILE_STORE.getTotalSpace();
return total - FILE_STORE.getUsableSpace();
@@ -53,6 +57,10 @@ public enum DiskUsage {
}
public static long getTotal() {
+ if (FILE_STORE == null) {
+ return 0;
+ }
+
try {
return FILE_STORE.getTotalSpace();
} catch (IOException e) {
--
cgit
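The guard makes sense once you consider how FILE_STORE is obtained: resolving a FileStore for the server directory can fail, leaving the field null. A minimal sketch of the overall pattern follows; the initialiser here is an assumption for illustration, only the null checks come from the patch:

    import java.io.IOException;
    import java.nio.file.FileStore;
    import java.nio.file.Files;
    import java.nio.file.Paths;

    public final class DiskUsageSketch {
        private static final FileStore FILE_STORE;
        static {
            FileStore store = null;
            try {
                store = Files.getFileStore(Paths.get("."));
            } catch (IOException e) {
                // leave null; the getters below fall back to 0
            }
            FILE_STORE = store;
        }

        public static long getTotal() {
            if (FILE_STORE == null) {
                return 0;
            }
            try {
                return FILE_STORE.getTotalSpace();
            } catch (IOException e) {
                return 0;
            }
        }
    }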
From ce56c23132383bd5d865070f23dcb89e574b97cd Mon Sep 17 00:00:00 2001
From: Luck
Date: Sun, 30 Jan 2022 18:59:48 +0000
Subject: Exclude rcon password from server.properties file
---
.../src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java | 1 +
.../common/platform/serverconfig/AbstractServerConfigProvider.java | 2 +-
2 files changed, 2 insertions(+), 1 deletion(-)
(limited to 'spark-common/src')
diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java
index 5fdd178..2042ee1 100644
--- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java
+++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java
@@ -124,6 +124,7 @@ public class BukkitServerConfigProvider extends AbstractServerConfigProvider<BukkitServerConfigProvider.FileType> {
.add("resource-pack")
+ .add("rcon.password")
.addAll(getTimingsHiddenConfigs())
.addAll(getSystemPropertyList("spark.serverconfigs.hiddenpaths"));
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java
index 1e6f1b4..b40f07f 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java
@@ -99,7 +99,7 @@ public abstract class AbstractServerConfigProvider<T extends Enum<T>> implements
}
JsonObject jsonObject = json.getAsJsonObject();
- String member = path.removeFirst();
+ String member = path.removeFirst().replace("\\.", ".");
if (!jsonObject.has(member)) {
return;
--
cgit
From c5494d20ce8c3c8868f0fc8e19a03d7888c6fe78 Mon Sep 17 00:00:00 2001
From: Luck
Date: Thu, 3 Feb 2022 19:01:01 +0000
Subject: Fix hidden config paths containing a '.' (#169)
---
.../src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java | 2 +-
.../common/platform/serverconfig/AbstractServerConfigProvider.java | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
(limited to 'spark-common/src')
diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java
index 2042ee1..e4750f9 100644
--- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java
+++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java
@@ -124,7 +124,7 @@ public class BukkitServerConfigProvider extends AbstractServerConfigProvider<BukkitServerConfigProvider.FileType> {
.add("resource-pack")
- .add("rcon.password")
+ .add("rcon<dot>password")
.addAll(getTimingsHiddenConfigs())
.addAll(getSystemPropertyList("spark.serverconfigs.hiddenpaths"));
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java
index b40f07f..d9e8bf4 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java
@@ -99,7 +99,7 @@ public abstract class AbstractServerConfigProvider<T extends Enum<T>> implements
}
JsonObject jsonObject = json.getAsJsonObject();
- String member = path.removeFirst().replace("\\.", ".");
+ String member = path.removeFirst().replace("<dot>", ".");
if (!jsonObject.has(member)) {
return;
--
cgit
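Why the placeholder? Hidden paths are split on a literal '.', so a key that itself contains a dot can never survive the split. Writing it as rcon<dot>password and swapping the placeholder back after splitting sidesteps that, as this small sketch shows:

    import java.util.Arrays;

    public class DotEscapeDemo {
        public static void main(String[] args) {
            String hiddenPath = "rcon<dot>password";
            String[] segments = hiddenPath.split("\\.");   // no dot, so one segment
            String member = segments[0].replace("<dot>", ".");
            System.out.println(Arrays.toString(segments)); // [rcon<dot>password]
            System.out.println(member);                    // rcon.password
        }
    }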
From 25f46b649363d99f51b1b5262b5f7c0ce0c3251b Mon Sep 17 00:00:00 2001
From: Luck
Date: Sun, 6 Feb 2022 20:25:07 +0000
Subject: Improve build tooling
- add version to output file name
- check license headers automatically
---
HEADER.txt | 17 +++++++++++
build.gradle | 27 ++++++++++++++++--
settings.gradle | 4 +--
spark-api/HEADER.txt | 22 +++++++++++++++
spark-api/build.gradle | 4 +++
spark-bukkit/build.gradle | 29 ++++++++++++++++++-
.../java/me/lucko/spark/bukkit/CommandMapUtil.java | 27 ++++++++----------
.../placeholder/SparkPlaceholderProvider.java | 2 +-
spark-bungeecord/build.gradle | 29 ++++++++++++++++++-
spark-common/build.gradle | 4 +++
.../command/tabcomplete/CompletionSupplier.java | 27 ++++++++----------
.../common/command/tabcomplete/TabCompleter.java | 27 ++++++++----------
.../spark/common/monitor/MonitoringExecutor.java | 20 +++++++++++++
.../lucko/spark/common/sampler/ThreadDumper.java | 1 -
.../spark/common/sampler/java/JavaSampler.java | 1 -
.../spark/common/sampler/node/AbstractNode.java | 1 -
.../spark/common/sampler/node/StackTraceNode.java | 1 -
.../me/lucko/spark/common/util/BytebinClient.java | 27 ++++++++----------
spark-fabric/build.gradle | 4 +--
.../placeholder/SparkFabricPlaceholderApi.java | 20 +++++++++++++
spark-forge/build.gradle | 2 +-
spark-nukkit/build.gradle | 2 +-
spark-sponge7/build.gradle | 2 +-
spark-sponge8/build.gradle | 2 +-
.../lucko/spark/sponge/Sponge8CommandSender.java | 1 +
spark-universal/build.gradle | 33 ----------------------
spark-velocity/build.gradle | 2 +-
spark-velocity4/build.gradle | 2 +-
spark-waterdog/build.gradle | 2 +-
29 files changed, 224 insertions(+), 118 deletions(-)
create mode 100644 HEADER.txt
create mode 100644 spark-api/HEADER.txt
delete mode 100644 spark-universal/build.gradle
(limited to 'spark-common/src')
diff --git a/HEADER.txt b/HEADER.txt
new file mode 100644
index 0000000..3457e84
--- /dev/null
+++ b/HEADER.txt
@@ -0,0 +1,17 @@
+This file is part of spark.
+
+ Copyright (c) lucko (Luck)
+ Copyright (c) contributors
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
\ No newline at end of file
diff --git a/build.gradle b/build.gradle
index 9e64e75..2780159 100644
--- a/build.gradle
+++ b/build.gradle
@@ -1,6 +1,10 @@
+plugins {
+ id 'org.cadixdev.licenser' version '0.6.1' apply false
+}
+
allprojects {
group = 'me.lucko'
- version = '1.6-SNAPSHOT'
+ version = '1.8-SNAPSHOT'
configurations {
compileClasspath // Fabric-loom needs this for remap jar for some reason
@@ -11,9 +15,12 @@ subprojects {
apply plugin: 'java'
apply plugin: 'idea'
apply plugin: 'java-library'
+ apply plugin: 'org.cadixdev.licenser'
ext {
- pluginVersion = '1.8.0'
+ baseVersion = '1.8'
+ patchVersion = determinePatchVersion()
+ pluginVersion = baseVersion + '.' + patchVersion
pluginDescription = 'spark is a performance profiling plugin/mod for Minecraft clients, servers and proxies.'
}
@@ -26,6 +33,11 @@ subprojects {
duplicatesStrategy = DuplicatesStrategy.INCLUDE
}
+ license {
+ header = rootProject.file('HEADER.txt')
+ include '**/*.java'
+ }
+
repositories {
mavenCentral()
maven { url "https://oss.sonatype.org/content/repositories/snapshots/" }
@@ -34,3 +46,14 @@ subprojects {
}
}
+
+def determinePatchVersion() {
+ def tagInfo = new ByteArrayOutputStream()
+ exec {
+ commandLine 'git', 'describe', '--tags'
+ standardOutput = tagInfo
+ }
+ tagInfo = tagInfo.toString()
+
+ return tagInfo.contains('-') ? tagInfo.split("-")[1] : 0
+}
\ No newline at end of file
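For context on determinePatchVersion(): git describe --tags prints the nearest tag plus a commit offset, e.g. v1.8-42-g1a2b3c4 for a checkout 42 commits past the v1.8 tag (sample output, illustrative), so splitting on '-' yields 42 as the patch number, while an exact tag contains no '-' and the patch falls back to 0.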
diff --git a/settings.gradle b/settings.gradle
index 09d0a60..8ec8a72 100644
--- a/settings.gradle
+++ b/settings.gradle
@@ -1,6 +1,5 @@
pluginManagement {
repositories {
- jcenter()
maven {
name = 'Fabric'
url = 'https://maven.fabricmc.net/'
@@ -22,6 +21,5 @@ include (
'spark-forge',
'spark-fabric',
'spark-nukkit',
- 'spark-waterdog',
- 'spark-universal'
+ 'spark-waterdog'
)
diff --git a/spark-api/HEADER.txt b/spark-api/HEADER.txt
new file mode 100644
index 0000000..63aafcb
--- /dev/null
+++ b/spark-api/HEADER.txt
@@ -0,0 +1,22 @@
+This file is part of spark, licensed under the MIT License.
+
+ Copyright (c) lucko (Luck)
+ Copyright (c) contributors
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
\ No newline at end of file
diff --git a/spark-api/build.gradle b/spark-api/build.gradle
index 31458af..0fbe9e1 100644
--- a/spark-api/build.gradle
+++ b/spark-api/build.gradle
@@ -9,6 +9,10 @@ dependencies {
compileOnly 'org.jetbrains:annotations:20.1.0'
}
+license {
+ header = project.file('HEADER.txt')
+}
+
publishing {
//repositories {
// maven {
diff --git a/spark-bukkit/build.gradle b/spark-bukkit/build.gradle
index b66134d..8e111e8 100644
--- a/spark-bukkit/build.gradle
+++ b/spark-bukkit/build.gradle
@@ -1,3 +1,7 @@
+plugins {
+ id 'com.github.johnrengelman.shadow' version '7.0.0'
+}
+
dependencies {
implementation project(':spark-common')
implementation('me.lucko:adventure-platform-bukkit:4.9.4') {
@@ -28,4 +32,27 @@ processResources {
)
include 'plugin.yml'
}
-}
\ No newline at end of file
+}
+
+shadowJar {
+ archiveName = "spark-${project.pluginVersion}-bukkit.jar"
+
+ relocate 'okio', 'me.lucko.spark.lib.okio'
+ relocate 'okhttp3', 'me.lucko.spark.lib.okhttp3'
+ relocate 'net.kyori.adventure', 'me.lucko.spark.lib.adventure'
+ relocate 'net.kyori.examination', 'me.lucko.spark.lib.adventure.examination'
+ relocate 'net.bytebuddy', 'me.lucko.spark.lib.bytebuddy'
+ relocate 'org.tukaani.xz', 'me.lucko.spark.lib.xz'
+ relocate 'com.google.protobuf', 'me.lucko.spark.lib.protobuf'
+ relocate 'org.objectweb.asm', 'me.lucko.spark.lib.asm'
+ relocate 'one.profiler', 'me.lucko.spark.lib.asyncprofiler'
+
+ exclude 'module-info.class'
+ exclude 'META-INF/maven/**'
+ exclude 'META-INF/proguard/**'
+}
+
+artifacts {
+ archives shadowJar
+ shadow shadowJar
+}
diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/CommandMapUtil.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/CommandMapUtil.java
index 5c4d0db..e604321 100644
--- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/CommandMapUtil.java
+++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/CommandMapUtil.java
@@ -1,26 +1,21 @@
/*
- * This file is part of helper, licensed under the MIT License.
+ * This file is part of spark.
*
* Copyright (c) lucko (Luck)
* Copyright (c) contributors
*
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
*
- * The above copyright notice and this permission notice shall be included in all
- * copies or substantial portions of the Software.
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
*
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- * SOFTWARE.
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package me.lucko.spark.bukkit;
diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkPlaceholderProvider.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkPlaceholderProvider.java
index 96c9e93..5b57857 100644
--- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkPlaceholderProvider.java
+++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkPlaceholderProvider.java
@@ -3,7 +3,7 @@
*
* Copyright (c) lucko (Luck)
* Copyright (c) contributors
- *
+ *
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
diff --git a/spark-bungeecord/build.gradle b/spark-bungeecord/build.gradle
index f1a6db5..ccea89d 100644
--- a/spark-bungeecord/build.gradle
+++ b/spark-bungeecord/build.gradle
@@ -1,3 +1,7 @@
+plugins {
+ id 'com.github.johnrengelman.shadow' version '7.0.0'
+}
+
dependencies {
implementation project(':spark-common')
implementation('me.lucko:adventure-platform-bungeecord:4.9.4') {
@@ -18,4 +22,27 @@ processResources {
)
include 'bungee.yml'
}
-}
\ No newline at end of file
+}
+
+shadowJar {
+ archiveName = "spark-${project.pluginVersion}-bungeecord.jar"
+
+ relocate 'okio', 'me.lucko.spark.lib.okio'
+ relocate 'okhttp3', 'me.lucko.spark.lib.okhttp3'
+ relocate 'net.kyori.adventure', 'me.lucko.spark.lib.adventure'
+ relocate 'net.kyori.examination', 'me.lucko.spark.lib.adventure.examination'
+ relocate 'net.bytebuddy', 'me.lucko.spark.lib.bytebuddy'
+ relocate 'org.tukaani.xz', 'me.lucko.spark.lib.xz'
+ relocate 'com.google.protobuf', 'me.lucko.spark.lib.protobuf'
+ relocate 'org.objectweb.asm', 'me.lucko.spark.lib.asm'
+ relocate 'one.profiler', 'me.lucko.spark.lib.asyncprofiler'
+
+ exclude 'module-info.class'
+ exclude 'META-INF/maven/**'
+ exclude 'META-INF/proguard/**'
+}
+
+artifacts {
+ archives shadowJar
+ shadow shadowJar
+}
diff --git a/spark-common/build.gradle b/spark-common/build.gradle
index c1ddafb..aa0f409 100644
--- a/spark-common/build.gradle
+++ b/spark-common/build.gradle
@@ -2,6 +2,10 @@ plugins {
id 'com.google.protobuf' version '0.8.16'
}
+license {
+ exclude '**/sampler/async/jfr/**'
+}
+
dependencies {
api project(':spark-api')
implementation 'com.github.jvm-profiling-tools:async-profiler:v2.5'
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/tabcomplete/CompletionSupplier.java b/spark-common/src/main/java/me/lucko/spark/common/command/tabcomplete/CompletionSupplier.java
index f1a6d10..9975df5 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/tabcomplete/CompletionSupplier.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/tabcomplete/CompletionSupplier.java
@@ -1,26 +1,21 @@
/*
- * This file is part of LuckPerms, licensed under the MIT License.
+ * This file is part of spark.
*
* Copyright (c) lucko (Luck)
* Copyright (c) contributors
*
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
*
- * The above copyright notice and this permission notice shall be included in all
- * copies or substantial portions of the Software.
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
*
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- * SOFTWARE.
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package me.lucko.spark.common.command.tabcomplete;
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/tabcomplete/TabCompleter.java b/spark-common/src/main/java/me/lucko/spark/common/command/tabcomplete/TabCompleter.java
index d2b2622..9707f55 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/tabcomplete/TabCompleter.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/tabcomplete/TabCompleter.java
@@ -1,26 +1,21 @@
/*
- * This file is part of LuckPerms, licensed under the MIT License.
+ * This file is part of spark.
*
* Copyright (c) lucko (Luck)
* Copyright (c) contributors
*
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
*
- * The above copyright notice and this permission notice shall be included in all
- * copies or substantial portions of the Software.
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
*
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- * SOFTWARE.
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package me.lucko.spark.common.command.tabcomplete;
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/MonitoringExecutor.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/MonitoringExecutor.java
index 779dbbf..635ae20 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/monitor/MonitoringExecutor.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/MonitoringExecutor.java
@@ -1,3 +1,23 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck)
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
package me.lucko.spark.common.monitor;
import java.util.concurrent.Executors;
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java
index 5cc41b9..9d54f50 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java
@@ -1,7 +1,6 @@
/*
* This file is part of spark.
*
- * Copyright (C) Albert Pham
* Copyright (c) lucko (Luck)
* Copyright (c) contributors
*
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java
index d2959bd..2bedae6 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java
@@ -1,7 +1,6 @@
/*
* This file is part of spark.
*
- * Copyright (C) Albert Pham
* Copyright (c) lucko (Luck)
* Copyright (c) contributors
*
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java
index 18f67ba..fd2be8d 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java
@@ -1,7 +1,6 @@
/*
* This file is part of spark.
*
- * Copyright (C) Albert Pham
* Copyright (c) lucko (Luck)
* Copyright (c) contributors
*
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java
index 54217be..b0d9237 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java
@@ -1,7 +1,6 @@
/*
* This file is part of spark.
*
- * Copyright (C) Albert Pham
* Copyright (c) lucko (Luck)
* Copyright (c) contributors
*
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/BytebinClient.java b/spark-common/src/main/java/me/lucko/spark/common/util/BytebinClient.java
index 29ee5bb..c2ca1b1 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/util/BytebinClient.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/BytebinClient.java
@@ -1,26 +1,21 @@
/*
- * This file is part of bytebin, licensed under the MIT License.
+ * This file is part of spark.
*
* Copyright (c) lucko (Luck)
* Copyright (c) contributors
*
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
*
- * The above copyright notice and this permission notice shall be included in all
- * copies or substantial portions of the Software.
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
*
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- * SOFTWARE.
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
package me.lucko.spark.common.util;
diff --git a/spark-fabric/build.gradle b/spark-fabric/build.gradle
index d7e87cd..8cdf5fe 100644
--- a/spark-fabric/build.gradle
+++ b/spark-fabric/build.gradle
@@ -67,7 +67,7 @@ processResources {
}
shadowJar {
- archiveFileName = 'spark-fabric-dev.jar'
+ archiveFileName = "spark-fabric-${project.pluginVersion}-dev.jar"
configurations = [project.configurations.shade]
relocate 'okio', 'me.lucko.spark.lib.okio'
@@ -89,7 +89,7 @@ task remappedShadowJar(type: RemapJarTask) {
dependsOn tasks.shadowJar
input = tasks.shadowJar.archiveFile
addNestedDependencies = true
- archiveFileName = 'spark-fabric.jar'
+ archiveFileName = "spark-${project.pluginVersion}-fabric.jar"
}
tasks.assemble.dependsOn tasks.remappedShadowJar
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/placeholder/SparkFabricPlaceholderApi.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/placeholder/SparkFabricPlaceholderApi.java
index b9cff691..dc2e7d9 100644
--- a/spark-fabric/src/main/java/me/lucko/spark/fabric/placeholder/SparkFabricPlaceholderApi.java
+++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/placeholder/SparkFabricPlaceholderApi.java
@@ -1,3 +1,23 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck)
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <https://www.gnu.org/licenses/>.
+ */
+
package me.lucko.spark.fabric.placeholder;
import eu.pb4.placeholders.PlaceholderAPI;
diff --git a/spark-forge/build.gradle b/spark-forge/build.gradle
index e0fe0f8..25e51ef 100644
--- a/spark-forge/build.gradle
+++ b/spark-forge/build.gradle
@@ -49,7 +49,7 @@ processResources {
}
shadowJar {
- archiveName = 'spark-forge.jar'
+ archiveName = "spark-${project.pluginVersion}-forge.jar"
configurations = [project.configurations.shade]
relocate 'okio', 'me.lucko.spark.lib.okio'
diff --git a/spark-nukkit/build.gradle b/spark-nukkit/build.gradle
index ff36091..2e1ad55 100644
--- a/spark-nukkit/build.gradle
+++ b/spark-nukkit/build.gradle
@@ -23,7 +23,7 @@ processResources {
}
shadowJar {
- archiveName = 'spark-nukkit.jar'
+ archiveName = "spark-${project.pluginVersion}-nukkit.jar"
relocate 'okio', 'me.lucko.spark.lib.okio'
relocate 'okhttp3', 'me.lucko.spark.lib.okhttp3'
diff --git a/spark-sponge7/build.gradle b/spark-sponge7/build.gradle
index 5b26924..b18ffdb 100644
--- a/spark-sponge7/build.gradle
+++ b/spark-sponge7/build.gradle
@@ -20,7 +20,7 @@ blossom {
}
shadowJar {
- archiveFileName = 'spark-sponge7.jar'
+ archiveFileName = "spark-${project.pluginVersion}-sponge7.jar"
relocate 'okio', 'me.lucko.spark.lib.okio'
relocate 'okhttp3', 'me.lucko.spark.lib.okhttp3'
diff --git a/spark-sponge8/build.gradle b/spark-sponge8/build.gradle
index 45f17b3..314ab18 100644
--- a/spark-sponge8/build.gradle
+++ b/spark-sponge8/build.gradle
@@ -22,7 +22,7 @@ processResources {
}
shadowJar {
- archiveFileName = 'spark-sponge8.jar'
+ archiveFileName = "spark-${project.pluginVersion}-sponge8.jar"
dependencies {
exclude(dependency('net.kyori:^(?!adventure-text-feature-pagination).+$'))
diff --git a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8CommandSender.java b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8CommandSender.java
index 5e7a65a..e7878dc 100644
--- a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8CommandSender.java
+++ b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8CommandSender.java
@@ -17,6 +17,7 @@
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
+
package me.lucko.spark.sponge;
import me.lucko.spark.common.command.sender.AbstractCommandSender;
diff --git a/spark-universal/build.gradle b/spark-universal/build.gradle
deleted file mode 100644
index f784a9e..0000000
--- a/spark-universal/build.gradle
+++ /dev/null
@@ -1,33 +0,0 @@
-plugins {
- id 'com.github.johnrengelman.shadow' version '7.0.0'
-}
-
-dependencies {
- implementation project(':spark-common')
- implementation project(':spark-bukkit')
- implementation project(':spark-bungeecord')
-}
-
-shadowJar {
- archiveName = 'spark.jar'
-
- relocate 'okio', 'me.lucko.spark.lib.okio'
- relocate 'okhttp3', 'me.lucko.spark.lib.okhttp3'
- relocate 'net.kyori.adventure', 'me.lucko.spark.lib.adventure'
- relocate 'net.kyori.examination', 'me.lucko.spark.lib.adventure.examination'
- relocate 'net.bytebuddy', 'me.lucko.spark.lib.bytebuddy'
- relocate 'org.tukaani.xz', 'me.lucko.spark.lib.xz'
- relocate 'com.google.protobuf', 'me.lucko.spark.lib.protobuf'
- relocate 'org.objectweb.asm', 'me.lucko.spark.lib.asm'
- relocate 'one.profiler', 'me.lucko.spark.lib.asyncprofiler'
-
- exclude 'module-info.class'
- exclude 'META-INF/maven/**'
- exclude 'META-INF/proguard/**'
-}
-
-artifacts {
- archives shadowJar
- shadow shadowJar
-}
-
diff --git a/spark-velocity/build.gradle b/spark-velocity/build.gradle
index 8a345f9..b2e938b 100644
--- a/spark-velocity/build.gradle
+++ b/spark-velocity/build.gradle
@@ -20,7 +20,7 @@ blossom {
}
shadowJar {
- archiveName = 'spark-velocity.jar'
+ archiveName = "spark-${project.pluginVersion}-velocity.jar"
dependencies {
exclude(dependency('net.kyori:^(?!adventure-text-feature-pagination).+$'))
diff --git a/spark-velocity4/build.gradle b/spark-velocity4/build.gradle
index e2a0559..5bef80b 100644
--- a/spark-velocity4/build.gradle
+++ b/spark-velocity4/build.gradle
@@ -25,7 +25,7 @@ blossom {
}
shadowJar {
- archiveName = 'spark-velocity4.jar'
+ archiveName = "spark-${project.pluginVersion}-velocity4.jar"
dependencies {
exclude(dependency('net.kyori:^(?!adventure-text-feature-pagination).+$'))
diff --git a/spark-waterdog/build.gradle b/spark-waterdog/build.gradle
index 07be15c..c11e3fb 100644
--- a/spark-waterdog/build.gradle
+++ b/spark-waterdog/build.gradle
@@ -28,7 +28,7 @@ processResources {
}
shadowJar {
- archiveName = 'spark-waterdog.jar'
+ archiveName = "spark-${project.pluginVersion}-waterdog.jar"
relocate 'okio', 'me.lucko.spark.lib.okio'
relocate 'okhttp3', 'me.lucko.spark.lib.okhttp3'
--
cgit
From 8a39ad426ca4fe1f5ef39fc5b0b538937802a001 Mon Sep 17 00:00:00 2001
From: Luck
Date: Sun, 6 Feb 2022 21:02:28 +0000
Subject: Catch any exception when uploading data (#171)
---
.../java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java | 2 +-
.../main/java/me/lucko/spark/common/command/modules/SamplerModule.java | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
(limited to 'spark-common/src')
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java
index 491ec1e..1030f35 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java
@@ -111,7 +111,7 @@ public class HeapAnalysisModule implements CommandModule {
);
platform.getActivityLog().addToLog(Activity.urlActivity(sender, System.currentTimeMillis(), "Heap dump summary", url));
- } catch (IOException e) {
+ } catch (Exception e) {
resp.broadcastPrefixed(text("An error occurred whilst uploading the data. Attempting to save to disk instead.", RED));
e.printStackTrace();
saveToFile = true;
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
index 26f20e7..970d062 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
@@ -324,7 +324,7 @@ public class SamplerModule implements CommandModule {
);
platform.getActivityLog().addToLog(Activity.urlActivity(resp.sender(), System.currentTimeMillis(), "Profiler", url));
- } catch (IOException e) {
+ } catch (Exception e) {
resp.broadcastPrefixed(text("An error occurred whilst uploading the results. Attempting to save to disk instead.", RED));
e.printStackTrace();
saveToFile = true;
--
cgit
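
[Note: the widened catch above matters because the bytebin upload can fail with more than an IOException; unchecked exceptions from the HTTP client previously escaped the handler and skipped the save-to-disk fallback. A minimal standalone sketch of the pattern, with hypothetical uploadToBytebin/saveToDisk names standing in for spark's real upload path:

    import java.nio.file.Files;
    import java.nio.file.Paths;

    public class UploadWithFallback {
        // hypothetical stand-in for the real upload; may throw unchecked
        // exceptions (e.g. from the HTTP client), not just IOException
        static String uploadToBytebin(byte[] payload) throws Exception {
            throw new RuntimeException("simulated HTTP client failure");
        }

        public static void main(String[] args) throws Exception {
            byte[] payload = "profile-data".getBytes();
            try {
                System.out.println("Uploaded to " + uploadToBytebin(payload));
            } catch (Exception e) { // was: catch (IOException e)
                // a RuntimeException would previously have escaped here
                System.err.println("Upload failed, saving to disk instead");
                Files.write(Paths.get("spark-report.bin"), payload);
            }
        }
    }
]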
From f54a31e9d4756c12836d5e42c31d3d100546533d Mon Sep 17 00:00:00 2001
From: Luck
Date: Sat, 12 Feb 2022 00:14:02 +0000
Subject: Add class source lookups for Sponge8
---
.../lucko/spark/common/util/ClassSourceLookup.java | 25 +++++++++---
.../spark/sponge/Sponge8ClassSourceLookup.java | 44 ++++++++++++++++++++++
.../me/lucko/spark/sponge/Sponge8SparkPlugin.java | 6 +++
3 files changed, 70 insertions(+), 5 deletions(-)
create mode 100644 spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8ClassSourceLookup.java
(limited to 'spark-common/src')
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/ClassSourceLookup.java b/spark-common/src/main/java/me/lucko/spark/common/util/ClassSourceLookup.java
index 42a04f7..bd9ec37 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/util/ClassSourceLookup.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/ClassSourceLookup.java
@@ -26,6 +26,7 @@ import me.lucko.spark.common.sampler.node.ThreadNode;
import org.checkerframework.checker.nullness.qual.Nullable;
import java.io.IOException;
+import java.net.MalformedURLException;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.URLClassLoader;
@@ -87,8 +88,22 @@ public interface ClassSourceLookup {
*/
interface ByUrl extends ClassSourceLookup {
- default String identifyUrl(URL url) throws URISyntaxException {
- return url.getProtocol().equals("file") ? identifyFile(Paths.get(url.toURI())) : null;
+ default String identifyUrl(URL url) throws URISyntaxException, MalformedURLException {
+ Path path = null;
+
+ String protocol = url.getProtocol();
+ if (protocol.equals("file")) {
+ path = Paths.get(url.toURI());
+ } else if (protocol.equals("jar")) {
+ URL innerUrl = new URL(url.getPath());
+ path = Paths.get(innerUrl.getPath().split("!")[0]);
+ }
+
+ if (path != null) {
+ return identifyFile(path.toAbsolutePath().normalize());
+ }
+
+ return null;
}
default String identifyFile(Path path) {
@@ -123,7 +138,7 @@ public interface ClassSourceLookup {
*/
class ByCodeSource implements ClassSourceLookup, ByUrl {
@Override
- public @Nullable String identify(Class<?> clazz) throws URISyntaxException {
+ public @Nullable String identify(Class<?> clazz) throws URISyntaxException, MalformedURLException {
ProtectionDomain protectionDomain = clazz.getProtectionDomain();
if (protectionDomain == null) {
return null;
@@ -148,12 +163,12 @@ public interface ClassSourceLookup {
static Visitor createVisitor(ClassSourceLookup lookup) {
if (lookup == ClassSourceLookup.NO_OP) {
- return NoOpVistitor.INSTANCE; // don't bother!
+ return NoOpVisitor.INSTANCE; // don't bother!
}
return new VisitorImpl(lookup);
}
- enum NoOpVistitor implements Visitor {
+ enum NoOpVisitor implements Visitor {
INSTANCE;
@Override
diff --git a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8ClassSourceLookup.java b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8ClassSourceLookup.java
new file mode 100644
index 0000000..60ebb60
--- /dev/null
+++ b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8ClassSourceLookup.java
@@ -0,0 +1,44 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck)
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <https://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.sponge;
+
+import me.lucko.spark.common.util.ClassSourceLookup;
+
+import org.spongepowered.api.Game;
+
+import java.nio.file.Path;
+
+public class Sponge8ClassSourceLookup extends ClassSourceLookup.ByCodeSource {
+ private final Path modsDirectory;
+
+ public Sponge8ClassSourceLookup(Game game) {
+ this.modsDirectory = game.gameDirectory().resolve("mods").toAbsolutePath().normalize();
+ }
+
+ @Override
+ public String identifyFile(Path path) {
+ if (!path.startsWith(this.modsDirectory)) {
+ return null;
+ }
+
+ return super.identifyFileName(this.modsDirectory.relativize(path).toString());
+ }
+}
diff --git a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8SparkPlugin.java b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8SparkPlugin.java
index 6eb2674..e867a75 100644
--- a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8SparkPlugin.java
+++ b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8SparkPlugin.java
@@ -29,6 +29,7 @@ import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
import me.lucko.spark.common.platform.PlatformInfo;
import me.lucko.spark.common.sampler.ThreadDumper;
import me.lucko.spark.common.tick.TickHook;
+import me.lucko.spark.common.util.ClassSourceLookup;
import net.kyori.adventure.text.Component;
@@ -147,6 +148,11 @@ public class Sponge8SparkPlugin implements SparkPlugin {
return new Sponge8TickHook(this.pluginContainer, this.game);
}
+ @Override
+ public ClassSourceLookup createClassSourceLookup() {
+ return new Sponge8ClassSourceLookup(this.game);
+ }
+
@Override
public PlayerPingProvider createPlayerPingProvider() {
if (this.game.isServerAvailable()) {
--
cgit
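
[Note: the identifyUrl change earlier in this commit handles classes loaded from "jar:" URLs, presumably because Sponge 8's environment loads plugins that way; it unwraps the inner "file:" URL before the "!" separator. A minimal sketch of that unwrapping (class and method names hypothetical):

    import java.net.URL;
    import java.nio.file.Path;
    import java.nio.file.Paths;

    public class JarUrlExample {
        // "jar:file:/x.jar!/pkg/Foo.class" -> /x.jar
        static Path toJarPath(URL url) throws Exception {
            if (url.getProtocol().equals("file")) {
                return Paths.get(url.toURI());
            } else if (url.getProtocol().equals("jar")) {
                URL inner = new URL(url.getPath()); // strips the "jar:" prefix
                return Paths.get(inner.getPath().split("!")[0]);
            }
            return null;
        }

        public static void main(String[] args) throws Exception {
            URL url = new URL("jar:file:/srv/mods/example.jar!/com/example/Foo.class");
            System.out.println(toJarPath(url)); // -> /srv/mods/example.jar
        }
    }
]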
From f8ae6c5e41d72e7e695c65fe77281c6dd87e1ec3 Mon Sep 17 00:00:00 2001
From: Luck
Date: Tue, 22 Feb 2022 20:46:51 +0000
Subject: Exclude level seed from server.properties config (#178)
---
.../spark/bukkit/BukkitServerConfigProvider.java | 26 ++-------
.../serverconfig/PropertiesFileReader.java | 64 ++++++++++++++++++++++
2 files changed, 68 insertions(+), 22 deletions(-)
create mode 100644 spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesFileReader.java
(limited to 'spark-common/src')
diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java
index 492e610..0ef662c 100644
--- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java
+++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java
@@ -28,6 +28,7 @@ import com.google.gson.JsonElement;
import com.google.gson.JsonSerializer;
import me.lucko.spark.common.platform.serverconfig.AbstractServerConfigProvider;
+import me.lucko.spark.common.platform.serverconfig.PropertiesFileReader;
import org.bukkit.configuration.MemorySection;
import org.bukkit.configuration.file.YamlConfiguration;
@@ -43,10 +44,8 @@ import java.nio.file.Paths;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
-import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import java.util.Properties;
public class BukkitServerConfigProvider extends AbstractServerConfigProvider<BukkitServerConfigProvider.FileType> {
private static final Gson GSON = new GsonBuilder()
@@ -73,26 +72,8 @@ public class BukkitServerConfigProvider extends AbstractServerConfigProvider
Map<String, Object> values;
if (type == FileType.PROPERTIES) {
- Properties properties = new Properties();
- properties.load(reader);
-
- values = new HashMap<>();
- properties.forEach((k, v) -> {
- String key = k.toString();
- String value = v.toString();
-
- if ("true".equals(value) || "false".equals(value)) {
- values.put(key, Boolean.parseBoolean(value));
- } else if (value.matches("\\d+")) {
- try {
- values.put(key, Long.parseLong(value));
- } catch (NumberFormatException e) {
- values.put(key, value);
- }
- } else {
- values.put(key, value);
- }
- });
+ PropertiesFileReader propertiesReader = new PropertiesFileReader(reader);
+ values = propertiesReader.readProperties();
} else if (type == FileType.YAML) {
YamlConfiguration config = YamlConfiguration.loadConfiguration(reader);
values = config.getValues(false);
@@ -129,6 +110,7 @@ public class BukkitServerConfigProvider extends AbstractServerConfigProvider
.add("rcon.password")
+ .add("level-seed")
.addAll(getTimingsHiddenConfigs())
.addAll(getSystemPropertyList("spark.serverconfigs.hiddenpaths"));
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesFileReader.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesFileReader.java
new file mode 100644
index 0000000..8fc89d7
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesFileReader.java
@@ -0,0 +1,64 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck)
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <https://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform.serverconfig;
+
+import java.io.FilterReader;
+import java.io.IOException;
+import java.io.Reader;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Properties;
+
+/**
+ * A {@link Reader} that can parse a .properties file.
+ */
+public class PropertiesFileReader extends FilterReader {
+
+ public PropertiesFileReader(Reader in) {
+ super(in);
+ }
+
+ public Map<String, Object> readProperties() throws IOException {
+ Properties properties = new Properties();
+ properties.load(this);
+
+ Map<String, Object> values = new HashMap<>();
+ properties.forEach((k, v) -> {
+ String key = k.toString();
+ String value = v.toString();
+
+ if ("true".equals(value) || "false".equals(value)) {
+ values.put(key, Boolean.parseBoolean(value));
+ } else if (value.matches("\\d+")) {
+ try {
+ values.put(key, Long.parseLong(value));
+ } catch (NumberFormatException e) {
+ values.put(key, value);
+ }
+ } else {
+ values.put(key, value);
+ }
+ });
+
+ return values;
+ }
+
+}
--
cgit
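
[Note: the extracted PropertiesFileReader keeps the same type-coercion rules as the inlined Bukkit code it replaces: "true"/"false" become Booleans, digit-only strings become Longs, everything else stays a String. A self-contained sketch of those rules (class name hypothetical), using a StringReader instead of a real server.properties file:

    import java.io.StringReader;
    import java.util.HashMap;
    import java.util.Map;
    import java.util.Properties;

    public class PropertiesCoercionExample {
        static Map<String, Object> coerce(String content) throws Exception {
            Properties properties = new Properties();
            properties.load(new StringReader(content));

            Map<String, Object> values = new HashMap<>();
            properties.forEach((k, v) -> {
                String key = k.toString();
                String value = v.toString();
                if ("true".equals(value) || "false".equals(value)) {
                    values.put(key, Boolean.parseBoolean(value));
                } else if (value.matches("\\d+")) {
                    try {
                        values.put(key, Long.parseLong(value)); // digit-only -> Long
                    } catch (NumberFormatException e) {
                        values.put(key, value); // too large for a long
                    }
                } else {
                    values.put(key, value);
                }
            });
            return values;
        }

        public static void main(String[] args) throws Exception {
            // pvp -> Boolean, max-players -> Long, motd -> String
            System.out.println(coerce("pvp=true\nmax-players=20\nmotd=A server"));
        }
    }
]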
From 530610afda0697536bb0d103d95f8e4ebf73b7c8 Mon Sep 17 00:00:00 2001
From: Luck
Date: Tue, 22 Feb 2022 21:09:32 +0000
Subject: Update async-profiler, add support for linux/aarch64 (#181)
---
spark-common/build.gradle | 2 +-
.../common/sampler/async/AsyncProfilerAccess.java | 18 ++++++++++--------
.../spark/common/sampler/async/jfr/JfrReader.java | 18 ++++++++++++++++++
.../resources/spark/linux/aarch64/libasyncProfiler.so | Bin 0 -> 318936 bytes
.../resources/spark/linux/amd64/libasyncProfiler.so | Bin 0 -> 332431 bytes
.../main/resources/spark/linux/libasyncProfiler.so | Bin 398099 -> 0 bytes
.../main/resources/spark/macos/libasyncProfiler.so | Bin 0 -> 670944 bytes
.../main/resources/spark/macosx/libasyncProfiler.so | Bin 599568 -> 0 bytes
8 files changed, 29 insertions(+), 9 deletions(-)
create mode 100755 spark-common/src/main/resources/spark/linux/aarch64/libasyncProfiler.so
create mode 100755 spark-common/src/main/resources/spark/linux/amd64/libasyncProfiler.so
delete mode 100755 spark-common/src/main/resources/spark/linux/libasyncProfiler.so
create mode 100755 spark-common/src/main/resources/spark/macos/libasyncProfiler.so
delete mode 100755 spark-common/src/main/resources/spark/macosx/libasyncProfiler.so
(limited to 'spark-common/src')
diff --git a/spark-common/build.gradle b/spark-common/build.gradle
index aa0f409..554eec2 100644
--- a/spark-common/build.gradle
+++ b/spark-common/build.gradle
@@ -8,7 +8,7 @@ license {
dependencies {
api project(':spark-api')
- implementation 'com.github.jvm-profiling-tools:async-profiler:v2.5'
+ implementation 'com.github.jvm-profiling-tools:async-profiler:v2.7'
implementation 'org.ow2.asm:asm:9.1'
implementation 'com.google.protobuf:protobuf-javalite:3.15.6'
implementation 'com.squareup.okhttp3:okhttp:3.14.1'
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java
index d1c8393..d642a53 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java
@@ -20,8 +20,8 @@
package me.lucko.spark.common.sampler.async;
-import com.google.common.collect.ImmutableSetMultimap;
-import com.google.common.collect.Multimap;
+import com.google.common.collect.ImmutableTable;
+import com.google.common.collect.Table;
import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.util.TemporaryFiles;
@@ -108,18 +108,20 @@ public enum AsyncProfilerAccess {
String os = System.getProperty("os.name").toLowerCase(Locale.ROOT).replace(" ", "");
String arch = System.getProperty("os.arch").toLowerCase(Locale.ROOT);
- Multimap<String, String> supported = ImmutableSetMultimap.<String, String>builder()
- .put("linux", "amd64")
- .put("macosx", "amd64")
- .put("macosx", "aarch64")
+ Table<String, String, String> supported = ImmutableTable.<String, String, String>builder()
+ .put("linux", "amd64", "linux/amd64")
+ .put("linux", "aarch64", "linux/aarch64")
+ .put("macosx", "amd64", "macos")
+ .put("macosx", "aarch64", "macos")
.build();
- if (!supported.containsEntry(os, arch)) {
+ String libPath = supported.get(os, arch);
+ if (libPath == null) {
throw new UnsupportedSystemException(os, arch);
}
// extract the profiler binary from the spark jar file
- String resource = "spark/" + os + "/libasyncProfiler.so";
+ String resource = "spark/" + libPath + "/libasyncProfiler.so";
URL profilerResource = AsyncProfilerAccess.class.getClassLoader().getResource(resource);
if (profilerResource == null) {
throw new IllegalStateException("Could not find " + resource + " in spark jar file");
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/JfrReader.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/JfrReader.java
index a705f2d..e0cc4e9 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/JfrReader.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/JfrReader.java
@@ -59,6 +59,7 @@ public class JfrReader implements Closeable {
public final Dictionary stackFrames = new Dictionary<>(); // spark
public final Map<Integer, String> frameTypes = new HashMap<>();
public final Map<Integer, String> threadStates = new HashMap<>();
+ public final Map<String, String> settings = new HashMap<>();
private int executionSample;
private int nativeMethodSample;
@@ -67,6 +68,8 @@ public class JfrReader implements Closeable {
private int allocationSample;
private int monitorEnter;
private int threadPark;
+ private int activeSetting;
+ private boolean activeSettingHasStack;
public JfrReader(Path path) throws IOException { // spark - Path instead of String
this.ch = FileChannel.open(path, StandardOpenOption.READ); // spark - Path instead of String
@@ -129,6 +132,8 @@ public class JfrReader implements Closeable {
if (cls == null || cls == ContendedLock.class) return (E) readContendedLock(false);
} else if (type == threadPark) {
if (cls == null || cls == ContendedLock.class) return (E) readContendedLock(true);
+ } else if (type == activeSetting) {
+ readActiveSetting();
}
if ((pos += size) <= buf.limit()) {
@@ -170,6 +175,17 @@ public class JfrReader implements Closeable {
return new ContendedLock(time, tid, stackTraceId, duration, classId);
}
+ private void readActiveSetting() {
+ long time = getVarlong();
+ long duration = getVarlong();
+ int tid = getVarint();
+ if (activeSettingHasStack) getVarint();
+ long id = getVarlong();
+ String name = getString();
+ String value = getString();
+ settings.put(name, value);
+ }
+
private boolean readChunk(int pos) throws IOException {
if (pos + CHUNK_HEADER_SIZE > buf.limit() || buf.getInt(pos) != CHUNK_SIGNATURE) {
throw new IOException("Not a valid JFR file");
@@ -424,6 +440,8 @@ public class JfrReader implements Closeable {
allocationSample = getTypeId("jdk.ObjectAllocationSample");
monitorEnter = getTypeId("jdk.JavaMonitorEnter");
threadPark = getTypeId("jdk.ThreadPark");
+ activeSetting = getTypeId("jdk.ActiveSetting");
+ activeSettingHasStack = activeSetting >= 0 && typesByName.get("jdk.ActiveSetting").field("stackTrace") != null;
}
private int getTypeId(String typeName) {
diff --git a/spark-common/src/main/resources/spark/linux/aarch64/libasyncProfiler.so b/spark-common/src/main/resources/spark/linux/aarch64/libasyncProfiler.so
new file mode 100755
index 0000000..bd920cd
Binary files /dev/null and b/spark-common/src/main/resources/spark/linux/aarch64/libasyncProfiler.so differ
diff --git a/spark-common/src/main/resources/spark/linux/amd64/libasyncProfiler.so b/spark-common/src/main/resources/spark/linux/amd64/libasyncProfiler.so
new file mode 100755
index 0000000..714ed4b
Binary files /dev/null and b/spark-common/src/main/resources/spark/linux/amd64/libasyncProfiler.so differ
diff --git a/spark-common/src/main/resources/spark/linux/libasyncProfiler.so b/spark-common/src/main/resources/spark/linux/libasyncProfiler.so
deleted file mode 100755
index ddee900..0000000
Binary files a/spark-common/src/main/resources/spark/linux/libasyncProfiler.so and /dev/null differ
diff --git a/spark-common/src/main/resources/spark/macos/libasyncProfiler.so b/spark-common/src/main/resources/spark/macos/libasyncProfiler.so
new file mode 100755
index 0000000..2a1019e
Binary files /dev/null and b/spark-common/src/main/resources/spark/macos/libasyncProfiler.so differ
diff --git a/spark-common/src/main/resources/spark/macosx/libasyncProfiler.so b/spark-common/src/main/resources/spark/macosx/libasyncProfiler.so
deleted file mode 100755
index 75daf6e..0000000
Binary files a/spark-common/src/main/resources/spark/macosx/libasyncProfiler.so and /dev/null differ
--
cgit
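
[Note: the Multimap-to-Table switch above exists because the bundled library path no longer follows directly from the os name: Linux needs per-architecture builds while both macOS architectures map to a single "macos" library (presumably a universal binary). A sketch of the same (os, arch) -> path lookup, assuming Guava on the classpath as spark-common already has (class name hypothetical):

    import com.google.common.collect.ImmutableTable;
    import com.google.common.collect.Table;

    import java.util.Locale;

    public class PlatformLookupExample {
        private static final Table<String, String, String> SUPPORTED =
                ImmutableTable.<String, String, String>builder()
                        .put("linux", "amd64", "linux/amd64")
                        .put("linux", "aarch64", "linux/aarch64")
                        .put("macosx", "amd64", "macos")    // shared library
                        .put("macosx", "aarch64", "macos")  // shared library
                        .build();

        public static void main(String[] args) {
            String os = System.getProperty("os.name").toLowerCase(Locale.ROOT).replace(" ", "");
            String arch = System.getProperty("os.arch").toLowerCase(Locale.ROOT);

            String libPath = SUPPORTED.get(os, arch);
            System.out.println(libPath == null
                    ? os + "/" + arch + " is unsupported"
                    : "spark/" + libPath + "/libasyncProfiler.so");
        }
    }
]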
From 2c219289bd4815a1c3987f4a0bc68c453f3f3efa Mon Sep 17 00:00:00 2001
From: Luck
Date: Wed, 16 Mar 2022 20:29:37 +0000
Subject: Show 'available' physical memory instead of 'free'
---
.../me/lucko/spark/common/monitor/cpu/CpuInfo.java | 24 +------
.../spark/common/monitor/memory/MemoryInfo.java | 67 +++++++++++++++++-
.../java/me/lucko/spark/common/util/LinuxProc.java | 79 ++++++++++++++++++++++
3 files changed, 148 insertions(+), 22 deletions(-)
create mode 100644 spark-common/src/main/java/me/lucko/spark/common/util/LinuxProc.java
(limited to 'spark-common/src')
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuInfo.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuInfo.java
index a179904..9bbe0f8 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuInfo.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuInfo.java
@@ -20,13 +20,8 @@
package me.lucko.spark.common.monitor.cpu;
-import java.io.IOException;
-import java.nio.charset.StandardCharsets;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.util.ArrayList;
-import java.util.List;
+import me.lucko.spark.common.util.LinuxProc;
+
import java.util.regex.Pattern;
/**
@@ -43,8 +38,7 @@ public enum CpuInfo {
* @return the cpu model
*/
public static String queryCpuModel() {
- List<String> cpuInfo = readFile("/proc/cpuinfo");
- for (String line : cpuInfo) {
+ for (String line : LinuxProc.CPUINFO.read()) {
String[] splitLine = SPACE_COLON_SPACE_PATTERN.split(line);
if (splitLine[0].equals("model name") || splitLine[0].equals("Processor")) {
@@ -54,16 +48,4 @@ public enum CpuInfo {
return "";
}
- private static List<String> readFile(String file) {
- Path path = Paths.get(file);
- if (Files.isReadable(path)) {
- try {
- return Files.readAllLines(path, StandardCharsets.UTF_8);
- } catch (IOException e) {
- // ignore
- }
- }
- return new ArrayList<>();
- }
-
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/MemoryInfo.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/MemoryInfo.java
index 4ed9b1c..4aa52f4 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/MemoryInfo.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/MemoryInfo.java
@@ -20,12 +20,19 @@
package me.lucko.spark.common.monitor.memory;
+import me.lucko.spark.common.util.LinuxProc;
+
import java.lang.management.ManagementFactory;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
import javax.management.JMX;
import javax.management.MBeanServer;
import javax.management.ObjectName;
+/**
+ * Utility to query information about system memory usage.
+ */
public enum MemoryInfo {
;
@@ -34,6 +41,9 @@ public enum MemoryInfo {
/** The OperatingSystemMXBean instance */
private static final OperatingSystemMXBean BEAN;
+ /** The format used by entries in /proc/meminfo */
+ private static final Pattern PROC_MEMINFO_VALUE = Pattern.compile("^(\\w+):\\s*(\\d+) kB$");
+
static {
try {
MBeanServer beanServer = ManagementFactory.getPlatformMBeanServer();
@@ -44,6 +54,8 @@ public enum MemoryInfo {
}
}
+ /* Swap */
+
public static long getUsedSwap() {
return BEAN.getTotalSwapSpaceSize() - BEAN.getFreeSwapSpaceSize();
}
@@ -52,14 +64,67 @@ public enum MemoryInfo {
return BEAN.getTotalSwapSpaceSize();
}
+ /* Physical Memory */
+
public static long getUsedPhysicalMemory() {
- return BEAN.getTotalPhysicalMemorySize() - BEAN.getFreePhysicalMemorySize();
+ return BEAN.getTotalPhysicalMemorySize() - getAvailablePhysicalMemory();
}
public static long getTotalPhysicalMemory() {
return BEAN.getTotalPhysicalMemorySize();
}
+ public static long getAvailablePhysicalMemory() {
+ boolean present = false;
+ long free = 0, buffers = 0, cached = 0, sReclaimable = 0;
+
+ for (String line : LinuxProc.MEMINFO.read()) {
+ Matcher matcher = PROC_MEMINFO_VALUE.matcher(line);
+ if (matcher.matches()) {
+ present = true;
+
+ String label = matcher.group(1);
+ long value = Long.parseLong(matcher.group(2)) * 1024; // kB -> B
+
+ // if MemAvailable is set, just return that
+ if (label.equals("MemAvailable")) {
+ return value;
+ }
+
+ // otherwise, record MemFree, Buffers, Cached and SReclaimable
+ switch (label) {
+ case "MemFree":
+ free = value;
+ break;
+ case "Buffers":
+ buffers = value;
+ break;
+ case "Cached":
+ cached = value;
+ break;
+ case "SReclaimable":
+ sReclaimable = value;
+ break;
+ }
+ }
+ }
+
+ // estimate how much is "available" - not exact but this is probably good enough.
+ // most Linux systems (assuming they have been updated in the last ~8 years) will
+ // have MemAvailable set, and we return that instead if present
+ //
+ // useful ref: https://www.linuxatemyram.com/
+ if (present) {
+ return free + buffers + cached + sReclaimable;
+ }
+
+ // fallback to what the JVM understands as "free"
+ // on non-linux systems, this is probably good enough to estimate what's available
+ return BEAN.getFreePhysicalMemorySize();
+ }
+
+ /* Virtual Memory */
+
public static long getTotalVirtualMemory() {
return BEAN.getCommittedVirtualMemorySize();
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/LinuxProc.java b/spark-common/src/main/java/me/lucko/spark/common/util/LinuxProc.java
new file mode 100644
index 0000000..0926ae7
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/LinuxProc.java
@@ -0,0 +1,79 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck)
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <https://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.util;
+
+import org.checkerframework.checker.nullness.qual.NonNull;
+import org.checkerframework.checker.nullness.qual.Nullable;
+
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.Collections;
+import java.util.List;
+
+/**
+ * Utility for reading from /proc/ on Linux systems.
+ */
+public enum LinuxProc {
+
+ /**
+ * Information about the system CPU.
+ */
+ CPUINFO("/proc/cpuinfo"),
+
+ /**
+ * Information about the system memory.
+ */
+ MEMINFO("/proc/meminfo");
+
+ private final Path path;
+
+ LinuxProc(String path) {
+ this.path = resolvePath(path);
+ }
+
+ private static @Nullable Path resolvePath(String path) {
+ try {
+ Path p = Paths.get(path);
+ if (Files.isReadable(p)) {
+ return p;
+ }
+ } catch (Exception e) {
+ // ignore
+ }
+ return null;
+ }
+
+ public @NonNull List<String> read() {
+ if (this.path != null) {
+ try {
+ return Files.readAllLines(this.path, StandardCharsets.UTF_8);
+ } catch (IOException e) {
+ // ignore
+ }
+ }
+
+ return Collections.emptyList();
+ }
+
+}
--
cgit
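
[Note: the MemAvailable fast-path above relies on a field Linux has exposed in /proc/meminfo since kernel 3.14; on older kernels the code falls back to the free + buffers + cached + SReclaimable estimate, and on non-Linux systems to the JVM's own figure. A minimal sketch of reading just that field (class name hypothetical; Linux-only by nature):

    import java.nio.charset.StandardCharsets;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;
    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class MemAvailableExample {
        private static final Pattern VALUE = Pattern.compile("^(\\w+):\\s*(\\d+) kB$");

        public static void main(String[] args) throws Exception {
            Path meminfo = Paths.get("/proc/meminfo");
            if (!Files.isReadable(meminfo)) {
                System.out.println("/proc/meminfo not readable (not Linux?)");
                return;
            }
            for (String line : Files.readAllLines(meminfo, StandardCharsets.UTF_8)) {
                Matcher matcher = VALUE.matcher(line);
                if (matcher.matches() && matcher.group(1).equals("MemAvailable")) {
                    long bytes = Long.parseLong(matcher.group(2)) * 1024; // kB -> B
                    System.out.println("MemAvailable: " + bytes + " bytes");
                    return;
                }
            }
            System.out.println("MemAvailable not present (kernel < 3.14?)");
        }
    }
]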
From 8a586bdf08f07c4379ccdb96cfb35af36d719d00 Mon Sep 17 00:00:00 2001
From: Luck
Date: Thu, 24 Mar 2022 19:05:36 +0000
Subject: Show how many threads were combined in profiler (#193)
---
.../spark/common/sampler/AbstractSampler.java | 11 +++--
.../me/lucko/spark/common/sampler/Sampler.java | 3 +-
.../lucko/spark/common/sampler/ThreadGrouper.java | 48 ++++++++++++++++++----
.../spark/common/sampler/ThreadNodeOrder.java | 11 +++--
.../sampler/aggregator/AbstractDataAggregator.java | 10 ++++-
.../common/sampler/aggregator/DataAggregator.java | 4 +-
.../spark/common/sampler/async/AsyncSampler.java | 3 +-
.../common/sampler/java/JavaDataAggregator.java | 6 +--
.../spark/common/sampler/java/JavaSampler.java | 3 +-
.../common/sampler/java/TickedDataAggregator.java | 5 +--
.../spark/common/sampler/node/ThreadNode.java | 27 +++++++++---
11 files changed, 91 insertions(+), 40 deletions(-)
(limited to 'spark-common/src')
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java
index 9ae82e8..5b236f1 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java
@@ -31,7 +31,6 @@ import me.lucko.spark.common.util.ClassSourceLookup;
import me.lucko.spark.proto.SparkSamplerProtos.SamplerData;
import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata;
-import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
@@ -127,15 +126,15 @@ public abstract class AbstractSampler implements Sampler {
proto.setMetadata(metadata);
}
- protected void writeDataToProto(SamplerData.Builder proto, DataAggregator dataAggregator, Comparator<? super Map.Entry<String, ThreadNode>> outputOrder, MergeMode mergeMode, ClassSourceLookup classSourceLookup) {
- List<Map.Entry<String, ThreadNode>> data = new ArrayList<>(dataAggregator.getData().entrySet());
+ protected void writeDataToProto(SamplerData.Builder proto, DataAggregator dataAggregator, Comparator<ThreadNode> outputOrder, MergeMode mergeMode, ClassSourceLookup classSourceLookup) {
+ List<ThreadNode> data = dataAggregator.exportData();
data.sort(outputOrder);
ClassSourceLookup.Visitor classSourceVisitor = ClassSourceLookup.createVisitor(classSourceLookup);
- for (Map.Entry<String, ThreadNode> entry : data) {
- proto.addThreads(entry.getValue().toProto(mergeMode));
- classSourceVisitor.visit(entry.getValue());
+ for (ThreadNode entry : data) {
+ proto.addThreads(entry.toProto(mergeMode));
+ classSourceVisitor.visit(entry);
}
if (classSourceVisitor.hasMappings()) {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
index d27b2fc..845043f 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
@@ -28,7 +28,6 @@ import me.lucko.spark.common.util.ClassSourceLookup;
import me.lucko.spark.proto.SparkSamplerProtos.SamplerData;
import java.util.Comparator;
-import java.util.Map;
import java.util.concurrent.CompletableFuture;
/**
@@ -68,6 +67,6 @@ public interface Sampler {
CompletableFuture getFuture();
// Methods used to export the sampler data to the web viewer.
- SamplerData toProto(SparkPlatform platform, CommandSender creator, Comparator<? super Map.Entry<String, ThreadNode>> outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup);
+ SamplerData toProto(SparkPlatform platform, CommandSender creator, Comparator<ThreadNode> outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup);
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java
index 225f768..f71ad9f 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java
@@ -22,7 +22,9 @@ package me.lucko.spark.common.sampler;
import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata;
+import java.util.Collections;
import java.util.Map;
+import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@@ -41,6 +43,11 @@ public interface ThreadGrouper {
return threadName;
}
+ @Override
+ public String getLabel(String group) {
+ return group;
+ }
+
@Override
public SamplerMetadata.DataAggregator.ThreadGrouper asProto() {
return SamplerMetadata.DataAggregator.ThreadGrouper.BY_NAME;
@@ -55,14 +62,18 @@ public interface ThreadGrouper {
* separated from the pool name with any of one or more of ' ', '-', or '#'.
*/
ThreadGrouper BY_POOL = new ThreadGrouper() {
+ private /* static */ final Pattern pattern = Pattern.compile("^(.*?)[-# ]+\\d+$");
+
+ // thread id -> group
private final Map<Long, String> cache = new ConcurrentHashMap<>();
- private final Pattern pattern = Pattern.compile("^(.*?)[-# ]+\\d+$");
+ // group -> thread ids
+ private final Map<String, Set<Long>> seen = new ConcurrentHashMap<>();
@Override
public String getGroup(long threadId, String threadName) {
- String group = this.cache.get(threadId);
- if (group != null) {
- return group;
+ String cached = this.cache.get(threadId);
+ if (cached != null) {
+ return cached;
}
Matcher matcher = this.pattern.matcher(threadName);
@@ -70,11 +81,18 @@ public interface ThreadGrouper {
return threadName;
}
- group = matcher.group(1).trim() + " (Combined)";
- this.cache.put(threadId, group); // we don't care about race conditions here
+ String group = matcher.group(1).trim();
+ this.cache.put(threadId, group);
+ this.seen.computeIfAbsent(group, g -> ConcurrentHashMap.newKeySet()).add(threadId);
return group;
}
+ @Override
+ public String getLabel(String group) {
+ int count = this.seen.getOrDefault(group, Collections.emptySet()).size();
+ return group + " (x" + count + ")";
+ }
+
@Override
public SamplerMetadata.DataAggregator.ThreadGrouper asProto() {
return SamplerMetadata.DataAggregator.ThreadGrouper.BY_POOL;
@@ -86,9 +104,17 @@ public interface ThreadGrouper {
* the name "All".
*/
ThreadGrouper AS_ONE = new ThreadGrouper() {
+ private final Set<Long> seen = ConcurrentHashMap.newKeySet();
+
@Override
public String getGroup(long threadId, String threadName) {
- return "All";
+ this.seen.add(threadId);
+ return "root";
+ }
+
+ @Override
+ public String getLabel(String group) {
+ return "All (x" + this.seen.size() + ")";
}
@Override
@@ -106,6 +132,14 @@ public interface ThreadGrouper {
*/
String getGroup(long threadId, String threadName);
+ /**
+ * Gets the label to use for a given group.
+ *
+ * @param group the group
+ * @return the label
+ */
+ String getLabel(String group);
+
SamplerMetadata.DataAggregator.ThreadGrouper asProto();
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadNodeOrder.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadNodeOrder.java
index 4fa8ff4..adcedcd 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadNodeOrder.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadNodeOrder.java
@@ -23,20 +23,19 @@ package me.lucko.spark.common.sampler;
import me.lucko.spark.common.sampler.node.ThreadNode;
import java.util.Comparator;
-import java.util.Map;
/**
* Methods of ordering {@link ThreadNode}s in the output data.
*/
-public enum ThreadNodeOrder implements Comparator<Map.Entry<String, ThreadNode>> {
+public enum ThreadNodeOrder implements Comparator<ThreadNode> {
/**
* Order by the name of the thread (alphabetically)
*/
BY_NAME {
@Override
- public int compare(Map.Entry<String, ThreadNode> o1, Map.Entry<String, ThreadNode> o2) {
- return o1.getKey().compareTo(o2.getKey());
+ public int compare(ThreadNode o1, ThreadNode o2) {
+ return o1.getThreadLabel().compareTo(o2.getThreadLabel());
}
},
@@ -45,8 +44,8 @@ public enum ThreadNodeOrder implements Comparator<Map.Entry<String, ThreadNode>>
*/
BY_TIME {
@Override
- public int compare(Map.Entry<String, ThreadNode> o1, Map.Entry<String, ThreadNode> o2) {
- return -Double.compare(o1.getValue().getTotalTime(), o2.getValue().getTotalTime());
+ public int compare(ThreadNode o1, ThreadNode o2) {
+ return -Double.compare(o1.getTotalTime(), o2.getTotalTime());
}
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/AbstractDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/AbstractDataAggregator.java
index 7640d60..ad9dee4 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/AbstractDataAggregator.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/AbstractDataAggregator.java
@@ -23,6 +23,8 @@ package me.lucko.spark.common.sampler.aggregator;
import me.lucko.spark.common.sampler.ThreadGrouper;
import me.lucko.spark.common.sampler.node.ThreadNode;
+import java.util.ArrayList;
+import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
@@ -50,7 +52,11 @@ public abstract class AbstractDataAggregator implements DataAggregator {
}
@Override
- public Map<String, ThreadNode> getData() {
- return this.threadData;
+ public List<ThreadNode> exportData() {
+ List<ThreadNode> data = new ArrayList<>(this.threadData.values());
+ for (ThreadNode node : data) {
+ node.setThreadLabel(this.threadGrouper.getLabel(node.getThreadGroup()));
+ }
+ return data;
}
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/DataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/DataAggregator.java
index 3b1d349..5590a96 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/DataAggregator.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/DataAggregator.java
@@ -23,7 +23,7 @@ package me.lucko.spark.common.sampler.aggregator;
import me.lucko.spark.common.sampler.node.ThreadNode;
import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata;
-import java.util.Map;
+import java.util.List;
/**
* Aggregates sampling data.
@@ -35,7 +35,7 @@ public interface DataAggregator {
*
* @return the output data
*/
- Map<String, ThreadNode> getData();
+ List<ThreadNode> exportData();
/**
* Gets metadata about the data aggregator instance.
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java
index 5d587a0..60c4e03 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java
@@ -42,7 +42,6 @@ import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Comparator;
import java.util.List;
-import java.util.Map;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
@@ -154,7 +153,7 @@ public class AsyncSampler extends AbstractSampler {
}
@Override
- public SamplerData toProto(SparkPlatform platform, CommandSender creator, Comparator<? super Map.Entry<String, ThreadNode>> outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup) {
+ public SamplerData toProto(SparkPlatform platform, CommandSender creator, Comparator<ThreadNode> outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup) {
SamplerData.Builder proto = SamplerData.newBuilder();
writeMetadataToProto(proto, platform, creator, comment, this.dataAggregator);
aggregateOutput();
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaDataAggregator.java
index 54d9e1c..cc530d6 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaDataAggregator.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaDataAggregator.java
@@ -27,7 +27,7 @@ import me.lucko.spark.common.sampler.node.StackTraceNode;
import me.lucko.spark.common.sampler.node.ThreadNode;
import java.lang.management.ThreadInfo;
-import java.util.Map;
+import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
@@ -86,7 +86,7 @@ public abstract class JavaDataAggregator extends AbstractDataAggregator {
}
@Override
- public Map<String, ThreadNode> getData() {
+ public List<ThreadNode> exportData() {
// wait for all pending data to be inserted
this.workerPool.shutdown();
try {
@@ -95,7 +95,7 @@ public abstract class JavaDataAggregator extends AbstractDataAggregator {
e.printStackTrace();
}
- return super.getData();
+ return super.exportData();
}
private static boolean isSleeping(ThreadInfo thread) {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java
index 2bedae6..cfa0a0f 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java
@@ -37,7 +37,6 @@ import java.lang.management.ManagementFactory;
import java.lang.management.ThreadInfo;
import java.lang.management.ThreadMXBean;
import java.util.Comparator;
-import java.util.Map;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
@@ -125,7 +124,7 @@ public class JavaSampler extends AbstractSampler implements Runnable {
}
@Override
- public SamplerData toProto(SparkPlatform platform, CommandSender creator, Comparator<? super Map.Entry<String, ThreadNode>> outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup) {
+ public SamplerData toProto(SparkPlatform platform, CommandSender creator, Comparator<ThreadNode> outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup) {
SamplerData.Builder proto = SamplerData.newBuilder();
writeMetadataToProto(proto, platform, creator, comment, this.dataAggregator);
writeDataToProto(proto, this.dataAggregator, outputOrder, mergeMode, classSourceLookup);
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java
index ac34d01..e817828 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java
@@ -29,7 +29,6 @@ import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata;
import java.lang.management.ThreadInfo;
import java.util.ArrayList;
import java.util.List;
-import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
@@ -102,13 +101,13 @@ public class TickedDataAggregator extends JavaDataAggregator {
}
@Override
- public Map<String, ThreadNode> getData() {
+ public List<ThreadNode> exportData() {
// push the current tick
synchronized (this.mutex) {
pushCurrentTick();
}
- return super.getData();
+ return super.exportData();
}
private final class TickList implements Runnable {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java
index fc56987..ed97443 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java
@@ -28,17 +28,34 @@ import me.lucko.spark.proto.SparkSamplerProtos;
public final class ThreadNode extends AbstractNode {
/**
- * The name of this thread
+ * The name of this thread / thread group
*/
- private final String threadName;
+ private final String name;
- public ThreadNode(String threadName) {
- this.threadName = threadName;
+ /**
+ * The label used to describe this thread in the viewer
+ */
+ public String label;
+
+ public ThreadNode(String name) {
+ this.name = name;
+ }
+
+ public String getThreadLabel() {
+ return this.label != null ? this.label : this.name;
+ }
+
+ public String getThreadGroup() {
+ return this.name;
+ }
+
+ public void setThreadLabel(String label) {
+ this.label = label;
}
public SparkSamplerProtos.ThreadNode toProto(MergeMode mergeMode) {
SparkSamplerProtos.ThreadNode.Builder proto = SparkSamplerProtos.ThreadNode.newBuilder()
- .setName(this.threadName)
+ .setName(getThreadLabel())
.setTime(getTotalTime());
for (StackTraceNode child : exportChildren(mergeMode)) {
--
cgit
From 4e60e856bd5c0ac5c48a5a312a263fbec3c343a0 Mon Sep 17 00:00:00 2001
From: Luck
Date: Thu, 14 Apr 2022 22:29:48 +0100
Subject: Fix names for non-combined threads
---
.../src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java | 3 +++
1 file changed, 3 insertions(+)
(limited to 'spark-common/src')
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java
index f71ad9f..9ad84df 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java
@@ -90,6 +90,9 @@ public interface ThreadGrouper {
@Override
public String getLabel(String group) {
int count = this.seen.getOrDefault(group, Collections.emptySet()).size();
+ if (count == 0) {
+ return group;
+ }
return group + " (x" + count + ")";
}
--
cgit
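
[Note: taken together, the two commits above give BY_POOL groups a "(xN)" label counting the distinct threads merged into the group, while the follow-up fix keeps the plain name for threads that never matched the pool pattern (count == 0). A condensed sketch of that grouping and labelling logic outside the sampler (class name hypothetical):

    import java.util.Collections;
    import java.util.Map;
    import java.util.Set;
    import java.util.concurrent.ConcurrentHashMap;
    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class PoolGroupingExample {
        private static final Pattern POOL = Pattern.compile("^(.*?)[-# ]+\\d+$");
        private static final Map<String, Set<Long>> SEEN = new ConcurrentHashMap<>();

        static String group(long threadId, String threadName) {
            Matcher matcher = POOL.matcher(threadName);
            if (!matcher.matches()) {
                return threadName; // not a pool thread; never recorded in SEEN
            }
            String group = matcher.group(1).trim(); // strip the trailing index
            SEEN.computeIfAbsent(group, g -> ConcurrentHashMap.newKeySet()).add(threadId);
            return group;
        }

        static String label(String group) {
            int count = SEEN.getOrDefault(group, Collections.emptySet()).size();
            return count == 0 ? group : group + " (x" + count + ")";
        }

        public static void main(String[] args) {
            group(1, "Netty IO #1");
            group(2, "Netty IO #2");
            group(3, "Server thread");
            System.out.println(label("Netty IO"));      // Netty IO (x2)
            System.out.println(label("Server thread")); // Server thread
        }
    }
]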
From ecc9ac3fdb731ef3beee2ce45ddf4b355b12745f Mon Sep 17 00:00:00 2001
From: Luck
Date: Thu, 14 Apr 2022 23:03:20 +0100
Subject: Try to read total physical memory from /proc on linux
---
.../me/lucko/spark/common/monitor/memory/MemoryInfo.java | 16 +++++++++++++++-
1 file changed, 15 insertions(+), 1 deletion(-)
(limited to 'spark-common/src')
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/MemoryInfo.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/MemoryInfo.java
index 4aa52f4..226f75b 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/MemoryInfo.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/MemoryInfo.java
@@ -67,10 +67,24 @@ public enum MemoryInfo {
/* Physical Memory */
public static long getUsedPhysicalMemory() {
- return BEAN.getTotalPhysicalMemorySize() - getAvailablePhysicalMemory();
+ return getTotalPhysicalMemory() - getAvailablePhysicalMemory();
}
public static long getTotalPhysicalMemory() {
+ // try to read from /proc/meminfo on linux systems
+ for (String line : LinuxProc.MEMINFO.read()) {
+ Matcher matcher = PROC_MEMINFO_VALUE.matcher(line);
+ if (matcher.matches()) {
+ String label = matcher.group(1);
+ long value = Long.parseLong(matcher.group(2)) * 1024; // kB -> B
+
+ if (label.equals("MemTotal")) {
+ return value;
+ }
+ }
+ }
+
+ // fallback to JVM measure
return BEAN.getTotalPhysicalMemorySize();
}
--
cgit
From 45edd6197aa777d0c746f37a813d6be4ca916694 Mon Sep 17 00:00:00 2001
From: Luck
Date: Mon, 18 Apr 2022 11:54:40 +0100
Subject: Include network interface statistics in '/spark health' command
---
.../java/me/lucko/spark/common/SparkPlatform.java | 2 +
.../spark/common/command/modules/HealthModule.java | 51 +++-
.../lucko/spark/common/monitor/net/Direction.java | 37 +++
.../monitor/net/NetworkInterfaceAverages.java | 88 +++++++
.../common/monitor/net/NetworkInterfaceInfo.java | 274 +++++++++++++++++++++
.../spark/common/monitor/net/NetworkMonitor.java | 140 +++++++++++
.../spark/common/monitor/ping/PingStatistics.java | 5 +-
.../platform/PlatformStatisticsProvider.java | 30 +--
.../serverconfig/AbstractServerConfigProvider.java | 2 +-
.../me/lucko/spark/common/util/FormatUtil.java | 23 ++
.../java/me/lucko/spark/common/util/LinuxProc.java | 7 +-
.../me/lucko/spark/common/util/RollingAverage.java | 11 +
spark-common/src/main/proto/spark/spark.proto | 22 +-
.../me/lucko/spark/sponge/Sponge7SparkPlugin.java | 2 +-
14 files changed, 667 insertions(+), 27 deletions(-)
create mode 100644 spark-common/src/main/java/me/lucko/spark/common/monitor/net/Direction.java
create mode 100644 spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkInterfaceAverages.java
create mode 100644 spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkInterfaceInfo.java
create mode 100644 spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkMonitor.java
(limited to 'spark-common/src')
diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
index a087fc9..a961925 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
@@ -40,6 +40,7 @@ import me.lucko.spark.common.command.tabcomplete.CompletionSupplier;
import me.lucko.spark.common.command.tabcomplete.TabCompleter;
import me.lucko.spark.common.monitor.cpu.CpuMonitor;
import me.lucko.spark.common.monitor.memory.GarbageCollectorStatistics;
+import me.lucko.spark.common.monitor.net.NetworkMonitor;
import me.lucko.spark.common.monitor.ping.PingStatistics;
import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
import me.lucko.spark.common.monitor.tick.TickStatistics;
@@ -166,6 +167,7 @@ public class SparkPlatform {
this.pingStatistics.start();
}
CpuMonitor.ensureMonitoring();
+ NetworkMonitor.ensureMonitoring();
// poll startup GC statistics after plugins & the world have loaded
this.plugin.executeAsync(() -> {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java
index ea4f140..ee3592a 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java
@@ -29,6 +29,9 @@ import me.lucko.spark.common.command.sender.CommandSender;
import me.lucko.spark.common.command.tabcomplete.TabCompleter;
import me.lucko.spark.common.monitor.cpu.CpuMonitor;
import me.lucko.spark.common.monitor.disk.DiskUsage;
+import me.lucko.spark.common.monitor.net.Direction;
+import me.lucko.spark.common.monitor.net.NetworkInterfaceAverages;
+import me.lucko.spark.common.monitor.net.NetworkMonitor;
import me.lucko.spark.common.monitor.ping.PingStatistics;
import me.lucko.spark.common.monitor.ping.PingSummary;
import me.lucko.spark.common.monitor.tick.TickStatistics;
@@ -45,6 +48,7 @@ import java.lang.management.MemoryType;
import java.lang.management.MemoryUsage;
import java.util.LinkedList;
import java.util.List;
+import java.util.Map;
import java.util.Set;
import java.util.function.Consumer;
@@ -81,8 +85,9 @@ public class HealthModule implements CommandModule {
consumer.accept(Command.builder()
.aliases("healthreport", "health", "ht")
.argumentUsage("memory", null)
+ .argumentUsage("network", null)
.executor(HealthModule::healthReport)
- .tabCompleter((platform, sender, arguments) -> TabCompleter.completeForOpts(arguments, "--memory"))
+ .tabCompleter((platform, sender, arguments) -> TabCompleter.completeForOpts(arguments, "--memory", "--network"))
.build()
);
}
@@ -197,6 +202,8 @@ public class HealthModule implements CommandModule {
addDetailedMemoryStats(report, memoryMXBean);
}
+ addNetworkStats(report, arguments.boolFlag("network"));
+
addDiskStats(report);
resp.reply(report);
@@ -352,6 +359,48 @@ public class HealthModule implements CommandModule {
}
}
+ private static void addNetworkStats(List<Component> report, boolean detailed) {
+ List<Component> averagesReport = new LinkedList<>();
+
+ for (Map.Entry<String, NetworkInterfaceAverages> ent : NetworkMonitor.systemAverages().entrySet()) {
+ String interfaceName = ent.getKey();
+ NetworkInterfaceAverages averages = ent.getValue();
+
+ for (Direction direction : Direction.values()) {
+ long bytesPerSec = (long) averages.bytesPerSecond(direction).mean();
+ long packetsPerSec = (long) averages.packetsPerSecond(direction).mean();
+
+ if (detailed || bytesPerSec > 0 || packetsPerSec > 0) {
+ averagesReport.add(text()
+ .color(GRAY)
+ .content(" ")
+ .append(FormatUtil.formatBytes(bytesPerSec, GREEN, "/s"))
+ .append(text(" / "))
+ .append(text(String.format("%,d", packetsPerSec), WHITE))
+ .append(text(" pps "))
+ .append(text().color(DARK_GRAY)
+ .append(text('('))
+ .append(text(interfaceName + " " + direction.abbrev(), WHITE))
+ .append(text(')'))
+ )
+ .build()
+ );
+ }
+ }
+ }
+
+ if (!averagesReport.isEmpty()) {
+ report.add(text()
+ .append(text(">", DARK_GRAY, BOLD))
+ .append(space())
+ .append(text("Network usage: (system, last 15m)", GOLD))
+ .build()
+ );
+ report.addAll(averagesReport);
+ report.add(empty());
+ }
+ }
+
private static void addDiskStats(List<Component> report) {
long total = DiskUsage.getTotal();
long used = DiskUsage.getUsed();
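The rows assembled in addNetworkStats above are adventure Components built via static imports (text(), NamedTextColor constants, etc.). A minimal sketch of one such row in isolation, with a hypothetical interface name and a hard-coded packet count:

    import net.kyori.adventure.text.Component;

    import static net.kyori.adventure.text.Component.text;
    import static net.kyori.adventure.text.format.NamedTextColor.DARK_GRAY;
    import static net.kyori.adventure.text.format.NamedTextColor.GRAY;
    import static net.kyori.adventure.text.format.NamedTextColor.WHITE;

    class NetworkRowExample {
        static Component row() {
            // renders roughly as: "    1,234 pps (eth0 rx)"
            return text()
                    .color(GRAY)
                    .content("    ")
                    .append(text(String.format("%,d", 1234L), WHITE))
                    .append(text(" pps "))
                    .append(text().color(DARK_GRAY)
                            .append(text('('))
                            .append(text("eth0 rx", WHITE))
                            .append(text(')'))
                    )
                    .build();
        }
    }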
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/net/Direction.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/net/Direction.java
new file mode 100644
index 0000000..d4d11ff
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/net/Direction.java
@@ -0,0 +1,37 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck)
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.monitor.net;
+
+public enum Direction {
+
+ RECEIVE("rx"),
+ TRANSMIT("tx");
+
+ private final String abbreviation;
+
+ Direction(String abbreviation) {
+ this.abbreviation = abbreviation;
+ }
+
+ public String abbrev() {
+ return this.abbreviation;
+ }
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkInterfaceAverages.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkInterfaceAverages.java
new file mode 100644
index 0000000..0ce0639
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkInterfaceAverages.java
@@ -0,0 +1,88 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck)
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.monitor.net;
+
+import me.lucko.spark.common.util.RollingAverage;
+
+import java.math.BigDecimal;
+
+public final class NetworkInterfaceAverages {
+ private final RollingAverage rxBytesPerSecond;
+ private final RollingAverage txBytesPerSecond;
+ private final RollingAverage rxPacketsPerSecond;
+ private final RollingAverage txPacketsPerSecond;
+
+ NetworkInterfaceAverages(int windowSize) {
+ this.rxBytesPerSecond = new RollingAverage(windowSize);
+ this.txBytesPerSecond = new RollingAverage(windowSize);
+ this.rxPacketsPerSecond = new RollingAverage(windowSize);
+ this.txPacketsPerSecond = new RollingAverage(windowSize);
+ }
+
+ void accept(NetworkInterfaceInfo info, RateCalculator rateCalculator) {
+ this.rxBytesPerSecond.add(rateCalculator.calculate(info.getReceivedBytes()));
+ this.txBytesPerSecond.add(rateCalculator.calculate(info.getTransmittedBytes()));
+ this.rxPacketsPerSecond.add(rateCalculator.calculate(info.getReceivedPackets()));
+ this.txPacketsPerSecond.add(rateCalculator.calculate(info.getTransmittedPackets()));
+ }
+
+ interface RateCalculator {
+ BigDecimal calculate(long value);
+ }
+
+ public RollingAverage bytesPerSecond(Direction direction) {
+ switch (direction) {
+ case RECEIVE:
+ return rxBytesPerSecond();
+ case TRANSMIT:
+ return txBytesPerSecond();
+ default:
+ throw new AssertionError();
+ }
+ }
+
+ public RollingAverage packetsPerSecond(Direction direction) {
+ switch (direction) {
+ case RECEIVE:
+ return rxPacketsPerSecond();
+ case TRANSMIT:
+ return txPacketsPerSecond();
+ default:
+ throw new AssertionError();
+ }
+ }
+
+ public RollingAverage rxBytesPerSecond() {
+ return this.rxBytesPerSecond;
+ }
+
+ public RollingAverage rxPacketsPerSecond() {
+ return this.rxPacketsPerSecond;
+ }
+
+ public RollingAverage txBytesPerSecond() {
+ return this.txBytesPerSecond;
+ }
+
+ public RollingAverage txPacketsPerSecond() {
+ return this.txPacketsPerSecond;
+ }
+}
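RollingAverage itself is not part of this patch; conceptually it is a fixed-size window of samples exposing a mean. A rough sketch of such a window with the same add(BigDecimal)/mean() shape used above (an illustration, not spark's actual implementation):

    import java.math.BigDecimal;
    import java.math.RoundingMode;
    import java.util.ArrayDeque;
    import java.util.Queue;

    class WindowedAverage {
        private final Queue<BigDecimal> samples = new ArrayDeque<>();
        private final int windowSize;
        private BigDecimal sum = BigDecimal.ZERO;

        WindowedAverage(int windowSize) {
            this.windowSize = windowSize;
        }

        synchronized void add(BigDecimal sample) {
            this.sum = this.sum.add(sample);
            this.samples.add(sample);
            if (this.samples.size() > this.windowSize) {
                this.sum = this.sum.subtract(this.samples.poll()); // evict the oldest sample
            }
        }

        synchronized double mean() {
            if (this.samples.isEmpty()) {
                return 0;
            }
            return this.sum.divide(BigDecimal.valueOf(this.samples.size()), 30, RoundingMode.HALF_UP).doubleValue();
        }
    }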
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkInterfaceInfo.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkInterfaceInfo.java
new file mode 100644
index 0000000..6ec700b
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkInterfaceInfo.java
@@ -0,0 +1,274 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck)
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.monitor.net;
+
+import com.google.common.collect.ImmutableMap;
+
+import me.lucko.spark.common.util.LinuxProc;
+
+import org.checkerframework.checker.nullness.qual.NonNull;
+
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+import java.util.stream.IntStream;
+
+/**
+ * Exposes information/statistics about a network interface.
+ */
+public final class NetworkInterfaceInfo {
+ public static final NetworkInterfaceInfo ZERO = new NetworkInterfaceInfo("", 0, 0, 0, 0, 0, 0);
+
+ private final String name;
+ private final long rxBytes;
+ private final long rxPackets;
+ private final long rxErrors;
+ private final long txBytes;
+ private final long txPackets;
+ private final long txErrors;
+
+ public NetworkInterfaceInfo(String name, long rxBytes, long rxPackets, long rxErrors, long txBytes, long txPackets, long txErrors) {
+ this.name = name;
+ this.rxBytes = rxBytes;
+ this.rxPackets = rxPackets;
+ this.rxErrors = rxErrors;
+ this.txBytes = txBytes;
+ this.txPackets = txPackets;
+ this.txErrors = txErrors;
+ }
+
+ /**
+ * Gets the name of the network interface.
+ *
+ * @return the interface name
+ */
+ public String getName() {
+ return this.name;
+ }
+
+ /**
+ * Gets the total number of bytes of data received by the interface.
+ *
+ * @return the total received bytes
+ */
+ public long getReceivedBytes() {
+ return this.rxBytes;
+ }
+
+ /**
+ * Gets the total number of packets of data received by the interface.
+ *
+ * @return the total received packets
+ */
+ public long getReceivedPackets() {
+ return this.rxPackets;
+ }
+
+ /**
+ * Gets the total number of receive errors detected by the device driver.
+ *
+ * @return the total receive errors
+ */
+ public long getReceiveErrors() {
+ return this.rxErrors;
+ }
+
+ /**
+ * Gets the total number of bytes of data transmitted by the interface.
+ *
+ * @return the total transmitted bytes
+ */
+ public long getTransmittedBytes() {
+ return this.txBytes;
+ }
+
+ /**
+ * Gets the total number of packets of data transmitted by the interface.
+ *
+ * @return the total transmitted packets
+ */
+ public long getTransmittedPackets() {
+ return this.txPackets;
+ }
+
+ /**
+ * Gets the total number of transmit errors detected by the device driver.
+ *
+ * @return the total transmit errors
+ */
+ public long getTransmitErrors() {
+ return this.txErrors;
+ }
+
+ public long getBytes(Direction direction) {
+ switch (direction) {
+ case RECEIVE:
+ return getReceivedBytes();
+ case TRANSMIT:
+ return getTransmittedBytes();
+ default:
+ throw new AssertionError();
+ }
+ }
+
+ public long getPackets(Direction direction) {
+ switch (direction) {
+ case RECEIVE:
+ return getReceivedPackets();
+ case TRANSMIT:
+ return getTransmittedPackets();
+ default:
+ throw new AssertionError();
+ }
+ }
+
+ public boolean isZero() {
+ return this.rxBytes == 0 && this.rxPackets == 0 && this.rxErrors == 0 &&
+ this.txBytes == 0 && this.txPackets == 0 && this.txErrors == 0;
+ }
+
+ public NetworkInterfaceInfo subtract(NetworkInterfaceInfo other) {
+ if (other == ZERO || other.isZero()) {
+ return this;
+ }
+
+ return new NetworkInterfaceInfo(
+ this.name,
+ this.rxBytes - other.rxBytes,
+ this.rxPackets - other.rxPackets,
+ this.rxErrors - other.rxErrors,
+ this.txBytes - other.txBytes,
+ this.txPackets - other.txPackets,
+ this.txErrors - other.txErrors
+ );
+ }
+
+ /**
+ * Calculates the difference between two sets of readings, from which a rate of change can be derived.
+ *
+ * @param current the polled values
+ * @param previous the previously polled values
+ * @return the difference
+ */
+ public static @NonNull Map<String, NetworkInterfaceInfo> difference(Map<String, NetworkInterfaceInfo> current, Map<String, NetworkInterfaceInfo> previous) {
+ if (previous == null || previous.isEmpty()) {
+ return current;
+ }
+
+ ImmutableMap.Builder<String, NetworkInterfaceInfo> builder = ImmutableMap.builder();
+ for (NetworkInterfaceInfo netInf : current.values()) {
+ String name = netInf.getName();
+ builder.put(name, netInf.subtract(previous.getOrDefault(name, ZERO)));
+ }
+ return builder.build();
+ }
+
+ /**
+ * Queries the network interface statistics for the system.
+ *
+ * <p>Returns an empty {@link Map} if no statistics could be gathered.</p>
+ *
+ * @return the system net stats
+ */
+ public static @NonNull Map<String, NetworkInterfaceInfo> pollSystem() {
+ try {
+ List<String> output = LinuxProc.NET_DEV.read();
+ return read(output);
+ } catch (Exception e) {
+ return Collections.emptyMap();
+ }
+ }
+
+ private static final Pattern PROC_NET_DEV_PATTERN = Pattern.compile("^\\s+(\\w+):([\\d\\s]+)$");
+
+ private static @NonNull Map<String, NetworkInterfaceInfo> read(List<String> output) {
+ // Inter-| Receive | Transmit
+ // face |bytes packets errs drop fifo frame compressed multicast|bytes packets errs drop fifo colls carrier compressed
+ // lo: 2776770 11307 0 0 0 0 0 0 2776770 11307 0 0 0 0 0 0
+ // eth0: 1215645 2751 0 0 0 0 0 0 1782404 4324 0 0 0 427 0 0
+ // ppp0: 1622270 5552 1 0 0 0 0 0 354130 5669 0 0 0 0 0 0
+ // tap0: 7714 81 0 0 0 0 0 0 7714 81 0 0 0 0 0 0
+
+ if (output.size() < 3) {
+ // no data
+ return Collections.emptyMap();
+ }
+
+ String header = output.get(1);
+ String[] categories = header.split("\\|");
+ if (categories.length != 3) {
+ // unknown format
+ return Collections.emptyMap();
+ }
+
+ List<String> rxFields = Arrays.asList(categories[1].trim().split("\\s+"));
+ List<String> txFields = Arrays.asList(categories[2].trim().split("\\s+"));
+
+ int rxFieldsLength = rxFields.size();
+ int txFieldsLength = txFields.size();
+
+ int fieldRxBytes = rxFields.indexOf("bytes");
+ int fieldRxPackets = rxFields.indexOf("packets");
+ int fieldRxErrors = rxFields.indexOf("errs");
+
+ int fieldTxBytes = rxFieldsLength + txFields.indexOf("bytes");
+ int fieldTxPackets = rxFieldsLength + txFields.indexOf("packets");
+ int fieldTxErrors = rxFieldsLength + txFields.indexOf("errs");
+
+ int expectedFields = rxFieldsLength + txFieldsLength;
+
+ if (IntStream.of(fieldRxBytes, fieldRxPackets, fieldRxErrors, fieldTxBytes, fieldTxPackets, fieldTxErrors).anyMatch(i -> i == -1)) {
+ // missing required fields
+ return Collections.emptyMap();
+ }
+
+ ImmutableMap.Builder<String, NetworkInterfaceInfo> builder = ImmutableMap.builder();
+
+ for (String line : output.subList(2, output.size())) {
+ Matcher matcher = PROC_NET_DEV_PATTERN.matcher(line);
+ if (matcher.matches()) {
+ String interfaceName = matcher.group(1);
+ String[] stringValues = matcher.group(2).trim().split("\\s+");
+
+ if (stringValues.length != expectedFields) {
+ continue;
+ }
+
+ long[] values = Arrays.stream(stringValues).mapToLong(Long::parseLong).toArray();
+ builder.put(interfaceName, new NetworkInterfaceInfo(
+ interfaceName,
+ values[fieldRxBytes],
+ values[fieldRxPackets],
+ values[fieldRxErrors],
+ values[fieldTxBytes],
+ values[fieldTxPackets],
+ values[fieldTxErrors]
+ ));
+ }
+ }
+
+ return builder.build();
+ }
+
+}
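Taken together, the intended flow is: poll /proc/net/dev, subtract the previous reading with difference(), and divide by the elapsed time to obtain per-second rates. A hedged usage sketch built only on the methods defined above (the poller class itself is hypothetical; a real monitor would also guard against a zero elapsed interval):

    import java.math.BigDecimal;
    import java.math.RoundingMode;
    import java.util.Map;

    class RatePoller {
        private Map<String, NetworkInterfaceInfo> previous = NetworkInterfaceInfo.pollSystem();
        private long lastPollMillis = System.currentTimeMillis();

        void poll() {
            Map<String, NetworkInterfaceInfo> current = NetworkInterfaceInfo.pollSystem();
            long now = System.currentTimeMillis();
            BigDecimal elapsedSeconds = BigDecimal.valueOf((now - this.lastPollMillis) / 1000.0);

            // counter deltas since the last poll, per interface
            Map<String, NetworkInterfaceInfo> diff = NetworkInterfaceInfo.difference(current, this.previous);
            for (NetworkInterfaceInfo info : diff.values()) {
                BigDecimal rxBytesPerSecond = BigDecimal.valueOf(info.getReceivedBytes())
                        .divide(elapsedSeconds, 10, RoundingMode.HALF_UP);
                System.out.println(info.getName() + " rx: " + rxBytesPerSecond + " B/s");
            }

            this.previous = current;
            this.lastPollMillis = now;
        }
    }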
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkMonitor.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkMonitor.java
new file mode 100644
index 0000000..dadd4e5
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkMonitor.java
@@ -0,0 +1,140 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck)
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.monitor.net;
+
+import me.lucko.spark.common.monitor.MonitoringExecutor;
+
+import java.math.BigDecimal;
+import java.math.RoundingMode;
+import java.util.Collections;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicReference;
+import java.util.function.Supplier;
+import java.util.regex.Pattern;
+
+/**
+ * Exposes and monitors the system/process network usage.
+ */
+public enum NetworkMonitor {
+ ;
+
+ // Latest readings
+ private static final AtomicReference<Map<String, NetworkInterfaceInfo>>