Diffstat (limited to 'spark-common/src/main')
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java | 87
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java | 22
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/api/GarbageCollectorInfo.java | 6
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/command/CommandResponseHandler.java | 14
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java | 322
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java | 87
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java | 6
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/command/tabcomplete/CompletionSupplier.java | 27
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/command/tabcomplete/TabCompleter.java | 27
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java | 31
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/monitor/MonitoringExecutor.java | 36
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuInfo.java | 51
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuMonitor.java | 12
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/monitor/disk/DiskUsage.java | 71
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/monitor/memory/GarbageCollectorStatistics.java | 10
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/monitor/memory/MemoryInfo.java | 153
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/monitor/net/Direction.java | 37
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkInterfaceAverages.java | 88
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkInterfaceInfo.java | 274
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkMonitor.java | 141
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PingStatistics.java | 149
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PingSummary.java | 81
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PlayerPingProvider.java | 40
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TickStatistics.java | 10
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/platform/AbstractPlatformInfo.java | 17
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/platform/PlatformInfo.java | 38
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java | 188
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java | 136
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesFileReader.java | 64
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java | 59
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java | 77
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java | 7
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java | 3
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java | 53
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadNodeOrder.java | 11
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/AbstractDataAggregator.java | 10
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/DataAggregator.java | 6
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncDataAggregator.java | 2
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java | 80
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java | 53
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/JfrReader.java | 18
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaDataAggregator.java | 6
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java | 41
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/java/SimpleDataAggregator.java | 2
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java | 7
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java | 1
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java | 7
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java | 33
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/tick/AbstractTickHook.java | 4
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/tick/AbstractTickReporter.java | 4
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/util/BytebinClient.java | 27
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/util/ClassSourceLookup.java | 25
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/util/Compression.java | 100
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/util/FormatUtil.java | 29
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/util/LinuxProc.java | 84
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/util/RollingAverage.java | 17
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/util/SparkThreadFactory.java | 49
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/util/StatisticFormatter.java | 189
-rw-r--r--  spark-common/src/main/proto/spark/spark.proto | 192
-rw-r--r--  spark-common/src/main/proto/spark/spark_heap.proto | 27
-rw-r--r--  spark-common/src/main/proto/spark/spark_sampler.proto | 73
-rwxr-xr-x  spark-common/src/main/resources/linux/libasyncProfiler.so | bin 398099 -> 0 bytes
-rwxr-xr-x  spark-common/src/main/resources/macosx/libasyncProfiler.so | bin 599568 -> 0 bytes
-rwxr-xr-x  spark-common/src/main/resources/spark/linux/aarch64/libasyncProfiler.so | bin 0 -> 328432 bytes
-rwxr-xr-x  spark-common/src/main/resources/spark/linux/amd64/libasyncProfiler.so | bin 0 -> 342239 bytes
-rwxr-xr-x  spark-common/src/main/resources/spark/macos/libasyncProfiler.so | bin 0 -> 688400 bytes
66 files changed, 2911 insertions, 610 deletions
diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
index 57f8732..0ef4556 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
@@ -40,7 +40,11 @@ import me.lucko.spark.common.command.tabcomplete.CompletionSupplier;
import me.lucko.spark.common.command.tabcomplete.TabCompleter;
import me.lucko.spark.common.monitor.cpu.CpuMonitor;
import me.lucko.spark.common.monitor.memory.GarbageCollectorStatistics;
+import me.lucko.spark.common.monitor.net.NetworkMonitor;
+import me.lucko.spark.common.monitor.ping.PingStatistics;
+import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
import me.lucko.spark.common.monitor.tick.TickStatistics;
+import me.lucko.spark.common.platform.PlatformStatisticsProvider;
import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.common.tick.TickReporter;
import me.lucko.spark.common.util.BytebinClient;
@@ -63,7 +67,9 @@ import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.ReentrantLock;
+import java.util.logging.Level;
import java.util.stream.Collectors;
import static net.kyori.adventure.text.Component.space;
@@ -89,6 +95,7 @@ public class SparkPlatform {
private final String viewerUrl;
private final OkHttpClient httpClient;
private final BytebinClient bytebinClient;
+ private final boolean disableResponseBroadcast;
private final List<CommandModule> commandModules;
private final List<Command> commands;
private final ReentrantLock commandExecuteLock = new ReentrantLock(true);
@@ -96,6 +103,8 @@ public class SparkPlatform {
private final TickHook tickHook;
private final TickReporter tickReporter;
private final TickStatistics tickStatistics;
+ private final PingStatistics pingStatistics;
+ private final PlatformStatisticsProvider statisticsProvider;
private Map<String, GarbageCollectorStatistics> startupGcStatistics = ImmutableMap.of();
private long serverNormalOperationStartTime;
private final AtomicBoolean enabled = new AtomicBoolean(false);
@@ -111,6 +120,8 @@ public class SparkPlatform {
this.httpClient = new OkHttpClient();
this.bytebinClient = new BytebinClient(this.httpClient, bytebinUrl, "spark-plugin");
+ this.disableResponseBroadcast = this.configuration.getBoolean("disableResponseBroadcast", false);
+
this.commandModules = ImmutableList.of(
new SamplerModule(),
new HealthModule(),
@@ -131,7 +142,12 @@ public class SparkPlatform {
this.tickHook = plugin.createTickHook();
this.tickReporter = plugin.createTickReporter();
- this.tickStatistics = this.tickHook != null ? new TickStatistics() : null;
+ this.tickStatistics = this.tickHook != null || this.tickReporter != null ? new TickStatistics() : null;
+
+ PlayerPingProvider pingProvider = plugin.createPlayerPingProvider();
+ this.pingStatistics = pingProvider != null ? new PingStatistics(pingProvider) : null;
+
+ this.statisticsProvider = new PlatformStatisticsProvider(this);
}
public void enable() {
@@ -147,7 +163,11 @@ public class SparkPlatform {
this.tickReporter.addCallback(this.tickStatistics);
this.tickReporter.start();
}
+ if (this.pingStatistics != null) {
+ this.pingStatistics.start();
+ }
CpuMonitor.ensureMonitoring();
+ NetworkMonitor.ensureMonitoring();
// poll startup GC statistics after plugins & the world have loaded
this.plugin.executeAsync(() -> {
@@ -167,6 +187,9 @@ public class SparkPlatform {
if (this.tickReporter != null) {
this.tickReporter.close();
}
+ if (this.pingStatistics != null) {
+ this.pingStatistics.close();
+ }
for (CommandModule module : this.commandModules) {
module.close();
@@ -198,6 +221,10 @@ public class SparkPlatform {
return this.bytebinClient;
}
+ public boolean shouldBroadcastResponse() {
+ return !this.disableResponseBroadcast;
+ }
+
public List<Command> getCommands() {
return this.commands;
}
@@ -214,6 +241,10 @@ public class SparkPlatform {
return this.tickReporter;
}
+ public PlatformStatisticsProvider getStatisticsProvider() {
+ return this.statisticsProvider;
+ }
+
public ClassSourceLookup createClassSourceLookup() {
return this.plugin.createClassSourceLookup();
}
@@ -222,6 +253,10 @@ public class SparkPlatform {
return this.tickStatistics;
}
+ public PingStatistics getPingStatistics() {
+ return this.pingStatistics;
+ }
+
public Map<String, GarbageCollectorStatistics> getStartupGcStatistics() {
return this.startupGcStatistics;
}
@@ -255,12 +290,62 @@ public class SparkPlatform {
}
public void executeCommand(CommandSender sender, String[] args) {
+ AtomicReference<Thread> executorThread = new AtomicReference<>();
+ AtomicReference<Thread> timeoutThread = new AtomicReference<>();
+ AtomicBoolean completed = new AtomicBoolean(false);
+
+ // execute the command
this.plugin.executeAsync(() -> {
+ executorThread.set(Thread.currentThread());
this.commandExecuteLock.lock();
try {
executeCommand0(sender, args);
+ } catch (Exception e) {
+ this.plugin.log(Level.SEVERE, "Exception occurred whilst executing a spark command");
+ e.printStackTrace();
} finally {
this.commandExecuteLock.unlock();
+ executorThread.set(null);
+ completed.set(true);
+
+ Thread timeout = timeoutThread.get();
+ if (timeout != null) {
+ timeout.interrupt();
+ }
+ }
+ });
+
+ // schedule a task to detect timeouts
+ this.plugin.executeAsync(() -> {
+ timeoutThread.set(Thread.currentThread());
+ try {
+ for (int i = 1; i <= 3; i++) {
+ try {
+ Thread.sleep(5000);
+ } catch (InterruptedException e) {
+ // ignore
+ }
+
+ if (completed.get()) {
+ return;
+ }
+
+ Thread executor = executorThread.get();
+ if (executor == null) {
+ getPlugin().log(Level.WARNING, "A command execution has not completed after " +
+ (i * 5) + " seconds but there is no executor present. Perhaps the executor shut down?");
+
+ } else {
+ String stackTrace = Arrays.stream(executor.getStackTrace())
+ .map(el -> " " + el.toString())
+ .collect(Collectors.joining("\n"));
+
+ getPlugin().log(Level.WARNING, "A command execution has not completed after " +
+ (i * 5) + " seconds, it might be stuck. Trace: \n" + stackTrace);
+ }
+ }
+ } finally {
+ timeoutThread.set(null);
}
});
}
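
The watchdog added to executeCommand above is worth reading in isolation: one async task runs the command and records its thread, a second polls every five seconds (up to three times) and, if the command has not completed, logs the executor's stack trace so stuck commands can be diagnosed. A minimal standalone sketch of the same pattern, using only plain java.util.concurrent (class and method names here are illustrative, not spark API):

    import java.util.Arrays;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.atomic.AtomicBoolean;
    import java.util.concurrent.atomic.AtomicReference;
    import java.util.stream.Collectors;

    public class CommandWatchdog {
        private final ExecutorService pool = Executors.newCachedThreadPool();

        public void run(Runnable command) {
            AtomicReference<Thread> executor = new AtomicReference<>();
            AtomicBoolean completed = new AtomicBoolean(false);

            // task 1: run the command, remembering which thread executes it
            pool.execute(() -> {
                executor.set(Thread.currentThread());
                try {
                    command.run();
                } finally {
                    completed.set(true);
                }
            });

            // task 2: poll for completion; dump the executor's stack if stuck
            pool.execute(() -> {
                for (int i = 1; i <= 3; i++) {
                    try {
                        Thread.sleep(5000);
                    } catch (InterruptedException e) {
                        return;
                    }
                    if (completed.get()) {
                        return;
                    }
                    Thread t = executor.get();
                    if (t != null) {
                        String trace = Arrays.stream(t.getStackTrace())
                                .map(el -> "  " + el)
                                .collect(Collectors.joining("\n"));
                        System.err.println("still running after " + (i * 5) + "s:\n" + trace);
                    }
                }
            });
        }
    }

The spark version additionally interrupts the timeout thread once the command finishes, so the watchdog exits early instead of sleeping out its full schedule.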
diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java
index f312916..b817df1 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java
@@ -22,7 +22,9 @@ package me.lucko.spark.common;
import me.lucko.spark.api.Spark;
import me.lucko.spark.common.command.sender.CommandSender;
+import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
import me.lucko.spark.common.platform.PlatformInfo;
+import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider;
import me.lucko.spark.common.sampler.ThreadDumper;
import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.common.tick.TickReporter;
@@ -121,6 +123,26 @@ public interface SparkPlugin {
}
/**
+ * Creates a player ping provider function.
+ *
+ * <p>Returns {@code null} if the platform does not support querying player pings.</p>
+ *
+ * @return the player ping provider function
+ */
+ default PlayerPingProvider createPlayerPingProvider() {
+ return null;
+ }
+
+ /**
+ * Creates a server config provider.
+ *
+ * @return the server config provider function
+ */
+ default ServerConfigProvider createServerConfigProvider() {
+ return ServerConfigProvider.NO_OP;
+ }
+
+ /**
* Gets information for the platform.
*
* @return information about the platform
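
The two new default methods give each platform an opt-in hook. A hedged sketch of an override inside a Bukkit-flavoured SparkPlugin implementation; it assumes PlayerPingProvider (listed in the diffstat but not shown here) is a functional interface whose single method returns a map of online player name to ping in milliseconds, and that Player#getPing() is available (modern Bukkit/Paper):

    // Sketch only: PlayerPingProvider's exact signature is an assumption.
    @Override
    public PlayerPingProvider createPlayerPingProvider() {
        return () -> {
            Map<String, Integer> pings = new HashMap<>();
            for (Player player : Bukkit.getOnlinePlayers()) {
                pings.put(player.getName(), player.getPing());
            }
            return pings;
        };
    }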
diff --git a/spark-common/src/main/java/me/lucko/spark/common/api/GarbageCollectorInfo.java b/spark-common/src/main/java/me/lucko/spark/common/api/GarbageCollectorInfo.java
index 8d289aa..fc14c67 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/api/GarbageCollectorInfo.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/api/GarbageCollectorInfo.java
@@ -36,10 +36,8 @@ public class GarbageCollectorInfo implements GarbageCollector {
this.name = name;
this.totalCollections = stats.getCollectionCount();
this.totalTime = stats.getCollectionTime();
-
- double totalTimeDouble = this.totalTime;
- this.averageTime = this.totalCollections == 0 ? 0 : totalTimeDouble / this.totalCollections;
- this.averageFrequency = this.totalCollections == 0 ? 0 : (long) ((serverUptime - totalTimeDouble) / this.totalCollections);
+ this.averageTime = stats.getAverageCollectionTime();
+ this.averageFrequency = stats.getAverageCollectionFrequency(serverUptime);
}
@Override
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/CommandResponseHandler.java b/spark-common/src/main/java/me/lucko/spark/common/command/CommandResponseHandler.java
index a9e2229..d1481bd 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/CommandResponseHandler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/CommandResponseHandler.java
@@ -88,12 +88,20 @@ public class CommandResponseHandler {
}
public void broadcast(Component message) {
- allSenders(sender -> sender.sendMessage(message));
+ if (this.platform.shouldBroadcastResponse()) {
+ allSenders(sender -> sender.sendMessage(message));
+ } else {
+ reply(message);
+ }
}
public void broadcast(Iterable<Component> message) {
- Component joinedMsg = Component.join(JoinConfiguration.separator(Component.newline()), message);
- allSenders(sender -> sender.sendMessage(joinedMsg));
+ if (this.platform.shouldBroadcastResponse()) {
+ Component joinedMsg = Component.join(JoinConfiguration.separator(Component.newline()), message);
+ allSenders(sender -> sender.sendMessage(joinedMsg));
+ } else {
+ reply(message);
+ }
}
public void replyPrefixed(Component message) {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java
index 51fa905..16eadc8 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java
@@ -20,8 +20,6 @@
package me.lucko.spark.common.command.modules;
-import com.google.common.base.Strings;
-
import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.command.Arguments;
import me.lucko.spark.common.command.Command;
@@ -30,25 +28,29 @@ import me.lucko.spark.common.command.CommandResponseHandler;
import me.lucko.spark.common.command.sender.CommandSender;
import me.lucko.spark.common.command.tabcomplete.TabCompleter;
import me.lucko.spark.common.monitor.cpu.CpuMonitor;
+import me.lucko.spark.common.monitor.disk.DiskUsage;
+import me.lucko.spark.common.monitor.net.Direction;
+import me.lucko.spark.common.monitor.net.NetworkInterfaceAverages;
+import me.lucko.spark.common.monitor.net.NetworkMonitor;
+import me.lucko.spark.common.monitor.ping.PingStatistics;
+import me.lucko.spark.common.monitor.ping.PingSummary;
import me.lucko.spark.common.monitor.tick.TickStatistics;
import me.lucko.spark.common.util.FormatUtil;
import me.lucko.spark.common.util.RollingAverage;
+import me.lucko.spark.common.util.StatisticFormatter;
import net.kyori.adventure.text.Component;
-import net.kyori.adventure.text.TextComponent;
-import net.kyori.adventure.text.format.TextColor;
-import java.io.IOException;
import java.lang.management.ManagementFactory;
import java.lang.management.MemoryMXBean;
import java.lang.management.MemoryPoolMXBean;
import java.lang.management.MemoryType;
import java.lang.management.MemoryUsage;
-import java.nio.file.FileStore;
-import java.nio.file.Files;
-import java.nio.file.Paths;
import java.util.LinkedList;
import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Set;
import java.util.function.Consumer;
import static net.kyori.adventure.text.Component.empty;
@@ -60,13 +62,10 @@ import static net.kyori.adventure.text.format.NamedTextColor.GRAY;
import static net.kyori.adventure.text.format.NamedTextColor.GREEN;
import static net.kyori.adventure.text.format.NamedTextColor.RED;
import static net.kyori.adventure.text.format.NamedTextColor.WHITE;
-import static net.kyori.adventure.text.format.NamedTextColor.YELLOW;
import static net.kyori.adventure.text.format.TextDecoration.BOLD;
public class HealthModule implements CommandModule {
- private static final double MSPT_95_PERCENTILE = 0.95d;
-
@Override
public void registerCommands(Consumer<Command> consumer) {
consumer.accept(Command.builder()
@@ -77,10 +76,19 @@ public class HealthModule implements CommandModule {
);
consumer.accept(Command.builder()
+ .aliases("ping")
+ .argumentUsage("player", "username")
+ .executor(HealthModule::ping)
+ .tabCompleter((platform, sender, arguments) -> TabCompleter.completeForOpts(arguments, "--player"))
+ .build()
+ );
+
+ consumer.accept(Command.builder()
.aliases("healthreport", "health", "ht")
.argumentUsage("memory", null)
+ .argumentUsage("network", null)
.executor(HealthModule::healthReport)
- .tabCompleter((platform, sender, arguments) -> TabCompleter.completeForOpts(arguments, "--memory"))
+ .tabCompleter((platform, sender, arguments) -> TabCompleter.completeForOpts(arguments, "--memory", "--network"))
.build()
);
}
@@ -91,11 +99,11 @@ public class HealthModule implements CommandModule {
resp.replyPrefixed(text("TPS from last 5s, 10s, 1m, 5m, 15m:"));
resp.replyPrefixed(text()
.content(" ")
- .append(formatTps(tickStatistics.tps5Sec())).append(text(", "))
- .append(formatTps(tickStatistics.tps10Sec())).append(text(", "))
- .append(formatTps(tickStatistics.tps1Min())).append(text(", "))
- .append(formatTps(tickStatistics.tps5Min())).append(text(", "))
- .append(formatTps(tickStatistics.tps15Min()))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps5Sec())).append(text(", "))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps10Sec())).append(text(", "))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps1Min())).append(text(", "))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps5Min())).append(text(", "))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps15Min()))
.build()
);
resp.replyPrefixed(empty());
@@ -104,8 +112,8 @@ public class HealthModule implements CommandModule {
resp.replyPrefixed(text("Tick durations (min/med/95%ile/max ms) from last 10s, 1m:"));
resp.replyPrefixed(text()
.content(" ")
- .append(formatTickDurations(tickStatistics.duration10Sec())).append(text("; "))
- .append(formatTickDurations(tickStatistics.duration1Min()))
+ .append(StatisticFormatter.formatTickDurations(tickStatistics.duration10Sec())).append(text("; "))
+ .append(StatisticFormatter.formatTickDurations(tickStatistics.duration1Min()))
.build()
);
resp.replyPrefixed(empty());
@@ -115,22 +123,67 @@ public class HealthModule implements CommandModule {
resp.replyPrefixed(text("CPU usage from last 10s, 1m, 15m:"));
resp.replyPrefixed(text()
.content(" ")
- .append(formatCpuUsage(CpuMonitor.systemLoad10SecAvg())).append(text(", "))
- .append(formatCpuUsage(CpuMonitor.systemLoad1MinAvg())).append(text(", "))
- .append(formatCpuUsage(CpuMonitor.systemLoad15MinAvg()))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad10SecAvg())).append(text(", "))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad1MinAvg())).append(text(", "))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad15MinAvg()))
.append(text(" (system)", DARK_GRAY))
.build()
);
resp.replyPrefixed(text()
.content(" ")
- .append(formatCpuUsage(CpuMonitor.processLoad10SecAvg())).append(text(", "))
- .append(formatCpuUsage(CpuMonitor.processLoad1MinAvg())).append(text(", "))
- .append(formatCpuUsage(CpuMonitor.processLoad15MinAvg()))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad10SecAvg())).append(text(", "))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad1MinAvg())).append(text(", "))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad15MinAvg()))
.append(text(" (process)", DARK_GRAY))
.build()
);
}
+ private static void ping(SparkPlatform platform, CommandSender sender, CommandResponseHandler resp, Arguments arguments) {
+ PingStatistics pingStatistics = platform.getPingStatistics();
+ if (pingStatistics == null) {
+ resp.replyPrefixed(text("Ping data is not available on this platform."));
+ return;
+ }
+
+ // look up a specific player
+ Set<String> players = arguments.stringFlag("player");
+ if (!players.isEmpty()) {
+ for (String player : players) {
+ PingStatistics.PlayerPing playerPing = pingStatistics.query(player);
+ if (playerPing == null) {
+ resp.replyPrefixed(text("Ping data is not available for '" + player + "'."));
+ } else {
+ resp.replyPrefixed(text()
+ .content("Player ")
+ .append(text(playerPing.name(), WHITE))
+ .append(text(" has "))
+ .append(StatisticFormatter.formatPingRtt(playerPing.ping()))
+ .append(text(" ms ping."))
+ .build()
+ );
+ }
+ }
+ return;
+ }
+
+ PingSummary summary = pingStatistics.currentSummary();
+ RollingAverage average = pingStatistics.getPingAverage();
+
+ if (summary.total() == 0 && average.getSamples() == 0) {
+ resp.replyPrefixed(text("There is not enough data to show ping averages yet. Please try again later."));
+ return;
+ }
+
+ resp.replyPrefixed(text("Average Pings (min/med/95%ile/max ms) from now, last 15m:"));
+ resp.replyPrefixed(text()
+ .content(" ")
+ .append(StatisticFormatter.formatPingRtts(summary.min(), summary.median(), summary.percentile95th(), summary.max())).append(text("; "))
+ .append(StatisticFormatter.formatPingRtts(average.min(), average.median(), average.percentile95th(), average.max()))
+ .build()
+ );
+ }
+
private static void healthReport(SparkPlatform platform, CommandSender sender, CommandResponseHandler resp, Arguments arguments) {
resp.replyPrefixed(text("Generating server health report..."));
List<Component> report = new LinkedList<>();
@@ -150,11 +203,9 @@ public class HealthModule implements CommandModule {
addDetailedMemoryStats(report, memoryMXBean);
}
- try {
- addDiskStats(report);
- } catch (IOException e) {
- e.printStackTrace();
- }
+ addNetworkStats(report, arguments.boolFlag("network"));
+
+ addDiskStats(report);
resp.reply(report);
}
@@ -168,11 +219,11 @@ public class HealthModule implements CommandModule {
);
report.add(text()
.content(" ")
- .append(formatTps(tickStatistics.tps5Sec())).append(text(", "))
- .append(formatTps(tickStatistics.tps10Sec())).append(text(", "))
- .append(formatTps(tickStatistics.tps1Min())).append(text(", "))
- .append(formatTps(tickStatistics.tps5Min())).append(text(", "))
- .append(formatTps(tickStatistics.tps15Min()))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps5Sec())).append(text(", "))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps10Sec())).append(text(", "))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps1Min())).append(text(", "))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps5Min())).append(text(", "))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps15Min()))
.build()
);
report.add(empty());
@@ -186,8 +237,8 @@ public class HealthModule implements CommandModule {
);
report.add(text()
.content(" ")
- .append(formatTickDurations(tickStatistics.duration10Sec())).append(text("; "))
- .append(formatTickDurations(tickStatistics.duration1Min()))
+ .append(StatisticFormatter.formatTickDurations(tickStatistics.duration10Sec())).append(text("; "))
+ .append(StatisticFormatter.formatTickDurations(tickStatistics.duration1Min()))
.build()
);
report.add(empty());
@@ -203,17 +254,17 @@ public class HealthModule implements CommandModule {
);
report.add(text()
.content(" ")
- .append(formatCpuUsage(CpuMonitor.systemLoad10SecAvg())).append(text(", "))
- .append(formatCpuUsage(CpuMonitor.systemLoad1MinAvg())).append(text(", "))
- .append(formatCpuUsage(CpuMonitor.systemLoad15MinAvg()))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad10SecAvg())).append(text(", "))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad1MinAvg())).append(text(", "))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad15MinAvg()))
.append(text(" (system)", DARK_GRAY))
.build()
);
report.add(text()
.content(" ")
- .append(formatCpuUsage(CpuMonitor.processLoad10SecAvg())).append(text(", "))
- .append(formatCpuUsage(CpuMonitor.processLoad1MinAvg())).append(text(", "))
- .append(formatCpuUsage(CpuMonitor.processLoad15MinAvg()))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad10SecAvg())).append(text(", "))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad1MinAvg())).append(text(", "))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad15MinAvg()))
.append(text(" (process)", DARK_GRAY))
.build()
);
@@ -241,7 +292,7 @@ public class HealthModule implements CommandModule {
.append(text(")", GRAY))
.build()
);
- report.add(text().content(" ").append(generateMemoryUsageDiagram(heapUsage, 40)).build());
+ report.add(text().content(" ").append(StatisticFormatter.generateMemoryUsageDiagram(heapUsage, 60)).build());
report.add(empty());
}
@@ -292,7 +343,7 @@ public class HealthModule implements CommandModule {
.append(text(")", GRAY))
.build()
);
- report.add(text().content(" ").append(generateMemoryPoolDiagram(usage, collectionUsage, 40)).build());
+ report.add(text().content(" ").append(StatisticFormatter.generateMemoryPoolDiagram(usage, collectionUsage, 60)).build());
if (collectionUsage != null) {
report.add(text()
@@ -309,10 +360,56 @@ public class HealthModule implements CommandModule {
}
}
- private static void addDiskStats(List<Component> report) throws IOException {
- FileStore fileStore = Files.getFileStore(Paths.get("."));
- long totalSpace = fileStore.getTotalSpace();
- long usedSpace = totalSpace - fileStore.getUsableSpace();
+ private static void addNetworkStats(List<Component> report, boolean detailed) {
+ List<Component> averagesReport = new LinkedList<>();
+
+ for (Map.Entry<String, NetworkInterfaceAverages> ent : NetworkMonitor.systemAverages().entrySet()) {
+ String interfaceName = ent.getKey();
+ NetworkInterfaceAverages averages = ent.getValue();
+
+ for (Direction direction : Direction.values()) {
+ long bytesPerSec = (long) averages.bytesPerSecond(direction).mean();
+ long packetsPerSec = (long) averages.packetsPerSecond(direction).mean();
+
+ if (detailed || bytesPerSec > 0 || packetsPerSec > 0) {
+ averagesReport.add(text()
+ .color(GRAY)
+ .content(" ")
+ .append(FormatUtil.formatBytes(bytesPerSec, GREEN, "/s"))
+ .append(text(" / "))
+ .append(text(String.format(Locale.ENGLISH, "%,d", packetsPerSec), WHITE))
+ .append(text(" pps "))
+ .append(text().color(DARK_GRAY)
+ .append(text('('))
+ .append(text(interfaceName + " " + direction.abbrev(), WHITE))
+ .append(text(')'))
+ )
+ .build()
+ );
+ }
+ }
+ }
+
+ if (!averagesReport.isEmpty()) {
+ report.add(text()
+ .append(text(">", DARK_GRAY, BOLD))
+ .append(space())
+ .append(text("Network usage: (system, last 15m)", GOLD))
+ .build()
+ );
+ report.addAll(averagesReport);
+ report.add(empty());
+ }
+ }
+
+ private static void addDiskStats(List<Component> report) {
+ long total = DiskUsage.getTotal();
+ long used = DiskUsage.getUsed();
+
+ if (total == 0 || used == 0) {
+ return;
+ }
+
report.add(text()
.append(text(">", DARK_GRAY, BOLD))
.append(space())
@@ -321,138 +418,19 @@ public class HealthModule implements CommandModule {
);
report.add(text()
.content(" ")
- .append(text(FormatUtil.formatBytes(usedSpace), WHITE))
+ .append(text(FormatUtil.formatBytes(used), WHITE))
.append(space())
.append(text("/", GRAY))
.append(space())
- .append(text(FormatUtil.formatBytes(totalSpace), WHITE))
+ .append(text(FormatUtil.formatBytes(total), WHITE))
.append(text(" "))
.append(text("(", GRAY))
- .append(text(FormatUtil.percent(usedSpace, totalSpace), GREEN))
+ .append(text(FormatUtil.percent(used, total), GREEN))
.append(text(")", GRAY))
.build()
);
- report.add(text().content(" ").append(generateDiskUsageDiagram(usedSpace, totalSpace, 40)).build());
+ report.add(text().content(" ").append(StatisticFormatter.generateDiskUsageDiagram(used, total, 60)).build());
report.add(empty());
}
- public static TextComponent formatTps(double tps) {
- TextColor color;
- if (tps > 18.0) {
- color = GREEN;
- } else if (tps > 16.0) {
- color = YELLOW;
- } else {
- color = RED;
- }
-
- return text((tps > 20.0 ? "*" : "") + Math.min(Math.round(tps * 100.0) / 100.0, 20.0), color);
- }
-
- public static TextComponent formatTickDurations(RollingAverage average) {
- return text()
- .append(formatTickDuration(average.min()))
- .append(text('/', GRAY))
- .append(formatTickDuration(average.median()))
- .append(text('/', GRAY))
- .append(formatTickDuration(average.percentile(MSPT_95_PERCENTILE)))
- .append(text('/', GRAY))
- .append(formatTickDuration(average.max()))
- .build();
- }
-
- public static TextComponent formatTickDuration(double duration) {
- TextColor color;
- if (duration >= 50d) {
- color = RED;
- } else if (duration >= 40d) {
- color = YELLOW;
- } else {
- color = GREEN;
- }
-
- return text(String.format("%.1f", duration), color);
- }
-
- public static TextComponent formatCpuUsage(double usage) {
- TextColor color;
- if (usage > 0.9) {
- color = RED;
- } else if (usage > 0.65) {
- color = YELLOW;
- } else {
- color = GREEN;
- }
-
- return text(FormatUtil.percent(usage, 1d), color);
- }
-
- private static TextComponent generateMemoryUsageDiagram(MemoryUsage usage, int length) {
- double used = usage.getUsed();
- double committed = usage.getCommitted();
- double max = usage.getMax();
-
- int usedChars = (int) ((used * length) / max);
- int committedChars = (int) ((committed * length) / max);
-
- TextComponent.Builder line = text().content(Strings.repeat("/", usedChars)).color(GRAY);
- if (committedChars > usedChars) {
- line.append(text(Strings.repeat(" ", (committedChars - usedChars) - 1)));
- line.append(text("|", YELLOW));
- }
- if (length > committedChars) {
- line.append(text(Strings.repeat(" ", (length - committedChars))));
- }
-
- return text()
- .append(text("[", DARK_GRAY))
- .append(line.build())
- .append(text("]", DARK_GRAY))
- .build();
- }
-
- private static TextComponent generateMemoryPoolDiagram(MemoryUsage usage, MemoryUsage collectionUsage, int length) {
- double used = usage.getUsed();
- double collectionUsed = used;
- if (collectionUsage != null) {
- collectionUsed = collectionUsage.getUsed();
- }
- double committed = usage.getCommitted();
- double max = usage.getMax();
-
- int usedChars = (int) ((used * length) / max);
- int collectionUsedChars = (int) ((collectionUsed * length) / max);
- int committedChars = (int) ((committed * length) / max);
-
- TextComponent.Builder line = text().content(Strings.repeat("/", collectionUsedChars)).color(GRAY);
-
- if (usedChars > collectionUsedChars) {
- line.append(text("|", RED));
- line.append(text(Strings.repeat("/", (usedChars - collectionUsedChars) - 1), GRAY));
- }
- if (committedChars > usedChars) {
- line.append(text(Strings.repeat(" ", (committedChars - usedChars) - 1)));
- line.append(text("|", YELLOW));
- }
- if (length > committedChars) {
- line.append(text(Strings.repeat(" ", (length - committedChars))));
- }
-
- return text()
- .append(text("[", DARK_GRAY))
- .append(line.build())
- .append(text("]", DARK_GRAY))
- .build();
- }
-
- private static TextComponent generateDiskUsageDiagram(double used, double max, int length) {
- int usedChars = (int) ((used * length) / max);
- String line = Strings.repeat("/", usedChars) + Strings.repeat(" ", length - usedChars);
- return text()
- .append(text("[", DARK_GRAY))
- .append(text(line, GRAY))
- .append(text("]", DARK_GRAY))
- .build();
- }
-
}
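
All of the formatting helpers deleted above were relocated to the new StatisticFormatter utility (note the diagram width was also raised from 40 to 60 characters in the callers). Stripped of the adventure Component styling, the memory diagram is plain proportional scaling; a reduced sketch of the same logic:

    public class MemoryBar {
        // '/' marks used memory, '|' the committed boundary, inside a
        // fixed-width bracketed bar (the colours are omitted here)
        static String render(long used, long committed, long max, int length) {
            int usedChars = (int) ((used * length) / max);
            int committedChars = (int) ((committed * length) / max);
            StringBuilder bar = new StringBuilder("[");
            for (int i = 0; i < usedChars; i++) bar.append('/');
            if (committedChars > usedChars) {
                for (int i = 0; i < committedChars - usedChars - 1; i++) bar.append(' ');
                bar.append('|');
            }
            while (bar.length() < length + 1) bar.append(' ');
            return bar.append(']').toString();
        }

        public static void main(String[] args) {
            // e.g. 512 used, 768 committed, 1024 max -> bar half full of '/',
            // with '|' at the three-quarter mark
            System.out.println(render(512, 768, 1024, 40));
        }
    }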
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java
index 70f6c3c..1030f35 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java
@@ -30,20 +30,15 @@ import me.lucko.spark.common.command.sender.CommandSender;
import me.lucko.spark.common.command.tabcomplete.TabCompleter;
import me.lucko.spark.common.heapdump.HeapDump;
import me.lucko.spark.common.heapdump.HeapDumpSummary;
+import me.lucko.spark.common.util.Compression;
import me.lucko.spark.common.util.FormatUtil;
-import me.lucko.spark.proto.SparkProtos;
+import me.lucko.spark.proto.SparkHeapProtos;
import net.kyori.adventure.text.event.ClickEvent;
-import org.tukaani.xz.LZMA2Options;
-import org.tukaani.xz.LZMAOutputStream;
-import org.tukaani.xz.XZOutputStream;
-
import okhttp3.MediaType;
import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Iterator;
@@ -51,7 +46,6 @@ import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Consumer;
import java.util.function.LongConsumer;
-import java.util.zip.GZIPOutputStream;
import static net.kyori.adventure.text.Component.text;
import static net.kyori.adventure.text.format.NamedTextColor.GOLD;
@@ -98,7 +92,7 @@ public class HeapAnalysisModule implements CommandModule {
return;
}
- SparkProtos.HeapData output = heapDump.toProto(platform.getPlugin().getPlatformInfo(), sender);
+ SparkHeapProtos.HeapData output = heapDump.toProto(platform, sender);
boolean saveToFile = false;
if (arguments.boolFlag("save-to-file")) {
@@ -117,7 +111,7 @@ public class HeapAnalysisModule implements CommandModule {
);
platform.getActivityLog().addToLog(Activity.urlActivity(sender, System.currentTimeMillis(), "Heap dump summary", url));
- } catch (IOException e) {
+ } catch (Exception e) {
resp.broadcastPrefixed(text("An error occurred whilst uploading the data. Attempting to save to disk instead.", RED));
e.printStackTrace();
saveToFile = true;
@@ -175,11 +169,11 @@ public class HeapAnalysisModule implements CommandModule {
platform.getActivityLog().addToLog(Activity.fileActivity(sender, System.currentTimeMillis(), "Heap dump", file.toString()));
- CompressionMethod compressionMethod = null;
+ Compression compressionMethod = null;
Iterator<String> compressArgs = arguments.stringFlag("compress").iterator();
if (compressArgs.hasNext()) {
try {
- compressionMethod = CompressionMethod.valueOf(compressArgs.next().toUpperCase());
+ compressionMethod = Compression.valueOf(compressArgs.next().toUpperCase());
} catch (IllegalArgumentException e) {
// ignore
}
@@ -194,7 +188,7 @@ public class HeapAnalysisModule implements CommandModule {
}
}
- private static void heapDumpCompress(SparkPlatform platform, CommandResponseHandler resp, Path file, CompressionMethod method) throws IOException {
+ private static void heapDumpCompress(SparkPlatform platform, CommandResponseHandler resp, Path file, Compression method) throws IOException {
resp.broadcastPrefixed(text("Compressing heap dump, please wait..."));
long size = Files.size(file);
@@ -244,71 +238,4 @@ public class HeapAnalysisModule implements CommandModule {
);
}
- public enum CompressionMethod {
- GZIP {
- @Override
- public Path compress(Path file, LongConsumer progressHandler) throws IOException {
- Path compressedFile = file.getParent().resolve(file.getFileName().toString() + ".gz");
- try (InputStream in = Files.newInputStream(file)) {
- try (OutputStream out = Files.newOutputStream(compressedFile)) {
- try (GZIPOutputStream compressionOut = new GZIPOutputStream(out, 1024 * 64)) {
- copy(in, compressionOut, progressHandler);
- }
- }
- }
- return compressedFile;
- }
- },
- XZ {
- @Override
- public Path compress(Path file, LongConsumer progressHandler) throws IOException {
- Path compressedFile = file.getParent().resolve(file.getFileName().toString() + ".xz");
- try (InputStream in = Files.newInputStream(file)) {
- try (OutputStream out = Files.newOutputStream(compressedFile)) {
- try (XZOutputStream compressionOut = new XZOutputStream(out, new LZMA2Options())) {
- copy(in, compressionOut, progressHandler);
- }
- }
- }
- return compressedFile;
- }
- },
- LZMA {
- @Override
- public Path compress(Path file, LongConsumer progressHandler) throws IOException {
- Path compressedFile = file.getParent().resolve(file.getFileName().toString() + ".lzma");
- try (InputStream in = Files.newInputStream(file)) {
- try (OutputStream out = Files.newOutputStream(compressedFile)) {
- try (LZMAOutputStream compressionOut = new LZMAOutputStream(out, new LZMA2Options(), true)) {
- copy(in, compressionOut, progressHandler);
- }
- }
- }
- return compressedFile;
- }
- };
-
- public abstract Path compress(Path file, LongConsumer progressHandler) throws IOException;
-
- private static long copy(InputStream from, OutputStream to, LongConsumer progress) throws IOException {
- byte[] buf = new byte[1024 * 64];
- long total = 0;
- long iterations = 0;
- while (true) {
- int r = from.read(buf);
- if (r == -1) {
- break;
- }
- to.write(buf, 0, r);
- total += r;
-
- // report progress every 5MB
- if (iterations++ % ((1024 / 64) * 5) == 0) {
- progress.accept(total);
- }
- }
- return total;
- }
- }
-
}
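
The CompressionMethod enum was extracted into the standalone Compression utility (the new 100-line file in the diffstat) rather than deleted. Assuming the relocated enum keeps the compress(Path, LongConsumer) signature removed above, usage is unchanged; a small sketch:

    import java.nio.file.Path;
    import java.nio.file.Paths;

    public class CompressExample {
        public static void main(String[] args) throws Exception {
            Path dump = Paths.get("heap.sparkheap");
            // the copy loop reads 64 KiB at a time and fires the callback
            // every (1024 / 64) * 5 = 80 reads, i.e. roughly every 5 MiB
            Path compressed = Compression.GZIP.compress(dump,
                    total -> System.out.println((total / (1024 * 1024)) + " MiB written"));
            System.out.println("wrote " + compressed);
        }
    }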
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
index 2dd07c9..970d062 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
@@ -40,7 +40,7 @@ import me.lucko.spark.common.sampler.async.AsyncSampler;
import me.lucko.spark.common.sampler.node.MergeMode;
import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.common.util.MethodDisambiguator;
-import me.lucko.spark.proto.SparkProtos;
+import me.lucko.spark.proto.SparkSamplerProtos;
import net.kyori.adventure.text.event.ClickEvent;
@@ -305,7 +305,7 @@ public class SamplerModule implements CommandModule {
}
private void handleUpload(SparkPlatform platform, CommandResponseHandler resp, Sampler sampler, ThreadNodeOrder threadOrder, String comment, MergeMode mergeMode, boolean saveToFileFlag) {
- SparkProtos.SamplerData output = sampler.toProto(platform.getPlugin().getPlatformInfo(), resp.sender(), threadOrder, comment, mergeMode, platform.createClassSourceLookup());
+ SparkSamplerProtos.SamplerData output = sampler.toProto(platform, resp.sender(), threadOrder, comment, mergeMode, platform.createClassSourceLookup());
boolean saveToFile = false;
if (saveToFileFlag) {
@@ -324,7 +324,7 @@ public class SamplerModule implements CommandModule {
);
platform.getActivityLog().addToLog(Activity.urlActivity(resp.sender(), System.currentTimeMillis(), "Profiler", url));
- } catch (IOException e) {
+ } catch (Exception e) {
resp.broadcastPrefixed(text("An error occurred whilst uploading the results. Attempting to save to disk instead.", RED));
e.printStackTrace();
saveToFile = true;
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/tabcomplete/CompletionSupplier.java b/spark-common/src/main/java/me/lucko/spark/common/command/tabcomplete/CompletionSupplier.java
index f1a6d10..9975df5 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/tabcomplete/CompletionSupplier.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/tabcomplete/CompletionSupplier.java
@@ -1,26 +1,21 @@
/*
- * This file is part of LuckPerms, licensed under the MIT License.
+ * This file is part of spark.
*
* Copyright (c) lucko (Luck) <luck@lucko.me>
* Copyright (c) contributors
*
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
*
- * The above copyright notice and this permission notice shall be included in all
- * copies or substantial portions of the Software.
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
*
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- * SOFTWARE.
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package me.lucko.spark.common.command.tabcomplete;
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/tabcomplete/TabCompleter.java b/spark-common/src/main/java/me/lucko/spark/common/command/tabcomplete/TabCompleter.java
index d2b2622..9707f55 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/tabcomplete/TabCompleter.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/tabcomplete/TabCompleter.java
@@ -1,26 +1,21 @@
/*
- * This file is part of LuckPerms, licensed under the MIT License.
+ * This file is part of spark.
*
* Copyright (c) lucko (Luck) <luck@lucko.me>
* Copyright (c) contributors
*
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
*
- * The above copyright notice and this permission notice shall be included in all
- * copies or substantial portions of the Software.
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
*
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- * SOFTWARE.
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package me.lucko.spark.common.command.tabcomplete;
diff --git a/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java b/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java
index 34fd6c4..c0980e7 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java
@@ -20,11 +20,11 @@
package me.lucko.spark.common.heapdump;
+import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.command.sender.CommandSender;
-import me.lucko.spark.common.platform.PlatformInfo;
-import me.lucko.spark.proto.SparkProtos;
-import me.lucko.spark.proto.SparkProtos.HeapData;
-import me.lucko.spark.proto.SparkProtos.HeapEntry;
+import me.lucko.spark.proto.SparkHeapProtos.HeapData;
+import me.lucko.spark.proto.SparkHeapProtos.HeapEntry;
+import me.lucko.spark.proto.SparkHeapProtos.HeapMetadata;
import org.objectweb.asm.Type;
@@ -125,13 +125,24 @@ public final class HeapDumpSummary {
this.entries = entries;
}
- public HeapData toProto(PlatformInfo platformInfo, CommandSender creator) {
+ public HeapData toProto(SparkPlatform platform, CommandSender creator) {
+ HeapMetadata.Builder metadata = HeapMetadata.newBuilder()
+ .setPlatformMetadata(platform.getPlugin().getPlatformInfo().toData().toProto())
+ .setCreator(creator.toData().toProto());
+ try {
+ metadata.setPlatformStatistics(platform.getStatisticsProvider().getPlatformStatistics(null));
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ metadata.setSystemStatistics(platform.getStatisticsProvider().getSystemStatistics());
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+
HeapData.Builder proto = HeapData.newBuilder();
- proto.setMetadata(SparkProtos.HeapMetadata.newBuilder()
- .setPlatformMetadata(platformInfo.toData().toProto())
- .setCreator(creator.toData().toProto())
- .build()
- );
+ proto.setMetadata(metadata);
for (Entry entry : this.entries) {
proto.addEntries(entry.toProto());
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/MonitoringExecutor.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/MonitoringExecutor.java
new file mode 100644
index 0000000..635ae20
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/MonitoringExecutor.java
@@ -0,0 +1,36 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.monitor;
+
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+
+public enum MonitoringExecutor {
+ ;
+
+ /** The executor used to monitor & calculate rolling averages. */
+ public static final ScheduledExecutorService INSTANCE = Executors.newSingleThreadScheduledExecutor(r -> {
+ Thread thread = Executors.defaultThreadFactory().newThread(r);
+ thread.setName("spark-monitor");
+ thread.setDaemon(true);
+ return thread;
+ });
+}
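
This replaces the per-monitor executors (see the CpuMonitor change below): every sampler now shares one "spark-monitor" daemon thread instead of spawning its own. Scheduling a recurring poll is a one-liner; a minimal usage sketch:

    // runs every second on the shared spark-monitor daemon thread
    MonitoringExecutor.INSTANCE.scheduleAtFixedRate(
            () -> { /* poll a statistic */ }, 1, 1, java.util.concurrent.TimeUnit.SECONDS);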
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuInfo.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuInfo.java
new file mode 100644
index 0000000..9bbe0f8
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuInfo.java
@@ -0,0 +1,51 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.monitor.cpu;
+
+import me.lucko.spark.common.util.LinuxProc;
+
+import java.util.regex.Pattern;
+
+/**
+ * Small utility to query the CPU model on Linux systems.
+ */
+public enum CpuInfo {
+ ;
+
+ private static final Pattern SPACE_COLON_SPACE_PATTERN = Pattern.compile("\\s+:\\s");
+
+ /**
+ * Queries the CPU model.
+ *
+ * @return the cpu model
+ */
+ public static String queryCpuModel() {
+ for (String line : LinuxProc.CPUINFO.read()) {
+ String[] splitLine = SPACE_COLON_SPACE_PATTERN.split(line);
+
+ if (splitLine[0].equals("model name") || splitLine[0].equals("Processor")) {
+ return splitLine[1];
+ }
+ }
+ return "";
+ }
+
+}
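
For reference, a short demo of how the split pattern behaves on a typical /proc/cpuinfo line: the tab before the colon is consumed by \s+, the single space after it by \s.

    String line = "model name\t: Intel(R) Xeon(R) CPU E5-2680 v4 @ 2.40GHz";
    String[] parts = line.split("\\s+:\\s");
    // parts[0] = "model name"
    // parts[1] = "Intel(R) Xeon(R) CPU E5-2680 v4 @ 2.40GHz"

(The "Processor" key handles ARM systems, whose cpuinfo uses that label instead of "model name".)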
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuMonitor.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuMonitor.java
index 43e1f90..b4ab831 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuMonitor.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuMonitor.java
@@ -20,12 +20,11 @@
package me.lucko.spark.common.monitor.cpu;
+import me.lucko.spark.common.monitor.MonitoringExecutor;
import me.lucko.spark.common.util.RollingAverage;
import java.lang.management.ManagementFactory;
import java.math.BigDecimal;
-import java.util.concurrent.Executors;
-import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import javax.management.JMX;
@@ -42,13 +41,6 @@ public enum CpuMonitor {
private static final String OPERATING_SYSTEM_BEAN = "java.lang:type=OperatingSystem";
/** The OperatingSystemMXBean instance */
private static final OperatingSystemMXBean BEAN;
- /** The executor used to monitor & calculate rolling averages. */
- private static final ScheduledExecutorService EXECUTOR = Executors.newSingleThreadScheduledExecutor(r -> {
- Thread thread = Executors.defaultThreadFactory().newThread(r);
- thread.setName("spark-cpu-monitor");
- thread.setDaemon(true);
- return thread;
- });
// Rolling averages for system/process data
private static final RollingAverage SYSTEM_AVERAGE_10_SEC = new RollingAverage(10);
@@ -68,7 +60,7 @@ public enum CpuMonitor {
}
// schedule rolling average calculations.
- EXECUTOR.scheduleAtFixedRate(new RollingAverageCollectionTask(), 1, 1, TimeUnit.SECONDS);
+ MonitoringExecutor.INSTANCE.scheduleAtFixedRate(new RollingAverageCollectionTask(), 1, 1, TimeUnit.SECONDS);
}
/**
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/disk/DiskUsage.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/disk/DiskUsage.java
new file mode 100644
index 0000000..7a4ada4
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/disk/DiskUsage.java
@@ -0,0 +1,71 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.monitor.disk;
+
+import java.io.IOException;
+import java.nio.file.FileStore;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+
+/**
+ * Exposes the system disk usage.
+ */
+public enum DiskUsage {
+ ;
+
+ private static final FileStore FILE_STORE;
+
+ static {
+ FileStore fileStore = null;
+ try {
+ fileStore = Files.getFileStore(Paths.get("."));
+ } catch (IOException e) {
+ // ignore
+ }
+ FILE_STORE = fileStore;
+ }
+
+ public static long getUsed() {
+ if (FILE_STORE == null) {
+ return 0;
+ }
+
+ try {
+ long total = FILE_STORE.getTotalSpace();
+ return total - FILE_STORE.getUsableSpace();
+ } catch (IOException e) {
+ return 0;
+ }
+ }
+
+ public static long getTotal() {
+ if (FILE_STORE == null) {
+ return 0;
+ }
+
+ try {
+ return FILE_STORE.getTotalSpace();
+ } catch (IOException e) {
+ return 0;
+ }
+ }
+
+}
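
Both accessors return 0 rather than throwing when the FileStore could not be resolved or queried, which is why the HealthModule caller above guards on zero before rendering anything. A usage sketch:

    long used = DiskUsage.getUsed();
    long total = DiskUsage.getTotal();
    if (total > 0) {
        System.out.printf("disk: %d / %d bytes (%.1f%% used)%n",
                used, total, 100.0 * used / total);
    }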
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/GarbageCollectorStatistics.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/GarbageCollectorStatistics.java
index c831ea1..cfd12a1 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/GarbageCollectorStatistics.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/GarbageCollectorStatistics.java
@@ -74,6 +74,8 @@ public class GarbageCollectorStatistics {
this(bean.getCollectionCount(), bean.getCollectionTime());
}
+ // all times in milliseconds
+
public long getCollectionCount() {
return this.collectionCount;
}
@@ -82,6 +84,14 @@ public class GarbageCollectorStatistics {
return this.collectionTime;
}
+ public double getAverageCollectionTime() {
+ return this.collectionCount == 0 ? 0 : (double) this.collectionTime / this.collectionCount;
+ }
+
+ public long getAverageCollectionFrequency(long serverUptime) {
+ return this.collectionCount == 0 ? 0 : (long) ((serverUptime - (double) this.collectionTime) / this.collectionCount);
+ }
+
public GarbageCollectorStatistics subtract(GarbageCollectorStatistics other) {
if (other == ZERO || (other.collectionCount == 0 && other.collectionTime == 0)) {
return this;
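
A quick worked example of the two new averages, using the (count, time) constructor that the delegating constructor above points at: 120 collections totalling 3,600 ms of pause over a 600,000 ms uptime give:

    GarbageCollectorStatistics stats = new GarbageCollectorStatistics(120, 3600);
    stats.getAverageCollectionTime();             // 3600.0 / 120 = 30.0 ms per collection
    stats.getAverageCollectionFrequency(600_000); // (600000 - 3600) / 120 = 4970 ms between collections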
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/MemoryInfo.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/MemoryInfo.java
new file mode 100644
index 0000000..226f75b
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/MemoryInfo.java
@@ -0,0 +1,153 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.monitor.memory;
+
+import me.lucko.spark.common.util.LinuxProc;
+
+import java.lang.management.ManagementFactory;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import javax.management.JMX;
+import javax.management.MBeanServer;
+import javax.management.ObjectName;
+
+/**
+ * Utility to query information about system memory usage.
+ */
+public enum MemoryInfo {
+ ;
+
+ /** The object name of the com.sun.management.OperatingSystemMXBean */
+ private static final String OPERATING_SYSTEM_BEAN = "java.lang:type=OperatingSystem";
+ /** The OperatingSystemMXBean instance */
+ private static final OperatingSystemMXBean BEAN;
+
+ /** The format used by entries in /proc/meminfo */
+ private static final Pattern PROC_MEMINFO_VALUE = Pattern.compile("^(\\w+):\\s*(\\d+) kB$");
+
+ static {
+ try {
+ MBeanServer beanServer = ManagementFactory.getPlatformMBeanServer();
+ ObjectName diagnosticBeanName = ObjectName.getInstance(OPERATING_SYSTEM_BEAN);
+ BEAN = JMX.newMXBeanProxy(beanServer, diagnosticBeanName, OperatingSystemMXBean.class);
+ } catch (Exception e) {
+ throw new UnsupportedOperationException("OperatingSystemMXBean is not supported by the system", e);
+ }
+ }
+
+ /* Swap */
+
+ public static long getUsedSwap() {
+ return BEAN.getTotalSwapSpaceSize() - BEAN.getFreeSwapSpaceSize();
+ }
+
+ public static long getTotalSwap() {
+ return BEAN.getTotalSwapSpaceSize();
+ }
+
+ /* Physical Memory */
+
+ public static long getUsedPhysicalMemory() {
+ return getTotalPhysicalMemory() - getAvailablePhysicalMemory();
+ }
+
+ public static long getTotalPhysicalMemory() {
+ // try to read from /proc/meminfo on linux systems
+ for (String line : LinuxProc.MEMINFO.read()) {
+ Matcher matcher = PROC_MEMINFO_VALUE.matcher(line);
+ if (matcher.matches()) {
+ String label = matcher.group(1);
+ long value = Long.parseLong(matcher.group(2)) * 1024; // kB -> B
+
+ if (label.equals("MemTotal")) {
+ return value;
+ }
+ }
+ }
+
+ // fallback to JVM measure
+ return BEAN.getTotalPhysicalMemorySize();
+ }
+
+ public static long getAvailablePhysicalMemory() {
+ boolean present = false;
+ long free = 0, buffers = 0, cached = 0, sReclaimable = 0;
+
+ for (String line : LinuxProc.MEMINFO.read()) {
+ Matcher matcher = PROC_MEMINFO_VALUE.matcher(line);
+ if (matcher.matches()) {
+ present = true;
+
+ String label = matcher.group(1);
+ long value = Long.parseLong(matcher.group(2)) * 1024; // kB -> B
+
+ // if MemAvailable is set, just return that
+ if (label.equals("MemAvailable")) {
+ return value;
+ }
+
+ // otherwise, record MemFree, Buffers, Cached and SReclaimable
+ switch (label) {
+ case "MemFree":
+ free = value;
+ break;
+ case "Buffers":
+ buffers = value;
+ break;
+ case "Cached":
+ cached = value;
+ break;
+ case "SReclaimable":
+ sReclaimable = value;
+ break;
+ }
+ }
+ }
+
+ // estimate how much is "available" - not exact, but close enough for monitoring.
+ // most Linux kernels from the last ~8 years report MemAvailable directly, in which
+ // case the loop above has already returned that value
+ //
+ // useful ref: https://www.linuxatemyram.com/
+ if (present) {
+ return free + buffers + cached + sReclaimable;
+ }
+
+ // fallback to what the JVM understands as "free"
+ // on non-linux systems, this is probably good enough to estimate what's available
+ return BEAN.getFreePhysicalMemorySize();
+ }
+
+ /* Virtual Memory */
+
+ public static long getTotalVirtualMemory() {
+ return BEAN.getCommittedVirtualMemorySize();
+ }
+
+ public interface OperatingSystemMXBean {
+ long getCommittedVirtualMemorySize();
+ long getTotalSwapSpaceSize();
+ long getFreeSwapSpaceSize();
+ long getFreePhysicalMemorySize();
+ long getTotalPhysicalMemorySize();
+ }
+}
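
A small sketch of the kB-to-bytes conversion performed by the PROC_MEMINFO_VALUE pattern above; the sample line is illustrative:

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    // Sketch: how a /proc/meminfo row is converted to a byte count.
    public class MeminfoParseExample {
        private static final Pattern PROC_MEMINFO_VALUE = Pattern.compile("^(\\w+):\\s*(\\d+) kB$");

        public static void main(String[] args) {
            Matcher matcher = PROC_MEMINFO_VALUE.matcher("MemAvailable:    6423532 kB");
            if (matcher.matches()) {
                long bytes = Long.parseLong(matcher.group(2)) * 1024; // kB -> B
                System.out.println(matcher.group(1) + " = " + bytes); // MemAvailable = 6577696768
            }
        }
    }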
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/net/Direction.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/net/Direction.java
new file mode 100644
index 0000000..d4d11ff
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/net/Direction.java
@@ -0,0 +1,37 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.monitor.net;
+
+public enum Direction {
+
+ RECEIVE("rx"),
+ TRANSMIT("tx");
+
+ private final String abbreviation;
+
+ Direction(String abbreviation) {
+ this.abbreviation = abbreviation;
+ }
+
+ public String abbrev() {
+ return this.abbreviation;
+ }
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkInterfaceAverages.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkInterfaceAverages.java
new file mode 100644
index 0000000..0ce0639
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkInterfaceAverages.java
@@ -0,0 +1,88 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.monitor.net;
+
+import me.lucko.spark.common.util.RollingAverage;
+
+import java.math.BigDecimal;
+
+public final class NetworkInterfaceAverages {
+ private final RollingAverage rxBytesPerSecond;
+ private final RollingAverage txBytesPerSecond;
+ private final RollingAverage rxPacketsPerSecond;
+ private final RollingAverage txPacketsPerSecond;
+
+ NetworkInterfaceAverages(int windowSize) {
+ this.rxBytesPerSecond = new RollingAverage(windowSize);
+ this.txBytesPerSecond = new RollingAverage(windowSize);
+ this.rxPacketsPerSecond = new RollingAverage(windowSize);
+ this.txPacketsPerSecond = new RollingAverage(windowSize);
+ }
+
+ void accept(NetworkInterfaceInfo info, RateCalculator rateCalculator) {
+ this.rxBytesPerSecond.add(rateCalculator.calculate(info.getReceivedBytes()));
+ this.txBytesPerSecond.add(rateCalculator.calculate(info.getTransmittedBytes()));
+ this.rxPacketsPerSecond.add(rateCalculator.calculate(info.getReceivedPackets()));
+ this.txPacketsPerSecond.add(rateCalculator.calculate(info.getTransmittedPackets()));
+ }
+
+ interface RateCalculator {
+ BigDecimal calculate(long value);
+ }
+
+ public RollingAverage bytesPerSecond(Direction direction) {
+ switch (direction) {
+ case RECEIVE:
+ return rxBytesPerSecond();
+ case TRANSMIT:
+ return txBytesPerSecond();
+ default:
+ throw new AssertionError();
+ }
+ }
+
+ public RollingAverage packetsPerSecond(Direction direction) {
+ switch (direction) {
+ case RECEIVE:
+ return rxPacketsPerSecond();
+ case TRANSMIT:
+ return txPacketsPerSecond();
+ default:
+ throw new AssertionError();
+ }
+ }
+
+ public RollingAverage rxBytesPerSecond() {
+ return this.rxBytesPerSecond;
+ }
+
+ public RollingAverage rxPacketsPerSecond() {
+ return this.rxPacketsPerSecond;
+ }
+
+ public RollingAverage txBytesPerSecond() {
+ return this.txBytesPerSecond;
+ }
+
+ public RollingAverage txPacketsPerSecond() {
+ return this.txPacketsPerSecond;
+ }
+}
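
A sketch of how NetworkMonitor (below) feeds these averages. The constructor and accept(..) are package-private, so the sketch lives in the same package, and it assumes RollingAverage exposes a mean() accessor:

    package me.lucko.spark.common.monitor.net;

    import java.math.BigDecimal;

    // Sketch: feed one polled delta and read it back per direction.
    public class AveragesExample {
        public static void main(String[] args) {
            NetworkInterfaceAverages averages = new NetworkInterfaceAverages(15);
            NetworkInterfaceInfo delta = new NetworkInterfaceInfo("eth0", 61440, 120, 0, 30720, 90, 0);

            // rate = counter delta / 60s poll interval, mirroring NetworkMonitor
            averages.accept(delta, value -> BigDecimal.valueOf(value / 60.0));

            System.out.println(averages.bytesPerSecond(Direction.RECEIVE).mean());    // 1024.0
            System.out.println(averages.packetsPerSecond(Direction.TRANSMIT).mean()); // 1.5
        }
    }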
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkInterfaceInfo.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkInterfaceInfo.java
new file mode 100644
index 0000000..bd9e187
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkInterfaceInfo.java
@@ -0,0 +1,274 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.monitor.net;
+
+import com.google.common.collect.ImmutableMap;
+
+import me.lucko.spark.common.util.LinuxProc;
+
+import org.checkerframework.checker.nullness.qual.NonNull;
+
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+import java.util.stream.IntStream;
+
+/**
+ * Exposes information/statistics about a network interface.
+ */
+public final class NetworkInterfaceInfo {
+ public static final NetworkInterfaceInfo ZERO = new NetworkInterfaceInfo("", 0, 0, 0, 0, 0, 0);
+
+ private final String name;
+ private final long rxBytes;
+ private final long rxPackets;
+ private final long rxErrors;
+ private final long txBytes;
+ private final long txPackets;
+ private final long txErrors;
+
+ public NetworkInterfaceInfo(String name, long rxBytes, long rxPackets, long rxErrors, long txBytes, long txPackets, long txErrors) {
+ this.name = name;
+ this.rxBytes = rxBytes;
+ this.rxPackets = rxPackets;
+ this.rxErrors = rxErrors;
+ this.txBytes = txBytes;
+ this.txPackets = txPackets;
+ this.txErrors = txErrors;
+ }
+
+ /**
+ * Gets the name of the network interface.
+ *
+ * @return the interface name
+ */
+ public String getName() {
+ return this.name;
+ }
+
+ /**
+ * Gets the total number of bytes of data received by the interface.
+ *
+ * @return the total received bytes
+ */
+ public long getReceivedBytes() {
+ return this.rxBytes;
+ }
+
+ /**
+ * Gets the total number of packets of data received by the interface.
+ *
+ * @return the total received packets
+ */
+ public long getReceivedPackets() {
+ return this.rxPackets;
+ }
+
+ /**
+ * Gets the total number of receive errors detected by the device driver.
+ *
+ * @return the total receive errors
+ */
+ public long getReceiveErrors() {
+ return this.rxErrors;
+ }
+
+ /**
+ * Gets the total number of bytes of data transmitted by the interface.
+ *
+ * @return the total transmitted bytes
+ */
+ public long getTransmittedBytes() {
+ return this.txBytes;
+ }
+
+ /**
+ * Gets the total number of packets of data transmitted by the interface.
+ *
+ * @return the total transmitted packets
+ */
+ public long getTransmittedPackets() {
+ return this.txPackets;
+ }
+
+ /**
+ * Gets the total number of transmit errors detected by the device driver.
+ *
+ * @return the total transmit errors
+ */
+ public long getTransmitErrors() {
+ return this.txErrors;
+ }
+
+ public long getBytes(Direction direction) {
+ switch (direction) {
+ case RECEIVE:
+ return getReceivedBytes();
+ case TRANSMIT:
+ return getTransmittedBytes();
+ default:
+ throw new AssertionError();
+ }
+ }
+
+ public long getPackets(Direction direction) {
+ switch (direction) {
+ case RECEIVE:
+ return getReceivedPackets();
+ case TRANSMIT:
+ return getTransmittedPackets();
+ default:
+ throw new AssertionError();
+ }
+ }
+
+ public boolean isZero() {
+ return this.rxBytes == 0 && this.rxPackets == 0 && this.rxErrors == 0 &&
+ this.txBytes == 0 && this.txPackets == 0 && this.txErrors == 0;
+ }
+
+ public NetworkInterfaceInfo subtract(NetworkInterfaceInfo other) {
+ if (other == ZERO || other.isZero()) {
+ return this;
+ }
+
+ return new NetworkInterfaceInfo(
+ this.name,
+ this.rxBytes - other.rxBytes,
+ this.rxPackets - other.rxPackets,
+ this.rxErrors - other.rxErrors,
+ this.txBytes - other.txBytes,
+ this.txPackets - other.txPackets,
+ this.txErrors - other.txErrors
+ );
+ }
+
+ /**
+ * Calculates the difference between two readings, from which a rate of change can be derived.
+ *
+ * @param current the polled values
+ * @param previous the previously polled values
+ * @return the difference
+ */
+ public static @NonNull Map<String, NetworkInterfaceInfo> difference(Map<String, NetworkInterfaceInfo> current, Map<String, NetworkInterfaceInfo> previous) {
+ if (previous == null || previous.isEmpty()) {
+ return current;
+ }
+
+ ImmutableMap.Builder<String, NetworkInterfaceInfo> builder = ImmutableMap.builder();
+ for (NetworkInterfaceInfo netInf : current.values()) {
+ String name = netInf.getName();
+ builder.put(name, netInf.subtract(previous.getOrDefault(name, ZERO)));
+ }
+ return builder.build();
+ }
+
+ /**
+ * Queries the network interface statistics for the system.
+ *
+ * <p>Returns an empty {@link Map} if no statistics could be gathered.</p>
+ *
+ * @return the system net stats
+ */
+ public static @NonNull Map<String, NetworkInterfaceInfo> pollSystem() {
+ try {
+ List<String> output = LinuxProc.NET_DEV.read();
+ return read(output);
+ } catch (Exception e) {
+ return Collections.emptyMap();
+ }
+ }
+
+ private static final Pattern PROC_NET_DEV_PATTERN = Pattern.compile("^\\s*(\\w+):([\\d\\s]+)$");
+
+ private static @NonNull Map<String, NetworkInterfaceInfo> read(List<String> output) {
+ // Inter-| Receive | Transmit
+ // face |bytes packets errs drop fifo frame compressed multicast|bytes packets errs drop fifo colls carrier compressed
+ // lo: 2776770 11307 0 0 0 0 0 0 2776770 11307 0 0 0 0 0 0
+ // eth0: 1215645 2751 0 0 0 0 0 0 1782404 4324 0 0 0 427 0 0
+ // ppp0: 1622270 5552 1 0 0 0 0 0 354130 5669 0 0 0 0 0 0
+ // tap0: 7714 81 0 0 0 0 0 0 7714 81 0 0 0 0 0 0
+
+ if (output.size() < 3) {
+ // no data
+ return Collections.emptyMap();
+ }
+
+ String header = output.get(1);
+ String[] categories = header.split("\\|");
+ if (categories.length != 3) {
+ // unknown format
+ return Collections.emptyMap();
+ }
+
+ List<String> rxFields = Arrays.asList(categories[1].trim().split("\\s+"));
+ List<String> txFields = Arrays.asList(categories[2].trim().split("\\s+"));
+
+ int rxFieldsLength = rxFields.size();
+ int txFieldsLength = txFields.size();
+
+ int fieldRxBytes = rxFields.indexOf("bytes");
+ int fieldRxPackets = rxFields.indexOf("packets");
+ int fieldRxErrors = rxFields.indexOf("errs");
+
+ int fieldTxBytes = rxFieldsLength + txFields.indexOf("bytes");
+ int fieldTxPackets = rxFieldsLength + txFields.indexOf("packets");
+ int fieldTxErrors = rxFieldsLength + txFields.indexOf("errs");
+
+ int expectedFields = rxFieldsLength + txFieldsLength;
+
+ if (IntStream.of(fieldRxBytes, fieldRxPackets, fieldRxErrors, fieldTxBytes, fieldTxPackets, fieldTxErrors).anyMatch(i -> i == -1)) {
+ // missing required fields
+ return Collections.emptyMap();
+ }
+
+ ImmutableMap.Builder<String, NetworkInterfaceInfo> builder = ImmutableMap.builder();
+
+ for (String line : output.subList(2, output.size())) {
+ Matcher matcher = PROC_NET_DEV_PATTERN.matcher(line);
+ if (matcher.matches()) {
+ String interfaceName = matcher.group(1);
+ String[] stringValues = matcher.group(2).trim().split("\\s+");
+
+ if (stringValues.length != expectedFields) {
+ continue;
+ }
+
+ long[] values = Arrays.stream(stringValues).mapToLong(Long::parseLong).toArray();
+ builder.put(interfaceName, new NetworkInterfaceInfo(
+ interfaceName,
+ values[fieldRxBytes],
+ values[fieldRxPackets],
+ values[fieldRxErrors],
+ values[fieldTxBytes],
+ values[fieldTxPackets],
+ values[fieldTxErrors]
+ ));
+ }
+ }
+
+ return builder.build();
+ }
+
+}
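
The public difference(..) helper pairs two snapshots by interface name; a sketch with fabricated counter values:

    import com.google.common.collect.ImmutableMap;

    import me.lucko.spark.common.monitor.net.NetworkInterfaceInfo;

    import java.util.Map;

    // Sketch: subtract a previous snapshot from the current one to get per-interval counters.
    public class NetDiffExample {
        public static void main(String[] args) {
            Map<String, NetworkInterfaceInfo> previous = ImmutableMap.of(
                    "eth0", new NetworkInterfaceInfo("eth0", 1000000, 800, 0, 500000, 400, 0));
            Map<String, NetworkInterfaceInfo> current = ImmutableMap.of(
                    "eth0", new NetworkInterfaceInfo("eth0", 1061440, 920, 0, 530720, 490, 0));

            NetworkInterfaceInfo delta = NetworkInterfaceInfo.difference(current, previous).get("eth0");
            System.out.println(delta.getReceivedBytes());      // 61440
            System.out.println(delta.getTransmittedPackets()); // 90
        }
    }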
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkMonitor.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkMonitor.java
new file mode 100644
index 0000000..79ab8d9
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkMonitor.java
@@ -0,0 +1,141 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.monitor.net;
+
+import me.lucko.spark.common.monitor.MonitoringExecutor;
+
+import java.math.BigDecimal;
+import java.math.RoundingMode;
+import java.util.Collections;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicReference;
+import java.util.function.Supplier;
+import java.util.regex.Pattern;
+
+/**
+ * Exposes and monitors the system/process network usage.
+ */
+public enum NetworkMonitor {
+ ;
+
+ // Latest readings
+ private static final AtomicReference<Map<String, NetworkInterfaceInfo>> SYSTEM = new AtomicReference<>();
+
+ // a pattern to match the interface names to exclude from monitoring
+ // ignore: virtual eth adapters + container bridge networks
+ private static final Pattern INTERFACES_TO_IGNORE = Pattern.compile("^(veth\\w+|br-\\w+)$");
+
+ // Rolling averages for system/process data over 15 mins
+ private static final Map<String, NetworkInterfaceAverages> SYSTEM_AVERAGES = new ConcurrentHashMap<>();
+
+ // poll every minute, keep rolling averages for 15 mins
+ private static final int POLL_INTERVAL = 60;
+ private static final int WINDOW_SIZE_SECONDS = (int) TimeUnit.MINUTES.toSeconds(15);
+ private static final int WINDOW_SIZE = WINDOW_SIZE_SECONDS / POLL_INTERVAL; // 15
+
+ static {
+ // schedule rolling average calculations.
+ MonitoringExecutor.INSTANCE.scheduleAtFixedRate(new RollingAverageCollectionTask(), 1, POLL_INTERVAL, TimeUnit.SECONDS);
+ }
+
+ /**
+ * Ensures that the static initializer has been called.
+ */
+ @SuppressWarnings("EmptyMethod")
+ public static void ensureMonitoring() {
+ // intentionally empty
+ }
+
+ public static Map<String, NetworkInterfaceInfo> systemTotals() {
+ Map<String, NetworkInterfaceInfo> values = SYSTEM.get();
+ return values == null ? Collections.emptyMap() : values;
+ }
+
+ public static Map<String, NetworkInterfaceAverages> systemAverages() {
+ return Collections.unmodifiableMap(SYSTEM_AVERAGES);
+ }
+
+ /**
+ * Task to poll network activity and add to the rolling averages in the enclosing class.
+ */
+ private static final class RollingAverageCollectionTask implements Runnable {
+ private static final BigDecimal POLL_INTERVAL_DECIMAL = BigDecimal.valueOf(POLL_INTERVAL);
+
+ @Override
+ public void run() {
+ Map<String, NetworkInterfaceInfo> values = pollAndDiff(NetworkInterfaceInfo::pollSystem, SYSTEM);
+ if (values != null) {
+ submit(SYSTEM_AVERAGES, values);
+ }
+ }
+
+ /**
+ * Submits the incoming values into the rolling averages map.
+ *
+ * @param values the values
+ */
+ private static void submit(Map<String, NetworkInterfaceAverages> rollingAveragesMap, Map<String, NetworkInterfaceInfo> values) {
+ // ensure all incoming keys are present in the rolling averages map
+ for (String key : values.keySet()) {
+ if (!INTERFACES_TO_IGNORE.matcher(key).matches()) {
+ rollingAveragesMap.computeIfAbsent(key, k -> new NetworkInterfaceAverages(WINDOW_SIZE));
+ }
+ }
+
+ // submit a value (0 if unknown) to each rolling average instance in the map
+ for (Map.Entry<String, NetworkInterfaceAverages> entry : rollingAveragesMap.entrySet()) {
+ String interfaceName = entry.getKey();
+ NetworkInterfaceAverages rollingAvgs = entry.getValue();
+
+ NetworkInterfaceInfo info = values.getOrDefault(interfaceName, NetworkInterfaceInfo.ZERO);
+ rollingAvgs.accept(info, RollingAverageCollectionTask::calculateRate);
+ }
+ }
+
+ private static BigDecimal calculateRate(long value) {
+ return BigDecimal.valueOf(value).divide(POLL_INTERVAL_DECIMAL, RoundingMode.HALF_UP);
+ }
+
+ private static Map<String, NetworkInterfaceInfo> pollAndDiff(Supplier<Map<String, NetworkInterfaceInfo>> poller, AtomicReference<Map<String, NetworkInterfaceInfo>> valueReference) {
+ // poll the latest value from the supplier
+ Map<String, NetworkInterfaceInfo> latest = poller.get();
+
+ // update the value ref.
+ // if the previous value was null, and the new value is empty, keep it null
+ Map<String, NetworkInterfaceInfo> previous = valueReference.getAndUpdate(prev -> {
+ if (prev == null && latest.isEmpty()) {
+ return null;
+ } else {
+ return latest;
+ }
+ });
+
+ if (previous == null) {
+ return null;
+ }
+
+ return NetworkInterfaceInfo.difference(latest, previous);
+ }
+ }
+
+}
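
Usage sketch: callers only need ensureMonitoring(). Meaningful averages require two polls, because the first poll just records the baseline (pollAndDiff returns null until a previous value exists). A mean() accessor on RollingAverage is assumed:

    import me.lucko.spark.common.monitor.net.NetworkMonitor;

    // Sketch: start monitoring, wait out two poll intervals, then read the averages.
    public class NetworkMonitorExample {
        public static void main(String[] args) throws InterruptedException {
            NetworkMonitor.ensureMonitoring(); // triggers the static scheduler
            Thread.sleep(2 * 61 * 1000L);      // first poll is only a baseline
            NetworkMonitor.systemAverages().forEach((name, avg) ->
                    System.out.printf("%s: rx %.1f B/s%n", name, avg.rxBytesPerSecond().mean()));
        }
    }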
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PingStatistics.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PingStatistics.java
new file mode 100644
index 0000000..49fcbe1
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PingStatistics.java
@@ -0,0 +1,149 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.monitor.ping;
+
+import me.lucko.spark.common.monitor.MonitoringExecutor;
+import me.lucko.spark.common.util.RollingAverage;
+
+import org.checkerframework.checker.nullness.qual.Nullable;
+
+import java.math.BigDecimal;
+import java.util.Map;
+import java.util.concurrent.ScheduledFuture;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * Provides statistics for player ping RTT to the server.
+ */
+public final class PingStatistics implements Runnable, AutoCloseable {
+ private static final int QUERY_RATE_SECONDS = 10;
+ private static final int WINDOW_SIZE_SECONDS = (int) TimeUnit.MINUTES.toSeconds(15); // 900
+ private static final int WINDOW_SIZE = WINDOW_SIZE_SECONDS / QUERY_RATE_SECONDS; // 90
+
+ /** The platform function that provides player ping times */
+ private final PlayerPingProvider provider;
+ /** Rolling average of the median ping across all players */
+ private final RollingAverage rollingAverage = new RollingAverage(WINDOW_SIZE);
+
+ /** The scheduler task that polls pings and calculates the rolling average */
+ private ScheduledFuture<?> future;
+
+ public PingStatistics(PlayerPingProvider provider) {
+ this.provider = provider;
+ }
+
+ /**
+ * Starts the statistics monitor.
+ */
+ public void start() {
+ if (this.future != null) {
+ throw new IllegalStateException();
+ }
+ this.future = MonitoringExecutor.INSTANCE.scheduleAtFixedRate(this, QUERY_RATE_SECONDS, QUERY_RATE_SECONDS, TimeUnit.SECONDS);
+ }
+
+ @Override
+ public void close() {
+ if (this.future != null) {
+ this.future.cancel(false);
+ this.future = null;
+ }
+ }
+
+ @Override
+ public void run() {
+ PingSummary summary = currentSummary();
+ if (summary.total() == 0) {
+ return;
+ }
+
+ this.rollingAverage.add(BigDecimal.valueOf(summary.median()));
+ }
+
+ /**
+ * Gets the ping rolling average.
+ *
+ * @return the rolling average
+ */
+ public RollingAverage getPingAverage() {
+ return this.rollingAverage;
+ }
+
+ /**
+ * Queries a summary of current player pings.
+ *
+ * @return a summary of current pings
+ */
+ public PingSummary currentSummary() {
+ Map<String, Integer> results = this.provider.poll();
+ int[] values = results.values().stream().filter(ping -> ping > 0).mapToInt(i -> i).toArray();
+ return values.length == 0
+ ? new PingSummary(new int[]{0})
+ : new PingSummary(values);
+ }
+
+ /**
+ * Queries the ping of a given player.
+ *
+ * @param playerName the name of the player
+ * @return the ping, if available
+ */
+ public @Nullable PlayerPing query(String playerName) {
+ Map<String, Integer> results = this.provider.poll();
+
+ // try exact match
+ Integer result = results.get(playerName);
+ if (result != null) {
+ return new PlayerPing(playerName, result);
+ }
+
+ // try case-insensitive match
+ for (Map.Entry<String, Integer> entry : results.entrySet()) {
+ if (entry.getKey().equalsIgnoreCase(playerName)) {
+ return new PlayerPing(
+ entry.getKey(),
+ entry.getValue()
+ );
+ }
+ }
+
+ return null;
+ }
+
+ public static final class PlayerPing {
+ private final String name;
+ private final int ping;
+
+ PlayerPing(String name, int ping) {
+ this.name = name;
+ this.ping = ping;
+ }
+
+ public String name() {
+ return this.name;
+ }
+
+ public int ping() {
+ return this.ping;
+ }
+ }
+
+}
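
A sketch wiring the statistics to a fixed PlayerPingProvider; a real platform would poll its player list, and the names and ping values here are fabricated:

    import com.google.common.collect.ImmutableMap;

    import me.lucko.spark.common.monitor.ping.PingStatistics;
    import me.lucko.spark.common.monitor.ping.PlayerPingProvider;

    // Sketch: PingStatistics samples the median ping every 10s into a 15-min average.
    public class PingExample {
        public static void main(String[] args) {
            PlayerPingProvider provider = () -> ImmutableMap.of("Alice", 24, "Bob", 30, "Carol", 250);

            try (PingStatistics statistics = new PingStatistics(provider)) {
                statistics.start(); // schedules the periodic sampling task
                System.out.println(statistics.currentSummary().median()); // 30.0
                System.out.println(statistics.query("bob").name());       // Bob (case-insensitive)
            }
        }
    }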
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PingSummary.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PingSummary.java
new file mode 100644
index 0000000..024d27d
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PingSummary.java
@@ -0,0 +1,81 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.monitor.ping;
+
+import java.util.Arrays;
+
+public final class PingSummary {
+
+ private final int[] values;
+ private final int total;
+ private final int max;
+ private final int min;
+ private final double mean;
+
+ public PingSummary(int[] values) {
+ Arrays.sort(values);
+ this.values = values;
+
+ int total = 0;
+ for (int value : values) {
+ total += value;
+ }
+ this.total = total;
+
+ this.mean = (double) total / values.length;
+ this.max = values[values.length - 1];
+ this.min = values[0];
+ }
+
+ public int total() {
+ return this.total;
+ }
+
+ public double mean() {
+ return this.mean;
+ }
+
+ public int max() {
+ return this.max;
+ }
+
+ public int min() {
+ return this.min;
+ }
+
+ public int percentile(double percentile) {
+ if (percentile < 0 || percentile > 1) {
+ throw new IllegalArgumentException("Invalid percentile " + percentile);
+ }
+
+ int rank = (int) Math.ceil(percentile * (this.values.length - 1));
+ return this.values[rank];
+ }
+
+ public double median() {
+ return percentile(0.50d);
+ }
+
+ public double percentile95th() {
+ return percentile(0.95d);
+ }
+
+}
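
The percentile method is a nearest-rank lookup on the sorted values: for five samples the median picks index ceil(0.5 * 4) = 2 and the 95th percentile index ceil(0.95 * 4) = 4. A worked sketch, assuming the class is imported:

    // Sketch: nearest-rank percentile math on a small fabricated sample.
    public class PingSummaryExample {
        public static void main(String[] args) {
            PingSummary summary = new PingSummary(new int[]{58, 12, 450, 20, 34}); // sorted internally

            System.out.println(summary.mean());           // 114.8  (574 / 5)
            System.out.println(summary.median());         // 34.0   (index ceil(0.5 * 4) = 2)
            System.out.println(summary.percentile95th()); // 450.0  (index ceil(0.95 * 4) = 4)
            System.out.println(summary.min() + " / " + summary.max()); // 12 / 450
        }
    }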
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PlayerPingProvider.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PlayerPingProvider.java
new file mode 100644
index 0000000..7576573
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PlayerPingProvider.java
@@ -0,0 +1,40 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.monitor.ping;
+
+import java.util.Map;
+
+/**
+ * Provides information about player ping RTT.
+ */
+@FunctionalInterface
+public interface PlayerPingProvider {
+
+ /**
+ * Polls the current player pings, in milliseconds.
+ *
+ * <p>The map keys are player names and the values are the ping values.</p>
+ *
+ * @return a map of player pings
+ */
+ Map<String, Integer> poll();
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TickStatistics.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TickStatistics.java
index 31b58e9..bd2b834 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TickStatistics.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TickStatistics.java
@@ -56,7 +56,8 @@ public class TickStatistics implements TickHook.Callback, TickReporter.Callback
private boolean durationSupported = false;
private final RollingAverage tickDuration10Sec = new RollingAverage(TPS * 10);
private final RollingAverage tickDuration1Min = new RollingAverage(TPS * 60);
- private final RollingAverage[] tickDurationAverages = {this.tickDuration10Sec, this.tickDuration1Min};
+ private final RollingAverage tickDuration5Min = new RollingAverage(TPS * 60 * 5);
+ private final RollingAverage[] tickDurationAverages = {this.tickDuration10Sec, this.tickDuration1Min, this.tickDuration5Min};
private long last = 0;
@@ -131,6 +132,13 @@ public class TickStatistics implements TickHook.Callback, TickReporter.Callback
return this.tickDuration1Min;
}
+ public RollingAverage duration5Min() {
+ if (!this.durationSupported) {
+ return null;
+ }
+ return this.tickDuration5Min;
+ }
+
/**
* Rolling average calculator.
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/AbstractPlatformInfo.java b/spark-common/src/main/java/me/lucko/spark/common/platform/AbstractPlatformInfo.java
deleted file mode 100644
index 645d5b2..0000000
--- a/spark-common/src/main/java/me/lucko/spark/common/platform/AbstractPlatformInfo.java
+++ /dev/null
@@ -1,17 +0,0 @@
-package me.lucko.spark.common.platform;
-
-import java.lang.management.ManagementFactory;
-import java.lang.management.MemoryUsage;
-
-public abstract class AbstractPlatformInfo implements PlatformInfo {
-
- @Override
- public int getNCpus() {
- return Runtime.getRuntime().availableProcessors();
- }
-
- @Override
- public MemoryUsage getHeapUsage() {
- return ManagementFactory.getMemoryMXBean().getHeapMemoryUsage();
- }
-}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformInfo.java b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformInfo.java
index 80fb85f..082389d 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformInfo.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformInfo.java
@@ -20,13 +20,12 @@
package me.lucko.spark.common.platform;
-import me.lucko.spark.proto.SparkProtos;
import me.lucko.spark.proto.SparkProtos.PlatformMetadata;
-import java.lang.management.MemoryUsage;
-
public interface PlatformInfo {
+ int DATA_VERSION = 2;
+
Type getType();
String getName();
@@ -35,18 +34,13 @@ public interface PlatformInfo {
String getMinecraftVersion();
- int getNCpus();
-
- MemoryUsage getHeapUsage();
-
default int getSparkVersion() {
// does not necessarily correspond to the plugin/mod version
- // this is like a data version I suppose
- return 1;
+ return DATA_VERSION;
}
default Data toData() {
- return new Data(getType(), getName(), getVersion(), getMinecraftVersion(), getNCpus(), getHeapUsage(), getSparkVersion());
+ return new Data(getType(), getName(), getVersion(), getMinecraftVersion(), getSparkVersion());
}
enum Type {
@@ -70,17 +64,13 @@ public interface PlatformInfo {
private final String name;
private final String version;
private final String minecraftVersion;
- private final int nCpus;
- private final MemoryUsage heapUsage;
private final int sparkVersion;
- public Data(Type type, String name, String version, String minecraftVersion, int nCpus, MemoryUsage heapUsage, int sparkVersion) {
+ public Data(Type type, String name, String version, String minecraftVersion, int sparkVersion) {
this.type = type;
this.name = name;
this.version = version;
this.minecraftVersion = minecraftVersion;
- this.nCpus = nCpus;
- this.heapUsage = heapUsage;
this.sparkVersion = sparkVersion;
}
@@ -100,33 +90,15 @@ public interface PlatformInfo {
return this.minecraftVersion;
}
- public int getNCpus() {
- return this.nCpus;
- }
-
- public MemoryUsage getHeapUsage() {
- return this.heapUsage;
- }
-
public int getSparkVersion() {
return this.sparkVersion;
}
- public SparkProtos.MemoryUsage getHeapUsageProto() {
- return SparkProtos.MemoryUsage.newBuilder()
- .setUsed(this.heapUsage.getUsed())
- .setCommitted(this.heapUsage.getCommitted())
- .setMax(this.heapUsage.getMax())
- .build();
- }
-
public PlatformMetadata toProto() {
PlatformMetadata.Builder proto = PlatformMetadata.newBuilder()
.setType(this.type.toProto())
.setName(this.name)
.setVersion(this.version)
- .setNCpus(this.nCpus)
- .setHeapUsage(getHeapUsageProto())
.setSparkVersion(this.sparkVersion);
if (this.minecraftVersion != null) {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java
new file mode 100644
index 0000000..f35bbbe
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java
@@ -0,0 +1,188 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform;
+
+import me.lucko.spark.common.SparkPlatform;
+import me.lucko.spark.common.monitor.cpu.CpuInfo;
+import me.lucko.spark.common.monitor.cpu.CpuMonitor;
+import me.lucko.spark.common.monitor.disk.DiskUsage;
+import me.lucko.spark.common.monitor.memory.GarbageCollectorStatistics;
+import me.lucko.spark.common.monitor.memory.MemoryInfo;
+import me.lucko.spark.common.monitor.net.NetworkInterfaceAverages;
+import me.lucko.spark.common.monitor.net.NetworkMonitor;
+import me.lucko.spark.common.monitor.ping.PingStatistics;
+import me.lucko.spark.common.monitor.tick.TickStatistics;
+import me.lucko.spark.proto.SparkProtos.PlatformStatistics;
+import me.lucko.spark.proto.SparkProtos.SystemStatistics;
+
+import java.lang.management.ManagementFactory;
+import java.lang.management.MemoryUsage;
+import java.lang.management.RuntimeMXBean;
+import java.util.Map;
+
+public class PlatformStatisticsProvider {
+ private final SparkPlatform platform;
+
+ public PlatformStatisticsProvider(SparkPlatform platform) {
+ this.platform = platform;
+ }
+
+ public SystemStatistics getSystemStatistics() {
+ RuntimeMXBean runtimeBean = ManagementFactory.getRuntimeMXBean();
+
+ SystemStatistics.Builder builder = SystemStatistics.newBuilder()
+ .setCpu(SystemStatistics.Cpu.newBuilder()
+ .setThreads(Runtime.getRuntime().availableProcessors())
+ .setProcessUsage(SystemStatistics.Cpu.Usage.newBuilder()
+ .setLast1M(CpuMonitor.processLoad1MinAvg())
+ .setLast15M(CpuMonitor.processLoad15MinAvg())
+ .build()
+ )
+ .setSystemUsage(SystemStatistics.Cpu.Usage.newBuilder()
+ .setLast1M(CpuMonitor.systemLoad1MinAvg())
+ .setLast15M(CpuMonitor.systemLoad15MinAvg())
+ .build()
+ )
+ .setModelName(CpuInfo.queryCpuModel())
+ .build()
+ )
+ .setMemory(SystemStatistics.Memory.newBuilder()
+ .setPhysical(SystemStatistics.Memory.MemoryPool.newBuilder()
+ .setUsed(MemoryInfo.getUsedPhysicalMemory())
+ .setTotal(MemoryInfo.getTotalPhysicalMemory())
+ .build()
+ )
+ .setSwap(SystemStatistics.Memory.MemoryPool.newBuilder()
+ .setUsed(MemoryInfo.getUsedSwap())
+ .setTotal(MemoryInfo.getTotalSwap())
+ .build()
+ )
+ .build()
+ )
+ .setDisk(SystemStatistics.Disk.newBuilder()
+ .setTotal(DiskUsage.getTotal())
+ .setUsed(DiskUsage.getUsed())
+ .build()
+ )
+ .setOs(SystemStatistics.Os.newBuilder()
+ .setArch(System.getProperty("os.arch"))
+ .setName(System.getProperty("os.name"))
+ .setVersion(System.getProperty("os.version"))
+ .build()
+ )
+ .setJava(SystemStatistics.Java.newBuilder()
+ .setVendor(System.getProperty("java.vendor", "unknown"))
+ .setVersion(System.getProperty("java.version", "unknown"))
+ .setVendorVersion(System.getProperty("java.vendor.version", "unknown"))
+ .setVmArgs(String.join(" ", runtimeBean.getInputArguments()))
+ .build()
+ );
+
+ long uptime = runtimeBean.getUptime();
+ builder.setUptime(uptime);
+
+ Map<String, GarbageCollectorStatistics> gcStats = GarbageCollectorStatistics.pollStats();
+ gcStats.forEach((name, statistics) -> builder.putGc(
+ name,
+ SystemStatistics.Gc.newBuilder()
+ .setTotal(statistics.getCollectionCount())
+ .setAvgTime(statistics.getAverageCollectionTime())
+ .setAvgFrequency(statistics.getAverageCollectionFrequency(uptime))
+ .build()
+ ));
+
+ Map<String, NetworkInterfaceAverages> networkInterfaceStats = NetworkMonitor.systemAverages();
+ networkInterfaceStats.forEach((name, statistics) -> builder.putNet(
+ name,
+ SystemStatistics.NetInterface.newBuilder()
+ .setRxBytesPerSecond(statistics.rxBytesPerSecond().toProto())
+ .setRxPacketsPerSecond(statistics.rxPacketsPerSecond().toProto())
+ .setTxBytesPerSecond(statistics.txBytesPerSecond().toProto())
+ .setTxPacketsPerSecond(statistics.txPacketsPerSecond().toProto())
+ .build()
+ ));
+
+ return builder.build();
+ }
+
+ public PlatformStatistics getPlatformStatistics(Map<String, GarbageCollectorStatistics> startingGcStatistics) {
+ PlatformStatistics.Builder builder = PlatformStatistics.newBuilder();
+
+ MemoryUsage memoryUsage = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage();
+ builder.setMemory(PlatformStatistics.Memory.newBuilder()
+ .setHeap(PlatformStatistics.Memory.MemoryPool.newBuilder()
+ .setUsed(memoryUsage.getUsed())
+ .setTotal(memoryUsage.getCommitted())
+ .build()
+ )
+ .build()
+ );
+
+ long uptime = System.currentTimeMillis() - this.platform.getServerNormalOperationStartTime();
+ builder.setUptime(uptime);
+
+ if (startingGcStatistics != null) {
+ Map<String, GarbageCollectorStatistics> gcStats = GarbageCollectorStatistics.pollStatsSubtractInitial(startingGcStatistics);
+ gcStats.forEach((name, statistics) -> builder.putGc(
+ name,
+ PlatformStatistics.Gc.newBuilder()
+ .setTotal(statistics.getCollectionCount())
+ .setAvgTime(statistics.getAverageCollectionTime())
+ .setAvgFrequency(statistics.getAverageCollectionFrequency(uptime))
+ .build()
+ ));
+ }
+
+ TickStatistics tickStatistics = this.platform.getTickStatistics();
+ if (tickStatistics != null) {
+ builder.setTps(PlatformStatistics.Tps.newBuilder()
+ .setLast1M(tickStatistics.tps1Min())
+ .setLast5M(tickStatistics.tps5Min())
+ .setLast15M(tickStatistics.tps15Min())
+ .build()
+ );
+ if (tickStatistics.isDurationSupported()) {
+ builder.setMspt(PlatformStatistics.Mspt.newBuilder()
+ .setLast1M(tickStatistics.duration1Min().toProto())
+ .setLast5M(tickStatistics.duration5Min().toProto())
+ .build()
+ );
+ }
+ }
+
+ PingStatistics pingStatistics = this.platform.getPingStatistics();
+ if (pingStatistics != null && pingStatistics.getPingAverage().getSamples() != 0) {
+ builder.setPing(PlatformStatistics.Ping.newBuilder()
+ .setLast15M(pingStatistics.getPingAverage().toProto())
+ .build()
+ );
+ }
+
+ PlatformInfo.Type platformType = this.platform.getPlugin().getPlatformInfo().getType();
+ if (platformType != PlatformInfo.Type.CLIENT) {
+ long playerCount = this.platform.getPlugin().getCommandSenders().count() - 1; // the count includes the console sender, so subtract one
+ builder.setPlayerCount(playerCount);
+ }
+
+ return builder.build();
+ }
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java
new file mode 100644
index 0000000..ead2131
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java
@@ -0,0 +1,136 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform.serverconfig;
+
+import com.google.common.collect.ImmutableMap;
+import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Deque;
+import java.util.LinkedList;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+/**
+ * Abstract implementation of {@link ServerConfigProvider}.
+ *
+ * <p>This implementation can remove the configured hidden paths from
+ * the configurations before they are sent to the viewer.</p>
+ *
+ * @param <T> the file type
+ */
+public abstract class AbstractServerConfigProvider<T extends Enum<T>> implements ServerConfigProvider {
+ private final Map<String, T> files;
+ private final Collection<String> hiddenPaths;
+
+ protected AbstractServerConfigProvider(Map<String, T> files, Collection<String> hiddenPaths) {
+ this.files = files;
+ this.hiddenPaths = hiddenPaths;
+ }
+
+ @Override
+ public final Map<String, JsonElement> loadServerConfigurations() {
+ ImmutableMap.Builder<String, JsonElement> builder = ImmutableMap.builder();
+
+ this.files.forEach((path, type) -> {
+ try {
+ JsonElement json = load(path, type);
+ if (json != null) {
+ delete(json, this.hiddenPaths);
+ builder.put(path, json);
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ });
+
+ return builder.build();
+ }
+
+ /**
+ * Loads a file from the system.
+ *
+ * @param path the name of the file to load
+ * @param type the type of the file
+ * @return the loaded file
+ * @throws IOException if an error occurs performing i/o
+ */
+ protected abstract JsonElement load(String path, T type) throws IOException;
+
+ /**
+ * Deletes the given paths from the json element.
+ *
+ * @param json the json element
+ * @param paths the paths to delete
+ */
+ private static void delete(JsonElement json, Collection<String> paths) {
+ for (String path : paths) {
+ Deque<String> pathDeque = new LinkedList<>(Arrays.asList(path.split("\\.")));
+ delete(json, pathDeque);
+ }
+ }
+
+ private static void delete(JsonElement json, Deque<String> path) {
+ if (path.isEmpty()) {
+ return;
+ }
+ if (!json.isJsonObject()) {
+ return;
+ }
+
+ JsonObject jsonObject = json.getAsJsonObject();
+ String expected = path.removeFirst().replace("<dot>", ".");
+
+ Collection<String> keys;
+ if (expected.equals("*")) {
+ keys = jsonObject.entrySet().stream()
+ .map(Map.Entry::getKey)
+ .collect(Collectors.toList());
+ } else if (expected.endsWith("*")) {
+ String pattern = expected.substring(0, expected.length() - 1);
+ keys = jsonObject.entrySet().stream()
+ .map(Map.Entry::getKey)
+ .filter(key -> key.startsWith(pattern))
+ .collect(Collectors.toList());
+ } else if (jsonObject.has(expected)) {
+ keys = Collections.singletonList(expected);
+ } else {
+ keys = Collections.emptyList();
+ }
+
+ for (String key : keys) {
+ if (path.isEmpty()) {
+ jsonObject.remove(key);
+ } else {
+ Deque<String> pathCopy = keys.size() > 1
+ ? new LinkedList<>(path)
+ : path;
+
+ delete(jsonObject.get(key), pathCopy);
+ }
+ }
+ }
+
+}
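
A hypothetical subclass showing the hidden-path syntax: paths are split on '.', '*' wildcards match keys, and '<dot>' escapes a literal dot inside one key. FileType and the file/key names below are illustrative, not part of spark:

    package me.lucko.spark.common.platform.serverconfig;

    import com.google.common.collect.ImmutableMap;
    import com.google.common.collect.ImmutableSet;
    import com.google.gson.Gson;
    import com.google.gson.JsonElement;

    import java.io.BufferedReader;
    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Paths;

    // Sketch (hypothetical): export server.properties with sensitive keys redacted.
    public class ExampleConfigProvider extends AbstractServerConfigProvider<ExampleConfigProvider.FileType> {
        enum FileType { PROPERTIES }

        ExampleConfigProvider() {
            super(
                    ImmutableMap.of("server.properties", FileType.PROPERTIES),
                    // "rcon<dot>password" targets the flat key "rcon.password";
                    // "database.*" would strip every child of a nested "database" object
                    ImmutableSet.of("rcon<dot>password", "level-seed", "database.*")
            );
        }

        @Override
        protected JsonElement load(String path, FileType type) throws IOException {
            try (BufferedReader reader = Files.newBufferedReader(Paths.get(path))) {
                return new Gson().toJsonTree(new PropertiesFileReader(reader).readProperties());
            }
        }
    }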
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesFileReader.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesFileReader.java
new file mode 100644
index 0000000..8fc89d7
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesFileReader.java
@@ -0,0 +1,64 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform.serverconfig;
+
+import java.io.FilterReader;
+import java.io.IOException;
+import java.io.Reader;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Properties;
+
+/**
+ * A {@link Reader} that can parse a .properties file.
+ */
+public class PropertiesFileReader extends FilterReader {
+
+ public PropertiesFileReader(Reader in) {
+ super(in);
+ }
+
+ public Map<String, Object> readProperties() throws IOException {
+ Properties properties = new Properties();
+ properties.load(this);
+
+ Map<String, Object> values = new HashMap<>();
+ properties.forEach((k, v) -> {
+ String key = k.toString();
+ String value = v.toString();
+
+ if ("true".equals(value) || "false".equals(value)) {
+ values.put(key, Boolean.parseBoolean(value));
+ } else if (value.matches("\\d+")) {
+ try {
+ values.put(key, Long.parseLong(value));
+ } catch (NumberFormatException e) {
+ values.put(key, value);
+ }
+ } else {
+ values.put(key, value);
+ }
+ });
+
+ return values;
+ }
+
+}
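
A usage sketch showing the type coercion (boolean-like and digit-only values become Boolean/Long, everything else stays a String); assumes the class is imported:

    import java.io.IOException;
    import java.io.StringReader;
    import java.util.Map;

    // Sketch: parse an in-memory .properties snippet and inspect the coerced types.
    public class PropertiesReadExample {
        public static void main(String[] args) throws IOException {
            String props = "online-mode=true\nmax-players=20\nmotd=A Minecraft Server";
            Map<String, Object> values =
                    new PropertiesFileReader(new StringReader(props)).readProperties();

            System.out.println(values.get("online-mode").getClass().getSimpleName()); // Boolean
            System.out.println(values.get("max-players").getClass().getSimpleName()); // Long
            System.out.println(values.get("motd"));                                   // A Minecraft Server
        }
    }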
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java
new file mode 100644
index 0000000..1fc2391
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java
@@ -0,0 +1,59 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform.serverconfig;
+
+import com.google.gson.JsonElement;
+
+import java.util.Collections;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+/**
+ * Function to export server configuration files for access within the spark viewer.
+ */
+@FunctionalInterface
+public interface ServerConfigProvider {
+
+ /**
+ * Loads a map of the server configuration files.
+ *
+ * <p>The key is the name of the file and the value is a
+ * {@link JsonElement} of the contents.</p>
+ *
+ * @return the exported server configurations
+ */
+ Map<String, JsonElement> loadServerConfigurations();
+
+ default Map<String, String> exportServerConfigurations() {
+ return loadServerConfigurations().entrySet()
+ .stream()
+ .collect(Collectors.toMap(
+ Map.Entry::getKey,
+ e -> e.getValue().toString()
+ ));
+ }
+
+ /**
+ * A no-op implementation that always exports an empty map.
+ */
+ ServerConfigProvider NO_OP = Collections::emptyMap;
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java
index bae93b1..ce466a0 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java
@@ -20,6 +20,20 @@
package me.lucko.spark.common.sampler;
+import me.lucko.spark.common.SparkPlatform;
+import me.lucko.spark.common.command.sender.CommandSender;
+import me.lucko.spark.common.monitor.memory.GarbageCollectorStatistics;
+import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider;
+import me.lucko.spark.common.sampler.aggregator.DataAggregator;
+import me.lucko.spark.common.sampler.node.MergeMode;
+import me.lucko.spark.common.sampler.node.ThreadNode;
+import me.lucko.spark.common.util.ClassSourceLookup;
+import me.lucko.spark.proto.SparkSamplerProtos.SamplerData;
+import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata;
+
+import java.util.Comparator;
+import java.util.List;
+import java.util.Map;
import java.util.concurrent.CompletableFuture;
/**
@@ -42,6 +56,9 @@ public abstract class AbstractSampler implements Sampler {
/** A future to encapsulate the completion of this sampler instance */
protected final CompletableFuture<Sampler> future = new CompletableFuture<>();
+ /** The garbage collector statistics when profiling started */
+ protected Map<String, GarbageCollectorStatistics> initialGcStats;
+
protected AbstractSampler(int interval, ThreadDumper threadDumper, long endTime) {
this.interval = interval;
this.threadDumper = threadDumper;
@@ -65,4 +82,64 @@ public abstract class AbstractSampler implements Sampler {
public CompletableFuture<Sampler> getFuture() {
return this.future;
}
+
+ protected void recordInitialGcStats() {
+ this.initialGcStats = GarbageCollectorStatistics.pollStats();
+ }
+
+ protected Map<String, GarbageCollectorStatistics> getInitialGcStats() {
+ return this.initialGcStats;
+ }
+
+ protected void writeMetadataToProto(SamplerData.Builder proto, SparkPlatform platform, CommandSender creator, String comment, DataAggregator dataAggregator) {
+ SamplerMetadata.Builder metadata = SamplerMetadata.newBuilder()
+ .setPlatformMetadata(platform.getPlugin().getPlatformInfo().toData().toProto())
+ .setCreator(creator.toData().toProto())
+ .setStartTime(this.startTime)
+ .setEndTime(System.currentTimeMillis())
+ .setInterval(this.interval)
+ .setThreadDumper(this.threadDumper.getMetadata())
+ .setDataAggregator(dataAggregator.getMetadata());
+
+ if (comment != null) {
+ metadata.setComment(comment);
+ }
+
+ try {
+ metadata.setPlatformStatistics(platform.getStatisticsProvider().getPlatformStatistics(getInitialGcStats()));
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ metadata.setSystemStatistics(platform.getStatisticsProvider().getSystemStatistics());
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ ServerConfigProvider serverConfigProvider = platform.getPlugin().createServerConfigProvider();
+ metadata.putAllServerConfigurations(serverConfigProvider.exportServerConfigurations());
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ proto.setMetadata(metadata);
+ }
+
+ protected void writeDataToProto(SamplerData.Builder proto, DataAggregator dataAggregator, Comparator<ThreadNode> outputOrder, MergeMode mergeMode, ClassSourceLookup classSourceLookup) {
+ List<ThreadNode> data = dataAggregator.exportData();
+ data.sort(outputOrder);
+
+ ClassSourceLookup.Visitor classSourceVisitor = ClassSourceLookup.createVisitor(classSourceLookup);
+
+ for (ThreadNode entry : data) {
+ proto.addThreads(entry.toProto(mergeMode));
+ classSourceVisitor.visit(entry);
+ }
+
+ if (classSourceVisitor.hasMappings()) {
+ proto.putAllClassSources(classSourceVisitor.getMapping());
+ }
+ }
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
index b71aaee..845043f 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
@@ -20,15 +20,14 @@
package me.lucko.spark.common.sampler;
+import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.command.sender.CommandSender;
-import me.lucko.spark.common.platform.PlatformInfo;
import me.lucko.spark.common.sampler.node.MergeMode;
import me.lucko.spark.common.sampler.node.ThreadNode;
import me.lucko.spark.common.util.ClassSourceLookup;
-import me.lucko.spark.proto.SparkProtos.SamplerData;
+import me.lucko.spark.proto.SparkSamplerProtos.SamplerData;
import java.util.Comparator;
-import java.util.Map;
import java.util.concurrent.CompletableFuture;
/**
@@ -68,6 +67,6 @@ public interface Sampler {
CompletableFuture<Sampler> getFuture();
// Methods used to export the sampler data to the web viewer.
- SamplerData toProto(PlatformInfo platformInfo, CommandSender creator, Comparator<? super Map.Entry<String, ThreadNode>> outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup);
+ SamplerData toProto(SparkPlatform platform, CommandSender creator, Comparator<ThreadNode> outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup);
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java
index e99114a..9d54f50 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java
@@ -1,7 +1,6 @@
/*
* This file is part of spark.
*
- * Copyright (C) Albert Pham <http://www.sk89q.com>
* Copyright (c) lucko (Luck) <luck@lucko.me>
* Copyright (c) contributors
*
@@ -22,7 +21,7 @@
package me.lucko.spark.common.sampler;
import me.lucko.spark.common.util.ThreadFinder;
-import me.lucko.spark.proto.SparkProtos.SamplerMetadata;
+import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata;
import java.lang.management.ThreadInfo;
import java.lang.management.ThreadMXBean;
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java
index e63ebc8..9ad84df 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java
@@ -20,9 +20,11 @@
package me.lucko.spark.common.sampler;
-import me.lucko.spark.proto.SparkProtos.SamplerMetadata;
+import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata;
+import java.util.Collections;
import java.util.Map;
+import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@@ -42,6 +44,11 @@ public interface ThreadGrouper {
}
@Override
+ public String getLabel(String group) {
+ return group;
+ }
+
+ @Override
public SamplerMetadata.DataAggregator.ThreadGrouper asProto() {
return SamplerMetadata.DataAggregator.ThreadGrouper.BY_NAME;
}
@@ -55,14 +62,18 @@ public interface ThreadGrouper {
 * separated from the pool name by one or more of ' ', '-', or '#'.</p>
*/
ThreadGrouper BY_POOL = new ThreadGrouper() {
+ private /* static */ final Pattern pattern = Pattern.compile("^(.*?)[-# ]+\\d+$");
+
+ // thread id -> group
private final Map<Long, String> cache = new ConcurrentHashMap<>();
- private final Pattern pattern = Pattern.compile("^(.*?)[-# ]+\\d+$");
+ // group -> thread ids
+ private final Map<String, Set<Long>> seen = new ConcurrentHashMap<>();
@Override
public String getGroup(long threadId, String threadName) {
- String group = this.cache.get(threadId);
- if (group != null) {
- return group;
+ String cached = this.cache.get(threadId);
+ if (cached != null) {
+ return cached;
}
Matcher matcher = this.pattern.matcher(threadName);
@@ -70,12 +81,22 @@ public interface ThreadGrouper {
return threadName;
}
- group = matcher.group(1).trim() + " (Combined)";
- this.cache.put(threadId, group); // we don't care about race conditions here
+ String group = matcher.group(1).trim();
+ this.cache.put(threadId, group);
+ this.seen.computeIfAbsent(group, g -> ConcurrentHashMap.newKeySet()).add(threadId);
return group;
}
@Override
+ public String getLabel(String group) {
+ int count = this.seen.getOrDefault(group, Collections.emptySet()).size();
+ if (count == 0) {
+ return group;
+ }
+ return group + " (x" + count + ")";
+ }
+
+ @Override
public SamplerMetadata.DataAggregator.ThreadGrouper asProto() {
return SamplerMetadata.DataAggregator.ThreadGrouper.BY_POOL;
}
@@ -86,9 +107,17 @@ public interface ThreadGrouper {
* the name "All".
*/
ThreadGrouper AS_ONE = new ThreadGrouper() {
+ private final Set<Long> seen = ConcurrentHashMap.newKeySet();
+
@Override
public String getGroup(long threadId, String threadName) {
- return "All";
+ this.seen.add(threadId);
+ return "root";
+ }
+
+ @Override
+ public String getLabel(String group) {
+ return "All (x" + this.seen.size() + ")";
}
@Override
@@ -106,6 +135,14 @@ public interface ThreadGrouper {
*/
String getGroup(long threadId, String threadName);
+ /**
+ * Gets the label to use for a given group.
+ *
+ * @param group the group
+ * @return the label
+ */
+ String getLabel(String group);
+
SamplerMetadata.DataAggregator.ThreadGrouper asProto();
}
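
The new getLabel contract lets BY_POOL report how many threads were merged into each pool. A quick illustration of the expected behaviour (a sketch assuming spark-common on the classpath; thread ids and names are made up):

    import me.lucko.spark.common.sampler.ThreadGrouper;

    public class GrouperDemo {
        public static void main(String[] args) {
            ThreadGrouper grouper = ThreadGrouper.BY_POOL;
            String group = grouper.getGroup(1, "Worker-1"); // -> "Worker"
            grouper.getGroup(2, "Worker-2");                // same pool, second member
            System.out.println(grouper.getLabel(group));    // -> "Worker (x2)"
        }
    }
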
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadNodeOrder.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadNodeOrder.java
index 4fa8ff4..adcedcd 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadNodeOrder.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadNodeOrder.java
@@ -23,20 +23,19 @@ package me.lucko.spark.common.sampler;
import me.lucko.spark.common.sampler.node.ThreadNode;
import java.util.Comparator;
-import java.util.Map;
/**
* Methods of ordering {@link ThreadNode}s in the output data.
*/
-public enum ThreadNodeOrder implements Comparator<Map.Entry<String, ThreadNode>> {
+public enum ThreadNodeOrder implements Comparator<ThreadNode> {
/**
* Order by the name of the thread (alphabetically)
*/
BY_NAME {
@Override
- public int compare(Map.Entry<String, ThreadNode> o1, Map.Entry<String, ThreadNode> o2) {
- return o1.getKey().compareTo(o2.getKey());
+ public int compare(ThreadNode o1, ThreadNode o2) {
+ return o1.getThreadLabel().compareTo(o2.getThreadLabel());
}
},
@@ -45,8 +44,8 @@ public enum ThreadNodeOrder implements Comparator<Map.Entry<String, ThreadNode>>
*/
BY_TIME {
@Override
- public int compare(Map.Entry<String, ThreadNode> o1, Map.Entry<String, ThreadNode> o2) {
- return -Double.compare(o1.getValue().getTotalTime(), o2.getValue().getTotalTime());
+ public int compare(ThreadNode o1, ThreadNode o2) {
+ return -Double.compare(o1.getTotalTime(), o2.getTotalTime());
}
}
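
Since ThreadNodeOrder now compares ThreadNode values directly, callers sort the exported list in place. A fragment showing the intended use (aggregator assumed in scope):

    // Fragment: order exported thread nodes for the viewer.
    List<ThreadNode> nodes = dataAggregator.exportData();
    nodes.sort(ThreadNodeOrder.BY_TIME); // hottest threads first
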
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/AbstractDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/AbstractDataAggregator.java
index 7640d60..ad9dee4 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/AbstractDataAggregator.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/AbstractDataAggregator.java
@@ -23,6 +23,8 @@ package me.lucko.spark.common.sampler.aggregator;
import me.lucko.spark.common.sampler.ThreadGrouper;
import me.lucko.spark.common.sampler.node.ThreadNode;
+import java.util.ArrayList;
+import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
@@ -50,7 +52,11 @@ public abstract class AbstractDataAggregator implements DataAggregator {
}
@Override
- public Map<String, ThreadNode> getData() {
- return this.threadData;
+ public List<ThreadNode> exportData() {
+ List<ThreadNode> data = new ArrayList<>(this.threadData.values());
+ for (ThreadNode node : data) {
+ node.setThreadLabel(this.threadGrouper.getLabel(node.getThreadGroup()));
+ }
+ return data;
}
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/DataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/DataAggregator.java
index 8b90639..5590a96 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/DataAggregator.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/DataAggregator.java
@@ -21,9 +21,9 @@
package me.lucko.spark.common.sampler.aggregator;
import me.lucko.spark.common.sampler.node.ThreadNode;
-import me.lucko.spark.proto.SparkProtos.SamplerMetadata;
+import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata;
-import java.util.Map;
+import java.util.List;
/**
* Aggregates sampling data.
@@ -35,7 +35,7 @@ public interface DataAggregator {
*
* @return the output data
*/
- Map<String, ThreadNode> getData();
+ List<ThreadNode> exportData();
/**
* Gets metadata about the data aggregator instance.
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncDataAggregator.java
index 594d56e..3de3943 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncDataAggregator.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncDataAggregator.java
@@ -24,7 +24,7 @@ import me.lucko.spark.common.sampler.ThreadGrouper;
import me.lucko.spark.common.sampler.aggregator.AbstractDataAggregator;
import me.lucko.spark.common.sampler.node.StackTraceNode;
import me.lucko.spark.common.sampler.node.ThreadNode;
-import me.lucko.spark.proto.SparkProtos.SamplerMetadata;
+import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata;
/**
* Data aggregator for {@link AsyncSampler}.
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java
index f1d7209..d642a53 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java
@@ -20,13 +20,14 @@
package me.lucko.spark.common.sampler.async;
-import com.google.common.collect.ImmutableSetMultimap;
-import com.google.common.collect.Multimap;
+import com.google.common.collect.ImmutableTable;
+import com.google.common.collect.Table;
import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.util.TemporaryFiles;
import one.profiler.AsyncProfiler;
+import one.profiler.Events;
import java.io.InputStream;
import java.net.URL;
@@ -45,22 +46,31 @@ public enum AsyncProfilerAccess {
/** An instance of the async-profiler Java API. */
private final AsyncProfiler profiler;
+ /** The event to use for profiling */
+ private final ProfilingEvent profilingEvent;
+
/** If profiler is null, contains the reason why setup failed */
private final Exception setupException;
AsyncProfilerAccess() {
AsyncProfiler profiler;
+ ProfilingEvent profilingEvent = null;
Exception setupException = null;
try {
profiler = load();
- ensureCpuEventSupported(profiler);
+ if (isEventSupported(profiler, ProfilingEvent.CPU, false)) {
+ profilingEvent = ProfilingEvent.CPU;
+ } else if (isEventSupported(profiler, ProfilingEvent.WALL, true)) {
+ profilingEvent = ProfilingEvent.WALL;
+ }
} catch (Exception e) {
profiler = null;
setupException = e;
}
this.profiler = profiler;
+ this.profilingEvent = profilingEvent;
this.setupException = setupException;
}
@@ -71,11 +81,18 @@ public enum AsyncProfilerAccess {
return this.profiler;
}
+ public ProfilingEvent getProfilingEvent() {
+ return this.profilingEvent;
+ }
+
public boolean checkSupported(SparkPlatform platform) {
if (this.setupException != null) {
if (this.setupException instanceof UnsupportedSystemException) {
platform.getPlugin().log(Level.INFO, "The async-profiler engine is not supported for your os/arch (" +
this.setupException.getMessage() + "), so the built-in Java engine will be used instead.");
+ } else if (this.setupException instanceof NativeLoadingException && this.setupException.getCause().getMessage().contains("libstdc++")) {
+ platform.getPlugin().log(Level.WARNING, "Unable to initialise the async-profiler engine because libstdc++ is not installed.");
+ platform.getPlugin().log(Level.WARNING, "Please see here for more information: https://spark.lucko.me/docs/misc/Using-async-profiler#install-libstdc");
} else {
platform.getPlugin().log(Level.WARNING, "Unable to initialise the async-profiler engine: " + this.setupException.getMessage());
platform.getPlugin().log(Level.WARNING, "Please see here for more information: https://spark.lucko.me/docs/misc/Using-async-profiler");
@@ -91,18 +108,20 @@ public enum AsyncProfilerAccess {
String os = System.getProperty("os.name").toLowerCase(Locale.ROOT).replace(" ", "");
String arch = System.getProperty("os.arch").toLowerCase(Locale.ROOT);
- Multimap<String, String> supported = ImmutableSetMultimap.<String, String>builder()
- .put("linux", "amd64")
- .put("macosx", "amd64")
- .put("macosx", "aarch64")
+ Table<String, String, String> supported = ImmutableTable.<String, String, String>builder()
+ .put("linux", "amd64", "linux/amd64")
+ .put("linux", "aarch64", "linux/aarch64")
+ .put("macosx", "amd64", "macos")
+ .put("macosx", "aarch64", "macos")
.build();
- if (!supported.containsEntry(os, arch)) {
+ String libPath = supported.get(os, arch);
+ if (libPath == null) {
throw new UnsupportedSystemException(os, arch);
}
// extract the profiler binary from the spark jar file
- String resource = os + "/libasyncProfiler.so";
+ String resource = "spark/" + libPath + "/libasyncProfiler.so";
URL profilerResource = AsyncProfilerAccess.class.getClassLoader().getResource(resource);
if (profilerResource == null) {
throw new IllegalStateException("Could not find " + resource + " in spark jar file");
@@ -118,7 +137,7 @@ public enum AsyncProfilerAccess {
try {
return AsyncProfiler.getInstance(extractPath.toAbsolutePath().toString());
} catch (UnsatisfiedLinkError e) {
- throw new RuntimeException("A runtime error occurred whilst loading the native library", e);
+ throw new NativeLoadingException(e);
}
}
@@ -126,12 +145,37 @@ public enum AsyncProfilerAccess {
 * Checks the {@code profiler} to see if the given {@code event} is supported.
*
* @param profiler the profiler instance
- * @throws Exception if the event is not supported
+     * @return true if the event is supported, false otherwise
*/
- private static void ensureCpuEventSupported(AsyncProfiler profiler) throws Exception {
- String resp = profiler.execute("check,event=cpu").trim();
- if (!resp.equalsIgnoreCase("ok")) {
- throw new UnsupportedOperationException("CPU event is not supported");
+ private static boolean isEventSupported(AsyncProfiler profiler, ProfilingEvent event, boolean throwException) {
+ try {
+ String resp = profiler.execute("check,event=" + event).trim();
+ if (resp.equalsIgnoreCase("ok")) {
+ return true;
+ } else if (throwException) {
+ throw new IllegalArgumentException(resp);
+ }
+ } catch (Exception e) {
+ if (throwException) {
+ throw new RuntimeException("Event " + event + " is not supported", e);
+ }
+ }
+ return false;
+ }
+
+ enum ProfilingEvent {
+ CPU(Events.CPU),
+ WALL(Events.WALL);
+
+ private final String id;
+
+ ProfilingEvent(String id) {
+ this.id = id;
+ }
+
+ @Override
+ public String toString() {
+ return this.id;
}
}
@@ -140,4 +184,10 @@ public enum AsyncProfilerAccess {
super(os + '/' + arch);
}
}
+
+ private static final class NativeLoadingException extends RuntimeException {
+ public NativeLoadingException(Throwable cause) {
+ super("A runtime error occurred whilst loading the native library", cause);
+ }
+ }
}
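
The cpu-then-wall fallback rests on async-profiler's check command, which answers "OK" when an event is usable. A condensed sketch of the probe (uses the real AsyncProfiler.execute API; error handling trimmed):

    import one.profiler.AsyncProfiler;

    // Condensed sketch of the event probe; error handling trimmed.
    static boolean supports(AsyncProfiler profiler, String event) throws Exception {
        return profiler.execute("check,event=" + event).trim().equalsIgnoreCase("ok");
    }
    // usage: String event = supports(profiler, "cpu") ? "cpu" : "wall";
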
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java
index 1837cbc..5cb7fdc 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java
@@ -22,8 +22,8 @@ package me.lucko.spark.common.sampler.async;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
+import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.command.sender.CommandSender;
-import me.lucko.spark.common.platform.PlatformInfo;
import me.lucko.spark.common.sampler.AbstractSampler;
import me.lucko.spark.common.sampler.ThreadDumper;
import me.lucko.spark.common.sampler.ThreadGrouper;
@@ -32,7 +32,7 @@ import me.lucko.spark.common.sampler.node.MergeMode;
import me.lucko.spark.common.sampler.node.ThreadNode;
import me.lucko.spark.common.util.ClassSourceLookup;
import me.lucko.spark.common.util.TemporaryFiles;
-import me.lucko.spark.proto.SparkProtos;
+import me.lucko.spark.proto.SparkSamplerProtos.SamplerData;
import one.profiler.AsyncProfiler;
@@ -40,10 +40,8 @@ import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
-import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
-import java.util.Map;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
@@ -100,7 +98,7 @@ public class AsyncSampler extends AbstractSampler {
throw new RuntimeException("Unable to create temporary output file", e);
}
- String command = "start,event=cpu,interval=" + this.interval + "us,threads,jfr,file=" + this.outputFile.toString();
+ String command = "start,event=" + AsyncProfilerAccess.INSTANCE.getProfilingEvent() + ",interval=" + this.interval + "us,threads,jfr,file=" + this.outputFile.toString();
if (this.threadDumper instanceof ThreadDumper.Specific) {
command += ",filter";
}
@@ -117,6 +115,7 @@ public class AsyncSampler extends AbstractSampler {
}
}
+ recordInitialGcStats();
scheduleTimeout();
}
@@ -145,7 +144,14 @@ public class AsyncSampler extends AbstractSampler {
*/
@Override
public void stop() {
- this.profiler.stop();
+ try {
+ this.profiler.stop();
+ } catch (IllegalStateException e) {
+            if (!e.getMessage().equals("Profiler is not active")) { // ignore "not active" errors, rethrow anything else
+ throw e;
+ }
+ }
+
if (this.timeoutExecutor != null) {
this.timeoutExecutor.shutdown();
@@ -154,38 +160,11 @@ public class AsyncSampler extends AbstractSampler {
}
@Override
- public SparkProtos.SamplerData toProto(PlatformInfo platformInfo, CommandSender creator, Comparator<? super Map.Entry<String, ThreadNode>> outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup) {
- final SparkProtos.SamplerMetadata.Builder metadata = SparkProtos.SamplerMetadata.newBuilder()
- .setPlatformMetadata(platformInfo.toData().toProto())
- .setCreator(creator.toData().toProto())
- .setStartTime(this.startTime)
- .setInterval(this.interval)
- .setThreadDumper(this.threadDumper.getMetadata())
- .setDataAggregator(this.dataAggregator.getMetadata());
-
- if (comment != null) {
- metadata.setComment(comment);
- }
-
- SparkProtos.SamplerData.Builder proto = SparkProtos.SamplerData.newBuilder();
- proto.setMetadata(metadata.build());
-
+ public SamplerData toProto(SparkPlatform platform, CommandSender creator, Comparator<ThreadNode> outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup) {
+ SamplerData.Builder proto = SamplerData.newBuilder();
+ writeMetadataToProto(proto, platform, creator, comment, this.dataAggregator);
aggregateOutput();
-
- List<Map.Entry<String, ThreadNode>> data = new ArrayList<>(this.dataAggregator.getData().entrySet());
- data.sort(outputOrder);
-
- ClassSourceLookup.Visitor classSourceVisitor = ClassSourceLookup.createVisitor(classSourceLookup);
-
- for (Map.Entry<String, ThreadNode> entry : data) {
- proto.addThreads(entry.getValue().toProto(mergeMode));
- classSourceVisitor.visit(entry.getValue());
- }
-
- if (classSourceVisitor.hasMappings()) {
- proto.putAllClassSources(classSourceVisitor.getMapping());
- }
-
+ writeDataToProto(proto, this.dataAggregator, outputOrder, mergeMode, classSourceLookup);
return proto.build();
}
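
For reference, with the wall event selected and a 4000us interval, the start command assembled above comes out as follows (a sketch; the temp file path is made up):

    // Sketch: the assembled async-profiler start command, illustrative values only.
    String command = "start,event=wall,interval=4000us,threads,jfr,file=/tmp/spark-profile.jfr.tmp";
    AsyncProfilerAccess.INSTANCE.getProfiler().execute(command);
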
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/JfrReader.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/JfrReader.java
index a705f2d..e0cc4e9 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/JfrReader.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/JfrReader.java
@@ -59,6 +59,7 @@ public class JfrReader implements Closeable {
public final Dictionary<AsyncStackTraceElement> stackFrames = new Dictionary<>(); // spark
public final Map<Integer, String> frameTypes = new HashMap<>();
public final Map<Integer, String> threadStates = new HashMap<>();
+ public final Map<String, String> settings = new HashMap<>();
private int executionSample;
private int nativeMethodSample;
@@ -67,6 +68,8 @@ public class JfrReader implements Closeable {
private int allocationSample;
private int monitorEnter;
private int threadPark;
+ private int activeSetting;
+ private boolean activeSettingHasStack;
public JfrReader(Path path) throws IOException { // spark - Path instead of String
this.ch = FileChannel.open(path, StandardOpenOption.READ); // spark - Path instead of String
@@ -129,6 +132,8 @@ public class JfrReader implements Closeable {
if (cls == null || cls == ContendedLock.class) return (E) readContendedLock(false);
} else if (type == threadPark) {
if (cls == null || cls == ContendedLock.class) return (E) readContendedLock(true);
+ } else if (type == activeSetting) {
+ readActiveSetting();
}
if ((pos += size) <= buf.limit()) {
@@ -170,6 +175,17 @@ public class JfrReader implements Closeable {
return new ContendedLock(time, tid, stackTraceId, duration, classId);
}
+ private void readActiveSetting() {
+        long time = getVarlong();      // event timestamp (read only to advance the buffer)
+        long duration = getVarlong();  // event duration (unused)
+        int tid = getVarint();         // thread id (unused)
+        if (activeSettingHasStack) getVarint(); // skip the stack trace id when present
+        long id = getVarlong();        // setting id (unused)
+ String name = getString();
+ String value = getString();
+ settings.put(name, value);
+ }
+
private boolean readChunk(int pos) throws IOException {
if (pos + CHUNK_HEADER_SIZE > buf.limit() || buf.getInt(pos) != CHUNK_SIGNATURE) {
throw new IOException("Not a valid JFR file");
@@ -424,6 +440,8 @@ public class JfrReader implements Closeable {
allocationSample = getTypeId("jdk.ObjectAllocationSample");
monitorEnter = getTypeId("jdk.JavaMonitorEnter");
threadPark = getTypeId("jdk.ThreadPark");
+ activeSetting = getTypeId("jdk.ActiveSetting");
+ activeSettingHasStack = activeSetting >= 0 && typesByName.get("jdk.ActiveSetting").field("stackTrace") != null;
}
private int getTypeId(String typeName) {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaDataAggregator.java
index 54d9e1c..cc530d6 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaDataAggregator.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaDataAggregator.java
@@ -27,7 +27,7 @@ import me.lucko.spark.common.sampler.node.StackTraceNode;
import me.lucko.spark.common.sampler.node.ThreadNode;
import java.lang.management.ThreadInfo;
-import java.util.Map;
+import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
@@ -86,7 +86,7 @@ public abstract class JavaDataAggregator extends AbstractDataAggregator {
}
@Override
- public Map<String, ThreadNode> getData() {
+ public List<ThreadNode> exportData() {
// wait for all pending data to be inserted
this.workerPool.shutdown();
try {
@@ -95,7 +95,7 @@ public abstract class JavaDataAggregator extends AbstractDataAggregator {
e.printStackTrace();
}
- return super.getData();
+ return super.exportData();
}
private static boolean isSleeping(ThreadInfo thread) {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java
index 02d5f01..cfa0a0f 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java
@@ -1,7 +1,6 @@
/*
* This file is part of spark.
*
- * Copyright (C) Albert Pham <http://www.sk89q.com>
* Copyright (c) lucko (Luck) <luck@lucko.me>
* Copyright (c) contributors
*
@@ -23,8 +22,8 @@ package me.lucko.spark.common.sampler.java;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
+import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.command.sender.CommandSender;
-import me.lucko.spark.common.platform.PlatformInfo;
import me.lucko.spark.common.sampler.AbstractSampler;
import me.lucko.spark.common.sampler.ThreadDumper;
import me.lucko.spark.common.sampler.ThreadGrouper;
@@ -32,16 +31,12 @@ import me.lucko.spark.common.sampler.node.MergeMode;
import me.lucko.spark.common.sampler.node.ThreadNode;
import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.common.util.ClassSourceLookup;
-import me.lucko.spark.proto.SparkProtos.SamplerData;
-import me.lucko.spark.proto.SparkProtos.SamplerMetadata;
+import me.lucko.spark.proto.SparkSamplerProtos.SamplerData;
import java.lang.management.ManagementFactory;
import java.lang.management.ThreadInfo;
import java.lang.management.ThreadMXBean;
-import java.util.ArrayList;
import java.util.Comparator;
-import java.util.List;
-import java.util.Map;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
@@ -129,36 +124,10 @@ public class JavaSampler extends AbstractSampler implements Runnable {
}
@Override
- public SamplerData toProto(PlatformInfo platformInfo, CommandSender creator, Comparator<? super Map.Entry<String, ThreadNode>> outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup) {
- final SamplerMetadata.Builder metadata = SamplerMetadata.newBuilder()
- .setPlatformMetadata(platformInfo.toData().toProto())
- .setCreator(creator.toData().toProto())
- .setStartTime(this.startTime)
- .setInterval(this.interval)
- .setThreadDumper(this.threadDumper.getMetadata())
- .setDataAggregator(this.dataAggregator.getMetadata());
-
- if (comment != null) {
- metadata.setComment(comment);
- }
-
+ public SamplerData toProto(SparkPlatform platform, CommandSender creator, Comparator<ThreadNode> outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup) {
SamplerData.Builder proto = SamplerData.newBuilder();
- proto.setMetadata(metadata.build());
-
- List<Map.Entry<String, ThreadNode>> data = new ArrayList<>(this.dataAggregator.getData().entrySet());
- data.sort(outputOrder);
-
- ClassSourceLookup.Visitor classSourceVisitor = ClassSourceLookup.createVisitor(classSourceLookup);
-
- for (Map.Entry<String, ThreadNode> entry : data) {
- proto.addThreads(entry.getValue().toProto(mergeMode));
- classSourceVisitor.visit(entry.getValue());
- }
-
- if (classSourceVisitor.hasMappings()) {
- proto.putAllClassSources(classSourceVisitor.getMapping());
- }
-
+ writeMetadataToProto(proto, platform, creator, comment, this.dataAggregator);
+ writeDataToProto(proto, this.dataAggregator, outputOrder, mergeMode, classSourceLookup);
return proto.build();
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/SimpleDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/SimpleDataAggregator.java
index e7113a1..39e21aa 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/SimpleDataAggregator.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/SimpleDataAggregator.java
@@ -22,7 +22,7 @@ package me.lucko.spark.common.sampler.java;
import me.lucko.spark.common.sampler.ThreadGrouper;
import me.lucko.spark.common.sampler.aggregator.DataAggregator;
-import me.lucko.spark.proto.SparkProtos.SamplerMetadata;
+import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata;
import java.lang.management.ThreadInfo;
import java.util.concurrent.ExecutorService;
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java
index 018a3b8..e817828 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java
@@ -24,12 +24,11 @@ import me.lucko.spark.common.sampler.ThreadGrouper;
import me.lucko.spark.common.sampler.aggregator.DataAggregator;
import me.lucko.spark.common.sampler.node.ThreadNode;
import me.lucko.spark.common.tick.TickHook;
-import me.lucko.spark.proto.SparkProtos.SamplerMetadata;
+import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata;
import java.lang.management.ThreadInfo;
import java.util.ArrayList;
import java.util.List;
-import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
@@ -102,13 +101,13 @@ public class TickedDataAggregator extends JavaDataAggregator {
}
@Override
- public Map<String, ThreadNode> getData() {
+ public List<ThreadNode> exportData() {
// push the current tick
synchronized (this.mutex) {
pushCurrentTick();
}
- return super.getData();
+ return super.exportData();
}
private final class TickList implements Runnable {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java
index 18f67ba..fd2be8d 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java
@@ -1,7 +1,6 @@
/*
* This file is part of spark.
*
- * Copyright (C) Albert Pham <http://www.sk89q.com>
* Copyright (c) lucko (Luck) <luck@lucko.me>
* Copyright (c) contributors
*
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java
index f935fb2..b0d9237 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java
@@ -1,7 +1,6 @@
/*
* This file is part of spark.
*
- * Copyright (C) Albert Pham <http://www.sk89q.com>
* Copyright (c) lucko (Luck) <luck@lucko.me>
* Copyright (c) contributors
*
@@ -22,7 +21,7 @@
package me.lucko.spark.common.sampler.node;
import me.lucko.spark.common.util.MethodDisambiguator;
-import me.lucko.spark.proto.SparkProtos;
+import me.lucko.spark.proto.SparkSamplerProtos;
import org.checkerframework.checker.nullness.qual.Nullable;
@@ -65,8 +64,8 @@ public final class StackTraceNode extends AbstractNode implements Comparable<Sta
return this.description.parentLineNumber;
}
- public SparkProtos.StackTraceNode toProto(MergeMode mergeMode) {
- SparkProtos.StackTraceNode.Builder proto = SparkProtos.StackTraceNode.newBuilder()
+ public SparkSamplerProtos.StackTraceNode toProto(MergeMode mergeMode) {
+ SparkSamplerProtos.StackTraceNode.Builder proto = SparkSamplerProtos.StackTraceNode.newBuilder()
.setTime(getTotalTime())
.setClassName(this.description.className)
.setMethodName(this.description.methodName);
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java
index 5cac33d..ed97443 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java
@@ -20,7 +20,7 @@
package me.lucko.spark.common.sampler.node;
-import me.lucko.spark.proto.SparkProtos;
+import me.lucko.spark.proto.SparkSamplerProtos;
/**
* The root of a sampling stack for a given thread / thread group.
@@ -28,17 +28,34 @@ import me.lucko.spark.proto.SparkProtos;
public final class ThreadNode extends AbstractNode {
/**
- * The name of this thread
+ * The name of this thread / thread group
*/
- private final String threadName;
+ private final String name;
- public ThreadNode(String threadName) {
- this.threadName = threadName;
+ /**
+ * The label used to describe this thread in the viewer
+ */
+ public String label;
+
+ public ThreadNode(String name) {
+ this.name = name;
+ }
+
+ public String getThreadLabel() {
+ return this.label != null ? this.label : this.name;
+ }
+
+ public String getThreadGroup() {
+ return this.name;
+ }
+
+ public void setThreadLabel(String label) {
+ this.label = label;
}
- public SparkProtos.ThreadNode toProto(MergeMode mergeMode) {
- SparkProtos.ThreadNode.Builder proto = SparkProtos.ThreadNode.newBuilder()
- .setName(this.threadName)
+ public SparkSamplerProtos.ThreadNode toProto(MergeMode mergeMode) {
+ SparkSamplerProtos.ThreadNode.Builder proto = SparkSamplerProtos.ThreadNode.newBuilder()
+ .setName(getThreadLabel())
.setTime(getTotalTime());
for (StackTraceNode child : exportChildren(mergeMode)) {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/tick/AbstractTickHook.java b/spark-common/src/main/java/me/lucko/spark/common/tick/AbstractTickHook.java
index a6e8745..2a31e0d 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/tick/AbstractTickHook.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/tick/AbstractTickHook.java
@@ -20,12 +20,12 @@
package me.lucko.spark.common.tick;
-import java.util.HashSet;
import java.util.Set;
+import java.util.concurrent.CopyOnWriteArraySet;
public abstract class AbstractTickHook implements TickHook {
- private final Set<Callback> tasks = new HashSet<>();
+ private final Set<Callback> tasks = new CopyOnWriteArraySet<>();
private int tick = 0;
protected void onTick() {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/tick/AbstractTickReporter.java b/spark-common/src/main/java/me/lucko/spark/common/tick/AbstractTickReporter.java
index 74a814d..431a641 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/tick/AbstractTickReporter.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/tick/AbstractTickReporter.java
@@ -20,11 +20,11 @@
package me.lucko.spark.common.tick;
-import java.util.HashSet;
import java.util.Set;
+import java.util.concurrent.CopyOnWriteArraySet;
public abstract class AbstractTickReporter implements TickReporter {
- private final Set<Callback> tasks = new HashSet<>();
+ private final Set<Callback> tasks = new CopyOnWriteArraySet<>();
protected void onTick(double duration) {
for (Callback r : this.tasks) {
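
The switch from HashSet to CopyOnWriteArraySet in both tick classes matters because callbacks can be registered or removed from other threads while onTick is iterating; with a HashSet that is a ConcurrentModificationException waiting to happen. A standalone demonstration of why the copy-on-write variant is safe:

    import java.util.Set;
    import java.util.concurrent.CopyOnWriteArraySet;

    public class CowDemo {
        public static void main(String[] args) {
            Set<Runnable> tasks = new CopyOnWriteArraySet<>();
            Runnable task = () -> System.out.println("tick");
            tasks.add(task);
            for (Runnable r : tasks) {
                tasks.remove(task); // safe: iteration continues over the old snapshot
                r.run();
            }
        }
    }
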
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/BytebinClient.java b/spark-common/src/main/java/me/lucko/spark/common/util/BytebinClient.java
index 29ee5bb..c2ca1b1 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/util/BytebinClient.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/BytebinClient.java
@@ -1,26 +1,21 @@
/*
- * This file is part of bytebin, licensed under the MIT License.
+ * This file is part of spark.
*
* Copyright (c) lucko (Luck) <luck@lucko.me>
* Copyright (c) contributors
*
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
*
- * The above copyright notice and this permission notice shall be included in all
- * copies or substantial portions of the Software.
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
*
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- * SOFTWARE.
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package me.lucko.spark.common.util;
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/ClassSourceLookup.java b/spark-common/src/main/java/me/lucko/spark/common/util/ClassSourceLookup.java
index 42a04f7..bd9ec37 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/util/ClassSourceLookup.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/ClassSourceLookup.java
@@ -26,6 +26,7 @@ import me.lucko.spark.common.sampler.node.ThreadNode;
import org.checkerframework.checker.nullness.qual.Nullable;
import java.io.IOException;
+import java.net.MalformedURLException;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.URLClassLoader;
@@ -87,8 +88,22 @@ public interface ClassSourceLookup {
*/
interface ByUrl extends ClassSourceLookup {
- default String identifyUrl(URL url) throws URISyntaxException {
- return url.getProtocol().equals("file") ? identifyFile(Paths.get(url.toURI())) : null;
+ default String identifyUrl(URL url) throws URISyntaxException, MalformedURLException {
+ Path path = null;
+
+ String protocol = url.getProtocol();
+ if (protocol.equals("file")) {
+ path = Paths.get(url.toURI());
+ } else if (protocol.equals("jar")) {
+ URL innerUrl = new URL(url.getPath());
+ path = Paths.get(innerUrl.getPath().split("!")[0]);
+ }
+
+ if (path != null) {
+ return identifyFile(path.toAbsolutePath().normalize());
+ }
+
+ return null;
}
default String identifyFile(Path path) {
@@ -123,7 +138,7 @@ public interface ClassSourceLookup {
*/
class ByCodeSource implements ClassSourceLookup, ByUrl {
@Override
- public @Nullable String identify(Class<?> clazz) throws URISyntaxException {
+ public @Nullable String identify(Class<?> clazz) throws URISyntaxException, MalformedURLException {
ProtectionDomain protectionDomain = clazz.getProtectionDomain();
if (protectionDomain == null) {
return null;
@@ -148,12 +163,12 @@ public interface ClassSourceLookup {
static Visitor createVisitor(ClassSourceLookup lookup) {
if (lookup == ClassSourceLookup.NO_OP) {
- return NoOpVistitor.INSTANCE; // don't bother!
+ return NoOpVisitor.INSTANCE; // don't bother!
}
return new VisitorImpl(lookup);
}
- enum NoOpVistitor implements Visitor {
+ enum NoOpVisitor implements Visitor {
INSTANCE;
@Override
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/Compression.java b/spark-common/src/main/java/me/lucko/spark/common/util/Compression.java
new file mode 100644
index 0000000..9295c25
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/Compression.java
@@ -0,0 +1,100 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.util;
+
+import org.tukaani.xz.LZMA2Options;
+import org.tukaani.xz.LZMAOutputStream;
+import org.tukaani.xz.XZOutputStream;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.function.LongConsumer;
+import java.util.zip.GZIPOutputStream;
+
+public enum Compression {
+ GZIP {
+ @Override
+ public Path compress(Path file, LongConsumer progressHandler) throws IOException {
+ Path compressedFile = file.getParent().resolve(file.getFileName().toString() + ".gz");
+ try (InputStream in = Files.newInputStream(file)) {
+ try (OutputStream out = Files.newOutputStream(compressedFile)) {
+ try (GZIPOutputStream compressionOut = new GZIPOutputStream(out, 1024 * 64)) {
+ copy(in, compressionOut, progressHandler);
+ }
+ }
+ }
+ return compressedFile;
+ }
+ },
+ XZ {
+ @Override
+ public Path compress(Path file, LongConsumer progressHandler) throws IOException {
+ Path compressedFile = file.getParent().resolve(file.getFileName().toString() + ".xz");
+ try (InputStream in = Files.newInputStream(file)) {
+ try (OutputStream out = Files.newOutputStream(compressedFile)) {
+ try (XZOutputStream compressionOut = new XZOutputStream(out, new LZMA2Options())) {
+ copy(in, compressionOut, progressHandler);
+ }
+ }
+ }
+ return compressedFile;
+ }
+ },
+ LZMA {
+ @Override
+ public Path compress(Path file, LongConsumer progressHandler) throws IOException {
+ Path compressedFile = file.getParent().resolve(file.getFileName().toString() + ".lzma");
+ try (InputStream in = Files.newInputStream(file)) {
+ try (OutputStream out = Files.newOutputStream(compressedFile)) {
+ try (LZMAOutputStream compressionOut = new LZMAOutputStream(out, new LZMA2Options(), true)) {
+ copy(in, compressionOut, progressHandler);
+ }
+ }
+ }
+ return compressedFile;
+ }
+ };
+
+ public abstract Path compress(Path file, LongConsumer progressHandler) throws IOException;
+
+ private static long copy(InputStream from, OutputStream to, LongConsumer progress) throws IOException {
+ byte[] buf = new byte[1024 * 64];
+ long total = 0;
+ long iterations = 0;
+ while (true) {
+ int r = from.read(buf);
+ if (r == -1) {
+ break;
+ }
+ to.write(buf, 0, r);
+ total += r;
+
+ // report progress every 5MB
+ if (iterations++ % ((1024 / 64) * 5) == 0) {
+ progress.accept(total);
+ }
+ }
+ return total;
+ }
+}
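
Usage sketch for the enum above (the report path and progress handling are illustrative; compress throws IOException):

    import java.nio.file.Path;
    import java.nio.file.Paths;

    // Sketch: gzip a report, logging progress roughly every 5MB.
    Path report = Paths.get("/tmp/heapdump.hprof");
    Path compressed = Compression.GZIP.compress(report,
            total -> System.out.println("compressed " + total + " bytes so far"));
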
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/FormatUtil.java b/spark-common/src/main/java/me/lucko/spark/common/util/FormatUtil.java
index 492d4ea..c4a3d66 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/util/FormatUtil.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/FormatUtil.java
@@ -20,6 +20,11 @@
package me.lucko.spark.common.util;
+import net.kyori.adventure.text.Component;
+import net.kyori.adventure.text.format.TextColor;
+
+import java.util.Locale;
+
public enum FormatUtil {
;
@@ -31,10 +36,30 @@ public enum FormatUtil {
}
public static String formatBytes(long bytes) {
- if (bytes == 0) {
+ if (bytes <= 0) {
return "0 bytes";
}
int sizeIndex = (int) (Math.log(bytes) / Math.log(1024));
- return String.format("%.1f", bytes / Math.pow(1024, sizeIndex)) + " " + SIZE_UNITS[sizeIndex];
+ return String.format(Locale.ENGLISH, "%.1f", bytes / Math.pow(1024, sizeIndex)) + " " + SIZE_UNITS[sizeIndex];
+ }
+
+ public static Component formatBytes(long bytes, TextColor color, String suffix) {
+ String value;
+ String unit;
+
+ if (bytes <= 0) {
+ value = "0";
+ unit = "KB" + suffix;
+ } else {
+ int sizeIndex = (int) (Math.log(bytes) / Math.log(1024));
+ value = String.format(Locale.ENGLISH, "%.1f", bytes / Math.pow(1024, sizeIndex));
+ unit = SIZE_UNITS[sizeIndex] + suffix;
+ }
+
+ return Component.text()
+ .append(Component.text(value, color))
+ .append(Component.space())
+ .append(Component.text(unit))
+ .build();
}
}
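
The unit selection takes the integer part of log1024(bytes); for example 1536 bytes gives log(1536)/log(1024) ≈ 1.06, so index 1, and 1536/1024^1 = 1.5. Assuming SIZE_UNITS (defined elsewhere in this class) is {"bytes", "KB", "MB", "GB", ...}, that renders as "1.5 KB":

    // Worked example of the unit maths above.
    long bytes = 1536;
    int sizeIndex = (int) (Math.log(bytes) / Math.log(1024)); // 1 -> "KB"
    double value = bytes / Math.pow(1024, sizeIndex);         // 1.5
    // formatBytes(1536) -> "1.5 KB"
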
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/LinuxProc.java b/spark-common/src/main/java/me/lucko/spark/common/util/LinuxProc.java
new file mode 100644
index 0000000..7d688d7
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/LinuxProc.java
@@ -0,0 +1,84 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.util;
+
+import org.checkerframework.checker.nullness.qual.NonNull;
+import org.checkerframework.checker.nullness.qual.Nullable;
+
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.Collections;
+import java.util.List;
+
+/**
+ * Utility for reading from /proc/ on Linux systems.
+ */
+public enum LinuxProc {
+
+ /**
+ * Information about the system CPU.
+ */
+ CPUINFO("/proc/cpuinfo"),
+
+ /**
+ * Information about the system memory.
+ */
+ MEMINFO("/proc/meminfo"),
+
+ /**
+ * Information about the system network usage.
+ */
+ NET_DEV("/proc/net/dev");
+
+ private final Path path;
+
+ LinuxProc(String path) {
+ this.path = resolvePath(path);
+ }
+
+ private static @Nullable Path resolvePath(String path) {
+ try {
+ Path p = Paths.get(path);
+ if (Files.isReadable(p)) {
+ return p;
+ }
+ } catch (Exception e) {
+ // ignore
+ }
+ return null;
+ }
+
+ public @NonNull List<String> read() {
+ if (this.path != null) {
+ try {
+ return Files.readAllLines(this.path, StandardCharsets.UTF_8);
+ } catch (IOException e) {
+ // ignore
+ }
+ }
+
+ return Collections.emptyList();
+ }
+
+}
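
Usage sketch (Linux only; on other platforms, or when the file is unreadable, read() just returns an empty list):

    // Sketch: pull the CPU model line out of /proc/cpuinfo.
    for (String line : LinuxProc.CPUINFO.read()) {
        if (line.startsWith("model name")) {
            System.out.println(line.substring(line.indexOf(':') + 1).trim());
            break;
        }
    }
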
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/RollingAverage.java b/spark-common/src/main/java/me/lucko/spark/common/util/RollingAverage.java
index 87c41a4..65753bc 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/util/RollingAverage.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/RollingAverage.java
@@ -21,6 +21,7 @@
package me.lucko.spark.common.util;
import me.lucko.spark.api.statistic.misc.DoubleAverageInfo;
+import me.lucko.spark.proto.SparkProtos;
import java.math.BigDecimal;
import java.math.RoundingMode;
@@ -39,6 +40,12 @@ public class RollingAverage implements DoubleAverageInfo {
this.samples = new ArrayDeque<>(this.windowSize + 1);
}
+ public int getSamples() {
+ synchronized (this) {
+ return this.samples.size();
+ }
+ }
+
public void add(BigDecimal num) {
synchronized (this) {
this.total = this.total.add(num);
@@ -105,4 +112,14 @@ public class RollingAverage implements DoubleAverageInfo {
return sortedSamples[rank].doubleValue();
}
+ public SparkProtos.RollingAverageValues toProto() {
+ return SparkProtos.RollingAverageValues.newBuilder()
+ .setMean(mean())
+ .setMax(max())
+ .setMin(min())
+ .setMedian(median())
+ .setPercentile95(percentile95th())
+ .build();
+ }
+
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/SparkThreadFactory.java b/spark-common/src/main/java/me/lucko/spark/common/util/SparkThreadFactory.java
new file mode 100644
index 0000000..156fa0d
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/SparkThreadFactory.java
@@ -0,0 +1,49 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.util;
+
+import java.util.concurrent.ThreadFactory;
+import java.util.concurrent.atomic.AtomicInteger;
+
+public class SparkThreadFactory implements ThreadFactory, Thread.UncaughtExceptionHandler {
+ private static final AtomicInteger poolNumber = new AtomicInteger(1);
+ private final AtomicInteger threadNumber = new AtomicInteger(1);
+ private final String namePrefix;
+
+ public SparkThreadFactory() {
+ this.namePrefix = "spark-worker-pool-" +
+ poolNumber.getAndIncrement() +
+ "-thread-";
+ }
+
+ public Thread newThread(Runnable r) {
+ Thread t = new Thread(r, this.namePrefix + this.threadNumber.getAndIncrement());
+ t.setUncaughtExceptionHandler(this);
+ t.setDaemon(true);
+ return t;
+ }
+
+ @Override
+ public void uncaughtException(Thread t, Throwable e) {
+ System.err.println("Uncaught exception thrown by thread " + t.getName());
+ e.printStackTrace();
+ }
+}
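
Usage sketch: handing the factory to an executor yields daemon threads named spark-worker-pool-N-thread-M that log uncaught exceptions instead of dying silently:

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;

    // Sketch: a pool whose threads use the naming/daemon/exception defaults above.
    ExecutorService pool = Executors.newFixedThreadPool(4, new SparkThreadFactory());
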
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/StatisticFormatter.java b/spark-common/src/main/java/me/lucko/spark/common/util/StatisticFormatter.java
new file mode 100644
index 0000000..22ee9bb
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/StatisticFormatter.java
@@ -0,0 +1,189 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.util;
+
+import com.google.common.base.Strings;
+
+import net.kyori.adventure.text.Component;
+import net.kyori.adventure.text.TextComponent;
+import net.kyori.adventure.text.format.TextColor;
+
+import java.lang.management.MemoryUsage;
+import java.util.Locale;
+
+import static net.kyori.adventure.text.Component.text;
+import static net.kyori.adventure.text.format.NamedTextColor.DARK_GRAY;
+import static net.kyori.adventure.text.format.NamedTextColor.GRAY;
+import static net.kyori.adventure.text.format.NamedTextColor.GREEN;
+import static net.kyori.adventure.text.format.NamedTextColor.RED;
+import static net.kyori.adventure.text.format.NamedTextColor.YELLOW;
+
+public enum StatisticFormatter {
+ ;
+
+ private static final String BAR_TRUE_CHARACTER = "┃";
+ private static final String BAR_FALSE_CHARACTER = "╻";
+
+ public static TextComponent formatTps(double tps) {
+ TextColor color;
+ if (tps > 18.0) {
+ color = GREEN;
+ } else if (tps > 16.0) {
+ color = YELLOW;
+ } else {
+ color = RED;
+ }
+
+ return text((tps > 20.0 ? "*" : "") + Math.min(Math.round(tps * 100.0) / 100.0, 20.0), color);
+ }
+
+ public static TextComponent formatTickDurations(RollingAverage average) {
+ return text()
+ .append(formatTickDuration(average.min()))
+ .append(text('/', GRAY))
+ .append(formatTickDuration(average.median()))
+ .append(text('/', GRAY))
+ .append(formatTickDuration(average.percentile95th()))
+ .append(text('/', GRAY))
+ .append(formatTickDuration(average.max()))
+ .build();
+ }
+
+ public static TextComponent formatTickDuration(double duration) {
+ TextColor color;
+ if (duration >= 50d) {
+ color = RED;
+ } else if (duration >= 40d) {
+ color = YELLOW;
+ } else {
+ color = GREEN;
+ }
+
+ return text(String.format(Locale.ENGLISH, "%.1f", duration), color);
+ }
+
+ public static TextComponent formatCpuUsage(double usage) {
+ TextColor color;
+ if (usage > 0.9) {
+ color = RED;
+ } else if (usage > 0.65) {
+ color = YELLOW;
+ } else {
+ color = GREEN;
+ }
+
+ return text(FormatUtil.percent(usage, 1d), color);
+ }
+
+ public static TextComponent formatPingRtts(double min, double median, double percentile95th, double max) {
+ return text()
+ .append(formatPingRtt(min))
+ .append(text('/', GRAY))
+ .append(formatPingRtt(median))
+ .append(text('/', GRAY))
+ .append(formatPingRtt(percentile95th))
+ .append(text('/', GRAY))
+ .append(formatPingRtt(max))
+ .build();
+ }
+
+ public static TextComponent formatPingRtt(double ping) {
+ TextColor color;
+ if (ping >= 200) {
+ color = RED;
+ } else if (ping >= 100) {
+ color = YELLOW;
+ } else {
+ color = GREEN;
+ }
+
+ return text((int) Math.ceil(ping), color);
+ }
+
+ public static TextComponent generateMemoryUsageDiagram(MemoryUsage usage, int length) {
+ double used = usage.getUsed();
+ double committed = usage.getCommitted();
+ double max = usage.getMax();
+
+ int usedChars = (int) ((used * length) / max);
+ int committedChars = (int) ((committed * length) / max);
+
+ TextComponent.Builder line = text().content(Strings.repeat(BAR_TRUE_CHARACTER, usedChars)).color(YELLOW);
+ if (committedChars > usedChars) {
+ line.append(text(Strings.repeat(BAR_FALSE_CHARACTER, (committedChars - usedChars) - 1), GRAY));
+ line.append(Component.text(BAR_FALSE_CHARACTER, RED));
+ }
+ if (length > committedChars) {
+ line.append(text(Strings.repeat(BAR_FALSE_CHARACTER, (length - committedChars)), GRAY));
+ }
+
+ return text()
+ .append(text("[", DARK_GRAY))
+ .append(line.build())
+ .append(text("]", DARK_GRAY))
+ .build();
+ }
+
+ public static TextComponent generateMemoryPoolDiagram(MemoryUsage usage, MemoryUsage collectionUsage, int length) {
+ double used = usage.getUsed();
+ double collectionUsed = used;
+ if (collectionUsage != null) {
+ collectionUsed = collectionUsage.getUsed();
+ }
+ double committed = usage.getCommitted();
+ double max = usage.getMax();
+
+ int usedChars = (int) ((used * length) / max);
+ int collectionUsedChars = (int) ((collectionUsed * length) / max);
+ int committedChars = (int) ((committed * length) / max);
+
+ TextComponent.Builder line = text().content(Strings.repeat(BAR_TRUE_CHARACTER, collectionUsedChars)).color(YELLOW);
+
+ if (usedChars > collectionUsedChars) {
+ line.append(Component.text(BAR_TRUE_CHARACTER, RED));
+ line.append(text(Strings.repeat(BAR_TRUE_CHARACTER, (usedChars - collectionUsedChars) - 1), YELLOW));
+ }
+ if (committedChars > usedChars) {
+ line.append(text(Strings.repeat(BAR_FALSE_CHARACTER, (committedChars - usedChars) - 1), GRAY));
+ line.append(Component.text(BAR_FALSE_CHARACTER, YELLOW));
+ }
+ if (length > committedChars) {
+ line.append(text(Strings.repeat(BAR_FALSE_CHARACTER, (length - committedChars)), GRAY));
+ }
+
+ return text()
+ .append(text("[", DARK_GRAY))
+ .append(line.build())
+ .append(text("]", DARK_GRAY))
+ .build();
+ }
+
+ public static TextComponent generateDiskUsageDiagram(double used, double max, int length) {
+ int usedChars = (int) ((used * length) / max);
+ int freeChars = length - usedChars;
+ return text()
+ .append(text("[", DARK_GRAY))
+ .append(text(Strings.repeat(BAR_TRUE_CHARACTER, usedChars), YELLOW))
+ .append(text(Strings.repeat(BAR_FALSE_CHARACTER, freeChars), GRAY))
+ .append(text("]", DARK_GRAY))
+ .build();
+ }
+}
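
The diagram maths scales each quantity into character cells. For example, with used = 6 GiB, committed = 8 GiB, max = 16 GiB and length = 40, the bar is 15 filled cells, then 4 grey plus 1 red committed markers, then 20 grey free cells:

    // Worked example for generateMemoryUsageDiagram.
    double used = 6L << 30, committed = 8L << 30, max = 16L << 30;
    int length = 40;
    int usedChars = (int) ((used * length) / max);           // 15
    int committedChars = (int) ((committed * length) / max); // 20
    // 15x '┃' (yellow), 4x '╻' (grey), 1x '╻' (red), 20x '╻' (grey)
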
diff --git a/spark-common/src/main/proto/spark/spark.proto b/spark-common/src/main/proto/spark/spark.proto
index 4305a51..ec0aa88 100644
--- a/spark-common/src/main/proto/spark/spark.proto
+++ b/spark-common/src/main/proto/spark/spark.proto
@@ -5,34 +5,17 @@ package spark;
option java_package = "me.lucko.spark.proto";
option java_outer_classname = "SparkProtos";
-message CommandSenderMetadata {
- Type type = 1;
- string name = 2;
- string unique_id = 3;
-
- enum Type {
- OTHER = 0;
- PLAYER = 1;
- }
-}
-
-message MemoryUsage {
- int64 used = 1;
- int64 committed = 2;
- int64 max = 3;
-}
-
message PlatformMetadata {
Type type = 1;
string name = 2;
string version = 3;
string minecraft_version = 4; // optional
-
- int32 n_cpus = 5;
- MemoryUsage heapUsage = 6;
-
int32 spark_version = 7;
+  // replaced by the PlatformStatistics / SystemStatistics messages
+ reserved 5, 6;
+ reserved "n_cpus", "heap_usage";
+
enum Type {
SERVER = 0;
CLIENT = 1;
@@ -40,80 +23,125 @@ message PlatformMetadata {
}
}
-message HeapData {
- HeapMetadata metadata = 1;
- repeated HeapEntry entries = 2;
-}
+message SystemStatistics {
+ Cpu cpu = 1;
+ Memory memory = 2;
+ map<string, Gc> gc = 3;
+ Disk disk = 4;
+ Os os = 5;
+ Java java = 6;
+ int64 uptime = 7;
+ map<string, NetInterface> net = 8;
+
+ message Cpu {
+ int32 threads = 1;
+ Usage process_usage = 2;
+ Usage system_usage = 3;
+ string model_name = 4; // optional
+
+ message Usage {
+ double last1m = 1;
+ double last15m = 2;
+ }
+ }
-message HeapMetadata {
- CommandSenderMetadata creator = 1;
- PlatformMetadata platform_metadata = 2;
-}
+ message Memory {
+ MemoryPool physical = 1;
+ MemoryPool swap = 2;
-message HeapEntry {
- int32 order = 1;
- int32 instances = 2;
- int64 size = 3;
- string type = 4;
-}
+ message MemoryPool {
+ int64 used = 1;
+ int64 total = 2;
+ }
+ }
-message SamplerData {
- SamplerMetadata metadata = 1;
- repeated ThreadNode threads = 2;
- map<string, string> class_sources = 3; // optional
+ message Gc {
+ int64 total = 1;
+ double avg_time = 2;
+ double avg_frequency = 3;
+ }
+
+ message Disk {
+ int64 used = 1;
+ int64 total = 2;
+ }
+
+ message Os {
+ string arch = 1;
+ string name = 2;
+ string version = 3;
+ }
+
+ message Java {
+ string vendor = 1;
+ string version = 2;
+ string vendor_version = 3;
+ string vm_args = 4;
+ }
+
+ message NetInterface {
+ RollingAverageValues rx_bytes_per_second = 1;
+ RollingAverageValues tx_bytes_per_second = 2;
+ RollingAverageValues rx_packets_per_second = 3;
+ RollingAverageValues tx_packets_per_second = 4;
+ }
}
-message SamplerMetadata {
- CommandSenderMetadata creator = 1;
- int64 start_time = 2;
- int32 interval = 3;
- ThreadDumper thread_dumper = 4;
- DataAggregator data_aggregator = 5;
- string comment = 6;
- PlatformMetadata platform_metadata = 7;
-
- message ThreadDumper {
- Type type = 1;
- repeated int64 ids = 2; // optional
- repeated string patterns = 3; // optional
-
- enum Type {
- ALL = 0;
- SPECIFIC = 1;
- REGEX = 2;
+message PlatformStatistics {
+ Memory memory = 1;
+ map<string, Gc> gc = 2; // keyed by collector name
+ int64 uptime = 3;
+ Tps tps = 4; // optional
+ Mspt mspt = 5; // optional
+ Ping ping = 6; // optional
+ int64 player_count = 7;
+
+ message Memory {
+ MemoryPool heap = 1;
+
+ message MemoryPool {
+ int64 used = 1;
+ int64 total = 2;
}
}
- message DataAggregator {
- Type type = 1;
- ThreadGrouper thread_grouper = 2;
- int64 tick_length_threshold = 3; // optional
+ message Gc {
+ int64 total = 1;
+ double avg_time = 2;
+ double avg_frequency = 3;
+ }
- enum Type {
- SIMPLE = 0;
- TICKED = 1;
- }
+ message Tps {
+ double last1m = 1;
+ double last5m = 2;
+ double last15m = 3;
+ }
- enum ThreadGrouper {
- BY_NAME = 0;
- BY_POOL = 1;
- AS_ONE = 2;
- }
+ message Mspt {
+ RollingAverageValues last1m = 1;
+ RollingAverageValues last5m = 2;
+ }
+
+ message Ping {
+ RollingAverageValues last15m = 1;
}
}
-message StackTraceNode {
- double time = 1;
- repeated StackTraceNode children = 2;
- string class_name = 3;
- string method_name = 4;
- int32 parent_line_number = 5; // optional
- int32 line_number = 6; // optional
- string method_desc = 7; // optional
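+// Summary statistics for a windowed rolling average.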
+message RollingAverageValues {
+ double mean = 1;
+ double max = 2;
+ double min = 3;
+ double median = 4;
+ double percentile95 = 5;
}
-message ThreadNode {
- string name = 1;
- double time = 2;
- repeated StackTraceNode children = 3;
+message CommandSenderMetadata {
+ Type type = 1;
+ string name = 2;
+ string unique_id = 3;
+
+ enum Type {
+ OTHER = 0;
+ PLAYER = 1;
+ }
}
diff --git a/spark-common/src/main/proto/spark/spark_heap.proto b/spark-common/src/main/proto/spark/spark_heap.proto
new file mode 100644
index 0000000..59f2b85
--- /dev/null
+++ b/spark-common/src/main/proto/spark/spark_heap.proto
@@ -0,0 +1,27 @@
+syntax = "proto3";
+
+package spark;
+
+import "spark/spark.proto";
+
+option java_package = "me.lucko.spark.proto";
+option java_outer_classname = "SparkHeapProtos";
+
+message HeapData {
+ HeapMetadata metadata = 1;
+ repeated HeapEntry entries = 2;
+}
+
+message HeapMetadata {
+ CommandSenderMetadata creator = 1;
+ PlatformMetadata platform_metadata = 2;
+ PlatformStatistics platform_statistics = 3;
+ SystemStatistics system_statistics = 4;
+}
+
+message HeapEntry {
+ int32 order = 1;
+ int32 instances = 2;
+ int64 size = 3;
+ string type = 4;
+}
diff --git a/spark-common/src/main/proto/spark/spark_sampler.proto b/spark-common/src/main/proto/spark/spark_sampler.proto
new file mode 100644
index 0000000..51bdd64
--- /dev/null
+++ b/spark-common/src/main/proto/spark/spark_sampler.proto
@@ -0,0 +1,73 @@
+syntax = "proto3";
+
+package spark;
+
+import "spark/spark.proto";
+
+option java_package = "me.lucko.spark.proto";
+option java_outer_classname = "SparkSamplerProtos";
+
+message SamplerData {
+ SamplerMetadata metadata = 1;
+ repeated ThreadNode threads = 2;
+ map<string, string> class_sources = 3; // optional
+}
+
+message SamplerMetadata {
+ CommandSenderMetadata creator = 1;
+ int64 start_time = 2;
+ int32 interval = 3;
+ ThreadDumper thread_dumper = 4;
+ DataAggregator data_aggregator = 5;
+ string comment = 6;
+ PlatformMetadata platform_metadata = 7;
+ PlatformStatistics platform_statistics = 8;
+ SystemStatistics system_statistics = 9;
+ map<string, string> server_configurations = 10; // keyed by config file name (values are JSON)
+ int64 end_time = 11;
+
+ message ThreadDumper {
+ Type type = 1;
+ repeated int64 ids = 2; // optional
+ repeated string patterns = 3; // optional
+
+ enum Type {
+ ALL = 0;
+ SPECIFIC = 1;
+ REGEX = 2;
+ }
+ }
+
+ message DataAggregator {
+ Type type = 1;
+ ThreadGrouper thread_grouper = 2;
+ int64 tick_length_threshold = 3; // optional
+
+ enum Type {
+ SIMPLE = 0;
+ TICKED = 1;
+ }
+
+ enum ThreadGrouper {
+ BY_NAME = 0;
+ BY_POOL = 1;
+ AS_ONE = 2;
+ }
+ }
+}
+
+message ThreadNode {
+ string name = 1;
+ double time = 2;
+ repeated StackTraceNode children = 3;
+}
+
+message StackTraceNode {
+ double time = 1;
+ repeated StackTraceNode children = 2;
+ string class_name = 3;
+ string method_name = 4;
+ int32 parent_line_number = 5; // optional
+ int32 line_number = 6; // optional
+ string method_desc = 7; // optional
+}
diff --git a/spark-common/src/main/resources/linux/libasyncProfiler.so b/spark-common/src/main/resources/linux/libasyncProfiler.so
deleted file mode 100755
index ddee900..0000000
--- a/spark-common/src/main/resources/linux/libasyncProfiler.so
+++ /dev/null
Binary files differ
diff --git a/spark-common/src/main/resources/macosx/libasyncProfiler.so b/spark-common/src/main/resources/macosx/libasyncProfiler.so
deleted file mode 100755
index 75daf6e..0000000
--- a/spark-common/src/main/resources/macosx/libasyncProfiler.so
+++ /dev/null
Binary files differ
diff --git a/spark-common/src/main/resources/spark/linux/aarch64/libasyncProfiler.so b/spark-common/src/main/resources/spark/linux/aarch64/libasyncProfiler.so
new file mode 100755
index 0000000..35f83b2
--- /dev/null
+++ b/spark-common/src/main/resources/spark/linux/aarch64/libasyncProfiler.so
Binary files differ
diff --git a/spark-common/src/main/resources/spark/linux/amd64/libasyncProfiler.so b/spark-common/src/main/resources/spark/linux/amd64/libasyncProfiler.so
new file mode 100755
index 0000000..edbf103
--- /dev/null
+++ b/spark-common/src/main/resources/spark/linux/amd64/libasyncProfiler.so
Binary files differ
diff --git a/spark-common/src/main/resources/spark/macos/libasyncProfiler.so b/spark-common/src/main/resources/spark/macos/libasyncProfiler.so
new file mode 100755
index 0000000..ab818e9
--- /dev/null
+++ b/spark-common/src/main/resources/spark/macos/libasyncProfiler.so
Binary files differ