author     lucko <git@lucko.me>  2022-06-09 22:13:58 +0100
committer  GitHub <noreply@github.com>  2022-06-09 22:13:58 +0100
commit     4a16a1a2f4eb09f706b4a541e3d31618de29420b (patch)
tree       cc320ee2e6551f2157a2d54968f8ba14f6713d08
parent     32ab78c71c5be97da7329a4f7c4035289a3490b1 (diff)
parent     ecc3714e6441ace0eb78156b2b4475ca050280db (diff)
Merge pull request #213 from embeddedt/forge-1.7.10
Align 1.7.10 with Spark 1.9
-rw-r--r--  HEADER.txt  17
-rw-r--r--  README.md  11
-rw-r--r--  build.gradle  27
-rw-r--r--  gradle.properties  3
-rw-r--r--  settings.gradle  22
-rw-r--r--  spark-api/HEADER.txt  22
-rw-r--r--  spark-api/build.gradle  4
-rw-r--r--  spark-bukkit/build.gradle  29
-rw-r--r--  spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitPlatformInfo.java  4
-rw-r--r--  spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitPlayerPingProvider.java  57
-rw-r--r--  spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java  138
-rw-r--r--  spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java  16
-rw-r--r--  spark-bukkit/src/main/java/me/lucko/spark/bukkit/CommandMapUtil.java  27
-rw-r--r--  spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkPlaceholderProvider.java  56
-rw-r--r--  spark-bungeecord/build.gradle  29
-rw-r--r--  spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordPlatformInfo.java  4
-rw-r--r--  spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordPlayerPingProvider.java  47
-rw-r--r--  spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordSparkPlugin.java  6
-rw-r--r--  spark-common/build.gradle  10
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java  87
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java  22
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/api/GarbageCollectorInfo.java  6
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/command/CommandResponseHandler.java  14
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java  322
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java  87
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java  6
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/command/tabcomplete/CompletionSupplier.java  27
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/command/tabcomplete/TabCompleter.java  27
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java  31
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/monitor/MonitoringExecutor.java  36
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuInfo.java  51
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuMonitor.java  12
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/monitor/disk/DiskUsage.java  71
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/monitor/memory/GarbageCollectorStatistics.java  10
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/monitor/memory/MemoryInfo.java  153
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/monitor/net/Direction.java (renamed from spark-fabric/src/main/java/me/lucko/spark/fabric/FabricSparkGameHooks.java)  21
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkInterfaceAverages.java  88
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkInterfaceInfo.java  274
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkMonitor.java  141
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PingStatistics.java  149
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PingSummary.java  81
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PlayerPingProvider.java  40
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TickStatistics.java  10
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/platform/AbstractPlatformInfo.java  17
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/platform/PlatformInfo.java  38
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java  188
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java  136
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesFileReader.java  64
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java  59
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java  77
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java  7
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java  3
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java  53
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadNodeOrder.java  11
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/AbstractDataAggregator.java  10
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/DataAggregator.java  6
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncDataAggregator.java  2
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java  80
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java  53
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/JfrReader.java  18
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaDataAggregator.java  6
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java  41
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/java/SimpleDataAggregator.java  2
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java  7
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java  1
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java  7
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java  33
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/tick/AbstractTickHook.java  4
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/tick/AbstractTickReporter.java  4
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/util/BytebinClient.java  27
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/util/ClassSourceLookup.java  25
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/util/Compression.java  100
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/util/FormatUtil.java  29
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/util/LinuxProc.java  84
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/util/RollingAverage.java  17
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/util/SparkThreadFactory.java  49
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/util/StatisticFormatter.java  189
-rw-r--r--  spark-common/src/main/proto/spark/spark.proto  192
-rw-r--r--  spark-common/src/main/proto/spark/spark_heap.proto  27
-rw-r--r--  spark-common/src/main/proto/spark/spark_sampler.proto  73
-rwxr-xr-x  spark-common/src/main/resources/linux/libasyncProfiler.so  bin 398099 -> 0 bytes
-rwxr-xr-x  spark-common/src/main/resources/macosx/libasyncProfiler.so  bin 599568 -> 0 bytes
-rwxr-xr-x  spark-common/src/main/resources/spark/linux/aarch64/libasyncProfiler.so  bin 0 -> 328432 bytes
-rwxr-xr-x  spark-common/src/main/resources/spark/linux/amd64/libasyncProfiler.so  bin 0 -> 342239 bytes
-rwxr-xr-x  spark-common/src/main/resources/spark/macos/libasyncProfiler.so  bin 0 -> 688400 bytes
-rw-r--r--  spark-fabric/build.gradle  16
-rw-r--r--  spark-fabric/src/main/java/me/lucko/spark/fabric/FabricClassSourceLookup.java  24
-rw-r--r--  spark-fabric/src/main/java/me/lucko/spark/fabric/FabricCommandSender.java  4
-rw-r--r--  spark-fabric/src/main/java/me/lucko/spark/fabric/FabricPlatformInfo.java  6
-rw-r--r--  spark-fabric/src/main/java/me/lucko/spark/fabric/FabricPlayerPingProvider.java  47
-rw-r--r--  spark-fabric/src/main/java/me/lucko/spark/fabric/FabricSparkMod.java  35
-rw-r--r--  spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientPlayerEntityMixin.java  52
-rw-r--r--  spark-fabric/src/main/java/me/lucko/spark/fabric/placeholder/SparkFabricPlaceholderApi.java  58
-rw-r--r--  spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricClientSparkPlugin.java  80
-rw-r--r--  spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java  62
-rw-r--r--  spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricSparkPlugin.java  24
-rw-r--r--  spark-fabric/src/main/resources/fabric.mod.json  5
-rw-r--r--  spark-fabric/src/main/resources/spark.mixins.json  11
-rw-r--r--  spark-forge/build.gradle  7
-rw-r--r--  spark-forge/src/main/java/me/lucko/spark/forge/ForgeCommandSender.java  9
-rw-r--r--  spark-forge/src/main/java/me/lucko/spark/forge/ForgePlatformInfo.java  4
-rw-r--r--  spark-forge/src/main/java/me/lucko/spark/forge/ForgePlayerPingProvider.java  47
-rw-r--r--  spark-forge/src/main/java/me/lucko/spark/forge/ForgeSparkMod.java  12
-rw-r--r--  spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeClientSparkPlugin.java  75
-rw-r--r--  spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java  78
-rw-r--r--  spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeSparkPlugin.java  62
-rw-r--r--  spark-forge/src/main/resources/META-INF/accesstransformer.cfg  1
-rw-r--r--  spark-forge1122/build.gradle  82
-rw-r--r--  spark-forge1122/src/main/java/me/lucko/spark/forge/Forge1122CommandSender.java  75
-rw-r--r--  spark-forge1122/src/main/java/me/lucko/spark/forge/Forge1122SparkMod.java  83
-rw-r--r--  spark-forge1122/src/main/java/me/lucko/spark/forge/Forge1122TickReporter.java  66
-rw-r--r--  spark-forge1122/src/main/java/me/lucko/spark/forge/plugin/Forge1122ClientSparkPlugin.java  90
-rw-r--r--  spark-forge1122/src/main/java/me/lucko/spark/forge/plugin/Forge1122ServerSparkPlugin.java  98
-rw-r--r--  spark-forge1122/src/main/java/me/lucko/spark/forge/plugin/Forge1122SparkPlugin.java  153
-rw-r--r--  spark-forge1122/src/main/resources/mcmod.info  7
-rw-r--r--  spark-forge1710/src/main/java/me/lucko/spark/forge/Forge1710PlatformInfo.java  4
-rw-r--r--  spark-forge1710/src/main/java/me/lucko/spark/forge/Forge1710TickHook.java  6
-rw-r--r--  spark-forge1710/src/main/java/me/lucko/spark/forge/Forge1710TickReporter.java  6
-rw-r--r--  spark-forge1710/src/main/java/me/lucko/spark/forge/plugin/Forge1710ServerSparkPlugin.java  6
-rw-r--r--  spark-minestom/build.gradle  50
-rw-r--r--  spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomClassSourceLookup.java  49
-rw-r--r--  spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomCommandSender.java  65
-rw-r--r--  spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomPlatformInfo.java (renamed from spark-forge1122/src/main/java/me/lucko/spark/forge/Forge1122PlatformInfo.java)  22
-rw-r--r--  spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomPlayerPingProvider.java  41
-rw-r--r--  spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomSparkPlugin.java  183
-rw-r--r--  spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomTickHook.java (renamed from spark-forge1122/src/main/java/me/lucko/spark/forge/Forge1122TickHook.java)  41
-rw-r--r--  spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomTickReporter.java  48
-rw-r--r--  spark-minestom/src/main/resources/extension.json  5
-rw-r--r--  spark-nukkit/build.gradle  2
-rw-r--r--  spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitPlatformInfo.java  4
-rw-r--r--  spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitPlayerPingProvider.java  47
-rw-r--r--  spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitSparkPlugin.java  6
-rw-r--r--  spark-sponge/build.gradle  19
-rw-r--r--  spark-sponge7/build.gradle (renamed from spark-universal/build.gradle)  20
-rw-r--r--  spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7ClassSourceLookup.java  44
-rw-r--r--  spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7CommandSender.java (renamed from spark-sponge/src/main/java/me/lucko/spark/sponge/SpongeCommandSender.java)  4
-rw-r--r--  spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7PlatformInfo.java (renamed from spark-sponge/src/main/java/me/lucko/spark/sponge/SpongePlatformInfo.java)  6
-rw-r--r--  spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7PlayerPingProvider.java  47
-rw-r--r--  spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7SparkPlugin.java (renamed from spark-sponge/src/main/java/me/lucko/spark/sponge/SpongeSparkPlugin.java)  47
-rw-r--r--  spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7TickHook.java (renamed from spark-sponge/src/main/java/me/lucko/spark/sponge/SpongeTickHook.java)  6
-rw-r--r--  spark-sponge8/build.gradle  2
-rw-r--r--  spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8ClassSourceLookup.java  84
-rw-r--r--  spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8CommandSender.java  1
-rw-r--r--  spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8PlatformInfo.java  4
-rw-r--r--  spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8PlayerPingProvider.java  47
-rw-r--r--  spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8SparkPlugin.java  16
-rw-r--r--  spark-velocity/build.gradle  2
-rw-r--r--  spark-velocity/src/main/java/me/lucko/spark/velocity/VelocityPlatformInfo.java  4
-rw-r--r--  spark-velocity/src/main/java/me/lucko/spark/velocity/VelocityPlayerPingProvider.java  46
-rw-r--r--  spark-velocity/src/main/java/me/lucko/spark/velocity/VelocitySparkPlugin.java  6
-rw-r--r--  spark-velocity4/build.gradle  2
-rw-r--r--  spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4PlatformInfo.java  4
-rw-r--r--  spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4PlayerPingProvider.java  46
-rw-r--r--  spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4SparkPlugin.java  6
-rw-r--r--  spark-waterdog/build.gradle  2
-rw-r--r--  spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogPlatformInfo.java  4
-rw-r--r--  spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogPlayerPingProvider.java  47
-rw-r--r--  spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogSparkPlugin.java  6
158 files changed, 4714 insertions, 1800 deletions
diff --git a/HEADER.txt b/HEADER.txt
new file mode 100644
index 0000000..3457e84
--- /dev/null
+++ b/HEADER.txt
@@ -0,0 +1,17 @@
+This file is part of spark.
+
+ Copyright (c) lucko (Luck) <luck@lucko.me>
+ Copyright (c) contributors
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
\ No newline at end of file
diff --git a/README.md b/README.md
index 368e137..74db6fb 100644
--- a/README.md
+++ b/README.md
@@ -1,11 +1,11 @@
<h1 align="center">
<img
alt="spark"
- src="https://i.imgur.com/cJ4sYV5.png">
+ src="https://i.imgur.com/ykHn9vx.png">
</h1>
<h3 align="center">
- spark is a performance profiling plugin/mod for Minecraft clients, servers and proxies.
+ spark is a performance profiler for Minecraft clients, servers and proxies.
</h3>
#### Useful Links
@@ -18,10 +18,9 @@
spark is made up of three separate components:
-| CPU Profiler | Memory Inspection | Server Health Reporting |
-| :-------------------------------------------: | :--------------------------------------: | :----------------------------------------: |
-| [![](https://i.imgur.com/ggSGzRq.png)](#zap-cpu-profiler) | [![](https://i.imgur.com/BsdTxqA.png)](#zap-memory-inspection) | [![](https://i.imgur.com/SrKEmA6.png)](#zap-server-health-reporting) |
-| Diagnose performance issues. | Diagnose memory issues. | Keep track of overall server health. |
+* **CPU Profiler**: Diagnose performance issues.
+* **Memory Inspection**: Diagnose memory issues.
+* **Server Health Reporting**: Keep track of overall server health.
### :zap: CPU Profiler
diff --git a/build.gradle b/build.gradle
index 0383c91..a56b089 100644
--- a/build.gradle
+++ b/build.gradle
@@ -1,6 +1,10 @@
+plugins {
+ id 'org.cadixdev.licenser' version '0.6.1' apply false
+}
+
allprojects {
group = 'me.lucko'
- version = '1.6-SNAPSHOT'
+ version = '1.9-SNAPSHOT'
configurations {
compileClasspath // Fabric-loom needs this for remap jar for some reason
@@ -11,9 +15,12 @@ subprojects {
apply plugin: 'java'
apply plugin: 'idea'
apply plugin: 'java-library'
+ apply plugin: 'org.cadixdev.licenser'
ext {
- pluginVersion = '1.6.4'
+ baseVersion = '1.9'
+ patchVersion = determinePatchVersion()
+ pluginVersion = baseVersion + '.' + patchVersion
pluginDescription = 'spark is a performance profiling plugin/mod for Minecraft clients, servers and proxies.'
}
@@ -28,6 +35,11 @@ subprojects {
duplicatesStrategy = DuplicatesStrategy.INCLUDE
}
+ license {
+ header = rootProject.file('HEADER.txt')
+ include '**/*.java'
+ }
+
repositories {
mavenCentral()
maven { url "https://oss.sonatype.org/content/repositories/snapshots/" }
@@ -36,3 +48,14 @@ subprojects {
}
}
+
+def determinePatchVersion() {
+ def tagInfo = new ByteArrayOutputStream()
+ exec {
+ commandLine 'git', 'describe', '--tags'
+ standardOutput = tagInfo
+ }
+ tagInfo = tagInfo.toString()
+
+ return tagInfo.contains('-') ? tagInfo.split("-")[1] : 0
+}
\ No newline at end of file
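
For context: `git describe --tags` prints the nearest tag, the number of commits since that tag, and an abbreviated commit hash (e.g. `v1.9-42-g4a16a1a`), so the helper above takes the middle field as the patch number and falls back to 0 when HEAD sits exactly on a tag. A minimal Java sketch of the same parsing, with hypothetical tag values:

    public class PatchVersionExample {
        // Mirrors the Groovy helper above: the field between the first two
        // dashes of `git describe --tags` output is the commit count since
        // the last tag, used as the patch number.
        static String determinePatchVersion(String tagInfo) {
            return tagInfo.contains("-") ? tagInfo.split("-")[1] : "0";
        }

        public static void main(String[] args) {
            System.out.println(determinePatchVersion("v1.9-42-g4a16a1a")); // 42
            System.out.println(determinePatchVersion("v1.9"));             // 0
        }
    }
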
diff --git a/gradle.properties b/gradle.properties
index dd68d76..bf86fb7 100644
--- a/gradle.properties
+++ b/gradle.properties
@@ -1,2 +1 @@
-org.gradle.jvmargs=-Xmx1G
-org.gradle.parallel=true
\ No newline at end of file
+org.gradle.jvmargs=-Xmx2G
\ No newline at end of file
diff --git a/settings.gradle b/settings.gradle
index e1e8d89..fe12f64 100644
--- a/settings.gradle
+++ b/settings.gradle
@@ -1,6 +1,5 @@
pluginManagement {
repositories {
- jcenter()
maven {
name = 'Fabric'
url = 'https://maven.fabricmc.net/'
@@ -13,24 +12,5 @@ rootProject.name = 'spark'
include (
'spark-api',
'spark-common',
- 'spark-forge1122'
-)
-if (JavaVersion.current() != JavaVersion.VERSION_1_8){
- include (
- 'spark-bukkit',
- 'spark-bungeecord',
- 'spark-velocity',
- 'spark-velocity4',
- 'spark-sponge',
- 'spark-sponge8',
- 'spark-forge',
- 'spark-fabric',
- 'spark-nukkit',
- 'spark-waterdog',
- 'spark-universal'
- )
-} else {
- include (
'spark-forge1710'
- )
-}
+)
diff --git a/spark-api/HEADER.txt b/spark-api/HEADER.txt
new file mode 100644
index 0000000..63aafcb
--- /dev/null
+++ b/spark-api/HEADER.txt
@@ -0,0 +1,22 @@
+This file is part of spark, licensed under the MIT License.
+
+ Copyright (c) lucko (Luck) <luck@lucko.me>
+ Copyright (c) contributors
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
\ No newline at end of file
diff --git a/spark-api/build.gradle b/spark-api/build.gradle
index 31458af..0fbe9e1 100644
--- a/spark-api/build.gradle
+++ b/spark-api/build.gradle
@@ -9,6 +9,10 @@ dependencies {
compileOnly 'org.jetbrains:annotations:20.1.0'
}
+license {
+ header = project.file('HEADER.txt')
+}
+
publishing {
//repositories {
// maven {
diff --git a/spark-bukkit/build.gradle b/spark-bukkit/build.gradle
index b66134d..8e111e8 100644
--- a/spark-bukkit/build.gradle
+++ b/spark-bukkit/build.gradle
@@ -1,3 +1,7 @@
+plugins {
+ id 'com.github.johnrengelman.shadow' version '7.0.0'
+}
+
dependencies {
implementation project(':spark-common')
implementation('me.lucko:adventure-platform-bukkit:4.9.4') {
@@ -28,4 +32,27 @@ processResources {
)
include 'plugin.yml'
}
-}
\ No newline at end of file
+}
+
+shadowJar {
+ archiveName = "spark-${project.pluginVersion}-bukkit.jar"
+
+ relocate 'okio', 'me.lucko.spark.lib.okio'
+ relocate 'okhttp3', 'me.lucko.spark.lib.okhttp3'
+ relocate 'net.kyori.adventure', 'me.lucko.spark.lib.adventure'
+ relocate 'net.kyori.examination', 'me.lucko.spark.lib.adventure.examination'
+ relocate 'net.bytebuddy', 'me.lucko.spark.lib.bytebuddy'
+ relocate 'org.tukaani.xz', 'me.lucko.spark.lib.xz'
+ relocate 'com.google.protobuf', 'me.lucko.spark.lib.protobuf'
+ relocate 'org.objectweb.asm', 'me.lucko.spark.lib.asm'
+ relocate 'one.profiler', 'me.lucko.spark.lib.asyncprofiler'
+
+ exclude 'module-info.class'
+ exclude 'META-INF/maven/**'
+ exclude 'META-INF/proguard/**'
+}
+
+artifacts {
+ archives shadowJar
+ shadow shadowJar
+}
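
The `relocate` rules above rewrite the package names of spark's bundled dependencies inside the shaded jar, so a server that ships its own copy of, say, okhttp cannot clash with spark's. A hypothetical illustration (the class is real okhttp API; the relocated name follows the rule configured above):

    import okhttp3.OkHttpClient;

    public class RelocationIllustration {
        public static void main(String[] args) {
            // spark's sources compile against the original package...
            OkHttpClient client = new OkHttpClient();
            // ...but in the shaded jar the bytecode reference is rewritten to
            // me.lucko.spark.lib.okhttp3.OkHttpClient, so this would print the
            // relocated name when run from the shaded artifact.
            System.out.println(client.getClass().getName());
        }
    }
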
diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitPlatformInfo.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitPlatformInfo.java
index 3f0c155..2bf17ac 100644
--- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitPlatformInfo.java
+++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitPlatformInfo.java
@@ -20,14 +20,14 @@
package me.lucko.spark.bukkit;
-import me.lucko.spark.common.platform.AbstractPlatformInfo;
+import me.lucko.spark.common.platform.PlatformInfo;
import org.bukkit.Server;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
-public class BukkitPlatformInfo extends AbstractPlatformInfo {
+public class BukkitPlatformInfo implements PlatformInfo {
private final Server server;
public BukkitPlatformInfo(Server server) {
diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitPlayerPingProvider.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitPlayerPingProvider.java
new file mode 100644
index 0000000..2cf58cf
--- /dev/null
+++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitPlayerPingProvider.java
@@ -0,0 +1,57 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.bukkit;
+
+import com.google.common.collect.ImmutableMap;
+
+import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
+
+import org.bukkit.Server;
+import org.bukkit.entity.Player;
+
+import java.util.Map;
+
+public class BukkitPlayerPingProvider implements PlayerPingProvider {
+
+ public static boolean isSupported() {
+ try {
+ Player.Spigot.class.getMethod("getPing");
+ return true;
+ } catch (Exception e) {
+ return false;
+ }
+ }
+
+ private final Server server;
+
+ public BukkitPlayerPingProvider(Server server) {
+ this.server = server;
+ }
+
+ @Override
+ public Map<String, Integer> poll() {
+ ImmutableMap.Builder<String, Integer> builder = ImmutableMap.builder();
+ for (Player player : this.server.getOnlinePlayers()) {
+ builder.put(player.getName(), player.spigot().getPing());
+ }
+ return builder.build();
+ }
+}
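
The `isSupported()` reflection probe above guards against Spigot builds that predate the `Player.Spigot#getPing` method; the provider is only constructed when the method exists. As a rough sketch of how a caller might use a `poll()` snapshot (the averaging below is illustrative, not spark's code, and the names and values are made up):

    import java.util.Map;

    public class PingPollExample {
        // Computes a mean ping from a poll() snapshot.
        static double averagePing(Map<String, Integer> polled) {
            return polled.values().stream()
                    .mapToInt(Integer::intValue)
                    .average()
                    .orElse(0);
        }

        public static void main(String[] args) {
            System.out.println(averagePing(Map.of("PlayerOne", 24, "PlayerTwo", 51))); // 37.5
        }
    }
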
diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java
new file mode 100644
index 0000000..953e171
--- /dev/null
+++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java
@@ -0,0 +1,138 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.bukkit;
+
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.ImmutableSet;
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+import com.google.gson.JsonElement;
+import com.google.gson.JsonSerializer;
+
+import me.lucko.spark.common.platform.serverconfig.AbstractServerConfigProvider;
+import me.lucko.spark.common.platform.serverconfig.PropertiesFileReader;
+
+import org.bukkit.configuration.MemorySection;
+import org.bukkit.configuration.file.YamlConfiguration;
+
+import co.aikar.timings.TimingsManager;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+
+public class BukkitServerConfigProvider extends AbstractServerConfigProvider<BukkitServerConfigProvider.FileType> {
+ private static final Gson GSON = new GsonBuilder()
+ .registerTypeAdapter(MemorySection.class, (JsonSerializer<MemorySection>) (obj, type, ctx) -> ctx.serialize(obj.getValues(false)))
+ .create();
+
+ /** A map of provided files and their type */
+ private static final Map<String, FileType> FILES;
+ /** A collection of paths to be excluded from the files */
+ private static final Collection<String> HIDDEN_PATHS;
+
+ public BukkitServerConfigProvider() {
+ super(FILES, HIDDEN_PATHS);
+ }
+
+ @Override
+ protected JsonElement load(String path, FileType type) throws IOException {
+ Path filePath = Paths.get(path);
+ if (!Files.exists(filePath)) {
+ return null;
+ }
+
+ try (BufferedReader reader = Files.newBufferedReader(filePath, StandardCharsets.UTF_8)) {
+ Map<String, Object> values;
+
+ if (type == FileType.PROPERTIES) {
+ PropertiesFileReader propertiesReader = new PropertiesFileReader(reader);
+ values = propertiesReader.readProperties();
+ } else if (type == FileType.YAML) {
+ YamlConfiguration config = YamlConfiguration.loadConfiguration(reader);
+ values = config.getValues(false);
+ } else {
+ throw new IllegalArgumentException("Unknown file type: " + type);
+ }
+
+ return GSON.toJsonTree(values);
+ }
+ }
+
+ enum FileType {
+ PROPERTIES,
+ YAML
+ }
+
+ static {
+ ImmutableMap.Builder<String, FileType> files = ImmutableMap.<String, FileType>builder()
+ .put("server.properties", FileType.PROPERTIES)
+ .put("bukkit.yml", FileType.YAML)
+ .put("spigot.yml", FileType.YAML)
+ .put("paper.yml", FileType.YAML)
+ .put("purpur.yml", FileType.YAML);
+
+ for (String config : getSystemPropertyList("spark.serverconfigs.extra")) {
+ files.put(config, FileType.YAML);
+ }
+
+ ImmutableSet.Builder<String> hiddenPaths = ImmutableSet.<String>builder()
+ .add("database")
+ .add("settings.bungeecord-addresses")
+ .add("settings.velocity-support.secret")
+ .add("server-ip")
+ .add("motd")
+ .add("resource-pack")
+ .add("rcon<dot>password")
+ .add("level-seed")
+ .add("world-settings.*.feature-seeds")
+ .add("world-settings.*.seed-*")
+ .addAll(getTimingsHiddenConfigs())
+ .addAll(getSystemPropertyList("spark.serverconfigs.hiddenpaths"));
+
+ FILES = files.build();
+ HIDDEN_PATHS = hiddenPaths.build();
+ }
+
+ private static List<String> getSystemPropertyList(String property) {
+ String value = System.getProperty(property);
+ return value == null
+ ? Collections.emptyList()
+ : Arrays.asList(value.split(","));
+ }
+
+ private static List<String> getTimingsHiddenConfigs() {
+ try {
+ return TimingsManager.hiddenConfigs;
+ } catch (NoClassDefFoundError e) {
+ return Collections.emptyList();
+ }
+ }
+
+}
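
Both `spark.serverconfigs.extra` and `spark.serverconfigs.hiddenpaths` are read as comma-separated JVM system properties, so operators can extend the file list or the redaction list from the server's launch flags. A standalone sketch of that parsing (the property value below is a made-up example):

    import java.util.Arrays;
    import java.util.Collections;
    import java.util.List;

    public class SystemPropertyListExample {
        // Same splitting logic as getSystemPropertyList above.
        static List<String> getSystemPropertyList(String property) {
            String value = System.getProperty(property);
            return value == null
                    ? Collections.emptyList()
                    : Arrays.asList(value.split(","));
        }

        public static void main(String[] args) {
            // equivalent to launching the JVM with -Dspark.serverconfigs.extra=pufferfish.yml
            System.setProperty("spark.serverconfigs.extra", "pufferfish.yml");
            System.out.println(getSystemPropertyList("spark.serverconfigs.extra")); // [pufferfish.yml]
        }
    }
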
diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java
index f81a176..9727277 100644
--- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java
+++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java
@@ -25,7 +25,9 @@ import me.lucko.spark.bukkit.placeholder.SparkMVdWPlaceholders;
import me.lucko.spark.bukkit.placeholder.SparkPlaceholderApi;
import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.SparkPlugin;
+import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
import me.lucko.spark.common.platform.PlatformInfo;
+import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider;
import me.lucko.spark.common.sampler.ThreadDumper;
import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.common.tick.TickReporter;
@@ -172,6 +174,20 @@ public class BukkitSparkPlugin extends JavaPlugin implements SparkPlugin {
}
@Override
+ public PlayerPingProvider createPlayerPingProvider() {
+ if (BukkitPlayerPingProvider.isSupported()) {
+ return new BukkitPlayerPingProvider(getServer());
+ } else {
+ return null;
+ }
+ }
+
+ @Override
+ public ServerConfigProvider createServerConfigProvider() {
+ return new BukkitServerConfigProvider();
+ }
+
+ @Override
public PlatformInfo getPlatformInfo() {
return new BukkitPlatformInfo(getServer());
}
diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/CommandMapUtil.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/CommandMapUtil.java
index 5c4d0db..e604321 100644
--- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/CommandMapUtil.java
+++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/CommandMapUtil.java
@@ -1,26 +1,21 @@
/*
- * This file is part of helper, licensed under the MIT License.
+ * This file is part of spark.
*
* Copyright (c) lucko (Luck) <luck@lucko.me>
* Copyright (c) contributors
*
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
*
- * The above copyright notice and this permission notice shall be included in all
- * copies or substantial portions of the Software.
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
*
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- * SOFTWARE.
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package me.lucko.spark.bukkit;
diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkPlaceholderProvider.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkPlaceholderProvider.java
index 6e14bdb..5b57857 100644
--- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkPlaceholderProvider.java
+++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkPlaceholderProvider.java
@@ -3,7 +3,7 @@
*
* Copyright (c) lucko (Luck) <luck@lucko.me>
* Copyright (c) contributors
- *
+ *
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
@@ -21,9 +21,9 @@
package me.lucko.spark.bukkit.placeholder;
import me.lucko.spark.common.SparkPlatform;
-import me.lucko.spark.common.command.modules.HealthModule;
import me.lucko.spark.common.monitor.cpu.CpuMonitor;
import me.lucko.spark.common.monitor.tick.TickStatistics;
+import me.lucko.spark.common.util.StatisticFormatter;
import net.kyori.adventure.text.Component;
import net.kyori.adventure.text.TextComponent;
@@ -42,22 +42,22 @@ enum SparkPlaceholderProvider {
switch (placeholder) {
case "tps":
return Component.text()
- .append(HealthModule.formatTps(tickStatistics.tps5Sec())).append(Component.text(", "))
- .append(HealthModule.formatTps(tickStatistics.tps10Sec())).append(Component.text(", "))
- .append(HealthModule.formatTps(tickStatistics.tps1Min())).append(Component.text(", "))
- .append(HealthModule.formatTps(tickStatistics.tps5Min())).append(Component.text(", "))
- .append(HealthModule.formatTps(tickStatistics.tps15Min()))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps5Sec())).append(Component.text(", "))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps10Sec())).append(Component.text(", "))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps1Min())).append(Component.text(", "))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps5Min())).append(Component.text(", "))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps15Min()))
.build();
case "tps_5s":
- return HealthModule.formatTps(tickStatistics.tps5Sec());
+ return StatisticFormatter.formatTps(tickStatistics.tps5Sec());
case "tps_10s":
- return HealthModule.formatTps(tickStatistics.tps10Sec());
+ return StatisticFormatter.formatTps(tickStatistics.tps10Sec());
case "tps_1m":
- return HealthModule.formatTps(tickStatistics.tps1Min());
+ return StatisticFormatter.formatTps(tickStatistics.tps1Min());
case "tps_5m":
- return HealthModule.formatTps(tickStatistics.tps5Min());
+ return StatisticFormatter.formatTps(tickStatistics.tps5Min());
case "tps_15m":
- return HealthModule.formatTps(tickStatistics.tps15Min());
+ return StatisticFormatter.formatTps(tickStatistics.tps15Min());
}
}
@@ -70,13 +70,13 @@ enum SparkPlaceholderProvider {
switch (placeholder) {
case "tickduration":
return Component.text()
- .append(HealthModule.formatTickDurations(tickStatistics.duration10Sec())).append(Component.text("; "))
- .append(HealthModule.formatTickDurations(tickStatistics.duration1Min()))
+ .append(StatisticFormatter.formatTickDurations(tickStatistics.duration10Sec())).append(Component.text("; "))
+ .append(StatisticFormatter.formatTickDurations(tickStatistics.duration1Min()))
.build();
case "tickduration_10s":
- return HealthModule.formatTickDurations(tickStatistics.duration10Sec());
+ return StatisticFormatter.formatTickDurations(tickStatistics.duration10Sec());
case "tickduration_1m":
- return HealthModule.formatTickDurations(tickStatistics.duration1Min());
+ return StatisticFormatter.formatTickDurations(tickStatistics.duration1Min());
}
}
@@ -84,28 +84,28 @@ enum SparkPlaceholderProvider {
switch (placeholder) {
case "cpu_system":
return Component.text()
- .append(HealthModule.formatCpuUsage(CpuMonitor.systemLoad10SecAvg())).append(Component.text(", "))
- .append(HealthModule.formatCpuUsage(CpuMonitor.systemLoad1MinAvg())).append(Component.text(", "))
- .append(HealthModule.formatCpuUsage(CpuMonitor.systemLoad15MinAvg()))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad10SecAvg())).append(Component.text(", "))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad1MinAvg())).append(Component.text(", "))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad15MinAvg()))
.build();
case "cpu_system_10s":
- return HealthModule.formatCpuUsage(CpuMonitor.systemLoad10SecAvg());
+ return StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad10SecAvg());
case "cpu_system_1m":
- return HealthModule.formatCpuUsage(CpuMonitor.systemLoad1MinAvg());
+ return StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad1MinAvg());
case "cpu_system_15m":
- return HealthModule.formatCpuUsage(CpuMonitor.systemLoad15MinAvg());
+ return StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad15MinAvg());
case "cpu_process":
return Component.text()
- .append(HealthModule.formatCpuUsage(CpuMonitor.processLoad10SecAvg())).append(Component.text(", "))
- .append(HealthModule.formatCpuUsage(CpuMonitor.processLoad1MinAvg())).append(Component.text(", "))
- .append(HealthModule.formatCpuUsage(CpuMonitor.processLoad15MinAvg()))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad10SecAvg())).append(Component.text(", "))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad1MinAvg())).append(Component.text(", "))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad15MinAvg()))
.build();
case "cpu_process_10s":
- return HealthModule.formatCpuUsage(CpuMonitor.processLoad10SecAvg());
+ return StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad10SecAvg());
case "cpu_process_1m":
- return HealthModule.formatCpuUsage(CpuMonitor.processLoad1MinAvg());
+ return StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad1MinAvg());
case "cpu_process_15m":
- return HealthModule.formatCpuUsage(CpuMonitor.processLoad15MinAvg());
+ return StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad15MinAvg());
}
}
diff --git a/spark-bungeecord/build.gradle b/spark-bungeecord/build.gradle
index f1a6db5..ccea89d 100644
--- a/spark-bungeecord/build.gradle
+++ b/spark-bungeecord/build.gradle
@@ -1,3 +1,7 @@
+plugins {
+ id 'com.github.johnrengelman.shadow' version '7.0.0'
+}
+
dependencies {
implementation project(':spark-common')
implementation('me.lucko:adventure-platform-bungeecord:4.9.4') {
@@ -18,4 +22,27 @@ processResources {
)
include 'bungee.yml'
}
-}
\ No newline at end of file
+}
+
+shadowJar {
+ archiveName = "spark-${project.pluginVersion}-bungeecord.jar"
+
+ relocate 'okio', 'me.lucko.spark.lib.okio'
+ relocate 'okhttp3', 'me.lucko.spark.lib.okhttp3'
+ relocate 'net.kyori.adventure', 'me.lucko.spark.lib.adventure'
+ relocate 'net.kyori.examination', 'me.lucko.spark.lib.adventure.examination'
+ relocate 'net.bytebuddy', 'me.lucko.spark.lib.bytebuddy'
+ relocate 'org.tukaani.xz', 'me.lucko.spark.lib.xz'
+ relocate 'com.google.protobuf', 'me.lucko.spark.lib.protobuf'
+ relocate 'org.objectweb.asm', 'me.lucko.spark.lib.asm'
+ relocate 'one.profiler', 'me.lucko.spark.lib.asyncprofiler'
+
+ exclude 'module-info.class'
+ exclude 'META-INF/maven/**'
+ exclude 'META-INF/proguard/**'
+}
+
+artifacts {
+ archives shadowJar
+ shadow shadowJar
+}
diff --git a/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordPlatformInfo.java b/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordPlatformInfo.java
index 0ac9fbd..fc5c588 100644
--- a/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordPlatformInfo.java
+++ b/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordPlatformInfo.java
@@ -20,11 +20,11 @@
package me.lucko.spark.bungeecord;
-import me.lucko.spark.common.platform.AbstractPlatformInfo;
+import me.lucko.spark.common.platform.PlatformInfo;
import net.md_5.bungee.api.ProxyServer;
-public class BungeeCordPlatformInfo extends AbstractPlatformInfo {
+public class BungeeCordPlatformInfo implements PlatformInfo {
private final ProxyServer proxy;
public BungeeCordPlatformInfo(ProxyServer proxy) {
diff --git a/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordPlayerPingProvider.java b/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordPlayerPingProvider.java
new file mode 100644
index 0000000..37955a3
--- /dev/null
+++ b/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordPlayerPingProvider.java
@@ -0,0 +1,47 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.bungeecord;
+
+import com.google.common.collect.ImmutableMap;
+
+import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
+
+import net.md_5.bungee.api.ProxyServer;
+import net.md_5.bungee.api.connection.ProxiedPlayer;
+
+import java.util.Map;
+
+public class BungeeCordPlayerPingProvider implements PlayerPingProvider {
+ private final ProxyServer proxy;
+
+ public BungeeCordPlayerPingProvider(ProxyServer proxy) {
+ this.proxy = proxy;
+ }
+
+ @Override
+ public Map<String, Integer> poll() {
+ ImmutableMap.Builder<String, Integer> builder = ImmutableMap.builder();
+ for (ProxiedPlayer player : this.proxy.getPlayers()) {
+ builder.put(player.getName(), player.getPing());
+ }
+ return builder.build();
+ }
+}
diff --git a/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordSparkPlugin.java b/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordSparkPlugin.java
index ebdfe18..e259adc 100644
--- a/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordSparkPlugin.java
+++ b/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordSparkPlugin.java
@@ -22,6 +22,7 @@ package me.lucko.spark.bungeecord;
import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.SparkPlugin;
+import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
import me.lucko.spark.common.platform.PlatformInfo;
import me.lucko.spark.common.util.ClassSourceLookup;
@@ -91,6 +92,11 @@ public class BungeeCordSparkPlugin extends Plugin implements SparkPlugin {
}
@Override
+ public PlayerPingProvider createPlayerPingProvider() {
+ return new BungeeCordPlayerPingProvider(getProxy());
+ }
+
+ @Override
public PlatformInfo getPlatformInfo() {
return new BungeeCordPlatformInfo(getProxy());
}
diff --git a/spark-common/build.gradle b/spark-common/build.gradle
index 60f823c..554eec2 100644
--- a/spark-common/build.gradle
+++ b/spark-common/build.gradle
@@ -2,9 +2,13 @@ plugins {
id 'com.google.protobuf' version '0.8.16'
}
+license {
+ exclude '**/sampler/async/jfr/**'
+}
+
dependencies {
api project(':spark-api')
- implementation 'com.github.jvm-profiling-tools:async-profiler:v2.5'
+ implementation 'com.github.jvm-profiling-tools:async-profiler:v2.7'
implementation 'org.ow2.asm:asm:9.1'
implementation 'com.google.protobuf:protobuf-javalite:3.15.6'
implementation 'com.squareup.okhttp3:okhttp:3.14.1'
@@ -32,8 +36,8 @@ dependencies {
processResources {
from(sourceSets.main.resources.srcDirs) {
- include 'linux/libasyncProfiler.so'
- include 'macosx/libasyncProfiler.so'
+ include 'spark/linux/libasyncProfiler.so'
+ include 'spark/macosx/libasyncProfiler.so'
}
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
index 57f8732..0ef4556 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
@@ -40,7 +40,11 @@ import me.lucko.spark.common.command.tabcomplete.CompletionSupplier;
import me.lucko.spark.common.command.tabcomplete.TabCompleter;
import me.lucko.spark.common.monitor.cpu.CpuMonitor;
import me.lucko.spark.common.monitor.memory.GarbageCollectorStatistics;
+import me.lucko.spark.common.monitor.net.NetworkMonitor;
+import me.lucko.spark.common.monitor.ping.PingStatistics;
+import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
import me.lucko.spark.common.monitor.tick.TickStatistics;
+import me.lucko.spark.common.platform.PlatformStatisticsProvider;
import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.common.tick.TickReporter;
import me.lucko.spark.common.util.BytebinClient;
@@ -63,7 +67,9 @@ import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.ReentrantLock;
+import java.util.logging.Level;
import java.util.stream.Collectors;
import static net.kyori.adventure.text.Component.space;
@@ -89,6 +95,7 @@ public class SparkPlatform {
private final String viewerUrl;
private final OkHttpClient httpClient;
private final BytebinClient bytebinClient;
+ private final boolean disableResponseBroadcast;
private final List<CommandModule> commandModules;
private final List<Command> commands;
private final ReentrantLock commandExecuteLock = new ReentrantLock(true);
@@ -96,6 +103,8 @@ public class SparkPlatform {
private final TickHook tickHook;
private final TickReporter tickReporter;
private final TickStatistics tickStatistics;
+ private final PingStatistics pingStatistics;
+ private final PlatformStatisticsProvider statisticsProvider;
private Map<String, GarbageCollectorStatistics> startupGcStatistics = ImmutableMap.of();
private long serverNormalOperationStartTime;
private final AtomicBoolean enabled = new AtomicBoolean(false);
@@ -111,6 +120,8 @@ public class SparkPlatform {
this.httpClient = new OkHttpClient();
this.bytebinClient = new BytebinClient(this.httpClient, bytebinUrl, "spark-plugin");
+ this.disableResponseBroadcast = this.configuration.getBoolean("disableResponseBroadcast", false);
+
this.commandModules = ImmutableList.of(
new SamplerModule(),
new HealthModule(),
@@ -131,7 +142,12 @@ public class SparkPlatform {
this.tickHook = plugin.createTickHook();
this.tickReporter = plugin.createTickReporter();
- this.tickStatistics = this.tickHook != null ? new TickStatistics() : null;
+ this.tickStatistics = this.tickHook != null || this.tickReporter != null ? new TickStatistics() : null;
+
+ PlayerPingProvider pingProvider = plugin.createPlayerPingProvider();
+ this.pingStatistics = pingProvider != null ? new PingStatistics(pingProvider) : null;
+
+ this.statisticsProvider = new PlatformStatisticsProvider(this);
}
public void enable() {
@@ -147,7 +163,11 @@ public class SparkPlatform {
this.tickReporter.addCallback(this.tickStatistics);
this.tickReporter.start();
}
+ if (this.pingStatistics != null) {
+ this.pingStatistics.start();
+ }
CpuMonitor.ensureMonitoring();
+ NetworkMonitor.ensureMonitoring();
// poll startup GC statistics after plugins & the world have loaded
this.plugin.executeAsync(() -> {
@@ -167,6 +187,9 @@ public class SparkPlatform {
if (this.tickReporter != null) {
this.tickReporter.close();
}
+ if (this.pingStatistics != null) {
+ this.pingStatistics.close();
+ }
for (CommandModule module : this.commandModules) {
module.close();
@@ -198,6 +221,10 @@ public class SparkPlatform {
return this.bytebinClient;
}
+ public boolean shouldBroadcastResponse() {
+ return !this.disableResponseBroadcast;
+ }
+
public List<Command> getCommands() {
return this.commands;
}
@@ -214,6 +241,10 @@ public class SparkPlatform {
return this.tickReporter;
}
+ public PlatformStatisticsProvider getStatisticsProvider() {
+ return this.statisticsProvider;
+ }
+
public ClassSourceLookup createClassSourceLookup() {
return this.plugin.createClassSourceLookup();
}
@@ -222,6 +253,10 @@ public class SparkPlatform {
return this.tickStatistics;
}
+ public PingStatistics getPingStatistics() {
+ return this.pingStatistics;
+ }
+
public Map<String, GarbageCollectorStatistics> getStartupGcStatistics() {
return this.startupGcStatistics;
}
@@ -255,12 +290,62 @@ public class SparkPlatform {
}
public void executeCommand(CommandSender sender, String[] args) {
+ AtomicReference<Thread> executorThread = new AtomicReference<>();
+ AtomicReference<Thread> timeoutThread = new AtomicReference<>();
+ AtomicBoolean completed = new AtomicBoolean(false);
+
+ // execute the command
this.plugin.executeAsync(() -> {
+ executorThread.set(Thread.currentThread());
this.commandExecuteLock.lock();
try {
executeCommand0(sender, args);
+ } catch (Exception e) {
+ this.plugin.log(Level.SEVERE, "Exception occurred whilst executing a spark command");
+ e.printStackTrace();
} finally {
this.commandExecuteLock.unlock();
+ executorThread.set(null);
+ completed.set(true);
+
+ Thread timeout = timeoutThread.get();
+ if (timeout != null) {
+ timeout.interrupt();
+ }
+ }
+ });
+
+ // schedule a task to detect timeouts
+ this.plugin.executeAsync(() -> {
+ timeoutThread.set(Thread.currentThread());
+ try {
+ for (int i = 1; i <= 3; i++) {
+ try {
+ Thread.sleep(5000);
+ } catch (InterruptedException e) {
+ // ignore
+ }
+
+ if (completed.get()) {
+ return;
+ }
+
+ Thread executor = executorThread.get();
+ if (executor == null) {
+ getPlugin().log(Level.WARNING, "A command execution has not completed after " +
+ (i * 5) + " seconds but there is no executor present. Perhaps the executor shutdown?");
+
+ } else {
+ String stackTrace = Arrays.stream(executor.getStackTrace())
+ .map(el -> " " + el.toString())
+ .collect(Collectors.joining("\n"));
+
+ getPlugin().log(Level.WARNING, "A command execution has not completed after " +
+ (i * 5) + " seconds, it might be stuck. Trace: \n" + stackTrace);
+ }
+ }
+ } finally {
+ timeoutThread.set(null);
}
});
}
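
The rewritten `executeCommand` pairs the worker task with a watchdog task: the watchdog wakes every five seconds, and if the command has not completed it logs the executor thread's stack trace, so stuck commands become diagnosable. A minimal standalone sketch of the same pattern (the thread pool, timings and messages here are illustrative):

    import java.util.Arrays;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.atomic.AtomicBoolean;
    import java.util.concurrent.atomic.AtomicReference;
    import java.util.stream.Collectors;

    public class WatchdogExample {
        public static void main(String[] args) {
            ExecutorService pool = Executors.newCachedThreadPool();
            AtomicReference<Thread> worker = new AtomicReference<>();
            AtomicBoolean completed = new AtomicBoolean(false);

            // the "command": deliberately slow so the watchdog fires
            pool.execute(() -> {
                worker.set(Thread.currentThread());
                try {
                    Thread.sleep(12_000);
                } catch (InterruptedException ignored) {
                } finally {
                    worker.set(null);
                    completed.set(true);
                }
            });

            // the watchdog: checks after 5s, 10s, 15s
            pool.execute(() -> {
                for (int i = 1; i <= 3 && !completed.get(); i++) {
                    try {
                        Thread.sleep(5_000);
                    } catch (InterruptedException ignored) {
                    }
                    Thread executor = worker.get();
                    if (!completed.get() && executor != null) {
                        String trace = Arrays.stream(executor.getStackTrace())
                                .map(el -> "  " + el)
                                .collect(Collectors.joining("\n"));
                        System.err.println("still running after " + (i * 5) + "s:\n" + trace);
                    }
                }
            });

            pool.shutdown();
        }
    }
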
diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java
index f312916..b817df1 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java
@@ -22,7 +22,9 @@ package me.lucko.spark.common;
import me.lucko.spark.api.Spark;
import me.lucko.spark.common.command.sender.CommandSender;
+import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
import me.lucko.spark.common.platform.PlatformInfo;
+import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider;
import me.lucko.spark.common.sampler.ThreadDumper;
import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.common.tick.TickReporter;
@@ -121,6 +123,26 @@ public interface SparkPlugin {
}
/**
+ * Creates a player ping provider function.
+ *
+ * <p>Returns {@code null} if the platform does not support querying player pings</p>
+ *
+ * @return the player ping provider function
+ */
+ default PlayerPingProvider createPlayerPingProvider() {
+ return null;
+ }
+
+ /**
+ * Creates a server config provider.
+ *
+ * @return the server config provider function
+ */
+ default ServerConfigProvider createServerConfigProvider() {
+ return ServerConfigProvider.NO_OP;
+ }
+
+ /**
* Gets information for the platform.
*
* @return information about the platform
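
Because both new hooks are `default` methods, existing platform modules keep compiling unchanged and only override what they can support. A hypothetical platform implementation (the class name and constant ping are made up; `PlayerPingProvider` exposing a single `poll()` method is taken from the new file added earlier in this diff, which makes a lambda usable here):

    import java.util.Map;

    import me.lucko.spark.common.SparkPlugin;
    import me.lucko.spark.common.monitor.ping.PlayerPingProvider;

    public abstract class ExamplePlatformPlugin implements SparkPlugin {
        @Override
        public PlayerPingProvider createPlayerPingProvider() {
            // platforms that cannot report pings simply keep the inherited
            // default, which returns null
            return () -> Map.of("ExamplePlayer", 42);
        }
    }
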
diff --git a/spark-common/src/main/java/me/lucko/spark/common/api/GarbageCollectorInfo.java b/spark-common/src/main/java/me/lucko/spark/common/api/GarbageCollectorInfo.java
index 8d289aa..fc14c67 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/api/GarbageCollectorInfo.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/api/GarbageCollectorInfo.java
@@ -36,10 +36,8 @@ public class GarbageCollectorInfo implements GarbageCollector {
this.name = name;
this.totalCollections = stats.getCollectionCount();
this.totalTime = stats.getCollectionTime();
-
- double totalTimeDouble = this.totalTime;
- this.averageTime = this.totalCollections == 0 ? 0 : totalTimeDouble / this.totalCollections;
- this.averageFrequency = this.totalCollections == 0 ? 0 : (long) ((serverUptime - totalTimeDouble) / this.totalCollections);
+ this.averageTime = stats.getAverageCollectionTime();
+ this.averageFrequency = stats.getAverageCollectionFrequency(serverUptime);
}
@Override
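
This refactor moves the averaging arithmetic out of `GarbageCollectorInfo` and into `GarbageCollectorStatistics`. Per the removed lines: average collection time is total time divided by collection count, and average frequency is the non-GC portion of uptime divided by collection count. A sketch reconstructing that arithmetic (method bodies inferred from the deleted code, not copied from GarbageCollectorStatistics):

    public class GcAveragesExample {
        static double averageCollectionTime(long collections, long totalTimeMs) {
            return collections == 0 ? 0 : (double) totalTimeMs / collections;
        }

        static long averageCollectionFrequency(long collections, long totalTimeMs, long uptimeMs) {
            return collections == 0 ? 0 : (long) ((uptimeMs - (double) totalTimeMs) / collections);
        }

        public static void main(String[] args) {
            // e.g. 120 collections totalling 1,800 ms over 600,000 ms of uptime
            System.out.println(averageCollectionTime(120, 1_800));               // 15.0 ms per GC
            System.out.println(averageCollectionFrequency(120, 1_800, 600_000)); // ~4985 ms between GCs
        }
    }
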
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/CommandResponseHandler.java b/spark-common/src/main/java/me/lucko/spark/common/command/CommandResponseHandler.java
index a9e2229..d1481bd 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/CommandResponseHandler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/CommandResponseHandler.java
@@ -88,12 +88,20 @@ public class CommandResponseHandler {
}
public void broadcast(Component message) {
- allSenders(sender -> sender.sendMessage(message));
+ if (this.platform.shouldBroadcastResponse()) {
+ allSenders(sender -> sender.sendMessage(message));
+ } else {
+ reply(message);
+ }
}
public void broadcast(Iterable<Component> message) {
- Component joinedMsg = Component.join(JoinConfiguration.separator(Component.newline()), message);
- allSenders(sender -> sender.sendMessage(joinedMsg));
+ if (this.platform.shouldBroadcastResponse()) {
+ Component joinedMsg = Component.join(JoinConfiguration.separator(Component.newline()), message);
+ allSenders(sender -> sender.sendMessage(joinedMsg));
+ } else {
+ reply(message);
+ }
}
public void replyPrefixed(Component message) {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java
index 51fa905..16eadc8 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java
@@ -20,8 +20,6 @@
package me.lucko.spark.common.command.modules;
-import com.google.common.base.Strings;
-
import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.command.Arguments;
import me.lucko.spark.common.command.Command;
@@ -30,25 +28,29 @@ import me.lucko.spark.common.command.CommandResponseHandler;
import me.lucko.spark.common.command.sender.CommandSender;
import me.lucko.spark.common.command.tabcomplete.TabCompleter;
import me.lucko.spark.common.monitor.cpu.CpuMonitor;
+import me.lucko.spark.common.monitor.disk.DiskUsage;
+import me.lucko.spark.common.monitor.net.Direction;
+import me.lucko.spark.common.monitor.net.NetworkInterfaceAverages;
+import me.lucko.spark.common.monitor.net.NetworkMonitor;
+import me.lucko.spark.common.monitor.ping.PingStatistics;
+import me.lucko.spark.common.monitor.ping.PingSummary;
import me.lucko.spark.common.monitor.tick.TickStatistics;
import me.lucko.spark.common.util.FormatUtil;
import me.lucko.spark.common.util.RollingAverage;
+import me.lucko.spark.common.util.StatisticFormatter;
import net.kyori.adventure.text.Component;
-import net.kyori.adventure.text.TextComponent;
-import net.kyori.adventure.text.format.TextColor;
-import java.io.IOException;
import java.lang.management.ManagementFactory;
import java.lang.management.MemoryMXBean;
import java.lang.management.MemoryPoolMXBean;
import java.lang.management.MemoryType;
import java.lang.management.MemoryUsage;
-import java.nio.file.FileStore;
-import java.nio.file.Files;
-import java.nio.file.Paths;
import java.util.LinkedList;
import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Set;
import java.util.function.Consumer;
import static net.kyori.adventure.text.Component.empty;
@@ -60,13 +62,10 @@ import static net.kyori.adventure.text.format.NamedTextColor.GRAY;
import static net.kyori.adventure.text.format.NamedTextColor.GREEN;
import static net.kyori.adventure.text.format.NamedTextColor.RED;
import static net.kyori.adventure.text.format.NamedTextColor.WHITE;
-import static net.kyori.adventure.text.format.NamedTextColor.YELLOW;
import static net.kyori.adventure.text.format.TextDecoration.BOLD;
public class HealthModule implements CommandModule {
- private static final double MSPT_95_PERCENTILE = 0.95d;
-
@Override
public void registerCommands(Consumer<Command> consumer) {
consumer.accept(Command.builder()
@@ -77,10 +76,19 @@ public class HealthModule implements CommandModule {
);
consumer.accept(Command.builder()
+ .aliases("ping")
+ .argumentUsage("player", "username")
+ .executor(HealthModule::ping)
+ .tabCompleter((platform, sender, arguments) -> TabCompleter.completeForOpts(arguments, "--player"))
+ .build()
+ );
+
+ consumer.accept(Command.builder()
.aliases("healthreport", "health", "ht")
.argumentUsage("memory", null)
+ .argumentUsage("network", null)
.executor(HealthModule::healthReport)
- .tabCompleter((platform, sender, arguments) -> TabCompleter.completeForOpts(arguments, "--memory"))
+ .tabCompleter((platform, sender, arguments) -> TabCompleter.completeForOpts(arguments, "--memory", "--network"))
.build()
);
}
@@ -91,11 +99,11 @@ public class HealthModule implements CommandModule {
resp.replyPrefixed(text("TPS from last 5s, 10s, 1m, 5m, 15m:"));
resp.replyPrefixed(text()
.content(" ")
- .append(formatTps(tickStatistics.tps5Sec())).append(text(", "))
- .append(formatTps(tickStatistics.tps10Sec())).append(text(", "))
- .append(formatTps(tickStatistics.tps1Min())).append(text(", "))
- .append(formatTps(tickStatistics.tps5Min())).append(text(", "))
- .append(formatTps(tickStatistics.tps15Min()))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps5Sec())).append(text(", "))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps10Sec())).append(text(", "))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps1Min())).append(text(", "))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps5Min())).append(text(", "))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps15Min()))
.build()
);
resp.replyPrefixed(empty());
@@ -104,8 +112,8 @@ public class HealthModule implements CommandModule {
resp.replyPrefixed(text("Tick durations (min/med/95%ile/max ms) from last 10s, 1m:"));
resp.replyPrefixed(text()
.content(" ")
- .append(formatTickDurations(tickStatistics.duration10Sec())).append(text("; "))
- .append(formatTickDurations(tickStatistics.duration1Min()))
+ .append(StatisticFormatter.formatTickDurations(tickStatistics.duration10Sec())).append(text("; "))
+ .append(StatisticFormatter.formatTickDurations(tickStatistics.duration1Min()))
.build()
);
resp.replyPrefixed(empty());
@@ -115,22 +123,67 @@ public class HealthModule implements CommandModule {
resp.replyPrefixed(text("CPU usage from last 10s, 1m, 15m:"));
resp.replyPrefixed(text()
.content(" ")
- .append(formatCpuUsage(CpuMonitor.systemLoad10SecAvg())).append(text(", "))
- .append(formatCpuUsage(CpuMonitor.systemLoad1MinAvg())).append(text(", "))
- .append(formatCpuUsage(CpuMonitor.systemLoad15MinAvg()))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad10SecAvg())).append(text(", "))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad1MinAvg())).append(text(", "))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad15MinAvg()))
.append(text(" (system)", DARK_GRAY))
.build()
);
resp.replyPrefixed(text()
.content(" ")
- .append(formatCpuUsage(CpuMonitor.processLoad10SecAvg())).append(text(", "))
- .append(formatCpuUsage(CpuMonitor.processLoad1MinAvg())).append(text(", "))
- .append(formatCpuUsage(CpuMonitor.processLoad15MinAvg()))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad10SecAvg())).append(text(", "))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad1MinAvg())).append(text(", "))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad15MinAvg()))
.append(text(" (process)", DARK_GRAY))
.build()
);
}
+ private static void ping(SparkPlatform platform, CommandSender sender, CommandResponseHandler resp, Arguments arguments) {
+ PingStatistics pingStatistics = platform.getPingStatistics();
+ if (pingStatistics == null) {
+ resp.replyPrefixed(text("Ping data is not available on this platform."));
+ return;
+ }
+
+ // lookup for specific player
+ Set<String> players = arguments.stringFlag("player");
+ if (!players.isEmpty()) {
+ for (String player : players) {
+ PingStatistics.PlayerPing playerPing = pingStatistics.query(player);
+ if (playerPing == null) {
+ resp.replyPrefixed(text("Ping data is not available for '" + player + "'."));
+ } else {
+ resp.replyPrefixed(text()
+ .content("Player ")
+ .append(text(playerPing.name(), WHITE))
+ .append(text(" has "))
+ .append(StatisticFormatter.formatPingRtt(playerPing.ping()))
+ .append(text(" ms ping."))
+ .build()
+ );
+ }
+ }
+ return;
+ }
+
+ PingSummary summary = pingStatistics.currentSummary();
+ RollingAverage average = pingStatistics.getPingAverage();
+
+ if (summary.total() == 0 && average.getSamples() == 0) {
+ resp.replyPrefixed(text("There is not enough data to show ping averages yet. Please try again later."));
+ return;
+ }
+
+ resp.replyPrefixed(text("Average Pings (min/med/95%ile/max ms) from now, last 15m:"));
+ resp.replyPrefixed(text()
+ .content(" ")
+ .append(StatisticFormatter.formatPingRtts(summary.min(), summary.median(), summary.percentile95th(), summary.max())).append(text("; "))
+ .append(StatisticFormatter.formatPingRtts(average.min(), average.median(), average.percentile95th(), average.max()))
+ .build()
+ );
+ }
+
private static void healthReport(SparkPlatform platform, CommandSender sender, CommandResponseHandler resp, Arguments arguments) {
resp.replyPrefixed(text("Generating server health report..."));
List<Component> report = new LinkedList<>();
@@ -150,11 +203,9 @@ public class HealthModule implements CommandModule {
addDetailedMemoryStats(report, memoryMXBean);
}
- try {
- addDiskStats(report);
- } catch (IOException e) {
- e.printStackTrace();
- }
+ addNetworkStats(report, arguments.boolFlag("network"));
+
+ addDiskStats(report);
resp.reply(report);
}
@@ -168,11 +219,11 @@ public class HealthModule implements CommandModule {
);
report.add(text()
.content(" ")
- .append(formatTps(tickStatistics.tps5Sec())).append(text(", "))
- .append(formatTps(tickStatistics.tps10Sec())).append(text(", "))
- .append(formatTps(tickStatistics.tps1Min())).append(text(", "))
- .append(formatTps(tickStatistics.tps5Min())).append(text(", "))
- .append(formatTps(tickStatistics.tps15Min()))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps5Sec())).append(text(", "))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps10Sec())).append(text(", "))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps1Min())).append(text(", "))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps5Min())).append(text(", "))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps15Min()))
.build()
);
report.add(empty());
@@ -186,8 +237,8 @@ public class HealthModule implements CommandModule {
);
report.add(text()
.content(" ")
- .append(formatTickDurations(tickStatistics.duration10Sec())).append(text("; "))
- .append(formatTickDurations(tickStatistics.duration1Min()))
+ .append(StatisticFormatter.formatTickDurations(tickStatistics.duration10Sec())).append(text("; "))
+ .append(StatisticFormatter.formatTickDurations(tickStatistics.duration1Min()))
.build()
);
report.add(empty());
@@ -203,17 +254,17 @@ public class HealthModule implements CommandModule {
);
report.add(text()
.content(" ")
- .append(formatCpuUsage(CpuMonitor.systemLoad10SecAvg())).append(text(", "))
- .append(formatCpuUsage(CpuMonitor.systemLoad1MinAvg())).append(text(", "))
- .append(formatCpuUsage(CpuMonitor.systemLoad15MinAvg()))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad10SecAvg())).append(text(", "))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad1MinAvg())).append(text(", "))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad15MinAvg()))
.append(text(" (system)", DARK_GRAY))
.build()
);
report.add(text()
.content(" ")
- .append(formatCpuUsage(CpuMonitor.processLoad10SecAvg())).append(text(", "))
- .append(formatCpuUsage(CpuMonitor.processLoad1MinAvg())).append(text(", "))
- .append(formatCpuUsage(CpuMonitor.processLoad15MinAvg()))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad10SecAvg())).append(text(", "))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad1MinAvg())).append(text(", "))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad15MinAvg()))
.append(text(" (process)", DARK_GRAY))
.build()
);
@@ -241,7 +292,7 @@ public class HealthModule implements CommandModule {
.append(text(")", GRAY))
.build()
);
- report.add(text().content(" ").append(generateMemoryUsageDiagram(heapUsage, 40)).build());
+ report.add(text().content(" ").append(StatisticFormatter.generateMemoryUsageDiagram(heapUsage, 60)).build());
report.add(empty());
}
@@ -292,7 +343,7 @@ public class HealthModule implements CommandModule {
.append(text(")", GRAY))
.build()
);
- report.add(text().content(" ").append(generateMemoryPoolDiagram(usage, collectionUsage, 40)).build());
+ report.add(text().content(" ").append(StatisticFormatter.generateMemoryPoolDiagram(usage, collectionUsage, 60)).build());
if (collectionUsage != null) {
report.add(text()
@@ -309,10 +360,56 @@ public class HealthModule implements CommandModule {
}
}
- private static void addDiskStats(List<Component> report) throws IOException {
- FileStore fileStore = Files.getFileStore(Paths.get("."));
- long totalSpace = fileStore.getTotalSpace();
- long usedSpace = totalSpace - fileStore.getUsableSpace();
+ private static void addNetworkStats(List<Component> report, boolean detailed) {
+ List<Component> averagesReport = new LinkedList<>();
+
+ for (Map.Entry<String, NetworkInterfaceAverages> ent : NetworkMonitor.systemAverages().entrySet()) {
+ String interfaceName = ent.getKey();
+ NetworkInterfaceAverages averages = ent.getValue();
+
+ for (Direction direction : Direction.values()) {
+ long bytesPerSec = (long) averages.bytesPerSecond(direction).mean();
+ long packetsPerSec = (long) averages.packetsPerSecond(direction).mean();
+
+ if (detailed || bytesPerSec > 0 || packetsPerSec > 0) {
+ averagesReport.add(text()
+ .color(GRAY)
+ .content(" ")
+ .append(FormatUtil.formatBytes(bytesPerSec, GREEN, "/s"))
+ .append(text(" / "))
+ .append(text(String.format(Locale.ENGLISH, "%,d", packetsPerSec), WHITE))
+ .append(text(" pps "))
+ .append(text().color(DARK_GRAY)
+ .append(text('('))
+ .append(text(interfaceName + " " + direction.abbrev(), WHITE))
+ .append(text(')'))
+ )
+ .build()
+ );
+ }
+ }
+ }
+
+ if (!averagesReport.isEmpty()) {
+ report.add(text()
+ .append(text(">", DARK_GRAY, BOLD))
+ .append(space())
+ .append(text("Network usage: (system, last 15m)", GOLD))
+ .build()
+ );
+ report.addAll(averagesReport);
+ report.add(empty());
+ }
+ }
+
+ private static void addDiskStats(List<Component> report) {
+ long total = DiskUsage.getTotal();
+ long used = DiskUsage.getUsed();
+
+ if (total == 0 || used == 0) {
+ return;
+ }
+
report.add(text()
.append(text(">", DARK_GRAY, BOLD))
.append(space())
@@ -321,138 +418,19 @@ public class HealthModule implements CommandModule {
);
report.add(text()
.content(" ")
- .append(text(FormatUtil.formatBytes(usedSpace), WHITE))
+ .append(text(FormatUtil.formatBytes(used), WHITE))
.append(space())
.append(text("/", GRAY))
.append(space())
- .append(text(FormatUtil.formatBytes(totalSpace), WHITE))
+ .append(text(FormatUtil.formatBytes(total), WHITE))
.append(text(" "))
.append(text("(", GRAY))
- .append(text(FormatUtil.percent(usedSpace, totalSpace), GREEN))
+ .append(text(FormatUtil.percent(used, total), GREEN))
.append(text(")", GRAY))
.build()
);
- report.add(text().content(" ").append(generateDiskUsageDiagram(usedSpace, totalSpace, 40)).build());
+ report.add(text().content(" ").append(StatisticFormatter.generateDiskUsageDiagram(used, total, 60)).build());
report.add(empty());
}
- public static TextComponent formatTps(double tps) {
- TextColor color;
- if (tps > 18.0) {
- color = GREEN;
- } else if (tps > 16.0) {
- color = YELLOW;
- } else {
- color = RED;
- }
-
- return text((tps > 20.0 ? "*" : "") + Math.min(Math.round(tps * 100.0) / 100.0, 20.0), color);
- }
-
- public static TextComponent formatTickDurations(RollingAverage average) {
- return text()
- .append(formatTickDuration(average.min()))
- .append(text('/', GRAY))
- .append(formatTickDuration(average.median()))
- .append(text('/', GRAY))
- .append(formatTickDuration(average.percentile(MSPT_95_PERCENTILE)))
- .append(text('/', GRAY))
- .append(formatTickDuration(average.max()))
- .build();
- }
-
- public static TextComponent formatTickDuration(double duration) {
- TextColor color;
- if (duration >= 50d) {
- color = RED;
- } else if (duration >= 40d) {
- color = YELLOW;
- } else {
- color = GREEN;
- }
-
- return text(String.format("%.1f", duration), color);
- }
-
- public static TextComponent formatCpuUsage(double usage) {
- TextColor color;
- if (usage > 0.9) {
- color = RED;
- } else if (usage > 0.65) {
- color = YELLOW;
- } else {
- color = GREEN;
- }
-
- return text(FormatUtil.percent(usage, 1d), color);
- }
-
- private static TextComponent generateMemoryUsageDiagram(MemoryUsage usage, int length) {
- double used = usage.getUsed();
- double committed = usage.getCommitted();
- double max = usage.getMax();
-
- int usedChars = (int) ((used * length) / max);
- int committedChars = (int) ((committed * length) / max);
-
- TextComponent.Builder line = text().content(Strings.repeat("/", usedChars)).color(GRAY);
- if (committedChars > usedChars) {
- line.append(text(Strings.repeat(" ", (committedChars - usedChars) - 1)));
- line.append(text("|", YELLOW));
- }
- if (length > committedChars) {
- line.append(text(Strings.repeat(" ", (length - committedChars))));
- }
-
- return text()
- .append(text("[", DARK_GRAY))
- .append(line.build())
- .append(text("]", DARK_GRAY))
- .build();
- }
-
- private static TextComponent generateMemoryPoolDiagram(MemoryUsage usage, MemoryUsage collectionUsage, int length) {
- double used = usage.getUsed();
- double collectionUsed = used;
- if (collectionUsage != null) {
- collectionUsed = collectionUsage.getUsed();
- }
- double committed = usage.getCommitted();
- double max = usage.getMax();
-
- int usedChars = (int) ((used * length) / max);
- int collectionUsedChars = (int) ((collectionUsed * length) / max);
- int committedChars = (int) ((committed * length) / max);
-
- TextComponent.Builder line = text().content(Strings.repeat("/", collectionUsedChars)).color(GRAY);
-
- if (usedChars > collectionUsedChars) {
- line.append(text("|", RED));
- line.append(text(Strings.repeat("/", (usedChars - collectionUsedChars) - 1), GRAY));
- }
- if (committedChars > usedChars) {
- line.append(text(Strings.repeat(" ", (committedChars - usedChars) - 1)));
- line.append(text("|", YELLOW));
- }
- if (length > committedChars) {
- line.append(text(Strings.repeat(" ", (length - committedChars))));
- }
-
- return text()
- .append(text("[", DARK_GRAY))
- .append(line.build())
- .append(text("]", DARK_GRAY))
- .build();
- }
-
- private static TextComponent generateDiskUsageDiagram(double used, double max, int length) {
- int usedChars = (int) ((used * length) / max);
- String line = Strings.repeat("/", usedChars) + Strings.repeat(" ", length - usedChars);
- return text()
- .append(text("[", DARK_GRAY))
- .append(text(line, GRAY))
- .append(text("]", DARK_GRAY))
- .build();
- }
-
}
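The TPS/MSPT/CPU formatting helpers and the bar-diagram generators removed above now live in the shared StatisticFormatter utility, and the diagrams were widened from 40 to 60 characters. Assuming the logic moved unchanged from the code deleted above, the TPS formatter looks like this:

    // sketch of StatisticFormatter.formatTps, assuming the removed logic moved as-is
    public static TextComponent formatTps(double tps) {
        TextColor color;
        if (tps > 18.0) {
            color = GREEN;   // healthy
        } else if (tps > 16.0) {
            color = YELLOW;  // struggling
        } else {
            color = RED;     // overloaded
        }
        // values above 20 are clamped to 20.0 and flagged with an asterisk
        return text((tps > 20.0 ? "*" : "") + Math.min(Math.round(tps * 100.0) / 100.0, 20.0), color);
    }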
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java
index 70f6c3c..1030f35 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java
@@ -30,20 +30,15 @@ import me.lucko.spark.common.command.sender.CommandSender;
import me.lucko.spark.common.command.tabcomplete.TabCompleter;
import me.lucko.spark.common.heapdump.HeapDump;
import me.lucko.spark.common.heapdump.HeapDumpSummary;
+import me.lucko.spark.common.util.Compression;
import me.lucko.spark.common.util.FormatUtil;
-import me.lucko.spark.proto.SparkProtos;
+import me.lucko.spark.proto.SparkHeapProtos;
import net.kyori.adventure.text.event.ClickEvent;
-import org.tukaani.xz.LZMA2Options;
-import org.tukaani.xz.LZMAOutputStream;
-import org.tukaani.xz.XZOutputStream;
-
import okhttp3.MediaType;
import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Iterator;
@@ -51,7 +46,6 @@ import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Consumer;
import java.util.function.LongConsumer;
-import java.util.zip.GZIPOutputStream;
import static net.kyori.adventure.text.Component.text;
import static net.kyori.adventure.text.format.NamedTextColor.GOLD;
@@ -98,7 +92,7 @@ public class HeapAnalysisModule implements CommandModule {
return;
}
- SparkProtos.HeapData output = heapDump.toProto(platform.getPlugin().getPlatformInfo(), sender);
+ SparkHeapProtos.HeapData output = heapDump.toProto(platform, sender);
boolean saveToFile = false;
if (arguments.boolFlag("save-to-file")) {
@@ -117,7 +111,7 @@ public class HeapAnalysisModule implements CommandModule {
);
platform.getActivityLog().addToLog(Activity.urlActivity(sender, System.currentTimeMillis(), "Heap dump summary", url));
- } catch (IOException e) {
+ } catch (Exception e) {
resp.broadcastPrefixed(text("An error occurred whilst uploading the data. Attempting to save to disk instead.", RED));
e.printStackTrace();
saveToFile = true;
@@ -175,11 +169,11 @@ public class HeapAnalysisModule implements CommandModule {
platform.getActivityLog().addToLog(Activity.fileActivity(sender, System.currentTimeMillis(), "Heap dump", file.toString()));
- CompressionMethod compressionMethod = null;
+ Compression compressionMethod = null;
Iterator<String> compressArgs = arguments.stringFlag("compress").iterator();
if (compressArgs.hasNext()) {
try {
- compressionMethod = CompressionMethod.valueOf(compressArgs.next().toUpperCase());
+ compressionMethod = Compression.valueOf(compressArgs.next().toUpperCase());
} catch (IllegalArgumentException e) {
// ignore
}
@@ -194,7 +188,7 @@ public class HeapAnalysisModule implements CommandModule {
}
}
- private static void heapDumpCompress(SparkPlatform platform, CommandResponseHandler resp, Path file, CompressionMethod method) throws IOException {
+ private static void heapDumpCompress(SparkPlatform platform, CommandResponseHandler resp, Path file, Compression method) throws IOException {
resp.broadcastPrefixed(text("Compressing heap dump, please wait..."));
long size = Files.size(file);
@@ -244,71 +238,4 @@ public class HeapAnalysisModule implements CommandModule {
);
}
- public enum CompressionMethod {
- GZIP {
- @Override
- public Path compress(Path file, LongConsumer progressHandler) throws IOException {
- Path compressedFile = file.getParent().resolve(file.getFileName().toString() + ".gz");
- try (InputStream in = Files.newInputStream(file)) {
- try (OutputStream out = Files.newOutputStream(compressedFile)) {
- try (GZIPOutputStream compressionOut = new GZIPOutputStream(out, 1024 * 64)) {
- copy(in, compressionOut, progressHandler);
- }
- }
- }
- return compressedFile;
- }
- },
- XZ {
- @Override
- public Path compress(Path file, LongConsumer progressHandler) throws IOException {
- Path compressedFile = file.getParent().resolve(file.getFileName().toString() + ".xz");
- try (InputStream in = Files.newInputStream(file)) {
- try (OutputStream out = Files.newOutputStream(compressedFile)) {
- try (XZOutputStream compressionOut = new XZOutputStream(out, new LZMA2Options())) {
- copy(in, compressionOut, progressHandler);
- }
- }
- }
- return compressedFile;
- }
- },
- LZMA {
- @Override
- public Path compress(Path file, LongConsumer progressHandler) throws IOException {
- Path compressedFile = file.getParent().resolve(file.getFileName().toString() + ".lzma");
- try (InputStream in = Files.newInputStream(file)) {
- try (OutputStream out = Files.newOutputStream(compressedFile)) {
- try (LZMAOutputStream compressionOut = new LZMAOutputStream(out, new LZMA2Options(), true)) {
- copy(in, compressionOut, progressHandler);
- }
- }
- }
- return compressedFile;
- }
- };
-
- public abstract Path compress(Path file, LongConsumer progressHandler) throws IOException;
-
- private static long copy(InputStream from, OutputStream to, LongConsumer progress) throws IOException {
- byte[] buf = new byte[1024 * 64];
- long total = 0;
- long iterations = 0;
- while (true) {
- int r = from.read(buf);
- if (r == -1) {
- break;
- }
- to.write(buf, 0, r);
- total += r;
-
- // report progress every 5MB
- if (iterations++ % ((1024 / 64) * 5) == 0) {
- progress.accept(total);
- }
- }
- return total;
- }
- }
-
}
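The nested CompressionMethod enum deleted above has been extracted into the shared me.lucko.spark.common.util.Compression, which the command now resolves via Compression.valueOf(...). A sketch of the extracted type, assuming the GZIP branch kept the removed implementation (copy(...) is the progress-reporting stream copier deleted above):

    public enum Compression {
        GZIP {
            @Override
            public Path compress(Path file, LongConsumer progressHandler) throws IOException {
                Path compressedFile = file.getParent().resolve(file.getFileName().toString() + ".gz");
                try (InputStream in = Files.newInputStream(file);
                     OutputStream out = Files.newOutputStream(compressedFile);
                     GZIPOutputStream compressionOut = new GZIPOutputStream(out, 1024 * 64)) {
                    // stream the file through, reporting progress periodically
                    copy(in, compressionOut, progressHandler);
                }
                return compressedFile;
            }
        }; // XZ and LZMA presumably follow the same pattern

        public abstract Path compress(Path file, LongConsumer progressHandler) throws IOException;
    }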
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
index 2dd07c9..970d062 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
@@ -40,7 +40,7 @@ import me.lucko.spark.common.sampler.async.AsyncSampler;
import me.lucko.spark.common.sampler.node.MergeMode;
import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.common.util.MethodDisambiguator;
-import me.lucko.spark.proto.SparkProtos;
+import me.lucko.spark.proto.SparkSamplerProtos;
import net.kyori.adventure.text.event.ClickEvent;
@@ -305,7 +305,7 @@ public class SamplerModule implements CommandModule {
}
private void handleUpload(SparkPlatform platform, CommandResponseHandler resp, Sampler sampler, ThreadNodeOrder threadOrder, String comment, MergeMode mergeMode, boolean saveToFileFlag) {
- SparkProtos.SamplerData output = sampler.toProto(platform.getPlugin().getPlatformInfo(), resp.sender(), threadOrder, comment, mergeMode, platform.createClassSourceLookup());
+ SparkSamplerProtos.SamplerData output = sampler.toProto(platform, resp.sender(), threadOrder, comment, mergeMode, platform.createClassSourceLookup());
boolean saveToFile = false;
if (saveToFileFlag) {
@@ -324,7 +324,7 @@ public class SamplerModule implements CommandModule {
);
platform.getActivityLog().addToLog(Activity.urlActivity(resp.sender(), System.currentTimeMillis(), "Profiler", url));
- } catch (IOException e) {
+ } catch (Exception e) {
resp.broadcastPrefixed(text("An error occurred whilst uploading the results. Attempting to save to disk instead.", RED));
e.printStackTrace();
saveToFile = true;
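As in HeapAnalysisModule above, the upload path now catches Exception rather than just IOException, so unchecked failures (for example from the HTTP client or protobuf serialisation) also fall back to saving the report to disk. The monolithic SparkProtos class has likewise been split into per-feature generated classes: SparkSamplerProtos here, SparkHeapProtos for heap dumps.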
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/tabcomplete/CompletionSupplier.java b/spark-common/src/main/java/me/lucko/spark/common/command/tabcomplete/CompletionSupplier.java
index f1a6d10..9975df5 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/tabcomplete/CompletionSupplier.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/tabcomplete/CompletionSupplier.java
@@ -1,26 +1,21 @@
/*
- * This file is part of LuckPerms, licensed under the MIT License.
+ * This file is part of spark.
*
* Copyright (c) lucko (Luck) <luck@lucko.me>
* Copyright (c) contributors
*
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
*
- * The above copyright notice and this permission notice shall be included in all
- * copies or substantial portions of the Software.
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
*
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- * SOFTWARE.
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package me.lucko.spark.common.command.tabcomplete;
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/tabcomplete/TabCompleter.java b/spark-common/src/main/java/me/lucko/spark/common/command/tabcomplete/TabCompleter.java
index d2b2622..9707f55 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/tabcomplete/TabCompleter.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/tabcomplete/TabCompleter.java
@@ -1,26 +1,21 @@
/*
- * This file is part of LuckPerms, licensed under the MIT License.
+ * This file is part of spark.
*
* Copyright (c) lucko (Luck) <luck@lucko.me>
* Copyright (c) contributors
*
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
*
- * The above copyright notice and this permission notice shall be included in all
- * copies or substantial portions of the Software.
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
*
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- * SOFTWARE.
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package me.lucko.spark.common.command.tabcomplete;
diff --git a/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java b/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java
index 34fd6c4..c0980e7 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java
@@ -20,11 +20,11 @@
package me.lucko.spark.common.heapdump;
+import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.command.sender.CommandSender;
-import me.lucko.spark.common.platform.PlatformInfo;
-import me.lucko.spark.proto.SparkProtos;
-import me.lucko.spark.proto.SparkProtos.HeapData;
-import me.lucko.spark.proto.SparkProtos.HeapEntry;
+import me.lucko.spark.proto.SparkHeapProtos.HeapData;
+import me.lucko.spark.proto.SparkHeapProtos.HeapEntry;
+import me.lucko.spark.proto.SparkHeapProtos.HeapMetadata;
import org.objectweb.asm.Type;
@@ -125,13 +125,24 @@ public final class HeapDumpSummary {
this.entries = entries;
}
- public HeapData toProto(PlatformInfo platformInfo, CommandSender creator) {
+ public HeapData toProto(SparkPlatform platform, CommandSender creator) {
+ HeapMetadata.Builder metadata = HeapMetadata.newBuilder()
+ .setPlatformMetadata(platform.getPlugin().getPlatformInfo().toData().toProto())
+ .setCreator(creator.toData().toProto());
+ try {
+ metadata.setPlatformStatistics(platform.getStatisticsProvider().getPlatformStatistics(null));
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ metadata.setSystemStatistics(platform.getStatisticsProvider().getSystemStatistics());
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+
HeapData.Builder proto = HeapData.newBuilder();
- proto.setMetadata(SparkProtos.HeapMetadata.newBuilder()
- .setPlatformMetadata(platformInfo.toData().toProto())
- .setCreator(creator.toData().toProto())
- .build()
- );
+ proto.setMetadata(metadata);
for (Entry entry : this.entries) {
proto.addEntries(entry.toProto());
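toProto(...) now receives the whole SparkPlatform rather than just its PlatformInfo, so the heap metadata can carry platform and system statistics. Each statistics call is wrapped in its own try/catch: if a provider fails, the error is logged and the dump is still produced, just without that block of metadata.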
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/MonitoringExecutor.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/MonitoringExecutor.java
new file mode 100644
index 0000000..635ae20
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/MonitoringExecutor.java
@@ -0,0 +1,36 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.monitor;
+
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+
+public enum MonitoringExecutor {
+ ;
+
+ /** The executor used to monitor & calculate rolling averages. */
+ public static final ScheduledExecutorService INSTANCE = Executors.newSingleThreadScheduledExecutor(r -> {
+ Thread thread = Executors.defaultThreadFactory().newThread(r);
+ thread.setName("spark-monitor");
+ thread.setDaemon(true);
+ return thread;
+ });
+}
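This replaces the per-monitor schedulers (see the CpuMonitor change below) with a single shared daemon thread, so monitoring never prevents JVM shutdown. Usage, as in CpuMonitor and NetworkMonitor:

    // poll once per second on the shared "spark-monitor" daemon thread
    MonitoringExecutor.INSTANCE.scheduleAtFixedRate(new RollingAverageCollectionTask(), 1, 1, TimeUnit.SECONDS);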
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuInfo.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuInfo.java
new file mode 100644
index 0000000..9bbe0f8
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuInfo.java
@@ -0,0 +1,51 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.monitor.cpu;
+
+import me.lucko.spark.common.util.LinuxProc;
+
+import java.util.regex.Pattern;
+
+/**
+ * Small utility to query the CPU model on Linux systems.
+ */
+public enum CpuInfo {
+ ;
+
+ private static final Pattern SPACE_COLON_SPACE_PATTERN = Pattern.compile("\\s+:\\s");
+
+ /**
+ * Queries the CPU model.
+ *
+ * @return the cpu model
+ */
+ public static String queryCpuModel() {
+ for (String line : LinuxProc.CPUINFO.read()) {
+ String[] splitLine = SPACE_COLON_SPACE_PATTERN.split(line);
+
+ if (splitLine[0].equals("model name") || splitLine[0].equals("Processor")) {
+ return splitLine[1];
+ }
+ }
+ return "";
+ }
+
+}
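The query splits each /proc/cpuinfo line on whitespace-colon-whitespace and returns the value of the first "model name" (x86) or "Processor" (ARM) key, falling back to an empty string when neither is present, as on non-Linux systems where LinuxProc presumably yields no lines. For example (the CPU string is illustrative):

    // "model name\t: Intel(R) Xeon(R) CPU E5-2680 v4 @ 2.40GHz" splits into
    //   ["model name", "Intel(R) Xeon(R) CPU E5-2680 v4 @ 2.40GHz"]
    // so queryCpuModel() returns the model string after the colon.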
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuMonitor.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuMonitor.java
index 43e1f90..b4ab831 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuMonitor.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuMonitor.java
@@ -20,12 +20,11 @@
package me.lucko.spark.common.monitor.cpu;
+import me.lucko.spark.common.monitor.MonitoringExecutor;
import me.lucko.spark.common.util.RollingAverage;
import java.lang.management.ManagementFactory;
import java.math.BigDecimal;
-import java.util.concurrent.Executors;
-import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import javax.management.JMX;
@@ -42,13 +41,6 @@ public enum CpuMonitor {
private static final String OPERATING_SYSTEM_BEAN = "java.lang:type=OperatingSystem";
/** The OperatingSystemMXBean instance */
private static final OperatingSystemMXBean BEAN;
- /** The executor used to monitor & calculate rolling averages. */
- private static final ScheduledExecutorService EXECUTOR = Executors.newSingleThreadScheduledExecutor(r -> {
- Thread thread = Executors.defaultThreadFactory().newThread(r);
- thread.setName("spark-cpu-monitor");
- thread.setDaemon(true);
- return thread;
- });
// Rolling averages for system/process data
private static final RollingAverage SYSTEM_AVERAGE_10_SEC = new RollingAverage(10);
@@ -68,7 +60,7 @@ public enum CpuMonitor {
}
// schedule rolling average calculations.
- EXECUTOR.scheduleAtFixedRate(new RollingAverageCollectionTask(), 1, 1, TimeUnit.SECONDS);
+ MonitoringExecutor.INSTANCE.scheduleAtFixedRate(new RollingAverageCollectionTask(), 1, 1, TimeUnit.SECONDS);
}
/**
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/disk/DiskUsage.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/disk/DiskUsage.java
new file mode 100644
index 0000000..7a4ada4
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/disk/DiskUsage.java
@@ -0,0 +1,71 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.monitor.disk;
+
+import java.io.IOException;
+import java.nio.file.FileStore;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+
+/**
+ * Exposes the system disk usage.
+ */
+public enum DiskUsage {
+ ;
+
+ private static final FileStore FILE_STORE;
+
+ static {
+ FileStore fileStore = null;
+ try {
+ fileStore = Files.getFileStore(Paths.get("."));
+ } catch (IOException e) {
+ // ignore
+ }
+ FILE_STORE = fileStore;
+ }
+
+ public static long getUsed() {
+ if (FILE_STORE == null) {
+ return 0;
+ }
+
+ try {
+ long total = FILE_STORE.getTotalSpace();
+ return total - FILE_STORE.getUsableSpace();
+ } catch (IOException e) {
+ return 0;
+ }
+ }
+
+ public static long getTotal() {
+ if (FILE_STORE == null) {
+ return 0;
+ }
+
+ try {
+ return FILE_STORE.getTotalSpace();
+ } catch (IOException e) {
+ return 0;
+ }
+ }
+
+}
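The FileStore for the working directory is resolved once in a static initialiser; any I/O failure there or during a query yields 0, which callers treat as "no data". This matches the new guard in HealthModule.addDiskStats above:

    long total = DiskUsage.getTotal();
    long used = DiskUsage.getUsed();
    if (total == 0 || used == 0) {
        return; // stats unavailable, render nothing
    }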
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/GarbageCollectorStatistics.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/GarbageCollectorStatistics.java
index c831ea1..cfd12a1 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/GarbageCollectorStatistics.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/GarbageCollectorStatistics.java
@@ -74,6 +74,8 @@ public class GarbageCollectorStatistics {
this(bean.getCollectionCount(), bean.getCollectionTime());
}
+ // all times in milliseconds
+
public long getCollectionCount() {
return this.collectionCount;
}
@@ -82,6 +84,14 @@ public class GarbageCollectorStatistics {
return this.collectionTime;
}
+ public double getAverageCollectionTime() {
+ return this.collectionCount == 0 ? 0 : (double) this.collectionTime / this.collectionCount;
+ }
+
+ public long getAverageCollectionFrequency(long serverUptime) {
+ return this.collectionCount == 0 ? 0 : (long) ((serverUptime - (double) this.collectionTime) / this.collectionCount);
+ }
+
public GarbageCollectorStatistics subtract(GarbageCollectorStatistics other) {
if (other == ZERO || (other.collectionCount == 0 && other.collectionTime == 0)) {
return this;
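A worked example of the new averages (figures illustrative): 100 collections totalling 2,000 ms over a 3,600,000 ms uptime give an average collection time of 2000 / 100 = 20 ms, and an average frequency of (3,600,000 - 2,000) / 100 = 35,980 ms, i.e. one collection roughly every 36 seconds. These are exactly the formulas GarbageCollectorInfo previously computed inline (first hunk above).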
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/MemoryInfo.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/MemoryInfo.java
new file mode 100644
index 0000000..226f75b
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/MemoryInfo.java
@@ -0,0 +1,153 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.monitor.memory;
+
+import me.lucko.spark.common.util.LinuxProc;
+
+import java.lang.management.ManagementFactory;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import javax.management.JMX;
+import javax.management.MBeanServer;
+import javax.management.ObjectName;
+
+/**
+ * Utility to query information about system memory usage.
+ */
+public enum MemoryInfo {
+ ;
+
+ /** The object name of the com.sun.management.OperatingSystemMXBean */
+ private static final String OPERATING_SYSTEM_BEAN = "java.lang:type=OperatingSystem";
+ /** The OperatingSystemMXBean instance */
+ private static final OperatingSystemMXBean BEAN;
+
+ /** The format used by entries in /proc/meminfo */
+ private static final Pattern PROC_MEMINFO_VALUE = Pattern.compile("^(\\w+):\\s*(\\d+) kB$");
+
+ static {
+ try {
+ MBeanServer beanServer = ManagementFactory.getPlatformMBeanServer();
+ ObjectName diagnosticBeanName = ObjectName.getInstance(OPERATING_SYSTEM_BEAN);
+ BEAN = JMX.newMXBeanProxy(beanServer, diagnosticBeanName, OperatingSystemMXBean.class);
+ } catch (Exception e) {
+ throw new UnsupportedOperationException("OperatingSystemMXBean is not supported by the system", e);
+ }
+ }
+
+ /* Swap */
+
+ public static long getUsedSwap() {
+ return BEAN.getTotalSwapSpaceSize() - BEAN.getFreeSwapSpaceSize();
+ }
+
+ public static long getTotalSwap() {
+ return BEAN.getTotalSwapSpaceSize();
+ }
+
+ /* Physical Memory */
+
+ public static long getUsedPhysicalMemory() {
+ return getTotalPhysicalMemory() - getAvailablePhysicalMemory();
+ }
+
+ public static long getTotalPhysicalMemory() {
+ // try to read from /proc/meminfo on linux systems
+ for (String line : LinuxProc.MEMINFO.read()) {
+ Matcher matcher = PROC_MEMINFO_VALUE.matcher(line);
+ if (matcher.matches()) {
+ String label = matcher.group(1);
+ long value = Long.parseLong(matcher.group(2)) * 1024; // kB -> B
+
+ if (label.equals("MemTotal")) {
+ return value;
+ }
+ }
+ }
+
+ // fallback to JVM measure
+ return BEAN.getTotalPhysicalMemorySize();
+ }
+
+ public static long getAvailablePhysicalMemory() {
+ boolean present = false;
+ long free = 0, buffers = 0, cached = 0, sReclaimable = 0;
+
+ for (String line : LinuxProc.MEMINFO.read()) {
+ Matcher matcher = PROC_MEMINFO_VALUE.matcher(line);
+ if (matcher.matches()) {
+ present = true;
+
+ String label = matcher.group(1);
+ long value = Long.parseLong(matcher.group(2)) * 1024; // kB -> B
+
+ // if MemAvailable is set, just return that
+ if (label.equals("MemAvailable")) {
+ return value;
+ }
+
+ // otherwise, record MemFree, Buffers, Cached and SReclaimable
+ switch (label) {
+ case "MemFree":
+ free = value;
+ break;
+ case "Buffers":
+ buffers = value;
+ break;
+ case "Cached":
+ cached = value;
+ break;
+ case "SReclaimable":
+ sReclaimable = value;
+ break;
+ }
+ }
+ }
+
+ // estimate how much is "available" - not exact but this is probably good enough.
+ // most Linux systems (assuming they have been updated in the last ~8 years) will
+ // have MemAvailable set, and we return that instead if present
+ //
+ // useful ref: https://www.linuxatemyram.com/
+ if (present) {
+ return free + buffers + cached + sReclaimable;
+ }
+
+ // fallback to what the JVM understands as "free"
+ // on non-linux systems, this is probably good enough to estimate what's available
+ return BEAN.getFreePhysicalMemorySize();
+ }
+
+ /* Virtual Memory */
+
+ public static long getTotalVirtualMemory() {
+ return BEAN.getCommittedVirtualMemorySize();
+ }
+
+ public interface OperatingSystemMXBean {
+ long getCommittedVirtualMemorySize();
+ long getTotalSwapSpaceSize();
+ long getFreeSwapSpaceSize();
+ long getFreePhysicalMemorySize();
+ long getTotalPhysicalMemorySize();
+ }
+}
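When /proc/meminfo lacks a MemAvailable entry (kernels older than roughly 3.14), availability is approximated as MemFree + Buffers + Cached + SReclaimable, since page cache and reclaimable slab can be dropped under memory pressure. For illustration: MemFree 1 GiB + Buffers 0.2 GiB + Cached 2 GiB + SReclaimable 0.3 GiB estimates 3.5 GiB available, even though MemFree alone reports 1 GiB. Defining a local OperatingSystemMXBean interface and proxying it over JMX also avoids a compile-time dependency on the com.sun.management classes.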
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricSparkGameHooks.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/net/Direction.java
index 5eec53d..d4d11ff 100644
--- a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricSparkGameHooks.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/net/Direction.java
@@ -18,23 +18,20 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-package me.lucko.spark.fabric;
+package me.lucko.spark.common.monitor.net;
-import java.util.function.Predicate;
+public enum Direction {
-public enum FabricSparkGameHooks {
- INSTANCE;
+ RECEIVE("rx"),
+ TRANSMIT("tx");
- // Use events from Fabric API later
- // Return true to abort sending to server
- private Predicate<String> chatSendCallback = s -> false;
+ private final String abbreviation;
- public void setChatSendCallback(Predicate<String> callback) {
- this.chatSendCallback = callback;
+ Direction(String abbreviation) {
+ this.abbreviation = abbreviation;
}
- public boolean tryProcessChat(String message) {
- return this.chatSendCallback.test(message);
+ public String abbrev() {
+ return this.abbreviation;
}
-
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkInterfaceAverages.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkInterfaceAverages.java
new file mode 100644
index 0000000..0ce0639
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkInterfaceAverages.java
@@ -0,0 +1,88 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.monitor.net;
+
+import me.lucko.spark.common.util.RollingAverage;
+
+import java.math.BigDecimal;
+
+public final class NetworkInterfaceAverages {
+ private final RollingAverage rxBytesPerSecond;
+ private final RollingAverage txBytesPerSecond;
+ private final RollingAverage rxPacketsPerSecond;
+ private final RollingAverage txPacketsPerSecond;
+
+ NetworkInterfaceAverages(int windowSize) {
+ this.rxBytesPerSecond = new RollingAverage(windowSize);
+ this.txBytesPerSecond = new RollingAverage(windowSize);
+ this.rxPacketsPerSecond = new RollingAverage(windowSize);
+ this.txPacketsPerSecond = new RollingAverage(windowSize);
+ }
+
+ void accept(NetworkInterfaceInfo info, RateCalculator rateCalculator) {
+ this.rxBytesPerSecond.add(rateCalculator.calculate(info.getReceivedBytes()));
+ this.txBytesPerSecond.add(rateCalculator.calculate(info.getTransmittedBytes()));
+ this.rxPacketsPerSecond.add(rateCalculator.calculate(info.getReceivedPackets()));
+ this.txPacketsPerSecond.add(rateCalculator.calculate(info.getTransmittedPackets()));
+ }
+
+ interface RateCalculator {
+ BigDecimal calculate(long value);
+ }
+
+ public RollingAverage bytesPerSecond(Direction direction) {
+ switch (direction) {
+ case RECEIVE:
+ return rxBytesPerSecond();
+ case TRANSMIT:
+ return txBytesPerSecond();
+ default:
+ throw new AssertionError();
+ }
+ }
+
+ public RollingAverage packetsPerSecond(Direction direction) {
+ switch (direction) {
+ case RECEIVE:
+ return rxPacketsPerSecond();
+ case TRANSMIT:
+ return txPacketsPerSecond();
+ default:
+ throw new AssertionError();
+ }
+ }
+
+ public RollingAverage rxBytesPerSecond() {
+ return this.rxBytesPerSecond;
+ }
+
+ public RollingAverage rxPacketsPerSecond() {
+ return this.rxPacketsPerSecond;
+ }
+
+ public RollingAverage txBytesPerSecond() {
+ return this.txBytesPerSecond;
+ }
+
+ public RollingAverage txPacketsPerSecond() {
+ return this.txPacketsPerSecond;
+ }
+}
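RateCalculator turns a per-poll delta into a per-second figure before it enters the rolling averages. The actual callback lives in NetworkMonitor below (calculateRate, not shown in this hunk); a plausible implementation, assuming simple division by the poll interval:

    // assumption: divide the 60s delta by the interval length to get a per-second rate
    private static BigDecimal calculateRate(long delta) {
        return BigDecimal.valueOf(delta).divide(POLL_INTERVAL_DECIMAL, RoundingMode.HALF_UP);
    }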
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkInterfaceInfo.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkInterfaceInfo.java
new file mode 100644
index 0000000..bd9e187
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkInterfaceInfo.java
@@ -0,0 +1,274 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.monitor.net;
+
+import com.google.common.collect.ImmutableMap;
+
+import me.lucko.spark.common.util.LinuxProc;
+
+import org.checkerframework.checker.nullness.qual.NonNull;
+
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+import java.util.stream.IntStream;
+
+/**
+ * Exposes information/statistics about a network interface.
+ */
+public final class NetworkInterfaceInfo {
+ public static final NetworkInterfaceInfo ZERO = new NetworkInterfaceInfo("", 0, 0, 0, 0, 0, 0);
+
+ private final String name;
+ private final long rxBytes;
+ private final long rxPackets;
+ private final long rxErrors;
+ private final long txBytes;
+ private final long txPackets;
+ private final long txErrors;
+
+ public NetworkInterfaceInfo(String name, long rxBytes, long rxPackets, long rxErrors, long txBytes, long txPackets, long txErrors) {
+ this.name = name;
+ this.rxBytes = rxBytes;
+ this.rxPackets = rxPackets;
+ this.rxErrors = rxErrors;
+ this.txBytes = txBytes;
+ this.txPackets = txPackets;
+ this.txErrors = txErrors;
+ }
+
+ /**
+ * Gets the name of the network interface.
+ *
+ * @return the interface name
+ */
+ public String getName() {
+ return this.name;
+ }
+
+ /**
+ * Gets the total number of bytes of data received by the interface.
+ *
+ * @return the total received bytes
+ */
+ public long getReceivedBytes() {
+ return this.rxBytes;
+ }
+
+ /**
+ * Gets the total number of packets of data received by the interface.
+ *
+ * @return the total received packets
+ */
+ public long getReceivedPackets() {
+ return this.rxPackets;
+ }
+
+ /**
+ * Gets the total number of receive errors detected by the device driver.
+ *
+ * @return the total receive errors
+ */
+ public long getReceiveErrors() {
+ return this.rxErrors;
+ }
+
+ /**
+ * Gets the total number of bytes of data transmitted by the interface.
+ *
+ * @return the total transmitted bytes
+ */
+ public long getTransmittedBytes() {
+ return this.txBytes;
+ }
+
+ /**
+ * Gets the total number of packets of data transmitted by the interface.
+ *
+ * @return the total transmitted packets
+ */
+ public long getTransmittedPackets() {
+ return this.txPackets;
+ }
+
+ /**
+ * Gets the total number of transmit errors detected by the device driver.
+ *
+ * @return the total transmit errors
+ */
+ public long getTransmitErrors() {
+ return this.txErrors;
+ }
+
+ public long getBytes(Direction direction) {
+ switch (direction) {
+ case RECEIVE:
+ return getReceivedBytes();
+ case TRANSMIT:
+ return getTransmittedBytes();
+ default:
+ throw new AssertionError();
+ }
+ }
+
+ public long getPackets(Direction direction) {
+ switch (direction) {
+ case RECEIVE:
+ return getReceivedPackets();
+ case TRANSMIT:
+ return getTransmittedPackets();
+ default:
+ throw new AssertionError();
+ }
+ }
+
+ public boolean isZero() {
+ return this.rxBytes == 0 && this.rxPackets == 0 && this.rxErrors == 0 &&
+ this.txBytes == 0 && this.txPackets == 0 && this.txErrors == 0;
+ }
+
+ public NetworkInterfaceInfo subtract(NetworkInterfaceInfo other) {
+ if (other == ZERO || other.isZero()) {
+ return this;
+ }
+
+ return new NetworkInterfaceInfo(
+ this.name,
+ this.rxBytes - other.rxBytes,
+ this.rxPackets - other.rxPackets,
+ this.rxErrors - other.rxErrors,
+ this.txBytes - other.txBytes,
+ this.txPackets - other.txPackets,
+ this.txErrors - other.txErrors
+ );
+ }
+
+ /**
+     * Calculates the difference between two readings, from which a per-second rate can be derived.
+ *
+ * @param current the polled values
+ * @param previous the previously polled values
+ * @return the difference
+ */
+ public static @NonNull Map<String, NetworkInterfaceInfo> difference(Map<String, NetworkInterfaceInfo> current, Map<String, NetworkInterfaceInfo> previous) {
+ if (previous == null || previous.isEmpty()) {
+ return current;
+ }
+
+ ImmutableMap.Builder<String, NetworkInterfaceInfo> builder = ImmutableMap.builder();
+ for (NetworkInterfaceInfo netInf : current.values()) {
+ String name = netInf.getName();
+ builder.put(name, netInf.subtract(previous.getOrDefault(name, ZERO)));
+ }
+ return builder.build();
+ }
+
+ /**
+ * Queries the network interface statistics for the system.
+ *
+ * <p>Returns an empty {@link Map} if no statistics could be gathered.</p>
+ *
+ * @return the system net stats
+ */
+ public static @NonNull Map<String, NetworkInterfaceInfo> pollSystem() {
+ try {
+ List<String> output = LinuxProc.NET_DEV.read();
+ return read(output);
+ } catch (Exception e) {
+ return Collections.emptyMap();
+ }
+ }
+
+ private static final Pattern PROC_NET_DEV_PATTERN = Pattern.compile("^\\s*(\\w+):([\\d\\s]+)$");
+
+ private static @NonNull Map<String, NetworkInterfaceInfo> read(List<String> output) {
+ // Inter-| Receive | Transmit
+ // face |bytes packets errs drop fifo frame compressed multicast|bytes packets errs drop fifo colls carrier compressed
+ // lo: 2776770 11307 0 0 0 0 0 0 2776770 11307 0 0 0 0 0 0
+ // eth0: 1215645 2751 0 0 0 0 0 0 1782404 4324 0 0 0 427 0 0
+ // ppp0: 1622270 5552 1 0 0 0 0 0 354130 5669 0 0 0 0 0 0
+ // tap0: 7714 81 0 0 0 0 0 0 7714 81 0 0 0 0 0 0
+
+ if (output.size() < 3) {
+ // no data
+ return Collections.emptyMap();
+ }
+
+ String header = output.get(1);
+ String[] categories = header.split("\\|");
+ if (categories.length != 3) {
+ // unknown format
+ return Collections.emptyMap();
+ }
+
+ List<String> rxFields = Arrays.asList(categories[1].trim().split("\\s+"));
+ List<String> txFields = Arrays.asList(categories[2].trim().split("\\s+"));
+
+ int rxFieldsLength = rxFields.size();
+ int txFieldsLength = txFields.size();
+
+ int fieldRxBytes = rxFields.indexOf("bytes");
+ int fieldRxPackets = rxFields.indexOf("packets");
+ int fieldRxErrors = rxFields.indexOf("errs");
+
+ int fieldTxBytes = rxFieldsLength + txFields.indexOf("bytes");
+ int fieldTxPackets = rxFieldsLength + txFields.indexOf("packets");
+ int fieldTxErrors = rxFieldsLength + txFields.indexOf("errs");
+
+ int expectedFields = rxFieldsLength + txFieldsLength;
+
+ if (IntStream.of(fieldRxBytes, fieldRxPackets, fieldRxErrors, fieldTxBytes, fieldTxPackets, fieldTxErrors).anyMatch(i -> i == -1)) {
+ // missing required fields
+ return Collections.emptyMap();
+ }
+
+ ImmutableMap.Builder<String, NetworkInterfaceInfo> builder = ImmutableMap.builder();
+
+ for (String line : output.subList(2, output.size())) {
+ Matcher matcher = PROC_NET_DEV_PATTERN.matcher(line);
+ if (matcher.matches()) {
+ String interfaceName = matcher.group(1);
+ String[] stringValues = matcher.group(2).trim().split("\\s+");
+
+ if (stringValues.length != expectedFields) {
+ continue;
+ }
+
+ long[] values = Arrays.stream(stringValues).mapToLong(Long::parseLong).toArray();
+ builder.put(interfaceName, new NetworkInterfaceInfo(
+ interfaceName,
+ values[fieldRxBytes],
+ values[fieldRxPackets],
+ values[fieldRxErrors],
+ values[fieldTxBytes],
+ values[fieldTxPackets],
+ values[fieldTxErrors]
+ ));
+ }
+ }
+
+ return builder.build();
+ }
+
+}
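The parser reads field positions from the header rather than hard-coding them. Using the sample above: the receive block lists bytes/packets/errs at indices 0/1/2 and, with 8 receive fields, the transmit bytes/packets/errs land at indices 8/9/10 of the 16 values per row, so the eth0 line yields rxBytes=1215645, rxPackets=2751, rxErrors=0, txBytes=1782404, txPackets=4324, txErrors=0. Rows whose value count does not match the header are skipped.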
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkMonitor.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkMonitor.java
new file mode 100644
index 0000000..79ab8d9
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkMonitor.java
@@ -0,0 +1,141 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.monitor.net;
+
+import me.lucko.spark.common.monitor.MonitoringExecutor;
+
+import java.math.BigDecimal;
+import java.math.RoundingMode;
+import java.util.Collections;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicReference;
+import java.util.function.Supplier;
+import java.util.regex.Pattern;
+
+/**
+ * Exposes and monitors the system/process network usage.
+ */
+public enum NetworkMonitor {
+ ;
+
+ // Latest readings
+ private static final AtomicReference<Map<String, NetworkInterfaceInfo>> SYSTEM = new AtomicReference<>();
+
+ // a pattern to match the interface names to exclude from monitoring
+ // ignore: virtual eth adapters + container bridge networks
+ private static final Pattern INTERFACES_TO_IGNORE = Pattern.compile("^(veth\\w+)|(br-\\w+)$");
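+ // e.g. "veth4ac2" and "br-86d38ad6dbff" are excluded; "eth0" is monitored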
+
+ // Rolling averages for system/process data over 15 mins
+ private static final Map<String, NetworkInterfaceAverages> SYSTEM_AVERAGES = new ConcurrentHashMap<>();
+
+ // poll every minute, keep rolling averages for 15 mins
+ private static final int POLL_INTERVAL = 60;
+ private static final int WINDOW_SIZE_SECONDS = (int) TimeUnit.MINUTES.toSeconds(15);
+ private static final int WINDOW_SIZE = WINDOW_SIZE_SECONDS / POLL_INTERVAL; // 15
+
+ static {
+ // schedule rolling average calculations.
+ MonitoringExecutor.INSTANCE.scheduleAtFixedRate(new RollingAverageCollectionTask(), 1, POLL_INTERVAL, TimeUnit.SECONDS);
+ }
+
+ /**
+ * Ensures that the static initializer has been called.
+ */
+ @SuppressWarnings("EmptyMethod")
+ public static void ensureMonitoring() {
+ // intentionally empty
+ }
+
+ public static Map<String, NetworkInterfaceInfo> systemTotals() {
+ Map<String, NetworkInterfaceInfo> values = SYSTEM.get();
+ return values == null ? Collections.emptyMap() : values;
+ }
+
+ public static Map<String, NetworkInterfaceAverages> systemAverages() {
+ return Collections.unmodifiableMap(SYSTEM_AVERAGES);
+ }
+
+ /**
+ * Task to poll network activity and add to the rolling averages in the enclosing class.
+ */
+ private static final class RollingAverageCollectionTask implements Runnable {
+ private static final BigDecimal POLL_INTERVAL_DECIMAL = BigDecimal.valueOf(POLL_INTERVAL);
+
+ @Override
+ public void run() {
+ Map<String, NetworkInterfaceInfo> values = pollAndDiff(NetworkInterfaceInfo::pollSystem, SYSTEM);
+ if (values != null) {
+ submit(SYSTEM_AVERAGES, values);
+ }
+ }
+
+ /**
+ * Submits the incoming values into the rolling averages map.
+ *
+ * @param rollingAveragesMap the rolling averages map to update
+ * @param values the latest polled values
+ */
+ private static void submit(Map<String, NetworkInterfaceAverages> rollingAveragesMap, Map<String, NetworkInterfaceInfo> values) {
+ // ensure all incoming keys are present in the rolling averages map
+ for (String key : values.keySet()) {
+ if (!INTERFACES_TO_IGNORE.matcher(key).matches()) {
+ rollingAveragesMap.computeIfAbsent(key, k -> new NetworkInterfaceAverages(WINDOW_SIZE));
+ }
+ }
+
+ // submit a value (0 if unknown) to each rolling average instance in the map
+ for (Map.Entry<String, NetworkInterfaceAverages> entry : rollingAveragesMap.entrySet()) {
+ String interfaceName = entry.getKey();
+ NetworkInterfaceAverages rollingAvgs = entry.getValue();
+
+ NetworkInterfaceInfo info = values.getOrDefault(interfaceName, NetworkInterfaceInfo.ZERO);
+ rollingAvgs.accept(info, RollingAverageCollectionTask::calculateRate);
+ }
+ }
+
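+ // converts a per-interval delta into a per-second rate,
+ // e.g. a diff of 61440 bytes over the 60s poll interval = 1024 bytes/sec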
+ private static BigDecimal calculateRate(long value) {
+ return BigDecimal.valueOf(value).divide(POLL_INTERVAL_DECIMAL, RoundingMode.HALF_UP);
+ }
+
+ private static Map<String, NetworkInterfaceInfo> pollAndDiff(Supplier<Map<String, NetworkInterfaceInfo>> poller, AtomicReference<Map<String, NetworkInterfaceInfo>> valueReference) {
+ // poll the latest value from the supplier
+ Map<String, NetworkInterfaceInfo> latest = poller.get();
+
+ // update the value ref.
+ // if the previous value was null, and the new value is empty, keep it null
+ Map<String, NetworkInterfaceInfo> previous = valueReference.getAndUpdate(prev -> {
+ if (prev == null && latest.isEmpty()) {
+ return null;
+ } else {
+ return latest;
+ }
+ });
+
+ if (previous == null) {
+ return null;
+ }
+
+ return NetworkInterfaceInfo.difference(latest, previous);
+ }
+ }
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PingStatistics.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PingStatistics.java
new file mode 100644
index 0000000..49fcbe1
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PingStatistics.java
@@ -0,0 +1,149 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.monitor.ping;
+
+import me.lucko.spark.common.monitor.MonitoringExecutor;
+import me.lucko.spark.common.util.RollingAverage;
+
+import org.checkerframework.checker.nullness.qual.Nullable;
+
+import java.math.BigDecimal;
+import java.util.Map;
+import java.util.concurrent.ScheduledFuture;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * Provides statistics for player ping RTT to the server.
+ */
+public final class PingStatistics implements Runnable, AutoCloseable {
+ private static final int QUERY_RATE_SECONDS = 10;
+ private static final int WINDOW_SIZE_SECONDS = (int) TimeUnit.MINUTES.toSeconds(15); // 900
+ private static final int WINDOW_SIZE = WINDOW_SIZE_SECONDS / QUERY_RATE_SECONDS; // 90
+
+ /** The platform function that provides player ping times */
+ private final PlayerPingProvider provider;
+ /** Rolling average of the median ping across all players */
+ private final RollingAverage rollingAverage = new RollingAverage(WINDOW_SIZE);
+
+ /** The scheduler task that polls pings and calculates the rolling average */
+ private ScheduledFuture<?> future;
+
+ public PingStatistics(PlayerPingProvider provider) {
+ this.provider = provider;
+ }
+
+ /**
+ * Starts the statistics monitor.
+ */
+ public void start() {
+ if (this.future != null) {
+ throw new IllegalStateException("statistics monitor already started");
+ }
+ this.future = MonitoringExecutor.INSTANCE.scheduleAtFixedRate(this, QUERY_RATE_SECONDS, QUERY_RATE_SECONDS, TimeUnit.SECONDS);
+ }
+
+ @Override
+ public void close() {
+ if (this.future != null) {
+ this.future.cancel(false);
+ this.future = null;
+ }
+ }
+
+ @Override
+ public void run() {
+ PingSummary summary = currentSummary();
+ if (summary.total() == 0) {
+ return;
+ }
+
+ this.rollingAverage.add(BigDecimal.valueOf(summary.median()));
+ }
+
+ /**
+ * Gets the ping rolling average.
+ *
+ * @return the rolling average
+ */
+ public RollingAverage getPingAverage() {
+ return this.rollingAverage;
+ }
+
+ /**
+ * Queries a summary of current player pings.
+ *
+ * @return a summary of current pings
+ */
+ public PingSummary currentSummary() {
+ Map<String, Integer> results = this.provider.poll();
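+ // non-positive pings are treated as not-yet-measured and excluded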
+ int[] values = results.values().stream().filter(ping -> ping > 0).mapToInt(i -> i).toArray();
+ return values.length == 0
+ ? new PingSummary(new int[]{0})
+ : new PingSummary(values);
+ }
+
+ /**
+ * Queries the ping of a given player.
+ *
+ * @param playerName the name of the player
+ * @return the ping, if available
+ */
+ public @Nullable PlayerPing query(String playerName) {
+ Map<String, Integer> results = this.provider.poll();
+
+ // try exact match
+ Integer result = results.get(playerName);
+ if (result != null) {
+ return new PlayerPing(playerName, result);
+ }
+
+ // try case-insensitive match
+ for (Map.Entry<String, Integer> entry : results.entrySet()) {
+ if (entry.getKey().equalsIgnoreCase(playerName)) {
+ return new PlayerPing(
+ entry.getKey(),
+ entry.getValue()
+ );
+ }
+ }
+
+ return null;
+ }
+
+ public static final class PlayerPing {
+ private final String name;
+ private final int ping;
+
+ PlayerPing(String name, int ping) {
+ this.name = name;
+ this.ping = ping;
+ }
+
+ public String name() {
+ return this.name;
+ }
+
+ public int ping() {
+ return this.ping;
+ }
+ }
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PingSummary.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PingSummary.java
new file mode 100644
index 0000000..024d27d
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PingSummary.java
@@ -0,0 +1,81 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.monitor.ping;
+
+import java.util.Arrays;
+
+public final class PingSummary {
+
+ private final int[] values;
+ private final int total;
+ private final int max;
+ private final int min;
+ private final double mean;
+
+ public PingSummary(int[] values) {
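+ // sort in place so percentiles can be read by index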
+ Arrays.sort(values);
+ this.values = values;
+
+ int total = 0;
+ for (int value : values) {
+ total += value;
+ }
+ this.total = total;
+
+ this.mean = (double) total / values.length;
+ this.max = values[values.length - 1];
+ this.min = values[0];
+ }
+
+ public int total() {
+ return this.total;
+ }
+
+ public double mean() {
+ return this.mean;
+ }
+
+ public int max() {
+ return this.max;
+ }
+
+ public int min() {
+ return this.min;
+ }
+
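+ /**
+ * Returns the value at the given percentile of the sorted samples.
+ *
+ * <p>For example, for sorted values {10, 20, 30, 40}, percentile(0.5)
+ * computes rank ceil(0.5 * 3) = 2 and returns 30.</p>
+ *
+ * @param percentile the percentile, between 0 and 1 inclusive
+ * @return the value at that percentile
+ */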
+ public int percentile(double percentile) {
+ if (percentile < 0 || percentile > 1) {
+ throw new IllegalArgumentException("Invalid percentile " + percentile);
+ }
+
+ int rank = (int) Math.ceil(percentile * (this.values.length - 1));
+ return this.values[rank];
+ }
+
+ public double median() {
+ return percentile(0.50d);
+ }
+
+ public double percentile95th() {
+ return percentile(0.95d);
+ }
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PlayerPingProvider.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PlayerPingProvider.java
new file mode 100644
index 0000000..7576573
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PlayerPingProvider.java
@@ -0,0 +1,40 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.monitor.ping;
+
+import java.util.Map;
+
+/**
+ * Provides information about player ping RTT.
+ */
+@FunctionalInterface
+public interface PlayerPingProvider {
+
+ /**
+ * Polls the current player pings in milliseconds.
+ *
+ * <p>The map keys are player names and the values are the ping values.</p>
+ *
+ * @return a map of player pings
+ */
+ Map<String, Integer> poll();
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TickStatistics.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TickStatistics.java
index 31b58e9..bd2b834 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TickStatistics.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/tick/TickStatistics.java
@@ -56,7 +56,8 @@ public class TickStatistics implements TickHook.Callback, TickReporter.Callback
private boolean durationSupported = false;
private final RollingAverage tickDuration10Sec = new RollingAverage(TPS * 10);
private final RollingAverage tickDuration1Min = new RollingAverage(TPS * 60);
- private final RollingAverage[] tickDurationAverages = {this.tickDuration10Sec, this.tickDuration1Min};
+ private final RollingAverage tickDuration5Min = new RollingAverage(TPS * 60 * 5);
+ private final RollingAverage[] tickDurationAverages = {this.tickDuration10Sec, this.tickDuration1Min, this.tickDuration5Min};
private long last = 0;
@@ -131,6 +132,13 @@ public class TickStatistics implements TickHook.Callback, TickReporter.Callback
return this.tickDuration1Min;
}
+ public RollingAverage duration5Min() {
+ if (!this.durationSupported) {
+ return null;
+ }
+ return this.tickDuration5Min;
+ }
+
/**
* Rolling average calculator.
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/AbstractPlatformInfo.java b/spark-common/src/main/java/me/lucko/spark/common/platform/AbstractPlatformInfo.java
deleted file mode 100644
index 645d5b2..0000000
--- a/spark-common/src/main/java/me/lucko/spark/common/platform/AbstractPlatformInfo.java
+++ /dev/null
@@ -1,17 +0,0 @@
-package me.lucko.spark.common.platform;
-
-import java.lang.management.ManagementFactory;
-import java.lang.management.MemoryUsage;
-
-public abstract class AbstractPlatformInfo implements PlatformInfo {
-
- @Override
- public int getNCpus() {
- return Runtime.getRuntime().availableProcessors();
- }
-
- @Override
- public MemoryUsage getHeapUsage() {
- return ManagementFactory.getMemoryMXBean().getHeapMemoryUsage();
- }
-}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformInfo.java b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformInfo.java
index 80fb85f..082389d 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformInfo.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformInfo.java
@@ -20,13 +20,12 @@
package me.lucko.spark.common.platform;
-import me.lucko.spark.proto.SparkProtos;
import me.lucko.spark.proto.SparkProtos.PlatformMetadata;
-import java.lang.management.MemoryUsage;
-
public interface PlatformInfo {
+ int DATA_VERSION = 2;
+
Type getType();
String getName();
@@ -35,18 +34,13 @@ public interface PlatformInfo {
String getMinecraftVersion();
- int getNCpus();
-
- MemoryUsage getHeapUsage();
-
default int getSparkVersion() {
// does not necessarily correspond to the plugin/mod version
- // this is like a data version I suppose
- return 1;
+ return DATA_VERSION;
}
default Data toData() {
- return new Data(getType(), getName(), getVersion(), getMinecraftVersion(), getNCpus(), getHeapUsage(), getSparkVersion());
+ return new Data(getType(), getName(), getVersion(), getMinecraftVersion(), getSparkVersion());
}
enum Type {
@@ -70,17 +64,13 @@ public interface PlatformInfo {
private final String name;
private final String version;
private final String minecraftVersion;
- private final int nCpus;
- private final MemoryUsage heapUsage;
private final int sparkVersion;
- public Data(Type type, String name, String version, String minecraftVersion, int nCpus, MemoryUsage heapUsage, int sparkVersion) {
+ public Data(Type type, String name, String version, String minecraftVersion, int sparkVersion) {
this.type = type;
this.name = name;
this.version = version;
this.minecraftVersion = minecraftVersion;
- this.nCpus = nCpus;
- this.heapUsage = heapUsage;
this.sparkVersion = sparkVersion;
}
@@ -100,33 +90,15 @@ public interface PlatformInfo {
return this.minecraftVersion;
}
- public int getNCpus() {
- return this.nCpus;
- }
-
- public MemoryUsage getHeapUsage() {
- return this.heapUsage;
- }
-
public int getSparkVersion() {
return this.sparkVersion;
}
- public SparkProtos.MemoryUsage getHeapUsageProto() {
- return SparkProtos.MemoryUsage.newBuilder()
- .setUsed(this.heapUsage.getUsed())
- .setCommitted(this.heapUsage.getCommitted())
- .setMax(this.heapUsage.getMax())
- .build();
- }
-
public PlatformMetadata toProto() {
PlatformMetadata.Builder proto = PlatformMetadata.newBuilder()
.setType(this.type.toProto())
.setName(this.name)
.setVersion(this.version)
- .setNCpus(this.nCpus)
- .setHeapUsage(getHeapUsageProto())
.setSparkVersion(this.sparkVersion);
if (this.minecraftVersion != null) {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java
new file mode 100644
index 0000000..f35bbbe
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java
@@ -0,0 +1,188 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform;
+
+import me.lucko.spark.common.SparkPlatform;
+import me.lucko.spark.common.monitor.cpu.CpuInfo;
+import me.lucko.spark.common.monitor.cpu.CpuMonitor;
+import me.lucko.spark.common.monitor.disk.DiskUsage;
+import me.lucko.spark.common.monitor.memory.GarbageCollectorStatistics;
+import me.lucko.spark.common.monitor.memory.MemoryInfo;
+import me.lucko.spark.common.monitor.net.NetworkInterfaceAverages;
+import me.lucko.spark.common.monitor.net.NetworkMonitor;
+import me.lucko.spark.common.monitor.ping.PingStatistics;
+import me.lucko.spark.common.monitor.tick.TickStatistics;
+import me.lucko.spark.proto.SparkProtos.PlatformStatistics;
+import me.lucko.spark.proto.SparkProtos.SystemStatistics;
+
+import java.lang.management.ManagementFactory;
+import java.lang.management.MemoryUsage;
+import java.lang.management.RuntimeMXBean;
+import java.util.Map;
+
+public class PlatformStatisticsProvider {
+ private final SparkPlatform platform;
+
+ public PlatformStatisticsProvider(SparkPlatform platform) {
+ this.platform = platform;
+ }
+
+ public SystemStatistics getSystemStatistics() {
+ RuntimeMXBean runtimeBean = ManagementFactory.getRuntimeMXBean();
+
+ SystemStatistics.Builder builder = SystemStatistics.newBuilder()
+ .setCpu(SystemStatistics.Cpu.newBuilder()
+ .setThreads(Runtime.getRuntime().availableProcessors())
+ .setProcessUsage(SystemStatistics.Cpu.Usage.newBuilder()
+ .setLast1M(CpuMonitor.processLoad1MinAvg())
+ .setLast15M(CpuMonitor.processLoad15MinAvg())
+ .build()
+ )
+ .setSystemUsage(SystemStatistics.Cpu.Usage.newBuilder()
+ .setLast1M(CpuMonitor.systemLoad1MinAvg())
+ .setLast15M(CpuMonitor.systemLoad15MinAvg())
+ .build()
+ )
+ .setModelName(CpuInfo.queryCpuModel())
+ .build()
+ )
+ .setMemory(SystemStatistics.Memory.newBuilder()
+ .setPhysical(SystemStatistics.Memory.MemoryPool.newBuilder()
+ .setUsed(MemoryInfo.getUsedPhysicalMemory())
+ .setTotal(MemoryInfo.getTotalPhysicalMemory())
+ .build()
+ )
+ .setSwap(SystemStatistics.Memory.MemoryPool.newBuilder()
+ .setUsed(MemoryInfo.getUsedSwap())
+ .setTotal(MemoryInfo.getTotalSwap())
+ .build()
+ )
+ .build()
+ )
+ .setDisk(SystemStatistics.Disk.newBuilder()
+ .setTotal(DiskUsage.getTotal())
+ .setUsed(DiskUsage.getUsed())
+ .build()
+ )
+ .setOs(SystemStatistics.Os.newBuilder()
+ .setArch(System.getProperty("os.arch"))
+ .setName(System.getProperty("os.name"))
+ .setVersion(System.getProperty("os.version"))
+ .build()
+ )
+ .setJava(SystemStatistics.Java.newBuilder()
+ .setVendor(System.getProperty("java.vendor", "unknown"))
+ .setVersion(System.getProperty("java.version", "unknown"))
+ .setVendorVersion(System.getProperty("java.vendor.version", "unknown"))
+ .setVmArgs(String.join(" ", runtimeBean.getInputArguments()))
+ .build()
+ );
+
+ long uptime = runtimeBean.getUptime();
+ builder.setUptime(uptime);
+
+ Map<String, GarbageCollectorStatistics> gcStats = GarbageCollectorStatistics.pollStats();
+ gcStats.forEach((name, statistics) -> builder.putGc(
+ name,
+ SystemStatistics.Gc.newBuilder()
+ .setTotal(statistics.getCollectionCount())
+ .setAvgTime(statistics.getAverageCollectionTime())
+ .setAvgFrequency(statistics.getAverageCollectionFrequency(uptime))
+ .build()
+ ));
+
+ Map<String, NetworkInterfaceAverages> networkInterfaceStats = NetworkMonitor.systemAverages();
+ networkInterfaceStats.forEach((name, statistics) -> builder.putNet(
+ name,
+ SystemStatistics.NetInterface.newBuilder()
+ .setRxBytesPerSecond(statistics.rxBytesPerSecond().toProto())
+ .setRxPacketsPerSecond(statistics.rxPacketsPerSecond().toProto())
+ .setTxBytesPerSecond(statistics.txBytesPerSecond().toProto())
+ .setTxPacketsPerSecond(statistics.txPacketsPerSecond().toProto())
+ .build()
+ ));
+
+ return builder.build();
+ }
+
+ public PlatformStatistics getPlatformStatistics(Map<String, GarbageCollectorStatistics> startingGcStatistics) {
+ PlatformStatistics.Builder builder = PlatformStatistics.newBuilder();
+
+ MemoryUsage memoryUsage = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage();
+ builder.setMemory(PlatformStatistics.Memory.newBuilder()
+ .setHeap(PlatformStatistics.Memory.MemoryPool.newBuilder()
+ .setUsed(memoryUsage.getUsed())
+ .setTotal(memoryUsage.getCommitted())
+ .build()
+ )
+ .build()
+ );
+
+ long uptime = System.currentTimeMillis() - this.platform.getServerNormalOperationStartTime();
+ builder.setUptime(uptime);
+
+ if (startingGcStatistics != null) {
+ Map<String, GarbageCollectorStatistics> gcStats = GarbageCollectorStatistics.pollStatsSubtractInitial(startingGcStatistics);
+ gcStats.forEach((name, statistics) -> builder.putGc(
+ name,
+ PlatformStatistics.Gc.newBuilder()
+ .setTotal(statistics.getCollectionCount())
+ .setAvgTime(statistics.getAverageCollectionTime())
+ .setAvgFrequency(statistics.getAverageCollectionFrequency(uptime))
+ .build()
+ ));
+ }
+
+ TickStatistics tickStatistics = this.platform.getTickStatistics();
+ if (tickStatistics != null) {
+ builder.setTps(PlatformStatistics.Tps.newBuilder()
+ .setLast1M(tickStatistics.tps1Min())
+ .setLast5M(tickStatistics.tps5Min())
+ .setLast15M(tickStatistics.tps15Min())
+ .build()
+ );
+ if (tickStatistics.isDurationSupported()) {
+ builder.setMspt(PlatformStatistics.Mspt.newBuilder()
+ .setLast1M(tickStatistics.duration1Min().toProto())
+ .setLast5M(tickStatistics.duration5Min().toProto())
+ .build()
+ );
+ }
+ }
+
+ PingStatistics pingStatistics = this.platform.getPingStatistics();
+ if (pingStatistics != null && pingStatistics.getPingAverage().getSamples() != 0) {
+ builder.setPing(PlatformStatistics.Ping.newBuilder()
+ .setLast15M(pingStatistics.getPingAverage().toProto())
+ .build()
+ );
+ }
+
+ PlatformInfo.Type platformType = this.platform.getPlugin().getPlatformInfo().getType();
+ if (platformType != PlatformInfo.Type.CLIENT) {
+ long playerCount = this.platform.getPlugin().getCommandSenders().count() - 1; // the count includes the console sender, so subtract one
+ builder.setPlayerCount(playerCount);
+ }
+
+ return builder.build();
+ }
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java
new file mode 100644
index 0000000..ead2131
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java
@@ -0,0 +1,136 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform.serverconfig;
+
+import com.google.common.collect.ImmutableMap;
+import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Deque;
+import java.util.LinkedList;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+/**
+ * Abstract implementation of {@link ServerConfigProvider}.
+ *
+ * <p>This implementation is able to delete hidden paths from
+ * the configurations before they are sent to the viewer.</p>
+ *
+ * @param <T> the file type
+ */
+public abstract class AbstractServerConfigProvider<T extends Enum<T>> implements ServerConfigProvider {
+ private final Map<String, T> files;
+ private final Collection<String> hiddenPaths;
+
+ protected AbstractServerConfigProvider(Map<String, T> files, Collection<String> hiddenPaths) {
+ this.files = files;
+ this.hiddenPaths = hiddenPaths;
+ }
+
+ @Override
+ public final Map<String, JsonElement> loadServerConfigurations() {
+ ImmutableMap.Builder<String, JsonElement> builder = ImmutableMap.builder();
+
+ this.files.forEach((path, type) -> {
+ try {
+ JsonElement json = load(path, type);
+ if (json != null) {
+ delete(json, this.hiddenPaths);
+ builder.put(path, json);
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ });
+
+ return builder.build();
+ }
+
+ /**
+ * Loads a file from the system.
+ *
+ * @param path the name of the file to load
+ * @param type the type of the file
+ * @return the loaded file
+ * @throws IOException if an error occurs performing i/o
+ */
+ protected abstract JsonElement load(String path, T type) throws IOException;
+
+ /**
+ * Deletes the given paths from the json element.
+ *
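+ * <p>Paths are dot-delimited, e.g. "database.password". A "*" segment matches
+ * every key, a trailing "*" matches keys by prefix, and "&lt;dot&gt;" escapes a
+ * literal dot within a key name.</p>
+ *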
+ * @param json the json element
+ * @param paths the paths to delete
+ */
+ private static void delete(JsonElement json, Collection<String> paths) {
+ for (String path : paths) {
+ Deque<String> pathDeque = new LinkedList<>(Arrays.asList(path.split("\\.")));
+ delete(json, pathDeque);
+ }
+ }
+
+ private static void delete(JsonElement json, Deque<String> path) {
+ if (path.isEmpty()) {
+ return;
+ }
+ if (!json.isJsonObject()) {
+ return;
+ }
+
+ JsonObject jsonObject = json.getAsJsonObject();
+ String expected = path.removeFirst().replace("<dot>", ".");
+
+ Collection<String> keys;
+ if (expected.equals("*")) {
+ keys = jsonObject.entrySet().stream()
+ .map(Map.Entry::getKey)
+ .collect(Collectors.toList());
+ } else if (expected.endsWith("*")) {
+ String pattern = expected.substring(0, expected.length() - 1);
+ keys = jsonObject.entrySet().stream()
+ .map(Map.Entry::getKey)
+ .filter(key -> key.startsWith(pattern))
+ .collect(Collectors.toList());
+ } else if (jsonObject.has(expected)) {
+ keys = Collections.singletonList(expected);
+ } else {
+ keys = Collections.emptyList();
+ }
+
+ for (String key : keys) {
+ if (path.isEmpty()) {
+ jsonObject.remove(key);
+ } else {
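+ // each matching key consumes the remaining path, so copy it when several keys match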
+ Deque<String> pathCopy = keys.size() > 1
+ ? new LinkedList<>(path)
+ : path;
+
+ delete(jsonObject.get(key), pathCopy);
+ }
+ }
+ }
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesFileReader.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesFileReader.java
new file mode 100644
index 0000000..8fc89d7
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesFileReader.java
@@ -0,0 +1,64 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform.serverconfig;
+
+import java.io.FilterReader;
+import java.io.IOException;
+import java.io.Reader;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Properties;
+
+/**
+ * A {@link Reader} that can parse a .properties file.
+ */
+public class PropertiesFileReader extends FilterReader {
+
+ public PropertiesFileReader(Reader in) {
+ super(in);
+ }
+
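+ /**
+ * Reads the properties file, coercing values: "true"/"false" become booleans,
+ * digit-only strings become longs, and anything else stays a string.
+ *
+ * <p>For example, "max-players=20" and "online-mode=true" are read as the
+ * values 20L and true respectively.</p>
+ *
+ * @return the parsed key/value pairs
+ * @throws IOException if an error occurs reading the file
+ */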
+ public Map<String, Object> readProperties() throws IOException {
+ Properties properties = new Properties();
+ properties.load(this);
+
+ Map<String, Object> values = new HashMap<>();
+ properties.forEach((k, v) -> {
+ String key = k.toString();
+ String value = v.toString();
+
+ if ("true".equals(value) || "false".equals(value)) {
+ values.put(key, Boolean.parseBoolean(value));
+ } else if (value.matches("\\d+")) {
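+ // the regex guarantees digits only, but the value may still overflow a long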
+ try {
+ values.put(key, Long.parseLong(value));
+ } catch (NumberFormatException e) {
+ values.put(key, value);
+ }
+ } else {
+ values.put(key, value);
+ }
+ });
+
+ return values;
+ }
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java
new file mode 100644
index 0000000..1fc2391
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java
@@ -0,0 +1,59 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform.serverconfig;
+
+import com.google.gson.JsonElement;
+
+import java.util.Collections;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+/**
+ * Function to export server configuration files for access within the spark viewer.
+ */
+@FunctionalInterface
+public interface ServerConfigProvider {
+
+ /**
+ * Loads a map of the server configuration files.
+ *
+ * <p>The key is the name of the file and the value is a
+ * {@link JsonElement} of the contents.</p>
+ *
+ * @return the exported server configurations
+ */
+ Map<String, JsonElement> loadServerConfigurations();
+
+ default Map<String, String> exportServerConfigurations() {
+ return loadServerConfigurations().entrySet()
+ .stream()
+ .collect(Collectors.toMap(
+ Map.Entry::getKey,
+ e -> e.getValue().toString()
+ ));
+ }
+
+ /**
+ * A no-op implementation.
+ */
+ ServerConfigProvider NO_OP = Collections::emptyMap;
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java
index bae93b1..ce466a0 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java
@@ -20,6 +20,20 @@
package me.lucko.spark.common.sampler;
+import me.lucko.spark.common.SparkPlatform;
+import me.lucko.spark.common.command.sender.CommandSender;
+import me.lucko.spark.common.monitor.memory.GarbageCollectorStatistics;
+import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider;
+import me.lucko.spark.common.sampler.aggregator.DataAggregator;
+import me.lucko.spark.common.sampler.node.MergeMode;
+import me.lucko.spark.common.sampler.node.ThreadNode;
+import me.lucko.spark.common.util.ClassSourceLookup;
+import me.lucko.spark.proto.SparkSamplerProtos.SamplerData;
+import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata;
+
+import java.util.Comparator;
+import java.util.List;
+import java.util.Map;
import java.util.concurrent.CompletableFuture;
/**
@@ -42,6 +56,9 @@ public abstract class AbstractSampler implements Sampler {
/** A future to encapsulate the completion of this sampler instance */
protected final CompletableFuture<Sampler> future = new CompletableFuture<>();
+ /** The garbage collector statistics when profiling started */
+ protected Map<String, GarbageCollectorStatistics> initialGcStats;
+
protected AbstractSampler(int interval, ThreadDumper threadDumper, long endTime) {
this.interval = interval;
this.threadDumper = threadDumper;
@@ -65,4 +82,64 @@ public abstract class AbstractSampler implements Sampler {
public CompletableFuture<Sampler> getFuture() {
return this.future;
}
+
+ protected void recordInitialGcStats() {
+ this.initialGcStats = GarbageCollectorStatistics.pollStats();
+ }
+
+ protected Map<String, GarbageCollectorStatistics> getInitialGcStats() {
+ return this.initialGcStats;
+ }
+
+ protected void writeMetadataToProto(SamplerData.Builder proto, SparkPlatform platform, CommandSender creator, String comment, DataAggregator dataAggregator) {
+ SamplerMetadata.Builder metadata = SamplerMetadata.newBuilder()
+ .setPlatformMetadata(platform.getPlugin().getPlatformInfo().toData().toProto())
+ .setCreator(creator.toData().toProto())
+ .setStartTime(this.startTime)
+ .setEndTime(System.currentTimeMillis())
+ .setInterval(this.interval)
+ .setThreadDumper(this.threadDumper.getMetadata())
+ .setDataAggregator(dataAggregator.getMetadata());
+
+ if (comment != null) {
+ metadata.setComment(comment);
+ }
+
+ try {
+ metadata.setPlatformStatistics(platform.getStatisticsProvider().getPlatformStatistics(getInitialGcStats()));
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ metadata.setSystemStatistics(platform.getStatisticsProvider().getSystemStatistics());
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ ServerConfigProvider serverConfigProvider = platform.getPlugin().createServerConfigProvider();
+ metadata.putAllServerConfigurations(serverConfigProvider.exportServerConfigurations());
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ proto.setMetadata(metadata);
+ }
+
+ protected void writeDataToProto(SamplerData.Builder proto, DataAggregator dataAggregator, Comparator<ThreadNode> outputOrder, MergeMode mergeMode, ClassSourceLookup classSourceLookup) {
+ List<ThreadNode> data = dataAggregator.exportData();
+ data.sort(outputOrder);
+
+ ClassSourceLookup.Visitor classSourceVisitor = ClassSourceLookup.createVisitor(classSourceLookup);
+
+ for (ThreadNode entry : data) {
+ proto.addThreads(entry.toProto(mergeMode));
+ classSourceVisitor.visit(entry);
+ }
+
+ if (classSourceVisitor.hasMappings()) {
+ proto.putAllClassSources(classSourceVisitor.getMapping());
+ }
+ }
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
index b71aaee..845043f 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
@@ -20,15 +20,14 @@
package me.lucko.spark.common.sampler;
+import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.command.sender.CommandSender;
-import me.lucko.spark.common.platform.PlatformInfo;
import me.lucko.spark.common.sampler.node.MergeMode;
import me.lucko.spark.common.sampler.node.ThreadNode;
import me.lucko.spark.common.util.ClassSourceLookup;
-import me.lucko.spark.proto.SparkProtos.SamplerData;
+import me.lucko.spark.proto.SparkSamplerProtos.SamplerData;
import java.util.Comparator;
-import java.util.Map;
import java.util.concurrent.CompletableFuture;
/**
@@ -68,6 +67,6 @@ public interface Sampler {
CompletableFuture<Sampler> getFuture();
// Methods used to export the sampler data to the web viewer.
- SamplerData toProto(PlatformInfo platformInfo, CommandSender creator, Comparator<? super Map.Entry<String, ThreadNode>> outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup);
+ SamplerData toProto(SparkPlatform platform, CommandSender creator, Comparator<ThreadNode> outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup);
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java
index e99114a..9d54f50 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java
@@ -1,7 +1,6 @@
/*
* This file is part of spark.
*
- * Copyright (C) Albert Pham <http://www.sk89q.com>
* Copyright (c) lucko (Luck) <luck@lucko.me>
* Copyright (c) contributors
*
@@ -22,7 +21,7 @@
package me.lucko.spark.common.sampler;
import me.lucko.spark.common.util.ThreadFinder;
-import me.lucko.spark.proto.SparkProtos.SamplerMetadata;
+import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata;
import java.lang.management.ThreadInfo;
import java.lang.management.ThreadMXBean;
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java
index e63ebc8..9ad84df 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java
@@ -20,9 +20,11 @@
package me.lucko.spark.common.sampler;
-import me.lucko.spark.proto.SparkProtos.SamplerMetadata;
+import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata;
+import java.util.Collections;
import java.util.Map;
+import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@@ -42,6 +44,11 @@ public interface ThreadGrouper {
}
@Override
+ public String getLabel(String group) {
+ return group;
+ }
+
+ @Override
public SamplerMetadata.DataAggregator.ThreadGrouper asProto() {
return SamplerMetadata.DataAggregator.ThreadGrouper.BY_NAME;
}
@@ -55,14 +62,18 @@ public interface ThreadGrouper {
* separated from the pool name with any of one or more of ' ', '-', or '#'.</p>
*/
ThreadGrouper BY_POOL = new ThreadGrouper() {
+ private /* static */ final Pattern pattern = Pattern.compile("^(.*?)[-# ]+\\d+$");
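+ // e.g. "Craft Scheduler Thread - 10" is grouped as "Craft Scheduler Thread"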
+
+ // thread id -> group
private final Map<Long, String> cache = new ConcurrentHashMap<>();
- private final Pattern pattern = Pattern.compile("^(.*?)[-# ]+\\d+$");
+ // group -> thread ids
+ private final Map<String, Set<Long>> seen = new ConcurrentHashMap<>();
@Override
public String getGroup(long threadId, String threadName) {
- String group = this.cache.get(threadId);
- if (group != null) {
- return group;
+ String cached = this.cache.get(threadId);
+ if (cached != null) {
+ return cached;
}
Matcher matcher = this.pattern.matcher(threadName);
@@ -70,12 +81,22 @@ public interface ThreadGrouper {
return threadName;
}
- group = matcher.group(1).trim() + " (Combined)";
- this.cache.put(threadId, group); // we don't care about race conditions here
+ String group = matcher.group(1).trim();
+ this.cache.put(threadId, group);
+ this.seen.computeIfAbsent(group, g -> ConcurrentHashMap.newKeySet()).add(threadId);
return group;
}
@Override
+ public String getLabel(String group) {
+ int count = this.seen.getOrDefault(group, Collections.emptySet()).size();
+ if (count == 0) {
+ return group;
+ }
+ return group + " (x" + count + ")";
+ }
+
+ @Override
public SamplerMetadata.DataAggregator.ThreadGrouper asProto() {
return SamplerMetadata.DataAggregator.ThreadGrouper.BY_POOL;
}
@@ -86,9 +107,17 @@ public interface ThreadGrouper {
* the name "All".
*/
ThreadGrouper AS_ONE = new ThreadGrouper() {
+ private final Set<Long> seen = ConcurrentHashMap.newKeySet();
+
@Override
public String getGroup(long threadId, String threadName) {
- return "All";
+ this.seen.add(threadId);
+ return "root";
+ }
+
+ @Override
+ public String getLabel(String group) {
+ return "All (x" + this.seen.size() + ")";
}
@Override
@@ -106,6 +135,14 @@ public interface ThreadGrouper {
*/
String getGroup(long threadId, String threadName);
+ /**
+ * Gets the label to use for a given group.
+ *
+ * @param group the group
+ * @return the label
+ */
+ String getLabel(String group);
+
SamplerMetadata.DataAggregator.ThreadGrouper asProto();
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadNodeOrder.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadNodeOrder.java
index 4fa8ff4..adcedcd 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadNodeOrder.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadNodeOrder.java
@@ -23,20 +23,19 @@ package me.lucko.spark.common.sampler;
import me.lucko.spark.common.sampler.node.ThreadNode;
import java.util.Comparator;
-import java.util.Map;
/**
* Methods of ordering {@link ThreadNode}s in the output data.
*/
-public enum ThreadNodeOrder implements Comparator<Map.Entry<String, ThreadNode>> {
+public enum ThreadNodeOrder implements Comparator<ThreadNode> {
/**
* Order by the name of the thread (alphabetically)
*/
BY_NAME {
@Override
- public int compare(Map.Entry<String, ThreadNode> o1, Map.Entry<String, ThreadNode> o2) {
- return o1.getKey().compareTo(o2.getKey());
+ public int compare(ThreadNode o1, ThreadNode o2) {
+ return o1.getThreadLabel().compareTo(o2.getThreadLabel());
}
},
@@ -45,8 +44,8 @@ public enum ThreadNodeOrder implements Comparator<Map.Entry<String, ThreadNode>>
*/
BY_TIME {
@Override
- public int compare(Map.Entry<String, ThreadNode> o1, Map.Entry<String, ThreadNode> o2) {
- return -Double.compare(o1.getValue().getTotalTime(), o2.getValue().getTotalTime());
+ public int compare(ThreadNode o1, ThreadNode o2) {
+ return -Double.compare(o1.getTotalTime(), o2.getTotalTime());
}
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/AbstractDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/AbstractDataAggregator.java
index 7640d60..ad9dee4 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/AbstractDataAggregator.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/AbstractDataAggregator.java
@@ -23,6 +23,8 @@ package me.lucko.spark.common.sampler.aggregator;
import me.lucko.spark.common.sampler.ThreadGrouper;
import me.lucko.spark.common.sampler.node.ThreadNode;
+import java.util.ArrayList;
+import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
@@ -50,7 +52,11 @@ public abstract class AbstractDataAggregator implements DataAggregator {
}
@Override
- public Map<String, ThreadNode> getData() {
- return this.threadData;
+ public List<ThreadNode> exportData() {
+ List<ThreadNode> data = new ArrayList<>(this.threadData.values());
+ for (ThreadNode node : data) {
+ node.setThreadLabel(this.threadGrouper.getLabel(node.getThreadGroup()));
+ }
+ return data;
}
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/DataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/DataAggregator.java
index 8b90639..5590a96 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/DataAggregator.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/DataAggregator.java
@@ -21,9 +21,9 @@
package me.lucko.spark.common.sampler.aggregator;
import me.lucko.spark.common.sampler.node.ThreadNode;
-import me.lucko.spark.proto.SparkProtos.SamplerMetadata;
+import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata;
-import java.util.Map;
+import java.util.List;
/**
* Aggregates sampling data.
@@ -35,7 +35,7 @@ public interface DataAggregator {
*
* @return the output data
*/
- Map<String, ThreadNode> getData();
+ List<ThreadNode> exportData();
/**
* Gets metadata about the data aggregator instance.
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncDataAggregator.java
index 594d56e..3de3943 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncDataAggregator.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncDataAggregator.java
@@ -24,7 +24,7 @@ import me.lucko.spark.common.sampler.ThreadGrouper;
import me.lucko.spark.common.sampler.aggregator.AbstractDataAggregator;
import me.lucko.spark.common.sampler.node.StackTraceNode;
import me.lucko.spark.common.sampler.node.ThreadNode;
-import me.lucko.spark.proto.SparkProtos.SamplerMetadata;
+import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata;
/**
* Data aggregator for {@link AsyncSampler}.
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java
index f1d7209..d642a53 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java
@@ -20,13 +20,14 @@
package me.lucko.spark.common.sampler.async;
-import com.google.common.collect.ImmutableSetMultimap;
-import com.google.common.collect.Multimap;
+import com.google.common.collect.ImmutableTable;
+import com.google.common.collect.Table;
import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.util.TemporaryFiles;
import one.profiler.AsyncProfiler;
+import one.profiler.Events;
import java.io.InputStream;
import java.net.URL;
@@ -45,22 +46,31 @@ public enum AsyncProfilerAccess {
/** An instance of the async-profiler Java API. */
private final AsyncProfiler profiler;
+ /** The event to use for profiling */
+ private final ProfilingEvent profilingEvent;
+
/** If profiler is null, contains the reason why setup failed */
private final Exception setupException;
AsyncProfilerAccess() {
AsyncProfiler profiler;
+ ProfilingEvent profilingEvent = null;
Exception setupException = null;
try {
profiler = load();
- ensureCpuEventSupported(profiler);
+ if (isEventSupported(profiler, ProfilingEvent.CPU, false)) {
+ profilingEvent = ProfilingEvent.CPU;
+ } else if (isEventSupported(profiler, ProfilingEvent.WALL, true)) {
+ profilingEvent = ProfilingEvent.WALL;
+ }
} catch (Exception e) {
profiler = null;
setupException = e;
}
this.profiler = profiler;
+ this.profilingEvent = profilingEvent;
this.setupException = setupException;
}
@@ -71,11 +81,18 @@ public enum AsyncProfilerAccess {
return this.profiler;
}
+ public ProfilingEvent getProfilingEvent() {
+ return this.profilingEvent;
+ }
+
public boolean checkSupported(SparkPlatform platform) {
if (this.setupException != null) {
if (this.setupException instanceof UnsupportedSystemException) {
platform.getPlugin().log(Level.INFO, "The async-profiler engine is not supported for your os/arch (" +
this.setupException.getMessage() + "), so the built-in Java engine will be used instead.");
+ } else if (this.setupException instanceof NativeLoadingException && this.setupException.getCause().getMessage().contains("libstdc++")) {
+ platform.getPlugin().log(Level.WARNING, "Unable to initialise the async-profiler engine because libstdc++ is not installed.");
+ platform.getPlugin().log(Level.WARNING, "Please see here for more information: https://spark.lucko.me/docs/misc/Using-async-profiler#install-libstdc");
} else {
platform.getPlugin().log(Level.WARNING, "Unable to initialise the async-profiler engine: " + this.setupException.getMessage());
platform.getPlugin().log(Level.WARNING, "Please see here for more information: https://spark.lucko.me/docs/misc/Using-async-profiler");
@@ -91,18 +108,20 @@ public enum AsyncProfilerAccess {
String os = System.getProperty("os.name").toLowerCase(Locale.ROOT).replace(" ", "");
String arch = System.getProperty("os.arch").toLowerCase(Locale.ROOT);
- Multimap<String, String> supported = ImmutableSetMultimap.<String, String>builder()
- .put("linux", "amd64")
- .put("macosx", "amd64")
- .put("macosx", "aarch64")
+ Table<String, String, String> supported = ImmutableTable.<String, String, String>builder()
+ .put("linux", "amd64", "linux/amd64")
+ .put("linux", "aarch64", "linux/aarch64")
+ .put("macosx", "amd64", "macos")
+ .put("macosx", "aarch64", "macos")
.build();
- if (!supported.containsEntry(os, arch)) {
+ String libPath = supported.get(os, arch);
+ if (libPath == null) {
throw new UnsupportedSystemException(os, arch);
}
// extract the profiler binary from the spark jar file
- String resource = os + "/libasyncProfiler.so";
+ String resource = "spark/" + libPath + "/libasyncProfiler.so";
URL profilerResource = AsyncProfilerAccess.class.getClassLoader().getResource(resource);
if (profilerResource == null) {
throw new IllegalStateException("Could not find " + resource + " in spark jar file");
@@ -118,7 +137,7 @@ public enum AsyncProfilerAccess {
try {
return AsyncProfiler.getInstance(extractPath.toAbsolutePath().toString());
} catch (UnsatisfiedLinkError e) {
- throw new RuntimeException("A runtime error occurred whilst loading the native library", e);
+ throw new NativeLoadingException(e);
}
}
@@ -126,12 +145,37 @@ public enum AsyncProfilerAccess {
- * Checks the {@code profiler} to ensure the CPU event is supported.
+ * Checks whether the given {@code event} is supported by the {@code profiler}.
*
* @param profiler the profiler instance
- * @throws Exception if the event is not supported
+ * @param event the event to check
+ * @param throwException whether to throw an exception when the event is unsupported
+ * @return true if the event is supported
*/
- private static void ensureCpuEventSupported(AsyncProfiler profiler) throws Exception {
- String resp = profiler.execute("check,event=cpu").trim();
- if (!resp.equalsIgnoreCase("ok")) {
- throw new UnsupportedOperationException("CPU event is not supported");
+ private static boolean isEventSupported(AsyncProfiler profiler, ProfilingEvent event, boolean throwException) {
+ try {
+ String resp = profiler.execute("check,event=" + event).trim();
+ if (resp.equalsIgnoreCase("ok")) {
+ return true;
+ } else if (throwException) {
+ throw new IllegalArgumentException(resp);
+ }
+ } catch (Exception e) {
+ if (throwException) {
+ throw new RuntimeException("Event " + event + " is not supported", e);
+ }
+ }
+ return false;
+ }
+
+ enum ProfilingEvent {
+ CPU(Events.CPU),
+ WALL(Events.WALL);
+
+ private final String id;
+
+ ProfilingEvent(String id) {
+ this.id = id;
+ }
+
+ @Override
+ public String toString() {
+ return this.id;
}
}
@@ -140,4 +184,10 @@ public enum AsyncProfilerAccess {
super(os + '/' + arch);
}
}
+
+ private static final class NativeLoadingException extends RuntimeException {
+ public NativeLoadingException(Throwable cause) {
+ super("A runtime error occurred whilst loading the native library", cause);
+ }
+ }
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java
index 1837cbc..5cb7fdc 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java
@@ -22,8 +22,8 @@ package me.lucko.spark.common.sampler.async;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
+import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.command.sender.CommandSender;
-import me.lucko.spark.common.platform.PlatformInfo;
import me.lucko.spark.common.sampler.AbstractSampler;
import me.lucko.spark.common.sampler.ThreadDumper;
import me.lucko.spark.common.sampler.ThreadGrouper;
@@ -32,7 +32,7 @@ import me.lucko.spark.common.sampler.node.MergeMode;
import me.lucko.spark.common.sampler.node.ThreadNode;
import me.lucko.spark.common.util.ClassSourceLookup;
import me.lucko.spark.common.util.TemporaryFiles;
-import me.lucko.spark.proto.SparkProtos;
+import me.lucko.spark.proto.SparkSamplerProtos.SamplerData;
import one.profiler.AsyncProfiler;
@@ -40,10 +40,8 @@ import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
-import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
-import java.util.Map;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
@@ -100,7 +98,7 @@ public class AsyncSampler extends AbstractSampler {
throw new RuntimeException("Unable to create temporary output file", e);
}
- String command = "start,event=cpu,interval=" + this.interval + "us,threads,jfr,file=" + this.outputFile.toString();
+ String command = "start,event=" + AsyncProfilerAccess.INSTANCE.getProfilingEvent() + ",interval=" + this.interval + "us,threads,jfr,file=" + this.outputFile.toString();
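+ // e.g. "start,event=cpu,interval=4000us,threads,jfr,file=/tmp/spark-profile.jfr" (illustrative values)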
if (this.threadDumper instanceof ThreadDumper.Specific) {
command += ",filter";
}
@@ -117,6 +115,7 @@ public class AsyncSampler extends AbstractSampler {
}
}
+ recordInitialGcStats();
scheduleTimeout();
}
@@ -145,7 +144,14 @@ public class AsyncSampler extends AbstractSampler {
*/
@Override
public void stop() {
- this.profiler.stop();
+ try {
+ this.profiler.stop();
+ } catch (IllegalStateException e) {
+ if (!"Profiler is not active".equals(e.getMessage())) { // swallow "not active", rethrow anything else
+ throw e;
+ }
+ }
+
if (this.timeoutExecutor != null) {
this.timeoutExecutor.shutdown();
@@ -154,38 +160,11 @@ public class AsyncSampler extends AbstractSampler {
}
@Override
- public SparkProtos.SamplerData toProto(PlatformInfo platformInfo, CommandSender creator, Comparator<? super Map.Entry<String, ThreadNode>> outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup) {
- final SparkProtos.SamplerMetadata.Builder metadata = SparkProtos.SamplerMetadata.newBuilder()
- .setPlatformMetadata(platformInfo.toData().toProto())
- .setCreator(creator.toData().toProto())
- .setStartTime(this.startTime)
- .setInterval(this.interval)
- .setThreadDumper(this.threadDumper.getMetadata())
- .setDataAggregator(this.dataAggregator.getMetadata());
-
- if (comment != null) {
- metadata.setComment(comment);
- }
-
- SparkProtos.SamplerData.Builder proto = SparkProtos.SamplerData.newBuilder();
- proto.setMetadata(metadata.build());
-
+ public SamplerData toProto(SparkPlatform platform, CommandSender creator, Comparator<ThreadNode> outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup) {
+ SamplerData.Builder proto = SamplerData.newBuilder();
+ writeMetadataToProto(proto, platform, creator, comment, this.dataAggregator);
aggregateOutput();
-
- List<Map.Entry<String, ThreadNode>> data = new ArrayList<>(this.dataAggregator.getData().entrySet());
- data.sort(outputOrder);
-
- ClassSourceLookup.Visitor classSourceVisitor = ClassSourceLookup.createVisitor(classSourceLookup);
-
- for (Map.Entry<String, ThreadNode> entry : data) {
- proto.addThreads(entry.getValue().toProto(mergeMode));
- classSourceVisitor.visit(entry.getValue());
- }
-
- if (classSourceVisitor.hasMappings()) {
- proto.putAllClassSources(classSourceVisitor.getMapping());
- }
-
+ writeDataToProto(proto, this.dataAggregator, outputOrder, mergeMode, classSourceLookup);
return proto.build();
}
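
Note: this hunk and the matching JavaSampler hunk below replace two copies of the same serialisation logic with shared AbstractSampler helpers. A minimal sketch of what writeDataToProto() is expected to do, reconstructed from the deleted lines (the exact AbstractSampler body is not part of this diff):

    protected void writeDataToProto(SamplerData.Builder proto, DataAggregator dataAggregator,
                                    Comparator<ThreadNode> outputOrder, MergeMode mergeMode,
                                    ClassSourceLookup classSourceLookup) {
        List<ThreadNode> data = dataAggregator.exportData(); // now a List<ThreadNode>, see the aggregator hunks below
        data.sort(outputOrder);

        ClassSourceLookup.Visitor classSourceVisitor = ClassSourceLookup.createVisitor(classSourceLookup);
        for (ThreadNode entry : data) {
            proto.addThreads(entry.toProto(mergeMode));
            classSourceVisitor.visit(entry);
        }
        if (classSourceVisitor.hasMappings()) {
            proto.putAllClassSources(classSourceVisitor.getMapping());
        }
    }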
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/JfrReader.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/JfrReader.java
index a705f2d..e0cc4e9 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/JfrReader.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/JfrReader.java
@@ -59,6 +59,7 @@ public class JfrReader implements Closeable {
public final Dictionary<AsyncStackTraceElement> stackFrames = new Dictionary<>(); // spark
public final Map<Integer, String> frameTypes = new HashMap<>();
public final Map<Integer, String> threadStates = new HashMap<>();
+ public final Map<String, String> settings = new HashMap<>();
private int executionSample;
private int nativeMethodSample;
@@ -67,6 +68,8 @@ public class JfrReader implements Closeable {
private int allocationSample;
private int monitorEnter;
private int threadPark;
+ private int activeSetting;
+ private boolean activeSettingHasStack;
public JfrReader(Path path) throws IOException { // spark - Path instead of String
this.ch = FileChannel.open(path, StandardOpenOption.READ); // spark - Path instead of String
@@ -129,6 +132,8 @@ public class JfrReader implements Closeable {
if (cls == null || cls == ContendedLock.class) return (E) readContendedLock(false);
} else if (type == threadPark) {
if (cls == null || cls == ContendedLock.class) return (E) readContendedLock(true);
+ } else if (type == activeSetting) {
+ readActiveSetting();
}
if ((pos += size) <= buf.limit()) {
@@ -170,6 +175,17 @@ public class JfrReader implements Closeable {
return new ContendedLock(time, tid, stackTraceId, duration, classId);
}
+ private void readActiveSetting() {
+ long time = getVarlong();
+ long duration = getVarlong();
+ int tid = getVarint();
+ if (activeSettingHasStack) getVarint(); // skip the stackTrace field when the event schema includes it
+ long id = getVarlong();
+ String name = getString();
+ String value = getString();
+ settings.put(name, value);
+ }
+
private boolean readChunk(int pos) throws IOException {
if (pos + CHUNK_HEADER_SIZE > buf.limit() || buf.getInt(pos) != CHUNK_SIGNATURE) {
throw new IOException("Not a valid JFR file");
@@ -424,6 +440,8 @@ public class JfrReader implements Closeable {
allocationSample = getTypeId("jdk.ObjectAllocationSample");
monitorEnter = getTypeId("jdk.JavaMonitorEnter");
threadPark = getTypeId("jdk.ThreadPark");
+ activeSetting = getTypeId("jdk.ActiveSetting");
+ activeSettingHasStack = activeSetting >= 0 && typesByName.get("jdk.ActiveSetting").field("stackTrace") != null;
}
private int getTypeId(String typeName) {
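
Note: the reader now also parses jdk.ActiveSetting events into the public settings map; activeSettingHasStack guards against the JFR schema difference where the event may or may not carry a stackTrace field, and the unused local reads in readActiveSetting() simply advance the decode cursor over fields spark does not need. A hedged sketch of how a caller could recover the effective profiler settings after a recording (readEvent(null) returning successive events of any type is inferred from the cls == null branches above; the key name is an assumption):

    try (JfrReader reader = new JfrReader(outputFile)) {
        while (reader.readEvent(null) != null) {
            // parsing events populates reader.settings as a side effect
        }
        String interval = reader.settings.get("interval");
    }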
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaDataAggregator.java
index 54d9e1c..cc530d6 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaDataAggregator.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaDataAggregator.java
@@ -27,7 +27,7 @@ import me.lucko.spark.common.sampler.node.StackTraceNode;
import me.lucko.spark.common.sampler.node.ThreadNode;
import java.lang.management.ThreadInfo;
-import java.util.Map;
+import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
@@ -86,7 +86,7 @@ public abstract class JavaDataAggregator extends AbstractDataAggregator {
}
@Override
- public Map<String, ThreadNode> getData() {
+ public List<ThreadNode> exportData() {
// wait for all pending data to be inserted
this.workerPool.shutdown();
try {
@@ -95,7 +95,7 @@ public abstract class JavaDataAggregator extends AbstractDataAggregator {
e.printStackTrace();
}
- return super.getData();
+ return super.exportData();
}
private static boolean isSleeping(ThreadInfo thread) {
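
Note: getData() returned Map<String, ThreadNode>; exportData() now returns List<ThreadNode> because the grouping key has moved onto ThreadNode itself (see the ThreadNode hunk below). Callers therefore sort nodes directly; an illustrative comparator, using getTotalTime() from AbstractNode:

    List<ThreadNode> data = aggregator.exportData();
    data.sort(Comparator.comparingDouble(ThreadNode::getTotalTime).reversed()); // hottest threads first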
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java
index 02d5f01..cfa0a0f 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java
@@ -1,7 +1,6 @@
/*
* This file is part of spark.
*
- * Copyright (C) Albert Pham <http://www.sk89q.com>
* Copyright (c) lucko (Luck) <luck@lucko.me>
* Copyright (c) contributors
*
@@ -23,8 +22,8 @@ package me.lucko.spark.common.sampler.java;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
+import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.command.sender.CommandSender;
-import me.lucko.spark.common.platform.PlatformInfo;
import me.lucko.spark.common.sampler.AbstractSampler;
import me.lucko.spark.common.sampler.ThreadDumper;
import me.lucko.spark.common.sampler.ThreadGrouper;
@@ -32,16 +31,12 @@ import me.lucko.spark.common.sampler.node.MergeMode;
import me.lucko.spark.common.sampler.node.ThreadNode;
import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.common.util.ClassSourceLookup;
-import me.lucko.spark.proto.SparkProtos.SamplerData;
-import me.lucko.spark.proto.SparkProtos.SamplerMetadata;
+import me.lucko.spark.proto.SparkSamplerProtos.SamplerData;
import java.lang.management.ManagementFactory;
import java.lang.management.ThreadInfo;
import java.lang.management.ThreadMXBean;
-import java.util.ArrayList;
import java.util.Comparator;
-import java.util.List;
-import java.util.Map;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
@@ -129,36 +124,10 @@ public class JavaSampler extends AbstractSampler implements Runnable {
}
@Override
- public SamplerData toProto(PlatformInfo platformInfo, CommandSender creator, Comparator<? super Map.Entry<String, ThreadNode>> outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup) {
- final SamplerMetadata.Builder metadata = SamplerMetadata.newBuilder()
- .setPlatformMetadata(platformInfo.toData().toProto())
- .setCreator(creator.toData().toProto())
- .setStartTime(this.startTime)
- .setInterval(this.interval)
- .setThreadDumper(this.threadDumper.getMetadata())
- .setDataAggregator(this.dataAggregator.getMetadata());
-
- if (comment != null) {
- metadata.setComment(comment);
- }
-
+ public SamplerData toProto(SparkPlatform platform, CommandSender creator, Comparator<ThreadNode> outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup) {
SamplerData.Builder proto = SamplerData.newBuilder();
- proto.setMetadata(metadata.build());
-
- List<Map.Entry<String, ThreadNode>> data = new ArrayList<>(this.dataAggregator.getData().entrySet());
- data.sort(outputOrder);
-
- ClassSourceLookup.Visitor classSourceVisitor = ClassSourceLookup.createVisitor(classSourceLookup);
-
- for (Map.Entry<String, ThreadNode> entry : data) {
- proto.addThreads(entry.getValue().toProto(mergeMode));
- classSourceVisitor.visit(entry.getValue());
- }
-
- if (classSourceVisitor.hasMappings()) {
- proto.putAllClassSources(classSourceVisitor.getMapping());
- }
-
+ writeMetadataToProto(proto, platform, creator, comment, this.dataAggregator);
+ writeDataToProto(proto, this.dataAggregator, outputOrder, mergeMode, classSourceLookup);
return proto.build();
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/SimpleDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/SimpleDataAggregator.java
index e7113a1..39e21aa 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/SimpleDataAggregator.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/SimpleDataAggregator.java
@@ -22,7 +22,7 @@ package me.lucko.spark.common.sampler.java;
import me.lucko.spark.common.sampler.ThreadGrouper;
import me.lucko.spark.common.sampler.aggregator.DataAggregator;
-import me.lucko.spark.proto.SparkProtos.SamplerMetadata;
+import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata;
import java.lang.management.ThreadInfo;
import java.util.concurrent.ExecutorService;
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java
index 018a3b8..e817828 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java
@@ -24,12 +24,11 @@ import me.lucko.spark.common.sampler.ThreadGrouper;
import me.lucko.spark.common.sampler.aggregator.DataAggregator;
import me.lucko.spark.common.sampler.node.ThreadNode;
import me.lucko.spark.common.tick.TickHook;
-import me.lucko.spark.proto.SparkProtos.SamplerMetadata;
+import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata;
import java.lang.management.ThreadInfo;
import java.util.ArrayList;
import java.util.List;
-import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
@@ -102,13 +101,13 @@ public class TickedDataAggregator extends JavaDataAggregator {
}
@Override
- public Map<String, ThreadNode> getData() {
+ public List<ThreadNode> exportData() {
// push the current tick
synchronized (this.mutex) {
pushCurrentTick();
}
- return super.getData();
+ return super.exportData();
}
private final class TickList implements Runnable {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java
index 18f67ba..fd2be8d 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java
@@ -1,7 +1,6 @@
/*
* This file is part of spark.
*
- * Copyright (C) Albert Pham <http://www.sk89q.com>
* Copyright (c) lucko (Luck) <luck@lucko.me>
* Copyright (c) contributors
*
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java
index f935fb2..b0d9237 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java
@@ -1,7 +1,6 @@
/*
* This file is part of spark.
*
- * Copyright (C) Albert Pham <http://www.sk89q.com>
* Copyright (c) lucko (Luck) <luck@lucko.me>
* Copyright (c) contributors
*
@@ -22,7 +21,7 @@
package me.lucko.spark.common.sampler.node;
import me.lucko.spark.common.util.MethodDisambiguator;
-import me.lucko.spark.proto.SparkProtos;
+import me.lucko.spark.proto.SparkSamplerProtos;
import org.checkerframework.checker.nullness.qual.Nullable;
@@ -65,8 +64,8 @@ public final class StackTraceNode extends AbstractNode implements Comparable<Sta
return this.description.parentLineNumber;
}
- public SparkProtos.StackTraceNode toProto(MergeMode mergeMode) {
- SparkProtos.StackTraceNode.Builder proto = SparkProtos.StackTraceNode.newBuilder()
+ public SparkSamplerProtos.StackTraceNode toProto(MergeMode mergeMode) {
+ SparkSamplerProtos.StackTraceNode.Builder proto = SparkSamplerProtos.StackTraceNode.newBuilder()
.setTime(getTotalTime())
.setClassName(this.description.className)
.setMethodName(this.description.methodName);
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java
index 5cac33d..ed97443 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java
@@ -20,7 +20,7 @@
package me.lucko.spark.common.sampler.node;
-import me.lucko.spark.proto.SparkProtos;
+import me.lucko.spark.proto.SparkSamplerProtos;
/**
* The root of a sampling stack for a given thread / thread group.
@@ -28,17 +28,34 @@ import me.lucko.spark.proto.SparkProtos;
public final class ThreadNode extends AbstractNode {
/**
- * The name of this thread
+ * The name of this thread / thread group
*/
- private final String threadName;
+ private final String name;
- public ThreadNode(String threadName) {
- this.threadName = threadName;
+ /**
+ * The label used to describe this thread in the viewer
+ */
+ public String label;
+
+ public ThreadNode(String name) {
+ this.name = name;
+ }
+
+ public String getThreadLabel() {
+ return this.label != null ? this.label : this.name;
+ }
+
+ public String getThreadGroup() {
+ return this.name;
+ }
+
+ public void setThreadLabel(String label) {
+ this.label = label;
}
- public SparkProtos.ThreadNode toProto(MergeMode mergeMode) {
- SparkProtos.ThreadNode.Builder proto = SparkProtos.ThreadNode.newBuilder()
- .setName(this.threadName)
+ public SparkSamplerProtos.ThreadNode toProto(MergeMode mergeMode) {
+ SparkSamplerProtos.ThreadNode.Builder proto = SparkSamplerProtos.ThreadNode.newBuilder()
+ .setName(getThreadLabel())
.setTime(getTotalTime());
for (StackTraceNode child : exportChildren(mergeMode)) {
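
Note: ThreadNode now separates the grouping key (name) from an optional viewer label, with getThreadLabel() falling back to the name. Illustrative behaviour (values hypothetical):

    ThreadNode node = new ThreadNode("Server thread pool");
    node.getThreadLabel();  // "Server thread pool" - no label set, falls back to the name
    node.setThreadLabel("Server thread pool (4 threads)");
    node.getThreadLabel();  // "Server thread pool (4 threads)" - written to the proto's name field
    node.getThreadGroup();  // "Server thread pool" - the grouping key is unchanged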
diff --git a/spark-common/src/main/java/me/lucko/spark/common/tick/AbstractTickHook.java b/spark-common/src/main/java/me/lucko/spark/common/tick/AbstractTickHook.java
index a6e8745..2a31e0d 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/tick/AbstractTickHook.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/tick/AbstractTickHook.java
@@ -20,12 +20,12 @@
package me.lucko.spark.common.tick;
-import java.util.HashSet;
import java.util.Set;
+import java.util.concurrent.CopyOnWriteArraySet;
public abstract class AbstractTickHook implements TickHook {
- private final Set<Callback> tasks = new HashSet<>();
+ private final Set<Callback> tasks = new CopyOnWriteArraySet<>();
private int tick = 0;
protected void onTick() {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/tick/AbstractTickReporter.java b/spark-common/src/main/java/me/lucko/spark/common/tick/AbstractTickReporter.java
index 74a814d..431a641 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/tick/AbstractTickReporter.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/tick/AbstractTickReporter.java
@@ -20,11 +20,11 @@
package me.lucko.spark.common.tick;
-import java.util.HashSet;
import java.util.Set;
+import java.util.concurrent.CopyOnWriteArraySet;
public abstract class AbstractTickReporter implements TickReporter {
- private final Set<Callback> tasks = new HashSet<>();
+ private final Set<Callback> tasks = new CopyOnWriteArraySet<>();
protected void onTick(double duration) {
for (Callback r : this.tasks) {
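
Note: both tick abstractions swap HashSet for CopyOnWriteArraySet because callbacks are registered and removed from spark's async worker threads while the server's main thread is iterating in onTick(). A small demonstration of why the snapshot iterator matters (Runnable stands in for the Callback type here; a plain HashSet would throw ConcurrentModificationException):

    Set<Runnable> tasks = new CopyOnWriteArraySet<>();
    tasks.add(() -> System.out.println("tick callback"));
    for (Runnable r : tasks) {
        r.run();
        tasks.remove(r); // safe: the iterator works on a snapshot of the set
    }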
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/BytebinClient.java b/spark-common/src/main/java/me/lucko/spark/common/util/BytebinClient.java
index 29ee5bb..c2ca1b1 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/util/BytebinClient.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/BytebinClient.java
@@ -1,26 +1,21 @@
/*
- * This file is part of bytebin, licensed under the MIT License.
+ * This file is part of spark.
*
* Copyright (c) lucko (Luck) <luck@lucko.me>
* Copyright (c) contributors
*
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
*
- * The above copyright notice and this permission notice shall be included in all
- * copies or substantial portions of the Software.
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
*
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- * SOFTWARE.
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package me.lucko.spark.common.util;
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/ClassSourceLookup.java b/spark-common/src/main/java/me/lucko/spark/common/util/ClassSourceLookup.java
index 42a04f7..bd9ec37 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/util/ClassSourceLookup.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/ClassSourceLookup.java
@@ -26,6 +26,7 @@ import me.lucko.spark.common.sampler.node.ThreadNode;
import org.checkerframework.checker.nullness.qual.Nullable;
import java.io.IOException;
+import java.net.MalformedURLException;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.URLClassLoader;
@@ -87,8 +88,22 @@ public interface ClassSourceLookup {
*/
interface ByUrl extends ClassSourceLookup {
- default String identifyUrl(URL url) throws URISyntaxException {
- return url.getProtocol().equals("file") ? identifyFile(Paths.get(url.toURI())) : null;
+ default String identifyUrl(URL url) throws URISyntaxException, MalformedURLException {
+ Path path = null;
+
+ String protocol = url.getProtocol();
+ if (protocol.equals("file")) {
+ path = Paths.get(url.toURI());
+ } else if (protocol.equals("jar")) {
+ URL innerUrl = new URL(url.getPath());
+ path = Paths.get(innerUrl.getPath().split("!")[0]);
+ }
+
+ if (path != null) {
+ return identifyFile(path.toAbsolutePath().normalize());
+ }
+
+ return null;
}
default String identifyFile(Path path) {
@@ -123,7 +138,7 @@ public interface ClassSourceLookup {
*/
class ByCodeSource implements ClassSourceLookup, ByUrl {
@Override
- public @Nullable String identify(Class<?> clazz) throws URISyntaxException {
+ public @Nullable String identify(Class<?> clazz) throws URISyntaxException, MalformedURLException {
ProtectionDomain protectionDomain = clazz.getProtectionDomain();
if (protectionDomain == null) {
return null;
@@ -148,12 +163,12 @@ public interface ClassSourceLookup {
static Visitor createVisitor(ClassSourceLookup lookup) {
if (lookup == ClassSourceLookup.NO_OP) {
- return NoOpVistitor.INSTANCE; // don't bother!
+ return NoOpVisitor.INSTANCE; // don't bother!
}
return new VisitorImpl(lookup);
}
- enum NoOpVistitor implements Visitor {
+ enum NoOpVisitor implements Visitor {
INSTANCE;
@Override
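
Note: identifyUrl() previously understood only file: URLs; it now also unwraps jar: URLs of the form jar:file:/path/to/x.jar!/pkg/Cls.class, which is how many class loaders report code sources. A worked example with a hypothetical path:

    URL url = new URL("jar:file:/server/mods/foo.jar!/com/example/Foo.class");
    URL inner = new URL(url.getPath());                    // file:/server/mods/foo.jar!/com/example/Foo.class
    Path path = Paths.get(inner.getPath().split("!")[0]);  // /server/mods/foo.jar
    // identifyFile(path.toAbsolutePath().normalize()) can then resolve the owning plugin/mod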
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/Compression.java b/spark-common/src/main/java/me/lucko/spark/common/util/Compression.java
new file mode 100644
index 0000000..9295c25
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/Compression.java
@@ -0,0 +1,100 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.util;
+
+import org.tukaani.xz.LZMA2Options;
+import org.tukaani.xz.LZMAOutputStream;
+import org.tukaani.xz.XZOutputStream;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.function.LongConsumer;
+import java.util.zip.GZIPOutputStream;
+
+public enum Compression {
+ GZIP {
+ @Override
+ public Path compress(Path file, LongConsumer progressHandler) throws IOException {
+ Path compressedFile = file.getParent().resolve(file.getFileName().toString() + ".gz");
+ try (InputStream in = Files.newInputStream(file)) {
+ try (OutputStream out = Files.newOutputStream(compressedFile)) {
+ try (GZIPOutputStream compressionOut = new GZIPOutputStream(out, 1024 * 64)) {
+ copy(in, compressionOut, progressHandler);
+ }
+ }
+ }
+ return compressedFile;
+ }
+ },
+ XZ {
+ @Override
+ public Path compress(Path file, LongConsumer progressHandler) throws IOException {
+ Path compressedFile = file.getParent().resolve(file.getFileName().toString() + ".xz");
+ try (InputStream in = Files.newInputStream(file)) {
+ try (OutputStream out = Files.newOutputStream(compressedFile)) {
+ try (XZOutputStream compressionOut = new XZOutputStream(out, new LZMA2Options())) {
+ copy(in, compressionOut, progressHandler);
+ }
+ }
+ }
+ return compressedFile;
+ }
+ },
+ LZMA {
+ @Override
+ public Path compress(Path file, LongConsumer progressHandler) throws IOException {
+ Path compressedFile = file.getParent().resolve(file.getFileName().toString() + ".lzma");
+ try (InputStream in = Files.newInputStream(file)) {
+ try (OutputStream out = Files.newOutputStream(compressedFile)) {
+ try (LZMAOutputStream compressionOut = new LZMAOutputStream(out, new LZMA2Options(), true)) {
+ copy(in, compressionOut, progressHandler);
+ }
+ }
+ }
+ return compressedFile;
+ }
+ };
+
+ public abstract Path compress(Path file, LongConsumer progressHandler) throws IOException;
+
+ private static long copy(InputStream from, OutputStream to, LongConsumer progress) throws IOException {
+ byte[] buf = new byte[1024 * 64];
+ long total = 0;
+ long iterations = 0;
+ while (true) {
+ int r = from.read(buf);
+ if (r == -1) {
+ break;
+ }
+ to.write(buf, 0, r);
+ total += r;
+
+ // report progress every 5MB
+ if (iterations++ % ((1024 / 64) * 5) == 0) {
+ progress.accept(total);
+ }
+ }
+ return total;
+ }
+}
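
Note: Compression is a new utility enum wrapping three stream codecs behind one interface; the progress callback fires roughly every 5 MiB because the copy buffer is 64 KiB and (1024 / 64) * 5 = 80 reads. A usage sketch (file name and handler are hypothetical):

    Path profile = Paths.get("profile.sparkheap");
    Path gzipped = Compression.GZIP.compress(profile,
            total -> System.out.println("compressed " + total + " bytes so far"));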
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/FormatUtil.java b/spark-common/src/main/java/me/lucko/spark/common/util/FormatUtil.java
index 492d4ea..c4a3d66 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/util/FormatUtil.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/FormatUtil.java
@@ -20,6 +20,11 @@
package me.lucko.spark.common.util;
+import net.kyori.adventure.text.Component;
+import net.kyori.adventure.text.format.TextColor;
+
+import java.util.Locale;
+
public enum FormatUtil {
;
@@ -31,10 +36,30 @@ public enum FormatUtil {
}
public static String formatBytes(long bytes) {
- if (bytes == 0) {
+ if (bytes <= 0) {
return "0 bytes";
}
int sizeIndex = (int) (Math.log(bytes) / Math.log(1024));
- return String.format("%.1f", bytes / Math.pow(1024, sizeIndex)) + " " + SIZE_UNITS[sizeIndex];
+ return String.format(Locale.ENGLISH, "%.1f", bytes / Math.pow(1024, sizeIndex)) + " " + SIZE_UNITS[sizeIndex];
+ }
+
+ public static Component formatBytes(long bytes, TextColor color, String suffix) {
+ String value;
+ String unit;
+
+ if (bytes <= 0) {
+ value = "0";
+ unit = "KB" + suffix;
+ } else {
+ int sizeIndex = (int) (Math.log(bytes) / Math.log(1024));
+ value = String.format(Locale.ENGLISH, "%.1f", bytes / Math.pow(1024, sizeIndex));
+ unit = SIZE_UNITS[sizeIndex] + suffix;
+ }
+
+ return Component.text()
+ .append(Component.text(value, color))
+ .append(Component.space())
+ .append(Component.text(unit))
+ .build();
}
}
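
Note: widening the guard from bytes == 0 to bytes <= 0 matters because Math.log of a negative argument is NaN, which would yield a bogus array index, and the explicit Locale.ENGLISH keeps the decimal separator a '.' regardless of system locale. A worked example, assuming SIZE_UNITS is the usual {"bytes", "KB", "MB", "GB", ...} progression (the array itself is outside this hunk):

    // formatBytes(1_572_864):
    //   sizeIndex = (int) (log(1572864) / log(1024)) = 2
    //   1572864 / 1024^2 = 1.5  ->  "1.5 MB"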
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/LinuxProc.java b/spark-common/src/main/java/me/lucko/spark/common/util/LinuxProc.java
new file mode 100644
index 0000000..7d688d7
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/LinuxProc.java
@@ -0,0 +1,84 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.util;
+
+import org.checkerframework.checker.nullness.qual.NonNull;
+import org.checkerframework.checker.nullness.qual.Nullable;
+
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.Collections;
+import java.util.List;
+
+/**
+ * Utility for reading from /proc/ on Linux systems.
+ */
+public enum LinuxProc {
+
+ /**
+ * Information about the system CPU.
+ */
+ CPUINFO("/proc/cpuinfo"),
+
+ /**
+ * Information about the system memory.
+ */
+ MEMINFO("/proc/meminfo"),
+
+ /**
+ * Information about the system network usage.
+ */
+ NET_DEV("/proc/net/dev");
+
+ private final Path path;
+
+ LinuxProc(String path) {
+ this.path = resolvePath(path);
+ }
+
+ private static @Nullable Path resolvePath(String path) {
+ try {
+ Path p = Paths.get(path);
+ if (Files.isReadable(p)) {
+ return p;
+ }
+ } catch (Exception e) {
+ // ignore
+ }
+ return null;
+ }
+
+ public @NonNull List<String> read() {
+ if (this.path != null) {
+ try {
+ return Files.readAllLines(this.path, StandardCharsets.UTF_8);
+ } catch (IOException e) {
+ // ignore
+ }
+ }
+
+ return Collections.emptyList();
+ }
+
+}
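
Note: LinuxProc centralises reads from /proc and degrades to an empty list on non-Linux systems or unreadable paths, so callers need no OS checks. A usage sketch ("model name" is how /proc/cpuinfo labels the CPU model; the parsing here is illustrative):

    String model = LinuxProc.CPUINFO.read().stream()
            .filter(line -> line.startsWith("model name"))
            .map(line -> line.split(":", 2)[1].trim())
            .findFirst()
            .orElse("unknown"); // empty list off-Linux, so this falls through cleanly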
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/RollingAverage.java b/spark-common/src/main/java/me/lucko/spark/common/util/RollingAverage.java
index 87c41a4..65753bc 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/util/RollingAverage.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/RollingAverage.java
@@ -21,6 +21,7 @@
package me.lucko.spark.common.util;
import me.lucko.spark.api.statistic.misc.DoubleAverageInfo;
+import me.lucko.spark.proto.SparkProtos;
import java.math.BigDecimal;
import java.math.RoundingMode;
@@ -39,6 +40,12 @@ public class RollingAverage implements DoubleAverageInfo {
this.samples = new ArrayDeque<>(this.windowSize + 1);
}
+ public int getSamples() {
+ synchronized (this) {
+ return this.samples.size();
+ }
+ }
+
public void add(BigDecimal num) {
synchronized (this) {
this.total = this.total.add(num);
@@ -105,4 +112,14 @@ public class RollingAverage implements DoubleAverageInfo {
return sortedSamples[rank].doubleValue();
}
+ public SparkProtos.RollingAverageValues toProto() {
+ return SparkProtos.RollingAverageValues.newBuilder()
+ .setMean(mean())
+ .setMax(max())
+ .setMin(min())
+ .setMedian(median())
+ .setPercentile95(percentile95th())
+ .build();
+ }
+
}
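
Note: toProto() bundles the five summary statistics of the rolling window into the new RollingAverageValues message (defined in the spark.proto hunk below). A usage sketch; the int-window constructor is implied by the windowSize field above:

    RollingAverage mspt = new RollingAverage(120);
    mspt.add(BigDecimal.valueOf(48.3));
    mspt.add(BigDecimal.valueOf(51.7));
    SparkProtos.RollingAverageValues proto = mspt.toProto(); // mean/max/min/median/p95 in one message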
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/SparkThreadFactory.java b/spark-common/src/main/java/me/lucko/spark/common/util/SparkThreadFactory.java
new file mode 100644
index 0000000..156fa0d
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/SparkThreadFactory.java
@@ -0,0 +1,49 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.util;
+
+import java.util.concurrent.ThreadFactory;
+import java.util.concurrent.atomic.AtomicInteger;
+
+public class SparkThreadFactory implements ThreadFactory, Thread.UncaughtExceptionHandler {
+ private static final AtomicInteger poolNumber = new AtomicInteger(1);
+ private final AtomicInteger threadNumber = new AtomicInteger(1);
+ private final String namePrefix;
+
+ public SparkThreadFactory() {
+ this.namePrefix = "spark-worker-pool-" +
+ poolNumber.getAndIncrement() +
+ "-thread-";
+ }
+
+ public Thread newThread(Runnable r) {
+ Thread t = new Thread(r, this.namePrefix + this.threadNumber.getAndIncrement());
+ t.setUncaughtExceptionHandler(this);
+ t.setDaemon(true);
+ return t;
+ }
+
+ @Override
+ public void uncaughtException(Thread t, Throwable e) {
+ System.err.println("Uncaught exception thrown by thread " + t.getName());
+ e.printStackTrace();
+ }
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/StatisticFormatter.java b/spark-common/src/main/java/me/lucko/spark/common/util/StatisticFormatter.java
new file mode 100644
index 0000000..22ee9bb
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/StatisticFormatter.java
@@ -0,0 +1,189 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.util;
+
+import com.google.common.base.Strings;
+
+import net.kyori.adventure.text.Component;
+import net.kyori.adventure.text.TextComponent;
+import net.kyori.adventure.text.format.TextColor;
+
+import java.lang.management.MemoryUsage;
+import java.util.Locale;
+
+import static net.kyori.adventure.text.Component.text;
+import static net.kyori.adventure.text.format.NamedTextColor.DARK_GRAY;
+import static net.kyori.adventure.text.format.NamedTextColor.GRAY;
+import static net.kyori.adventure.text.format.NamedTextColor.GREEN;
+import static net.kyori.adventure.text.format.NamedTextColor.RED;
+import static net.kyori.adventure.text.format.NamedTextColor.YELLOW;
+
+public enum StatisticFormatter {
+ ;
+
+ private static final String BAR_TRUE_CHARACTER = "┃";
+ private static final String BAR_FALSE_CHARACTER = "╻";
+
+ public static TextComponent formatTps(double tps) {
+ TextColor color;
+ if (tps > 18.0) {
+ color = GREEN;
+ } else if (tps > 16.0) {
+ color = YELLOW;
+ } else {
+ color = RED;
+ }
+
+ return text((tps > 20.0 ? "*" : "") + Math.min(Math.round(tps * 100.0) / 100.0, 20.0), color);
+ }
+
+ public static TextComponent formatTickDurations(RollingAverage average) {
+ return text()
+ .append(formatTickDuration(average.min()))
+ .append(text('/', GRAY))
+ .append(formatTickDuration(average.median()))
+ .append(text('/', GRAY))
+ .append(formatTickDuration(average.percentile95th()))
+ .append(text('/', GRAY))
+ .append(formatTickDuration(average.max()))
+ .build();
+ }
+
+ public static TextComponent formatTickDuration(double duration) {
+ TextColor color;
+ if (duration >= 50d) {
+ color = RED;
+ } else if (duration >= 40d) {
+ color = YELLOW;
+ } else {
+ color = GREEN;
+ }
+
+ return text(String.format(Locale.ENGLISH, "%.1f", duration), color);
+ }
+
+ public static TextComponent formatCpuUsage(double usage) {
+ TextColor color;
+ if (usage > 0.9) {
+ color = RED;
+ } else if (usage > 0.65) {
+ color = YELLOW;
+ } else {
+ color = GREEN;
+ }
+
+ return text(FormatUtil.percent(usage, 1d), color);
+ }
+
+ public static TextComponent formatPingRtts(double min, double median, double percentile95th, double max) {
+ return text()
+ .append(formatPingRtt(min))
+ .append(text('/', GRAY))
+ .append(formatPingRtt(median))
+ .append(text('/', GRAY))
+ .append(formatPingRtt(percentile95th))
+ .append(text('/', GRAY))
+ .append(formatPingRtt(max))
+ .build();
+ }
+
+ public static TextComponent formatPingRtt(double ping) {
+ TextColor color;
+ if (ping >= 200) {
+ color = RED;
+ } else if (ping >= 100) {
+ color = YELLOW;
+ } else {
+ color = GREEN;
+ }
+
+ return text((int) Math.ceil(ping), color);
+ }
+
+ public static TextComponent generateMemoryUsageDiagram(MemoryUsage usage, int length) {
+ double used = usage.getUsed();
+ double committed = usage.getCommitted();
+ double max = usage.getMax();
+
+ int usedChars = (int) ((used * length) / max);
+ int committedChars = (int) ((committed * length) / max);
+
+ TextComponent.Builder line = text().content(Strings.repeat(BAR_TRUE_CHARACTER, usedChars)).color(YELLOW);
+ if (committedChars > usedChars) {
+ line.append(text(Strings.repeat(BAR_FALSE_CHARACTER, (committedChars - usedChars) - 1), GRAY));
+ line.append(Component.text(BAR_FALSE_CHARACTER, RED));
+ }
+ if (length > committedChars) {
+ line.append(text(Strings.repeat(BAR_FALSE_CHARACTER, (length - committedChars)), GRAY));
+ }
+
+ return text()
+ .append(text("[", DARK_GRAY))
+ .append(line.build())
+ .append(text("]", DARK_GRAY))
+ .build();
+ }
+
+ public static TextComponent generateMemoryPoolDiagram(MemoryUsage usage, MemoryUsage collectionUsage, int length) {
+ double used = usage.getUsed();
+ double collectionUsed = used;
+ if (collectionUsage != null) {
+ collectionUsed = collectionUsage.getUsed();
+ }
+ double committed = usage.getCommitted();
+ double max = usage.getMax();
+
+ int usedChars = (int) ((used * length) / max);
+ int collectionUsedChars = (int) ((collectionUsed * length) / max);
+ int committedChars = (int) ((committed * length) / max);
+
+ TextComponent.Builder line = text().content(Strings.repeat(BAR_TRUE_CHARACTER, collectionUsedChars)).color(YELLOW);
+
+ if (usedChars > collectionUsedChars) {
+ line.append(Component.text(BAR_TRUE_CHARACTER, RED));
+ line.append(text(Strings.repeat(BAR_TRUE_CHARACTER, (usedChars - collectionUsedChars) - 1), YELLOW));
+ }
+ if (committedChars > usedChars) {
+ line.append(text(Strings.repeat(BAR_FALSE_CHARACTER, (committedChars - usedChars) - 1), GRAY));
+ line.append(Component.text(BAR_FALSE_CHARACTER, YELLOW));
+ }
+ if (length > committedChars) {
+ line.append(text(Strings.repeat(BAR_FALSE_CHARACTER, (length - committedChars)), GRAY));
+ }
+
+ return text()
+ .append(text("[", DARK_GRAY))
+ .append(line.build())
+ .append(text("]", DARK_GRAY))
+ .build();
+ }
+
+ public static TextComponent generateDiskUsageDiagram(double used, double max, int length) {
+ int usedChars = (int) ((used * length) / max);
+ int freeChars = length - usedChars;
+ return text()
+ .append(text("[", DARK_GRAY))
+ .append(text(Strings.repeat(BAR_TRUE_CHARACTER, usedChars), YELLOW))
+ .append(text(Strings.repeat(BAR_FALSE_CHARACTER, freeChars), GRAY))
+ .append(text("]", DARK_GRAY))
+ .build();
+ }
+}
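
Note: the memory diagrams scale used/committed against max to a fixed character length, drawing used space with BAR_TRUE_CHARACTER and the remainder with BAR_FALSE_CHARACTER, plus one red character marking the committed boundary. A worked example for generateMemoryUsageDiagram with length = 10:

    // used = 3 GiB, committed = 6 GiB, max = 10 GiB
    //   usedChars      = (3 * 10) / 10 = 3   -> 3 yellow "┃"
    //   committedChars = (6 * 10) / 10 = 6   -> 2 gray "╻" + 1 red "╻" boundary marker
    //   remainder      = 10 - 6 = 4          -> 4 gray "╻"
    // rendered as: [┃┃┃╻╻╻╻╻╻╻] wrapped in dark-gray brackets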
diff --git a/spark-common/src/main/proto/spark/spark.proto b/spark-common/src/main/proto/spark/spark.proto
index 4305a51..ec0aa88 100644
--- a/spark-common/src/main/proto/spark/spark.proto
+++ b/spark-common/src/main/proto/spark/spark.proto
@@ -5,34 +5,17 @@ package spark;
option java_package = "me.lucko.spark.proto";
option java_outer_classname = "SparkProtos";
-message CommandSenderMetadata {
- Type type = 1;
- string name = 2;
- string unique_id = 3;
-
- enum Type {
- OTHER = 0;
- PLAYER = 1;
- }
-}
-
-message MemoryUsage {
- int64 used = 1;
- int64 committed = 2;
- int64 max = 3;
-}
-
message PlatformMetadata {
Type type = 1;
string name = 2;
string version = 3;
string minecraft_version = 4; // optional
-
- int32 n_cpus = 5;
- MemoryUsage heapUsage = 6;
-
int32 spark_version = 7;
+ // replaced
+ reserved 5, 6;
+ reserved "n_cpus", "heap_usage";
+
enum Type {
SERVER = 0;
CLIENT = 1;
@@ -40,80 +23,125 @@ message PlatformMetadata {
}
}
-message HeapData {
- HeapMetadata metadata = 1;
- repeated HeapEntry entries = 2;
-}
+message SystemStatistics {
+ Cpu cpu = 1;
+ Memory memory = 2;
+ map<string, Gc> gc = 3;
+ Disk disk = 4;
+ Os os = 5;
+ Java java = 6;
+ int64 uptime = 7;
+ map<string, NetInterface> net = 8;
+
+ message Cpu {
+ int32 threads = 1;
+ Usage process_usage = 2;
+ Usage system_usage = 3;
+ string model_name = 4; // optional
+
+ message Usage {
+ double last1m = 1;
+ double last15m = 2;
+ }
+ }
-message HeapMetadata {
- CommandSenderMetadata creator = 1;
- PlatformMetadata platform_metadata = 2;
-}
+ message Memory {
+ MemoryPool physical = 1;
+ MemoryPool swap = 2;
-message HeapEntry {
- int32 order = 1;
- int32 instances = 2;
- int64 size = 3;
- string type = 4;
-}
+ message MemoryPool {
+ int64 used = 1;
+ int64 total = 2;
+ }
+ }
-message SamplerData {
- SamplerMetadata metadata = 1;
- repeated ThreadNode threads = 2;
- map<string, string> class_sources = 3; // optional
+ message Gc {
+ int64 total = 1;
+ double avg_time = 2;
+ double avg_frequency = 3;
+ }
+
+ message Disk {
+ int64 used = 1;
+ int64 total = 2;
+ }
+
+ message Os {
+ string arch = 1;
+ string name = 2;
+ string version = 3;
+ }
+
+ message Java {
+ string vendor = 1;
+ string version = 2;
+ string vendor_version = 3;
+ string vm_args = 4;
+ }
+
+ message NetInterface {
+ RollingAverageValues rx_bytes_per_second = 1;
+ RollingAverageValues tx_bytes_per_second = 2;
+ RollingAverageValues rx_packets_per_second = 3;
+ RollingAverageValues tx_packets_per_second = 4;
+ }
}
-message SamplerMetadata {
- CommandSenderMetadata creator = 1;
- int64 start_time = 2;
- int32 interval = 3;
- ThreadDumper thread_dumper = 4;
- DataAggregator data_aggregator = 5;
- string comment = 6;
- PlatformMetadata platform_metadata = 7;
-
- message ThreadDumper {
- Type type = 1;
- repeated int64 ids = 2; // optional
- repeated string patterns = 3; // optional
-
- enum Type {
- ALL = 0;
- SPECIFIC = 1;
- REGEX = 2;
+message PlatformStatistics {
+ Memory memory = 1;
+ map<string, Gc> gc = 2;
+ int64 uptime = 3;
+ Tps tps = 4; // optional
+ Mspt mspt = 5; // optional
+ Ping ping = 6; // optional
+ int64 player_count = 7;
+
+ message Memory {
+ MemoryPool heap = 1;
+
+ message MemoryPool {
+ int64 used = 1;
+ int64 total = 2;
}
}
- message DataAggregator {
- Type type = 1;
- ThreadGrouper thread_grouper = 2;
- int64 tick_length_threshold = 3; // optional
+ message Gc {
+ int64 total = 1;
+ double avg_time = 2;
+ double avg_frequency = 3;
+ }
- enum Type {
- SIMPLE = 0;
- TICKED = 1;
- }
+ message Tps {
+ double last1m = 1;
+ double last5m = 2;
+ double last15m = 3;
+ }
- enum ThreadGrouper {
- BY_NAME = 0;
- BY_POOL = 1;
- AS_ONE = 2;
- }
+ message Mspt {
+ RollingAverageValues last1m = 1;
+ RollingAverageValues last5m = 2;
+ }
+
+ message Ping {
+ RollingAverageValues last15m = 1;
}
}
-message StackTraceNode {
- double time = 1;
- repeated StackTraceNode children = 2;
- string class_name = 3;
- string method_name = 4;
- int32 parent_line_number = 5; // optional
- int32 line_number = 6; // optional
- string method_desc = 7; // optional
+message RollingAverageValues {
+ double mean = 1;
+ double max = 2;
+ double min = 3;
+ double median = 4;
+ double percentile95 = 5;
}
-message ThreadNode {
- string name = 1;
- double time = 2;
- repeated StackTraceNode children = 3;
+message CommandSenderMetadata {
+ Type type = 1;
+ string name = 2;
+ string unique_id = 3;
+
+ enum Type {
+ OTHER = 0;
+ PLAYER = 1;
+ }
}
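
Note: the heap and sampler messages move into their own files below, and PlatformMetadata marks the old n_cpus/heap_usage fields as reserved so their tags and names can never be reused by a future field; old payloads that still contain them are simply skipped on decode. A hypothetical consumer of the replacement SystemStatistics message, using the standard protobuf-generated Java API (accessor names follow protobuf's usual codegen and are assumed here):

    SparkProtos.SystemStatistics stats = SparkProtos.SystemStatistics.parseFrom(bytes);
    long uptime = stats.getUptime();
    long physicalUsed = stats.getMemory().getPhysical().getUsed();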
diff --git a/spark-common/src/main/proto/spark/spark_heap.proto b/spark-common/src/main/proto/spark/spark_heap.proto
new file mode 100644
index 0000000..59f2b85
--- /dev/null
+++ b/spark-common/src/main/proto/spark/spark_heap.proto
@@ -0,0 +1,27 @@
+syntax = "proto3";
+
+package spark;
+
+import "spark/spark.proto";
+
+option java_package = "me.lucko.spark.proto";
+option java_outer_classname = "SparkHeapProtos";
+
+message HeapData {
+ HeapMetadata metadata = 1;
+ repeated HeapEntry entries = 2;
+}
+
+message HeapMetadata {
+ CommandSenderMetadata creator = 1;
+ PlatformMetadata platform_metadata = 2;
+ PlatformStatistics platform_statistics = 3;
+ SystemStatistics system_statistics = 4;
+}
+
+message HeapEntry {
+ int32 order = 1;
+ int32 instances = 2;
+ int64 size = 3;
+ string type = 4;
+}
diff --git a/spark-common/src/main/proto/spark/spark_sampler.proto b/spark-common/src/main/proto/spark/spark_sampler.proto
new file mode 100644
index 0000000..51bdd64
--- /dev/null
+++ b/spark-common/src/main/proto/spark/spark_sampler.proto
@@ -0,0 +1,73 @@
+syntax = "proto3";
+
+package spark;
+
+import "spark/spark.proto";
+
+option java_package = "me.lucko.spark.proto";
+option java_outer_classname = "SparkSamplerProtos";
+
+message SamplerData {
+ SamplerMetadata metadata = 1;
+ repeated ThreadNode threads = 2;
+ map<string, string> class_sources = 3; // optional
+}
+
+message SamplerMetadata {
+ CommandSenderMetadata creator = 1;
+ int64 start_time = 2;
+ int32 interval = 3;
+ ThreadDumper thread_dumper = 4;
+ DataAggregator data_aggregator = 5;
+ string comment = 6;
+ PlatformMetadata platform_metadata = 7;
+ PlatformStatistics platform_statistics = 8;
+ SystemStatistics system_statistics = 9;
+ map<string, string> server_configurations = 10;
+ int64 end_time = 11;
+
+ message ThreadDumper {
+ Type type = 1;
+ repeated int64 ids = 2; // optional
+ repeated string patterns = 3; // optional
+
+ enum Type {
+ ALL = 0;
+ SPECIFIC = 1;
+ REGEX = 2;
+ }
+ }
+
+ message DataAggregator {
+ Type type = 1;
+ ThreadGrouper thread_grouper = 2;
+ int64 tick_length_threshold = 3; // optional
+
+ enum Type {
+ SIMPLE = 0;
+ TICKED = 1;
+ }
+
+ enum ThreadGrouper {
+ BY_NAME = 0;
+ BY_POOL = 1;
+ AS_ONE = 2;
+ }
+ }
+}
+
+message ThreadNode {
+ string name = 1;
+ double time = 2;
+ repeated StackTraceNode children = 3;
+}
+
+message StackTraceNode {
+ double time = 1;
+ repeated StackTraceNode children = 2;
+ string class_name = 3;
+ string method_name = 4;
+ int32 parent_line_number = 5; // optional
+ int32 line_number = 6; // optional
+ string method_desc = 7; // optional
+}
diff --git a/spark-common/src/main/resources/linux/libasyncProfiler.so b/spark-common/src/main/resources/linux/libasyncProfiler.so
deleted file mode 100755
index ddee900..0000000
--- a/spark-common/src/main/resources/linux/libasyncProfiler.so
+++ /dev/null
Binary files differ
diff --git a/spark-common/src/main/resources/macosx/libasyncProfiler.so b/spark-common/src/main/resources/macosx/libasyncProfiler.so
deleted file mode 100755
index 75daf6e..0000000
--- a/spark-common/src/main/resources/macosx/libasyncProfiler.so
+++ /dev/null
Binary files differ
diff --git a/spark-common/src/main/resources/spark/linux/aarch64/libasyncProfiler.so b/spark-common/src/main/resources/spark/linux/aarch64/libasyncProfiler.so
new file mode 100755
index 0000000..35f83b2
--- /dev/null
+++ b/spark-common/src/main/resources/spark/linux/aarch64/libasyncProfiler.so
Binary files differ
diff --git a/spark-common/src/main/resources/spark/linux/amd64/libasyncProfiler.so b/spark-common/src/main/resources/spark/linux/amd64/libasyncProfiler.so
new file mode 100755
index 0000000..edbf103
--- /dev/null
+++ b/spark-common/src/main/resources/spark/linux/amd64/libasyncProfiler.so
Binary files differ
diff --git a/spark-common/src/main/resources/spark/macos/libasyncProfiler.so b/spark-common/src/main/resources/spark/macos/libasyncProfiler.so
new file mode 100755
index 0000000..ab818e9
--- /dev/null
+++ b/spark-common/src/main/resources/spark/macos/libasyncProfiler.so
Binary files differ
diff --git a/spark-fabric/build.gradle b/spark-fabric/build.gradle
index d7e87cd..9da8e01 100644
--- a/spark-fabric/build.gradle
+++ b/spark-fabric/build.gradle
@@ -1,7 +1,7 @@
import net.fabricmc.loom.task.RemapJarTask
plugins {
- id 'fabric-loom' version '0.10.+'
+ id 'fabric-loom' version '0.12.+'
id 'com.github.johnrengelman.shadow' version '7.0.0'
}
@@ -28,19 +28,19 @@ configurations {
dependencies {
// https://modmuss50.me/fabric.html
- minecraft 'com.mojang:minecraft:1.18.1'
- mappings 'net.fabricmc:yarn:1.18.1+build.9:v2'
- modImplementation 'net.fabricmc:fabric-loader:0.12.12'
+ minecraft 'com.mojang:minecraft:1.19'
+ mappings 'net.fabricmc:yarn:1.19+build.1:v2'
+ modImplementation 'net.fabricmc:fabric-loader:0.14.7'
Set<String> apiModules = [
"fabric-api-base",
- "fabric-command-api-v1",
+ "fabric-command-api-v2",
"fabric-lifecycle-events-v1"
]
// Add each module as a dependency
apiModules.forEach {
- modImplementation(fabricApi.module(it, '0.45.0+1.18'))
+ modImplementation(fabricApi.module(it, '0.55.3+1.19'))
}
include(modImplementation('me.lucko:fabric-permissions-api:0.1-SNAPSHOT'))
@@ -67,7 +67,7 @@ processResources {
}
shadowJar {
- archiveFileName = 'spark-fabric-dev.jar'
+ archiveFileName = "spark-fabric-${project.pluginVersion}-dev.jar"
configurations = [project.configurations.shade]
relocate 'okio', 'me.lucko.spark.lib.okio'
@@ -89,7 +89,7 @@ task remappedShadowJar(type: RemapJarTask) {
dependsOn tasks.shadowJar
input = tasks.shadowJar.archiveFile
addNestedDependencies = true
- archiveFileName = 'spark-fabric.jar'
+ archiveFileName = "spark-${project.pluginVersion}-fabric.jar"
}
tasks.assemble.dependsOn tasks.remappedShadowJar
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricClassSourceLookup.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricClassSourceLookup.java
index fa59079..7030680 100644
--- a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricClassSourceLookup.java
+++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricClassSourceLookup.java
@@ -20,25 +20,47 @@
package me.lucko.spark.fabric;
+import com.google.common.collect.ImmutableMap;
+
import me.lucko.spark.common.util.ClassSourceLookup;
import net.fabricmc.loader.api.FabricLoader;
+import net.fabricmc.loader.api.ModContainer;
import java.nio.file.Path;
+import java.util.Collection;
+import java.util.Map;
public class FabricClassSourceLookup extends ClassSourceLookup.ByCodeSource {
private final Path modsDirectory;
+ private final Map<Path, String> pathToModMap;
public FabricClassSourceLookup() {
- this.modsDirectory = FabricLoader.getInstance().getGameDir().resolve("mods").toAbsolutePath().normalize();
+ FabricLoader loader = FabricLoader.getInstance();
+ this.modsDirectory = loader.getGameDir().resolve("mods").toAbsolutePath().normalize();
+ this.pathToModMap = constructPathToModIdMap(loader.getAllMods());
}
@Override
public String identifyFile(Path path) {
+ String id = this.pathToModMap.get(path);
+ if (id != null) {
+ return id;
+ }
+
if (!path.startsWith(this.modsDirectory)) {
return null;
}
return super.identifyFileName(this.modsDirectory.relativize(path).toString());
}
+
+ private static Map<Path, String> constructPathToModIdMap(Collection<ModContainer> mods) {
+ ImmutableMap.Builder<Path, String> builder = ImmutableMap.builder();
+ for (ModContainer mod : mods) {
+ Path path = mod.getRootPath().toAbsolutePath().normalize();
+ builder.put(path, mod.getMetadata().getId());
+ }
+ return builder.build();
+ }
}
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricCommandSender.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricCommandSender.java
index 14b3442..2138dbe 100644
--- a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricCommandSender.java
+++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricCommandSender.java
@@ -34,8 +34,6 @@ import net.minecraft.text.Text;
import java.util.UUID;
public class FabricCommandSender extends AbstractCommandSender<CommandOutput> {
- private static final UUID NIL_UUID = new UUID(0, 0);
-
private final FabricSparkPlugin plugin;
public FabricCommandSender(CommandOutput commandOutput, FabricSparkPlugin plugin) {
@@ -67,7 +65,7 @@ public class FabricCommandSender extends AbstractCommandSender<CommandOutput> {
@Override
public void sendMessage(Component message) {
Text component = Text.Serializer.fromJson(GsonComponentSerializer.gson().serialize(message));
- super.delegate.sendSystemMessage(component, NIL_UUID);
+ super.delegate.sendMessage(component);
}
@Override
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricPlatformInfo.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricPlatformInfo.java
index 93ccda4..e298121 100644
--- a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricPlatformInfo.java
+++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricPlatformInfo.java
@@ -20,13 +20,13 @@
package me.lucko.spark.fabric;
-import me.lucko.spark.common.platform.AbstractPlatformInfo;
+import me.lucko.spark.common.platform.PlatformInfo;
import net.fabricmc.loader.api.FabricLoader;
import java.util.Optional;
-public class FabricPlatformInfo extends AbstractPlatformInfo {
+public class FabricPlatformInfo implements PlatformInfo {
private final Type type;
public FabricPlatformInfo(Type type) {
@@ -35,7 +35,7 @@ public class FabricPlatformInfo extends AbstractPlatformInfo {
@Override
public Type getType() {
- return type;
+ return this.type;
}
@Override
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricPlayerPingProvider.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricPlayerPingProvider.java
new file mode 100644
index 0000000..bae6c41
--- /dev/null
+++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricPlayerPingProvider.java
@@ -0,0 +1,47 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.fabric;
+
+import com.google.common.collect.ImmutableMap;
+
+import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
+
+import net.minecraft.server.MinecraftServer;
+import net.minecraft.server.network.ServerPlayerEntity;
+
+import java.util.Map;
+
+public class FabricPlayerPingProvider implements PlayerPingProvider {
+ private final MinecraftServer server;
+
+ public FabricPlayerPingProvider(MinecraftServer server) {
+ this.server = server;
+ }
+
+ @Override
+ public Map<String, Integer> poll() {
+ ImmutableMap.Builder<String, Integer> builder = ImmutableMap.builder();
+ for (ServerPlayerEntity player : this.server.getPlayerManager().getPlayerList()) {
+ builder.put(player.getGameProfile().getName(), player.pingMilliseconds);
+ }
+ return builder.build();
+ }
+}
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricSparkMod.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricSparkMod.java
index 1dd4fd8..ad419f7 100644
--- a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricSparkMod.java
+++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricSparkMod.java
@@ -20,14 +20,21 @@
package me.lucko.spark.fabric;
+import com.mojang.brigadier.CommandDispatcher;
+
import me.lucko.spark.fabric.plugin.FabricClientSparkPlugin;
import me.lucko.spark.fabric.plugin.FabricServerSparkPlugin;
import net.fabricmc.api.ModInitializer;
+import net.fabricmc.fabric.api.command.v2.CommandRegistrationCallback;
import net.fabricmc.fabric.api.event.lifecycle.v1.ServerLifecycleEvents;
import net.fabricmc.loader.api.FabricLoader;
import net.fabricmc.loader.api.ModContainer;
import net.minecraft.client.MinecraftClient;
+import net.minecraft.command.CommandRegistryAccess;
+import net.minecraft.server.MinecraftServer;
+import net.minecraft.server.command.CommandManager.RegistrationEnvironment;
+import net.minecraft.server.command.ServerCommandSource;
import java.nio.file.Path;
import java.util.Objects;
@@ -38,6 +45,8 @@ public class FabricSparkMod implements ModInitializer {
private ModContainer container;
private Path configDirectory;
+ private FabricServerSparkPlugin activeServerPlugin = null;
+
@Override
public void onInitialize() {
FabricSparkMod.mod = this;
@@ -47,16 +56,36 @@ public class FabricSparkMod implements ModInitializer {
.orElseThrow(() -> new IllegalStateException("Unable to get container for spark"));
this.configDirectory = loader.getConfigDir().resolve("spark");
- // load hooks
- ServerLifecycleEvents.SERVER_STARTING.register(server -> FabricServerSparkPlugin.register(this, server));
+ // server event hooks
+ ServerLifecycleEvents.SERVER_STARTING.register(this::initializeServer);
+ ServerLifecycleEvents.SERVER_STOPPING.register(this::onServerStopping);
+ CommandRegistrationCallback.EVENT.register(this::onServerCommandRegister);
}
- // called be entrypoint defined in fabric.mod.json
+ // client (called be entrypoint defined in fabric.mod.json)
public static void initializeClient() {
Objects.requireNonNull(FabricSparkMod.mod, "mod");
FabricClientSparkPlugin.register(FabricSparkMod.mod, MinecraftClient.getInstance());
}
+ // server
+ public void initializeServer(MinecraftServer server) {
+ this.activeServerPlugin = FabricServerSparkPlugin.register(this, server);
+ }
+
+ public void onServerStopping(MinecraftServer stoppingServer) {
+ if (this.activeServerPlugin != null) {
+ this.activeServerPlugin.disable();
+ this.activeServerPlugin = null;
+ }
+ }
+
+ public void onServerCommandRegister(CommandDispatcher<ServerCommandSource> dispatcher, CommandRegistryAccess access, RegistrationEnvironment env) {
+ if (this.activeServerPlugin != null) {
+ this.activeServerPlugin.registerCommands(dispatcher);
+ }
+ }
+
public String getVersion() {
return this.container.getMetadata().getVersion().getFriendlyString();
}
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientPlayerEntityMixin.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientPlayerEntityMixin.java
deleted file mode 100644
index 495b213..0000000
--- a/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientPlayerEntityMixin.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- * This file is part of spark.
- *
- * Copyright (c) lucko (Luck) <luck@lucko.me>
- * Copyright (c) contributors
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-package me.lucko.spark.fabric.mixin;
-
-import com.mojang.authlib.GameProfile;
-
-import me.lucko.spark.fabric.FabricSparkGameHooks;
-
-import net.minecraft.client.network.AbstractClientPlayerEntity;
-import net.minecraft.client.network.ClientPlayerEntity;
-import net.minecraft.client.world.ClientWorld;
-
-import org.spongepowered.asm.mixin.Mixin;
-import org.spongepowered.asm.mixin.injection.At;
-import org.spongepowered.asm.mixin.injection.Inject;
-import org.spongepowered.asm.mixin.injection.callback.CallbackInfo;
-import org.spongepowered.asm.mixin.injection.callback.LocalCapture;
-
-@Mixin(ClientPlayerEntity.class)
-public abstract class ClientPlayerEntityMixin extends AbstractClientPlayerEntity {
-
- public ClientPlayerEntityMixin(ClientWorld clientWorld_1, GameProfile gameProfile_1) {
- super(clientWorld_1, gameProfile_1);
- }
-
- @Inject(method = "sendChatMessage(Ljava/lang/String;)V", at = @At("HEAD"),
- locals = LocalCapture.CAPTURE_FAILHARD,
- cancellable = true)
- public void onSendChatMessage(String message, CallbackInfo ci) {
- if (FabricSparkGameHooks.INSTANCE.tryProcessChat(message)) {
- ci.cancel();
- }
- }
-}
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/placeholder/SparkFabricPlaceholderApi.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/placeholder/SparkFabricPlaceholderApi.java
index 359630d..dc2e7d9 100644
--- a/spark-fabric/src/main/java/me/lucko/spark/fabric/placeholder/SparkFabricPlaceholderApi.java
+++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/placeholder/SparkFabricPlaceholderApi.java
@@ -1,12 +1,34 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
package me.lucko.spark.fabric.placeholder;
import eu.pb4.placeholders.PlaceholderAPI;
import eu.pb4.placeholders.PlaceholderResult;
+
import me.lucko.spark.common.SparkPlatform;
-import me.lucko.spark.common.command.modules.HealthModule;
import me.lucko.spark.common.monitor.cpu.CpuMonitor;
import me.lucko.spark.common.monitor.tick.TickStatistics;
import me.lucko.spark.common.util.RollingAverage;
+import me.lucko.spark.common.util.StatisticFormatter;
+
import net.kyori.adventure.text.Component;
import net.kyori.adventure.text.serializer.gson.GsonComponentSerializer;
import net.minecraft.text.Text;
@@ -45,16 +67,16 @@ public class SparkFabricPlaceholderApi {
if (tps == null) {
return PlaceholderResult.invalid("Invalid argument");
} else {
- return PlaceholderResult.value(toText(HealthModule.formatTps(tps)));
+ return PlaceholderResult.value(toText(StatisticFormatter.formatTps(tps)));
}
} else {
return PlaceholderResult.value(toText(
Component.text()
- .append(HealthModule.formatTps(tickStatistics.tps5Sec())).append(Component.text(", "))
- .append(HealthModule.formatTps(tickStatistics.tps10Sec())).append(Component.text(", "))
- .append(HealthModule.formatTps(tickStatistics.tps1Min())).append(Component.text(", "))
- .append(HealthModule.formatTps(tickStatistics.tps5Min())).append(Component.text(", "))
- .append(HealthModule.formatTps(tickStatistics.tps15Min()))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps5Sec())).append(Component.text(", "))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps10Sec())).append(Component.text(", "))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps1Min())).append(Component.text(", "))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps5Min())).append(Component.text(", "))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps15Min()))
.build()
));
}
@@ -82,13 +104,13 @@ public class SparkFabricPlaceholderApi {
if (duration == null) {
return PlaceholderResult.invalid("Invalid argument");
} else {
- return PlaceholderResult.value(toText(HealthModule.formatTickDurations(duration)));
+ return PlaceholderResult.value(toText(StatisticFormatter.formatTickDurations(duration)));
}
} else {
return PlaceholderResult.value(toText(
Component.text()
- .append(HealthModule.formatTickDurations(tickStatistics.duration10Sec())).append(Component.text("; "))
- .append(HealthModule.formatTickDurations(tickStatistics.duration1Min()))
+ .append(StatisticFormatter.formatTickDurations(tickStatistics.duration10Sec())).append(Component.text("; "))
+ .append(StatisticFormatter.formatTickDurations(tickStatistics.duration1Min()))
.build()
));
}
@@ -113,14 +135,14 @@ public class SparkFabricPlaceholderApi {
if (usage == null) {
return PlaceholderResult.invalid("Invalid argument");
} else {
- return PlaceholderResult.value(toText(HealthModule.formatCpuUsage(usage)));
+ return PlaceholderResult.value(toText(StatisticFormatter.formatCpuUsage(usage)));
}
} else {
return PlaceholderResult.value(toText(
Component.text()
- .append(HealthModule.formatCpuUsage(CpuMonitor.systemLoad10SecAvg())).append(Component.text(", "))
- .append(HealthModule.formatCpuUsage(CpuMonitor.systemLoad1MinAvg())).append(Component.text(", "))
- .append(HealthModule.formatCpuUsage(CpuMonitor.systemLoad15MinAvg()))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad10SecAvg())).append(Component.text(", "))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad1MinAvg())).append(Component.text(", "))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad15MinAvg()))
.build()
));
}
@@ -145,14 +167,14 @@ public class SparkFabricPlaceholderApi {
if (usage == null) {
return PlaceholderResult.invalid("Invalid argument");
} else {
- return PlaceholderResult.value(toText(HealthModule.formatCpuUsage(usage)));
+ return PlaceholderResult.value(toText(StatisticFormatter.formatCpuUsage(usage)));
}
} else {
return PlaceholderResult.value(toText(
Component.text()
- .append(HealthModule.formatCpuUsage(CpuMonitor.processLoad10SecAvg())).append(Component.text(", "))
- .append(HealthModule.formatCpuUsage(CpuMonitor.processLoad1MinAvg())).append(Component.text(", "))
- .append(HealthModule.formatCpuUsage(CpuMonitor.processLoad15MinAvg()))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad10SecAvg())).append(Component.text(", "))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad1MinAvg())).append(Component.text(", "))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad15MinAvg()))
.build()
));
}
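
A hypothetical sketch of the threshold-based colouring a formatTps(double) helper performs; spark's actual StatisticFormatter lives in spark-common and its exact thresholds, rounding, and colours may differ.

import net.kyori.adventure.text.Component;
import net.kyori.adventure.text.format.NamedTextColor;

final class TpsFormatterSketch {
    static Component formatTps(double tps) {
        // Green when healthy, yellow when strained, red when struggling
        // (the threshold values here are assumptions for illustration).
        NamedTextColor color = tps > 18.0 ? NamedTextColor.GREEN
                : tps > 16.0 ? NamedTextColor.YELLOW
                : NamedTextColor.RED;
        // Clamp the displayed value at the nominal 20 TPS and flag overshoot.
        String value = (tps > 20.0 ? "*" : "") + Math.min(Math.round(tps * 100.0) / 100.0, 20.0);
        return Component.text(value, color);
    }
}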
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricClientSparkPlugin.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricClientSparkPlugin.java
index 0948225..e94d697 100644
--- a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricClientSparkPlugin.java
+++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricClientSparkPlugin.java
@@ -33,101 +33,73 @@ import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.common.tick.TickReporter;
import me.lucko.spark.fabric.FabricCommandSender;
import me.lucko.spark.fabric.FabricPlatformInfo;
-import me.lucko.spark.fabric.FabricSparkGameHooks;
import me.lucko.spark.fabric.FabricSparkMod;
import me.lucko.spark.fabric.FabricTickHook;
import me.lucko.spark.fabric.FabricTickReporter;
+import net.fabricmc.fabric.api.client.command.v2.ClientCommandRegistrationCallback;
+import net.fabricmc.fabric.api.client.command.v2.FabricClientCommandSource;
import net.fabricmc.fabric.api.client.event.lifecycle.v1.ClientLifecycleEvents;
import net.minecraft.client.MinecraftClient;
-import net.minecraft.client.network.ClientPlayNetworkHandler;
-import net.minecraft.command.CommandSource;
+import net.minecraft.command.CommandRegistryAccess;
import net.minecraft.server.command.CommandOutput;
-import java.util.Arrays;
-import java.util.Optional;
import java.util.concurrent.CompletableFuture;
-import java.util.concurrent.TimeUnit;
import java.util.stream.Stream;
-public class FabricClientSparkPlugin extends FabricSparkPlugin implements SuggestionProvider<CommandSource> {
+public class FabricClientSparkPlugin extends FabricSparkPlugin implements Command<FabricClientCommandSource>, SuggestionProvider<FabricClientCommandSource> {
public static void register(FabricSparkMod mod, MinecraftClient client) {
FabricClientSparkPlugin plugin = new FabricClientSparkPlugin(mod, client);
plugin.enable();
-
- // ensure commands are registered
- plugin.scheduler.scheduleWithFixedDelay(plugin::checkCommandRegistered, 10, 10, TimeUnit.SECONDS);
-
- // register shutdown hook
- ClientLifecycleEvents.CLIENT_STOPPING.register(stoppingClient -> {
- if (stoppingClient == plugin.minecraft) {
- plugin.disable();
- }
- });
}
private final MinecraftClient minecraft;
- private CommandDispatcher<CommandSource> dispatcher;
public FabricClientSparkPlugin(FabricSparkMod mod, MinecraftClient minecraft) {
super(mod);
this.minecraft = minecraft;
}
- private CommandDispatcher<CommandSource> getPlayerCommandDispatcher() {
- return Optional.ofNullable(this.minecraft.player)
- .map(player -> player.networkHandler)
- .map(ClientPlayNetworkHandler::getCommandDispatcher)
- .orElse(null);
+ @Override
+ public void enable() {
+ super.enable();
+
+ // events
+ ClientLifecycleEvents.CLIENT_STOPPING.register(this::onDisable);
+ ClientCommandRegistrationCallback.EVENT.register(this::onCommandRegister);
}
- private void checkCommandRegistered() {
- CommandDispatcher<CommandSource> dispatcher = getPlayerCommandDispatcher();
- if (dispatcher == null) {
- return;
+ private void onDisable(MinecraftClient stoppingClient) {
+ if (stoppingClient == this.minecraft) {
+ disable();
}
+ }
- try {
- if (dispatcher != this.dispatcher) {
- this.dispatcher = dispatcher;
- registerCommands(this.dispatcher, c -> Command.SINGLE_SUCCESS, this, "sparkc", "sparkclient");
- FabricSparkGameHooks.INSTANCE.setChatSendCallback(this::onClientChat);
- }
- } catch (Exception e) {
- e.printStackTrace();
- }
+ public void onCommandRegister(CommandDispatcher<FabricClientCommandSource> dispatcher, CommandRegistryAccess registryAccess) {
+ registerCommands(dispatcher, this, this, "sparkc", "sparkclient");
}
- public boolean onClientChat(String chat) {
- String[] args = processArgs(chat, false);
+ @Override
+ public int run(CommandContext<FabricClientCommandSource> context) throws CommandSyntaxException {
+ String[] args = processArgs(context, false, "sparkc", "sparkclient");
if (args == null) {
- return false;
+ return 0;
}
this.threadDumper.ensureSetup();
- this.platform.executeCommand(new FabricCommandSender(this.minecraft.player, this), args);
- this.minecraft.inGameHud.getChatHud().addToMessageHistory(chat);
- return true;
+ this.platform.executeCommand(new FabricCommandSender(context.getSource().getEntity(), this), args);
+ return Command.SINGLE_SUCCESS;
}
@Override
- public CompletableFuture<Suggestions> getSuggestions(CommandContext<CommandSource> context, SuggestionsBuilder builder) throws CommandSyntaxException {
- String[] args = processArgs(context.getInput(), true);
+ public CompletableFuture<Suggestions> getSuggestions(CommandContext<FabricClientCommandSource> context, SuggestionsBuilder builder) throws CommandSyntaxException {
+ String[] args = processArgs(context, true, "/sparkc", "/sparkclient");
if (args == null) {
return Suggestions.empty();
}
- return generateSuggestions(new FabricCommandSender(this.minecraft.player, this), args, builder);
- }
-
- private static String[] processArgs(String input, boolean tabComplete) {
- String[] split = input.split(" ", tabComplete ? -1 : 0);
- if (split.length == 0 || !split[0].equals("/sparkc") && !split[0].equals("/sparkclient")) {
- return null;
- }
-
- return Arrays.copyOfRange(split, 1, split.length);
+ return generateSuggestions(new FabricCommandSender(context.getSource().getEntity(), this), args, builder);
}
@Override
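
The same move away from the chat mixin, sketched end-to-end: a minimal fabric-command-api-v2 client registration (the "hello" command and its message are illustrative assumptions).

import com.mojang.brigadier.Command;

import net.fabricmc.api.ClientModInitializer;
import net.fabricmc.fabric.api.client.command.v2.ClientCommandManager;
import net.fabricmc.fabric.api.client.command.v2.ClientCommandRegistrationCallback;
import net.minecraft.text.Text;

public class ExampleClientCommands implements ClientModInitializer {
    @Override
    public void onInitializeClient() {
        ClientCommandRegistrationCallback.EVENT.register((dispatcher, registryAccess) ->
                dispatcher.register(ClientCommandManager.literal("hello")
                        .executes(ctx -> {
                            // Client command sources print locally; no server round-trip.
                            ctx.getSource().sendFeedback(Text.literal("Hello from the client!"));
                            return Command.SINGLE_SUCCESS;
                        })));
    }
}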
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java
index 8d38b1c..428ac4c 100644
--- a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java
+++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java
@@ -21,6 +21,7 @@
package me.lucko.spark.fabric.plugin;
import com.mojang.brigadier.Command;
+import com.mojang.brigadier.CommandDispatcher;
import com.mojang.brigadier.context.CommandContext;
import com.mojang.brigadier.exceptions.CommandSyntaxException;
import com.mojang.brigadier.suggestion.SuggestionProvider;
@@ -28,49 +29,33 @@ import com.mojang.brigadier.suggestion.Suggestions;
import com.mojang.brigadier.suggestion.SuggestionsBuilder;
import me.lucko.fabric.api.permissions.v0.Permissions;
+import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
import me.lucko.spark.common.platform.PlatformInfo;
import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.common.tick.TickReporter;
import me.lucko.spark.fabric.FabricCommandSender;
import me.lucko.spark.fabric.FabricPlatformInfo;
+import me.lucko.spark.fabric.FabricPlayerPingProvider;
import me.lucko.spark.fabric.FabricSparkMod;
import me.lucko.spark.fabric.FabricTickHook;
import me.lucko.spark.fabric.FabricTickReporter;
-
import me.lucko.spark.fabric.placeholder.SparkFabricPlaceholderApi;
-import net.fabricmc.fabric.api.command.v1.CommandRegistrationCallback;
-import net.fabricmc.fabric.api.event.lifecycle.v1.ServerLifecycleEvents;
+
import net.fabricmc.loader.api.FabricLoader;
import net.minecraft.entity.player.PlayerEntity;
import net.minecraft.server.MinecraftServer;
import net.minecraft.server.command.CommandOutput;
import net.minecraft.server.command.ServerCommandSource;
-import java.util.Arrays;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Stream;
public class FabricServerSparkPlugin extends FabricSparkPlugin implements Command<ServerCommandSource>, SuggestionProvider<ServerCommandSource> {
- public static void register(FabricSparkMod mod, MinecraftServer server) {
+ public static FabricServerSparkPlugin register(FabricSparkMod mod, MinecraftServer server) {
FabricServerSparkPlugin plugin = new FabricServerSparkPlugin(mod, server);
plugin.enable();
-
- // register commands
- registerCommands(server.getCommandManager().getDispatcher(), plugin, plugin, "spark");
- CommandRegistrationCallback.EVENT.register((dispatcher, isDedicated) -> registerCommands(dispatcher, plugin, plugin, "spark"));
-
-
- if (FabricLoader.getInstance().isModLoaded("placeholder-api")) {
- new SparkFabricPlaceholderApi(plugin.platform);
- }
-
- // register shutdown hook
- ServerLifecycleEvents.SERVER_STOPPING.register(stoppingServer -> {
- if (stoppingServer == plugin.server) {
- plugin.disable();
- }
- });
+ return plugin;
}
private final MinecraftServer server;
@@ -81,8 +66,25 @@ public class FabricServerSparkPlugin extends FabricSparkPlugin implements Comman
}
@Override
+ public void enable() {
+ super.enable();
+
+ // register commands
+ registerCommands(this.server.getCommandManager().getDispatcher());
+
+ // placeholders
+ if (FabricLoader.getInstance().isModLoaded("placeholder-api")) {
+ new SparkFabricPlaceholderApi(this.platform);
+ }
+ }
+
+ public void registerCommands(CommandDispatcher<ServerCommandSource> dispatcher) {
+ registerCommands(dispatcher, this, this, "spark");
+ }
+
+ @Override
public int run(CommandContext<ServerCommandSource> context) throws CommandSyntaxException {
- String[] args = processArgs(context, false);
+ String[] args = processArgs(context, false, "/spark", "spark");
if (args == null) {
return 0;
}
@@ -95,7 +97,7 @@ public class FabricServerSparkPlugin extends FabricSparkPlugin implements Comman
@Override
public CompletableFuture<Suggestions> getSuggestions(CommandContext<ServerCommandSource> context, SuggestionsBuilder builder) throws CommandSyntaxException {
- String[] args = processArgs(context, true);
+ String[] args = processArgs(context, true, "/spark", "spark");
if (args == null) {
return Suggestions.empty();
}
@@ -103,15 +105,6 @@ public class FabricServerSparkPlugin extends FabricSparkPlugin implements Comman
return generateSuggestions(new FabricCommandSender(context.getSource().getPlayer(), this), args, builder);
}
- private static String[] processArgs(CommandContext<ServerCommandSource> context, boolean tabComplete) {
- String[] split = context.getInput().split(" ", tabComplete ? -1 : 0);
- if (split.length == 0 || !split[0].equals("/spark") && !split[0].equals("spark")) {
- return null;
- }
-
- return Arrays.copyOfRange(split, 1, split.length);
- }
-
@Override
public boolean hasPermission(CommandOutput sender, String permission) {
if (sender instanceof PlayerEntity) {
@@ -140,6 +133,11 @@ public class FabricServerSparkPlugin extends FabricSparkPlugin implements Comman
}
@Override
+ public PlayerPingProvider createPlayerPingProvider() {
+ return new FabricPlayerPingProvider(this.server);
+ }
+
+ @Override
public PlatformInfo getPlatformInfo() {
return new FabricPlatformInfo(PlatformInfo.Type.SERVER);
}
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricSparkPlugin.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricSparkPlugin.java
index 4bcfce4..b1392d4 100644
--- a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricSparkPlugin.java
+++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricSparkPlugin.java
@@ -25,6 +25,7 @@ import com.mojang.brigadier.CommandDispatcher;
import com.mojang.brigadier.arguments.StringArgumentType;
import com.mojang.brigadier.builder.LiteralArgumentBuilder;
import com.mojang.brigadier.builder.RequiredArgumentBuilder;
+import com.mojang.brigadier.context.CommandContext;
import com.mojang.brigadier.suggestion.SuggestionProvider;
import com.mojang.brigadier.suggestion.Suggestions;
import com.mojang.brigadier.suggestion.SuggestionsBuilder;
@@ -35,6 +36,7 @@ import me.lucko.spark.common.SparkPlugin;
import me.lucko.spark.common.command.sender.CommandSender;
import me.lucko.spark.common.sampler.ThreadDumper;
import me.lucko.spark.common.util.ClassSourceLookup;
+import me.lucko.spark.common.util.SparkThreadFactory;
import me.lucko.spark.fabric.FabricClassSourceLookup;
import me.lucko.spark.fabric.FabricSparkMod;
@@ -44,6 +46,7 @@ import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.nio.file.Path;
+import java.util.Arrays;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
@@ -54,22 +57,18 @@ public abstract class FabricSparkPlugin implements SparkPlugin {
private final FabricSparkMod mod;
private final Logger logger;
protected final ScheduledExecutorService scheduler;
- protected final SparkPlatform platform;
+
+ protected SparkPlatform platform;
protected final ThreadDumper.GameThread threadDumper = new ThreadDumper.GameThread();
protected FabricSparkPlugin(FabricSparkMod mod) {
this.mod = mod;
this.logger = LogManager.getLogger("spark");
- this.scheduler = Executors.newSingleThreadScheduledExecutor(r -> {
- Thread thread = Executors.defaultThreadFactory().newThread(r);
- thread.setName("spark-fabric-async-worker");
- thread.setDaemon(true);
- return thread;
- });
- this.platform = new SparkPlatform(this);
+ this.scheduler = Executors.newScheduledThreadPool(4, new SparkThreadFactory());
}
public void enable() {
+ this.platform = new SparkPlatform(this);
this.platform.enable();
}
@@ -155,4 +154,13 @@ public abstract class FabricSparkPlugin implements SparkPlugin {
}
}
+ protected static String[] processArgs(CommandContext<?> context, boolean tabComplete, String... aliases) {
+ String[] split = context.getInput().split(" ", tabComplete ? -1 : 0);
+ if (split.length == 0 || !Arrays.asList(aliases).contains(split[0])) {
+ return null;
+ }
+
+ return Arrays.copyOfRange(split, 1, split.length);
+ }
+
}
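
Worked example of the split-limit trick inside the new shared processArgs helper: a limit of -1 keeps trailing empty strings, which is what lets tab completion trigger after a trailing space, while the execution path (limit 0) drops them.

import java.util.Arrays;

class ProcessArgsDemo {
    public static void main(String[] args) {
        String input = "/sparkc sampler ";
        // Execution path (limit 0): trailing empties dropped -> [/sparkc, sampler]
        System.out.println(Arrays.toString(input.split(" ", 0)));
        // Tab-complete path (limit -1): trailing empty kept -> [/sparkc, sampler, ]
        System.out.println(Arrays.toString(input.split(" ", -1)));
    }
}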
diff --git a/spark-fabric/src/main/resources/fabric.mod.json b/spark-fabric/src/main/resources/fabric.mod.json
index a9e15e0..e2e600d 100644
--- a/spark-fabric/src/main/resources/fabric.mod.json
+++ b/spark-fabric/src/main/resources/fabric.mod.json
@@ -23,13 +23,10 @@
"me.lucko.spark.fabric.FabricSparkMod::initializeClient"
]
},
- "mixins": [
- "spark.mixins.json"
- ],
"depends": {
"fabricloader": ">=0.4.0",
"fabric-api-base": "*",
- "fabric-command-api-v1": "*",
+ "fabric-command-api-v2": "*",
"fabric-lifecycle-events-v1" : "*"
}
}
diff --git a/spark-fabric/src/main/resources/spark.mixins.json b/spark-fabric/src/main/resources/spark.mixins.json
deleted file mode 100644
index 3f495ea..0000000
--- a/spark-fabric/src/main/resources/spark.mixins.json
+++ /dev/null
@@ -1,11 +0,0 @@
-{
- "required": true,
- "package": "me.lucko.spark.fabric.mixin",
- "compatibilityLevel": "JAVA_8",
- "client": [
- "ClientPlayerEntityMixin"
- ],
- "injectors": {
- "defaultRequire": 1
- }
-}
diff --git a/spark-forge/build.gradle b/spark-forge/build.gradle
index e0fe0f8..210122b 100644
--- a/spark-forge/build.gradle
+++ b/spark-forge/build.gradle
@@ -20,8 +20,7 @@ tasks.withType(JavaCompile) {
}
minecraft {
- mappings channel: 'official', version: '1.18.1'
- accessTransformer = file('src/main/resources/META-INF/accesstransformer.cfg')
+ mappings channel: 'official', version: '1.19'
}
configurations {
@@ -30,7 +29,7 @@ configurations {
}
dependencies {
- minecraft 'net.minecraftforge:forge:1.18.1-39.0.8'
+ minecraft 'net.minecraftforge:forge:1.19-41.0.11'
shade project(':spark-common')
}
@@ -49,7 +48,7 @@ processResources {
}
shadowJar {
- archiveName = 'spark-forge.jar'
+ archiveName = "spark-${project.pluginVersion}-forge.jar"
configurations = [project.configurations.shade]
relocate 'okio', 'me.lucko.spark.lib.okio'
diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeCommandSender.java b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeCommandSender.java
index 9ca6f03..f3b746d 100644
--- a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeCommandSender.java
+++ b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeCommandSender.java
@@ -25,14 +25,14 @@ import me.lucko.spark.forge.plugin.ForgeSparkPlugin;
import net.kyori.adventure.text.Component;
import net.kyori.adventure.text.serializer.gson.GsonComponentSerializer;
-import net.minecraft.Util;
import net.minecraft.commands.CommandSource;
+import net.minecraft.network.chat.Component.Serializer;
import net.minecraft.network.chat.MutableComponent;
-import net.minecraft.network.chat.TextComponent;
import net.minecraft.server.MinecraftServer;
import net.minecraft.server.rcon.RconConsoleSource;
import net.minecraft.world.entity.player.Player;
+import java.util.Objects;
import java.util.UUID;
public class ForgeCommandSender extends AbstractCommandSender<CommandSource> {
@@ -66,8 +66,9 @@ public class ForgeCommandSender extends AbstractCommandSender<CommandSource> {
@Override
public void sendMessage(Component message) {
- MutableComponent component = TextComponent.Serializer.fromJson(GsonComponentSerializer.gson().serialize(message));
- super.delegate.sendMessage(component, Util.NIL_UUID);
+ MutableComponent component = Serializer.fromJson(GsonComponentSerializer.gson().serialize(message));
+ Objects.requireNonNull(component, "component");
+ super.delegate.sendSystemMessage(component);
}
@Override
diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/ForgePlatformInfo.java b/spark-forge/src/main/java/me/lucko/spark/forge/ForgePlatformInfo.java
index 60294be..97b3a86 100644
--- a/spark-forge/src/main/java/me/lucko/spark/forge/ForgePlatformInfo.java
+++ b/spark-forge/src/main/java/me/lucko/spark/forge/ForgePlatformInfo.java
@@ -20,12 +20,12 @@
package me.lucko.spark.forge;
-import me.lucko.spark.common.platform.AbstractPlatformInfo;
+import me.lucko.spark.common.platform.PlatformInfo;
import net.minecraftforge.versions.forge.ForgeVersion;
import net.minecraftforge.versions.mcp.MCPVersion;
-public class ForgePlatformInfo extends AbstractPlatformInfo {
+public class ForgePlatformInfo implements PlatformInfo {
private final Type type;
public ForgePlatformInfo(Type type) {
diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/ForgePlayerPingProvider.java b/spark-forge/src/main/java/me/lucko/spark/forge/ForgePlayerPingProvider.java
new file mode 100644
index 0000000..f31cc5b
--- /dev/null
+++ b/spark-forge/src/main/java/me/lucko/spark/forge/ForgePlayerPingProvider.java
@@ -0,0 +1,47 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.forge;
+
+import com.google.common.collect.ImmutableMap;
+
+import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
+
+import net.minecraft.server.MinecraftServer;
+import net.minecraft.server.level.ServerPlayer;
+
+import java.util.Map;
+
+public class ForgePlayerPingProvider implements PlayerPingProvider {
+ private final MinecraftServer server;
+
+ public ForgePlayerPingProvider(MinecraftServer server) {
+ this.server = server;
+ }
+
+ @Override
+ public Map<String, Integer> poll() {
+ ImmutableMap.Builder<String, Integer> builder = ImmutableMap.builder();
+ for (ServerPlayer player : this.server.getPlayerList().getPlayers()) {
+ builder.put(player.getGameProfile().getName(), player.latency);
+ }
+ return builder.build();
+ }
+}
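
A brief consumer-side sketch of the new PlayerPingProvider contract; the averaging below is illustrative, not spark's actual reporting logic.

import java.util.Map;

import me.lucko.spark.common.monitor.ping.PlayerPingProvider;

final class PingSummary {
    /** Returns the mean ping across all polled players, or 0 if none are online. */
    static double averagePing(PlayerPingProvider provider) {
        Map<String, Integer> pings = provider.poll();
        return pings.values().stream().mapToInt(Integer::intValue).average().orElse(0.0);
    }
}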
diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeSparkMod.java b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeSparkMod.java
index 02a2ebc..3321d12 100644
--- a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeSparkMod.java
+++ b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeSparkMod.java
@@ -24,7 +24,7 @@ import me.lucko.spark.forge.plugin.ForgeClientSparkPlugin;
import me.lucko.spark.forge.plugin.ForgeServerSparkPlugin;
import net.minecraftforge.common.MinecraftForge;
-import net.minecraftforge.event.RegisterCommandsEvent;
+import net.minecraftforge.event.server.ServerAboutToStartEvent;
import net.minecraftforge.eventbus.api.SubscribeEvent;
import net.minecraftforge.fml.IExtensionPoint;
import net.minecraftforge.fml.ModContainer;
@@ -61,15 +61,15 @@ public class ForgeSparkMod {
this.configDirectory = FMLPaths.CONFIGDIR.get().resolve(this.container.getModId());
}
- @SubscribeEvent
- public void registerCommands(RegisterCommandsEvent e) {
- ForgeServerSparkPlugin.register(this, e);
- }
-
public void clientInit(FMLClientSetupEvent e) {
ForgeClientSparkPlugin.register(this, e);
}
+ @SubscribeEvent
+ public void serverInit(ServerAboutToStartEvent e) {
+ ForgeServerSparkPlugin.register(this, e);
+ }
+
public Path getConfigDirectory() {
if (this.configDirectory == null) {
throw new IllegalStateException("Config directory not set");
diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeClientSparkPlugin.java b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeClientSparkPlugin.java
index 95bb2d1..cf5c89b 100644
--- a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeClientSparkPlugin.java
+++ b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeClientSparkPlugin.java
@@ -21,7 +21,6 @@
package me.lucko.spark.forge.plugin;
import com.mojang.brigadier.Command;
-import com.mojang.brigadier.CommandDispatcher;
import com.mojang.brigadier.context.CommandContext;
import com.mojang.brigadier.exceptions.CommandSyntaxException;
import com.mojang.brigadier.suggestion.SuggestionProvider;
@@ -38,95 +37,64 @@ import me.lucko.spark.forge.ForgeTickHook;
import me.lucko.spark.forge.ForgeTickReporter;
import net.minecraft.client.Minecraft;
-import net.minecraft.client.multiplayer.ClientPacketListener;
import net.minecraft.commands.CommandSource;
-import net.minecraft.commands.SharedSuggestionProvider;
-import net.minecraftforge.client.event.ClientChatEvent;
+import net.minecraft.commands.CommandSourceStack;
+import net.minecraftforge.client.event.RegisterClientCommandsEvent;
import net.minecraftforge.common.MinecraftForge;
import net.minecraftforge.event.TickEvent;
import net.minecraftforge.eventbus.api.SubscribeEvent;
import net.minecraftforge.fml.event.lifecycle.FMLClientSetupEvent;
-import java.util.Arrays;
-import java.util.Optional;
import java.util.concurrent.CompletableFuture;
-import java.util.concurrent.TimeUnit;
import java.util.stream.Stream;
-public class ForgeClientSparkPlugin extends ForgeSparkPlugin implements SuggestionProvider<SharedSuggestionProvider> {
+public class ForgeClientSparkPlugin extends ForgeSparkPlugin implements Command<CommandSourceStack>, SuggestionProvider<CommandSourceStack> {
public static void register(ForgeSparkMod mod, FMLClientSetupEvent event) {
ForgeClientSparkPlugin plugin = new ForgeClientSparkPlugin(mod, Minecraft.getInstance());
plugin.enable();
-
- // register listeners
- MinecraftForge.EVENT_BUS.register(plugin);
-
- // ensure commands are registered
- plugin.scheduler.scheduleWithFixedDelay(plugin::checkCommandRegistered, 10, 10, TimeUnit.SECONDS);
}
private final Minecraft minecraft;
- private CommandDispatcher<SharedSuggestionProvider> dispatcher;
public ForgeClientSparkPlugin(ForgeSparkMod mod, Minecraft minecraft) {
super(mod);
this.minecraft = minecraft;
}
- private CommandDispatcher<SharedSuggestionProvider> getPlayerCommandDispatcher() {
- return Optional.ofNullable(this.minecraft.player)
- .map(player -> player.connection)
- .map(ClientPacketListener::getCommands)
- .orElse(null);
- }
-
- private void checkCommandRegistered() {
- CommandDispatcher<SharedSuggestionProvider> dispatcher = getPlayerCommandDispatcher();
- if (dispatcher == null) {
- return;
- }
+ @Override
+ public void enable() {
+ super.enable();
- try {
- if (dispatcher != this.dispatcher) {
- this.dispatcher = dispatcher;
- registerCommands(this.dispatcher, context -> Command.SINGLE_SUCCESS, this, "sparkc", "sparkclient");
- }
- } catch (Exception e) {
- e.printStackTrace();
- }
+ // register listeners
+ MinecraftForge.EVENT_BUS.register(this);
}
@SubscribeEvent
- public void onClientChat(ClientChatEvent event) {
- String[] args = processArgs(event.getMessage(), false);
+ public void onCommandRegister(RegisterClientCommandsEvent e) {
+ registerCommands(e.getDispatcher(), this, this, "sparkc", "sparkclient");
+ }
+
+ @Override
+ public int run(CommandContext<CommandSourceStack> context) throws CommandSyntaxException {
+ String[] args = processArgs(context, false, "sparkc", "sparkclient");
if (args == null) {
- return;
+ return 0;
}
this.threadDumper.ensureSetup();
- this.platform.executeCommand(new ForgeCommandSender(this.minecraft.player, this), args);
- this.minecraft.gui.getChat().addRecentChat(event.getMessage());
- event.setCanceled(true);
+ this.platform.executeCommand(new ForgeCommandSender(context.getSource().getEntity(), this), args);
+ return Command.SINGLE_SUCCESS;
}
@Override
- public CompletableFuture<Suggestions> getSuggestions(CommandContext<SharedSuggestionProvider> context, SuggestionsBuilder builder) throws CommandSyntaxException {
- String[] args = processArgs(context.getInput(), true);
+ public CompletableFuture<Suggestions> getSuggestions(CommandContext<CommandSourceStack> context, SuggestionsBuilder builder) throws CommandSyntaxException {
+ String[] args = processArgs(context, true, "/sparkc", "/sparkclient");
if (args == null) {
return Suggestions.empty();
}
- return generateSuggestions(new ForgeCommandSender(this.minecraft.player, this), args, builder);
- }
-
- private static String[] processArgs(String input, boolean tabComplete) {
- String[] split = input.split(" ", tabComplete ? -1 : 0);
- if (split.length == 0 || !split[0].equals("/sparkc") && !split[0].equals("/sparkclient")) {
- return null;
- }
-
- return Arrays.copyOfRange(split, 1, split.length);
+ return generateSuggestions(new ForgeCommandSender(context.getSource().getEntity(), this), args, builder);
}
@Override
@@ -158,5 +126,4 @@ public class ForgeClientSparkPlugin extends ForgeSparkPlugin implements Suggesti
public String getCommandName() {
return "sparkc";
}
-
}
diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java
index 4a2416f..e341d6f 100644
--- a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java
+++ b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java
@@ -29,11 +29,13 @@ import com.mojang.brigadier.suggestion.SuggestionProvider;
import com.mojang.brigadier.suggestion.Suggestions;
import com.mojang.brigadier.suggestion.SuggestionsBuilder;
+import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
import me.lucko.spark.common.platform.PlatformInfo;
import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.common.tick.TickReporter;
import me.lucko.spark.forge.ForgeCommandSender;
import me.lucko.spark.forge.ForgePlatformInfo;
+import me.lucko.spark.forge.ForgePlayerPingProvider;
import me.lucko.spark.forge.ForgeSparkMod;
import me.lucko.spark.forge.ForgeTickHook;
import me.lucko.spark.forge.ForgeTickReporter;
@@ -45,49 +47,61 @@ import net.minecraft.server.level.ServerPlayer;
import net.minecraftforge.common.MinecraftForge;
import net.minecraftforge.event.RegisterCommandsEvent;
import net.minecraftforge.event.TickEvent;
+import net.minecraftforge.event.server.ServerAboutToStartEvent;
import net.minecraftforge.event.server.ServerStoppingEvent;
import net.minecraftforge.eventbus.api.SubscribeEvent;
-import net.minecraftforge.server.ServerLifecycleHooks;
import net.minecraftforge.server.permission.PermissionAPI;
import net.minecraftforge.server.permission.events.PermissionGatherEvent;
import net.minecraftforge.server.permission.nodes.PermissionNode;
import net.minecraftforge.server.permission.nodes.PermissionNode.PermissionResolver;
import net.minecraftforge.server.permission.nodes.PermissionTypes;
-import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
-import java.util.function.Supplier;
import java.util.stream.Collectors;
import java.util.stream.Stream;
public class ForgeServerSparkPlugin extends ForgeSparkPlugin implements Command<CommandSourceStack>, SuggestionProvider<CommandSourceStack> {
- public static void register(ForgeSparkMod mod, RegisterCommandsEvent event) {
- ForgeServerSparkPlugin plugin = new ForgeServerSparkPlugin(mod, ServerLifecycleHooks::getCurrentServer);
+ public static void register(ForgeSparkMod mod, ServerAboutToStartEvent event) {
+ ForgeServerSparkPlugin plugin = new ForgeServerSparkPlugin(mod, event.getServer());
plugin.enable();
-
- // register listeners
- MinecraftForge.EVENT_BUS.register(plugin);
-
- // register commands & permissions
- CommandDispatcher<CommandSourceStack> dispatcher = event.getDispatcher();
- registerCommands(dispatcher, plugin, plugin, "spark");
}
- private final Supplier<MinecraftServer> server;
+ private final MinecraftServer server;
private Map<String, PermissionNode<Boolean>> registeredPermissions = Collections.emptyMap();
- public ForgeServerSparkPlugin(ForgeSparkMod mod, Supplier<MinecraftServer> server) {
+ public ForgeServerSparkPlugin(ForgeSparkMod mod, MinecraftServer server) {
super(mod);
this.server = server;
}
+ @Override
+ public void enable() {
+ super.enable();
+
+ // register commands
+ registerCommands(this.server.getCommands().getDispatcher());
+
+ // register listeners
+ MinecraftForge.EVENT_BUS.register(this);
+ }
+
+ @Override
+ public void disable() {
+ super.disable();
+
+ // unregister listeners
+ MinecraftForge.EVENT_BUS.unregister(this);
+ }
+
@SubscribeEvent
public void onDisable(ServerStoppingEvent event) {
- disable();
+ if (event.getServer() == this.server) {
+ disable();
+ }
}
@SubscribeEvent
@@ -112,21 +126,31 @@ public class ForgeServerSparkPlugin extends ForgeSparkPlugin implements Command<
this.registeredPermissions = builder.build();
}
+ @SubscribeEvent
+ public void onCommandRegister(RegisterCommandsEvent e) {
+ registerCommands(e.getDispatcher());
+ }
+
+ private void registerCommands(CommandDispatcher<CommandSourceStack> dispatcher) {
+ registerCommands(dispatcher, this, this, "spark");
+ }
+
@Override
public int run(CommandContext<CommandSourceStack> context) throws CommandSyntaxException {
- String[] args = processArgs(context, false);
+ String[] args = processArgs(context, false, "/spark", "spark");
if (args == null) {
return 0;
}
this.threadDumper.ensureSetup();
- this.platform.executeCommand(new ForgeCommandSender(context.getSource().source, this), args);
+ CommandSource source = context.getSource().getEntity() != null ? context.getSource().getEntity() : context.getSource().getServer();
+ this.platform.executeCommand(new ForgeCommandSender(source, this), args);
return Command.SINGLE_SUCCESS;
}
@Override
public CompletableFuture<Suggestions> getSuggestions(CommandContext<CommandSourceStack> context, SuggestionsBuilder builder) throws CommandSyntaxException {
- String[] args = processArgs(context, true);
+ String[] args = processArgs(context, true, "/spark", "spark");
if (args == null) {
return Suggestions.empty();
}
@@ -134,15 +158,6 @@ public class ForgeServerSparkPlugin extends ForgeSparkPlugin implements Command<
return generateSuggestions(new ForgeCommandSender(context.getSource().getPlayerOrException(), this), args, builder);
}
- private static String [] processArgs(CommandContext<CommandSourceStack> context, boolean tabComplete) {
- String[] split = context.getInput().split(" ", tabComplete ? -1 : 0);
- if (split.length == 0 || !split[0].equals("/spark") && !split[0].equals("spark")) {
- return null;
- }
-
- return Arrays.copyOfRange(split, 1, split.length);
- }
-
@Override
public boolean hasPermission(CommandSource sender, String permission) {
if (sender instanceof ServerPlayer) {
@@ -163,8 +178,8 @@ public class ForgeServerSparkPlugin extends ForgeSparkPlugin implements Command<
@Override
public Stream<ForgeCommandSender> getCommandSenders() {
return Stream.concat(
- this.server.get().getPlayerList().getPlayers().stream(),
- Stream.of(this.server.get())
+ this.server.getPlayerList().getPlayers().stream(),
+ Stream.of(this.server)
).map(sender -> new ForgeCommandSender(sender, this));
}
@@ -179,6 +194,11 @@ public class ForgeServerSparkPlugin extends ForgeSparkPlugin implements Command<
}
@Override
+ public PlayerPingProvider createPlayerPingProvider() {
+ return new ForgePlayerPingProvider(this.server);
+ }
+
+ @Override
public PlatformInfo getPlatformInfo() {
return new ForgePlatformInfo(PlatformInfo.Type.SERVER);
}
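
Both platform base classes (FabricSparkPlugin above, ForgeSparkPlugin below) now build their scheduler from a shared SparkThreadFactory in spark-common. Its internals are not shown in this patch; a plausible sketch under that assumption (the name prefix and counter scheme are invented here) is a factory producing daemon threads with a stable name.

import java.util.concurrent.ThreadFactory;
import java.util.concurrent.atomic.AtomicInteger;

final class SparkThreadFactorySketch implements ThreadFactory {
    private final AtomicInteger counter = new AtomicInteger();

    @Override
    public Thread newThread(Runnable r) {
        // Hypothetical details: the "spark-worker" prefix is an assumption.
        Thread thread = new Thread(r, "spark-worker-" + counter.getAndIncrement());
        thread.setDaemon(true); // never keep the JVM alive on shutdown
        return thread;
    }
}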
diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeSparkPlugin.java b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeSparkPlugin.java
index f8b7559..f257e34 100644
--- a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeSparkPlugin.java
+++ b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeSparkPlugin.java
@@ -25,6 +25,7 @@ import com.mojang.brigadier.CommandDispatcher;
import com.mojang.brigadier.arguments.StringArgumentType;
import com.mojang.brigadier.builder.LiteralArgumentBuilder;
import com.mojang.brigadier.builder.RequiredArgumentBuilder;
+import com.mojang.brigadier.context.CommandContext;
import com.mojang.brigadier.suggestion.SuggestionProvider;
import com.mojang.brigadier.suggestion.Suggestions;
import com.mojang.brigadier.suggestion.SuggestionsBuilder;
@@ -35,6 +36,7 @@ import me.lucko.spark.common.SparkPlugin;
import me.lucko.spark.common.command.sender.CommandSender;
import me.lucko.spark.common.sampler.ThreadDumper;
import me.lucko.spark.common.util.ClassSourceLookup;
+import me.lucko.spark.common.util.SparkThreadFactory;
import me.lucko.spark.forge.ForgeClassSourceLookup;
import me.lucko.spark.forge.ForgeSparkMod;
@@ -44,6 +46,7 @@ import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.nio.file.Path;
+import java.util.Arrays;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
@@ -51,44 +54,21 @@ import java.util.logging.Level;
public abstract class ForgeSparkPlugin implements SparkPlugin {
- public static <T> void registerCommands(CommandDispatcher<T> dispatcher, Command<T> executor, SuggestionProvider<T> suggestor, String... aliases) {
- if (aliases.length == 0) {
- return;
- }
-
- String mainName = aliases[0];
- LiteralArgumentBuilder<T> command = LiteralArgumentBuilder.<T>literal(mainName)
- .executes(executor)
- .then(RequiredArgumentBuilder.<T, String>argument("args", StringArgumentType.greedyString())
- .suggests(suggestor)
- .executes(executor)
- );
-
- LiteralCommandNode<T> node = dispatcher.register(command);
- for (int i = 1; i < aliases.length; i++) {
- dispatcher.register(LiteralArgumentBuilder.<T>literal(aliases[i]).redirect(node));
- }
- }
-
private final ForgeSparkMod mod;
private final Logger logger;
protected final ScheduledExecutorService scheduler;
- protected final SparkPlatform platform;
+
+ protected SparkPlatform platform;
protected final ThreadDumper.GameThread threadDumper = new ThreadDumper.GameThread();
protected ForgeSparkPlugin(ForgeSparkMod mod) {
this.mod = mod;
this.logger = LogManager.getLogger("spark");
- this.scheduler = Executors.newSingleThreadScheduledExecutor(r -> {
- Thread thread = Executors.defaultThreadFactory().newThread(r);
- thread.setName("spark-forge-async-worker");
- thread.setDaemon(true);
- return thread;
- });
- this.platform = new SparkPlatform(this);
+ this.scheduler = Executors.newScheduledThreadPool(4, new SparkThreadFactory());
}
public void enable() {
+ this.platform = new SparkPlatform(this);
this.platform.enable();
}
@@ -154,4 +134,32 @@ public abstract class ForgeSparkPlugin implements SparkPlugin {
return suggestions.build();
});
}
+
+ protected static <T> void registerCommands(CommandDispatcher<T> dispatcher, Command<T> executor, SuggestionProvider<T> suggestor, String... aliases) {
+ if (aliases.length == 0) {
+ return;
+ }
+
+ String mainName = aliases[0];
+ LiteralArgumentBuilder<T> command = LiteralArgumentBuilder.<T>literal(mainName)
+ .executes(executor)
+ .then(RequiredArgumentBuilder.<T, String>argument("args", StringArgumentType.greedyString())
+ .suggests(suggestor)
+ .executes(executor)
+ );
+
+ LiteralCommandNode<T> node = dispatcher.register(command);
+ for (int i = 1; i < aliases.length; i++) {
+ dispatcher.register(LiteralArgumentBuilder.<T>literal(aliases[i]).redirect(node));
+ }
+ }
+
+ protected static String[] processArgs(CommandContext<?> context, boolean tabComplete, String... aliases) {
+ String[] split = context.getInput().split(" ", tabComplete ? -1 : 0);
+ if (split.length == 0 || !Arrays.asList(aliases).contains(split[0])) {
+ return null;
+ }
+
+ return Arrays.copyOfRange(split, 1, split.length);
+ }
}
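
Self-contained brigadier demonstration of the alias pattern registerCommands uses: the first alias owns the command tree and every further alias is a redirect to that node (the command names here are illustrative).

import com.mojang.brigadier.CommandDispatcher;
import com.mojang.brigadier.arguments.StringArgumentType;
import com.mojang.brigadier.builder.LiteralArgumentBuilder;
import com.mojang.brigadier.builder.RequiredArgumentBuilder;
import com.mojang.brigadier.tree.LiteralCommandNode;

final class AliasDemo {
    public static void main(String[] args) throws Exception {
        CommandDispatcher<Object> dispatcher = new CommandDispatcher<>();
        LiteralCommandNode<Object> node = dispatcher.register(
                LiteralArgumentBuilder.<Object>literal("spark")
                        .executes(ctx -> 1)
                        .then(RequiredArgumentBuilder.<Object, String>argument("args", StringArgumentType.greedyString())
                                .executes(ctx -> {
                                    System.out.println("args = " + StringArgumentType.getString(ctx, "args"));
                                    return 1;
                                })));
        // The alias shares the main node's children via redirect. Note that a
        // bare alias with no further input cannot execute through a redirect,
        // a known brigadier limitation.
        dispatcher.register(LiteralArgumentBuilder.<Object>literal("sparkalias").redirect(node));

        Object source = new Object();
        dispatcher.execute("spark sampler info", source);      // args = sampler info
        dispatcher.execute("sparkalias sampler info", source); // same, via the redirect
    }
}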
diff --git a/spark-forge/src/main/resources/META-INF/accesstransformer.cfg b/spark-forge/src/main/resources/META-INF/accesstransformer.cfg
deleted file mode 100644
index a1d1dc8..0000000
--- a/spark-forge/src/main/resources/META-INF/accesstransformer.cfg
+++ /dev/null
@@ -1 +0,0 @@
-public net.minecraft.commands.CommandSourceStack f_81288_ # source \ No newline at end of file
diff --git a/spark-forge1122/build.gradle b/spark-forge1122/build.gradle
deleted file mode 100644
index 5c40d04..0000000
--- a/spark-forge1122/build.gradle
+++ /dev/null
@@ -1,82 +0,0 @@
-buildscript {
- repositories {
- maven { url = "https://files.minecraftforge.net/maven" }
- }
- dependencies {
- classpath group: 'net.minecraftforge.gradle', name: 'ForgeGradle', version: '5.1.+', changing: true
- }
-}
-
-plugins {
- id 'net.kyori.blossom' version '1.3.0'
- id 'com.github.johnrengelman.shadow' version '7.0.0'
-}
-
-apply plugin: 'net.minecraftforge.gradle'
-
-minecraft {
- mappings channel: 'snapshot', version: '20171003-1.12'
-}
-
-configurations {
- shade
- implementation.extendsFrom shade
-}
-
-// https://github.com/MinecraftForge/ForgeGradle/issues/627#issuecomment-533927535
-configurations.all {
- resolutionStrategy {
- force 'org.lwjgl.lwjgl:lwjgl-platform:2.9.4-nightly-20150209'
- }
-}
-
-dependencies {
- minecraft 'net.minecraftforge:forge:1.12.2-14.23.5.2854'
- shade project(':spark-common')
-}
-
-processResources {
- from(sourceSets.main.resources.srcDirs) {
- include 'mcmod.info'
- expand (
- 'pluginVersion': project.pluginVersion,
- 'pluginDescription': project.pluginDescription
- )
- }
-}
-
-blossom {
- replaceTokenIn('src/main/java/me/lucko/spark/forge/Forge1122SparkMod.java')
- replaceToken '@version@', project.pluginVersion
-}
-
-shadowJar {
- archiveName = 'spark-forge1122.jar'
- configurations = [project.configurations.shade]
-
- relocate 'okio', 'me.lucko.spark.lib.okio'
- relocate 'okhttp3', 'me.lucko.spark.lib.okhttp3'
- relocate 'net.kyori.adventure', 'me.lucko.spark.lib.adventure'
- relocate 'net.kyori.examination', 'me.lucko.spark.lib.adventure.examination'
- relocate 'net.bytebuddy', 'me.lucko.spark.lib.bytebuddy'
- relocate 'org.tukaani.xz', 'me.lucko.spark.lib.xz'
- relocate 'com.google.protobuf', 'me.lucko.spark.lib.protobuf'
- relocate 'org.objectweb.asm', 'me.lucko.spark.lib.asm'
- relocate 'one.profiler', 'me.lucko.spark.lib.asyncprofiler'
-
- exclude 'module-info.class'
- exclude 'META-INF/maven/**'
- exclude 'META-INF/proguard/**'
-}
-
-artifacts {
- archives shadowJar
- shadow shadowJar
-}
-
-reobf {
- shadowJar {
- dependsOn createMcpToSrg
- mappings = createMcpToSrg.outputs.files.singleFile
- }
-}
diff --git a/spark-forge1122/src/main/java/me/lucko/spark/forge/Forge1122CommandSender.java b/spark-forge1122/src/main/java/me/lucko/spark/forge/Forge1122CommandSender.java
deleted file mode 100644
index 7e7d73c..0000000
--- a/spark-forge1122/src/main/java/me/lucko/spark/forge/Forge1122CommandSender.java
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
- * This file is part of spark.
- *
- * Copyright (c) lucko (Luck) <luck@lucko.me>
- * Copyright (c) contributors
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-package me.lucko.spark.forge;
-
-import me.lucko.spark.common.command.sender.AbstractCommandSender;
-import me.lucko.spark.forge.plugin.Forge1122SparkPlugin;
-
-import net.kyori.adventure.text.Component;
-import net.kyori.adventure.text.serializer.gson.GsonComponentSerializer;
-import net.minecraft.command.ICommandSender;
-import net.minecraft.entity.player.EntityPlayer;
-import net.minecraft.network.rcon.RConConsoleSource;
-import net.minecraft.server.MinecraftServer;
-import net.minecraft.util.text.ITextComponent;
-
-import java.util.UUID;
-
-public class Forge1122CommandSender extends AbstractCommandSender<ICommandSender> {
- private final Forge1122SparkPlugin plugin;
-
- public Forge1122CommandSender(ICommandSender source, Forge1122SparkPlugin plugin) {
- super(source);
- this.plugin = plugin;
- }
-
- @Override
- public String getName() {
- if (super.delegate instanceof EntityPlayer) {
- return ((EntityPlayer) super.delegate).getGameProfile().getName();
- } else if (super.delegate instanceof MinecraftServer) {
- return "Console";
- } else if (super.delegate instanceof RConConsoleSource) {
- return "RCON Console";
- } else {
- return "unknown:" + super.delegate.getClass().getSimpleName();
- }
- }
-
- @Override
- public UUID getUniqueId() {
- if (super.delegate instanceof EntityPlayer) {
- return ((EntityPlayer) super.delegate).getUniqueID();
- }
- return null;
- }
-
- @Override
- public void sendMessage(Component message) {
- ITextComponent component = ITextComponent.Serializer.jsonToComponent(GsonComponentSerializer.gson().serialize(message));
- super.delegate.sendMessage(component);
- }
-
- @Override
- public boolean hasPermission(String permission) {
- return this.plugin.hasPermission(super.delegate, permission);
- }
-}
diff --git a/spark-forge1122/src/main/java/me/lucko/spark/forge/Forge1122SparkMod.java b/spark-forge1122/src/main/java/me/lucko/spark/forge/Forge1122SparkMod.java
deleted file mode 100644
index 535c69e..0000000
--- a/spark-forge1122/src/main/java/me/lucko/spark/forge/Forge1122SparkMod.java
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- * This file is part of spark.
- *
- * Copyright (c) lucko (Luck) <luck@lucko.me>
- * Copyright (c) contributors
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-package me.lucko.spark.forge;
-
-import me.lucko.spark.forge.plugin.Forge1122ClientSparkPlugin;
-import me.lucko.spark.forge.plugin.Forge1122ServerSparkPlugin;
-
-import net.minecraftforge.fml.common.FMLCommonHandler;
-import net.minecraftforge.fml.common.Mod;
-import net.minecraftforge.fml.common.Mod.EventHandler;
-import net.minecraftforge.fml.common.event.FMLInitializationEvent;
-import net.minecraftforge.fml.common.event.FMLPreInitializationEvent;
-import net.minecraftforge.fml.common.event.FMLServerStartingEvent;
-import net.minecraftforge.fml.common.event.FMLServerStoppingEvent;
-import net.minecraftforge.fml.relauncher.Side;
-
-import java.nio.file.Path;
-
-@Mod(
- modid = "spark",
- name = "spark",
- version = "@version@",
- acceptableRemoteVersions = "*"
-)
-public class Forge1122SparkMod {
-
- private Path configDirectory;
- private Forge1122ServerSparkPlugin activeServerPlugin;
-
- public String getVersion() {
- return Forge1122SparkMod.class.getAnnotation(Mod.class).version();
- }
-
- @EventHandler
- public void preInit(FMLPreInitializationEvent e) {
- this.configDirectory = e.getModConfigurationDirectory().toPath();
- }
-
- @EventHandler
- public void clientInit(FMLInitializationEvent e) {
- if (FMLCommonHandler.instance().getSide() == Side.CLIENT) {
- Forge1122ClientSparkPlugin.register(this);
- }
- }
-
- @EventHandler
- public void serverInit(FMLServerStartingEvent e) {
- this.activeServerPlugin = Forge1122ServerSparkPlugin.register(this, e);
- }
-
- @EventHandler
- public void serverStop(FMLServerStoppingEvent e) {
- if (this.activeServerPlugin != null) {
- this.activeServerPlugin.disable();
- this.activeServerPlugin = null;
- }
- }
-
- public Path getConfigDirectory() {
- if (this.configDirectory == null) {
- throw new IllegalStateException("Config directory not set");
- }
- return this.configDirectory;
- }
-}
diff --git a/spark-forge1122/src/main/java/me/lucko/spark/forge/Forge1122TickReporter.java b/spark-forge1122/src/main/java/me/lucko/spark/forge/Forge1122TickReporter.java
deleted file mode 100644
index cd2cfd8..0000000
--- a/spark-forge1122/src/main/java/me/lucko/spark/forge/Forge1122TickReporter.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * This file is part of spark.
- *
- * Copyright (c) lucko (Luck) <luck@lucko.me>
- * Copyright (c) contributors
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-package me.lucko.spark.forge;
-
-import me.lucko.spark.common.tick.SimpleTickReporter;
-import me.lucko.spark.common.tick.TickReporter;
-
-import net.minecraftforge.common.MinecraftForge;
-import net.minecraftforge.fml.common.eventhandler.SubscribeEvent;
-import net.minecraftforge.fml.common.gameevent.TickEvent;
-
-public class Forge1122TickReporter extends SimpleTickReporter implements TickReporter {
- private final TickEvent.Type type;
-
- public Forge1122TickReporter(TickEvent.Type type) {
- this.type = type;
- }
-
- @SubscribeEvent
- public void onTick(TickEvent e) {
- if (e.type != this.type) {
- return;
- }
-
- switch (e.phase) {
- case START:
- onStart();
- break;
- case END:
- onEnd();
- break;
- default:
- throw new AssertionError(e.phase);
- }
- }
-
- @Override
- public void start() {
- MinecraftForge.EVENT_BUS.register(this);
- }
-
- @Override
- public void close() {
- MinecraftForge.EVENT_BUS.unregister(this);
- super.close();
- }
-
-}
diff --git a/spark-forge1122/src/main/java/me/lucko/spark/forge/plugin/Forge1122ClientSparkPlugin.java b/spark-forge1122/src/main/java/me/lucko/spark/forge/plugin/Forge1122ClientSparkPlugin.java
deleted file mode 100644
index 7ab0356..0000000
--- a/spark-forge1122/src/main/java/me/lucko/spark/forge/plugin/Forge1122ClientSparkPlugin.java
+++ /dev/null
@@ -1,90 +0,0 @@
-/*
- * This file is part of spark.
- *
- * Copyright (c) lucko (Luck) <luck@lucko.me>
- * Copyright (c) contributors
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-package me.lucko.spark.forge.plugin;
-
-import me.lucko.spark.common.platform.PlatformInfo;
-import me.lucko.spark.common.tick.TickHook;
-import me.lucko.spark.common.tick.TickReporter;
-import me.lucko.spark.forge.Forge1122CommandSender;
-import me.lucko.spark.forge.Forge1122PlatformInfo;
-import me.lucko.spark.forge.Forge1122SparkMod;
-import me.lucko.spark.forge.Forge1122TickHook;
-import me.lucko.spark.forge.Forge1122TickReporter;
-
-import net.minecraft.client.Minecraft;
-import net.minecraft.command.ICommandSender;
-import net.minecraftforge.client.ClientCommandHandler;
-import net.minecraftforge.common.MinecraftForge;
-import net.minecraftforge.fml.common.gameevent.TickEvent;
-
-import java.util.stream.Stream;
-
-public class Forge1122ClientSparkPlugin extends Forge1122SparkPlugin {
-
- public static void register(Forge1122SparkMod mod) {
- Forge1122ClientSparkPlugin plugin = new Forge1122ClientSparkPlugin(mod, Minecraft.getMinecraft());
- plugin.enable();
-
- // register listeners
- MinecraftForge.EVENT_BUS.register(plugin);
-
- // register commands
- ClientCommandHandler.instance.registerCommand(plugin);
- }
-
- private final Minecraft minecraft;
-
- public Forge1122ClientSparkPlugin(Forge1122SparkMod mod, Minecraft minecraft) {
- super(mod);
- this.minecraft = minecraft;
- }
-
- @Override
- public boolean hasPermission(ICommandSender sender, String permission) {
- return true;
- }
-
- @Override
- public Stream<Forge1122CommandSender> getCommandSenders() {
- return Stream.of(new Forge1122CommandSender(this.minecraft.player, this));
- }
-
- @Override
- public TickHook createTickHook() {
- return new Forge1122TickHook(TickEvent.Type.CLIENT);
- }
-
- @Override
- public TickReporter createTickReporter() {
- return new Forge1122TickReporter(TickEvent.Type.CLIENT);
- }
-
- @Override
- public PlatformInfo getPlatformInfo() {
- return new Forge1122PlatformInfo(PlatformInfo.Type.CLIENT);
- }
-
- @Override
- public String getCommandName() {
- return "sparkc";
- }
-
-}
diff --git a/spark-forge1122/src/main/java/me/lucko/spark/forge/plugin/Forge1122ServerSparkPlugin.java b/spark-forge1122/src/main/java/me/lucko/spark/forge/plugin/Forge1122ServerSparkPlugin.java
deleted file mode 100644
index 72b1429..0000000
--- a/spark-forge1122/src/main/java/me/lucko/spark/forge/plugin/Forge1122ServerSparkPlugin.java
+++ /dev/null
@@ -1,98 +0,0 @@
-/*
- * This file is part of spark.
- *
- * Copyright (c) lucko (Luck) <luck@lucko.me>
- * Copyright (c) contributors
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-package me.lucko.spark.forge.plugin;
-
-import me.lucko.spark.common.platform.PlatformInfo;
-import me.lucko.spark.common.tick.TickHook;
-import me.lucko.spark.common.tick.TickReporter;
-import me.lucko.spark.forge.Forge1122CommandSender;
-import me.lucko.spark.forge.Forge1122PlatformInfo;
-import me.lucko.spark.forge.Forge1122SparkMod;
-import me.lucko.spark.forge.Forge1122TickHook;
-import me.lucko.spark.forge.Forge1122TickReporter;
-
-import net.minecraft.command.ICommandSender;
-import net.minecraft.entity.player.EntityPlayer;
-import net.minecraft.server.MinecraftServer;
-import net.minecraftforge.fml.common.event.FMLServerStartingEvent;
-import net.minecraftforge.fml.common.gameevent.TickEvent;
-import net.minecraftforge.server.permission.DefaultPermissionLevel;
-import net.minecraftforge.server.permission.PermissionAPI;
-
-import java.util.stream.Stream;
-
-public class Forge1122ServerSparkPlugin extends Forge1122SparkPlugin {
-
- public static Forge1122ServerSparkPlugin register(Forge1122SparkMod mod, FMLServerStartingEvent event) {
- Forge1122ServerSparkPlugin plugin = new Forge1122ServerSparkPlugin(mod, event.getServer());
- plugin.enable();
-
- // register commands & permissions
- event.registerServerCommand(plugin);
- PermissionAPI.registerNode("spark", DefaultPermissionLevel.OP, "Access to the spark command");
-
- return plugin;
- }
-
- private final MinecraftServer server;
-
- public Forge1122ServerSparkPlugin(Forge1122SparkMod mod, MinecraftServer server) {
- super(mod);
- this.server = server;
- }
-
- @Override
- public boolean hasPermission(ICommandSender sender, String permission) {
- if (sender instanceof EntityPlayer) {
- return PermissionAPI.hasPermission((EntityPlayer) sender, permission);
- } else {
- return true;
- }
- }
-
- @Override
- public Stream<Forge1122CommandSender> getCommandSenders() {
- return Stream.concat(
- this.server.getPlayerList().getPlayers().stream(),
- Stream.of(this.server)
- ).map(sender -> new Forge1122CommandSender(sender, this));
- }
-
- @Override
- public TickHook createTickHook() {
- return new Forge1122TickHook(TickEvent.Type.SERVER);
- }
-
- @Override
- public TickReporter createTickReporter() {
- return new Forge1122TickReporter(TickEvent.Type.SERVER);
- }
-
- @Override
- public PlatformInfo getPlatformInfo() {
- return new Forge1122PlatformInfo(PlatformInfo.Type.SERVER);
- }
-
- @Override
- public String getCommandName() {
- return "spark";
- }
-}
diff --git a/spark-forge1122/src/main/java/me/lucko/spark/forge/plugin/Forge1122SparkPlugin.java b/spark-forge1122/src/main/java/me/lucko/spark/forge/plugin/Forge1122SparkPlugin.java
deleted file mode 100644
index 727a2fd..0000000
--- a/spark-forge1122/src/main/java/me/lucko/spark/forge/plugin/Forge1122SparkPlugin.java
+++ /dev/null
@@ -1,153 +0,0 @@
-/*
- * This file is part of spark.
- *
- * Copyright (c) lucko (Luck) <luck@lucko.me>
- * Copyright (c) contributors
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-package me.lucko.spark.forge.plugin;
-
-import me.lucko.spark.common.SparkPlatform;
-import me.lucko.spark.common.SparkPlugin;
-import me.lucko.spark.common.sampler.ThreadDumper;
-import me.lucko.spark.forge.Forge1122CommandSender;
-import me.lucko.spark.forge.Forge1122SparkMod;
-
-import net.minecraft.command.ICommand;
-import net.minecraft.command.ICommandSender;
-import net.minecraft.server.MinecraftServer;
-import net.minecraft.util.math.BlockPos;
-
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
-
-import java.nio.file.Path;
-import java.util.Collections;
-import java.util.List;
-import java.util.concurrent.Executors;
-import java.util.concurrent.ScheduledExecutorService;
-import java.util.logging.Level;
-
-import javax.annotation.Nullable;
-
-public abstract class Forge1122SparkPlugin implements SparkPlugin, ICommand {
-
- private final Forge1122SparkMod mod;
- private final Logger logger;
- protected final ScheduledExecutorService scheduler;
- protected final SparkPlatform platform;
- protected final ThreadDumper.GameThread threadDumper = new ThreadDumper.GameThread();
-
- protected Forge1122SparkPlugin(Forge1122SparkMod mod) {
- this.mod = mod;
- this.logger = LogManager.getLogger("spark");
- this.scheduler = Executors.newSingleThreadScheduledExecutor(r -> {
- Thread thread = Executors.defaultThreadFactory().newThread(r);
- thread.setName("spark-forge-async-worker");
- thread.setDaemon(true);
- return thread;
- });
- this.platform = new SparkPlatform(this);
- }
-
- public void enable() {
- this.platform.enable();
- }
-
- public void disable() {
- this.platform.disable();
- this.scheduler.shutdown();
- }
-
- public abstract boolean hasPermission(ICommandSender sender, String permission);
-
- @Override
- public String getVersion() {
- return this.mod.getVersion();
- }
-
- @Override
- public Path getPluginDirectory() {
- return this.mod.getConfigDirectory();
- }
-
- @Override
- public void executeAsync(Runnable task) {
- this.scheduler.execute(task);
- }
-
- @Override
- public void log(Level level, String msg) {
- if (level == Level.INFO) {
- this.logger.info(msg);
- } else if (level == Level.WARNING) {
- this.logger.warn(msg);
- } else if (level == Level.SEVERE) {
- this.logger.error(msg);
- } else {
- throw new IllegalArgumentException(level.getName());
- }
- }
-
- @Override
- public ThreadDumper getDefaultThreadDumper() {
- return this.threadDumper.get();
- }
-
- // implement ICommand
-
- @Override
- public String getName() {
- return getCommandName();
- }
-
- @Override
- public String getUsage(ICommandSender iCommandSender) {
- return "/" + getCommandName();
- }
-
- @Override
- public List<String> getAliases() {
- return Collections.singletonList(getCommandName());
- }
-
- @Override
- public void execute(MinecraftServer server, ICommandSender sender, String[] args) {
- this.threadDumper.ensureSetup();
- this.platform.executeCommand(new Forge1122CommandSender(sender, this), args);
- }
-
- @Override
- public List<String> getTabCompletions(MinecraftServer server, ICommandSender sender, String[] args, @Nullable BlockPos blockPos) {
- return this.platform.tabCompleteCommand(new Forge1122CommandSender(sender, this), args);
- }
-
- @Override
- public boolean checkPermission(MinecraftServer minecraftServer, ICommandSender sender) {
- return this.platform.hasPermissionForAnyCommand(new Forge1122CommandSender(sender, this));
- }
-
- @Override
- public boolean isUsernameIndex(String[] strings, int i) {
- return false;
- }
-
- @Override
- public int compareTo(ICommand o) {
- return getCommandName().compareTo(o.getName());
- }
-
-}
diff --git a/spark-forge1122/src/main/resources/mcmod.info b/spark-forge1122/src/main/resources/mcmod.info
deleted file mode 100644
index 1c2c241..0000000
--- a/spark-forge1122/src/main/resources/mcmod.info
+++ /dev/null
@@ -1,7 +0,0 @@
-[{
- "modid": "spark",
- "name": "spark",
- "description": "${pluginDescription}",
- "version": "${pluginVersion}",
- "authors": ["Luck"]
-}]
diff --git a/spark-forge1710/src/main/java/me/lucko/spark/forge/Forge1710PlatformInfo.java b/spark-forge1710/src/main/java/me/lucko/spark/forge/Forge1710PlatformInfo.java
index 7d7fea5..d9c8351 100644
--- a/spark-forge1710/src/main/java/me/lucko/spark/forge/Forge1710PlatformInfo.java
+++ b/spark-forge1710/src/main/java/me/lucko/spark/forge/Forge1710PlatformInfo.java
@@ -20,10 +20,10 @@
package me.lucko.spark.forge;
-import me.lucko.spark.common.platform.AbstractPlatformInfo;
+import me.lucko.spark.common.platform.PlatformInfo;
import net.minecraftforge.common.ForgeVersion;
-public class Forge1710PlatformInfo extends AbstractPlatformInfo {
+public class Forge1710PlatformInfo implements PlatformInfo {
private final Type type;
public Forge1710PlatformInfo(Type type) {
diff --git a/spark-forge1710/src/main/java/me/lucko/spark/forge/Forge1710TickHook.java b/spark-forge1710/src/main/java/me/lucko/spark/forge/Forge1710TickHook.java
index 3914224..44cb11a 100644
--- a/spark-forge1710/src/main/java/me/lucko/spark/forge/Forge1710TickHook.java
+++ b/spark-forge1710/src/main/java/me/lucko/spark/forge/Forge1710TickHook.java
@@ -20,11 +20,11 @@
package me.lucko.spark.forge;
+import cpw.mods.fml.common.FMLCommonHandler;
import cpw.mods.fml.common.eventhandler.SubscribeEvent;
import cpw.mods.fml.common.gameevent.TickEvent;
import me.lucko.spark.common.tick.AbstractTickHook;
import me.lucko.spark.common.tick.TickHook;
-import net.minecraftforge.common.MinecraftForge;
public class Forge1710TickHook extends AbstractTickHook implements TickHook {
private final TickEvent.Type type;
@@ -48,12 +48,12 @@ public class Forge1710TickHook extends AbstractTickHook implements TickHook {
@Override
public void start() {
- MinecraftForge.EVENT_BUS.register(this);
+ FMLCommonHandler.instance().bus().register(this);
}
@Override
public void close() {
- MinecraftForge.EVENT_BUS.unregister(this);
+ FMLCommonHandler.instance().bus().unregister(this);
}
}
diff --git a/spark-forge1710/src/main/java/me/lucko/spark/forge/Forge1710TickReporter.java b/spark-forge1710/src/main/java/me/lucko/spark/forge/Forge1710TickReporter.java
index f507637..d46c4d6 100644
--- a/spark-forge1710/src/main/java/me/lucko/spark/forge/Forge1710TickReporter.java
+++ b/spark-forge1710/src/main/java/me/lucko/spark/forge/Forge1710TickReporter.java
@@ -20,11 +20,11 @@
package me.lucko.spark.forge;
+import cpw.mods.fml.common.FMLCommonHandler;
import cpw.mods.fml.common.eventhandler.SubscribeEvent;
import cpw.mods.fml.common.gameevent.TickEvent;
import me.lucko.spark.common.tick.SimpleTickReporter;
import me.lucko.spark.common.tick.TickReporter;
-import net.minecraftforge.common.MinecraftForge;
public class Forge1710TickReporter extends SimpleTickReporter implements TickReporter {
private final TickEvent.Type type;
@@ -53,12 +53,12 @@ public class Forge1710TickReporter extends SimpleTickReporter implements TickRep
@Override
public void start() {
- MinecraftForge.EVENT_BUS.register(this);
+ FMLCommonHandler.instance().bus().register(this);
}
@Override
public void close() {
- MinecraftForge.EVENT_BUS.unregister(this);
+ FMLCommonHandler.instance().bus().unregister(this);
super.close();
}
diff --git a/spark-forge1710/src/main/java/me/lucko/spark/forge/plugin/Forge1710ServerSparkPlugin.java b/spark-forge1710/src/main/java/me/lucko/spark/forge/plugin/Forge1710ServerSparkPlugin.java
index 1e99a3f..303739f 100644
--- a/spark-forge1710/src/main/java/me/lucko/spark/forge/plugin/Forge1710ServerSparkPlugin.java
+++ b/spark-forge1710/src/main/java/me/lucko/spark/forge/plugin/Forge1710ServerSparkPlugin.java
@@ -28,6 +28,7 @@ import me.lucko.spark.common.tick.TickReporter;
import me.lucko.spark.forge.*;
import net.minecraft.command.ICommandSender;
import net.minecraft.entity.player.EntityPlayer;
+import net.minecraft.entity.player.EntityPlayerMP;
import net.minecraft.server.MinecraftServer;
import java.util.List;
@@ -54,8 +55,9 @@ public class Forge1710ServerSparkPlugin extends Forge1710SparkPlugin {
@Override
public boolean hasPermission(ICommandSender sender, String permission) {
- if (sender instanceof EntityPlayer) {
- return isOp((EntityPlayer) sender);
+ if (sender instanceof EntityPlayerMP) {
+ EntityPlayerMP player = (EntityPlayerMP) sender;
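+ // also allow the server owner (set for integrated/LAN servers), who may not be op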
+ return isOp(player) || player.mcServer.getServerOwner().equals(player.getGameProfile().getName());
} else {
return true;
}
diff --git a/spark-minestom/build.gradle b/spark-minestom/build.gradle
new file mode 100644
index 0000000..26cdc2c
--- /dev/null
+++ b/spark-minestom/build.gradle
@@ -0,0 +1,50 @@
+plugins {
+ id 'com.github.johnrengelman.shadow' version '7.0.0'
+}
+
+tasks.withType(JavaCompile) {
+ // override, compile targeting J17
+ options.release = 17
+}
+
+dependencies {
+ implementation project(':spark-common')
+ compileOnly 'com.github.Minestom:Minestom:367c389bc6'
+ implementation 'com.google.guava:guava:19.0'
+}
+
+processResources {
+ from(sourceSets.main.resources.srcDirs) {
+ expand (
+ 'pluginVersion': project.pluginVersion,
+ 'pluginDescription': project.pluginDescription
+ )
+ include 'extension.json'
+ }
+}
+
+shadowJar {
+ archiveName = "spark-${project.pluginVersion}-minestom.jar"
+
+ dependencies {
+ exclude(dependency('net.kyori:^(?!adventure-text-feature-pagination).+$'))
+ }
+
+ relocate 'okio', 'me.lucko.spark.lib.okio'
+ relocate 'okhttp3', 'me.lucko.spark.lib.okhttp3'
+ relocate 'net.kyori.adventure.text.feature.pagination', 'me.lucko.spark.lib.adventure.pagination'
+ relocate 'net.bytebuddy', 'me.lucko.spark.lib.bytebuddy'
+ relocate 'org.tukaani.xz', 'me.lucko.spark.lib.xz'
+ relocate 'com.google.protobuf', 'me.lucko.spark.lib.protobuf'
+ relocate 'org.objectweb.asm', 'me.lucko.spark.lib.asm'
+ relocate 'one.profiler', 'me.lucko.spark.lib.asyncprofiler'
+
+ exclude 'module-info.class'
+ exclude 'META-INF/maven/**'
+ exclude 'META-INF/proguard/**'
+}
+
+artifacts {
+ archives shadowJar
+ shadow shadowJar
+}
diff --git a/spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomClassSourceLookup.java b/spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomClassSourceLookup.java
new file mode 100644
index 0000000..252060e
--- /dev/null
+++ b/spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomClassSourceLookup.java
@@ -0,0 +1,49 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.minestom;
+
+import me.lucko.spark.common.util.ClassSourceLookup;
+
+import net.minestom.server.MinecraftServer;
+import net.minestom.server.extensions.Extension;
+import net.minestom.server.extensions.ExtensionClassLoader;
+
+import java.util.HashMap;
+import java.util.Map;
+
+public class MinestomClassSourceLookup extends ClassSourceLookup.ByClassLoader {
+ private final Map<ClassLoader, String> classLoaderToExtensions;
+
+ public MinestomClassSourceLookup() {
+ this.classLoaderToExtensions = new HashMap<>();
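+ // map each extension's class loader to the extension name for quick lookup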
+ for (Extension extension : MinecraftServer.getExtensionManager().getExtensions()) {
+ this.classLoaderToExtensions.put(extension.getClass().getClassLoader(), extension.getOrigin().getName());
+ }
+ }
+
+ @Override
+ public String identify(ClassLoader loader) {
+ if (loader instanceof ExtensionClassLoader) {
+ return this.classLoaderToExtensions.get(loader);
+ }
+ return null;
+ }
+}
diff --git a/spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomCommandSender.java b/spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomCommandSender.java
new file mode 100644
index 0000000..da46224
--- /dev/null
+++ b/spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomCommandSender.java
@@ -0,0 +1,65 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.minestom;
+
+import me.lucko.spark.common.command.sender.AbstractCommandSender;
+
+import net.kyori.adventure.text.Component;
+import net.minestom.server.command.CommandSender;
+import net.minestom.server.command.ConsoleSender;
+import net.minestom.server.entity.Player;
+
+import java.util.UUID;
+
+public class MinestomCommandSender extends AbstractCommandSender<CommandSender> {
+ public MinestomCommandSender(CommandSender delegate) {
+ super(delegate);
+ }
+
+ @Override
+ public String getName() {
+ if (this.delegate instanceof Player player) {
+ return player.getUsername();
+ } else if (this.delegate instanceof ConsoleSender) {
+ return "Console";
+ } else {
+ return "unknown:" + this.delegate.getClass().getSimpleName();
+ }
+ }
+
+ @Override
+ public UUID getUniqueId() {
+ if (super.delegate instanceof Player player) {
+ return player.getUuid();
+ }
+ return null;
+ }
+
+ @Override
+ public void sendMessage(Component message) {
+ this.delegate.sendMessage(message);
+ }
+
+ @Override
+ public boolean hasPermission(String permission) {
+ return this.delegate.hasPermission(permission);
+ }
+}
diff --git a/spark-forge1122/src/main/java/me/lucko/spark/forge/Forge1122PlatformInfo.java b/spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomPlatformInfo.java
index 379c0e3..add258a 100644
--- a/spark-forge1122/src/main/java/me/lucko/spark/forge/Forge1122PlatformInfo.java
+++ b/spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomPlatformInfo.java
@@ -18,36 +18,30 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-package me.lucko.spark.forge;
+package me.lucko.spark.minestom;
-import me.lucko.spark.common.platform.AbstractPlatformInfo;
+import me.lucko.spark.common.platform.PlatformInfo;
-import net.minecraftforge.common.ForgeVersion;
-
-public class Forge1122PlatformInfo extends AbstractPlatformInfo {
- private final Type type;
-
- public Forge1122PlatformInfo(Type type) {
- this.type = type;
- }
+import net.minestom.server.MinecraftServer;
+public class MinestomPlatformInfo implements PlatformInfo {
@Override
public Type getType() {
- return this.type;
+ return Type.SERVER;
}
@Override
public String getName() {
- return "Forge";
+ return "Minestom";
}
@Override
public String getVersion() {
- return ForgeVersion.getVersion();
+ return MinecraftServer.VERSION_NAME + "-" + MinecraftServer.getBrandName();
}
@Override
public String getMinecraftVersion() {
- return ForgeVersion.mcVersion;
+ return MinecraftServer.VERSION_NAME;
}
}
diff --git a/spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomPlayerPingProvider.java b/spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomPlayerPingProvider.java
new file mode 100644
index 0000000..0b50c0a
--- /dev/null
+++ b/spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomPlayerPingProvider.java
@@ -0,0 +1,41 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.minestom;
+
+import com.google.common.collect.ImmutableMap;
+
+import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
+
+import net.minestom.server.MinecraftServer;
+import net.minestom.server.entity.Player;
+
+import java.util.Map;
+
+public class MinestomPlayerPingProvider implements PlayerPingProvider {
+ @Override
+ public Map<String, Integer> poll() {
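+ // snapshot each online player's latency, keyed by username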
+ ImmutableMap.Builder<String, Integer> builder = ImmutableMap.builder();
+ for (Player player : MinecraftServer.getConnectionManager().getOnlinePlayers()) {
+ builder.put(player.getUsername(), player.getLatency());
+ }
+ return builder.build();
+ }
+}
diff --git a/spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomSparkPlugin.java b/spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomSparkPlugin.java
new file mode 100644
index 0000000..2b43cae
--- /dev/null
+++ b/spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomSparkPlugin.java
@@ -0,0 +1,183 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.minestom;
+
+import me.lucko.spark.common.SparkPlatform;
+import me.lucko.spark.common.SparkPlugin;
+import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
+import me.lucko.spark.common.platform.PlatformInfo;
+import me.lucko.spark.common.tick.TickHook;
+import me.lucko.spark.common.tick.TickReporter;
+import me.lucko.spark.common.util.ClassSourceLookup;
+
+import net.minestom.server.MinecraftServer;
+import net.minestom.server.command.CommandSender;
+import net.minestom.server.command.builder.Command;
+import net.minestom.server.command.builder.CommandContext;
+import net.minestom.server.command.builder.CommandExecutor;
+import net.minestom.server.command.builder.arguments.ArgumentStringArray;
+import net.minestom.server.command.builder.arguments.ArgumentType;
+import net.minestom.server.command.builder.suggestion.Suggestion;
+import net.minestom.server.command.builder.suggestion.SuggestionCallback;
+import net.minestom.server.command.builder.suggestion.SuggestionEntry;
+import net.minestom.server.extensions.Extension;
+import net.minestom.server.timer.ExecutionType;
+
+import org.jetbrains.annotations.NotNull;
+
+import java.nio.file.Path;
+import java.util.Arrays;
+import java.util.logging.Level;
+import java.util.stream.Stream;
+
+public class MinestomSparkPlugin extends Extension implements SparkPlugin {
+ private SparkPlatform platform;
+ private MinestomSparkCommand command;
+
+ @Override
+ public void initialize() {
+ this.platform = new SparkPlatform(this);
+ this.platform.enable();
+ this.command = new MinestomSparkCommand(this.platform);
+ MinecraftServer.getCommandManager().register(this.command);
+ }
+
+ @Override
+ public void terminate() {
+ this.platform.disable();
+ MinecraftServer.getCommandManager().unregister(this.command);
+ }
+
+ @Override
+ public String getVersion() {
+ return getOrigin().getVersion();
+ }
+
+ @Override
+ public Path getPluginDirectory() {
+ return getDataDirectory();
+ }
+
+ @Override
+ public String getCommandName() {
+ return "spark";
+ }
+
+ @Override
+ public Stream<MinestomCommandSender> getCommandSenders() {
+ return Stream.concat(
+ MinecraftServer.getConnectionManager().getOnlinePlayers().stream(),
+ Stream.of(MinecraftServer.getCommandManager().getConsoleSender())
+ ).map(MinestomCommandSender::new);
+ }
+
+ @Override
+ public void executeAsync(Runnable task) {
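+ // submit to the Minestom scheduler, starting next tick with the ASYNC execution type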
+ MinecraftServer.getSchedulerManager().scheduleNextTick(task, ExecutionType.ASYNC);
+ }
+
+ @Override
+ public void log(Level level, String msg) {
+ if (level == Level.INFO) {
+ this.getLogger().info(msg);
+ } else if (level == Level.WARNING) {
+ this.getLogger().warn(msg);
+ } else if (level == Level.SEVERE) {
+ this.getLogger().error(msg);
+ } else {
+ throw new IllegalArgumentException(level.getName());
+ }
+ }
+
+ @Override
+ public PlatformInfo getPlatformInfo() {
+ return new MinestomPlatformInfo();
+ }
+
+ @Override
+ public ClassSourceLookup createClassSourceLookup() {
+ return new MinestomClassSourceLookup();
+ }
+
+ @Override
+ public PlayerPingProvider createPlayerPingProvider() {
+ return new MinestomPlayerPingProvider();
+ }
+
+ @Override
+ public TickReporter createTickReporter() {
+ return new MinestomTickReporter();
+ }
+
+ @Override
+ public TickHook createTickHook() {
+ return new MinestomTickHook();
+ }
+
+ private static final class MinestomSparkCommand extends Command implements CommandExecutor, SuggestionCallback {
+ private final SparkPlatform platform;
+
+ public MinestomSparkCommand(SparkPlatform platform) {
+ super("spark");
+ this.platform = platform;
+
+ ArgumentStringArray arrayArgument = ArgumentType.StringArray("args");
+ arrayArgument.setSuggestionCallback(this);
+
+ addSyntax(this, arrayArgument);
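+ // a bare "/spark" with no arguments falls through to the default executor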
+ setDefaultExecutor((sender, context) -> platform.executeCommand(new MinestomCommandSender(sender), new String[0]));
+ }
+
+ // execute
+ @Override
+ public void apply(@NotNull CommandSender sender, @NotNull CommandContext context) {
+ String[] args = processArgs(context, false);
+ if (args == null) {
+ return;
+ }
+
+ this.platform.executeCommand(new MinestomCommandSender(sender), args);
+ }
+
+ // tab complete
+ @Override
+ public void apply(@NotNull CommandSender sender, @NotNull CommandContext context, @NotNull Suggestion suggestion) {
+ String[] args = processArgs(context, true);
+ if (args == null) {
+ return;
+ }
+
+ Iterable<String> suggestionEntries = this.platform.tabCompleteCommand(new MinestomCommandSender(sender), args);
+ for (String suggestionEntry : suggestionEntries) {
+ suggestion.addEntry(new SuggestionEntry(suggestionEntry));
+ }
+ }
+
+ private static String[] processArgs(CommandContext context, boolean tabComplete) {
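+ // when tab-completing, keep trailing empty strings (limit -1) so a trailing space starts a new argument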
+ String[] split = context.getInput().split(" ", tabComplete ? -1 : 0);
+ if (split.length == 0 || (!split[0].equals("/spark") && !split[0].equals("spark"))) {
+ return null;
+ }
+
+ return Arrays.copyOfRange(split, 1, split.length);
+ }
+ }
+}
diff --git a/spark-forge1122/src/main/java/me/lucko/spark/forge/Forge1122TickHook.java b/spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomTickHook.java
index 7ff9893..44c1c2d 100644
--- a/spark-forge1122/src/main/java/me/lucko/spark/forge/Forge1122TickHook.java
+++ b/spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomTickHook.java
@@ -18,43 +18,30 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-package me.lucko.spark.forge;
+package me.lucko.spark.minestom;
import me.lucko.spark.common.tick.AbstractTickHook;
-import me.lucko.spark.common.tick.TickHook;
-import net.minecraftforge.common.MinecraftForge;
-import net.minecraftforge.fml.common.eventhandler.SubscribeEvent;
-import net.minecraftforge.fml.common.gameevent.TickEvent;
+import net.minestom.server.MinecraftServer;
+import net.minestom.server.timer.Task;
+import net.minestom.server.timer.TaskSchedule;
-public class Forge1122TickHook extends AbstractTickHook implements TickHook {
- private final TickEvent.Type type;
-
- public Forge1122TickHook(TickEvent.Type type) {
- this.type = type;
- }
-
- @SubscribeEvent
- public void onTick(TickEvent e) {
- if (e.phase != TickEvent.Phase.START) {
- return;
- }
-
- if (e.type != this.type) {
- return;
- }
-
- onTick();
- }
+public class MinestomTickHook extends AbstractTickHook {
+ private Task task;
@Override
public void start() {
- MinecraftForge.EVENT_BUS.register(this);
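+ // poll onTick() once per server tick via a repeating scheduler task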
+ this.task = MinecraftServer.getSchedulerManager()
+ .buildTask(this::onTick)
+ .delay(TaskSchedule.tick(1))
+ .repeat(TaskSchedule.tick(1))
+ .schedule();
}
@Override
public void close() {
- MinecraftForge.EVENT_BUS.unregister(this);
+ if (this.task != null) {
+ this.task.cancel();
+ }
}
-
}
diff --git a/spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomTickReporter.java b/spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomTickReporter.java
new file mode 100644
index 0000000..ae25f92
--- /dev/null
+++ b/spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomTickReporter.java
@@ -0,0 +1,48 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.minestom;
+
+import me.lucko.spark.common.tick.AbstractTickReporter;
+
+import net.minestom.server.MinecraftServer;
+import net.minestom.server.event.Event;
+import net.minestom.server.event.EventNode;
+import net.minestom.server.event.server.ServerTickMonitorEvent;
+
+import java.util.UUID;
+
+public class MinestomTickReporter extends AbstractTickReporter {
+ private final EventNode<Event> node = EventNode.all("sparkTickReporter-" + UUID.randomUUID());
+
+ public MinestomTickReporter() {
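+ // ServerTickMonitorEvent fires every tick with the measured tick duration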
+ this.node.addListener(ServerTickMonitorEvent.class, event -> onTick(event.getTickMonitor().getTickTime()));
+ }
+
+ @Override
+ public void start() {
+ MinecraftServer.getGlobalEventHandler().addChild(this.node);
+ }
+
+ @Override
+ public void close() {
+ MinecraftServer.getGlobalEventHandler().removeChild(this.node);
+ }
+}
diff --git a/spark-minestom/src/main/resources/extension.json b/spark-minestom/src/main/resources/extension.json
new file mode 100644
index 0000000..bea56bb
--- /dev/null
+++ b/spark-minestom/src/main/resources/extension.json
@@ -0,0 +1,5 @@
+{
+ "entrypoint": "me.lucko.spark.minestom.MinestomSparkPlugin",
+ "name": "spark",
+ "version": "${pluginVersion}"
+} \ No newline at end of file
diff --git a/spark-nukkit/build.gradle b/spark-nukkit/build.gradle
index ff36091..2e1ad55 100644
--- a/spark-nukkit/build.gradle
+++ b/spark-nukkit/build.gradle
@@ -23,7 +23,7 @@ processResources {
}
shadowJar {
- archiveName = 'spark-nukkit.jar'
+ archiveName = "spark-${project.pluginVersion}-nukkit.jar"
relocate 'okio', 'me.lucko.spark.lib.okio'
relocate 'okhttp3', 'me.lucko.spark.lib.okhttp3'
diff --git a/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitPlatformInfo.java b/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitPlatformInfo.java
index 9ef5ba2..ab7a40b 100644
--- a/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitPlatformInfo.java
+++ b/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitPlatformInfo.java
@@ -20,11 +20,11 @@
package me.lucko.spark.nukkit;
-import me.lucko.spark.common.platform.AbstractPlatformInfo;
+import me.lucko.spark.common.platform.PlatformInfo;
import cn.nukkit.Server;
-public class NukkitPlatformInfo extends AbstractPlatformInfo {
+public class NukkitPlatformInfo implements PlatformInfo {
private final Server server;
public NukkitPlatformInfo(Server server) {
diff --git a/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitPlayerPingProvider.java b/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitPlayerPingProvider.java
new file mode 100644
index 0000000..fc25d7c
--- /dev/null
+++ b/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitPlayerPingProvider.java
@@ -0,0 +1,47 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.nukkit;
+
+import com.google.common.collect.ImmutableMap;
+
+import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
+
+import cn.nukkit.Player;
+import cn.nukkit.Server;
+
+import java.util.Map;
+
+public class NukkitPlayerPingProvider implements PlayerPingProvider {
+ private final Server server;
+
+ public NukkitPlayerPingProvider(Server server) {
+ this.server = server;
+ }
+
+ @Override
+ public Map<String, Integer> poll() {
+ ImmutableMap.Builder<String, Integer> builder = ImmutableMap.builder();
+ for (Player player : this.server.getOnlinePlayers().values()) {
+ builder.put(player.getName(), player.getPing());
+ }
+ return builder.build();
+ }
+}
diff --git a/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitSparkPlugin.java b/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitSparkPlugin.java
index c9e099d..18132c3 100644
--- a/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitSparkPlugin.java
+++ b/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitSparkPlugin.java
@@ -23,6 +23,7 @@ package me.lucko.spark.nukkit;
import me.lucko.spark.api.Spark;
import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.SparkPlugin;
+import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
import me.lucko.spark.common.platform.PlatformInfo;
import me.lucko.spark.common.util.ClassSourceLookup;
@@ -108,6 +109,11 @@ public class NukkitSparkPlugin extends PluginBase implements SparkPlugin {
}
@Override
+ public PlayerPingProvider createPlayerPingProvider() {
+ return new NukkitPlayerPingProvider(getServer());
+ }
+
+ @Override
public PlatformInfo getPlatformInfo() {
return new NukkitPlatformInfo(getServer());
}
diff --git a/spark-sponge/build.gradle b/spark-sponge/build.gradle
deleted file mode 100644
index b81008c..0000000
--- a/spark-sponge/build.gradle
+++ /dev/null
@@ -1,19 +0,0 @@
-plugins {
- id 'net.kyori.blossom' version '1.3.0'
-}
-
-dependencies {
- implementation project(':spark-common')
- compileOnly 'org.spongepowered:spongeapi:7.3.0'
- annotationProcessor 'org.spongepowered:spongeapi:7.3.0'
-}
-
-repositories {
- maven { url "https://repo.spongepowered.org/maven" }
-}
-
-blossom {
- replaceTokenIn('src/main/java/me/lucko/spark/sponge/SpongeSparkPlugin.java')
- replaceToken '@version@', project.pluginVersion
- replaceToken '@desc@', project.pluginDescription
-}
diff --git a/spark-universal/build.gradle b/spark-sponge7/build.gradle
index 26dbd30..b6f8273 100644
--- a/spark-universal/build.gradle
+++ b/spark-sponge7/build.gradle
@@ -1,16 +1,26 @@
plugins {
+ id 'net.kyori.blossom' version '1.3.0'
id 'com.github.johnrengelman.shadow' version '7.0.0'
}
dependencies {
implementation project(':spark-common')
- implementation project(':spark-bukkit')
- implementation project(':spark-bungeecord')
- implementation project(':spark-sponge')
+ compileOnly 'org.spongepowered:spongeapi:7.3.0'
+ annotationProcessor 'org.spongepowered:spongeapi:7.3.0'
+}
+
+repositories {
+ maven { url "https://repo.spongepowered.org/maven" }
+}
+
+blossom {
+ replaceTokenIn('src/main/java/me/lucko/spark/sponge/Sponge7SparkPlugin.java')
+ replaceToken '@version@', project.pluginVersion
+ replaceToken '@desc@', project.pluginDescription
}
shadowJar {
- archiveName = 'spark.jar'
+ archiveFileName = "spark-${project.pluginVersion}-sponge7.jar"
relocate 'okio', 'me.lucko.spark.lib.okio'
relocate 'okhttp3', 'me.lucko.spark.lib.okhttp3'
@@ -25,10 +35,10 @@ shadowJar {
exclude 'module-info.class'
exclude 'META-INF/maven/**'
exclude 'META-INF/proguard/**'
+ exclude 'META-INF/versions/**'
}
artifacts {
archives shadowJar
shadow shadowJar
}
-
diff --git a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7ClassSourceLookup.java b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7ClassSourceLookup.java
new file mode 100644
index 0000000..90f3b8f
--- /dev/null
+++ b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7ClassSourceLookup.java
@@ -0,0 +1,44 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.sponge;
+
+import me.lucko.spark.common.util.ClassSourceLookup;
+
+import org.spongepowered.api.Game;
+
+import java.nio.file.Path;
+
+public class Sponge7ClassSourceLookup extends ClassSourceLookup.ByCodeSource {
+ private final Path modsDirectory;
+
+ public Sponge7ClassSourceLookup(Game game) {
+ this.modsDirectory = game.getGameDirectory().resolve("mods").toAbsolutePath().normalize();
+ }
+
+ @Override
+ public String identifyFile(Path path) {
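+ // only jars inside the mods directory can be attributed to a mod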
+ if (!path.startsWith(this.modsDirectory)) {
+ return null;
+ }
+
+ return super.identifyFileName(this.modsDirectory.relativize(path).toString());
+ }
+}
diff --git a/spark-sponge/src/main/java/me/lucko/spark/sponge/SpongeCommandSender.java b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7CommandSender.java
index b2c4b8c..b0cfd3c 100644
--- a/spark-sponge/src/main/java/me/lucko/spark/sponge/SpongeCommandSender.java
+++ b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7CommandSender.java
@@ -31,8 +31,8 @@ import org.spongepowered.api.text.serializer.TextSerializers;
import java.util.UUID;
-public class SpongeCommandSender extends AbstractCommandSender<CommandSource> {
- public SpongeCommandSender(CommandSource source) {
+public class Sponge7CommandSender extends AbstractCommandSender<CommandSource> {
+ public Sponge7CommandSender(CommandSource source) {
super(source);
}
diff --git a/spark-sponge/src/main/java/me/lucko/spark/sponge/SpongePlatformInfo.java b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7PlatformInfo.java
index 969a2ee..91d7ea2 100644
--- a/spark-sponge/src/main/java/me/lucko/spark/sponge/SpongePlatformInfo.java
+++ b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7PlatformInfo.java
@@ -20,15 +20,15 @@
package me.lucko.spark.sponge;
-import me.lucko.spark.common.platform.AbstractPlatformInfo;
+import me.lucko.spark.common.platform.PlatformInfo;
import org.spongepowered.api.Game;
import org.spongepowered.api.Platform;
-public class SpongePlatformInfo extends AbstractPlatformInfo {
+public class Sponge7PlatformInfo implements PlatformInfo {
private final Game game;
- public SpongePlatformInfo(Game game) {
+ public Sponge7PlatformInfo(Game game) {
this.game = game;
}
diff --git a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7PlayerPingProvider.java b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7PlayerPingProvider.java
new file mode 100644
index 0000000..8f4c15f
--- /dev/null
+++ b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7PlayerPingProvider.java
@@ -0,0 +1,47 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.sponge;
+
+import com.google.common.collect.ImmutableMap;
+
+import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
+
+import org.spongepowered.api.Server;
+import org.spongepowered.api.entity.living.player.Player;
+
+import java.util.Map;
+
+public class Sponge7PlayerPingProvider implements PlayerPingProvider {
+ private final Server server;
+
+ public Sponge7PlayerPingProvider(Server server) {
+ this.server = server;
+ }
+
+ @Override
+ public Map<String, Integer> poll() {
+ ImmutableMap.Builder<String, Integer> builder = ImmutableMap.builder();
+ for (Player player : this.server.getOnlinePlayers()) {
+ builder.put(player.getName(), player.getConnection().getLatency());
+ }
+ return builder.build();
+ }
+}
diff --git a/spark-sponge/src/main/java/me/lucko/spark/sponge/SpongeSparkPlugin.java b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7SparkPlugin.java
index 45d8d80..670e0c5 100644
--- a/spark-sponge/src/main/java/me/lucko/spark/sponge/SpongeSparkPlugin.java
+++ b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7SparkPlugin.java
@@ -25,9 +25,11 @@ import com.google.inject.Inject;
import me.lucko.spark.api.Spark;
import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.SparkPlugin;
+import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
import me.lucko.spark.common.platform.PlatformInfo;
import me.lucko.spark.common.sampler.ThreadDumper;
import me.lucko.spark.common.tick.TickHook;
+import me.lucko.spark.common.util.ClassSourceLookup;
import org.slf4j.Logger;
import org.spongepowered.api.Game;
@@ -38,7 +40,6 @@ import org.spongepowered.api.config.ConfigDir;
import org.spongepowered.api.event.Listener;
import org.spongepowered.api.event.game.state.GameStartedServerEvent;
import org.spongepowered.api.event.game.state.GameStoppingServerEvent;
-import org.spongepowered.api.plugin.Dependency;
import org.spongepowered.api.plugin.Plugin;
import org.spongepowered.api.plugin.PluginContainer;
import org.spongepowered.api.scheduler.AsynchronousExecutor;
@@ -60,13 +61,9 @@ import javax.annotation.Nullable;
name = "spark",
version = "@version@",
description = "@desc@",
- authors = {"Luck"},
- dependencies = {
- // explicit dependency on spongeapi with no defined API version
- @Dependency(id = "spongeapi")
- }
+ authors = {"Luck"}
)
-public class SpongeSparkPlugin implements SparkPlugin {
+public class Sponge7SparkPlugin implements SparkPlugin {
private final PluginContainer pluginContainer;
private final Logger logger;
@@ -78,7 +75,7 @@ public class SpongeSparkPlugin implements SparkPlugin {
private final ThreadDumper.GameThread threadDumper = new ThreadDumper.GameThread();
@Inject
- public SpongeSparkPlugin(PluginContainer pluginContainer, Logger logger, Game game, @ConfigDir(sharedRoot = false) Path configDirectory, @AsynchronousExecutor SpongeExecutorService asyncExecutor) {
+ public Sponge7SparkPlugin(PluginContainer pluginContainer, Logger logger, Game game, @ConfigDir(sharedRoot = false) Path configDirectory, @AsynchronousExecutor SpongeExecutorService asyncExecutor) {
this.pluginContainer = pluginContainer;
this.logger = logger;
this.game = game;
@@ -100,7 +97,7 @@ public class SpongeSparkPlugin implements SparkPlugin {
@Override
public String getVersion() {
- return SpongeSparkPlugin.class.getAnnotation(Plugin.class).version();
+ return Sponge7SparkPlugin.class.getAnnotation(Plugin.class).version();
}
@Override
@@ -114,11 +111,11 @@ public class SpongeSparkPlugin implements SparkPlugin {
}
@Override
- public Stream<SpongeCommandSender> getCommandSenders() {
+ public Stream<Sponge7CommandSender> getCommandSenders() {
return Stream.concat(
this.game.getServer().getOnlinePlayers().stream(),
Stream.of(this.game.getServer().getConsole())
- ).map(SpongeCommandSender::new);
+ ).map(Sponge7CommandSender::new);
}
@Override
@@ -146,12 +143,26 @@ public class SpongeSparkPlugin implements SparkPlugin {
@Override
public TickHook createTickHook() {
- return new SpongeTickHook(this);
+ return new Sponge7TickHook(this);
+ }
+
+ @Override
+ public ClassSourceLookup createClassSourceLookup() {
+ return new Sponge7ClassSourceLookup(this.game);
+ }
+
+ @Override
+ public PlayerPingProvider createPlayerPingProvider() {
+ if (this.game.isServerAvailable()) {
+ return new Sponge7PlayerPingProvider(this.game.getServer());
+ } else {
+ return null;
+ }
}
@Override
public PlatformInfo getPlatformInfo() {
- return new SpongePlatformInfo(this.game);
+ return new Sponge7PlatformInfo(this.game);
}
@Override
@@ -160,27 +171,27 @@ public class SpongeSparkPlugin implements SparkPlugin {
}
private static final class SparkCommand implements CommandCallable {
- private final SpongeSparkPlugin plugin;
+ private final Sponge7SparkPlugin plugin;
- private SparkCommand(SpongeSparkPlugin plugin) {
+ private SparkCommand(Sponge7SparkPlugin plugin) {
this.plugin = plugin;
}
@Override
public CommandResult process(CommandSource source, String arguments) {
this.plugin.threadDumper.ensureSetup();
- this.plugin.platform.executeCommand(new SpongeCommandSender(source), arguments.split(" "));
+ this.plugin.platform.executeCommand(new Sponge7CommandSender(source), arguments.split(" "));
return CommandResult.empty();
}
@Override
public List<String> getSuggestions(CommandSource source, String arguments, @Nullable Location<World> targetPosition) {
- return this.plugin.platform.tabCompleteCommand(new SpongeCommandSender(source), arguments.split(" "));
+ return this.plugin.platform.tabCompleteCommand(new Sponge7CommandSender(source), arguments.split(" "));
}
@Override
public boolean testPermission(CommandSource source) {
- return this.plugin.platform.hasPermissionForAnyCommand(new SpongeCommandSender(source));
+ return this.plugin.platform.hasPermissionForAnyCommand(new Sponge7CommandSender(source));
}
@Override
diff --git a/spark-sponge/src/main/java/me/lucko/spark/sponge/SpongeTickHook.java b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7TickHook.java
index b6903de..2618522 100644
--- a/spark-sponge/src/main/java/me/lucko/spark/sponge/SpongeTickHook.java
+++ b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7TickHook.java
@@ -25,11 +25,11 @@ import me.lucko.spark.common.tick.TickHook;
import org.spongepowered.api.scheduler.Task;
-public class SpongeTickHook extends AbstractTickHook implements TickHook, Runnable {
- private final SpongeSparkPlugin plugin;
+public class Sponge7TickHook extends AbstractTickHook implements TickHook, Runnable {
+ private final Sponge7SparkPlugin plugin;
private Task task;
- public SpongeTickHook(SpongeSparkPlugin plugin) {
+ public Sponge7TickHook(Sponge7SparkPlugin plugin) {
this.plugin = plugin;
}
diff --git a/spark-sponge8/build.gradle b/spark-sponge8/build.gradle
index 45f17b3..314ab18 100644
--- a/spark-sponge8/build.gradle
+++ b/spark-sponge8/build.gradle
@@ -22,7 +22,7 @@ processResources {
}
shadowJar {
- archiveFileName = 'spark-sponge8.jar'
+ archiveFileName = "spark-${project.pluginVersion}-sponge8.jar"
dependencies {
exclude(dependency('net.kyori:^(?!adventure-text-feature-pagination).+$'))
diff --git a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8ClassSourceLookup.java b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8ClassSourceLookup.java
new file mode 100644
index 0000000..fa4ac45
--- /dev/null
+++ b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8ClassSourceLookup.java
@@ -0,0 +1,84 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.sponge;
+
+import com.google.common.collect.ImmutableMap;
+
+import me.lucko.spark.common.util.ClassSourceLookup;
+
+import org.spongepowered.api.Game;
+import org.spongepowered.plugin.PluginCandidate;
+import org.spongepowered.plugin.PluginContainer;
+import org.spongepowered.plugin.builtin.jvm.JVMPluginContainer;
+import org.spongepowered.plugin.builtin.jvm.locator.JVMPluginResource;
+
+import java.lang.reflect.Field;
+import java.nio.file.Path;
+import java.util.Collection;
+import java.util.Map;
+
+public class Sponge8ClassSourceLookup extends ClassSourceLookup.ByCodeSource {
+ private final Path modsDirectory;
+ private final Map<Path, String> pathToPluginMap;
+
+ public Sponge8ClassSourceLookup(Game game) {
+ this.modsDirectory = game.gameDirectory().resolve("mods").toAbsolutePath().normalize();
+ this.pathToPluginMap = constructPathToPluginIdMap(game.pluginManager().plugins());
+ }
+
+ @Override
+ public String identifyFile(Path path) {
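+ // prefer the exact plugin-jar mapping, then fall back to matching by mods directory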
+ String id = this.pathToPluginMap.get(path);
+ if (id != null) {
+ return id;
+ }
+
+ if (!path.startsWith(this.modsDirectory)) {
+ return null;
+ }
+
+ return super.identifyFileName(this.modsDirectory.relativize(path).toString());
+ }
+
+ // pretty nasty, but if it fails it doesn't really matter
+ @SuppressWarnings("unchecked")
+ private static Map<Path, String> constructPathToPluginIdMap(Collection<PluginContainer> plugins) {
+ ImmutableMap.Builder<Path, String> builder = ImmutableMap.builder();
+
+ try {
+ Field candidateField = JVMPluginContainer.class.getDeclaredField("candidate");
+ candidateField.setAccessible(true);
+
+ for (PluginContainer plugin : plugins) {
+ if (plugin instanceof JVMPluginContainer) {
+ PluginCandidate<JVMPluginResource> candidate = (PluginCandidate<JVMPluginResource>) candidateField.get(plugin);
+ Path path = candidate.resource().path().toAbsolutePath().normalize();
+ builder.put(path, plugin.metadata().id());
+ }
+ }
+ } catch (Exception e) {
+ // ignore
+ }
+
+ return builder.build();
+ }
+
+}
diff --git a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8CommandSender.java b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8CommandSender.java
index 5e7a65a..e7878dc 100644
--- a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8CommandSender.java
+++ b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8CommandSender.java
@@ -17,6 +17,7 @@
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
+
package me.lucko.spark.sponge;
import me.lucko.spark.common.command.sender.AbstractCommandSender;
diff --git a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8PlatformInfo.java b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8PlatformInfo.java
index c2a136a..9589ddf 100644
--- a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8PlatformInfo.java
+++ b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8PlatformInfo.java
@@ -20,12 +20,12 @@
package me.lucko.spark.sponge;
-import me.lucko.spark.common.platform.AbstractPlatformInfo;
+import me.lucko.spark.common.platform.PlatformInfo;
import org.spongepowered.api.Game;
import org.spongepowered.api.Platform;
-public class Sponge8PlatformInfo extends AbstractPlatformInfo {
+public class Sponge8PlatformInfo implements PlatformInfo {
private final Game game;
public Sponge8PlatformInfo(Game game) {
diff --git a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8PlayerPingProvider.java b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8PlayerPingProvider.java
new file mode 100644
index 0000000..2bcaf6a
--- /dev/null
+++ b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8PlayerPingProvider.java
@@ -0,0 +1,47 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.sponge;
+
+import com.google.common.collect.ImmutableMap;
+
+import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
+
+import org.spongepowered.api.Server;
+import org.spongepowered.api.entity.living.player.server.ServerPlayer;
+
+import java.util.Map;
+
+public class Sponge8PlayerPingProvider implements PlayerPingProvider {
+ private final Server server;
+
+ public Sponge8PlayerPingProvider(Server server) {
+ this.server = server;
+ }
+
+ @Override
+ public Map<String, Integer> poll() {
+ ImmutableMap.Builder<String, Integer> builder = ImmutableMap.builder();
+ for (ServerPlayer player : this.server.onlinePlayers()) {
+ builder.put(player.name(), player.connection().latency());
+ }
+ return builder.build();
+ }
+}
diff --git a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8SparkPlugin.java b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8SparkPlugin.java
index 0195b24..e867a75 100644
--- a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8SparkPlugin.java
+++ b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8SparkPlugin.java
@@ -25,9 +25,11 @@ import com.google.inject.Inject;
import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.SparkPlugin;
import me.lucko.spark.common.command.sender.CommandSender;
+import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
import me.lucko.spark.common.platform.PlatformInfo;
import me.lucko.spark.common.sampler.ThreadDumper;
import me.lucko.spark.common.tick.TickHook;
+import me.lucko.spark.common.util.ClassSourceLookup;
import net.kyori.adventure.text.Component;
@@ -147,6 +149,20 @@ public class Sponge8SparkPlugin implements SparkPlugin {
}
@Override
+ public ClassSourceLookup createClassSourceLookup() {
+ return new Sponge8ClassSourceLookup(this.game);
+ }
+
+ @Override
+ public PlayerPingProvider createPlayerPingProvider() {
+ if (this.game.isServerAvailable()) {
+ return new Sponge8PlayerPingProvider(this.game.server());
+ } else {
+ return null;
+ }
+ }
+
+ @Override
public PlatformInfo getPlatformInfo() {
return new Sponge8PlatformInfo(this.game);
}
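[note] createPlayerPingProvider() returns null here when the Sponge server is not yet available, so callers have to treat the provider as optional. A hedged sketch of that guard (startMonitoring is a hypothetical stand-in, not spark's actual wiring):

    import me.lucko.spark.common.SparkPlugin;
    import me.lucko.spark.common.monitor.ping.PlayerPingProvider;

    public final class ProviderWiring {
        static void wire(SparkPlugin plugin) {
            PlayerPingProvider provider = plugin.createPlayerPingProvider();
            if (provider == null) {
                return; // e.g. Sponge before the server has started
            }
            startMonitoring(provider); // hypothetical helper
        }

        static void startMonitoring(PlayerPingProvider provider) {
            provider.poll().forEach((name, ping) ->
                    System.out.println(name + ": " + ping + "ms"));
        }
    }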
diff --git a/spark-velocity/build.gradle b/spark-velocity/build.gradle
index 8a345f9..b2e938b 100644
--- a/spark-velocity/build.gradle
+++ b/spark-velocity/build.gradle
@@ -20,7 +20,7 @@ blossom {
}
shadowJar {
- archiveName = 'spark-velocity.jar'
+ archiveName = "spark-${project.pluginVersion}-velocity.jar"
dependencies {
exclude(dependency('net.kyori:^(?!adventure-text-feature-pagination).+$'))
diff --git a/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocityPlatformInfo.java b/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocityPlatformInfo.java
index 23cdbc1..4ee42cb 100644
--- a/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocityPlatformInfo.java
+++ b/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocityPlatformInfo.java
@@ -22,9 +22,9 @@ package me.lucko.spark.velocity;
import com.velocitypowered.api.proxy.ProxyServer;
-import me.lucko.spark.common.platform.AbstractPlatformInfo;
+import me.lucko.spark.common.platform.PlatformInfo;
-public class VelocityPlatformInfo extends AbstractPlatformInfo {
+public class VelocityPlatformInfo implements PlatformInfo {
private final ProxyServer proxy;
public VelocityPlatformInfo(ProxyServer proxy) {
diff --git a/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocityPlayerPingProvider.java b/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocityPlayerPingProvider.java
new file mode 100644
index 0000000..382ea22
--- /dev/null
+++ b/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocityPlayerPingProvider.java
@@ -0,0 +1,46 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.velocity;
+
+import com.google.common.collect.ImmutableMap;
+import com.velocitypowered.api.proxy.Player;
+import com.velocitypowered.api.proxy.ProxyServer;
+
+import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
+
+import java.util.Map;
+
+public class VelocityPlayerPingProvider implements PlayerPingProvider {
+ private final ProxyServer proxy;
+
+ public VelocityPlayerPingProvider(ProxyServer proxy) {
+ this.proxy = proxy;
+ }
+
+ @Override
+ public Map<String, Integer> poll() {
+ ImmutableMap.Builder<String, Integer> builder = ImmutableMap.builder();
+ for (Player player : this.proxy.getAllPlayers()) {
+ builder.put(player.getUsername(), (int) player.getPing());
+ }
+ return builder.build();
+ }
+}
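[note] Velocity's Player#getPing() returns a long (reportedly -1 while the ping is not yet measured), so the provider narrows it with a plain (int) cast. That is safe for any realistic latency; a saturating conversion like this purely illustrative sketch would also cover pathological values:

    public final class PingCast {
        static int toIntSaturated(long pingMillis) {
            if (pingMillis > Integer.MAX_VALUE) return Integer.MAX_VALUE;
            if (pingMillis < Integer.MIN_VALUE) return Integer.MIN_VALUE;
            return (int) pingMillis;
        }
    }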
diff --git a/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocitySparkPlugin.java b/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocitySparkPlugin.java
index 698aab0..7d9ced8 100644
--- a/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocitySparkPlugin.java
+++ b/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocitySparkPlugin.java
@@ -32,6 +32,7 @@ import com.velocitypowered.api.proxy.ProxyServer;
import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.SparkPlugin;
+import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
import me.lucko.spark.common.platform.PlatformInfo;
import me.lucko.spark.common.util.ClassSourceLookup;
@@ -133,6 +134,11 @@ public class VelocitySparkPlugin implements SparkPlugin, SimpleCommand {
}
@Override
+ public PlayerPingProvider createPlayerPingProvider() {
+ return new VelocityPlayerPingProvider(this.proxy);
+ }
+
+ @Override
public PlatformInfo getPlatformInfo() {
return new VelocityPlatformInfo(this.proxy);
}
diff --git a/spark-velocity4/build.gradle b/spark-velocity4/build.gradle
index e2a0559..5bef80b 100644
--- a/spark-velocity4/build.gradle
+++ b/spark-velocity4/build.gradle
@@ -25,7 +25,7 @@ blossom {
}
shadowJar {
- archiveName = 'spark-velocity4.jar'
+ archiveName = "spark-${project.pluginVersion}-velocity4.jar"
dependencies {
exclude(dependency('net.kyori:^(?!adventure-text-feature-pagination).+$'))
diff --git a/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4PlatformInfo.java b/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4PlatformInfo.java
index fd2b3b5..bb2f26b 100644
--- a/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4PlatformInfo.java
+++ b/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4PlatformInfo.java
@@ -22,9 +22,9 @@ package me.lucko.spark.velocity;
import com.velocitypowered.api.proxy.ProxyServer;
-import me.lucko.spark.common.platform.AbstractPlatformInfo;
+import me.lucko.spark.common.platform.PlatformInfo;
-public class Velocity4PlatformInfo extends AbstractPlatformInfo {
+public class Velocity4PlatformInfo implements PlatformInfo {
private final ProxyServer proxy;
public Velocity4PlatformInfo(ProxyServer proxy) {
diff --git a/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4PlayerPingProvider.java b/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4PlayerPingProvider.java
new file mode 100644
index 0000000..18f36a7
--- /dev/null
+++ b/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4PlayerPingProvider.java
@@ -0,0 +1,46 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.velocity;
+
+import com.google.common.collect.ImmutableMap;
+import com.velocitypowered.api.proxy.ProxyServer;
+import com.velocitypowered.api.proxy.connection.Player;
+
+import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
+
+import java.util.Map;
+
+public class Velocity4PlayerPingProvider implements PlayerPingProvider {
+ private final ProxyServer proxy;
+
+ public Velocity4PlayerPingProvider(ProxyServer proxy) {
+ this.proxy = proxy;
+ }
+
+ @Override
+ public Map<String, Integer> poll() {
+ ImmutableMap.Builder<String, Integer> builder = ImmutableMap.builder();
+ for (Player player : this.proxy.connectedPlayers()) {
+ builder.put(player.username(), (int) player.ping());
+ }
+ return builder.build();
+ }
+}
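[note] The Velocity 4 provider is structurally identical to the Velocity one; only the API surface changed (Player moved to com.velocitypowered.api.proxy.connection, getAllPlayers() became connectedPlayers(), and the accessors dropped their get- prefixes). A generic helper like this sketch (hypothetical, not part of spark) could factor out the shared shape:

    import com.google.common.collect.ImmutableMap;
    import java.util.Map;
    import java.util.function.Function;
    import java.util.function.ToIntFunction;

    public final class PingMaps {
        // Builds the name -> ping map from any player collection, given
        // accessors for the two fields the providers actually read.
        static <P> Map<String, Integer> build(Iterable<P> players,
                                              Function<P, String> name,
                                              ToIntFunction<P> ping) {
            ImmutableMap.Builder<String, Integer> builder = ImmutableMap.builder();
            for (P player : players) {
                builder.put(name.apply(player), ping.applyAsInt(player));
            }
            return builder.build();
        }
    }

With that helper, the Velocity 4 poll() body reduces to build(proxy.connectedPlayers(), Player::username, p -> (int) p.ping()).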
diff --git a/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4SparkPlugin.java b/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4SparkPlugin.java
index e1acd57..0c57689 100644
--- a/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4SparkPlugin.java
+++ b/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4SparkPlugin.java
@@ -32,6 +32,7 @@ import com.velocitypowered.api.proxy.ProxyServer;
import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.SparkPlugin;
+import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
import me.lucko.spark.common.platform.PlatformInfo;
import me.lucko.spark.common.util.ClassSourceLookup;
@@ -133,6 +134,11 @@ public class Velocity4SparkPlugin implements SparkPlugin, SimpleCommand {
}
@Override
+ public PlayerPingProvider createPlayerPingProvider() {
+ return new Velocity4PlayerPingProvider(this.proxy);
+ }
+
+ @Override
public PlatformInfo getPlatformInfo() {
return new Velocity4PlatformInfo(this.proxy);
}
diff --git a/spark-waterdog/build.gradle b/spark-waterdog/build.gradle
index 07be15c..c11e3fb 100644
--- a/spark-waterdog/build.gradle
+++ b/spark-waterdog/build.gradle
@@ -28,7 +28,7 @@ processResources {
}
shadowJar {
- archiveName = 'spark-waterdog.jar'
+ archiveName = "spark-${project.pluginVersion}-waterdog.jar"
relocate 'okio', 'me.lucko.spark.lib.okio'
relocate 'okhttp3', 'me.lucko.spark.lib.okhttp3'
diff --git a/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogPlatformInfo.java b/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogPlatformInfo.java
index bfbe768..14b8f60 100644
--- a/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogPlatformInfo.java
+++ b/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogPlatformInfo.java
@@ -20,11 +20,11 @@
package me.lucko.spark.waterdog;
-import me.lucko.spark.common.platform.AbstractPlatformInfo;
+import me.lucko.spark.common.platform.PlatformInfo;
import dev.waterdog.waterdogpe.WaterdogPE;
-public class WaterdogPlatformInfo extends AbstractPlatformInfo {
+public class WaterdogPlatformInfo implements PlatformInfo {
@Override
public Type getType() {
diff --git a/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogPlayerPingProvider.java b/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogPlayerPingProvider.java
new file mode 100644
index 0000000..b22325c
--- /dev/null
+++ b/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogPlayerPingProvider.java
@@ -0,0 +1,47 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.waterdog;
+
+import com.google.common.collect.ImmutableMap;
+
+import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
+
+import dev.waterdog.waterdogpe.ProxyServer;
+import dev.waterdog.waterdogpe.player.ProxiedPlayer;
+
+import java.util.Map;
+
+public class WaterdogPlayerPingProvider implements PlayerPingProvider {
+ private final ProxyServer proxy;
+
+ public WaterdogPlayerPingProvider(ProxyServer proxy) {
+ this.proxy = proxy;
+ }
+
+ @Override
+ public Map<String, Integer> poll() {
+ ImmutableMap.Builder<String, Integer> builder = ImmutableMap.builder();
+ for (ProxiedPlayer player : this.proxy.getPlayers().values()) {
+ builder.put(player.getName(), (int) player.getPing());
+ }
+ return builder.build();
+ }
+}
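[note] The Waterdog provider follows the same pattern, iterating the values of the proxy's player map. With the hypothetical PingMaps helper sketched in the Velocity 4 notes above, its poll() body would collapse to:

    // Hypothetical; reuses the PingMaps sketch, not spark code.
    Map<String, Integer> pings = PingMaps.build(
            proxy.getPlayers().values(),
            ProxiedPlayer::getName,
            p -> (int) p.getPing());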
diff --git a/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogSparkPlugin.java b/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogSparkPlugin.java
index fd2f031..07b153a 100644
--- a/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogSparkPlugin.java
+++ b/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogSparkPlugin.java
@@ -22,6 +22,7 @@ package me.lucko.spark.waterdog;
import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.SparkPlugin;
+import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
import me.lucko.spark.common.platform.PlatformInfo;
import me.lucko.spark.common.util.ClassSourceLookup;
@@ -100,6 +101,11 @@ public class WaterdogSparkPlugin extends Plugin implements SparkPlugin {
}
@Override
+ public PlayerPingProvider createPlayerPingProvider() {
+ return new WaterdogPlayerPingProvider(getProxy());
+ }
+
+ @Override
public PlatformInfo getPlatformInfo() {
return new WaterdogPlatformInfo();
}
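[note] One caveat shared by all of these providers: ImmutableMap.Builder#build() throws IllegalArgumentException if the same key is added twice, so a platform that could ever report two players under one name would make poll() throw. None of the platforms here should do that, but as an observation, Guava 31+ offers buildKeepingLast() as a duplicate-tolerant alternative, sketched here:

    import com.google.common.collect.ImmutableMap;
    import java.util.Map;

    public final class DuplicateSafePoll {
        static Map<String, Integer> example() {
            ImmutableMap.Builder<String, Integer> builder = ImmutableMap.builder();
            builder.put("Steve", 42);
            builder.put("Steve", 57);          // duplicate key
            return builder.buildKeepingLast(); // keeps 57; build() would throw
        }
    }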