author     lucko <git@lucko.me>          2022-12-27 09:17:54 +0000
committer  GitHub <noreply@github.com>   2022-12-27 09:17:54 +0000
commit     e5b278047ccb7bc6b301d787474c51d162911867 (patch)
tree       11bba64e8f28ce8b83adc05252b75f17e2ccbf6a
parent     4a16a1a2f4eb09f706b4a541e3d31618de29420b (diff)
parent     1075665def4a41cf0064255a6da1d1a652f5d473 (diff)
Merge pull request #284 from embeddedt/forge-1.7.10
Align 1.7.10 with master
-rw-r--r--  build.gradle | 4
-rw-r--r--  jitpack.yml | 2
-rw-r--r--  spark-bukkit/build.gradle | 11
-rw-r--r--  spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitClassSourceLookup.java | 2
-rw-r--r--  spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java | 122
-rw-r--r--  spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java | 36
-rw-r--r--  spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitWorldInfoProvider.java | 130
-rw-r--r--  spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkMVdWPlaceholders.java | 3
-rw-r--r--  spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkPlaceholderApi.java | 3
-rw-r--r--  spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkPlaceholderProvider.java | 123
-rw-r--r--  spark-bungeecord/build.gradle | 11
-rw-r--r--  spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordClassSourceLookup.java | 2
-rw-r--r--  spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordSparkPlugin.java | 14
-rw-r--r--  spark-common/build.gradle | 39
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java | 118
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java | 45
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/command/Arguments.java | 11
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/command/Command.java | 58
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/command/modules/ActivityLogModule.java | 1
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/command/modules/GcMonitoringModule.java | 22
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java | 4
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java | 241
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/monitor/LinuxProc.java (renamed from spark-common/src/main/java/me/lucko/spark/common/util/LinuxProc.java) | 9
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/monitor/WindowsWmic.java | 74
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuInfo.java | 13
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/monitor/memory/MemoryInfo.java | 2
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkInterfaceInfo.java | 2
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/monitor/os/OperatingSystemInfo.java | 86
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/platform/MetadataProvider.java (renamed from spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadNodeOrder.java) | 35
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java | 24
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ConfigParser.java | 48
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ExcludedConfigFilter.java (renamed from spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java) | 57
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesConfigParser.java (renamed from spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesFileReader.java) | 29
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java | 69
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/platform/world/AbstractChunkInfo.java | 55
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/platform/world/AsyncWorldInfoProvider.java | 90
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/platform/world/ChunkInfo.java | 44
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/platform/world/CountMap.java | 110
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldInfoProvider.java | 104
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java | 189
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java | 99
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/BackgroundSamplerManager.java | 115
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java | 17
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java | 29
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerContainer.java | 76
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerSettings.java | 61
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java | 24
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/AbstractDataAggregator.java | 6
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/DataAggregator.java | 8
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncDataAggregator.java | 4
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java | 58
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerJob.java | 276
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java | 273
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/async/JfrParsingException.java | 27
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/async/ProfileSegment.java | 50
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/Dictionary.java | 4
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/JfrReader.java | 4
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaDataAggregator.java | 7
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java | 85
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/java/SimpleDataAggregator.java | 4
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java | 36
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java | 75
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java | 79
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java | 166
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/source/ClassSourceLookup.java | 462
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/source/SourceMetadata.java | 81
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/window/ProfilingWindowUtils.java | 70
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/window/ProtoTimeEncoder.java | 93
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/window/WindowStatisticsCollector.java | 287
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/util/BytebinClient.java | 75
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/util/ClassSourceLookup.java | 241
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/util/Compression.java | 60
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/util/Configuration.java | 70
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/util/FormatUtil.java | 20
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/util/IndexedListBuilder.java (renamed from spark-common/src/main/java/me/lucko/spark/common/util/AbstractHttpClient.java) | 33
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/util/SparkPlaceholder.java | 191
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/util/TemporaryFiles.java | 81
-rw-r--r--  spark-common/src/main/proto/spark/spark.proto | 42
-rw-r--r--  spark-common/src/main/proto/spark/spark_sampler.proto | 29
-rwxr-xr-x  spark-common/src/main/resources/spark/linux/aarch64/libasyncProfiler.so | bin 328432 -> 343408 bytes
-rwxr-xr-x  spark-common/src/main/resources/spark/linux/amd64-musl/libasyncProfiler.so | bin 0 -> 317560 bytes
-rwxr-xr-x  spark-common/src/main/resources/spark/linux/amd64/libasyncProfiler.so | bin 342239 -> 361312 bytes
-rwxr-xr-x  spark-common/src/main/resources/spark/macos/libasyncProfiler.so | bin 688400 -> 724576 bytes
-rw-r--r--  spark-fabric/build.gradle | 23
-rw-r--r--  spark-fabric/src/main/java/me/lucko/spark/fabric/FabricClassSourceLookup.java | 161
-rw-r--r--  spark-fabric/src/main/java/me/lucko/spark/fabric/FabricExtraMetadataProvider.java | 75
-rw-r--r--  spark-fabric/src/main/java/me/lucko/spark/fabric/FabricServerConfigProvider.java | 57
-rw-r--r--  spark-fabric/src/main/java/me/lucko/spark/fabric/FabricWorldInfoProvider.java | 178
-rw-r--r--  spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientEntityManagerAccessor.java | 40
-rw-r--r--  spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientWorldAccessor.java | 36
-rw-r--r--  spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/MinecraftClientAccessor.java | 34
-rw-r--r--  spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerEntityManagerAccessor.java | 40
-rw-r--r--  spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerWorldAccessor.java | 36
-rw-r--r--  spark-fabric/src/main/java/me/lucko/spark/fabric/placeholder/SparkFabricPlaceholderApi.java | 183
-rw-r--r--  spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricClientSparkPlugin.java | 29
-rw-r--r--  spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java | 52
-rw-r--r--  spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricSparkMixinPlugin.java | 71
-rw-r--r--  spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricSparkPlugin.java | 24
-rw-r--r--  spark-fabric/src/main/java/me/lucko/spark/fabric/smap/MixinUtils.java | 52
-rw-r--r--  spark-fabric/src/main/java/me/lucko/spark/fabric/smap/SourceDebugCache.java | 87
-rw-r--r--  spark-fabric/src/main/java/me/lucko/spark/fabric/smap/SourceMap.java | 133
-rw-r--r--  spark-fabric/src/main/java/me/lucko/spark/fabric/smap/SourceMapProvider.java | 53
-rw-r--r--  spark-fabric/src/main/resources/fabric.mod.json | 3
-rw-r--r--  spark-fabric/src/main/resources/spark.mixins.json | 15
-rw-r--r--  spark-forge/build.gradle | 8
-rw-r--r--  spark-forge/src/main/java/me/lucko/spark/forge/ForgeClassSourceLookup.java | 2
-rw-r--r--  spark-forge/src/main/java/me/lucko/spark/forge/ForgeExtraMetadataProvider.java | 75
-rw-r--r--  spark-forge/src/main/java/me/lucko/spark/forge/ForgeServerConfigProvider.java | 57
-rw-r--r--  spark-forge/src/main/java/me/lucko/spark/forge/ForgeWorldInfoProvider.java | 174
-rw-r--r--  spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeClientSparkPlugin.java | 28
-rw-r--r--  spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java | 69
-rw-r--r--  spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeSparkPlugin.java | 21
-rw-r--r--  spark-forge/src/main/resources/META-INF/accesstransformer.cfg | 7
-rw-r--r--  spark-forge1710/build.gradle | 6
-rw-r--r--  spark-forge1710/src/main/java/me/lucko/spark/forge/Forge1710PlayerPingProvider.java | 48
-rw-r--r--  spark-forge1710/src/main/java/me/lucko/spark/forge/Forge1710WorldInfoProvider.java | 150
-rw-r--r--  spark-forge1710/src/main/java/me/lucko/spark/forge/plugin/Forge1710ClientSparkPlugin.java | 19
-rw-r--r--  spark-forge1710/src/main/java/me/lucko/spark/forge/plugin/Forge1710ServerSparkPlugin.java | 53
-rw-r--r--  spark-forge1710/src/main/java/me/lucko/spark/forge/plugin/Forge1710SparkPlugin.java | 8
-rw-r--r--  spark-forge1710/src/main/resources/META-INF/spark_at.cfg | 1
-rw-r--r--  spark-minestom/build.gradle | 5
-rw-r--r--  spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomClassSourceLookup.java | 2
-rw-r--r--  spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomSparkPlugin.java | 14
-rw-r--r--  spark-nukkit/build.gradle | 5
-rw-r--r--  spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitClassSourceLookup.java | 2
-rw-r--r--  spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitSparkPlugin.java | 10
-rw-r--r--  spark-sponge7/build.gradle | 3
-rw-r--r--  spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7ClassSourceLookup.java | 2
-rw-r--r--  spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7SparkPlugin.java | 41
-rw-r--r--  spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7WorldInfoProvider.java | 104
-rw-r--r--  spark-sponge8/build.gradle | 3
-rw-r--r--  spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8ClassSourceLookup.java | 2
-rw-r--r--  spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8SparkPlugin.java | 63
-rw-r--r--  spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8WorldInfoProvider.java | 105
-rw-r--r--  spark-velocity/build.gradle | 3
-rw-r--r--  spark-velocity/src/main/java/me/lucko/spark/velocity/VelocityClassSourceLookup.java | 2
-rw-r--r--  spark-velocity/src/main/java/me/lucko/spark/velocity/VelocitySparkPlugin.java | 14
-rw-r--r--  spark-velocity4/build.gradle | 3
-rw-r--r--  spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4ClassSourceLookup.java | 4
-rw-r--r--  spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4SparkPlugin.java | 14
-rw-r--r--  spark-waterdog/build.gradle | 5
-rw-r--r--  spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogClassSourceLookup.java | 2
-rw-r--r--  spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogSparkPlugin.java | 14
143 files changed, 6500 insertions, 1490 deletions
diff --git a/build.gradle b/build.gradle
index a56b089..f96ec86 100644
--- a/build.gradle
+++ b/build.gradle
@@ -4,7 +4,7 @@ plugins {
allprojects {
group = 'me.lucko'
- version = '1.9-SNAPSHOT'
+ version = '1.10-SNAPSHOT'
configurations {
compileClasspath // Fabric-loom needs this for remap jar for some reason
@@ -18,7 +18,7 @@ subprojects {
apply plugin: 'org.cadixdev.licenser'
ext {
- baseVersion = '1.9'
+ baseVersion = '1.10'
patchVersion = determinePatchVersion()
pluginVersion = baseVersion + '.' + patchVersion
pluginDescription = 'spark is a performance profiling plugin/mod for Minecraft clients, servers and proxies.'
diff --git a/jitpack.yml b/jitpack.yml
new file mode 100644
index 0000000..56acad0
--- /dev/null
+++ b/jitpack.yml
@@ -0,0 +1,2 @@
+before_install:
+ - ./gradlew :spark-forge1710:setupCIWorkspace
diff --git a/spark-bukkit/build.gradle b/spark-bukkit/build.gradle
index 8e111e8..92b65cc 100644
--- a/spark-bukkit/build.gradle
+++ b/spark-bukkit/build.gradle
@@ -4,13 +4,7 @@ plugins {
dependencies {
implementation project(':spark-common')
- implementation('me.lucko:adventure-platform-bukkit:4.9.4') {
- exclude(module: 'adventure-api')
- exclude(module: 'checker-qual')
- exclude(module: 'annotations')
- exclude(module: 'adventure-text-serializer-gson')
- exclude(module: 'adventure-text-serializer-legacy')
- }
+ implementation 'net.kyori:adventure-platform-bukkit:4.2.0'
compileOnly 'com.destroystokyo.paper:paper-api:1.16.4-R0.1-SNAPSHOT'
// placeholders
@@ -37,12 +31,9 @@ processResources {
shadowJar {
archiveName = "spark-${project.pluginVersion}-bukkit.jar"
- relocate 'okio', 'me.lucko.spark.lib.okio'
- relocate 'okhttp3', 'me.lucko.spark.lib.okhttp3'
relocate 'net.kyori.adventure', 'me.lucko.spark.lib.adventure'
relocate 'net.kyori.examination', 'me.lucko.spark.lib.adventure.examination'
relocate 'net.bytebuddy', 'me.lucko.spark.lib.bytebuddy'
- relocate 'org.tukaani.xz', 'me.lucko.spark.lib.xz'
relocate 'com.google.protobuf', 'me.lucko.spark.lib.protobuf'
relocate 'org.objectweb.asm', 'me.lucko.spark.lib.asm'
relocate 'one.profiler', 'me.lucko.spark.lib.asyncprofiler'
diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitClassSourceLookup.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitClassSourceLookup.java
index 6d8afda..f9c0c0b 100644
--- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitClassSourceLookup.java
+++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitClassSourceLookup.java
@@ -20,7 +20,7 @@
package me.lucko.spark.bukkit;
-import me.lucko.spark.common.util.ClassSourceLookup;
+import me.lucko.spark.common.sampler.source.ClassSourceLookup;
import org.bukkit.plugin.java.JavaPlugin;
diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java
index 953e171..5db1b38 100644
--- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java
+++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java
@@ -25,11 +25,16 @@ import com.google.common.collect.ImmutableSet;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
import com.google.gson.JsonSerializer;
-import me.lucko.spark.common.platform.serverconfig.AbstractServerConfigProvider;
-import me.lucko.spark.common.platform.serverconfig.PropertiesFileReader;
+import me.lucko.spark.common.platform.serverconfig.ConfigParser;
+import me.lucko.spark.common.platform.serverconfig.ExcludedConfigFilter;
+import me.lucko.spark.common.platform.serverconfig.PropertiesConfigParser;
+import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider;
+import org.bukkit.Bukkit;
+import org.bukkit.World;
import org.bukkit.configuration.MemorySection;
import org.bukkit.configuration.file.YamlConfiguration;
@@ -37,23 +42,19 @@ import co.aikar.timings.TimingsManager;
import java.io.BufferedReader;
import java.io.IOException;
-import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
-import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
+import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
-public class BukkitServerConfigProvider extends AbstractServerConfigProvider<BukkitServerConfigProvider.FileType> {
- private static final Gson GSON = new GsonBuilder()
- .registerTypeAdapter(MemorySection.class, (JsonSerializer<MemorySection>) (obj, type, ctx) -> ctx.serialize(obj.getValues(false)))
- .create();
+public class BukkitServerConfigProvider extends ServerConfigProvider {
/** A map of provided files and their type */
- private static final Map<String, FileType> FILES;
+ private static final Map<String, ConfigParser> FILES;
/** A collection of paths to be excluded from the files */
private static final Collection<String> HIDDEN_PATHS;
@@ -61,51 +62,89 @@ public class BukkitServerConfigProvider extends AbstractServerConfigProvider<Buk
super(FILES, HIDDEN_PATHS);
}
- @Override
- protected JsonElement load(String path, FileType type) throws IOException {
- Path filePath = Paths.get(path);
- if (!Files.exists(filePath)) {
- return null;
+ private static class YamlConfigParser implements ConfigParser {
+ public static final YamlConfigParser INSTANCE = new YamlConfigParser();
+ protected static final Gson GSON = new GsonBuilder()
+ .registerTypeAdapter(MemorySection.class, (JsonSerializer<MemorySection>) (obj, type, ctx) -> ctx.serialize(obj.getValues(false)))
+ .create();
+
+ @Override
+ public JsonElement load(String file, ExcludedConfigFilter filter) throws IOException {
+ Map<String, Object> values = this.parse(Paths.get(file));
+ if (values == null) {
+ return null;
+ }
+
+ return filter.apply(GSON.toJsonTree(values));
}
- try (BufferedReader reader = Files.newBufferedReader(filePath, StandardCharsets.UTF_8)) {
- Map<String, Object> values;
-
- if (type == FileType.PROPERTIES) {
- PropertiesFileReader propertiesReader = new PropertiesFileReader(reader);
- values = propertiesReader.readProperties();
- } else if (type == FileType.YAML) {
- YamlConfiguration config = YamlConfiguration.loadConfiguration(reader);
- values = config.getValues(false);
- } else {
- throw new IllegalArgumentException("Unknown file type: " + type);
+ @Override
+ public Map<String, Object> parse(BufferedReader reader) throws IOException {
+ YamlConfiguration config = YamlConfiguration.loadConfiguration(reader);
+ return config.getValues(false);
+ }
+ }
+
+ // Paper 1.19+ split config layout
+ private static class SplitYamlConfigParser extends YamlConfigParser {
+ public static final SplitYamlConfigParser INSTANCE = new SplitYamlConfigParser();
+
+ @Override
+ public JsonElement load(String group, ExcludedConfigFilter filter) throws IOException {
+ String prefix = group.replace("/", "");
+
+ Path configDir = Paths.get("config");
+ if (!Files.exists(configDir)) {
+ return null;
}
- return GSON.toJsonTree(values);
+ JsonObject root = new JsonObject();
+
+ for (Map.Entry<String, Path> entry : getNestedFiles(configDir, prefix).entrySet()) {
+ String fileName = entry.getKey();
+ Path path = entry.getValue();
+
+ Map<String, Object> values = this.parse(path);
+ if (values == null) {
+ continue;
+ }
+
+ // apply the filter individually to each nested file
+ root.add(fileName, filter.apply(GSON.toJsonTree(values)));
+ }
+
+ return root;
}
- }
- enum FileType {
- PROPERTIES,
- YAML
+ private static Map<String, Path> getNestedFiles(Path configDir, String prefix) {
+ Map<String, Path> files = new LinkedHashMap<>();
+ files.put("global.yml", configDir.resolve(prefix + "-global.yml"));
+ files.put("world-defaults.yml", configDir.resolve(prefix + "-world-defaults.yml"));
+ for (World world : Bukkit.getWorlds()) {
+ files.put(world.getName() + ".yml", world.getWorldFolder().toPath().resolve(prefix + "-world.yml"));
+ }
+ return files;
+ }
}
static {
- ImmutableMap.Builder<String, FileType> files = ImmutableMap.<String, FileType>builder()
- .put("server.properties", FileType.PROPERTIES)
- .put("bukkit.yml", FileType.YAML)
- .put("spigot.yml", FileType.YAML)
- .put("paper.yml", FileType.YAML)
- .put("purpur.yml", FileType.YAML);
+ ImmutableMap.Builder<String, ConfigParser> files = ImmutableMap.<String, ConfigParser>builder()
+ .put("server.properties", PropertiesConfigParser.INSTANCE)
+ .put("bukkit.yml", YamlConfigParser.INSTANCE)
+ .put("spigot.yml", YamlConfigParser.INSTANCE)
+ .put("paper.yml", YamlConfigParser.INSTANCE)
+ .put("paper/", SplitYamlConfigParser.INSTANCE)
+ .put("purpur.yml", YamlConfigParser.INSTANCE);
for (String config : getSystemPropertyList("spark.serverconfigs.extra")) {
- files.put(config, FileType.YAML);
+ files.put(config, YamlConfigParser.INSTANCE);
}
ImmutableSet.Builder<String> hiddenPaths = ImmutableSet.<String>builder()
.add("database")
.add("settings.bungeecord-addresses")
.add("settings.velocity-support.secret")
+ .add("proxies.velocity.secret")
.add("server-ip")
.add("motd")
.add("resource-pack")
@@ -113,6 +152,8 @@ public class BukkitServerConfigProvider extends AbstractServerConfigProvider<Buk
.add("level-seed")
.add("world-settings.*.feature-seeds")
.add("world-settings.*.seed-*")
+ .add("feature-seeds")
+ .add("seed-*")
.addAll(getTimingsHiddenConfigs())
.addAll(getSystemPropertyList("spark.serverconfigs.hiddenpaths"));
@@ -120,13 +161,6 @@ public class BukkitServerConfigProvider extends AbstractServerConfigProvider<Buk
HIDDEN_PATHS = hiddenPaths.build();
}
- private static List<String> getSystemPropertyList(String property) {
- String value = System.getProperty(property);
- return value == null
- ? Collections.emptyList()
- : Arrays.asList(value.split(","));
- }
-
private static List<String> getTimingsHiddenConfigs() {
try {
return TimingsManager.hiddenConfigs;
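
Note: the hunk above replaces the old FileType enum (and the monolithic load() switch) with pluggable ConfigParser strategy objects; each parser knows how to read its own format, and ExcludedConfigFilter handles path redaction uniformly. Below is a minimal sketch of what a third format could look like under the same interface. It assumes ConfigParser also supplies a parse(Path) default that opens the reader, as the call to this.parse(Paths.get(file)) above implies; the JsonConfigParser name is illustrative and not part of this change.

    import com.google.gson.Gson;
    import com.google.gson.JsonElement;
    import com.google.gson.reflect.TypeToken;

    import me.lucko.spark.common.platform.serverconfig.ConfigParser;
    import me.lucko.spark.common.platform.serverconfig.ExcludedConfigFilter;

    import java.io.BufferedReader;
    import java.io.IOException;
    import java.nio.file.Paths;
    import java.util.Map;

    public enum JsonConfigParser implements ConfigParser {
        INSTANCE;

        private static final Gson GSON = new Gson();

        @Override
        public JsonElement load(String file, ExcludedConfigFilter filter) throws IOException {
            // same shape as YamlConfigParser.load above: parse, then redact hidden paths
            Map<String, Object> values = this.parse(Paths.get(file));
            if (values == null) {
                return null;
            }
            return filter.apply(GSON.toJsonTree(values));
        }

        @Override
        public Map<String, Object> parse(BufferedReader reader) throws IOException {
            return GSON.fromJson(reader, new TypeToken<Map<String, Object>>(){}.getType());
        }
    }

A parser written this way would slot into the FILES map exactly like PropertiesConfigParser.INSTANCE and YamlConfigParser.INSTANCE do in the static block above.
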
diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java
index 9727277..87490ea 100644
--- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java
+++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java
@@ -28,10 +28,12 @@ import me.lucko.spark.common.SparkPlugin;
import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
import me.lucko.spark.common.platform.PlatformInfo;
import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
import me.lucko.spark.common.sampler.ThreadDumper;
+import me.lucko.spark.common.sampler.source.ClassSourceLookup;
+import me.lucko.spark.common.sampler.source.SourceMetadata;
import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.common.tick.TickReporter;
-import me.lucko.spark.common.util.ClassSourceLookup;
import net.kyori.adventure.platform.bukkit.BukkitAudiences;
@@ -39,24 +41,29 @@ import org.bukkit.ChatColor;
import org.bukkit.command.Command;
import org.bukkit.command.CommandExecutor;
import org.bukkit.command.CommandSender;
+import org.bukkit.plugin.Plugin;
import org.bukkit.plugin.ServicePriority;
import org.bukkit.plugin.java.JavaPlugin;
import java.nio.file.Path;
+import java.util.Arrays;
+import java.util.Collection;
import java.util.List;
import java.util.logging.Level;
import java.util.stream.Stream;
public class BukkitSparkPlugin extends JavaPlugin implements SparkPlugin {
private BukkitAudiences audienceFactory;
+ private ThreadDumper gameThreadDumper;
+
private SparkPlatform platform;
private CommandExecutor tpsCommand = null;
- private final ThreadDumper.GameThread threadDumper = new ThreadDumper.GameThread();
@Override
public void onEnable() {
this.audienceFactory = BukkitAudiences.create(this);
+ this.gameThreadDumper = new ThreadDumper.Specific(Thread.currentThread());
this.platform = new SparkPlatform(this);
this.platform.enable();
@@ -101,7 +108,6 @@ public class BukkitSparkPlugin extends JavaPlugin implements SparkPlugin {
@Override
public boolean onCommand(CommandSender sender, Command command, String label, String[] args) {
- this.threadDumper.ensureSetup();
this.platform.executeCommand(new BukkitCommandSender(sender, this.audienceFactory), args);
return true;
}
@@ -136,7 +142,12 @@ public class BukkitSparkPlugin extends JavaPlugin implements SparkPlugin {
@Override
public void executeAsync(Runnable task) {
- getServer().getScheduler().runTaskAsynchronously(BukkitSparkPlugin.this, task);
+ getServer().getScheduler().runTaskAsynchronously(this, task);
+ }
+
+ @Override
+ public void executeSync(Runnable task) {
+ getServer().getScheduler().runTask(this, task);
}
@Override
@@ -146,7 +157,7 @@ public class BukkitSparkPlugin extends JavaPlugin implements SparkPlugin {
@Override
public ThreadDumper getDefaultThreadDumper() {
- return this.threadDumper.get();
+ return this.gameThreadDumper;
}
@Override
@@ -174,6 +185,16 @@ public class BukkitSparkPlugin extends JavaPlugin implements SparkPlugin {
}
@Override
+ public Collection<SourceMetadata> getKnownSources() {
+ return SourceMetadata.gather(
+ Arrays.asList(getServer().getPluginManager().getPlugins()),
+ Plugin::getName,
+ plugin -> plugin.getDescription().getVersion(),
+ plugin -> String.join(", ", plugin.getDescription().getAuthors())
+ );
+ }
+
+ @Override
public PlayerPingProvider createPlayerPingProvider() {
if (BukkitPlayerPingProvider.isSupported()) {
return new BukkitPlayerPingProvider(getServer());
@@ -188,6 +209,11 @@ public class BukkitSparkPlugin extends JavaPlugin implements SparkPlugin {
}
@Override
+ public WorldInfoProvider createWorldInfoProvider() {
+ return new BukkitWorldInfoProvider(getServer());
+ }
+
+ @Override
public PlatformInfo getPlatformInfo() {
return new BukkitPlatformInfo(getServer());
}
diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitWorldInfoProvider.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitWorldInfoProvider.java
new file mode 100644
index 0000000..8f876cf
--- /dev/null
+++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitWorldInfoProvider.java
@@ -0,0 +1,130 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.bukkit;
+
+import me.lucko.spark.common.platform.world.AbstractChunkInfo;
+import me.lucko.spark.common.platform.world.CountMap;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
+
+import org.bukkit.Chunk;
+import org.bukkit.Server;
+import org.bukkit.World;
+import org.bukkit.entity.Entity;
+import org.bukkit.entity.EntityType;
+
+import java.util.ArrayList;
+import java.util.List;
+
+public class BukkitWorldInfoProvider implements WorldInfoProvider {
+ private static final boolean SUPPORTS_PAPER_COUNT_METHODS;
+
+ static {
+ boolean supportsPaperCountMethods = false;
+ try {
+ World.class.getMethod("getEntityCount");
+ World.class.getMethod("getTileEntityCount");
+ World.class.getMethod("getChunkCount");
+ supportsPaperCountMethods = true;
+ } catch (Exception e) {
+ // ignored
+ }
+ SUPPORTS_PAPER_COUNT_METHODS = supportsPaperCountMethods;
+ }
+
+ private final Server server;
+
+ public BukkitWorldInfoProvider(Server server) {
+ this.server = server;
+ }
+
+ @Override
+ public CountsResult pollCounts() {
+ int players = this.server.getOnlinePlayers().size();
+ int entities = 0;
+ int tileEntities = 0;
+ int chunks = 0;
+
+ for (World world : this.server.getWorlds()) {
+ if (SUPPORTS_PAPER_COUNT_METHODS) {
+ entities += world.getEntityCount();
+ tileEntities += world.getTileEntityCount();
+ chunks += world.getChunkCount();
+ } else {
+ entities += world.getEntities().size();
+ Chunk[] chunksArray = world.getLoadedChunks();
+ for (Chunk chunk : chunksArray) {
+ tileEntities += chunk.getTileEntities().length;
+ }
+ chunks += chunksArray.length;
+ }
+ }
+
+ return new CountsResult(players, entities, tileEntities, chunks);
+ }
+
+ @Override
+ public ChunksResult<BukkitChunkInfo> pollChunks() {
+ ChunksResult<BukkitChunkInfo> data = new ChunksResult<>();
+
+ for (World world : this.server.getWorlds()) {
+ Chunk[] chunks = world.getLoadedChunks();
+
+ List<BukkitChunkInfo> list = new ArrayList<>(chunks.length);
+ for (Chunk chunk : chunks) {
+ if (chunk != null) {
+ list.add(new BukkitChunkInfo(chunk));
+ }
+ }
+
+ data.put(world.getName(), list);
+ }
+
+ return data;
+ }
+
+ static final class BukkitChunkInfo extends AbstractChunkInfo<EntityType> {
+ private final CountMap<EntityType> entityCounts;
+
+ BukkitChunkInfo(Chunk chunk) {
+ super(chunk.getX(), chunk.getZ());
+
+ this.entityCounts = new CountMap.EnumKeyed<>(EntityType.class);
+ for (Entity entity : chunk.getEntities()) {
+ if (entity != null) {
+ this.entityCounts.increment(entity.getType());
+ }
+ }
+ }
+
+ @Override
+ public CountMap<EntityType> getEntityCounts() {
+ return this.entityCounts;
+ }
+
+ @SuppressWarnings("deprecation")
+ @Override
+ public String entityTypeName(EntityType type) {
+ return type.getName();
+ }
+
+ }
+
+}
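
Note: the static initializer in BukkitWorldInfoProvider uses a capability-detection idiom worth naming: probe for optional API methods (Paper's getEntityCount() and friends) once via reflection at class-initialisation, cache the answer in a static final boolean, and branch on it in the polling hot path rather than paying reflection costs per call. The same idiom in isolation; java.util.List.copyOf is chosen purely as an illustrative probe target.

    import java.util.Collection;
    import java.util.List;

    public final class CapabilityCheck {
        private static final boolean HAS_LIST_COPY_OF;

        static {
            boolean found = false;
            try {
                // probe once; only the method's existence matters, it is never invoked
                List.class.getMethod("copyOf", Collection.class);
                found = true;
            } catch (ReflectiveOperationException e) {
                // method absent on this runtime: callers take the fallback path
            }
            HAS_LIST_COPY_OF = found;
        }

        private CapabilityCheck() {}

        public static boolean hasListCopyOf() {
            return HAS_LIST_COPY_OF;
        }
    }
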
diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkMVdWPlaceholders.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkMVdWPlaceholders.java
index 078d027..7fa6e02 100644
--- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkMVdWPlaceholders.java
+++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkMVdWPlaceholders.java
@@ -22,6 +22,7 @@ package me.lucko.spark.bukkit.placeholder;
import me.lucko.spark.bukkit.BukkitSparkPlugin;
import me.lucko.spark.common.SparkPlatform;
+import me.lucko.spark.common.util.SparkPlaceholder;
import be.maximvdw.placeholderapi.PlaceholderAPI;
import be.maximvdw.placeholderapi.PlaceholderReplaceEvent;
@@ -43,6 +44,6 @@ public class SparkMVdWPlaceholders implements PlaceholderReplacer {
}
String identifier = placeholder.substring("spark_".length());
- return SparkPlaceholderProvider.respond(this.platform, identifier);
+ return SparkPlaceholder.resolveFormattingCode(this.platform, identifier);
}
}
diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkPlaceholderApi.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkPlaceholderApi.java
index 69dca72..b3919dd 100644
--- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkPlaceholderApi.java
+++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkPlaceholderApi.java
@@ -23,6 +23,7 @@ package me.lucko.spark.bukkit.placeholder;
import me.clip.placeholderapi.expansion.PlaceholderExpansion;
import me.lucko.spark.bukkit.BukkitSparkPlugin;
import me.lucko.spark.common.SparkPlatform;
+import me.lucko.spark.common.util.SparkPlaceholder;
import org.bukkit.OfflinePlayer;
import org.bukkit.entity.Player;
@@ -44,7 +45,7 @@ public class SparkPlaceholderApi extends PlaceholderExpansion {
@Override
public String onRequest(OfflinePlayer p, String params) {
- return SparkPlaceholderProvider.respond(this.platform, params);
+ return SparkPlaceholder.resolveFormattingCode(this.platform, params);
}
@Override
diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkPlaceholderProvider.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkPlaceholderProvider.java
deleted file mode 100644
index 5b57857..0000000
--- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkPlaceholderProvider.java
+++ /dev/null
@@ -1,123 +0,0 @@
-/*
- * This file is part of spark.
- *
- * Copyright (c) lucko (Luck) <luck@lucko.me>
- * Copyright (c) contributors
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-package me.lucko.spark.bukkit.placeholder;
-
-import me.lucko.spark.common.SparkPlatform;
-import me.lucko.spark.common.monitor.cpu.CpuMonitor;
-import me.lucko.spark.common.monitor.tick.TickStatistics;
-import me.lucko.spark.common.util.StatisticFormatter;
-
-import net.kyori.adventure.text.Component;
-import net.kyori.adventure.text.TextComponent;
-import net.kyori.adventure.text.serializer.legacy.LegacyComponentSerializer;
-
-enum SparkPlaceholderProvider {
- ;
-
- public static TextComponent respondComponent(SparkPlatform platform, String placeholder) {
- if (placeholder.startsWith("tps")) {
- TickStatistics tickStatistics = platform.getTickStatistics();
- if (tickStatistics == null) {
- return null;
- }
-
- switch (placeholder) {
- case "tps":
- return Component.text()
- .append(StatisticFormatter.formatTps(tickStatistics.tps5Sec())).append(Component.text(", "))
- .append(StatisticFormatter.formatTps(tickStatistics.tps10Sec())).append(Component.text(", "))
- .append(StatisticFormatter.formatTps(tickStatistics.tps1Min())).append(Component.text(", "))
- .append(StatisticFormatter.formatTps(tickStatistics.tps5Min())).append(Component.text(", "))
- .append(StatisticFormatter.formatTps(tickStatistics.tps15Min()))
- .build();
- case "tps_5s":
- return StatisticFormatter.formatTps(tickStatistics.tps5Sec());
- case "tps_10s":
- return StatisticFormatter.formatTps(tickStatistics.tps10Sec());
- case "tps_1m":
- return StatisticFormatter.formatTps(tickStatistics.tps1Min());
- case "tps_5m":
- return StatisticFormatter.formatTps(tickStatistics.tps5Min());
- case "tps_15m":
- return StatisticFormatter.formatTps(tickStatistics.tps15Min());
- }
- }
-
- if (placeholder.startsWith("tickduration")) {
- TickStatistics tickStatistics = platform.getTickStatistics();
- if (tickStatistics == null || !tickStatistics.isDurationSupported()) {
- return null;
- }
-
- switch (placeholder) {
- case "tickduration":
- return Component.text()
- .append(StatisticFormatter.formatTickDurations(tickStatistics.duration10Sec())).append(Component.text("; "))
- .append(StatisticFormatter.formatTickDurations(tickStatistics.duration1Min()))
- .build();
- case "tickduration_10s":
- return StatisticFormatter.formatTickDurations(tickStatistics.duration10Sec());
- case "tickduration_1m":
- return StatisticFormatter.formatTickDurations(tickStatistics.duration1Min());
- }
- }
-
- if (placeholder.startsWith("cpu")) {
- switch (placeholder) {
- case "cpu_system":
- return Component.text()
- .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad10SecAvg())).append(Component.text(", "))
- .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad1MinAvg())).append(Component.text(", "))
- .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad15MinAvg()))
- .build();
- case "cpu_system_10s":
- return StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad10SecAvg());
- case "cpu_system_1m":
- return StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad1MinAvg());
- case "cpu_system_15m":
- return StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad15MinAvg());
- case "cpu_process":
- return Component.text()
- .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad10SecAvg())).append(Component.text(", "))
- .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad1MinAvg())).append(Component.text(", "))
- .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad15MinAvg()))
- .build();
- case "cpu_process_10s":
- return StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad10SecAvg());
- case "cpu_process_1m":
- return StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad1MinAvg());
- case "cpu_process_15m":
- return StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad15MinAvg());
- }
- }
-
- return null;
- }
-
- public static String respond(SparkPlatform platform, String placeholder) {
- TextComponent result = respondComponent(platform, placeholder);
- if (result == null) {
- return null;
- }
- return LegacyComponentSerializer.legacySection().serialize(result);
- }
-
-}
diff --git a/spark-bungeecord/build.gradle b/spark-bungeecord/build.gradle
index ccea89d..885de55 100644
--- a/spark-bungeecord/build.gradle
+++ b/spark-bungeecord/build.gradle
@@ -4,13 +4,7 @@ plugins {
dependencies {
implementation project(':spark-common')
- implementation('me.lucko:adventure-platform-bungeecord:4.9.4') {
- exclude(module: 'adventure-api')
- exclude(module: 'checker-qual')
- exclude(module: 'annotations')
- exclude(module: 'adventure-text-serializer-gson')
- exclude(module: 'adventure-text-serializer-legacy')
- }
+ implementation 'net.kyori:adventure-platform-bungeecord:4.2.0'
compileOnly 'net.md-5:bungeecord-api:1.16-R0.4'
}
@@ -27,12 +21,9 @@ processResources {
shadowJar {
archiveName = "spark-${project.pluginVersion}-bungeecord.jar"
- relocate 'okio', 'me.lucko.spark.lib.okio'
- relocate 'okhttp3', 'me.lucko.spark.lib.okhttp3'
relocate 'net.kyori.adventure', 'me.lucko.spark.lib.adventure'
relocate 'net.kyori.examination', 'me.lucko.spark.lib.adventure.examination'
relocate 'net.bytebuddy', 'me.lucko.spark.lib.bytebuddy'
- relocate 'org.tukaani.xz', 'me.lucko.spark.lib.xz'
relocate 'com.google.protobuf', 'me.lucko.spark.lib.protobuf'
relocate 'org.objectweb.asm', 'me.lucko.spark.lib.asm'
relocate 'one.profiler', 'me.lucko.spark.lib.asyncprofiler'
diff --git a/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordClassSourceLookup.java b/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordClassSourceLookup.java
index e601f87..2024d54 100644
--- a/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordClassSourceLookup.java
+++ b/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordClassSourceLookup.java
@@ -20,7 +20,7 @@
package me.lucko.spark.bungeecord;
-import me.lucko.spark.common.util.ClassSourceLookup;
+import me.lucko.spark.common.sampler.source.ClassSourceLookup;
import net.md_5.bungee.api.plugin.PluginDescription;
diff --git a/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordSparkPlugin.java b/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordSparkPlugin.java
index e259adc..71beddb 100644
--- a/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordSparkPlugin.java
+++ b/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordSparkPlugin.java
@@ -24,7 +24,8 @@ import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.SparkPlugin;
import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
import me.lucko.spark.common.platform.PlatformInfo;
-import me.lucko.spark.common.util.ClassSourceLookup;
+import me.lucko.spark.common.sampler.source.ClassSourceLookup;
+import me.lucko.spark.common.sampler.source.SourceMetadata;
import net.kyori.adventure.platform.bungeecord.BungeeAudiences;
import net.md_5.bungee.api.CommandSender;
@@ -33,6 +34,7 @@ import net.md_5.bungee.api.plugin.Plugin;
import net.md_5.bungee.api.plugin.TabExecutor;
import java.nio.file.Path;
+import java.util.Collection;
import java.util.logging.Level;
import java.util.stream.Stream;
@@ -92,6 +94,16 @@ public class BungeeCordSparkPlugin extends Plugin implements SparkPlugin {
}
@Override
+ public Collection<SourceMetadata> getKnownSources() {
+ return SourceMetadata.gather(
+ getProxy().getPluginManager().getPlugins(),
+ plugin -> plugin.getDescription().getName(),
+ plugin -> plugin.getDescription().getVersion(),
+ plugin -> plugin.getDescription().getAuthor()
+ );
+ }
+
+ @Override
public PlayerPingProvider createPlayerPingProvider() {
return new BungeeCordPlayerPingProvider(getProxy());
}
diff --git a/spark-common/build.gradle b/spark-common/build.gradle
index 554eec2..c3d960d 100644
--- a/spark-common/build.gradle
+++ b/spark-common/build.gradle
@@ -1,29 +1,37 @@
+import org.cadixdev.gradle.licenser.LicenseExtension
+
plugins {
- id 'com.google.protobuf' version '0.8.16'
+ id 'com.google.protobuf' version '0.9.1'
}
license {
exclude '**/sampler/async/jfr/**'
}
+extensions.configure(LicenseExtension.class) {
+ it.exclude {
+ it.file.toString().startsWith(buildDir.toString())
+ }
+}
+
dependencies {
api project(':spark-api')
- implementation 'com.github.jvm-profiling-tools:async-profiler:v2.7'
+ implementation 'com.github.jvm-profiling-tools:async-profiler:v2.8.3'
implementation 'org.ow2.asm:asm:9.1'
- implementation 'com.google.protobuf:protobuf-javalite:3.15.6'
- implementation 'com.squareup.okhttp3:okhttp:3.14.1'
- implementation 'com.squareup.okio:okio:1.17.3'
+ implementation 'com.google.protobuf:protobuf-javalite:3.21.11'
implementation 'net.bytebuddy:byte-buddy-agent:1.11.0'
- implementation 'org.tukaani:xz:1.8'
- api('net.kyori:adventure-api:4.9.3') {
+ api('net.kyori:adventure-api:4.12.0') {
+ exclude(module: 'adventure-bom')
exclude(module: 'checker-qual')
exclude(module: 'annotations')
}
- api('net.kyori:adventure-text-serializer-gson:4.9.3') {
+ api('net.kyori:adventure-text-serializer-gson:4.12.0') {
+ exclude(module: 'adventure-bom')
exclude(module: 'adventure-api')
exclude(module: 'gson')
}
- api('net.kyori:adventure-text-serializer-legacy:4.9.3') {
+ api('net.kyori:adventure-text-serializer-legacy:4.12.0') {
+ exclude(module: 'adventure-bom')
exclude(module: 'adventure-api')
}
implementation('net.kyori:adventure-text-feature-pagination:4.0.0-SNAPSHOT') {
@@ -34,20 +42,9 @@ dependencies {
compileOnly 'org.checkerframework:checker-qual:3.8.0'
}
-processResources {
- from(sourceSets.main.resources.srcDirs) {
- include 'spark/linux/libasyncProfiler.so'
- include 'spark/macosx/libasyncProfiler.so'
- }
-}
-
protobuf {
protoc {
- if (System.getProperty("os.name") == "Mac OS X" && System.getProperty("os.arch") == "aarch64") {
- path = '/opt/homebrew/bin/protoc'
- } else {
- artifact = 'com.google.protobuf:protoc:3.15.6'
- }
+ artifact = 'com.google.protobuf:protoc:3.21.11'
}
generateProtoTasks {
all().each { task ->
diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
index 0ef4556..dae04ff 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
@@ -45,17 +45,18 @@ import me.lucko.spark.common.monitor.ping.PingStatistics;
import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
import me.lucko.spark.common.monitor.tick.TickStatistics;
import me.lucko.spark.common.platform.PlatformStatisticsProvider;
+import me.lucko.spark.common.sampler.BackgroundSamplerManager;
+import me.lucko.spark.common.sampler.SamplerContainer;
+import me.lucko.spark.common.sampler.source.ClassSourceLookup;
import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.common.tick.TickReporter;
import me.lucko.spark.common.util.BytebinClient;
-import me.lucko.spark.common.util.ClassSourceLookup;
import me.lucko.spark.common.util.Configuration;
import me.lucko.spark.common.util.TemporaryFiles;
+import net.kyori.adventure.text.Component;
import net.kyori.adventure.text.event.ClickEvent;
-import okhttp3.OkHttpClient;
-
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
@@ -64,6 +65,7 @@ import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
+import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
@@ -74,13 +76,11 @@ import java.util.stream.Collectors;
import static net.kyori.adventure.text.Component.space;
import static net.kyori.adventure.text.Component.text;
-import static net.kyori.adventure.text.format.NamedTextColor.DARK_GRAY;
import static net.kyori.adventure.text.format.NamedTextColor.GOLD;
import static net.kyori.adventure.text.format.NamedTextColor.GRAY;
import static net.kyori.adventure.text.format.NamedTextColor.RED;
import static net.kyori.adventure.text.format.NamedTextColor.WHITE;
import static net.kyori.adventure.text.format.TextDecoration.BOLD;
-import static net.kyori.adventure.text.format.TextDecoration.UNDERLINED;
/**
* Abstract spark implementation used by all platforms.
@@ -91,15 +91,17 @@ public class SparkPlatform {
private static final DateTimeFormatter DATE_TIME_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd_HH.mm.ss");
private final SparkPlugin plugin;
+ private final TemporaryFiles temporaryFiles;
private final Configuration configuration;
private final String viewerUrl;
- private final OkHttpClient httpClient;
private final BytebinClient bytebinClient;
private final boolean disableResponseBroadcast;
private final List<CommandModule> commandModules;
private final List<Command> commands;
private final ReentrantLock commandExecuteLock = new ReentrantLock(true);
private final ActivityLog activityLog;
+ private final SamplerContainer samplerContainer;
+ private final BackgroundSamplerManager backgroundSamplerManager;
private final TickHook tickHook;
private final TickReporter tickReporter;
private final TickStatistics tickStatistics;
@@ -112,13 +114,12 @@ public class SparkPlatform {
public SparkPlatform(SparkPlugin plugin) {
this.plugin = plugin;
+ this.temporaryFiles = new TemporaryFiles(this.plugin.getPluginDirectory().resolve("tmp"));
this.configuration = new Configuration(this.plugin.getPluginDirectory().resolve("config.json"));
this.viewerUrl = this.configuration.getString("viewerUrl", "https://spark.lucko.me/");
String bytebinUrl = this.configuration.getString("bytebinUrl", "https://bytebin.lucko.me/");
-
- this.httpClient = new OkHttpClient();
- this.bytebinClient = new BytebinClient(this.httpClient, bytebinUrl, "spark-plugin");
+ this.bytebinClient = new BytebinClient(bytebinUrl, "spark-plugin");
this.disableResponseBroadcast = this.configuration.getBoolean("disableResponseBroadcast", false);
@@ -140,6 +141,9 @@ public class SparkPlatform {
this.activityLog = new ActivityLog(plugin.getPluginDirectory().resolve("activity.json"));
this.activityLog.load();
+ this.samplerContainer = new SamplerContainer();
+ this.backgroundSamplerManager = new BackgroundSamplerManager(this, this.configuration);
+
this.tickHook = plugin.createTickHook();
this.tickReporter = plugin.createTickReporter();
this.tickStatistics = this.tickHook != null || this.tickReporter != null ? new TickStatistics() : null;
@@ -178,6 +182,8 @@ public class SparkPlatform {
SparkApi api = new SparkApi(this);
this.plugin.registerApi(api);
SparkApi.register(api);
+
+ this.backgroundSamplerManager.initialise();
}
public void disable() {
@@ -195,20 +201,21 @@ public class SparkPlatform {
module.close();
}
- SparkApi.unregister();
+ this.samplerContainer.close();
- TemporaryFiles.deleteTemporaryFiles();
+ SparkApi.unregister();
- // shutdown okhttp
- // see: https://github.com/square/okhttp/issues/4029
- this.httpClient.dispatcher().executorService().shutdown();
- this.httpClient.connectionPool().evictAll();
+ this.temporaryFiles.deleteTemporaryFiles();
}
public SparkPlugin getPlugin() {
return this.plugin;
}
+ public TemporaryFiles getTemporaryFiles() {
+ return this.temporaryFiles;
+ }
+
public Configuration getConfiguration() {
return this.configuration;
}
@@ -233,6 +240,14 @@ public class SparkPlatform {
return this.activityLog;
}
+ public SamplerContainer getSamplerContainer() {
+ return this.samplerContainer;
+ }
+
+ public BackgroundSamplerManager getBackgroundSamplerManager() {
+ return this.backgroundSamplerManager;
+ }
+
public TickHook getTickHook() {
return this.tickHook;
}
@@ -366,14 +381,15 @@ public class SparkPlatform {
.append(text("v" + getPlugin().getVersion(), GRAY))
.build()
);
+
+ String helpCmd = "/" + getPlugin().getCommandName() + " help";
resp.replyPrefixed(text()
.color(GRAY)
- .append(text("Use "))
+ .append(text("Run "))
.append(text()
- .content("/" + getPlugin().getCommandName() + " help")
+ .content(helpCmd)
.color(WHITE)
- .decoration(UNDERLINED, true)
- .clickEvent(ClickEvent.runCommand("/" + getPlugin().getCommandName() + " help"))
+ .clickEvent(ClickEvent.runCommand(helpCmd))
.build()
)
.append(text(" to view usage information."))
@@ -389,7 +405,7 @@ public class SparkPlatform {
if (command.aliases().contains(alias)) {
resp.setCommandPrimaryAlias(command.primaryAlias());
try {
- command.executor().execute(this, sender, resp, new Arguments(rawArgs));
+ command.executor().execute(this, sender, resp, new Arguments(rawArgs, command.allowSubCommand()));
} catch (Arguments.ParseException e) {
resp.replyPrefixed(text(e.getMessage(), RED));
}
@@ -437,35 +453,53 @@ public class SparkPlatform {
);
for (Command command : commands) {
String usage = "/" + getPlugin().getCommandName() + " " + command.primaryAlias();
- ClickEvent clickEvent = ClickEvent.suggestCommand(usage);
- sender.reply(text()
- .append(text(">", GOLD, BOLD))
- .append(space())
- .append(text().content(usage).color(GRAY).clickEvent(clickEvent).build())
- .build()
- );
- for (Command.ArgumentInfo arg : command.arguments()) {
- if (arg.requiresParameter()) {
+
+ if (command.allowSubCommand()) {
+ Map<String, List<Command.ArgumentInfo>> argumentsBySubCommand = command.arguments().stream()
+ .collect(Collectors.groupingBy(Command.ArgumentInfo::subCommandName, LinkedHashMap::new, Collectors.toList()));
+
+ argumentsBySubCommand.forEach((subCommand, arguments) -> {
+ String subCommandUsage = usage + " " + subCommand;
+
sender.reply(text()
- .content(" ")
- .append(text("[", DARK_GRAY))
- .append(text("--" + arg.argumentName(), GRAY))
+ .append(text(">", GOLD, BOLD))
.append(space())
- .append(text("<" + arg.parameterDescription() + ">", DARK_GRAY))
- .append(text("]", DARK_GRAY))
- .build()
- );
- } else {
- sender.reply(text()
- .content(" ")
- .append(text("[", DARK_GRAY))
- .append(text("--" + arg.argumentName(), GRAY))
- .append(text("]", DARK_GRAY))
+ .append(text().content(subCommandUsage).color(GRAY).clickEvent(ClickEvent.suggestCommand(subCommandUsage)).build())
.build()
);
+
+ for (Command.ArgumentInfo arg : arguments) {
+ if (arg.argumentName().isEmpty()) {
+ continue;
+ }
+ sender.reply(arg.toComponent(" "));
+ }
+ });
+ } else {
+ sender.reply(text()
+ .append(text(">", GOLD, BOLD))
+ .append(space())
+ .append(text().content(usage).color(GRAY).clickEvent(ClickEvent.suggestCommand(usage)).build())
+ .build()
+ );
+
+ for (Command.ArgumentInfo arg : command.arguments()) {
+ sender.reply(arg.toComponent(" "));
}
}
}
+
+ sender.reply(Component.empty());
+ sender.replyPrefixed(text()
+ .append(text("For full usage information, please go to: "))
+ .append(text()
+ .content("https://spark.lucko.me/docs/Command-Usage")
+ .color(WHITE)
+ .clickEvent(ClickEvent.openUrl("https://spark.lucko.me/docs/Command-Usage"))
+ .build()
+ )
+ .build()
+ );
}
}
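
Note: the reworked help renderer above leans on the three-argument Collectors.groupingBy, whose LinkedHashMap::new map factory preserves the order in which sub-commands were declared; the common two-argument overload would return an unordered HashMap and scramble the help output. A self-contained sketch of that idiom, where the string pairs merely stand in for Command.ArgumentInfo:

    import java.util.Arrays;
    import java.util.LinkedHashMap;
    import java.util.List;
    import java.util.Map;
    import java.util.stream.Collectors;

    public class GroupingDemo {
        public static void main(String[] args) {
            // (sub-command, flag) pairs standing in for Command.ArgumentInfo
            List<String[]> arguments = Arrays.asList(
                    new String[]{"start", "--timeout"},
                    new String[]{"start", "--thread"},
                    new String[]{"stop", "--comment"});

            // LinkedHashMap::new keeps sub-commands in declaration order
            Map<String, List<String[]>> bySubCommand = arguments.stream()
                    .collect(Collectors.groupingBy(arg -> arg[0],
                            LinkedHashMap::new, Collectors.toList()));

            bySubCommand.forEach((sub, list) ->
                    System.out.println(sub + " -> " + list.size() + " argument(s)"));
            // prints "start -> 2 argument(s)" then "stop -> 1 argument(s)"
        }
    }
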
diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java
index b817df1..b7aef2a 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java
@@ -23,14 +23,19 @@ package me.lucko.spark.common;
import me.lucko.spark.api.Spark;
import me.lucko.spark.common.command.sender.CommandSender;
import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
+import me.lucko.spark.common.platform.MetadataProvider;
import me.lucko.spark.common.platform.PlatformInfo;
import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
import me.lucko.spark.common.sampler.ThreadDumper;
+import me.lucko.spark.common.sampler.source.ClassSourceLookup;
+import me.lucko.spark.common.sampler.source.SourceMetadata;
import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.common.tick.TickReporter;
-import me.lucko.spark.common.util.ClassSourceLookup;
import java.nio.file.Path;
+import java.util.Collection;
+import java.util.Collections;
import java.util.logging.Level;
import java.util.stream.Stream;
@@ -75,6 +80,15 @@ public interface SparkPlugin {
void executeAsync(Runnable task);
/**
+ * Executes the given {@link Runnable} on the server/client main thread.
+ *
+ * @param task the task
+ */
+ default void executeSync(Runnable task) {
+ throw new UnsupportedOperationException();
+ }
+
+ /**
* Print to the plugin logger.
*
* @param level the log level
@@ -123,6 +137,15 @@ public interface SparkPlugin {
}
/**
+ * Gets a list of known sources (plugins/mods) on the platform.
+ *
+ * @return a list of sources
+ */
+ default Collection<SourceMetadata> getKnownSources() {
+ return Collections.emptyList();
+ }
+
+ /**
* Creates a player ping provider function.
*
* <p>Returns {@code null} if the platform does not support querying player pings</p>
@@ -139,7 +162,25 @@ public interface SparkPlugin {
* @return the server config provider function
*/
default ServerConfigProvider createServerConfigProvider() {
- return ServerConfigProvider.NO_OP;
+ return null;
+ }
+
+ /**
+ * Creates a metadata provider for the platform.
+ *
+ * @return the platform extra metadata provider
+ */
+ default MetadataProvider createExtraMetadataProvider() {
+ return null;
+ }
+
+ /**
+ * Creates a world info provider.
+ *
+ * @return the world info provider function
+ */
+ default WorldInfoProvider createWorldInfoProvider() {
+ return WorldInfoProvider.NO_OP;
}
/**
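
Note: the new getKnownSources() hook is backed by SourceMetadata.gather, which (as the Bukkit and BungeeCord call sites earlier in this diff show) takes a collection of platform handles plus three string extractors for name, version and author. A hedged, self-contained sketch of the pattern; ModContainer is a hypothetical stand-in for a platform's plugin/mod handle, and the exact generic signature of gather is inferred from those call sites rather than shown in this diff.

    import me.lucko.spark.common.sampler.source.SourceMetadata;

    import java.util.Arrays;
    import java.util.Collection;

    public class KnownSourcesExample {
        static final class ModContainer {
            final String name, version, author;
            ModContainer(String name, String version, String author) {
                this.name = name; this.version = version; this.author = author;
            }
        }

        public static Collection<SourceMetadata> knownSources() {
            Collection<ModContainer> mods = Arrays.asList(
                    new ModContainer("example-mod", "1.0.0", "someone"));
            return SourceMetadata.gather(
                    mods,
                    mod -> mod.name,    // display name
                    mod -> mod.version, // version string
                    mod -> mod.author   // author(s)
            );
        }
    }
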
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/Arguments.java b/spark-common/src/main/java/me/lucko/spark/common/command/Arguments.java
index 17c49e2..ad8c777 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/Arguments.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/Arguments.java
@@ -38,8 +38,9 @@ public class Arguments {
private final List<String> rawArgs;
private final SetMultimap<String, String> parsedArgs;
+ private String parsedSubCommand = null;
- public Arguments(List<String> rawArgs) {
+ public Arguments(List<String> rawArgs, boolean allowSubCommand) {
this.rawArgs = rawArgs;
this.parsedArgs = HashMultimap.create();
@@ -52,7 +53,9 @@ public class Arguments {
Matcher matcher = FLAG_REGEX.matcher(arg);
boolean matches = matcher.matches();
- if (flag == null || matches) {
+ if (i == 0 && allowSubCommand && !matches) {
+ this.parsedSubCommand = arg;
+ } else if (flag == null || matches) {
if (!matches) {
throw new ParseException("Expected flag at position " + i + " but got '" + arg + "' instead!");
}
@@ -80,6 +83,10 @@ public class Arguments {
return this.rawArgs;
}
+ public String subCommand() {
+ return this.parsedSubCommand;
+ }
+
public int intFlag(String key) {
Iterator<String> it = this.parsedArgs.get(key).iterator();
if (it.hasNext()) {
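A minimal sketch of the new parsing behaviour: with allowSubCommand=true, the first bare token is captured as the sub-command instead of raising a ParseException, while flags parse as before (values shown are illustrative):

    Arguments args = new Arguments(Arrays.asList("start", "--interval", "5"), true);
    args.subCommand();        // "start"
    args.intFlag("interval"); // 5
    args.intFlag("timeout");  // -1 (flag absent)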
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/Command.java b/spark-common/src/main/java/me/lucko/spark/common/command/Command.java
index dad15e6..c6871a9 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/Command.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/Command.java
@@ -25,10 +25,17 @@ import com.google.common.collect.ImmutableList;
import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.command.sender.CommandSender;
+import net.kyori.adventure.text.Component;
+
import java.util.Collections;
import java.util.List;
import java.util.Objects;
+import static net.kyori.adventure.text.Component.space;
+import static net.kyori.adventure.text.Component.text;
+import static net.kyori.adventure.text.format.NamedTextColor.DARK_GRAY;
+import static net.kyori.adventure.text.format.NamedTextColor.GRAY;
+
public class Command {
public static Builder builder() {
@@ -39,12 +46,14 @@ public class Command {
private final List<ArgumentInfo> arguments;
private final Executor executor;
private final TabCompleter tabCompleter;
+ private final boolean allowSubCommand;
- private Command(List<String> aliases, List<ArgumentInfo> arguments, Executor executor, TabCompleter tabCompleter) {
+ private Command(List<String> aliases, List<ArgumentInfo> arguments, Executor executor, TabCompleter tabCompleter, boolean allowSubCommand) {
this.aliases = aliases;
this.arguments = arguments;
this.executor = executor;
this.tabCompleter = tabCompleter;
+ this.allowSubCommand = allowSubCommand;
}
public List<String> aliases() {
@@ -67,11 +76,16 @@ public class Command {
return this.aliases.get(0);
}
+ public boolean allowSubCommand() {
+ return this.allowSubCommand;
+ }
+
public static final class Builder {
private final ImmutableList.Builder<String> aliases = ImmutableList.builder();
private final ImmutableList.Builder<ArgumentInfo> arguments = ImmutableList.builder();
private Executor executor = null;
private TabCompleter tabCompleter = null;
+ private boolean allowSubCommand = false;
Builder() {
@@ -82,8 +96,13 @@ public class Command {
return this;
}
+ public Builder argumentUsage(String subCommandName, String argumentName, String parameterDescription) {
+ this.arguments.add(new ArgumentInfo(subCommandName, argumentName, parameterDescription));
+ return this;
+ }
+
public Builder argumentUsage(String argumentName, String parameterDescription) {
- this.arguments.add(new ArgumentInfo(argumentName, parameterDescription));
+ this.arguments.add(new ArgumentInfo("", argumentName, parameterDescription));
return this;
}
@@ -97,6 +116,11 @@ public class Command {
return this;
}
+ public Builder allowSubCommand(boolean allowSubCommand) {
+ this.allowSubCommand = allowSubCommand;
+ return this;
+ }
+
public Command build() {
List<String> aliases = this.aliases.build();
if (aliases.isEmpty()) {
@@ -108,7 +132,7 @@ public class Command {
if (this.tabCompleter == null) {
this.tabCompleter = TabCompleter.empty();
}
- return new Command(aliases, this.arguments.build(), this.executor, this.tabCompleter);
+ return new Command(aliases, this.arguments.build(), this.executor, this.tabCompleter, this.allowSubCommand);
}
}
@@ -127,14 +151,20 @@ public class Command {
}
public static final class ArgumentInfo {
+ private final String subCommandName;
private final String argumentName;
private final String parameterDescription;
- public ArgumentInfo(String argumentName, String parameterDescription) {
+ public ArgumentInfo(String subCommandName, String argumentName, String parameterDescription) {
+ this.subCommandName = subCommandName;
this.argumentName = argumentName;
this.parameterDescription = parameterDescription;
}
+ public String subCommandName() {
+ return this.subCommandName;
+ }
+
public String argumentName() {
return this.argumentName;
}
@@ -146,6 +176,26 @@ public class Command {
public boolean requiresParameter() {
return this.parameterDescription != null;
}
+
+ public Component toComponent(String padding) {
+ if (requiresParameter()) {
+ return text()
+ .content(padding)
+ .append(text("[", DARK_GRAY))
+ .append(text("--" + argumentName(), GRAY))
+ .append(space())
+ .append(text("<" + parameterDescription() + ">", DARK_GRAY))
+ .append(text("]", DARK_GRAY))
+ .build();
+ } else {
+ return text()
+ .content(padding)
+ .append(text("[", DARK_GRAY))
+ .append(text("--" + argumentName(), GRAY))
+ .append(text("]", DARK_GRAY))
+ .build();
+ }
+ }
}
}
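A minimal sketch of declaring a command using the new sub-command support (aliases and usages are illustrative):

    Command command = Command.builder()
            .aliases("example")
            .allowSubCommand(true)
            .argumentUsage("start", "interval", "interval millis")
            .argumentUsage("stop", "", null)
            .executor((platform, sender, resp, arguments) -> {
                // dispatch on arguments.subCommand()
            })
            .build();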
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/ActivityLogModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/ActivityLogModule.java
index b777f3e..6252ac7 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/ActivityLogModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/ActivityLogModule.java
@@ -50,6 +50,7 @@ import static net.kyori.adventure.text.format.TextDecoration.BOLD;
public class ActivityLogModule implements CommandModule, RowRenderer<Activity> {
private final Pagination.Builder pagination = Pagination.builder()
+ .width(45)
.renderer(new Renderer() {
@Override
public Component renderEmpty() {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/GcMonitoringModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/GcMonitoringModule.java
index 2ce83fd..a2da0a0 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/GcMonitoringModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/GcMonitoringModule.java
@@ -123,7 +123,7 @@ public class GcMonitoringModule implements CommandModule {
);
report.add(text()
.content(" ")
- .append(text(formatTime((long) averageFrequency), WHITE))
+ .append(text(FormatUtil.formatSeconds((long) averageFrequency / 1000), WHITE))
.append(text(" avg frequency", GRAY))
.build()
);
@@ -153,26 +153,6 @@ public class GcMonitoringModule implements CommandModule {
);
}
- private static String formatTime(long millis) {
- if (millis <= 0) {
- return "0s";
- }
-
- long second = millis / 1000;
- long minute = second / 60;
- second = second % 60;
-
- StringBuilder sb = new StringBuilder();
- if (minute != 0) {
- sb.append(minute).append("m ");
- }
- if (second != 0) {
- sb.append(second).append("s ");
- }
-
- return sb.toString().trim();
- }
-
private static class ReportingGcMonitor extends GarbageCollectionMonitor implements GarbageCollectionMonitor.Listener {
private final SparkPlatform platform;
private final CommandResponseHandler resp;
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java
index 1030f35..5bd62a8 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java
@@ -36,8 +36,6 @@ import me.lucko.spark.proto.SparkHeapProtos;
import net.kyori.adventure.text.event.ClickEvent;
-import okhttp3.MediaType;
-
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
@@ -54,7 +52,7 @@ import static net.kyori.adventure.text.format.NamedTextColor.GREEN;
import static net.kyori.adventure.text.format.NamedTextColor.RED;
public class HeapAnalysisModule implements CommandModule {
- private static final MediaType SPARK_HEAP_MEDIA_TYPE = MediaType.parse("application/x-spark-heap");
+ private static final String SPARK_HEAP_MEDIA_TYPE = "application/x-spark-heap";
@Override
public void registerCommands(Consumer<Command> consumer) {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
index 970d062..cd00f0d 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
@@ -35,17 +35,17 @@ import me.lucko.spark.common.sampler.Sampler;
import me.lucko.spark.common.sampler.SamplerBuilder;
import me.lucko.spark.common.sampler.ThreadDumper;
import me.lucko.spark.common.sampler.ThreadGrouper;
-import me.lucko.spark.common.sampler.ThreadNodeOrder;
import me.lucko.spark.common.sampler.async.AsyncSampler;
import me.lucko.spark.common.sampler.node.MergeMode;
+import me.lucko.spark.common.sampler.source.ClassSourceLookup;
import me.lucko.spark.common.tick.TickHook;
+import me.lucko.spark.common.util.FormatUtil;
import me.lucko.spark.common.util.MethodDisambiguator;
import me.lucko.spark.proto.SparkSamplerProtos;
+import net.kyori.adventure.text.Component;
import net.kyori.adventure.text.event.ClickEvent;
-import okhttp3.MediaType;
-
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
@@ -64,57 +64,45 @@ import static net.kyori.adventure.text.format.NamedTextColor.DARK_GRAY;
import static net.kyori.adventure.text.format.NamedTextColor.GOLD;
import static net.kyori.adventure.text.format.NamedTextColor.GRAY;
import static net.kyori.adventure.text.format.NamedTextColor.RED;
+import static net.kyori.adventure.text.format.NamedTextColor.WHITE;
public class SamplerModule implements CommandModule {
- private static final MediaType SPARK_SAMPLER_MEDIA_TYPE = MediaType.parse("application/x-spark-sampler");
-
- /** The sampler instance currently running, if any */
- private Sampler activeSampler = null;
-
- @Override
- public void close() {
- if (this.activeSampler != null) {
- this.activeSampler.stop();
- this.activeSampler = null;
- }
- }
+ private static final String SPARK_SAMPLER_MEDIA_TYPE = "application/x-spark-sampler";
@Override
public void registerCommands(Consumer<Command> consumer) {
consumer.accept(Command.builder()
.aliases("profiler", "sampler")
- .argumentUsage("info", null)
- .argumentUsage("stop", null)
- .argumentUsage("cancel", null)
- .argumentUsage("interval", "interval millis")
- .argumentUsage("thread", "thread name")
- .argumentUsage("only-ticks-over", "tick length millis")
- .argumentUsage("timeout", "timeout seconds")
- .argumentUsage("regex --thread", "thread regex")
- .argumentUsage("combine-all", null)
- .argumentUsage("not-combined", null)
- .argumentUsage("force-java-sampler", null)
- .argumentUsage("stop --comment", "comment")
- .argumentUsage("stop --order-by-time", null)
- .argumentUsage("stop --save-to-file", null)
+ .allowSubCommand(true)
+ .argumentUsage("info", "", null)
+ .argumentUsage("start", "timeout", "timeout seconds")
+ .argumentUsage("start", "thread *", null)
+ .argumentUsage("start", "thread", "thread name")
+ .argumentUsage("start", "only-ticks-over", "tick length millis")
+ .argumentUsage("start", "interval", "interval millis")
+ .argumentUsage("stop", "", null)
+ .argumentUsage("cancel", "", null)
.executor(this::profiler)
.tabCompleter((platform, sender, arguments) -> {
- if (arguments.contains("--info") || arguments.contains("--cancel")) {
- return Collections.emptyList();
- }
-
- if (arguments.contains("--stop") || arguments.contains("--upload")) {
- return TabCompleter.completeForOpts(arguments, "--order-by-time", "--comment", "--save-to-file");
+ List<String> opts = Collections.emptyList();
+
+ if (arguments.size() > 0) {
+ String subCommand = arguments.get(0);
+ if (subCommand.equals("stop") || subCommand.equals("upload")) {
+ opts = new ArrayList<>(Arrays.asList("--comment", "--save-to-file"));
+ opts.removeAll(arguments);
+ }
+ if (subCommand.equals("start")) {
+ opts = new ArrayList<>(Arrays.asList("--timeout", "--regex", "--combine-all",
+ "--not-combined", "--interval", "--only-ticks-over", "--force-java-sampler"));
+ opts.removeAll(arguments);
+ opts.add("--thread"); // allowed multiple times
+ }
}
- List<String> opts = new ArrayList<>(Arrays.asList("--info", "--stop", "--cancel",
- "--timeout", "--regex", "--combine-all", "--not-combined", "--interval",
- "--only-ticks-over", "--force-java-sampler"));
- opts.removeAll(arguments);
- opts.add("--thread"); // allowed multiple times
-
return TabCompleter.create()
- .from(0, CompletionSupplier.startsWith(opts))
+ .at(0, CompletionSupplier.startsWith(Arrays.asList("info", "start", "stop", "cancel")))
+ .from(1, CompletionSupplier.startsWith(opts))
.complete(arguments);
})
.build()
@@ -122,25 +110,50 @@ public class SamplerModule implements CommandModule {
}
private void profiler(SparkPlatform platform, CommandSender sender, CommandResponseHandler resp, Arguments arguments) {
- if (arguments.boolFlag("info")) {
- profilerInfo(resp);
+ String subCommand = arguments.subCommand() == null ? "" : arguments.subCommand();
+
+ if (subCommand.equals("info") || arguments.boolFlag("info")) {
+ profilerInfo(platform, resp);
return;
}
- if (arguments.boolFlag("cancel")) {
- profilerCancel(resp);
+ if (subCommand.equals("cancel") || arguments.boolFlag("cancel")) {
+ profilerCancel(platform, resp);
return;
}
- if (arguments.boolFlag("stop") || arguments.boolFlag("upload")) {
+ if (subCommand.equals("stop") || subCommand.equals("upload") || arguments.boolFlag("stop") || arguments.boolFlag("upload")) {
profilerStop(platform, sender, resp, arguments);
return;
}
- profilerStart(platform, sender, resp, arguments);
+ if (subCommand.equals("start") || arguments.boolFlag("start")) {
+ profilerStart(platform, sender, resp, arguments);
+ return;
+ }
+
+ if (arguments.raw().isEmpty()) {
+ profilerInfo(platform, resp);
+ } else {
+ profilerStart(platform, sender, resp, arguments);
+ }
}
private void profilerStart(SparkPlatform platform, CommandSender sender, CommandResponseHandler resp, Arguments arguments) {
+ Sampler previousSampler = platform.getSamplerContainer().getActiveSampler();
+ if (previousSampler != null) {
+ if (previousSampler.isRunningInBackground()) {
+ // there is a background profiler running - stop that first
+ resp.replyPrefixed(text("Stopping the background profiler before starting... please wait"));
+ previousSampler.stop(true);
+ platform.getSamplerContainer().unsetActiveSampler(previousSampler);
+ } else {
+ // there is a non-background profiler running - tell the user
+ profilerInfo(platform, resp);
+ return;
+ }
+ }
+
int timeoutSeconds = arguments.intFlag("timeout");
if (timeoutSeconds != -1 && timeoutSeconds <= 10) {
resp.replyPrefixed(text("The specified timeout is not long enough for accurate results to be formed. " +
@@ -197,12 +210,7 @@ public class SamplerModule implements CommandModule {
}
}
- if (this.activeSampler != null) {
- resp.replyPrefixed(text("An active profiler is already running."));
- return;
- }
-
- resp.broadcastPrefixed(text("Initializing a new profiler, please wait..."));
+ resp.broadcastPrefixed(text("Starting a new profiler, please wait..."));
SamplerBuilder builder = new SamplerBuilder();
builder.threadDumper(threadDumper);
@@ -217,21 +225,25 @@ public class SamplerModule implements CommandModule {
if (ticksOver != -1) {
builder.ticksOver(ticksOver, tickHook);
}
- Sampler sampler = this.activeSampler = builder.start(platform);
+ Sampler sampler = builder.start(platform);
+ platform.getSamplerContainer().setActiveSampler(sampler);
resp.broadcastPrefixed(text()
- .append(text("Profiler now active!", GOLD))
+ .append(text("Profiler is now running!", GOLD))
.append(space())
.append(text("(" + (sampler instanceof AsyncSampler ? "async" : "built-in java") + ")", DARK_GRAY))
.build()
);
+
if (timeoutSeconds == -1) {
- resp.broadcastPrefixed(text("Use '/" + platform.getPlugin().getCommandName() + " profiler --stop' to stop profiling and upload the results."));
+ resp.broadcastPrefixed(text("It will run in the background until it is stopped by an admin."));
+ resp.broadcastPrefixed(text("To stop the profiler and upload the results, run:"));
+ resp.broadcastPrefixed(cmdPrompt("/" + platform.getPlugin().getCommandName() + " profiler stop"));
} else {
- resp.broadcastPrefixed(text("The results will be automatically returned after the profiler has been running for " + timeoutSeconds + " seconds."));
+ resp.broadcastPrefixed(text("The results will be automatically returned after the profiler has been running for " + FormatUtil.formatSeconds(timeoutSeconds) + "."));
}
- CompletableFuture<Sampler> future = this.activeSampler.getFuture();
+ CompletableFuture<Sampler> future = sampler.getFuture();
// send message if profiling fails
future.whenCompleteAsync((s, throwable) -> {
@@ -242,70 +254,101 @@ public class SamplerModule implements CommandModule {
});
// set activeSampler to null when complete.
- future.whenCompleteAsync((s, throwable) -> {
- if (sampler == this.activeSampler) {
- this.activeSampler = null;
- }
- });
+ sampler.getFuture().whenCompleteAsync((s, throwable) -> platform.getSamplerContainer().unsetActiveSampler(s));
// await the result
if (timeoutSeconds != -1) {
- ThreadNodeOrder threadOrder = arguments.boolFlag("order-by-time") ? ThreadNodeOrder.BY_TIME : ThreadNodeOrder.BY_NAME;
String comment = Iterables.getFirst(arguments.stringFlag("comment"), null);
MethodDisambiguator methodDisambiguator = new MethodDisambiguator();
MergeMode mergeMode = arguments.boolFlag("separate-parent-calls") ? MergeMode.separateParentCalls(methodDisambiguator) : MergeMode.sameMethod(methodDisambiguator);
boolean saveToFile = arguments.boolFlag("save-to-file");
future.thenAcceptAsync(s -> {
resp.broadcastPrefixed(text("The active profiler has completed! Uploading results..."));
- handleUpload(platform, resp, s, threadOrder, comment, mergeMode, saveToFile);
+ handleUpload(platform, resp, s, comment, mergeMode, saveToFile);
});
}
}
- private void profilerInfo(CommandResponseHandler resp) {
- if (this.activeSampler == null) {
- resp.replyPrefixed(text("There isn't an active profiler running."));
+ private void profilerInfo(SparkPlatform platform, CommandResponseHandler resp) {
+ Sampler sampler = platform.getSamplerContainer().getActiveSampler();
+ if (sampler == null) {
+ resp.replyPrefixed(text("The profiler isn't running!"));
+ resp.replyPrefixed(text("To start a new one, run:"));
+ resp.replyPrefixed(cmdPrompt("/" + platform.getPlugin().getCommandName() + " profiler start"));
} else {
- long timeout = this.activeSampler.getEndTime();
+ resp.replyPrefixed(text("Profiler is already running!", GOLD));
+
+ long runningTime = (System.currentTimeMillis() - sampler.getStartTime()) / 1000L;
+
+ if (sampler.isRunningInBackground()) {
+ resp.replyPrefixed(text()
+ .append(text("It was started "))
+ .append(text("automatically", WHITE))
+ .append(text(" when spark was enabled and has been running in the background for " + FormatUtil.formatSeconds(runningTime) + "."))
+ .build()
+ );
+ } else {
+ resp.replyPrefixed(text("So far, it has profiled for " + FormatUtil.formatSeconds(runningTime) + "."));
+ }
+
+ long timeout = sampler.getAutoEndTime();
if (timeout == -1) {
- resp.replyPrefixed(text("There is an active profiler currently running, with no defined timeout."));
+ resp.replyPrefixed(text("To stop the profiler and upload the results, run:"));
+ resp.replyPrefixed(cmdPrompt("/" + platform.getPlugin().getCommandName() + " profiler stop"));
} else {
long timeoutDiff = (timeout - System.currentTimeMillis()) / 1000L;
- resp.replyPrefixed(text("There is an active profiler currently running, due to timeout in " + timeoutDiff + " seconds."));
+ resp.replyPrefixed(text("It is due to complete automatically and upload results in " + FormatUtil.formatSeconds(timeoutDiff) + "."));
}
- long runningTime = (System.currentTimeMillis() - this.activeSampler.getStartTime()) / 1000L;
- resp.replyPrefixed(text("It has been profiling for " + runningTime + " seconds so far."));
+ resp.replyPrefixed(text("To cancel the profiler without uploading the results, run:"));
+ resp.replyPrefixed(cmdPrompt("/" + platform.getPlugin().getCommandName() + " profiler cancel"));
}
}
- private void profilerCancel(CommandResponseHandler resp) {
- if (this.activeSampler == null) {
+ private void profilerCancel(SparkPlatform platform, CommandResponseHandler resp) {
+ Sampler sampler = platform.getSamplerContainer().getActiveSampler();
+ if (sampler == null) {
resp.replyPrefixed(text("There isn't an active profiler running."));
} else {
- close();
- resp.broadcastPrefixed(text("The active profiler has been cancelled.", GOLD));
+ platform.getSamplerContainer().stopActiveSampler(true);
+ resp.broadcastPrefixed(text("Profiler has been cancelled.", GOLD));
}
}
private void profilerStop(SparkPlatform platform, CommandSender sender, CommandResponseHandler resp, Arguments arguments) {
- if (this.activeSampler == null) {
+ Sampler sampler = platform.getSamplerContainer().getActiveSampler();
+
+ if (sampler == null) {
resp.replyPrefixed(text("There isn't an active profiler running."));
} else {
- this.activeSampler.stop();
- resp.broadcastPrefixed(text("The active profiler has been stopped! Uploading results..."));
- ThreadNodeOrder threadOrder = arguments.boolFlag("order-by-time") ? ThreadNodeOrder.BY_TIME : ThreadNodeOrder.BY_NAME;
+ platform.getSamplerContainer().unsetActiveSampler(sampler);
+ sampler.stop(false);
+
+ boolean saveToFile = arguments.boolFlag("save-to-file");
+ if (saveToFile) {
+ resp.broadcastPrefixed(text("Stopping the profiler & saving results, please wait..."));
+ } else {
+ resp.broadcastPrefixed(text("Stopping the profiler & uploading results, please wait..."));
+ }
+
String comment = Iterables.getFirst(arguments.stringFlag("comment"), null);
MethodDisambiguator methodDisambiguator = new MethodDisambiguator();
MergeMode mergeMode = arguments.boolFlag("separate-parent-calls") ? MergeMode.separateParentCalls(methodDisambiguator) : MergeMode.sameMethod(methodDisambiguator);
- boolean saveToFile = arguments.boolFlag("save-to-file");
- handleUpload(platform, resp, this.activeSampler, threadOrder, comment, mergeMode, saveToFile);
- this.activeSampler = null;
+ handleUpload(platform, resp, sampler, comment, mergeMode, saveToFile);
+
+ // if the previous sampler was running in the background, create a new one
+ if (platform.getBackgroundSamplerManager().restartBackgroundSampler()) {
+ resp.broadcastPrefixed(text()
+ .append(text("Restarted the background profiler. "))
+ .append(text("(If you don't want this to happen, run: /" + platform.getPlugin().getCommandName() + " profiler cancel)", DARK_GRAY))
+ .build()
+ );
+ }
}
}
- private void handleUpload(SparkPlatform platform, CommandResponseHandler resp, Sampler sampler, ThreadNodeOrder threadOrder, String comment, MergeMode mergeMode, boolean saveToFileFlag) {
- SparkSamplerProtos.SamplerData output = sampler.toProto(platform, resp.sender(), threadOrder, comment, mergeMode, platform.createClassSourceLookup());
+ private void handleUpload(SparkPlatform platform, CommandResponseHandler resp, Sampler sampler, String comment, MergeMode mergeMode, boolean saveToFileFlag) {
+ SparkSamplerProtos.SamplerData output = sampler.toProto(platform, resp.sender(), comment, mergeMode, ClassSourceLookup.create(platform));
boolean saveToFile = false;
if (saveToFileFlag) {
@@ -315,7 +358,7 @@ public class SamplerModule implements CommandModule {
String key = platform.getBytebinClient().postContent(output, SPARK_SAMPLER_MEDIA_TYPE).key();
String url = platform.getViewerUrl() + key;
- resp.broadcastPrefixed(text("Profiler results:", GOLD));
+ resp.broadcastPrefixed(text("Profiler stopped & upload complete!", GOLD));
resp.broadcast(text()
.content(url)
.color(GRAY)
@@ -336,13 +379,9 @@ public class SamplerModule implements CommandModule {
try {
Files.write(file, output.toByteArray());
- resp.broadcastPrefixed(text()
- .content("Profile written to: ")
- .color(GOLD)
- .append(text(file.toString(), GRAY))
- .build()
- );
- resp.broadcastPrefixed(text("You can read the profile file using the viewer web-app - " + platform.getViewerUrl(), GRAY));
+ resp.broadcastPrefixed(text("Profiler stopped & save complete!", GOLD));
+ resp.broadcastPrefixed(text("Data has been written to: " + file));
+ resp.broadcastPrefixed(text("You can view the profile file using the web app @ " + platform.getViewerUrl(), GRAY));
platform.getActivityLog().addToLog(Activity.fileActivity(resp.sender(), System.currentTimeMillis(), "Profiler", file.toString()));
} catch (IOException e) {
@@ -351,4 +390,16 @@ public class SamplerModule implements CommandModule {
}
}
}
+
+ private static Component cmdPrompt(String cmd) {
+ return text()
+ .append(text(" "))
+ .append(text()
+ .content(cmd)
+ .color(WHITE)
+ .clickEvent(ClickEvent.runCommand(cmd))
+ .build()
+ )
+ .build();
+ }
}
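With these changes the profiler is driven by sub-commands rather than flags; an illustrative session (the "spark" command name is the default and may differ per platform):

    /spark profiler start --timeout 30 --interval 5
    /spark profiler info
    /spark profiler stop      (stop and upload the results)
    /spark profiler cancel    (discard without uploading)

Running "/spark profiler" with no arguments now prints the status message, and the legacy "--start"/"--stop" style flags are still accepted for backwards compatibility.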
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/LinuxProc.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/LinuxProc.java
index 7d688d7..563e247 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/util/LinuxProc.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/LinuxProc.java
@@ -18,7 +18,7 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-package me.lucko.spark.common.util;
+package me.lucko.spark.common.monitor;
import org.checkerframework.checker.nullness.qual.NonNull;
import org.checkerframework.checker.nullness.qual.Nullable;
@@ -49,7 +49,12 @@ public enum LinuxProc {
/**
* Information about the system network usage.
*/
- NET_DEV("/proc/net/dev");
+ NET_DEV("/proc/net/dev"),
+
+ /**
+ * Information about the operating system distro.
+ */
+ OSINFO("/etc/os-release");
private final Path path;
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/WindowsWmic.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/WindowsWmic.java
new file mode 100644
index 0000000..6b602d9
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/WindowsWmic.java
@@ -0,0 +1,74 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.monitor;
+
+import org.checkerframework.checker.nullness.qual.NonNull;
+
+import java.io.BufferedReader;
+import java.io.InputStreamReader;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+/**
+ * Utility for reading from wmic (Windows Management Instrumentation Command-line) on Windows systems.
+ */
+public enum WindowsWmic {
+
+ /**
+ * Gets the CPU name.
+ */
+ CPU_GET_NAME("wmic", "cpu", "get", "name", "/FORMAT:list"),
+
+ /**
+ * Gets the operating system name (caption) and version.
+ */
+ OS_GET_CAPTION_AND_VERSION("wmic", "os", "get", "caption,version", "/FORMAT:list");
+
+ private static final boolean SUPPORTED = System.getProperty("os.name").startsWith("Windows");
+
+ private final String[] cmdArgs;
+
+ WindowsWmic(String... cmdArgs) {
+ this.cmdArgs = cmdArgs;
+ }
+
+ public @NonNull List<String> read() {
+ if (SUPPORTED) {
+ ProcessBuilder process = new ProcessBuilder(this.cmdArgs).redirectErrorStream(true);
+ try (BufferedReader buf = new BufferedReader(new InputStreamReader(process.start().getInputStream()))) {
+ List<String> lines = new ArrayList<>();
+
+ String line;
+ while ((line = buf.readLine()) != null) {
+ lines.add(line);
+ }
+
+ return lines;
+ } catch (Exception e) {
+ // ignore
+ }
+ }
+
+ return Collections.emptyList();
+ }
+}
+
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuInfo.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuInfo.java
index 9bbe0f8..9954bd5 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuInfo.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuInfo.java
@@ -20,12 +20,13 @@
package me.lucko.spark.common.monitor.cpu;
-import me.lucko.spark.common.util.LinuxProc;
+import me.lucko.spark.common.monitor.LinuxProc;
+import me.lucko.spark.common.monitor.WindowsWmic;
import java.util.regex.Pattern;
/**
- * Small utility to query the CPU model on Linux systems.
+ * Small utility to query the CPU model on Linux and Windows systems.
*/
public enum CpuInfo {
;
@@ -40,11 +41,17 @@ public enum CpuInfo {
public static String queryCpuModel() {
for (String line : LinuxProc.CPUINFO.read()) {
String[] splitLine = SPACE_COLON_SPACE_PATTERN.split(line);
-
if (splitLine[0].equals("model name") || splitLine[0].equals("Processor")) {
return splitLine[1];
}
}
+
+ for (String line : WindowsWmic.CPU_GET_NAME.read()) {
+ if (line.startsWith("Name")) {
+ return line.substring(5).trim();
+ }
+ }
+
return "";
}
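For reference, wmic's "/FORMAT:list" mode prints key=value pairs, so the parsing above reduces to stripping the 5-character "Name=" prefix (the sample value is illustrative):

    String line = "Name=Intel(R) Core(TM) i7-8700K CPU @ 3.70GHz";
    String model = line.substring(5).trim(); // "Intel(R) Core(TM) i7-8700K CPU @ 3.70GHz"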
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/MemoryInfo.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/MemoryInfo.java
index 226f75b..8f63f71 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/MemoryInfo.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/MemoryInfo.java
@@ -20,7 +20,7 @@
package me.lucko.spark.common.monitor.memory;
-import me.lucko.spark.common.util.LinuxProc;
+import me.lucko.spark.common.monitor.LinuxProc;
import java.lang.management.ManagementFactory;
import java.util.regex.Matcher;
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkInterfaceInfo.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkInterfaceInfo.java
index bd9e187..332077a 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkInterfaceInfo.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkInterfaceInfo.java
@@ -22,7 +22,7 @@ package me.lucko.spark.common.monitor.net;
import com.google.common.collect.ImmutableMap;
-import me.lucko.spark.common.util.LinuxProc;
+import me.lucko.spark.common.monitor.LinuxProc;
import org.checkerframework.checker.nullness.qual.NonNull;
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/os/OperatingSystemInfo.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/os/OperatingSystemInfo.java
new file mode 100644
index 0000000..1c2732c
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/os/OperatingSystemInfo.java
@@ -0,0 +1,86 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.monitor.os;
+
+import me.lucko.spark.common.monitor.LinuxProc;
+import me.lucko.spark.common.monitor.WindowsWmic;
+
+/**
+ * Small utility to query the operating system name & version.
+ */
+public final class OperatingSystemInfo {
+ private final String name;
+ private final String version;
+ private final String arch;
+
+ public OperatingSystemInfo(String name, String version, String arch) {
+ this.name = name;
+ this.version = version;
+ this.arch = arch;
+ }
+
+ public String name() {
+ return this.name;
+ }
+
+ public String version() {
+ return this.version;
+ }
+
+ public String arch() {
+ return this.arch;
+ }
+
+ public static OperatingSystemInfo poll() {
+ String name = null;
+ String version = null;
+
+ for (String line : LinuxProc.OSINFO.read()) {
+ if (line.startsWith("PRETTY_NAME") && line.length() > 13) {
+ name = line.substring(13).replace('"', ' ').trim();
+ }
+ }
+
+ for (String line : WindowsWmic.OS_GET_CAPTION_AND_VERSION.read()) {
+ if (line.startsWith("Caption") && line.length() > 18) {
+ // Caption=Microsoft Windows something
+ // \----------------/ = 18 chars
+ name = line.substring(18).trim();
+ } else if (line.startsWith("Version")) {
+ // Version=10.0.something
+ // \------/ = 8 chars
+ version = line.substring(8).trim();
+ }
+ }
+
+ if (name == null) {
+ name = System.getProperty("os.name");
+ }
+
+ if (version == null) {
+ version = System.getProperty("os.version");
+ }
+
+ String arch = System.getProperty("os.arch");
+
+ return new OperatingSystemInfo(name, version, arch);
+ }
+}
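A minimal usage sketch: poll() prefers /etc/os-release on Linux and wmic on Windows, then falls back to the JVM's os.* system properties for anything it could not determine.

    OperatingSystemInfo os = OperatingSystemInfo.poll();
    String summary = os.name() + " " + os.version() + " (" + os.arch() + ")";
    // e.g. "Ubuntu 22.04.1 LTS 5.15.0-56-generic (amd64)" -- illustrative output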
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadNodeOrder.java b/spark-common/src/main/java/me/lucko/spark/common/platform/MetadataProvider.java
index adcedcd..39022b4 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadNodeOrder.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/MetadataProvider.java
@@ -18,35 +18,30 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-package me.lucko.spark.common.sampler;
+package me.lucko.spark.common.platform;
-import me.lucko.spark.common.sampler.node.ThreadNode;
+import com.google.gson.JsonElement;
-import java.util.Comparator;
+import java.util.LinkedHashMap;
+import java.util.Map;
/**
- * Methods of ordering {@link ThreadNode}s in the output data.
+ * Function to export dynamic metadata to be displayed within the spark viewer.
*/
-public enum ThreadNodeOrder implements Comparator<ThreadNode> {
+@FunctionalInterface
+public interface MetadataProvider {
/**
- * Order by the name of the thread (alphabetically)
+ * Produces a map of the metadata.
+ *
+ * @return the metadata
*/
- BY_NAME {
- @Override
- public int compare(ThreadNode o1, ThreadNode o2) {
- return o1.getThreadLabel().compareTo(o2.getThreadLabel());
- }
- },
+ Map<String, JsonElement> get();
- /**
- * Order by the time taken by the thread (most time taken first)
- */
- BY_TIME {
- @Override
- public int compare(ThreadNode o1, ThreadNode o2) {
- return -Double.compare(o1.getTotalTime(), o2.getTotalTime());
- }
+ default Map<String, String> export() {
+ Map<String, String> map = new LinkedHashMap<>();
+ get().forEach((key, value) -> map.put(key, value.toString()));
+ return map;
}
}
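Because MetadataProvider is a functional interface, a platform can contribute extra viewer metadata with a simple lambda (the keys shown are hypothetical):

    MetadataProvider provider = () -> {
        Map<String, JsonElement> map = new LinkedHashMap<>();
        map.put("onlineMode", new JsonPrimitive(true));   // com.google.gson.JsonPrimitive
        map.put("brand", new JsonPrimitive("Example"));
        return map;
    };
    Map<String, String> exported = provider.export(); // values serialised to strings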
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java
index f35bbbe..fc7e78a 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java
@@ -28,10 +28,14 @@ import me.lucko.spark.common.monitor.memory.GarbageCollectorStatistics;
import me.lucko.spark.common.monitor.memory.MemoryInfo;
import me.lucko.spark.common.monitor.net.NetworkInterfaceAverages;
import me.lucko.spark.common.monitor.net.NetworkMonitor;
+import me.lucko.spark.common.monitor.os.OperatingSystemInfo;
import me.lucko.spark.common.monitor.ping.PingStatistics;
import me.lucko.spark.common.monitor.tick.TickStatistics;
+import me.lucko.spark.common.platform.world.AsyncWorldInfoProvider;
+import me.lucko.spark.common.platform.world.WorldStatisticsProvider;
import me.lucko.spark.proto.SparkProtos.PlatformStatistics;
import me.lucko.spark.proto.SparkProtos.SystemStatistics;
+import me.lucko.spark.proto.SparkProtos.WorldStatistics;
import java.lang.management.ManagementFactory;
import java.lang.management.MemoryUsage;
@@ -47,6 +51,7 @@ public class PlatformStatisticsProvider {
public SystemStatistics getSystemStatistics() {
RuntimeMXBean runtimeBean = ManagementFactory.getRuntimeMXBean();
+ OperatingSystemInfo osInfo = OperatingSystemInfo.poll();
SystemStatistics.Builder builder = SystemStatistics.newBuilder()
.setCpu(SystemStatistics.Cpu.newBuilder()
@@ -83,9 +88,9 @@ public class PlatformStatisticsProvider {
.build()
)
.setOs(SystemStatistics.Os.newBuilder()
- .setArch(System.getProperty("os.arch"))
- .setName(System.getProperty("os.name"))
- .setVersion(System.getProperty("os.version"))
+ .setArch(osInfo.arch())
+ .setName(osInfo.name())
+ .setVersion(osInfo.version())
.build()
)
.setJava(SystemStatistics.Java.newBuilder()
@@ -182,6 +187,19 @@ public class PlatformStatisticsProvider {
builder.setPlayerCount(playerCount);
}
+ try {
+ WorldStatisticsProvider worldStatisticsProvider = new WorldStatisticsProvider(
+ new AsyncWorldInfoProvider(this.platform, this.platform.getPlugin().createWorldInfoProvider())
+ );
+ WorldStatistics worldStatistics = worldStatisticsProvider.getWorldStatistics();
+ if (worldStatistics != null) {
+ builder.setWorld(worldStatistics);
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+
return builder.build();
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ConfigParser.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ConfigParser.java
new file mode 100644
index 0000000..675a32e
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ConfigParser.java
@@ -0,0 +1,48 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform.serverconfig;
+
+import com.google.gson.JsonElement;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.Map;
+
+public interface ConfigParser {
+
+ JsonElement load(String file, ExcludedConfigFilter filter) throws IOException;
+
+ default Map<String, Object> parse(Path file) throws IOException {
+ if (!Files.exists(file)) {
+ return null;
+ }
+
+ try (BufferedReader reader = Files.newBufferedReader(file, StandardCharsets.UTF_8)) {
+ return this.parse(reader);
+ }
+ }
+
+ Map<String, Object> parse(BufferedReader reader) throws IOException;
+
+}
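A hypothetical parser for JSON config files, following the same shape as PropertiesConfigParser below (not part of this patch):

    public enum JsonConfigParser implements ConfigParser {
        INSTANCE;

        private static final Gson GSON = new Gson();

        @Override
        public JsonElement load(String file, ExcludedConfigFilter filter) throws IOException {
            Map<String, Object> values = this.parse(Paths.get(file));
            if (values == null) {
                return null; // file does not exist
            }
            return filter.apply(GSON.toJsonTree(values));
        }

        @Override
        @SuppressWarnings("unchecked")
        public Map<String, Object> parse(BufferedReader reader) throws IOException {
            return (Map<String, Object>) GSON.fromJson(reader, Map.class);
        }
    }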
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ExcludedConfigFilter.java
index ead2131..c11c7f8 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ExcludedConfigFilter.java
@@ -20,11 +20,9 @@
package me.lucko.spark.common.platform.serverconfig;
-import com.google.common.collect.ImmutableMap;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
-import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
@@ -34,62 +32,26 @@ import java.util.Map;
import java.util.stream.Collectors;
/**
- * Abstract implementation of {@link ServerConfigProvider}.
- *
- * <p>This implementation is able to delete hidden paths from
- * the configurations before they are sent to the viewer.</p>
- *
- * @param <T> the file type
+ * Filters excluded paths from {@link JsonElement}s (parsed configuration files).
*/
-public abstract class AbstractServerConfigProvider<T extends Enum<T>> implements ServerConfigProvider {
- private final Map<String, T> files;
- private final Collection<String> hiddenPaths;
-
- protected AbstractServerConfigProvider(Map<String, T> files, Collection<String> hiddenPaths) {
- this.files = files;
- this.hiddenPaths = hiddenPaths;
- }
-
- @Override
- public final Map<String, JsonElement> loadServerConfigurations() {
- ImmutableMap.Builder<String, JsonElement> builder = ImmutableMap.builder();
-
- this.files.forEach((path, type) -> {
- try {
- JsonElement json = load(path, type);
- if (json != null) {
- delete(json, this.hiddenPaths);
- builder.put(path, json);
- }
- } catch (Exception e) {
- e.printStackTrace();
- }
- });
+public class ExcludedConfigFilter {
+ private final Collection<String> pathsToExclude;
- return builder.build();
+ public ExcludedConfigFilter(Collection<String> pathsToExclude) {
+ this.pathsToExclude = pathsToExclude;
}
/**
- * Loads a file from the system.
- *
- * @param path the name of the file to load
- * @param type the type of the file
- * @return the loaded file
- * @throws IOException if an error occurs performing i/o
- */
- protected abstract JsonElement load(String path, T type) throws IOException;
-
- /**
- * Deletes the given paths from the json element.
+ * Deletes the excluded paths from the json element.
*
* @param json the json element
- * @param paths the paths to delete
*/
- private static void delete(JsonElement json, Collection<String> paths) {
- for (String path : paths) {
+ public JsonElement apply(JsonElement json) {
+ for (String path : this.pathsToExclude) {
Deque<String> pathDeque = new LinkedList<>(Arrays.asList(path.split("\\.")));
delete(json, pathDeque);
}
+ return json;
}
private static void delete(JsonElement json, Deque<String> path) {
@@ -132,5 +94,4 @@ public abstract class AbstractServerConfigProvider<T extends Enum<T>> implements
}
}
}
-
}
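A minimal usage sketch: dotted paths descend into nested objects before deleting, so sensitive keys are removed before the config is sent to the viewer.

    JsonObject database = new JsonObject();
    database.addProperty("host", "localhost");
    database.addProperty("password", "hunter2");
    JsonObject config = new JsonObject();
    config.add("database", database);

    new ExcludedConfigFilter(Collections.singleton("database.password")).apply(config);
    // config is now {"database":{"host":"localhost"}}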
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesFileReader.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesConfigParser.java
index 8fc89d7..344ba1c 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesFileReader.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesConfigParser.java
@@ -20,25 +20,38 @@
package me.lucko.spark.common.platform.serverconfig;
-import java.io.FilterReader;
+import com.google.gson.Gson;
+import com.google.gson.JsonElement;
+
+import java.io.BufferedReader;
import java.io.IOException;
-import java.io.Reader;
+import java.nio.file.Paths;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
/**
- * A {@link Reader} that can parse a .properties file.
+ * A {@link ConfigParser} that can parse a .properties file.
*/
-public class PropertiesFileReader extends FilterReader {
+public enum PropertiesConfigParser implements ConfigParser {
+ INSTANCE;
+
+ private static final Gson GSON = new Gson();
+
+ @Override
+ public JsonElement load(String file, ExcludedConfigFilter filter) throws IOException {
+ Map<String, Object> values = this.parse(Paths.get(file));
+ if (values == null) {
+ return null;
+ }
- public PropertiesFileReader(Reader in) {
- super(in);
+ return filter.apply(GSON.toJsonTree(values));
}
- public Map<String, Object> readProperties() throws IOException {
+ @Override
+ public Map<String, Object> parse(BufferedReader reader) throws IOException {
Properties properties = new Properties();
- properties.load(this);
+ properties.load(reader);
Map<String, Object> values = new HashMap<>();
properties.forEach((k, v) -> {
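A minimal usage sketch (the file name and hidden path are illustrative):

    ExcludedConfigFilter filter = new ExcludedConfigFilter(Collections.singleton("motd"));
    JsonElement json = PropertiesConfigParser.INSTANCE.load("server.properties", filter);
    // json is a filtered tree of the properties, or null if the file is absent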
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java
index 1fc2391..485f215 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java
@@ -20,40 +20,57 @@
package me.lucko.spark.common.platform.serverconfig;
+import com.google.common.collect.ImmutableMap;
import com.google.gson.JsonElement;
+import me.lucko.spark.common.platform.MetadataProvider;
+
+import java.util.Arrays;
+import java.util.Collection;
import java.util.Collections;
+import java.util.List;
import java.util.Map;
-import java.util.stream.Collectors;
/**
- * Function to export server configuration files for access within the spark viewer.
+ * Abstract implementation of {@link MetadataProvider} which
+ * provides server configuration data.
+ *
+ * <p>This implementation is able to delete hidden paths from
+ * the configurations before they are sent to the viewer.</p>
*/
-@FunctionalInterface
-public interface ServerConfigProvider {
-
- /**
- * Loads a map of the server configuration files.
- *
- * <p>The key is the name of the file and the value is a
- * {@link JsonElement} of the contents.</p>
- *
- * @return the exported server configurations
- */
- Map<String, JsonElement> loadServerConfigurations();
-
- default Map<String, String> exportServerConfigurations() {
- return loadServerConfigurations().entrySet()
- .stream()
- .collect(Collectors.toMap(
- Map.Entry::getKey,
- e -> e.getValue().toString()
- ));
+public abstract class ServerConfigProvider implements MetadataProvider {
+ private final Map<String, ConfigParser> files;
+ private final ExcludedConfigFilter hiddenPathFilters;
+
+ protected ServerConfigProvider(Map<String, ConfigParser> files, Collection<String> hiddenPaths) {
+ this.files = files;
+ this.hiddenPathFilters = new ExcludedConfigFilter(hiddenPaths);
+ }
+
+ @Override
+ public final Map<String, JsonElement> get() {
+ ImmutableMap.Builder<String, JsonElement> builder = ImmutableMap.builder();
+
+ this.files.forEach((path, parser) -> {
+ try {
+ JsonElement json = parser.load(path, this.hiddenPathFilters);
+ if (json == null) {
+ return;
+ }
+ builder.put(path, json);
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ });
+
+ return builder.build();
}
- /**
- * A no-op implementation
- */
- ServerConfigProvider NO_OP = Collections::emptyMap;
+ protected static List<String> getSystemPropertyList(String property) {
+ String value = System.getProperty(property);
+ return value == null
+ ? Collections.emptyList()
+ : Arrays.asList(value.split(","));
+ }
}
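A hypothetical platform provider built on the reworked abstraction; it simply maps file names to parsers and lists the paths to hide (class name, file names, and the system property are illustrative):

    public class ExampleServerConfigProvider extends ServerConfigProvider {
        private static final Map<String, ConfigParser> FILES = ImmutableMap.of(
                "server.properties", PropertiesConfigParser.INSTANCE
        );

        private static final Collection<String> HIDDEN_PATHS = ImmutableSet.<String>builder()
                .add("server-ip", "motd")
                .addAll(getSystemPropertyList("spark.serverconfigs.hiddenpaths"))
                .build();

        public ExampleServerConfigProvider() {
            super(FILES, HIDDEN_PATHS);
        }
    }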
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/AbstractChunkInfo.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/AbstractChunkInfo.java
new file mode 100644
index 0000000..80026cd
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/AbstractChunkInfo.java
@@ -0,0 +1,55 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform.world;
+
+public abstract class AbstractChunkInfo<E> implements ChunkInfo<E> {
+ private final int x;
+ private final int z;
+
+ protected AbstractChunkInfo(int x, int z) {
+ this.x = x;
+ this.z = z;
+ }
+
+ @Override
+ public int getX() {
+ return this.x;
+ }
+
+ @Override
+ public int getZ() {
+ return this.z;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (obj == this) return true;
+ if (!(obj instanceof AbstractChunkInfo)) return false;
+ AbstractChunkInfo<?> that = (AbstractChunkInfo<?>) obj;
+ return this.x == that.x && this.z == that.z;
+ }
+
+ @Override
+ public int hashCode() {
+ return this.x ^ this.z;
+ }
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/AsyncWorldInfoProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/AsyncWorldInfoProvider.java
new file mode 100644
index 0000000..82cddef
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/AsyncWorldInfoProvider.java
@@ -0,0 +1,90 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform.world;
+
+import me.lucko.spark.common.SparkPlatform;
+import me.lucko.spark.common.SparkPlugin;
+
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+import java.util.function.Function;
+import java.util.logging.Level;
+
+/**
+ * Async-friendly wrapper around {@link WorldInfoProvider}.
+ */
+public class AsyncWorldInfoProvider {
+ private static final int TIMEOUT_SECONDS = 5;
+
+ private final SparkPlatform platform;
+ private final WorldInfoProvider provider;
+
+ public AsyncWorldInfoProvider(SparkPlatform platform, WorldInfoProvider provider) {
+ this.platform = platform;
+ this.provider = provider == WorldInfoProvider.NO_OP ? null : provider;
+ }
+
+ private <T> CompletableFuture<T> async(Function<WorldInfoProvider, T> function) {
+ if (this.provider == null) {
+ return null;
+ }
+
+ if (this.provider.mustCallSync()) {
+ SparkPlugin plugin = this.platform.getPlugin();
+ return CompletableFuture.supplyAsync(() -> function.apply(this.provider), plugin::executeSync);
+ } else {
+ return CompletableFuture.completedFuture(function.apply(this.provider));
+ }
+ }
+
+ private <T> T get(CompletableFuture<T> future) {
+ if (future == null) {
+ return null;
+ }
+
+ try {
+ return future.get(TIMEOUT_SECONDS, TimeUnit.SECONDS);
+ } catch (InterruptedException | ExecutionException e) {
+ throw new RuntimeException(e);
+ } catch (TimeoutException e) {
+ this.platform.getPlugin().log(Level.WARNING, "Timed out waiting for world statistics");
+ return null;
+ }
+ }
+
+ public CompletableFuture<WorldInfoProvider.CountsResult> pollCounts() {
+ return async(WorldInfoProvider::pollCounts);
+ }
+
+ public CompletableFuture<WorldInfoProvider.ChunksResult<? extends ChunkInfo<?>>> pollChunks() {
+ return async(WorldInfoProvider::pollChunks);
+ }
+
+ public WorldInfoProvider.CountsResult getCounts() {
+ return get(pollCounts());
+ }
+
+ public WorldInfoProvider.ChunksResult<? extends ChunkInfo<?>> getChunks() {
+ return get(pollChunks());
+ }
+}
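A minimal usage sketch, mirroring how PlatformStatisticsProvider consumes it ("platform" is assumed to be in scope):

    AsyncWorldInfoProvider worlds = new AsyncWorldInfoProvider(
            platform, platform.getPlugin().createWorldInfoProvider());
    WorldInfoProvider.CountsResult counts = worlds.getCounts(); // blocks for at most 5 seconds
    if (counts != null) {
        int loadedChunks = counts.chunks();
    }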
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/ChunkInfo.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/ChunkInfo.java
new file mode 100644
index 0000000..2193a50
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/ChunkInfo.java
@@ -0,0 +1,44 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform.world;
+
+/**
+ * Information about a given chunk.
+ *
+ * @param <E> the type used to describe entities
+ */
+public interface ChunkInfo<E> {
+
+ int getX();
+
+ int getZ();
+
+ CountMap<E> getEntityCounts();
+
+ /**
+ * Converts entity type {@link E} to a string.
+ *
+ * @param type the entity type
+ * @return a string
+ */
+ String entityTypeName(E type);
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/CountMap.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/CountMap.java
new file mode 100644
index 0000000..3083266
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/CountMap.java
@@ -0,0 +1,110 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform.world;
+
+import java.util.EnumMap;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicInteger;
+
+/**
+ * A map of (key) -> count.
+ *
+ * @param <T> the key type
+ */
+public interface CountMap<T> {
+
+ /**
+ * Increments the counter for the given key.
+ *
+ * @param key the key
+ */
+ void increment(T key);
+
+ /**
+ * Adds the given delta to the counter for the given key.
+ *
+ * @param key the key
+ * @param delta the amount to add
+ */
+ void add(T key, int delta);
+
+ AtomicInteger total();
+
+ Map<T, AtomicInteger> asMap();
+
+ /**
+ * A simple {@link CountMap} backed by the provided {@link Map}.
+ *
+ * @param <T> the key type
+ */
+ class Simple<T> implements CountMap<T> {
+ private final Map<T, AtomicInteger> counts;
+ private final AtomicInteger total;
+
+ public Simple(Map<T, AtomicInteger> counts) {
+ this.counts = counts;
+ this.total = new AtomicInteger();
+ }
+
+ @Override
+ public void increment(T key) {
+ AtomicInteger counter = this.counts.get(key);
+ if (counter == null) {
+ counter = new AtomicInteger();
+ this.counts.put(key, counter);
+ }
+ counter.incrementAndGet();
+ this.total.incrementAndGet();
+ }
+
+ @Override
+ public void add(T key, int delta) {
+ AtomicInteger counter = this.counts.get(key);
+ if (counter == null) {
+ counter = new AtomicInteger();
+ this.counts.put(key, counter);
+ }
+ counter.addAndGet(delta);
+ this.total.addAndGet(delta);
+ }
+
+ @Override
+ public AtomicInteger total() {
+ return this.total;
+ }
+
+ @Override
+ public Map<T, AtomicInteger> asMap() {
+ return this.counts;
+ }
+ }
+
+ /**
+ * A {@link CountMap} backed by an {@link EnumMap}.
+ *
+ * @param <T> the key type - must be an enum
+ */
+ class EnumKeyed<T extends Enum<T>> extends Simple<T> {
+ public EnumKeyed(Class<T> keyClass) {
+ super(new EnumMap<>(keyClass));
+ }
+ }
+
+}
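A minimal usage sketch:

    CountMap<String> counts = new CountMap.Simple<>(new HashMap<>());
    counts.increment("zombie");
    counts.add("creeper", 3);
    counts.total().get();           // 4
    counts.asMap().get("creeper");  // AtomicInteger holding 3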
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldInfoProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldInfoProvider.java
new file mode 100644
index 0000000..7fb581d
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldInfoProvider.java
@@ -0,0 +1,104 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform.world;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Provides information about worlds.
+ */
+public interface WorldInfoProvider {
+
+ WorldInfoProvider NO_OP = new WorldInfoProvider() {
+ @Override
+ public CountsResult pollCounts() {
+ return null;
+ }
+
+ @Override
+ public ChunksResult<? extends ChunkInfo<?>> pollChunks() {
+ return null;
+ }
+ };
+
+ /**
+ * Polls the current player, entity, tile entity and chunk counts.
+ *
+ * @return the counts
+ */
+ CountsResult pollCounts();
+
+ /**
+ * Polls for chunk information.
+ *
+ * @return the chunk information
+ */
+ ChunksResult<? extends ChunkInfo<?>> pollChunks();
+
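+ /**
+ * Gets if this provider must be polled from the sync (main server) thread.
+ *
+ * @return true if the polling methods must be called sync
+ */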
+ default boolean mustCallSync() {
+ return true;
+ }
+
+ final class ChunksResult<T extends ChunkInfo<?>> {
+ private final Map<String, List<T>> worlds = new HashMap<>();
+
+ public void put(String worldName, List<T> chunks) {
+ this.worlds.put(worldName, chunks);
+ }
+
+ public Map<String, List<T>> getWorlds() {
+ return this.worlds;
+ }
+ }
+
+ final class CountsResult {
+ private final int players;
+ private final int entities;
+ private final int tileEntities;
+ private final int chunks;
+
+ public CountsResult(int players, int entities, int tileEntities, int chunks) {
+ this.players = players;
+ this.entities = entities;
+ this.tileEntities = tileEntities;
+ this.chunks = chunks;
+ }
+
+ public int players() {
+ return this.players;
+ }
+
+ public int entities() {
+ return this.entities;
+ }
+
+ public int tileEntities() {
+ return this.tileEntities;
+ }
+
+ public int chunks() {
+ return this.chunks;
+ }
+ }
+
+}
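For platform implementers, a minimal sketch of an implementation (the counts are illustrative constants; a real provider would query the running server):

    import me.lucko.spark.common.platform.world.ChunkInfo;
    import me.lucko.spark.common.platform.world.WorldInfoProvider;

    import java.util.Collections;

    public class FixedWorldInfoProvider implements WorldInfoProvider {
        @Override
        public CountsResult pollCounts() {
            // players, entities, tile entities, chunks
            return new CountsResult(12, 340, 85, 620);
        }

        @Override
        public ChunksResult<ChunkInfo<String>> pollChunks() {
            ChunksResult<ChunkInfo<String>> result = new ChunksResult<>();
            result.put("world", Collections.emptyList()); // no per-chunk data in this sketch
            return result;
        }
    }
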
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java
new file mode 100644
index 0000000..7e63222
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java
@@ -0,0 +1,189 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform.world;
+
+import me.lucko.spark.proto.SparkProtos.WorldStatistics;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Set;
+import java.util.concurrent.atomic.AtomicInteger;
+
+public class WorldStatisticsProvider {
+ private final AsyncWorldInfoProvider provider;
+
+ public WorldStatisticsProvider(AsyncWorldInfoProvider provider) {
+ this.provider = provider;
+ }
+
+ public WorldStatistics getWorldStatistics() {
+ WorldInfoProvider.ChunksResult<? extends ChunkInfo<?>> result = provider.getChunks();
+ if (result == null) {
+ return null;
+ }
+
+ WorldStatistics.Builder stats = WorldStatistics.newBuilder();
+
+ AtomicInteger combinedTotal = new AtomicInteger();
+ CountMap<String> combined = new CountMap.Simple<>(new HashMap<>());
+
+ result.getWorlds().forEach((worldName, chunks) -> {
+ WorldStatistics.World.Builder builder = WorldStatistics.World.newBuilder();
+ builder.setName(worldName);
+
+ List<Region> regions = groupIntoRegions(chunks);
+
+ int total = 0;
+
+ for (Region region : regions) {
+ builder.addRegions(regionToProto(region, combined));
+ total += region.getTotalEntities().get();
+ }
+
+ builder.setTotalEntities(total);
+ combinedTotal.addAndGet(total);
+
+ stats.addWorlds(builder.build());
+ });
+
+ stats.setTotalEntities(combinedTotal.get());
+ combined.asMap().forEach((key, value) -> stats.putEntityCounts(key, value.get()));
+
+ return stats.build();
+ }
+
+ private static WorldStatistics.Region regionToProto(Region region, CountMap<String> combined) {
+ WorldStatistics.Region.Builder builder = WorldStatistics.Region.newBuilder();
+ builder.setTotalEntities(region.getTotalEntities().get());
+ for (ChunkInfo<?> chunk : region.getChunks()) {
+ builder.addChunks(chunkToProto(chunk, combined));
+ }
+ return builder.build();
+ }
+
+ private static <E> WorldStatistics.Chunk chunkToProto(ChunkInfo<E> chunk, CountMap<String> combined) {
+ WorldStatistics.Chunk.Builder builder = WorldStatistics.Chunk.newBuilder();
+ builder.setX(chunk.getX());
+ builder.setZ(chunk.getZ());
+ builder.setTotalEntities(chunk.getEntityCounts().total().get());
+ chunk.getEntityCounts().asMap().forEach((key, value) -> {
+ String name = chunk.entityTypeName(key);
+ int count = value.get();
+
+ if (name == null) {
+ name = "unknown[" + key.toString() + "]";
+ }
+
+ builder.putEntityCounts(name, count);
+ combined.add(name, count);
+ });
+ return builder.build();
+ }
+
+ private static List<Region> groupIntoRegions(List<? extends ChunkInfo<?>> chunks) {
+ List<Region> regions = new ArrayList<>();
+
+ for (ChunkInfo<?> chunk : chunks) {
+ CountMap<?> counts = chunk.getEntityCounts();
+ if (counts.total().get() == 0) {
+ continue;
+ }
+
+ boolean found = false;
+
+ for (Region region : regions) {
+ if (region.isAdjacent(chunk)) {
+ found = true;
+ region.add(chunk);
+
+ // if the chunk is adjacent to more than one region, merge the regions together
+ for (Iterator<Region> iterator = regions.iterator(); iterator.hasNext(); ) {
+ Region otherRegion = iterator.next();
+ if (region != otherRegion && otherRegion.isAdjacent(chunk)) {
+ iterator.remove();
+ region.merge(otherRegion);
+ }
+ }
+
+ break;
+ }
+ }
+
+ if (!found) {
+ regions.add(new Region(chunk));
+ }
+ }
+
+ return regions;
+ }
+
+ /**
+ * A group of nearby chunks, clustered together by squared Euclidean distance.
+ */
+ private static final class Region {
+ private static final int DISTANCE_THRESHOLD = 2;
+ private final Set<ChunkInfo<?>> chunks;
+ private final AtomicInteger totalEntities;
+
+ private Region(ChunkInfo<?> initial) {
+ this.chunks = new HashSet<>();
+ this.chunks.add(initial);
+ this.totalEntities = new AtomicInteger(initial.getEntityCounts().total().get());
+ }
+
+ public Set<ChunkInfo<?>> getChunks() {
+ return this.chunks;
+ }
+
+ public AtomicInteger getTotalEntities() {
+ return this.totalEntities;
+ }
+
+ public boolean isAdjacent(ChunkInfo<?> chunk) {
+ for (ChunkInfo<?> el : this.chunks) {
+ if (squaredEuclideanDistance(el, chunk) <= DISTANCE_THRESHOLD) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ public void add(ChunkInfo<?> chunk) {
+ this.chunks.add(chunk);
+ this.totalEntities.addAndGet(chunk.getEntityCounts().total().get());
+ }
+
+ public void merge(Region group) {
+ this.chunks.addAll(group.getChunks());
+ this.totalEntities.addAndGet(group.getTotalEntities().get());
+ }
+
+ private static long squaredEuclideanDistance(ChunkInfo<?> a, ChunkInfo<?> b) {
+ long dx = a.getX() - b.getX();
+ long dz = a.getZ() - b.getZ();
+ return (dx * dx) + (dz * dz);
+ }
+ }
+
+}
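To make the grouping rule concrete: two chunks join the same region when their squared distance is at most DISTANCE_THRESHOLD (2), i.e. they touch orthogonally or diagonally, so chunks at (0,0) and (1,1) group together while (0,0) and (2,0) do not. Below is a toy ChunkInfo implementation, sketching the kind of input a platform feeds into this provider (the entity type key is illustrative):

    import me.lucko.spark.common.platform.world.ChunkInfo;
    import me.lucko.spark.common.platform.world.CountMap;

    import java.util.HashMap;

    public class ToyChunkInfo implements ChunkInfo<String> {
        private final int x;
        private final int z;
        private final CountMap<String> entityCounts = new CountMap.Simple<>(new HashMap<>());

        public ToyChunkInfo(int x, int z, int zombies) {
            this.x = x;
            this.z = z;
            this.entityCounts.add("minecraft:zombie", zombies);
        }

        @Override public int getX() { return this.x; }
        @Override public int getZ() { return this.z; }
        @Override public CountMap<String> getEntityCounts() { return this.entityCounts; }
        @Override public String entityTypeName(String type) { return type; }
    }
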
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java
index ce466a0..e324fd3 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java
@@ -23,16 +23,22 @@ package me.lucko.spark.common.sampler;
import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.command.sender.CommandSender;
import me.lucko.spark.common.monitor.memory.GarbageCollectorStatistics;
+import me.lucko.spark.common.platform.MetadataProvider;
import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider;
import me.lucko.spark.common.sampler.aggregator.DataAggregator;
import me.lucko.spark.common.sampler.node.MergeMode;
import me.lucko.spark.common.sampler.node.ThreadNode;
-import me.lucko.spark.common.util.ClassSourceLookup;
+import me.lucko.spark.common.sampler.source.ClassSourceLookup;
+import me.lucko.spark.common.sampler.source.SourceMetadata;
+import me.lucko.spark.common.sampler.window.ProtoTimeEncoder;
+import me.lucko.spark.common.sampler.window.WindowStatisticsCollector;
import me.lucko.spark.proto.SparkSamplerProtos.SamplerData;
import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata;
+import java.util.Collection;
import java.util.Comparator;
import java.util.List;
+import java.util.Locale;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
@@ -41,6 +47,9 @@ import java.util.concurrent.CompletableFuture;
*/
public abstract class AbstractSampler implements Sampler {
+ /** The spark platform instance */
+ protected final SparkPlatform platform;
+
/** The interval to wait between sampling, in microseconds */
protected final int interval;
@@ -51,7 +60,13 @@ public abstract class AbstractSampler implements Sampler {
protected long startTime = -1;
/** The unix timestamp (in millis) when this sampler should automatically complete. */
- protected final long endTime; // -1 for nothing
+ protected final long autoEndTime; // -1 for nothing
+
+ /** If the sampler is running in the background */
+ protected boolean background;
+
+ /** Collects statistics for each window in the sample */
+ protected final WindowStatisticsCollector windowStatisticsCollector;
/** A future to encapsulate the completion of this sampler instance */
protected final CompletableFuture<Sampler> future = new CompletableFuture<>();
@@ -59,10 +74,13 @@ public abstract class AbstractSampler implements Sampler {
/** The garbage collector statistics when profiling started */
protected Map<String, GarbageCollectorStatistics> initialGcStats;
- protected AbstractSampler(int interval, ThreadDumper threadDumper, long endTime) {
- this.interval = interval;
- this.threadDumper = threadDumper;
- this.endTime = endTime;
+ protected AbstractSampler(SparkPlatform platform, SamplerSettings settings) {
+ this.platform = platform;
+ this.interval = settings.interval();
+ this.threadDumper = settings.threadDumper();
+ this.autoEndTime = settings.autoEndTime();
+ this.background = settings.runningInBackground();
+ this.windowStatisticsCollector = new WindowStatisticsCollector(platform);
}
@Override
@@ -74,8 +92,13 @@ public abstract class AbstractSampler implements Sampler {
}
@Override
- public long getEndTime() {
- return this.endTime;
+ public long getAutoEndTime() {
+ return this.autoEndTime;
+ }
+
+ @Override
+ public boolean isRunningInBackground() {
+ return this.background;
}
@Override
@@ -91,6 +114,16 @@ public abstract class AbstractSampler implements Sampler {
return this.initialGcStats;
}
+ @Override
+ public void start() {
+ this.startTime = System.currentTimeMillis();
+ }
+
+ @Override
+ public void stop(boolean cancelled) {
+ this.windowStatisticsCollector.stop();
+ }
+
protected void writeMetadataToProto(SamplerData.Builder proto, SparkPlatform platform, CommandSender creator, String comment, DataAggregator dataAggregator) {
SamplerMetadata.Builder metadata = SamplerMetadata.newBuilder()
.setPlatformMetadata(platform.getPlugin().getPlatformInfo().toData().toProto())
@@ -105,6 +138,11 @@ public abstract class AbstractSampler implements Sampler {
metadata.setComment(comment);
}
+ int totalTicks = this.windowStatisticsCollector.getTotalTicks();
+ if (totalTicks != -1) {
+ metadata.setNumberOfTicks(totalTicks);
+ }
+
try {
metadata.setPlatformStatistics(platform.getStatisticsProvider().getPlatformStatistics(getInitialGcStats()));
} catch (Exception e) {
@@ -119,27 +157,60 @@ public abstract class AbstractSampler implements Sampler {
try {
ServerConfigProvider serverConfigProvider = platform.getPlugin().createServerConfigProvider();
- metadata.putAllServerConfigurations(serverConfigProvider.exportServerConfigurations());
+ if (serverConfigProvider != null) {
+ metadata.putAllServerConfigurations(serverConfigProvider.export());
+ }
} catch (Exception e) {
e.printStackTrace();
}
+ try {
+ MetadataProvider extraMetadataProvider = platform.getPlugin().createExtraMetadataProvider();
+ if (extraMetadataProvider != null) {
+ metadata.putAllExtraPlatformMetadata(extraMetadataProvider.export());
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ Collection<SourceMetadata> knownSources = platform.getPlugin().getKnownSources();
+ for (SourceMetadata source : knownSources) {
+ metadata.putSources(source.getName().toLowerCase(Locale.ROOT), source.toProto());
+ }
+
proto.setMetadata(metadata);
}
- protected void writeDataToProto(SamplerData.Builder proto, DataAggregator dataAggregator, Comparator<ThreadNode> outputOrder, MergeMode mergeMode, ClassSourceLookup classSourceLookup) {
+ protected void writeDataToProto(SamplerData.Builder proto, DataAggregator dataAggregator, MergeMode mergeMode, ClassSourceLookup classSourceLookup) {
List<ThreadNode> data = dataAggregator.exportData();
- data.sort(outputOrder);
+ data.sort(Comparator.comparing(ThreadNode::getThreadLabel));
ClassSourceLookup.Visitor classSourceVisitor = ClassSourceLookup.createVisitor(classSourceLookup);
+ ProtoTimeEncoder timeEncoder = new ProtoTimeEncoder(data);
+ int[] timeWindows = timeEncoder.getKeys();
+ for (int timeWindow : timeWindows) {
+ proto.addTimeWindows(timeWindow);
+ }
+
+ this.windowStatisticsCollector.ensureHasStatisticsForAllWindows(timeWindows);
+ proto.putAllTimeWindowStatistics(this.windowStatisticsCollector.export());
+
for (ThreadNode entry : data) {
- proto.addThreads(entry.toProto(mergeMode));
+ proto.addThreads(entry.toProto(mergeMode, timeEncoder));
classSourceVisitor.visit(entry);
}
- if (classSourceVisitor.hasMappings()) {
- proto.putAllClassSources(classSourceVisitor.getMapping());
+ if (classSourceVisitor.hasClassSourceMappings()) {
+ proto.putAllClassSources(classSourceVisitor.getClassSourceMapping());
+ }
+
+ if (classSourceVisitor.hasMethodSourceMappings()) {
+ proto.putAllMethodSources(classSourceVisitor.getMethodSourceMapping());
+ }
+
+ if (classSourceVisitor.hasLineSourceMappings()) {
+ proto.putAllLineSources(classSourceVisitor.getLineSourceMapping());
}
}
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/BackgroundSamplerManager.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/BackgroundSamplerManager.java
new file mode 100644
index 0000000..7e3b6b4
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/BackgroundSamplerManager.java
@@ -0,0 +1,115 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.sampler;
+
+import me.lucko.spark.common.SparkPlatform;
+import me.lucko.spark.common.platform.PlatformInfo;
+import me.lucko.spark.common.util.Configuration;
+
+import java.util.logging.Level;
+
+public class BackgroundSamplerManager {
+
+ private static final String OPTION_ENABLED = "backgroundProfiler";
+ private static final String OPTION_ENGINE = "backgroundProfilerEngine";
+ private static final String OPTION_INTERVAL = "backgroundProfilerInterval";
+
+ private static final String MARKER_FAILED = "_marker_background_profiler_failed";
+
+ private final SparkPlatform platform;
+ private final Configuration configuration;
+ private final boolean enabled;
+
+ public BackgroundSamplerManager(SparkPlatform platform, Configuration configuration) {
+ this.platform = platform;
+ this.configuration = configuration;
+
+ PlatformInfo.Type type = this.platform.getPlugin().getPlatformInfo().getType();
+ this.enabled = type != PlatformInfo.Type.CLIENT && this.configuration.getBoolean(OPTION_ENABLED, type == PlatformInfo.Type.SERVER);
+ }
+
+ public void initialise() {
+ if (!this.enabled) {
+ return;
+ }
+
+ // are we enabling the background profiler by default for the first time?
+ boolean didEnableByDefault = false;
+ if (!this.configuration.contains(OPTION_ENABLED)) {
+ this.configuration.setBoolean(OPTION_ENABLED, true);
+ didEnableByDefault = true;
+ }
+
+ // did the background profiler fail to start on the previous attempt?
+ if (this.configuration.getBoolean(MARKER_FAILED, false)) {
+ this.platform.getPlugin().log(Level.WARNING, "It seems the background profiler failed to start when spark was last enabled. Sorry about that!");
+ this.platform.getPlugin().log(Level.WARNING, "In the future, spark will try to use the built-in Java profiling engine instead.");
+
+ this.configuration.remove(MARKER_FAILED);
+ this.configuration.setString(OPTION_ENGINE, "java");
+ this.configuration.save();
+ }
+
+ this.platform.getPlugin().log(Level.INFO, "Starting background profiler...");
+
+ if (didEnableByDefault) {
+ // set the failed marker and save before we try to start the profiler,
+ // then remove the marker afterwards if everything goes ok!
+ this.configuration.setBoolean(MARKER_FAILED, true);
+ this.configuration.save();
+ }
+
+ try {
+ startSampler();
+
+ if (didEnableByDefault) {
+ this.configuration.remove(MARKER_FAILED);
+ this.configuration.save();
+ }
+
+ } catch (Throwable e) {
+ e.printStackTrace();
+ }
+ }
+
+ public boolean restartBackgroundSampler() {
+ if (this.enabled) {
+ startSampler();
+ return true;
+ }
+ return false;
+ }
+
+ private void startSampler() {
+ boolean forceJavaEngine = this.configuration.getString(OPTION_ENGINE, "async").equals("java");
+
+ Sampler sampler = new SamplerBuilder()
+ .background(true)
+ .threadDumper(this.platform.getPlugin().getDefaultThreadDumper())
+ .threadGrouper(ThreadGrouper.BY_POOL)
+ .samplingInterval(this.configuration.getInteger(OPTION_INTERVAL, 10))
+ .forceJavaSampler(forceJavaEngine)
+ .start(this.platform);
+
+ this.platform.getSamplerContainer().setActiveSampler(sampler);
+ }
+
+}
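The three option keys above live in spark's Configuration store. A sketch of forcing the Java engine ahead of time (how the Configuration instance is obtained is platform-specific and assumed here; only setters already used in this commit appear):

    import me.lucko.spark.common.util.Configuration;

    public class BackgroundProfilerConfig {
        static void forceJavaEngine(Configuration config) {
            config.setBoolean("backgroundProfiler", true);        // OPTION_ENABLED
            config.setString("backgroundProfilerEngine", "java"); // OPTION_ENGINE
            config.save();
        }
    }
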
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
index 845043f..36a63f1 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
@@ -23,11 +23,9 @@ package me.lucko.spark.common.sampler;
import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.command.sender.CommandSender;
import me.lucko.spark.common.sampler.node.MergeMode;
-import me.lucko.spark.common.sampler.node.ThreadNode;
-import me.lucko.spark.common.util.ClassSourceLookup;
+import me.lucko.spark.common.sampler.source.ClassSourceLookup;
import me.lucko.spark.proto.SparkSamplerProtos.SamplerData;
-import java.util.Comparator;
import java.util.concurrent.CompletableFuture;
/**
@@ -43,7 +41,7 @@ public interface Sampler {
/**
* Stops the sampler.
*/
- void stop();
+ void stop(boolean cancelled);
/**
* Gets the time when the sampler started (unix timestamp in millis)
@@ -57,7 +55,14 @@ public interface Sampler {
*
* @return the end time, or -1 if undefined
*/
- long getEndTime();
+ long getAutoEndTime();
+
+ /**
+ * Gets if this sampler is running in the background (i.e. it was not started by a specific user).
+ *
+ * @return true if the sampler is running in the background
+ */
+ boolean isRunningInBackground();
/**
* Gets a future to encapsulate the completion of the sampler
@@ -67,6 +72,6 @@ public interface Sampler {
CompletableFuture<Sampler> getFuture();
// Methods used to export the sampler data to the web viewer.
- SamplerData toProto(SparkPlatform platform, CommandSender creator, Comparator<ThreadNode> outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup);
+ SamplerData toProto(SparkPlatform platform, CommandSender creator, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup);
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java
index 88cf018..ec635ef 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java
@@ -38,7 +38,8 @@ public class SamplerBuilder {
private boolean ignoreSleeping = false;
private boolean ignoreNative = false;
private boolean useAsyncProfiler = true;
- private long timeout = -1;
+ private long autoEndTime = -1;
+ private boolean background = false;
private ThreadDumper threadDumper = ThreadDumper.ALL;
private ThreadGrouper threadGrouper = ThreadGrouper.BY_NAME;
@@ -57,7 +58,12 @@ public class SamplerBuilder {
if (timeout <= 0) {
throw new IllegalArgumentException("timeout > 0");
}
- this.timeout = System.currentTimeMillis() + unit.toMillis(timeout);
+ this.autoEndTime = System.currentTimeMillis() + unit.toMillis(timeout);
+ return this;
+ }
+
+ public SamplerBuilder background(boolean background) {
+ this.background = background;
return this;
}
@@ -93,15 +99,24 @@ public class SamplerBuilder {
}
public Sampler start(SparkPlatform platform) {
+ boolean onlyTicksOverMode = this.ticksOver != -1 && this.tickHook != null;
+ boolean canUseAsyncProfiler = this.useAsyncProfiler &&
+ !onlyTicksOverMode &&
+ !(this.ignoreSleeping || this.ignoreNative) &&
+ !(this.threadDumper instanceof ThreadDumper.Regex) &&
+ AsyncProfilerAccess.getInstance(platform).checkSupported(platform);
+
int intervalMicros = (int) (this.samplingInterval * 1000d);
+ SamplerSettings settings = new SamplerSettings(intervalMicros, this.threadDumper, this.threadGrouper, this.autoEndTime, this.background);
Sampler sampler;
- if (this.ticksOver != -1 && this.tickHook != null) {
- sampler = new JavaSampler(intervalMicros, this.threadDumper, this.threadGrouper, this.timeout, this.ignoreSleeping, this.ignoreNative, this.tickHook, this.ticksOver);
- } else if (this.useAsyncProfiler && !(this.threadDumper instanceof ThreadDumper.Regex) && AsyncProfilerAccess.INSTANCE.checkSupported(platform)) {
- sampler = new AsyncSampler(intervalMicros, this.threadDumper, this.threadGrouper, this.timeout);
+ if (canUseAsyncProfiler) {
+ sampler = new AsyncSampler(platform, settings);
+ } else if (onlyTicksOverMode) {
+ sampler = new JavaSampler(platform, settings, this.ignoreSleeping, this.ignoreNative, this.tickHook, this.ticksOver);
} else {
- sampler = new JavaSampler(intervalMicros, this.threadDumper, this.threadGrouper, this.timeout, this.ignoreSleeping, this.ignoreNative);
+ sampler = new JavaSampler(platform, settings, this.ignoreSleeping, this.ignoreNative);
}
sampler.start();
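
For comparison with the background path above, a sketch of starting a foreground sampler through the same builder (the interval value is illustrative; only builder methods visible in this commit are used):

    import me.lucko.spark.common.SparkPlatform;
    import me.lucko.spark.common.sampler.Sampler;
    import me.lucko.spark.common.sampler.SamplerBuilder;
    import me.lucko.spark.common.sampler.ThreadDumper;
    import me.lucko.spark.common.sampler.ThreadGrouper;

    public class StartForegroundSampler {
        static Sampler start(SparkPlatform platform) {
            Sampler sampler = new SamplerBuilder()
                    .threadDumper(ThreadDumper.ALL)
                    .threadGrouper(ThreadGrouper.BY_NAME)
                    .samplingInterval(4) // milliseconds; converted to micros by the builder
                    .start(platform);
            platform.getSamplerContainer().setActiveSampler(sampler);
            return sampler;
        }
    }
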
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerContainer.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerContainer.java
new file mode 100644
index 0000000..15b1029
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerContainer.java
@@ -0,0 +1,76 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.sampler;
+
+import java.util.concurrent.atomic.AtomicReference;
+
+/**
+ * Container for the active sampler.
+ */
+public class SamplerContainer implements AutoCloseable {
+
+ private final AtomicReference<Sampler> activeSampler = new AtomicReference<>();
+
+ /**
+ * Gets the active sampler, or null if a sampler is not active.
+ *
+ * @return the active sampler, or null if none is active
+ */
+ public Sampler getActiveSampler() {
+ return this.activeSampler.get();
+ }
+
+ /**
+ * Sets the active sampler, throwing an exception if another sampler is already active.
+ *
+ * @param sampler the sampler
+ */
+ public void setActiveSampler(Sampler sampler) {
+ if (!this.activeSampler.compareAndSet(null, sampler)) {
+ throw new IllegalStateException("Attempted to set active sampler when another was already active!");
+ }
+ }
+
+ /**
+ * Unsets the active sampler, if the provided sampler is active.
+ *
+ * @param sampler the sampler
+ */
+ public void unsetActiveSampler(Sampler sampler) {
+ this.activeSampler.compareAndSet(sampler, null);
+ }
+
+ /**
+ * Stops the active sampler, if there is one.
+ */
+ public void stopActiveSampler(boolean cancelled) {
+ Sampler sampler = this.activeSampler.getAndSet(null);
+ if (sampler != null) {
+ sampler.stop(cancelled);
+ }
+ }
+
+ @Override
+ public void close() {
+ stopActiveSampler(true);
+ }
+
+}
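A short sketch of the container's contract (the sampler instances are assumed to exist; comments describe the compare-and-set semantics above):

    import me.lucko.spark.common.sampler.Sampler;
    import me.lucko.spark.common.sampler.SamplerContainer;

    public class SamplerContainerDemo {
        static void swap(SamplerContainer container, Sampler a, Sampler b) {
            container.setActiveSampler(a);
            // container.setActiveSampler(b); // would throw IllegalStateException
            container.unsetActiveSampler(a);  // compare-and-set: only clears if 'a' is still active
            container.setActiveSampler(b);
            container.stopActiveSampler(false); // stops 'b' normally and clears the slot
        }
    }
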
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerSettings.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerSettings.java
new file mode 100644
index 0000000..6e55a43
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerSettings.java
@@ -0,0 +1,61 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.sampler;
+
+/**
+ * Base settings for all samplers
+ */
+public class SamplerSettings {
+
+ private final int interval;
+ private final ThreadDumper threadDumper;
+ private final ThreadGrouper threadGrouper;
+ private final long autoEndTime;
+ private final boolean runningInBackground;
+
+ public SamplerSettings(int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long autoEndTime, boolean runningInBackground) {
+ this.interval = interval;
+ this.threadDumper = threadDumper;
+ this.threadGrouper = threadGrouper;
+ this.autoEndTime = autoEndTime;
+ this.runningInBackground = runningInBackground;
+ }
+
+ public int interval() {
+ return this.interval;
+ }
+
+ public ThreadDumper threadDumper() {
+ return this.threadDumper;
+ }
+
+ public ThreadGrouper threadGrouper() {
+ return this.threadGrouper;
+ }
+
+ public long autoEndTime() {
+ return this.autoEndTime;
+ }
+
+ public boolean runningInBackground() {
+ return this.runningInBackground;
+ }
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java
index 9d54f50..fd0c413 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java
@@ -76,17 +76,29 @@ public interface ThreadDumper {
* the game (server/client) thread.
*/
final class GameThread implements Supplier<ThreadDumper> {
+ private Supplier<Thread> threadSupplier;
private Specific dumper = null;
+ public GameThread() {
+
+ }
+
+ public GameThread(Supplier<Thread> threadSupplier) {
+ this.threadSupplier = threadSupplier;
+ }
+
@Override
public ThreadDumper get() {
+ if (this.dumper == null) {
+ setThread(this.threadSupplier.get());
+ this.threadSupplier = null;
+ }
+
return Objects.requireNonNull(this.dumper, "dumper");
}
- public void ensureSetup() {
- if (this.dumper == null) {
- this.dumper = new Specific(new long[]{Thread.currentThread().getId()});
- }
+ public void setThread(Thread thread) {
+ this.dumper = new Specific(new long[]{thread.getId()});
}
}
@@ -98,6 +110,10 @@ public interface ThreadDumper {
private Set<Thread> threads;
private Set<String> threadNamesLowerCase;
+ public Specific(Thread thread) {
+ this.ids = new long[]{thread.getId()};
+ }
+
public Specific(long[] ids) {
this.ids = ids;
}
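The new supplier-based constructor lets a platform defer resolving its main thread until the dumper is first requested; a sketch (the thread lookup below is a placeholder, not a real platform call):

    import me.lucko.spark.common.sampler.ThreadDumper;

    public class GameThreadDemo {
        // placeholder: a real platform would return its main server/client thread
        static Thread findMainThread() {
            return Thread.currentThread();
        }

        static ThreadDumper.GameThread lazyDumper() {
            // the supplier runs once, on the first get(), then the Specific dumper is cached
            return new ThreadDumper.GameThread(GameThreadDemo::findMainThread);
        }
    }
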
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/AbstractDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/AbstractDataAggregator.java
index ad9dee4..2c003e5 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/AbstractDataAggregator.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/AbstractDataAggregator.java
@@ -27,6 +27,7 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
+import java.util.function.IntPredicate;
/**
* Abstract implementation of {@link DataAggregator}.
@@ -52,6 +53,11 @@ public abstract class AbstractDataAggregator implements DataAggregator {
}
@Override
+ public void pruneData(IntPredicate timeWindowPredicate) {
+ this.threadData.values().removeIf(node -> node.removeTimeWindowsRecursively(timeWindowPredicate));
+ }
+
+ @Override
public List<ThreadNode> exportData() {
List<ThreadNode> data = new ArrayList<>(this.threadData.values());
for (ThreadNode node : data) {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/DataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/DataAggregator.java
index 5590a96..ed33204 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/DataAggregator.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/DataAggregator.java
@@ -24,6 +24,7 @@ import me.lucko.spark.common.sampler.node.ThreadNode;
import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata;
import java.util.List;
+import java.util.function.IntPredicate;
/**
* Aggregates sampling data.
@@ -38,6 +39,13 @@ public interface DataAggregator {
List<ThreadNode> exportData();
/**
+ * Prunes windows of data from this aggregator if the given {@code timeWindowPredicate} returns true.
+ *
+ * @param timeWindowPredicate the predicate
+ */
+ void pruneData(IntPredicate timeWindowPredicate);
+
+ /**
* Gets metadata about the data aggregator instance.
*/
SamplerMetadata.DataAggregator getMetadata();
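
The predicate is a plain java.util.function.IntPredicate over window indexes. A sketch of the shape of predicate the pruning hook receives — windows for which it returns true are discarded (the retention width is an illustrative constant, not spark's real value):

    import java.util.function.IntPredicate;

    public class PruneWindows {
        // matches windows older than the most recent 'keep' windows
        static IntPredicate olderThan(int currentWindow, int keep) {
            return window -> window < currentWindow - keep;
        }
    }
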
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncDataAggregator.java
index 3de3943..402330a 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncDataAggregator.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncDataAggregator.java
@@ -47,10 +47,10 @@ public class AsyncDataAggregator extends AbstractDataAggregator {
.build();
}
- public void insertData(ProfileSegment element) {
+ public void insertData(ProfileSegment element, int window) {
try {
ThreadNode node = getNode(this.threadGrouper.getGroup(element.getNativeThreadId(), element.getThreadName()));
- node.log(STACK_TRACE_DESCRIBER, element.getStackTrace(), element.getTime());
+ node.log(STACK_TRACE_DESCRIBER, element.getStackTrace(), element.getTime(), window);
} catch (Exception e) {
e.printStackTrace();
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java
index d642a53..1480650 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java
@@ -22,26 +22,39 @@ package me.lucko.spark.common.sampler.async;
import com.google.common.collect.ImmutableTable;
import com.google.common.collect.Table;
+import com.google.common.io.ByteStreams;
import me.lucko.spark.common.SparkPlatform;
-import me.lucko.spark.common.util.TemporaryFiles;
import one.profiler.AsyncProfiler;
import one.profiler.Events;
+import java.io.BufferedReader;
import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.OutputStream;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
-import java.nio.file.StandardCopyOption;
import java.util.Locale;
+import java.util.Objects;
import java.util.logging.Level;
+import java.util.stream.Collectors;
/**
* Provides a bridge between spark and async-profiler.
*/
-public enum AsyncProfilerAccess {
- INSTANCE;
+public class AsyncProfilerAccess {
+ private static AsyncProfilerAccess instance;
+
+ // singleton, needs a SparkPlatform for first init
+ public static synchronized AsyncProfilerAccess getInstance(SparkPlatform platform) {
+ if (instance == null) {
+ Objects.requireNonNull(platform, "platform");
+ instance = new AsyncProfilerAccess(platform);
+ }
+ return instance;
+ }
/** An instance of the async-profiler Java API. */
private final AsyncProfiler profiler;
@@ -52,13 +65,13 @@ public enum AsyncProfilerAccess {
/** If profiler is null, contains the reason why setup failed */
private final Exception setupException;
- AsyncProfilerAccess() {
+ AsyncProfilerAccess(SparkPlatform platform) {
AsyncProfiler profiler;
ProfilingEvent profilingEvent = null;
Exception setupException = null;
try {
- profiler = load();
+ profiler = load(platform);
if (isEventSupported(profiler, ProfilingEvent.CPU, false)) {
profilingEvent = ProfilingEvent.CPU;
} else if (isEventSupported(profiler, ProfilingEvent.WALL, true)) {
@@ -74,11 +87,11 @@ public enum AsyncProfilerAccess {
this.setupException = setupException;
}
- public AsyncProfiler getProfiler() {
+ public AsyncProfilerJob startNewProfilerJob() {
if (this.profiler == null) {
throw new UnsupportedOperationException("async-profiler not supported", this.setupException);
}
- return this.profiler;
+ return AsyncProfilerJob.createNew(this, this.profiler);
}
public ProfilingEvent getProfilingEvent() {
@@ -103,13 +116,18 @@ public enum AsyncProfilerAccess {
return this.profiler != null;
}
- private static AsyncProfiler load() throws Exception {
+ private static AsyncProfiler load(SparkPlatform platform) throws Exception {
// check compatibility
String os = System.getProperty("os.name").toLowerCase(Locale.ROOT).replace(" ", "");
String arch = System.getProperty("os.arch").toLowerCase(Locale.ROOT);
+ if (os.equals("linux") && arch.equals("amd64") && isLinuxMusl()) {
+ arch = "amd64-musl";
+ }
+
Table<String, String, String> supported = ImmutableTable.<String, String, String>builder()
.put("linux", "amd64", "linux/amd64")
+ .put("linux", "amd64-musl", "linux/amd64-musl")
.put("linux", "aarch64", "linux/aarch64")
.put("macosx", "amd64", "macos")
.put("macosx", "aarch64", "macos")
@@ -127,10 +145,10 @@ public enum AsyncProfilerAccess {
throw new IllegalStateException("Could not find " + resource + " in spark jar file");
}
- Path extractPath = TemporaryFiles.create("spark-", "-libasyncProfiler.so.tmp");
+ Path extractPath = platform.getTemporaryFiles().create("spark-", "-libasyncProfiler.so.tmp");
- try (InputStream in = profilerResource.openStream()) {
- Files.copy(in, extractPath, StandardCopyOption.REPLACE_EXISTING);
+ try (InputStream in = profilerResource.openStream(); OutputStream out = Files.newOutputStream(extractPath)) {
+ ByteStreams.copy(in, out);
}
// get an instance of async-profiler
@@ -190,4 +208,20 @@ public enum AsyncProfilerAccess {
super("A runtime error occurred whilst loading the native library", cause);
}
}
+
+ // Checks if the system is using musl instead of glibc
+ private static boolean isLinuxMusl() {
+ try {
+ InputStream stream = new ProcessBuilder("sh", "-c", "ldd `which ls`")
+ .start()
+ .getInputStream();
+
+ BufferedReader reader = new BufferedReader(new InputStreamReader(stream));
+ String output = reader.lines().collect(Collectors.joining());
+ return output.contains("musl"); // a crude heuristic, but sufficient for this check
+ } catch (Throwable e) {
+ // ignore
+ return false;
+ }
+ }
}
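
With the enum singleton replaced by a lazily-created instance, call sites now pass the platform on first access. A sketch of the new entry point:

    import me.lucko.spark.common.SparkPlatform;
    import me.lucko.spark.common.sampler.async.AsyncProfilerAccess;
    import me.lucko.spark.common.sampler.async.AsyncProfilerJob;

    public class ProfilerAccessDemo {
        static AsyncProfilerJob tryStartJob(SparkPlatform platform) {
            AsyncProfilerAccess access = AsyncProfilerAccess.getInstance(platform);
            if (!access.checkSupported(platform)) {
                return null; // async-profiler is unavailable on this OS/arch
            }
            return access.startNewProfilerJob(); // throws if another job is already active
        }
    }
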
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerJob.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerJob.java
new file mode 100644
index 0000000..d74b75f
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerJob.java
@@ -0,0 +1,276 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.sampler.async;
+
+import me.lucko.spark.common.SparkPlatform;
+import me.lucko.spark.common.sampler.ThreadDumper;
+import me.lucko.spark.common.sampler.async.jfr.JfrReader;
+
+import one.profiler.AsyncProfiler;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicReference;
+import java.util.function.Predicate;
+
+/**
+ * Represents a profiling job within async-profiler.
+ *
+ * <p>Only one job can be running at a time. This is guarded by
+ * {@link #createNew(AsyncProfilerAccess, AsyncProfiler)}.</p>
+ */
+public class AsyncProfilerJob {
+
+ /**
+ * The currently active job.
+ */
+ private static final AtomicReference<AsyncProfilerJob> ACTIVE = new AtomicReference<>();
+
+ /**
+ * Creates a new {@link AsyncProfilerJob}.
+ *
+ * <p>Will throw an {@link IllegalStateException} if another job is already active.</p>
+ *
+ * @param access the profiler access object
+ * @param profiler the profiler
+ * @return the job
+ */
+ static AsyncProfilerJob createNew(AsyncProfilerAccess access, AsyncProfiler profiler) {
+ synchronized (ACTIVE) {
+ AsyncProfilerJob existing = ACTIVE.get();
+ if (existing != null) {
+ throw new IllegalStateException("Another profiler is already active: " + existing);
+ }
+
+ AsyncProfilerJob job = new AsyncProfilerJob(access, profiler);
+ ACTIVE.set(job);
+ return job;
+ }
+ }
+
+ /** The async-profiler access object */
+ private final AsyncProfilerAccess access;
+ /** The async-profiler instance */
+ private final AsyncProfiler profiler;
+
+ // Set on init
+ /** The platform */
+ private SparkPlatform platform;
+ /** The sampling interval in microseconds */
+ private int interval;
+ /** The thread dumper */
+ private ThreadDumper threadDumper;
+ /** The profiling window */
+ private int window;
+ /** If the profiler should run in quiet mode */
+ private boolean quiet;
+
+ /** The file used by async-profiler to output data */
+ private Path outputFile;
+
+ private AsyncProfilerJob(AsyncProfilerAccess access, AsyncProfiler profiler) {
+ this.access = access;
+ this.profiler = profiler;
+ }
+
+ /**
+ * Executes an async-profiler command.
+ *
+ * @param command the command
+ * @return the output
+ */
+ private String execute(String command) {
+ try {
+ return this.profiler.execute(command);
+ } catch (IOException e) {
+ throw new RuntimeException("Exception whilst executing profiler command", e);
+ }
+ }
+
+ /**
+ * Checks to ensure that this job is still active.
+ */
+ private void checkActive() {
+ if (ACTIVE.get() != this) {
+ throw new IllegalStateException("Profiler job no longer active!");
+ }
+ }
+
+ /**
+ * Initialises the job.
+ *
+ * @param platform the platform
+ * @param interval the sampling interval in microseconds
+ * @param threadDumper the thread dumper
+ * @param window the profiling window for this job
+ * @param quiet if the profiler should run in quiet mode
+ */
+ public void init(SparkPlatform platform, int interval, ThreadDumper threadDumper, int window, boolean quiet) {
+ this.platform = platform;
+ this.interval = interval;
+ this.threadDumper = threadDumper;
+ this.window = window;
+ this.quiet = quiet;
+ }
+
+ /**
+ * Starts the job.
+ */
+ public void start() {
+ checkActive();
+
+ try {
+ // create a new temporary output file
+ try {
+ this.outputFile = this.platform.getTemporaryFiles().create("spark-", "-profile-data.jfr.tmp");
+ } catch (IOException e) {
+ throw new RuntimeException("Unable to create temporary output file", e);
+ }
+
+ // construct a command to send to async-profiler
+ String command = "start,event=" + this.access.getProfilingEvent() + ",interval=" + this.interval + "us,threads,jfr,file=" + this.outputFile.toString();
+ if (this.quiet) {
+ command += ",loglevel=NONE";
+ }
+ if (this.threadDumper instanceof ThreadDumper.Specific) {
+ command += ",filter";
+ }
+
+ // start the profiler
+ String resp = execute(command).trim();
+
+ if (!resp.equalsIgnoreCase("profiling started")) {
+ throw new RuntimeException("Unexpected response: " + resp);
+ }
+
+ // append threads to be profiled, if necessary
+ if (this.threadDumper instanceof ThreadDumper.Specific) {
+ ThreadDumper.Specific threadDumper = (ThreadDumper.Specific) this.threadDumper;
+ for (Thread thread : threadDumper.getThreads()) {
+ this.profiler.addThread(thread);
+ }
+ }
+
+ } catch (Exception e) {
+ try {
+ this.profiler.stop();
+ } catch (Exception e2) {
+ // ignore
+ }
+ close();
+
+ throw e;
+ }
+ }
+
+ /**
+ * Stops the job.
+ */
+ public void stop() {
+ checkActive();
+
+ try {
+ this.profiler.stop();
+ } catch (IllegalStateException e) {
+ if (!e.getMessage().equals("Profiler is not active")) { // ignore
+ throw e;
+ }
+ } finally {
+ close();
+ }
+ }
+
+ /**
+ * Aggregates the collected data.
+ */
+ public void aggregate(AsyncDataAggregator dataAggregator) {
+
+ Predicate<String> threadFilter;
+ if (this.threadDumper instanceof ThreadDumper.Specific) {
+ ThreadDumper.Specific specificDumper = (ThreadDumper.Specific) this.threadDumper;
+ threadFilter = n -> specificDumper.getThreadNames().contains(n.toLowerCase());
+ } else {
+ threadFilter = n -> true;
+ }
+
+ // read the jfr file produced by async-profiler
+ try (JfrReader reader = new JfrReader(this.outputFile)) {
+ readSegments(reader, threadFilter, dataAggregator, this.window);
+ } catch (Exception e) {
+ boolean fileExists;
+ try {
+ fileExists = Files.exists(this.outputFile) && Files.size(this.outputFile) != 0;
+ } catch (IOException ex) {
+ fileExists = false;
+ }
+
+ if (fileExists) {
+ throw new JfrParsingException("Error parsing JFR data from profiler output", e);
+ } else {
+ throw new JfrParsingException("Error parsing JFR data from profiler output - file " + this.outputFile + " does not exist!", e);
+ }
+ }
+
+ deleteOutputFile();
+ }
+
+ public void deleteOutputFile() {
+ try {
+ Files.deleteIfExists(this.outputFile);
+ } catch (IOException e) {
+ // ignore
+ }
+ }
+
+ private void readSegments(JfrReader reader, Predicate<String> threadFilter, AsyncDataAggregator dataAggregator, int window) throws IOException {
+ List<JfrReader.ExecutionSample> samples = reader.readAllEvents(JfrReader.ExecutionSample.class);
+ for (int i = 0; i < samples.size(); i++) {
+ JfrReader.ExecutionSample sample = samples.get(i);
+
+ long duration;
+ if (i == 0) {
+ // we don't really know the duration of the first sample, so just use the sampling
+ // interval
+ duration = this.interval;
+ } else {
+ // calculate the duration of the sample by calculating the time elapsed since the
+ // previous sample
+ duration = TimeUnit.NANOSECONDS.toMicros(sample.time - samples.get(i - 1).time);
+ }
+
+ String threadName = reader.threads.get((long) sample.tid);
+ if (threadName == null) {
+ continue;
+ }
+
+ if (!threadFilter.test(threadName)) {
+ continue;
+ }
+
+ // parse the segment and give it to the data aggregator
+ ProfileSegment segment = ProfileSegment.parseSegment(reader, sample, threadName, duration);
+ dataAggregator.insertData(segment, window);
+ }
+ }
+
+ public int getWindow() {
+ return this.window;
+ }
+
+ private void close() {
+ ACTIVE.compareAndSet(this, null);
+ }
+}
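Tying the pieces together, the lifecycle a caller drives, mirroring what AsyncSampler does below (the interval and duration values are illustrative):

    import me.lucko.spark.common.SparkPlatform;
    import me.lucko.spark.common.sampler.ThreadDumper;
    import me.lucko.spark.common.sampler.async.AsyncDataAggregator;
    import me.lucko.spark.common.sampler.async.AsyncProfilerAccess;
    import me.lucko.spark.common.sampler.async.AsyncProfilerJob;

    public class JobLifecycleDemo {
        static void profileOnce(SparkPlatform platform, AsyncDataAggregator aggregator) throws InterruptedException {
            AsyncProfilerJob job = AsyncProfilerAccess.getInstance(platform).startNewProfilerJob();
            job.init(platform, 1000, ThreadDumper.ALL, 0, false); // 1000us interval, window 0, not quiet
            job.start();

            Thread.sleep(5_000); // profile for five seconds

            job.stop();                // stops async-profiler and releases the active slot
            job.aggregate(aggregator); // parse the JFR output into the aggregator, then delete it
        }
    }
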
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java
index 5cb7fdc..178f055 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java
@@ -25,64 +25,43 @@ import com.google.common.util.concurrent.ThreadFactoryBuilder;
import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.command.sender.CommandSender;
import me.lucko.spark.common.sampler.AbstractSampler;
-import me.lucko.spark.common.sampler.ThreadDumper;
-import me.lucko.spark.common.sampler.ThreadGrouper;
-import me.lucko.spark.common.sampler.async.jfr.JfrReader;
+import me.lucko.spark.common.sampler.SamplerSettings;
import me.lucko.spark.common.sampler.node.MergeMode;
-import me.lucko.spark.common.sampler.node.ThreadNode;
-import me.lucko.spark.common.util.ClassSourceLookup;
-import me.lucko.spark.common.util.TemporaryFiles;
+import me.lucko.spark.common.sampler.source.ClassSourceLookup;
+import me.lucko.spark.common.sampler.window.ProfilingWindowUtils;
+import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.proto.SparkSamplerProtos.SamplerData;
-import one.profiler.AsyncProfiler;
-
-import java.io.IOException;
-import java.nio.charset.StandardCharsets;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.util.Comparator;
-import java.util.List;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
-import java.util.function.Predicate;
+import java.util.function.IntPredicate;
/**
* A sampler implementation using async-profiler.
*/
public class AsyncSampler extends AbstractSampler {
- private final AsyncProfiler profiler;
+ private final AsyncProfilerAccess profilerAccess;
/** Responsible for aggregating and then outputting collected sampling data */
private final AsyncDataAggregator dataAggregator;
- /** Flag to mark if the output has been completed */
- private boolean outputComplete = false;
-
- /** The temporary output file */
- private Path outputFile;
+ /** Mutex for the current profiler job */
+ private final Object[] currentJobMutex = new Object[0];
- /** The executor used for timeouts */
- private ScheduledExecutorService timeoutExecutor;
+ /** Current profiler job */
+ private AsyncProfilerJob currentJob;
- public AsyncSampler(int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime) {
- super(interval, threadDumper, endTime);
- this.profiler = AsyncProfilerAccess.INSTANCE.getProfiler();
- this.dataAggregator = new AsyncDataAggregator(threadGrouper);
- }
+ /** The executor used for scheduling and management */
+ private ScheduledExecutorService scheduler;
- /**
- * Executes a profiler command.
- *
- * @param command the command to execute
- * @return the response
- */
- private String execute(String command) {
- try {
- return this.profiler.execute(command);
- } catch (IOException e) {
- throw new RuntimeException("Exception whilst executing profiler command", e);
- }
+ public AsyncSampler(SparkPlatform platform, SamplerSettings settings) {
+ super(platform, settings);
+ this.profilerAccess = AsyncProfilerAccess.getInstance(platform);
+ this.dataAggregator = new AsyncDataAggregator(settings.threadGrouper());
+ this.scheduler = Executors.newSingleThreadScheduledExecutor(
+ new ThreadFactoryBuilder().setNameFormat("spark-asyncsampler-worker-thread").build()
+ );
}
/**
@@ -90,51 +69,82 @@ public class AsyncSampler extends AbstractSampler {
*/
@Override
public void start() {
- this.startTime = System.currentTimeMillis();
+ super.start();
- try {
- this.outputFile = TemporaryFiles.create("spark-profile-", ".jfr.tmp");
- } catch (IOException e) {
- throw new RuntimeException("Unable to create temporary output file", e);
+ TickHook tickHook = this.platform.getTickHook();
+ if (tickHook != null) {
+ this.windowStatisticsCollector.startCountingTicks(tickHook);
}
- String command = "start,event=" + AsyncProfilerAccess.INSTANCE.getProfilingEvent() + ",interval=" + this.interval + "us,threads,jfr,file=" + this.outputFile.toString();
- if (this.threadDumper instanceof ThreadDumper.Specific) {
- command += ",filter";
- }
+ int window = ProfilingWindowUtils.windowNow();
- String resp = execute(command).trim();
- if (!resp.equalsIgnoreCase("profiling started")) {
- throw new RuntimeException("Unexpected response: " + resp);
- }
+ AsyncProfilerJob job = this.profilerAccess.startNewProfilerJob();
+ job.init(this.platform, this.interval, this.threadDumper, window, this.background);
+ job.start();
+ this.currentJob = job;
- if (this.threadDumper instanceof ThreadDumper.Specific) {
- ThreadDumper.Specific threadDumper = (ThreadDumper.Specific) this.threadDumper;
- for (Thread thread : threadDumper.getThreads()) {
- this.profiler.addThread(thread);
- }
- }
+ // rotate the sampler job to put data into a new window
+ this.scheduler.scheduleAtFixedRate(
+ this::rotateProfilerJob,
+ ProfilingWindowUtils.WINDOW_SIZE_SECONDS,
+ ProfilingWindowUtils.WINDOW_SIZE_SECONDS,
+ TimeUnit.SECONDS
+ );
recordInitialGcStats();
scheduleTimeout();
}
+ private void rotateProfilerJob() {
+ try {
+ synchronized (this.currentJobMutex) {
+ AsyncProfilerJob previousJob = this.currentJob;
+ if (previousJob == null) {
+ return;
+ }
+
+ try {
+ // stop the previous job
+ previousJob.stop();
+
+ // collect statistics for the window
+ this.windowStatisticsCollector.measureNow(previousJob.getWindow());
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // start a new job
+ int window = previousJob.getWindow() + 1;
+ AsyncProfilerJob newJob = this.profilerAccess.startNewProfilerJob();
+ newJob.init(this.platform, this.interval, this.threadDumper, window, this.background);
+ newJob.start();
+ this.currentJob = newJob;
+
+ // aggregate the output of the previous job
+ previousJob.aggregate(this.dataAggregator);
+
+ // prune data older than the history size
+ IntPredicate predicate = ProfilingWindowUtils.keepHistoryBefore(window);
+ this.dataAggregator.pruneData(predicate);
+ this.windowStatisticsCollector.pruneStatistics(predicate);
+ }
+ } catch (Throwable e) {
+ e.printStackTrace();
+ }
+ }
+
private void scheduleTimeout() {
- if (this.endTime == -1) {
+ if (this.autoEndTime == -1) {
return;
}
- long delay = this.endTime - System.currentTimeMillis();
+ long delay = this.autoEndTime - System.currentTimeMillis();
if (delay <= 0) {
return;
}
- this.timeoutExecutor = Executors.newSingleThreadScheduledExecutor(
- new ThreadFactoryBuilder().setNameFormat("spark-asyncsampler-timeout-thread").build()
- );
-
- this.timeoutExecutor.schedule(() -> {
- stop();
+ this.scheduler.schedule(() -> {
+ stop(false);
this.future.complete(this);
}, delay, TimeUnit.MILLISECONDS);
}
@@ -143,129 +153,32 @@ public class AsyncSampler extends AbstractSampler {
* Stops the profiler.
*/
@Override
- public void stop() {
- try {
- this.profiler.stop();
- } catch (IllegalStateException e) {
- if (!e.getMessage().equals("Profiler is not active")) { // ignore
- throw e;
+ public void stop(boolean cancelled) {
+ super.stop(cancelled);
+
+ synchronized (this.currentJobMutex) {
+ this.currentJob.stop();
+ if (!cancelled) {
+ this.windowStatisticsCollector.measureNow(this.currentJob.getWindow());
+ this.currentJob.aggregate(this.dataAggregator);
+ } else {
+ this.currentJob.deleteOutputFile();
}
+ this.currentJob = null;
}
-
- if (this.timeoutExecutor != null) {
- this.timeoutExecutor.shutdown();
- this.timeoutExecutor = null;
+ if (this.scheduler != null) {
+ this.scheduler.shutdown();
+ this.scheduler = null;
}
}
@Override
- public SamplerData toProto(SparkPlatform platform, CommandSender creator, Comparator<ThreadNode> outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup) {
+ public SamplerData toProto(SparkPlatform platform, CommandSender creator, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup) {
SamplerData.Builder proto = SamplerData.newBuilder();
writeMetadataToProto(proto, platform, creator, comment, this.dataAggregator);
- aggregateOutput();
- writeDataToProto(proto, this.dataAggregator, outputOrder, mergeMode, classSourceLookup);
+ writeDataToProto(proto, this.dataAggregator, mergeMode, classSourceLookup);
return proto.build();
}
- private void aggregateOutput() {
- if (this.outputComplete) {
- return;
- }
- this.outputComplete = true;
-
- Predicate<String> threadFilter;
- if (this.threadDumper instanceof ThreadDumper.Specific) {
- ThreadDumper.Specific threadDumper = (ThreadDumper.Specific) this.threadDumper;
- threadFilter = n -> threadDumper.getThreadNames().contains(n.toLowerCase());
- } else {
- threadFilter = n -> true;
- }
-
- // read the jfr file produced by async-profiler
- try (JfrReader reader = new JfrReader(this.outputFile)) {
- readSegments(reader, threadFilter);
- } catch (IOException e) {
- throw new RuntimeException("Read error", e);
- }
-
- // delete the output file after reading
- try {
- Files.deleteIfExists(this.outputFile);
- } catch (IOException e) {
- // ignore
- }
- }
-
- private void readSegments(JfrReader reader, Predicate<String> threadFilter) throws IOException {
- List<JfrReader.ExecutionSample> samples = reader.readAllEvents(JfrReader.ExecutionSample.class);
- for (int i = 0; i < samples.size(); i++) {
- JfrReader.ExecutionSample sample = samples.get(i);
-
- long duration;
- if (i == 0) {
- // we don't really know the duration of the first sample, so just use the sampling
- // interval
- duration = this.interval;
- } else {
- // calculate the duration of the sample by calculating the time elapsed since the
- // previous sample
- duration = TimeUnit.NANOSECONDS.toMicros(sample.time - samples.get(i - 1).time);
- }
-
- String threadName = reader.threads.get(sample.tid);
- if (!threadFilter.test(threadName)) {
- continue;
- }
-
- // parse the segment and give it to the data aggregator
- ProfileSegment segment = parseSegment(reader, sample, threadName, duration);
- this.dataAggregator.insertData(segment);
- }
- }
-
- private static ProfileSegment parseSegment(JfrReader reader, JfrReader.ExecutionSample sample, String threadName, long duration) {
- JfrReader.StackTrace stackTrace = reader.stackTraces.get(sample.stackTraceId);
- int len = stackTrace.methods.length;
-
- AsyncStackTraceElement[] stack = new AsyncStackTraceElement[len];
- for (int i = 0; i < len; i++) {
- stack[i] = parseStackFrame(reader, stackTrace.methods[i]);
- }
-
- return new ProfileSegment(sample.tid, threadName, stack, duration);
- }
-
- private static AsyncStackTraceElement parseStackFrame(JfrReader reader, long methodId) {
- AsyncStackTraceElement result = reader.stackFrames.get(methodId);
- if (result != null) {
- return result;
- }
-
- JfrReader.MethodRef methodRef = reader.methods.get(methodId);
- JfrReader.ClassRef classRef = reader.classes.get(methodRef.cls);
-
- byte[] className = reader.symbols.get(classRef.name);
- byte[] methodName = reader.symbols.get(methodRef.name);
-
- if (className == null || className.length == 0) {
- // native call
- result = new AsyncStackTraceElement(
- AsyncStackTraceElement.NATIVE_CALL,
- new String(methodName, StandardCharsets.UTF_8),
- null
- );
- } else {
- // java method
- byte[] methodDesc = reader.symbols.get(methodRef.sig);
- result = new AsyncStackTraceElement(
- new String(className, StandardCharsets.UTF_8).replace('/', '.'),
- new String(methodName, StandardCharsets.UTF_8),
- new String(methodDesc, StandardCharsets.UTF_8)
- );
- }
-
- reader.stackFrames.put(methodId, result);
- return result;
- }
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/JfrParsingException.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/JfrParsingException.java
new file mode 100644
index 0000000..6dab359
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/JfrParsingException.java
@@ -0,0 +1,27 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.sampler.async;
+
+public class JfrParsingException extends RuntimeException {
+ public JfrParsingException(String message, Throwable cause) {
+ super(message, cause);
+ }
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/ProfileSegment.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/ProfileSegment.java
index 154e6fe..26debaf 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/ProfileSegment.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/ProfileSegment.java
@@ -20,6 +20,10 @@
package me.lucko.spark.common.sampler.async;
+import me.lucko.spark.common.sampler.async.jfr.JfrReader;
+
+import java.nio.charset.StandardCharsets;
+
/**
* Represents a profile "segment".
*
@@ -58,4 +62,50 @@ public class ProfileSegment {
public long getTime() {
return this.time;
}
+
+ public static ProfileSegment parseSegment(JfrReader reader, JfrReader.ExecutionSample sample, String threadName, long duration) {
+ JfrReader.StackTrace stackTrace = reader.stackTraces.get(sample.stackTraceId);
+ int len = stackTrace.methods.length;
+
+ AsyncStackTraceElement[] stack = new AsyncStackTraceElement[len];
+ for (int i = 0; i < len; i++) {
+ stack[i] = parseStackFrame(reader, stackTrace.methods[i]);
+ }
+
+ return new ProfileSegment(sample.tid, threadName, stack, duration);
+ }
+
+ private static AsyncStackTraceElement parseStackFrame(JfrReader reader, long methodId) {
+ AsyncStackTraceElement result = reader.stackFrames.get(methodId);
+ if (result != null) {
+ return result;
+ }
+
+ JfrReader.MethodRef methodRef = reader.methods.get(methodId);
+ JfrReader.ClassRef classRef = reader.classes.get(methodRef.cls);
+
+ byte[] className = reader.symbols.get(classRef.name);
+ byte[] methodName = reader.symbols.get(methodRef.name);
+
+ if (className == null || className.length == 0) {
+ // native call
+ result = new AsyncStackTraceElement(
+ AsyncStackTraceElement.NATIVE_CALL,
+ new String(methodName, StandardCharsets.UTF_8),
+ null
+ );
+ } else {
+ // java method
+ byte[] methodDesc = reader.symbols.get(methodRef.sig);
+ result = new AsyncStackTraceElement(
+ new String(className, StandardCharsets.UTF_8).replace('/', '.'),
+ new String(methodName, StandardCharsets.UTF_8),
+ new String(methodDesc, StandardCharsets.UTF_8)
+ );
+ }
+
+ reader.stackFrames.put(methodId, result);
+ return result;
+ }
+
}
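The native-vs-java branch in parseStackFrame hinges on the class symbol: async-profiler records an empty class name for native frames, and stores Java class names in the JVM-internal slash form. A minimal standalone sketch of that decoding step (hypothetical demo class, plain strings standing in for the JfrReader symbol tables):

    import java.nio.charset.StandardCharsets;

    public class FrameDecodeDemo {
        public static void main(String[] args) {
            byte[] className = "java/lang/Thread".getBytes(StandardCharsets.UTF_8);
            byte[] methodName = "run".getBytes(StandardCharsets.UTF_8);

            if (className.length == 0) {
                // native call: only the method name is known
                System.out.println("(native) " + new String(methodName, StandardCharsets.UTF_8));
            } else {
                // java method: convert the internal slash form to the familiar dotted form
                String owner = new String(className, StandardCharsets.UTF_8).replace('/', '.');
                System.out.println(owner + "." + new String(methodName, StandardCharsets.UTF_8));
            }
            // prints: java.lang.Thread.run
        }
    }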
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/Dictionary.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/Dictionary.java
index 23223a2..60f6543 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/Dictionary.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/Dictionary.java
@@ -37,6 +37,10 @@ public class Dictionary<T> {
size = 0;
}
+ public int size() {
+ return this.size;
+ }
+
public void put(long key, T value) {
if (key == 0) {
throw new IllegalArgumentException("Zero key not allowed");
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/JfrReader.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/JfrReader.java
index e0cc4e9..ea4985e 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/JfrReader.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/JfrReader.java
@@ -51,7 +51,7 @@ public class JfrReader implements Closeable {
public final Dictionary<JfrClass> types = new Dictionary<>();
public final Map<String, JfrClass> typesByName = new HashMap<>();
- public final Dictionary<String> threads = new Dictionary<>();
+ public final Map<Long, String> threads = new HashMap<>(); // spark
public final Dictionary<ClassRef> classes = new Dictionary<>();
public final Dictionary<byte[]> symbols = new Dictionary<>();
public final Dictionary<MethodRef> methods = new Dictionary<>();
@@ -324,7 +324,7 @@ public class JfrReader implements Closeable {
}
private void readThreads(boolean hasGroup) {
- int count = threads.preallocate(getVarint());
+ int count = getVarint(); //threads.preallocate(getVarint());
for (int i = 0; i < count; i++) {
long id = getVarlong();
String osName = getString();
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaDataAggregator.java
index cc530d6..c51ec05 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaDataAggregator.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaDataAggregator.java
@@ -66,10 +66,11 @@ public abstract class JavaDataAggregator extends AbstractDataAggregator {
* Inserts sampling data into this aggregator
*
* @param threadInfo the thread info
+ * @param window the window
*/
- public abstract void insertData(ThreadInfo threadInfo);
+ public abstract void insertData(ThreadInfo threadInfo, int window);
- protected void writeData(ThreadInfo threadInfo) {
+ protected void writeData(ThreadInfo threadInfo, int window) {
if (this.ignoreSleeping && isSleeping(threadInfo)) {
return;
}
@@ -79,7 +80,7 @@ public abstract class JavaDataAggregator extends AbstractDataAggregator {
try {
ThreadNode node = getNode(this.threadGrouper.getGroup(threadInfo.getThreadId(), threadInfo.getThreadName()));
- node.log(STACK_TRACE_DESCRIBER, threadInfo.getStackTrace(), this.interval);
+ node.log(STACK_TRACE_DESCRIBER, threadInfo.getStackTrace(), this.interval, window);
} catch (Exception e) {
e.printStackTrace();
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java
index cfa0a0f..72a37e8 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java
@@ -25,23 +25,23 @@ import com.google.common.util.concurrent.ThreadFactoryBuilder;
import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.command.sender.CommandSender;
import me.lucko.spark.common.sampler.AbstractSampler;
-import me.lucko.spark.common.sampler.ThreadDumper;
-import me.lucko.spark.common.sampler.ThreadGrouper;
+import me.lucko.spark.common.sampler.SamplerSettings;
import me.lucko.spark.common.sampler.node.MergeMode;
-import me.lucko.spark.common.sampler.node.ThreadNode;
+import me.lucko.spark.common.sampler.source.ClassSourceLookup;
+import me.lucko.spark.common.sampler.window.ProfilingWindowUtils;
+import me.lucko.spark.common.sampler.window.WindowStatisticsCollector;
import me.lucko.spark.common.tick.TickHook;
-import me.lucko.spark.common.util.ClassSourceLookup;
import me.lucko.spark.proto.SparkSamplerProtos.SamplerData;
import java.lang.management.ManagementFactory;
import java.lang.management.ThreadInfo;
import java.lang.management.ThreadMXBean;
-import java.util.Comparator;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
+import java.util.function.IntPredicate;
/**
* A sampler implementation using Java (WarmRoast).
@@ -62,26 +62,47 @@ public class JavaSampler extends AbstractSampler implements Runnable {
/** Responsible for aggregating and then outputting collected sampling data */
private final JavaDataAggregator dataAggregator;
+
+ /** The last window that was profiled */
+ private final AtomicInteger lastWindow = new AtomicInteger();
- public JavaSampler(int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime, boolean ignoreSleeping, boolean ignoreNative) {
- super(interval, threadDumper, endTime);
- this.dataAggregator = new SimpleDataAggregator(this.workerPool, threadGrouper, interval, ignoreSleeping, ignoreNative);
+ public JavaSampler(SparkPlatform platform, SamplerSettings settings, boolean ignoreSleeping, boolean ignoreNative) {
+ super(platform, settings);
+ this.dataAggregator = new SimpleDataAggregator(this.workerPool, settings.threadGrouper(), settings.interval(), ignoreSleeping, ignoreNative);
}
- public JavaSampler(int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime, boolean ignoreSleeping, boolean ignoreNative, TickHook tickHook, int tickLengthThreshold) {
- super(interval, threadDumper, endTime);
- this.dataAggregator = new TickedDataAggregator(this.workerPool, threadGrouper, interval, ignoreSleeping, ignoreNative, tickHook, tickLengthThreshold);
+ public JavaSampler(SparkPlatform platform, SamplerSettings settings, boolean ignoreSleeping, boolean ignoreNative, TickHook tickHook, int tickLengthThreshold) {
+ super(platform, settings);
+ this.dataAggregator = new TickedDataAggregator(this.workerPool, settings.threadGrouper(), settings.interval(), ignoreSleeping, ignoreNative, tickHook, tickLengthThreshold);
}
@Override
public void start() {
- this.startTime = System.currentTimeMillis();
+ super.start();
+
+ TickHook tickHook = this.platform.getTickHook();
+ if (tickHook != null) {
+ if (this.dataAggregator instanceof TickedDataAggregator) {
+ WindowStatisticsCollector.ExplicitTickCounter counter = this.windowStatisticsCollector.startCountingTicksExplicit(tickHook);
+ ((TickedDataAggregator) this.dataAggregator).setTickCounter(counter);
+ } else {
+ this.windowStatisticsCollector.startCountingTicks(tickHook);
+ }
+ }
+
this.task = this.workerPool.scheduleAtFixedRate(this, 0, this.interval, TimeUnit.MICROSECONDS);
}
@Override
- public void stop() {
+ public void stop(boolean cancelled) {
+ super.stop(cancelled);
+
this.task.cancel(false);
+
+ if (!cancelled) {
+ // collect statistics for the final window
+ this.windowStatisticsCollector.measureNow(this.lastWindow.get());
+ }
}
@Override
@@ -89,27 +110,30 @@ public class JavaSampler extends AbstractSampler implements Runnable {
// this is effectively synchronized, the worker pool will not allow this task
// to concurrently execute.
try {
- if (this.endTime != -1 && this.endTime <= System.currentTimeMillis()) {
+ long time = System.currentTimeMillis();
+
+ if (this.autoEndTime != -1 && this.autoEndTime <= time) {
+ stop(false);
this.future.complete(this);
- stop();
return;
}
+ int window = ProfilingWindowUtils.unixMillisToWindow(time);
ThreadInfo[] threadDumps = this.threadDumper.dumpThreads(this.threadBean);
- this.workerPool.execute(new InsertDataTask(this.dataAggregator, threadDumps));
+ this.workerPool.execute(new InsertDataTask(threadDumps, window));
} catch (Throwable t) {
+ stop(false);
this.future.completeExceptionally(t);
- stop();
}
}
- private static final class InsertDataTask implements Runnable {
- private final JavaDataAggregator dataAggregator;
+ private final class InsertDataTask implements Runnable {
private final ThreadInfo[] threadDumps;
+ private final int window;
- InsertDataTask(JavaDataAggregator dataAggregator, ThreadInfo[] threadDumps) {
- this.dataAggregator = dataAggregator;
+ InsertDataTask(ThreadInfo[] threadDumps, int window) {
this.threadDumps = threadDumps;
+ this.window = window;
}
@Override
@@ -118,16 +142,29 @@ public class JavaSampler extends AbstractSampler implements Runnable {
if (threadInfo.getThreadName() == null || threadInfo.getStackTrace() == null) {
continue;
}
- this.dataAggregator.insertData(threadInfo);
+ JavaSampler.this.dataAggregator.insertData(threadInfo, this.window);
+ }
+
+ // if we have just stepped over into a new window...
+ int previousWindow = JavaSampler.this.lastWindow.getAndUpdate(previous -> Math.max(this.window, previous));
+ if (previousWindow != 0 && previousWindow != this.window) {
+
+ // collect statistics for the previous window
+ JavaSampler.this.windowStatisticsCollector.measureNow(previousWindow);
+
+ // prune data older than the history size
+ IntPredicate predicate = ProfilingWindowUtils.keepHistoryBefore(this.window);
+ JavaSampler.this.dataAggregator.pruneData(predicate);
+ JavaSampler.this.windowStatisticsCollector.pruneStatistics(predicate);
}
}
}
@Override
- public SamplerData toProto(SparkPlatform platform, CommandSender creator, Comparator<ThreadNode> outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup) {
+ public SamplerData toProto(SparkPlatform platform, CommandSender creator, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup) {
SamplerData.Builder proto = SamplerData.newBuilder();
writeMetadataToProto(proto, platform, creator, comment, this.dataAggregator);
- writeDataToProto(proto, this.dataAggregator, outputOrder, mergeMode, classSourceLookup);
+ writeDataToProto(proto, this.dataAggregator, mergeMode, classSourceLookup);
return proto.build();
}
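The window rollover in InsertDataTask relies on AtomicInteger#getAndUpdate to detect, exactly once, the first sample that lands in a new window. A standalone sketch of that pattern (hypothetical demo class; the 60-window history size mirrors the ProfilingWindowUtils default):

    import java.util.concurrent.atomic.AtomicInteger;
    import java.util.function.IntPredicate;

    public class WindowRolloverDemo {
        private static final AtomicInteger lastWindow = new AtomicInteger();

        public static void main(String[] args) {
            onSample(100); // previous window == 0: first ever sample, nothing to close out
            onSample(100); // same window: no rollover
            onSample(101); // stepped into a new window: measure 100, prune old data
        }

        static void onSample(int window) {
            int previous = lastWindow.getAndUpdate(prev -> Math.max(window, prev));
            if (previous != 0 && previous != window) {
                // collect statistics for the window that just closed
                System.out.println("measure statistics for window " + previous);
                // and prune data older than the history size
                IntPredicate prune = w -> w < window - 60;
                System.out.println("window 40 pruned? " + prune.test(40)); // true (40 < 41)
            }
        }
    }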
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/SimpleDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/SimpleDataAggregator.java
index 39e21aa..54173fe 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/SimpleDataAggregator.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/SimpleDataAggregator.java
@@ -44,8 +44,8 @@ public class SimpleDataAggregator extends JavaDataAggregator {
}
@Override
- public void insertData(ThreadInfo threadInfo) {
- writeData(threadInfo);
+ public void insertData(ThreadInfo threadInfo, int window) {
+ writeData(threadInfo, window);
}
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java
index e817828..d537b96 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java
@@ -23,6 +23,7 @@ package me.lucko.spark.common.sampler.java;
import me.lucko.spark.common.sampler.ThreadGrouper;
import me.lucko.spark.common.sampler.aggregator.DataAggregator;
import me.lucko.spark.common.sampler.node.ThreadNode;
+import me.lucko.spark.common.sampler.window.WindowStatisticsCollector;
import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata;
@@ -47,11 +48,15 @@ public class TickedDataAggregator extends JavaDataAggregator {
/** The expected number of samples in each tick */
private final int expectedSize;
- private final Object mutex = new Object();
+ /** Counts the number of ticks aggregated */
+ private WindowStatisticsCollector.ExplicitTickCounter tickCounter;
// state
private int currentTick = -1;
- private TickList currentData = new TickList(0);
+ private TickList currentData = null;
+
+ // guards currentData
+ private final Object mutex = new Object();
public TickedDataAggregator(ExecutorService workerPool, ThreadGrouper threadGrouper, int interval, boolean ignoreSleeping, boolean ignoreNative, TickHook tickHook, int tickLengthThreshold) {
super(workerPool, threadGrouper, interval, ignoreSleeping, ignoreNative);
@@ -62,23 +67,34 @@ public class TickedDataAggregator extends JavaDataAggregator {
this.expectedSize = (int) ((50 / intervalMilliseconds) + 10);
}
+ public void setTickCounter(WindowStatisticsCollector.ExplicitTickCounter tickCounter) {
+ this.tickCounter = tickCounter;
+ }
+
@Override
public SamplerMetadata.DataAggregator getMetadata() {
+ // push the current tick (so numberOfTicks is accurate)
+ synchronized (this.mutex) {
+ pushCurrentTick();
+ this.currentData = null;
+ }
+
return SamplerMetadata.DataAggregator.newBuilder()
.setType(SamplerMetadata.DataAggregator.Type.TICKED)
.setThreadGrouper(this.threadGrouper.asProto())
.setTickLengthThreshold(this.tickLengthThreshold)
+ .setNumberOfIncludedTicks(this.tickCounter.getTotalCountedTicks())
.build();
}
@Override
- public void insertData(ThreadInfo threadInfo) {
+ public void insertData(ThreadInfo threadInfo, int window) {
synchronized (this.mutex) {
int tick = this.tickHook.getCurrentTick();
- if (this.currentTick != tick) {
+ if (this.currentTick != tick || this.currentData == null) {
pushCurrentTick();
this.currentTick = tick;
- this.currentData = new TickList(this.expectedSize);
+ this.currentData = new TickList(this.expectedSize, window);
}
this.currentData.addData(threadInfo);
@@ -88,6 +104,9 @@ public class TickedDataAggregator extends JavaDataAggregator {
// guarded by 'mutex'
private void pushCurrentTick() {
TickList currentData = this.currentData;
+ if (currentData == null) {
+ return;
+ }
// approximate how long the tick lasted
int tickLengthMicros = currentData.getList().size() * this.interval;
@@ -98,6 +117,7 @@ public class TickedDataAggregator extends JavaDataAggregator {
}
this.workerPool.submit(currentData);
+ this.tickCounter.increment();
}
@Override
@@ -112,15 +132,17 @@ public class TickedDataAggregator extends JavaDataAggregator {
private final class TickList implements Runnable {
private final List<ThreadInfo> list;
+ private final int window;
- TickList(int expectedSize) {
+ TickList(int expectedSize, int window) {
this.list = new ArrayList<>(expectedSize);
+ this.window = window;
}
@Override
public void run() {
for (ThreadInfo data : this.list) {
- writeData(data);
+ writeData(data, this.window);
}
}
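The expectedSize formula above sizes each TickList for one nominal 50 ms tick's worth of samples, plus head-room. A worked example, assuming the interval is supplied in microseconds as elsewhere in the sampler:

    public class ExpectedSizeDemo {
        public static void main(String[] args) {
            int interval = 4000;                            // sampling interval: 4000 µs
            double intervalMilliseconds = interval / 1000d; // = 4 ms
            // one nominal tick is 50 ms, plus head-room of 10 extra samples
            int expectedSize = (int) ((50 / intervalMilliseconds) + 10);
            System.out.println(expectedSize);               // 22 (12.5 + 10, truncated)
        }
    }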
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java
index fd2be8d..163365c 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java
@@ -20,69 +20,81 @@
package me.lucko.spark.common.sampler.node;
+import me.lucko.spark.common.sampler.window.ProtoTimeEncoder;
+
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
+import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.LongAdder;
+import java.util.function.IntPredicate;
/**
* Encapsulates a timed node in the sampling stack.
*/
public abstract class AbstractNode {
- private static final int MAX_STACK_DEPTH = 300;
+ protected static final int MAX_STACK_DEPTH = Integer.getInteger("spark.maxStackDepth", 300);
/** A map of the nodes children */
private final Map<StackTraceNode.Description, StackTraceNode> children = new ConcurrentHashMap<>();
/** The accumulated sample time for this node, measured in microseconds */
- private final LongAdder totalTime = new LongAdder();
+ // Integer key = the window (effectively System.currentTimeMillis() / 60_000)
+ // LongAdder value = accumulated time in microseconds
+ private final Map<Integer, LongAdder> times = new ConcurrentHashMap<>();
/**
- * Gets the total sample time logged for this node in milliseconds.
+ * Gets the time accumulator for a given window
*
- * @return the total time
+ * @param window the window
+ * @return the accumulator
*/
- public double getTotalTime() {
- return this.totalTime.longValue() / 1000d;
+ protected LongAdder getTimeAccumulator(int window) {
+ LongAdder adder = this.times.get(window);
+ if (adder == null) {
+ adder = new LongAdder();
+ this.times.put(window, adder);
+ }
+ return adder;
}
- public Collection<StackTraceNode> getChildren() {
- return this.children.values();
+ /**
+ * Gets the time windows that have been logged for this node.
+ *
+ * @return the time windows
+ */
+ public Set<Integer> getTimeWindows() {
+ return this.times.keySet();
}
/**
- * Logs the given stack trace against this node and its children.
+ * Removes time windows from this node if they pass the given {@code predicate} test.
*
- * @param describer the function that describes the elements of the stack
- * @param stack the stack
- * @param time the total time to log
- * @param <T> the stack trace element type
+ * @param predicate the predicate
+ * @return true if any time windows were removed
*/
- public <T> void log(StackTraceNode.Describer<T> describer, T[] stack, long time) {
- if (stack.length == 0) {
- return;
- }
-
- this.totalTime.add(time);
-
- AbstractNode node = this;
- T previousElement = null;
-
- for (int offset = 0; offset < Math.min(MAX_STACK_DEPTH, stack.length); offset++) {
- T element = stack[(stack.length - 1) - offset];
+ public boolean removeTimeWindows(IntPredicate predicate) {
+ return this.times.keySet().removeIf(predicate::test);
+ }
- node = node.resolveChild(describer.describe(element, previousElement));
- node.totalTime.add(time);
+ /**
+ * Gets the encoded total sample times logged for this node in milliseconds.
+ *
+ * @return the total times
+ */
+ protected double[] encodeTimesForProto(ProtoTimeEncoder encoder) {
+ return encoder.encode(this.times);
+ }
- previousElement = element;
- }
+ public Collection<StackTraceNode> getChildren() {
+ return this.children.values();
}
- private StackTraceNode resolveChild(StackTraceNode.Description description) {
+ protected StackTraceNode resolveChild(StackTraceNode.Description description) {
StackTraceNode result = this.children.get(description); // fast path
if (result != null) {
return result;
@@ -96,7 +108,7 @@ public abstract class AbstractNode {
* @param other the other node
*/
protected void merge(AbstractNode other) {
- this.totalTime.add(other.totalTime.longValue());
+ other.times.forEach((key, value) -> getTimeAccumulator(key).add(value.longValue()));
for (Map.Entry<StackTraceNode.Description, StackTraceNode> child : other.children.entrySet()) {
resolveChild(child.getKey()).merge(child.getValue());
}
@@ -123,7 +135,6 @@ public abstract class AbstractNode {
list.add(child);
}
- list.sort(null);
return list;
}
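The change above replaces the node's single LongAdder with a per-window map, so time accumulates per minute instead of into one total. A minimal sketch of that structure (ConcurrentHashMap#computeIfAbsent is used here for brevity; the patch's getTimeAccumulator does an explicit get-then-put):

    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.atomic.LongAdder;

    public class WindowTimesDemo {
        private static final Map<Integer, LongAdder> times = new ConcurrentHashMap<>();

        public static void main(String[] args) {
            int window = (int) (System.currentTimeMillis() / 60_000); // one window per minute
            // two 4000 µs samples land in the same window...
            times.computeIfAbsent(window, w -> new LongAdder()).add(4000);
            times.computeIfAbsent(window, w -> new LongAdder()).add(4000);
            // ...and accumulate into a single per-window counter
            System.out.println(times.get(window).longValue()); // 8000
        }
    }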
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java
index b0d9237..c0dcc5b 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java
@@ -20,6 +20,7 @@
package me.lucko.spark.common.sampler.node;
+import me.lucko.spark.common.sampler.window.ProtoTimeEncoder;
import me.lucko.spark.common.util.MethodDisambiguator;
import me.lucko.spark.proto.SparkSamplerProtos;
@@ -30,7 +31,7 @@ import java.util.Objects;
/**
* Represents a stack trace element within the {@link AbstractNode node} structure.
*/
-public final class StackTraceNode extends AbstractNode implements Comparable<StackTraceNode> {
+public final class StackTraceNode extends AbstractNode {
/**
* Magic number to denote "no present" line number for a node.
@@ -64,12 +65,16 @@ public final class StackTraceNode extends AbstractNode implements Comparable<Sta
return this.description.parentLineNumber;
}
- public SparkSamplerProtos.StackTraceNode toProto(MergeMode mergeMode) {
+ public SparkSamplerProtos.StackTraceNode toProto(MergeMode mergeMode, ProtoTimeEncoder timeEncoder, Iterable<Integer> childrenRefs) {
SparkSamplerProtos.StackTraceNode.Builder proto = SparkSamplerProtos.StackTraceNode.newBuilder()
- .setTime(getTotalTime())
.setClassName(this.description.className)
.setMethodName(this.description.methodName);
+ double[] times = encodeTimesForProto(timeEncoder);
+ for (double time : times) {
+ proto.addTimes(time);
+ }
+
if (this.description.lineNumber >= 0) {
proto.setLineNumber(this.description.lineNumber);
}
@@ -86,27 +91,11 @@ public final class StackTraceNode extends AbstractNode implements Comparable<Sta
.ifPresent(proto::setMethodDesc);
}
- for (StackTraceNode child : exportChildren(mergeMode)) {
- proto.addChildren(child.toProto(mergeMode));
- }
+ proto.addAllChildrenRefs(childrenRefs);
return proto.build();
}
- @Override
- public int compareTo(StackTraceNode that) {
- if (this == that) {
- return 0;
- }
-
- int i = -Double.compare(this.getTotalTime(), that.getTotalTime());
- if (i != 0) {
- return i;
- }
-
- return this.description.compareTo(that.description);
- }
-
/**
* Function to construct a {@link StackTraceNode.Description} from a stack trace element
* of type {@code T}.
@@ -129,7 +118,7 @@ public final class StackTraceNode extends AbstractNode implements Comparable<Sta
/**
* Encapsulates the attributes of a {@link StackTraceNode}.
*/
- public static final class Description implements Comparable<Description> {
+ public static final class Description {
private final String className;
private final String methodName;
@@ -162,54 +151,6 @@ public final class StackTraceNode extends AbstractNode implements Comparable<Sta
this.hash = Objects.hash(this.className, this.methodName, this.methodDescription);
}
- private static <T extends Comparable<T>> int nullCompare(T a, T b) {
- if (a == null && b == null) {
- return 0;
- } else if (a == null) {
- return -1;
- } else if (b == null) {
- return 1;
- } else {
- return a.compareTo(b);
- }
- }
-
- @Override
- public int compareTo(Description that) {
- if (this == that) {
- return 0;
- }
-
- int i = this.className.compareTo(that.className);
- if (i != 0) {
- return i;
- }
-
- i = this.methodName.compareTo(that.methodName);
- if (i != 0) {
- return i;
- }
-
- i = nullCompare(this.methodDescription, that.methodDescription);
- if (i != 0) {
- return i;
- }
-
- if (this.methodDescription != null && that.methodDescription != null) {
- i = this.methodDescription.compareTo(that.methodDescription);
- if (i != 0) {
- return i;
- }
- }
-
- i = Integer.compare(this.lineNumber, that.lineNumber);
- if (i != 0) {
- return i;
- }
-
- return Integer.compare(this.parentLineNumber, that.parentLineNumber);
- }
-
@Override
public boolean equals(Object o) {
if (this == o) return true;
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java
index ed97443..37ff359 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java
@@ -20,8 +20,19 @@
package me.lucko.spark.common.sampler.node;
+import me.lucko.spark.common.sampler.window.ProtoTimeEncoder;
+import me.lucko.spark.common.util.IndexedListBuilder;
import me.lucko.spark.proto.SparkSamplerProtos;
+import java.util.ArrayDeque;
+import java.util.Collection;
+import java.util.Deque;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Queue;
+import java.util.function.IntPredicate;
+
/**
* The root of a sampling stack for a given thread / thread group.
*/
@@ -53,15 +64,162 @@ public final class ThreadNode extends AbstractNode {
this.label = label;
}
- public SparkSamplerProtos.ThreadNode toProto(MergeMode mergeMode) {
+ /**
+ * Logs the given stack trace against this node and its children.
+ *
+ * @param describer the function that describes the elements of the stack
+ * @param stack the stack
+ * @param time the total time to log
+ * @param window the window
+ * @param <T> the stack trace element type
+ */
+ public <T> void log(StackTraceNode.Describer<T> describer, T[] stack, long time, int window) {
+ if (stack.length == 0) {
+ return;
+ }
+
+ getTimeAccumulator(window).add(time);
+
+ AbstractNode node = this;
+ T previousElement = null;
+
+ for (int offset = 0; offset < Math.min(MAX_STACK_DEPTH, stack.length); offset++) {
+ T element = stack[(stack.length - 1) - offset];
+
+ node = node.resolveChild(describer.describe(element, previousElement));
+ node.getTimeAccumulator(window).add(time);
+
+ previousElement = element;
+ }
+ }
+
+ /**
+ * Removes time windows that match the given {@code predicate}.
+ *
+ * @param predicate the predicate to use to test the time windows
+ * @return true if this node is now empty
+ */
+ public boolean removeTimeWindowsRecursively(IntPredicate predicate) {
+ Queue<AbstractNode> queue = new ArrayDeque<>();
+ queue.add(this);
+
+ while (!queue.isEmpty()) {
+ AbstractNode node = queue.remove();
+ Collection<StackTraceNode> children = node.getChildren();
+
+ boolean needToProcessChildren = false;
+
+ for (Iterator<StackTraceNode> it = children.iterator(); it.hasNext(); ) {
+ StackTraceNode child = it.next();
+
+ boolean windowsWereRemoved = child.removeTimeWindows(predicate);
+ boolean childIsNowEmpty = child.getTimeWindows().isEmpty();
+
+ if (childIsNowEmpty) {
+ it.remove();
+ continue;
+ }
+
+ if (windowsWereRemoved) {
+ needToProcessChildren = true;
+ }
+ }
+
+ if (needToProcessChildren) {
+ queue.addAll(children);
+ }
+ }
+
+ removeTimeWindows(predicate);
+ return getTimeWindows().isEmpty();
+ }
+
+ public SparkSamplerProtos.ThreadNode toProto(MergeMode mergeMode, ProtoTimeEncoder timeEncoder) {
SparkSamplerProtos.ThreadNode.Builder proto = SparkSamplerProtos.ThreadNode.newBuilder()
- .setName(getThreadLabel())
- .setTime(getTotalTime());
+ .setName(getThreadLabel());
+
+ double[] times = encodeTimesForProto(timeEncoder);
+ for (double time : times) {
+ proto.addTimes(time);
+ }
+
+ // When converting to a proto, we change the data structure from a recursive tree to an array.
+ // Effectively, instead of:
+ //
+ // {
+ // data: 'one',
+ // children: [
+ // {
+ // data: 'two',
+ // children: [{ data: 'four' }]
+ // },
+ // { data: 'three' }
+ // ]
+ // }
+ //
+ // we transmit:
+ //
+ // [
+ // { data: 'one', children: [1, 2] },
+    //      { data: 'two', children: [3] },
+    //      { data: 'three', children: [] },
+ // { data: 'four', children: [] }
+ // ]
+ //
+
+ // the flattened array of nodes
+ IndexedListBuilder<SparkSamplerProtos.StackTraceNode> nodesArray = new IndexedListBuilder<>();
+ // Perform a depth-first post order traversal of the tree
+ Deque<Node> stack = new ArrayDeque<>();
+
+ // push the thread node's children to the stack
+ List<Integer> childrenRefs = new LinkedList<>();
for (StackTraceNode child : exportChildren(mergeMode)) {
- proto.addChildren(child.toProto(mergeMode));
+ stack.push(new Node(child, childrenRefs));
+ }
+
+ Node node;
+ while (!stack.isEmpty()) {
+ node = stack.peek();
+
+ // on the first visit, just push this node's children and leave it on the stack
+ if (node.firstVisit) {
+ for (StackTraceNode child : node.stackTraceNode.exportChildren(mergeMode)) {
+ stack.push(new Node(child, node.childrenRefs));
+ }
+ node.firstVisit = false;
+ continue;
+ }
+
+ // convert StackTraceNode to a proto
+ // - at this stage, we have already visited this node's children
+ // - the refs for each child are stored in node.childrenRefs
+ SparkSamplerProtos.StackTraceNode childProto = node.stackTraceNode.toProto(mergeMode, timeEncoder, node.childrenRefs);
+
+ // add the child proto to the nodes array, and record the ref in the parent
+ int childIndex = nodesArray.add(childProto);
+ node.parentChildrenRefs.add(childIndex);
+
+ // pop from the stack
+ stack.pop();
}
+ proto.addAllChildrenRefs(childrenRefs);
+ proto.addAllChildren(nodesArray.build());
+
return proto.build();
}
+
+ private static final class Node {
+ private final StackTraceNode stackTraceNode;
+ private boolean firstVisit = true;
+ private final List<Integer> childrenRefs = new LinkedList<>();
+ private final List<Integer> parentChildrenRefs;
+
+ private Node(StackTraceNode node, List<Integer> parentChildrenRefs) {
+ this.stackTraceNode = node;
+ this.parentChildrenRefs = parentChildrenRefs;
+ }
+ }
}
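The array layout in the comment above is illustrative; the actual order depends on the traversal, and only the children refs matter. A self-contained recursive sketch of the same flattening (the patch uses an explicit stack rather than recursion, which copes better with deep stacks):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class FlattenDemo {
        static class Node {
            final String data;
            final List<Node> children;
            Node(String data, Node... children) {
                this.data = data;
                this.children = Arrays.asList(children);
            }
        }

        static final List<String> flat = new ArrayList<>();

        public static void main(String[] args) {
            Node root = new Node("one", new Node("two", new Node("four")), new Node("three"));
            List<Integer> refs = new ArrayList<>();
            for (Node child : root.children) {
                refs.add(flatten(child));
            }
            System.out.println(refs); // [1, 2]
            System.out.println(flat); // [four -> [], two -> [0], three -> []]
        }

        // post-order: children are appended (and assigned indices) before their parent
        static int flatten(Node node) {
            List<Integer> childRefs = new ArrayList<>();
            for (Node child : node.children) {
                childRefs.add(flatten(child));
            }
            flat.add(node.data + " -> " + childRefs);
            return flat.size() - 1;
        }
    }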
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/source/ClassSourceLookup.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/source/ClassSourceLookup.java
new file mode 100644
index 0000000..ab63c00
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/source/ClassSourceLookup.java
@@ -0,0 +1,462 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.sampler.source;
+
+import me.lucko.spark.common.SparkPlatform;
+import me.lucko.spark.common.sampler.node.StackTraceNode;
+import me.lucko.spark.common.sampler.node.ThreadNode;
+import me.lucko.spark.common.util.ClassFinder;
+
+import org.checkerframework.checker.nullness.qual.Nullable;
+
+import java.io.IOException;
+import java.net.MalformedURLException;
+import java.net.URISyntaxException;
+import java.net.URL;
+import java.net.URLClassLoader;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.security.CodeSource;
+import java.security.ProtectionDomain;
+import java.util.ArrayDeque;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Queue;
+import java.util.function.Function;
+import java.util.stream.Collectors;
+
+/**
+ * A function which defines the source of given {@link Class}es or (Mixin) method calls.
+ */
+public interface ClassSourceLookup {
+
+ /**
+ * Identify the given class.
+ *
+ * @param clazz the class
+ * @return the source of the class
+ */
+ @Nullable String identify(Class<?> clazz) throws Exception;
+
+ /**
+ * Identify the given method call.
+ *
+ * @param methodCall the method call info
+ * @return the source of the method call
+ */
+ default @Nullable String identify(MethodCall methodCall) throws Exception {
+ return null;
+ }
+
+ /**
+ * Identify the given method call.
+ *
+ * @param methodCall the method call info
+ * @return the source of the method call
+ */
+ default @Nullable String identify(MethodCallByLine methodCall) throws Exception {
+ return null;
+ }
+
+ /**
+ * A no-operation {@link ClassSourceLookup}.
+ */
+ ClassSourceLookup NO_OP = new ClassSourceLookup() {
+ @Override
+ public @Nullable String identify(Class<?> clazz) {
+ return null;
+ }
+ };
+
+ static ClassSourceLookup create(SparkPlatform platform) {
+ try {
+ return platform.createClassSourceLookup();
+ } catch (Exception e) {
+ e.printStackTrace();
+ return NO_OP;
+ }
+ }
+
+ /**
+ * A {@link ClassSourceLookup} which identifies classes based on their {@link ClassLoader}.
+ */
+ abstract class ByClassLoader implements ClassSourceLookup {
+
+ public abstract @Nullable String identify(ClassLoader loader) throws Exception;
+
+ @Override
+ public final @Nullable String identify(Class<?> clazz) throws Exception {
+ ClassLoader loader = clazz.getClassLoader();
+ while (loader != null) {
+ String source = identify(loader);
+ if (source != null) {
+ return source;
+ }
+ loader = loader.getParent();
+ }
+ return null;
+ }
+ }
+
+ /**
+ * A {@link ClassSourceLookup} which identifies classes based on URL.
+ */
+ interface ByUrl extends ClassSourceLookup {
+
+ default String identifyUrl(URL url) throws URISyntaxException, MalformedURLException {
+ Path path = null;
+
+ String protocol = url.getProtocol();
+ if (protocol.equals("file")) {
+ path = Paths.get(url.toURI());
+ } else if (protocol.equals("jar")) {
+ URL innerUrl = new URL(url.getPath());
+ path = Paths.get(innerUrl.getPath().split("!")[0]);
+ }
+
+ if (path != null) {
+ return identifyFile(path.toAbsolutePath().normalize());
+ }
+
+ return null;
+ }
+
+ default String identifyFile(Path path) {
+ return identifyFileName(path.getFileName().toString());
+ }
+
+ default String identifyFileName(String fileName) {
+ return fileName.endsWith(".jar") ? fileName.substring(0, fileName.length() - 4) : null;
+ }
+ }
+
+ /**
+ * A {@link ClassSourceLookup} which identifies classes based on the first URL in a {@link URLClassLoader}.
+ */
+ class ByFirstUrlSource extends ClassSourceLookup.ByClassLoader implements ClassSourceLookup.ByUrl {
+ @Override
+ public @Nullable String identify(ClassLoader loader) throws IOException, URISyntaxException {
+ if (loader instanceof URLClassLoader) {
+ URLClassLoader urlClassLoader = (URLClassLoader) loader;
+ URL[] urls = urlClassLoader.getURLs();
+ if (urls.length == 0) {
+ return null;
+ }
+ return identifyUrl(urls[0]);
+ }
+ return null;
+ }
+ }
+
+ /**
+ * A {@link ClassSourceLookup} which identifies classes based on their {@link ProtectionDomain#getCodeSource()}.
+ */
+ class ByCodeSource implements ClassSourceLookup, ClassSourceLookup.ByUrl {
+ @Override
+ public @Nullable String identify(Class<?> clazz) throws URISyntaxException, MalformedURLException {
+ ProtectionDomain protectionDomain = clazz.getProtectionDomain();
+ if (protectionDomain == null) {
+ return null;
+ }
+ CodeSource codeSource = protectionDomain.getCodeSource();
+ if (codeSource == null) {
+ return null;
+ }
+
+ URL url = codeSource.getLocation();
+ return url == null ? null : identifyUrl(url);
+ }
+ }
+
+ interface Visitor {
+ void visit(ThreadNode node);
+
+ boolean hasClassSourceMappings();
+
+ Map<String, String> getClassSourceMapping();
+
+ boolean hasMethodSourceMappings();
+
+ Map<String, String> getMethodSourceMapping();
+
+ boolean hasLineSourceMappings();
+
+ Map<String, String> getLineSourceMapping();
+ }
+
+ static Visitor createVisitor(ClassSourceLookup lookup) {
+ if (lookup == ClassSourceLookup.NO_OP) {
+ return NoOpVisitor.INSTANCE; // don't bother!
+ }
+ return new VisitorImpl(lookup);
+ }
+
+ enum NoOpVisitor implements Visitor {
+ INSTANCE;
+
+ @Override
+ public void visit(ThreadNode node) {
+
+ }
+
+ @Override
+ public boolean hasClassSourceMappings() {
+ return false;
+ }
+
+ @Override
+ public Map<String, String> getClassSourceMapping() {
+ return Collections.emptyMap();
+ }
+
+ @Override
+ public boolean hasMethodSourceMappings() {
+ return false;
+ }
+
+ @Override
+ public Map<String, String> getMethodSourceMapping() {
+ return Collections.emptyMap();
+ }
+
+ @Override
+ public boolean hasLineSourceMappings() {
+ return false;
+ }
+
+ @Override
+ public Map<String, String> getLineSourceMapping() {
+ return Collections.emptyMap();
+ }
+ }
+
+ /**
+ * Visitor which scans {@link StackTraceNode}s and accumulates class/method call identities.
+ */
+ class VisitorImpl implements Visitor {
+ private final ClassSourceLookup lookup;
+ private final ClassFinder classFinder = new ClassFinder();
+
+ private final SourcesMap<String> classSources = new SourcesMap<>(Function.identity());
+ private final SourcesMap<MethodCall> methodSources = new SourcesMap<>(MethodCall::toString);
+ private final SourcesMap<MethodCallByLine> lineSources = new SourcesMap<>(MethodCallByLine::toString);
+
+ VisitorImpl(ClassSourceLookup lookup) {
+ this.lookup = lookup;
+ }
+
+ @Override
+ public void visit(ThreadNode node) {
+ Queue<StackTraceNode> queue = new ArrayDeque<>(node.getChildren());
+ for (StackTraceNode n = queue.poll(); n != null; n = queue.poll()) {
+ visitStackNode(n);
+ queue.addAll(n.getChildren());
+ }
+ }
+
+ private void visitStackNode(StackTraceNode node) {
+ this.classSources.computeIfAbsent(
+ node.getClassName(),
+ className -> {
+ Class<?> clazz = this.classFinder.findClass(className);
+ if (clazz == null) {
+ return null;
+ }
+ return this.lookup.identify(clazz);
+ });
+
+ if (node.getMethodDescription() != null) {
+ MethodCall methodCall = new MethodCall(node.getClassName(), node.getMethodName(), node.getMethodDescription());
+ this.methodSources.computeIfAbsent(methodCall, this.lookup::identify);
+ } else {
+ MethodCallByLine methodCall = new MethodCallByLine(node.getClassName(), node.getMethodName(), node.getLineNumber());
+ this.lineSources.computeIfAbsent(methodCall, this.lookup::identify);
+ }
+ }
+
+ @Override
+ public boolean hasClassSourceMappings() {
+ return this.classSources.hasMappings();
+ }
+
+ @Override
+ public Map<String, String> getClassSourceMapping() {
+ return this.classSources.export();
+ }
+
+ @Override
+ public boolean hasMethodSourceMappings() {
+ return this.methodSources.hasMappings();
+ }
+
+ @Override
+ public Map<String, String> getMethodSourceMapping() {
+ return this.methodSources.export();
+ }
+
+ @Override
+ public boolean hasLineSourceMappings() {
+ return this.lineSources.hasMappings();
+ }
+
+ @Override
+ public Map<String, String> getLineSourceMapping() {
+ return this.lineSources.export();
+ }
+ }
+
+ final class SourcesMap<T> {
+ // <key> --> identifier (plugin name)
+ private final Map<T, String> map = new HashMap<>();
+ private final Function<? super T, String> keyToStringFunction;
+
+ private SourcesMap(Function<? super T, String> keyToStringFunction) {
+ this.keyToStringFunction = keyToStringFunction;
+ }
+
+ public void computeIfAbsent(T key, ComputeSourceFunction<T> function) {
+ if (!this.map.containsKey(key)) {
+ try {
+ this.map.put(key, function.compute(key));
+ } catch (Throwable e) {
+ this.map.put(key, null);
+ }
+ }
+ }
+
+ public boolean hasMappings() {
+ this.map.values().removeIf(Objects::isNull);
+ return !this.map.isEmpty();
+ }
+
+ public Map<String, String> export() {
+ this.map.values().removeIf(Objects::isNull);
+ if (this.keyToStringFunction.equals(Function.identity())) {
+ //noinspection unchecked
+ return (Map<String, String>) this.map;
+ } else {
+ return this.map.entrySet().stream().collect(Collectors.toMap(
+ e -> this.keyToStringFunction.apply(e.getKey()),
+ Map.Entry::getValue
+ ));
+ }
+ }
+
+ private interface ComputeSourceFunction<T> {
+ String compute(T key) throws Exception;
+ }
+ }
+
+ /**
+ * Encapsulates information about a given method call using the name + method description.
+ */
+ final class MethodCall {
+ private final String className;
+ private final String methodName;
+ private final String methodDescriptor;
+
+ public MethodCall(String className, String methodName, String methodDescriptor) {
+ this.className = className;
+ this.methodName = methodName;
+ this.methodDescriptor = methodDescriptor;
+ }
+
+ public String getClassName() {
+ return this.className;
+ }
+
+ public String getMethodName() {
+ return this.methodName;
+ }
+
+ public String getMethodDescriptor() {
+ return this.methodDescriptor;
+ }
+
+ @Override
+ public String toString() {
+ return this.className + ";" + this.methodName + ";" + this.methodDescriptor;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (!(o instanceof MethodCall)) return false;
+ MethodCall that = (MethodCall) o;
+ return this.className.equals(that.className) &&
+ this.methodName.equals(that.methodName) &&
+ this.methodDescriptor.equals(that.methodDescriptor);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(this.className, this.methodName, this.methodDescriptor);
+ }
+ }
+
+ /**
+ * Encapsulates information about a given method call using the name + line number.
+ */
+ final class MethodCallByLine {
+ private final String className;
+ private final String methodName;
+ private final int lineNumber;
+
+ public MethodCallByLine(String className, String methodName, int lineNumber) {
+ this.className = className;
+ this.methodName = methodName;
+ this.lineNumber = lineNumber;
+ }
+
+ public String getClassName() {
+ return this.className;
+ }
+
+ public String getMethodName() {
+ return this.methodName;
+ }
+
+ public int getLineNumber() {
+ return this.lineNumber;
+ }
+
+ @Override
+ public String toString() {
+ return this.className + ";" + this.lineNumber;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (!(o instanceof MethodCallByLine)) return false;
+ MethodCallByLine that = (MethodCallByLine) o;
+ return this.lineNumber == that.lineNumber && this.className.equals(that.className);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(this.className, this.lineNumber);
+ }
+ }
+
+}
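To make ByUrl.identifyUrl concrete: a class loaded from a plugin jar typically carries a jar: code-source URL, which the default methods unwrap to a file name and strip of its .jar suffix. A standalone walk-through of those steps (hypothetical jar path):

    import java.net.URL;
    import java.nio.file.Path;
    import java.nio.file.Paths;

    public class IdentifyUrlDemo {
        public static void main(String[] args) throws Exception {
            URL url = new URL("jar:file:/server/plugins/MyPlugin.jar!/com/example/Foo.class");

            // "jar" protocol: unwrap the inner URL, then cut at the '!' separator
            URL innerUrl = new URL(url.getPath());
            Path path = Paths.get(innerUrl.getPath().split("!")[0]); // /server/plugins/MyPlugin.jar

            // identifyFileName: strip the ".jar" suffix to get the source name
            String fileName = path.getFileName().toString();
            String source = fileName.endsWith(".jar")
                    ? fileName.substring(0, fileName.length() - 4)
                    : null;
            System.out.println(source); // MyPlugin
        }
    }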
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/source/SourceMetadata.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/source/SourceMetadata.java
new file mode 100644
index 0000000..0808d66
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/source/SourceMetadata.java
@@ -0,0 +1,81 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.sampler.source;
+
+import com.google.common.collect.ImmutableList;
+
+import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata;
+
+import java.util.Collection;
+import java.util.List;
+import java.util.function.Function;
+
+/**
+ * A "source" is a plugin or mod on the platform that may be identified
+ * as a source of a method call in a profile.
+ */
+public class SourceMetadata {
+
+ public static <T> List<SourceMetadata> gather(Collection<T> sources, Function<? super T, String> nameFunction, Function<? super T, String> versionFunction, Function<? super T, String> authorFunction) {
+ ImmutableList.Builder<SourceMetadata> builder = ImmutableList.builder();
+
+ for (T source : sources) {
+ String name = nameFunction.apply(source);
+ String version = versionFunction.apply(source);
+ String author = authorFunction.apply(source);
+
+ SourceMetadata metadata = new SourceMetadata(name, version, author);
+ builder.add(metadata);
+ }
+
+ return builder.build();
+ }
+
+ private final String name;
+ private final String version;
+ private final String author;
+
+ public SourceMetadata(String name, String version, String author) {
+ this.name = name;
+ this.version = version;
+ this.author = author;
+ }
+
+ public String getName() {
+ return this.name;
+ }
+
+ public String getVersion() {
+ return this.version;
+ }
+
+ public String getAuthor() {
+ return this.author;
+ }
+
+ public SamplerMetadata.SourceMetadata toProto() {
+ return SamplerMetadata.SourceMetadata.newBuilder()
+ .setName(this.name)
+ .setVersion(this.version)
+ .build();
+ }
+
+}
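gather is a convenience for mapping a platform's plugin list into SourceMetadata entries via three accessor functions. A hedged usage sketch (PluginInfo is a hypothetical stand-in for a platform's plugin descriptor type):

    import java.util.Arrays;
    import java.util.List;

    public class GatherDemo {
        // hypothetical stand-in for a platform plugin descriptor
        static class PluginInfo {
            final String name, version, author;
            PluginInfo(String name, String version, String author) {
                this.name = name; this.version = version; this.author = author;
            }
        }

        public static void main(String[] args) {
            List<PluginInfo> plugins = Arrays.asList(
                    new PluginInfo("ExamplePlugin", "1.0", "someone"),
                    new PluginInfo("AnotherPlugin", "2.3", "someone-else"));

            // with the class above, this becomes:
            // List<SourceMetadata> sources = SourceMetadata.gather(
            //         plugins, p -> p.name, p -> p.version, p -> p.author);
            plugins.forEach(p -> System.out.println(p.name + " v" + p.version + " by " + p.author));
        }
    }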
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/window/ProfilingWindowUtils.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/window/ProfilingWindowUtils.java
new file mode 100644
index 0000000..be6f08a
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/window/ProfilingWindowUtils.java
@@ -0,0 +1,70 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.sampler.window;
+
+import me.lucko.spark.common.sampler.aggregator.DataAggregator;
+
+import java.util.function.IntPredicate;
+
+public enum ProfilingWindowUtils {
+ ;
+
+ /**
+ * The size/duration of a profiling window in seconds.
+ * (1 window = 1 minute)
+ */
+ public static final int WINDOW_SIZE_SECONDS = 60;
+
+ /**
+ * The number of windows to record in continuous profiling before data is dropped.
+ * (60 windows * 1 minute = 1 hour of profiling data)
+ */
+ public static final int HISTORY_SIZE = Integer.getInteger("spark.continuousProfilingHistorySize", 60);
+
+ /**
+ * Gets the profiling window for the given time in unix-millis.
+ *
+ * @param time the time in milliseconds
+ * @return the window
+ */
+ public static int unixMillisToWindow(long time) {
+ return (int) (time / (WINDOW_SIZE_SECONDS * 1000L));
+ }
+
+ /**
+ * Gets the window at the current time.
+ *
+ * @return the window
+ */
+ public static int windowNow() {
+ return unixMillisToWindow(System.currentTimeMillis());
+ }
+
+ /**
+ * Gets a prune predicate that can be passed to {@link DataAggregator#pruneData(IntPredicate)}.
+ *
+     * @param currentWindow the current window
+     * @return the prune predicate
+ */
+ public static IntPredicate keepHistoryBefore(int currentWindow) {
+ // windows that were earlier than (currentWindow minus history size) should be pruned
+ return window -> window < (currentWindow - HISTORY_SIZE);
+ }
+}
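Worked example of the window arithmetic: dividing unix-millis by 60 000 yields one window per minute, and keepHistoryBefore drops windows more than HISTORY_SIZE minutes behind the current one:

    public class WindowMathDemo {
        public static void main(String[] args) {
            long time = 1_672_531_200_000L;      // 2023-01-01 00:00:00 UTC, unix millis
            int window = (int) (time / 60_000L); // unixMillisToWindow: one window per minute
            System.out.println(window);          // 27875520

            int historySize = 60;                // the default HISTORY_SIZE
            // keepHistoryBefore(window) prunes anything more than an hour old:
            System.out.println((window - 61) < (window - historySize)); // true  -> pruned
            System.out.println((window - 60) < (window - historySize)); // false -> kept
        }
    }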
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/window/ProtoTimeEncoder.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/window/ProtoTimeEncoder.java
new file mode 100644
index 0000000..03da075
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/window/ProtoTimeEncoder.java
@@ -0,0 +1,93 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.sampler.window;
+
+import me.lucko.spark.common.sampler.async.jfr.Dictionary;
+import me.lucko.spark.common.sampler.node.ThreadNode;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.atomic.LongAdder;
+import java.util.stream.IntStream;
+
+/**
+ * Encodes a map of int->double into a double array.
+ */
+public class ProtoTimeEncoder {
+ /** A sorted array of all possible keys to encode */
+ private final int[] keys;
+ /** A map of key value -> index in the keys array */
+ private final Map<Integer, Integer> keysToIndex;
+
+ public ProtoTimeEncoder(List<ThreadNode> sourceData) {
+ // get an array of all keys that show up in the source data
+ this.keys = sourceData.stream()
+ .map(n -> n.getTimeWindows().stream().mapToInt(i -> i))
+ .reduce(IntStream.empty(), IntStream::concat)
+ .distinct()
+ .sorted()
+ .toArray();
+
+ // construct a reverse index lookup
+ this.keysToIndex = new HashMap<>(this.keys.length);
+ for (int i = 0; i < this.keys.length; i++) {
+ this.keysToIndex.put(this.keys[i], i);
+ }
+ }
+
+ /**
+ * Gets an array of the keys that could be encoded by this encoder.
+ *
+ * @return an array of keys
+ */
+ public int[] getKeys() {
+ return this.keys;
+ }
+
+ /**
+ * Encode a {@link Dictionary} (map) of times/durations into a double array.
+ *
+     * @param times a map of times (window -> duration in microseconds)
+ * @return the times encoded as a double array
+ */
+ public double[] encode(Map<Integer, LongAdder> times) {
+ // construct an array of values - length needs to exactly match the
+ // number of keys, even if some values are zero.
+ double[] array = new double[this.keys.length];
+
+ times.forEach((key, value) -> {
+ // get the index for the given key
+ Integer idx = this.keysToIndex.get(key);
+ if (idx == null) {
+ throw new RuntimeException("No index for key " + key + " in " + this.keysToIndex.keySet());
+ }
+
+ // convert the duration from microseconds -> milliseconds
+ double durationInMilliseconds = value.longValue() / 1000d;
+
+ // store in the array
+ array[idx] = durationInMilliseconds;
+ });
+
+ return array;
+ }
+}
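The encoder's contract is that every node's times array lines up index-for-index with the shared, sorted key array, with zeros for windows a node never saw. A self-contained sketch of that alignment (plain maps standing in for the ThreadNode source data):

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.Map;
    import java.util.concurrent.atomic.LongAdder;

    public class EncodeDemo {
        public static void main(String[] args) {
            // keys gathered from all thread nodes, sorted: windows 100, 101, 102
            int[] keys = {100, 101, 102};
            Map<Integer, Integer> keysToIndex = new HashMap<>();
            for (int i = 0; i < keys.length; i++) {
                keysToIndex.put(keys[i], i);
            }

            // this node only recorded time in windows 100 and 102
            Map<Integer, LongAdder> times = new HashMap<>();
            times.computeIfAbsent(100, w -> new LongAdder()).add(5_000);  // 5000 µs
            times.computeIfAbsent(102, w -> new LongAdder()).add(12_000); // 12000 µs

            // encode: same length as keys, microseconds -> milliseconds, gaps stay zero
            double[] array = new double[keys.length];
            times.forEach((key, value) -> array[keysToIndex.get(key)] = value.longValue() / 1000d);
            System.out.println(Arrays.toString(array)); // [5.0, 0.0, 12.0]
        }
    }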
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/window/WindowStatisticsCollector.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/window/WindowStatisticsCollector.java
new file mode 100644
index 0000000..ce65013
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/window/WindowStatisticsCollector.java
@@ -0,0 +1,287 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.sampler.window;
+
+import me.lucko.spark.common.SparkPlatform;
+import me.lucko.spark.common.monitor.cpu.CpuMonitor;
+import me.lucko.spark.common.monitor.tick.TickStatistics;
+import me.lucko.spark.common.platform.world.AsyncWorldInfoProvider;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
+import me.lucko.spark.common.tick.TickHook;
+import me.lucko.spark.common.util.RollingAverage;
+import me.lucko.spark.proto.SparkProtos;
+
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.function.IntPredicate;
+
+/**
+ * Collects statistics for each profiling window.
+ */
+public class WindowStatisticsCollector {
+ private static final SparkProtos.WindowStatistics ZERO = SparkProtos.WindowStatistics.newBuilder().build();
+
+ /** The platform */
+ private final SparkPlatform platform;
+
+ /** Map of profiling window -> statistics */
+ private final Map<Integer, SparkProtos.WindowStatistics> stats;
+
+ private TickCounter tickCounter;
+
+ public WindowStatisticsCollector(SparkPlatform platform) {
+ this.platform = platform;
+ this.stats = new ConcurrentHashMap<>();
+ }
+
+ /**
+ * Indicates to the statistics collector that it should count the number
+ * of ticks in each window using the provided {@link TickHook}.
+ *
+ * @param hook the tick hook
+ */
+ public void startCountingTicks(TickHook hook) {
+ this.tickCounter = new NormalTickCounter(this.platform, hook);
+ }
+
+ /**
+ * Indicates to the statistics collector that it should count the number
+ * of ticks in each window, according to how many times the
+ * {@link ExplicitTickCounter#increment()} method is called.
+ *
+ * @param hook the tick hook
+ * @return the counter
+ */
+ public ExplicitTickCounter startCountingTicksExplicit(TickHook hook) {
+ ExplicitTickCounter counter = new ExplicitTickCounter(this.platform, hook);
+ this.tickCounter = counter;
+ return counter;
+ }
+
+ public void stop() {
+ if (this.tickCounter != null) {
+ this.tickCounter.stop();
+ }
+ }
+
+ /**
+ * Gets the total number of ticks that have passed between the time
+ * when the profiler started and stopped.
+ *
+ * <p>Importantly, note that this metric is different from the total number of ticks in a window
+ * (which is recorded by {@link SparkProtos.WindowStatistics#getTicks()}) or from the total number
+ * of observed ticks if the 'only-ticks-over' aggregator is being used
+ * (which is recorded by {@link SparkProtos.WindowStatistics#getTicks()}
+ * and {@link ExplicitTickCounter#getTotalCountedTicks()}).</p>
+ *
+ * @return the total number of ticks in the profile
+ */
+ public int getTotalTicks() {
+ return this.tickCounter == null ? -1 : this.tickCounter.getTotalTicks();
+ }
+
+ /**
+ * Measures statistics for the given window if none have been recorded yet.
+ *
+ * @param window the window
+ */
+ public void measureNow(int window) {
+ this.stats.computeIfAbsent(window, w -> measure());
+ }
+
+ /**
+ * Ensures that the exported map has statistics (even if they are zeroed) for all windows.
+ *
+ * @param windows the expected windows
+ */
+ public void ensureHasStatisticsForAllWindows(int[] windows) {
+ for (int window : windows) {
+ this.stats.computeIfAbsent(window, w -> ZERO);
+ }
+ }
+
+ public void pruneStatistics(IntPredicate predicate) {
+ this.stats.keySet().removeIf(predicate::test);
+ }
+
+ public Map<Integer, SparkProtos.WindowStatistics> export() {
+ return this.stats;
+ }
+
+ /**
+ * Measures current statistics, averaging over the last minute where possible (one window = one minute).
+ *
+ * @return the current statistics
+ */
+ private SparkProtos.WindowStatistics measure() {
+ SparkProtos.WindowStatistics.Builder builder = SparkProtos.WindowStatistics.newBuilder();
+
+ TickStatistics tickStatistics = this.platform.getTickStatistics();
+ if (tickStatistics != null) {
+ builder.setTps(tickStatistics.tps1Min());
+
+ RollingAverage mspt = tickStatistics.duration1Min();
+ if (mspt != null) {
+ builder.setMsptMedian(mspt.median());
+ builder.setMsptMax(mspt.max());
+ }
+ }
+
+ if (this.tickCounter != null) {
+ int ticks = this.tickCounter.getCountedTicksThisWindowAndReset();
+ builder.setTicks(ticks);
+ }
+
+ builder.setCpuProcess(CpuMonitor.processLoad1MinAvg());
+ builder.setCpuSystem(CpuMonitor.systemLoad1MinAvg());
+
+ try {
+ AsyncWorldInfoProvider worldInfoProvider = new AsyncWorldInfoProvider(this.platform, this.platform.getPlugin().createWorldInfoProvider());
+ WorldInfoProvider.CountsResult counts = worldInfoProvider.getCounts();
+ if (counts != null) {
+ builder.setPlayers(counts.players());
+ builder.setEntities(counts.entities());
+ builder.setTileEntities(counts.tileEntities());
+ builder.setChunks(counts.chunks());
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ return builder.build();
+ }
+
+ /**
+ * Responsible for counting the number of ticks in a profile/window.
+ */
+ public interface TickCounter {
+
+ /**
+ * Stop the counter.
+ */
+ void stop();
+
+ /**
+ * Get the total number of ticks.
+ *
+ * <p>See {@link WindowStatisticsCollector#getTotalTicks()} for a longer explanation
+ * of what this means exactly.</p>
+ *
+ * @return the total ticks
+ */
+ int getTotalTicks();
+
+ /**
+ * Gets the total number of ticks counted in the last window,
+ * and resets the counter to zero.
+ *
+ * @return the number of ticks counted since the last time this method was called
+ */
+ int getCountedTicksThisWindowAndReset();
+ }
+
+ private abstract static class BaseTickCounter implements TickCounter {
+ protected final SparkPlatform platform;
+ protected final TickHook tickHook;
+
+ /** The game tick when sampling first began */
+ private final int startTick;
+
+ /** The game tick when sampling stopped */
+ private int stopTick = -1;
+
+ BaseTickCounter(SparkPlatform platform, TickHook tickHook) {
+ this.platform = platform;
+ this.tickHook = tickHook;
+ this.startTick = this.tickHook.getCurrentTick();
+ }
+
+ @Override
+ public void stop() {
+ this.stopTick = this.tickHook.getCurrentTick();
+ }
+
+ @Override
+ public int getTotalTicks() {
+ if (this.startTick == -1) {
+ throw new IllegalStateException("start tick not recorded");
+ }
+ if (this.stopTick == -1) {
+ throw new IllegalStateException("stop tick not recorded");
+ }
+
+ return this.stopTick - this.startTick;
+ }
+ }
+
+ /**
+ * Counts the number of ticks in a window using a {@link TickHook}.
+ */
+ public static final class NormalTickCounter extends BaseTickCounter {
+ private int last;
+
+ NormalTickCounter(SparkPlatform platform, TickHook tickHook) {
+ super(platform, tickHook);
+ this.last = this.tickHook.getCurrentTick();
+ }
+
+ @Override
+ public int getCountedTicksThisWindowAndReset() {
+ synchronized (this) {
+ int now = this.tickHook.getCurrentTick();
+ int ticks = now - this.last;
+ this.last = now;
+ return ticks;
+ }
+ }
+ }
+
+ /**
+ * Counts the number of ticks in a window according to the number of times
+ * {@link #increment()} is called.
+ *
+ * Used by the {@link me.lucko.spark.common.sampler.java.TickedDataAggregator}.
+ */
+ public static final class ExplicitTickCounter extends BaseTickCounter {
+ private final AtomicInteger counted = new AtomicInteger();
+ private final AtomicInteger total = new AtomicInteger();
+
+ ExplicitTickCounter(SparkPlatform platform, TickHook tickHook) {
+ super(platform, tickHook);
+ }
+
+ public void increment() {
+ this.counted.incrementAndGet();
+ this.total.incrementAndGet();
+ }
+
+ public int getTotalCountedTicks() {
+ return this.total.get();
+ }
+
+ @Override
+ public int getCountedTicksThisWindowAndReset() {
+ return this.counted.getAndSet(0);
+ }
+ }
+
+}
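For illustration, a compact sketch of the delta-based counting performed by NormalTickCounter above; TickSource is a hypothetical stand-in for TickHook.

    import java.util.concurrent.atomic.AtomicInteger;

    public class TickDeltaSketch {
        interface TickSource { int currentTick(); }

        static final class WindowTickCounter {
            private final TickSource source;
            private int last;

            WindowTickCounter(TickSource source) {
                this.source = source;
                this.last = source.currentTick();
            }

            // returns the ticks elapsed since the previous call
            synchronized int countAndReset() {
                int now = this.source.currentTick();
                int ticks = now - this.last;
                this.last = now;
                return ticks;
            }
        }

        public static void main(String[] args) {
            AtomicInteger clock = new AtomicInteger();
            WindowTickCounter counter = new WindowTickCounter(clock::get);

            clock.addAndGet(1200);                        // a full minute at 20 tps
            System.out.println(counter.countAndReset());  // 1200
            clock.addAndGet(900);                         // a lagging window
            System.out.println(counter.countAndReset());  // 900
        }
    }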
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/BytebinClient.java b/spark-common/src/main/java/me/lucko/spark/common/util/BytebinClient.java
index c2ca1b1..e69b94e 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/util/BytebinClient.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/BytebinClient.java
@@ -22,73 +22,66 @@ package me.lucko.spark.common.util;
import com.google.protobuf.AbstractMessageLite;
-import okhttp3.MediaType;
-import okhttp3.OkHttpClient;
-import okhttp3.Request;
-import okhttp3.RequestBody;
-import okhttp3.Response;
-
-import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
+import java.net.HttpURLConnection;
+import java.net.URL;
+import java.util.concurrent.TimeUnit;
+import java.util.function.Consumer;
import java.util.zip.GZIPOutputStream;
/**
* Utility for posting content to bytebin.
*/
-public class BytebinClient extends AbstractHttpClient {
+public class BytebinClient {
/** The bytebin URL */
private final String url;
/** The client user agent */
private final String userAgent;
- /**
- * Creates a new bytebin instance
- *
- * @param url the bytebin url
- * @param userAgent the client user agent string
- */
- public BytebinClient(OkHttpClient okHttpClient, String url, String userAgent) {
- super(okHttpClient);
+ public BytebinClient(String url, String userAgent) {
this.url = url + (url.endsWith("/") ? "" : "/");
this.userAgent = userAgent;
}
- /**
- * POSTs GZIP compressed content to bytebin.
- *
- * @param buf the compressed content
- * @param contentType the type of the content
- * @return the key of the resultant content
- * @throws IOException if an error occurs
- */
- public Content postContent(byte[] buf, MediaType contentType) throws IOException {
- RequestBody body = RequestBody.create(contentType, buf);
+ private Content postContent(String contentType, Consumer<OutputStream> consumer) throws IOException {
+ URL url = new URL(this.url + "post");
+ HttpURLConnection connection = (HttpURLConnection) url.openConnection();
+ try {
+ connection.setConnectTimeout((int) TimeUnit.SECONDS.toMillis(10));
+ connection.setReadTimeout((int) TimeUnit.SECONDS.toMillis(10));
+
+ connection.setDoOutput(true);
+ connection.setRequestMethod("POST");
+ connection.setRequestProperty("Content-Type", contentType);
+ connection.setRequestProperty("User-Agent", this.userAgent);
+ connection.setRequestProperty("Content-Encoding", "gzip");
- Request.Builder requestBuilder = new Request.Builder()
- .url(this.url + "post")
- .header("User-Agent", this.userAgent)
- .header("Content-Encoding", "gzip");
+ connection.connect();
+ try (OutputStream output = connection.getOutputStream()) {
+ consumer.accept(output);
+ }
- Request request = requestBuilder.post(body).build();
- try (Response response = makeHttpRequest(request)) {
- String key = response.header("Location");
+ String key = connection.getHeaderField("Location");
if (key == null) {
throw new IllegalStateException("Key not returned");
}
return new Content(key);
+ } finally {
+ connection.getInputStream().close();
+ connection.disconnect();
}
}
- public Content postContent(AbstractMessageLite<?, ?> proto, MediaType contentType) throws IOException {
- ByteArrayOutputStream byteOut = new ByteArrayOutputStream();
- try (OutputStream out = new GZIPOutputStream(byteOut)) {
- proto.writeTo(out);
- } catch (IOException e) {
- throw new RuntimeException(e);
- }
- return postContent(byteOut.toByteArray(), contentType);
+ public Content postContent(AbstractMessageLite<?, ?> proto, String contentType) throws IOException {
+ return postContent(contentType, outputStream -> {
+ try (OutputStream out = new GZIPOutputStream(outputStream)) {
+ proto.writeTo(out);
+ } catch (IOException e) {
+ throw new RuntimeException(e);
+ }
+ });
}
public static final class Content {
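The swap from OkHttp to HttpURLConnection above follows a standard JDK pattern; here is a self-contained sketch of it (the host and payload are placeholders, not spark's real endpoint).

    import java.io.IOException;
    import java.io.OutputStream;
    import java.net.HttpURLConnection;
    import java.net.URL;
    import java.nio.charset.StandardCharsets;
    import java.util.zip.GZIPOutputStream;

    public class GzipPostSketch {
        public static void main(String[] args) throws IOException {
            URL url = new URL("https://bytebin.example/post"); // placeholder host
            HttpURLConnection connection = (HttpURLConnection) url.openConnection();
            try {
                connection.setConnectTimeout(10_000);
                connection.setReadTimeout(10_000);
                connection.setDoOutput(true);
                connection.setRequestMethod("POST");
                connection.setRequestProperty("Content-Type", "text/plain");
                connection.setRequestProperty("Content-Encoding", "gzip");

                connection.connect();
                // stream the gzip-compressed body directly to the connection
                try (OutputStream out = new GZIPOutputStream(connection.getOutputStream())) {
                    out.write("hello".getBytes(StandardCharsets.UTF_8));
                }

                // bytebin returns the content key in the Location header
                String key = connection.getHeaderField("Location");
                System.out.println("stored under key: " + key);
            } finally {
                connection.getInputStream().close();
                connection.disconnect();
            }
        }
    }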
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/ClassSourceLookup.java b/spark-common/src/main/java/me/lucko/spark/common/util/ClassSourceLookup.java
deleted file mode 100644
index bd9ec37..0000000
--- a/spark-common/src/main/java/me/lucko/spark/common/util/ClassSourceLookup.java
+++ /dev/null
@@ -1,241 +0,0 @@
-/*
- * This file is part of spark.
- *
- * Copyright (c) lucko (Luck) <luck@lucko.me>
- * Copyright (c) contributors
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-package me.lucko.spark.common.util;
-
-import me.lucko.spark.common.sampler.node.StackTraceNode;
-import me.lucko.spark.common.sampler.node.ThreadNode;
-
-import org.checkerframework.checker.nullness.qual.Nullable;
-
-import java.io.IOException;
-import java.net.MalformedURLException;
-import java.net.URISyntaxException;
-import java.net.URL;
-import java.net.URLClassLoader;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.security.CodeSource;
-import java.security.ProtectionDomain;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Objects;
-
-/**
- * A function which defines the source of given {@link Class}es.
- */
-public interface ClassSourceLookup {
-
- /**
- * Identify the given class.
- *
- * @param clazz the class
- * @return the source of the class
- */
- @Nullable String identify(Class<?> clazz) throws Exception;
-
- /**
- * A no-operation {@link ClassSourceLookup}.
- */
- ClassSourceLookup NO_OP = new ClassSourceLookup() {
- @Override
- public @Nullable String identify(Class<?> clazz) {
- return null;
- }
- };
-
- /**
- * A {@link ClassSourceLookup} which identifies classes based on their {@link ClassLoader}.
- */
- abstract class ByClassLoader implements ClassSourceLookup {
-
- public abstract @Nullable String identify(ClassLoader loader) throws Exception;
-
- @Override
- public final @Nullable String identify(Class<?> clazz) throws Exception {
- ClassLoader loader = clazz.getClassLoader();
- while (loader != null) {
- String source = identify(loader);
- if (source != null) {
- return source;
- }
- loader = loader.getParent();
- }
- return null;
- }
- }
-
- /**
- * A {@link ClassSourceLookup} which identifies classes based on URL.
- */
- interface ByUrl extends ClassSourceLookup {
-
- default String identifyUrl(URL url) throws URISyntaxException, MalformedURLException {
- Path path = null;
-
- String protocol = url.getProtocol();
- if (protocol.equals("file")) {
- path = Paths.get(url.toURI());
- } else if (protocol.equals("jar")) {
- URL innerUrl = new URL(url.getPath());
- path = Paths.get(innerUrl.getPath().split("!")[0]);
- }
-
- if (path != null) {
- return identifyFile(path.toAbsolutePath().normalize());
- }
-
- return null;
- }
-
- default String identifyFile(Path path) {
- return identifyFileName(path.getFileName().toString());
- }
-
- default String identifyFileName(String fileName) {
- return fileName.endsWith(".jar") ? fileName.substring(0, fileName.length() - 4) : null;
- }
- }
-
- /**
- * A {@link ClassSourceLookup} which identifies classes based on the first URL in a {@link URLClassLoader}.
- */
- class ByFirstUrlSource extends ByClassLoader implements ByUrl {
- @Override
- public @Nullable String identify(ClassLoader loader) throws IOException, URISyntaxException {
- if (loader instanceof URLClassLoader) {
- URLClassLoader urlClassLoader = (URLClassLoader) loader;
- URL[] urls = urlClassLoader.getURLs();
- if (urls.length == 0) {
- return null;
- }
- return identifyUrl(urls[0]);
- }
- return null;
- }
- }
-
- /**
- * A {@link ClassSourceLookup} which identifies classes based on their {@link ProtectionDomain#getCodeSource()}.
- */
- class ByCodeSource implements ClassSourceLookup, ByUrl {
- @Override
- public @Nullable String identify(Class<?> clazz) throws URISyntaxException, MalformedURLException {
- ProtectionDomain protectionDomain = clazz.getProtectionDomain();
- if (protectionDomain == null) {
- return null;
- }
- CodeSource codeSource = protectionDomain.getCodeSource();
- if (codeSource == null) {
- return null;
- }
-
- URL url = codeSource.getLocation();
- return url == null ? null : identifyUrl(url);
- }
- }
-
- interface Visitor {
- void visit(ThreadNode node);
-
- boolean hasMappings();
-
- Map<String, String> getMapping();
- }
-
- static Visitor createVisitor(ClassSourceLookup lookup) {
- if (lookup == ClassSourceLookup.NO_OP) {
- return NoOpVisitor.INSTANCE; // don't bother!
- }
- return new VisitorImpl(lookup);
- }
-
- enum NoOpVisitor implements Visitor {
- INSTANCE;
-
- @Override
- public void visit(ThreadNode node) {
-
- }
-
- @Override
- public boolean hasMappings() {
- return false;
- }
-
- @Override
- public Map<String, String> getMapping() {
- return Collections.emptyMap();
- }
- }
-
- /**
- * Visitor which scans {@link StackTraceNode}s and accumulates class identities.
- */
- class VisitorImpl implements Visitor {
- private final ClassSourceLookup lookup;
- private final ClassFinder classFinder = new ClassFinder();
-
- // class name --> identifier (plugin name)
- private final Map<String, String> map = new HashMap<>();
-
- VisitorImpl(ClassSourceLookup lookup) {
- this.lookup = lookup;
- }
-
- @Override
- public void visit(ThreadNode node) {
- for (StackTraceNode child : node.getChildren()) {
- visitStackNode(child);
- }
- }
-
- @Override
- public boolean hasMappings() {
- return !this.map.isEmpty();
- }
-
- @Override
- public Map<String, String> getMapping() {
- this.map.values().removeIf(Objects::isNull);
- return this.map;
- }
-
- private void visitStackNode(StackTraceNode node) {
- String className = node.getClassName();
- if (!this.map.containsKey(className)) {
- try {
- Class<?> clazz = this.classFinder.findClass(className);
- Objects.requireNonNull(clazz);
- this.map.put(className, this.lookup.identify(clazz));
- } catch (Throwable e) {
- this.map.put(className, null);
- }
- }
-
- // recursively
- for (StackTraceNode child : node.getChildren()) {
- visitStackNode(child);
- }
- }
- }
-
-}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/Compression.java b/spark-common/src/main/java/me/lucko/spark/common/util/Compression.java
index 9295c25..c8100e1 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/util/Compression.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/Compression.java
@@ -20,10 +20,6 @@
package me.lucko.spark.common.util;
-import org.tukaani.xz.LZMA2Options;
-import org.tukaani.xz.LZMAOutputStream;
-import org.tukaani.xz.XZOutputStream;
-
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
@@ -46,35 +42,35 @@ public enum Compression {
}
return compressedFile;
}
- },
- XZ {
- @Override
- public Path compress(Path file, LongConsumer progressHandler) throws IOException {
- Path compressedFile = file.getParent().resolve(file.getFileName().toString() + ".xz");
- try (InputStream in = Files.newInputStream(file)) {
- try (OutputStream out = Files.newOutputStream(compressedFile)) {
- try (XZOutputStream compressionOut = new XZOutputStream(out, new LZMA2Options())) {
- copy(in, compressionOut, progressHandler);
- }
- }
- }
- return compressedFile;
- }
- },
- LZMA {
- @Override
- public Path compress(Path file, LongConsumer progressHandler) throws IOException {
- Path compressedFile = file.getParent().resolve(file.getFileName().toString() + ".lzma");
- try (InputStream in = Files.newInputStream(file)) {
- try (OutputStream out = Files.newOutputStream(compressedFile)) {
- try (LZMAOutputStream compressionOut = new LZMAOutputStream(out, new LZMA2Options(), true)) {
- copy(in, compressionOut, progressHandler);
- }
- }
- }
- return compressedFile;
- }
};
+ // XZ {
+ // @Override
+ // public Path compress(Path file, LongConsumer progressHandler) throws IOException {
+ // Path compressedFile = file.getParent().resolve(file.getFileName().toString() + ".xz");
+ // try (InputStream in = Files.newInputStream(file)) {
+ // try (OutputStream out = Files.newOutputStream(compressedFile)) {
+ // try (XZOutputStream compressionOut = new XZOutputStream(out, new LZMA2Options())) {
+ // copy(in, compressionOut, progressHandler);
+ // }
+ // }
+ // }
+ // return compressedFile;
+ // }
+ // },
+ // LZMA {
+ // @Override
+ // public Path compress(Path file, LongConsumer progressHandler) throws IOException {
+ // Path compressedFile = file.getParent().resolve(file.getFileName().toString() + ".lzma");
+ // try (InputStream in = Files.newInputStream(file)) {
+ // try (OutputStream out = Files.newOutputStream(compressedFile)) {
+ // try (LZMAOutputStream compressionOut = new LZMAOutputStream(out, new LZMA2Options(), true)) {
+ // copy(in, compressionOut, progressHandler);
+ // }
+ // }
+ // }
+ // return compressedFile;
+ // }
+ // };
public abstract Path compress(Path file, LongConsumer progressHandler) throws IOException;
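For reference, a sketch of the copy-with-progress pattern behind the surviving GZIP variant; the buffered copy loop here is an assumed stand-in for the enum's private copy helper.

    import java.io.IOException;
    import java.io.InputStream;
    import java.io.OutputStream;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.util.function.LongConsumer;
    import java.util.zip.GZIPOutputStream;

    public class GzipCompressSketch {
        static Path compress(Path file, LongConsumer progressHandler) throws IOException {
            Path compressedFile = file.getParent().resolve(file.getFileName() + ".gz");
            try (InputStream in = Files.newInputStream(file);
                 OutputStream out = Files.newOutputStream(compressedFile);
                 OutputStream compressionOut = new GZIPOutputStream(out)) {
                byte[] buf = new byte[8192];
                long total = 0;
                int read;
                while ((read = in.read(buf)) != -1) {
                    compressionOut.write(buf, 0, read);
                    total += read;
                    progressHandler.accept(total); // bytes consumed so far
                }
            }
            return compressedFile;
        }
    }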
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/Configuration.java b/spark-common/src/main/java/me/lucko/spark/common/util/Configuration.java
index 7588645..32f3bc6 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/util/Configuration.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/Configuration.java
@@ -20,32 +20,58 @@
package me.lucko.spark.common.util;
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
-import com.google.gson.JsonParser;
import com.google.gson.JsonPrimitive;
import java.io.BufferedReader;
+import java.io.BufferedWriter;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
public final class Configuration {
- private static final JsonParser PARSER = new JsonParser();
+ private static final Gson GSON = new GsonBuilder().setPrettyPrinting().create();
- private final JsonObject root;
+ private final Path file;
+ private JsonObject root;
public Configuration(Path file) {
+ this.file = file;
+ load();
+ }
+
+ public void load() {
JsonObject root = null;
- if (Files.exists(file)) {
- try (BufferedReader reader = Files.newBufferedReader(file, StandardCharsets.UTF_8)) {
- root = PARSER.parse(reader).getAsJsonObject();
+ if (Files.exists(this.file)) {
+ try (BufferedReader reader = Files.newBufferedReader(this.file, StandardCharsets.UTF_8)) {
+ root = GSON.fromJson(reader, JsonObject.class);
} catch (IOException e) {
e.printStackTrace();
}
}
- this.root = root != null ? root : new JsonObject();
+ if (root == null) {
+ root = new JsonObject();
+ root.addProperty("_header", "spark configuration file - https://spark.lucko.me/docs/Configuration");
+ }
+ this.root = root;
+ }
+
+ public void save() {
+ try {
+ Files.createDirectories(this.file.getParent());
+ } catch (IOException e) {
+ // ignore
+ }
+
+ try (BufferedWriter writer = Files.newBufferedWriter(this.file, StandardCharsets.UTF_8)) {
+ GSON.toJson(this.root, writer);
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
}
public String getString(String path, String def) {
@@ -67,4 +93,34 @@ public final class Configuration {
return val.isBoolean() ? val.getAsBoolean() : def;
}
+ public int getInteger(String path, int def) {
+ JsonElement el = this.root.get(path);
+ if (el == null || !el.isJsonPrimitive()) {
+ return def;
+ }
+
+ JsonPrimitive val = el.getAsJsonPrimitive();
+ return val.isNumber() ? val.getAsInt() : def;
+ }
+
+ public void setString(String path, String value) {
+ this.root.add(path, new JsonPrimitive(value));
+ }
+
+ public void setBoolean(String path, boolean value) {
+ this.root.add(path, new JsonPrimitive(value));
+ }
+
+ public void setInteger(String path, int value) {
+ this.root.add(path, new JsonPrimitive(value));
+ }
+
+ public boolean contains(String path) {
+ return this.root.has(path);
+ }
+
+ public void remove(String path) {
+ this.root.remove(path);
+ }
+
}
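A hypothetical usage sketch of the reworked Configuration class, exercising the new read-modify-save cycle; the key names are invented for illustration.

    import me.lucko.spark.common.util.Configuration;

    import java.nio.file.Paths;

    public class ConfigUsageSketch {
        public static void main(String[] args) {
            Configuration config = new Configuration(Paths.get("plugins", "spark", "config.json"));

            boolean firstRun = !config.contains("backgroundProfiler"); // invented key
            config.setBoolean("backgroundProfiler", true);
            config.setInteger("maxSamples", config.getInteger("maxSamples", 1000));
            config.save(); // persists with pretty-printing via the shared Gson instance

            System.out.println("first run: " + firstRun);
        }
    }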
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/FormatUtil.java b/spark-common/src/main/java/me/lucko/spark/common/util/FormatUtil.java
index c4a3d66..1ee3b0f 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/util/FormatUtil.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/FormatUtil.java
@@ -62,4 +62,24 @@ public enum FormatUtil {
.append(Component.text(unit))
.build();
}
+
+ public static String formatSeconds(long seconds) {
+ if (seconds <= 0) {
+ return "0s";
+ }
+
+ long second = seconds;
+ long minute = second / 60;
+ second = second % 60;
+
+ StringBuilder sb = new StringBuilder();
+ if (minute != 0) {
+ sb.append(minute).append("m ");
+ }
+ if (second != 0) {
+ sb.append(second).append("s ");
+ }
+
+ return sb.toString().trim();
+ }
}
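A quick check of formatSeconds against the implementation above (a sketch, assuming the method is reachable from the caller's package):

    import me.lucko.spark.common.util.FormatUtil;

    public class FormatSecondsSketch {
        public static void main(String[] args) {
            System.out.println(FormatUtil.formatSeconds(0));   // "0s"
            System.out.println(FormatUtil.formatSeconds(59));  // "59s"
            System.out.println(FormatUtil.formatSeconds(60));  // "1m"
            System.out.println(FormatUtil.formatSeconds(125)); // "2m 5s"
        }
    }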
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/AbstractHttpClient.java b/spark-common/src/main/java/me/lucko/spark/common/util/IndexedListBuilder.java
index 8ece3d4..b2315f9 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/util/AbstractHttpClient.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/IndexedListBuilder.java
@@ -20,27 +20,24 @@
package me.lucko.spark.common.util;
-import okhttp3.OkHttpClient;
-import okhttp3.Request;
-import okhttp3.Response;
+import java.util.ArrayList;
+import java.util.List;
-import java.io.IOException;
-
-public class AbstractHttpClient {
-
- /** The http client */
- protected final OkHttpClient okHttp;
+/**
+ * List builder that returns the index of the inserted element.
+ *
+ * @param <T> the element type
+ */
+public class IndexedListBuilder<T> {
+ private int i = 0;
+ private final List<T> nodes = new ArrayList<>();
- public AbstractHttpClient(OkHttpClient okHttp) {
- this.okHttp = okHttp;
+ public int add(T node) {
+ this.nodes.add(node);
+ return this.i++;
}
- protected Response makeHttpRequest(Request request) throws IOException {
- Response response = this.okHttp.newCall(request).execute();
- if (!response.isSuccessful()) {
- response.close();
- throw new RuntimeException("Request was unsuccessful: " + response.code() + " - " + response.message());
- }
- return response;
+ public List<T> build() {
+ return this.nodes;
}
}
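A small usage sketch of IndexedListBuilder: add returns the position of the inserted element, the kind of index that the children_refs fields added to the sampler proto later in this patch refer to.

    import me.lucko.spark.common.util.IndexedListBuilder;

    import java.util.List;

    public class IndexedListSketch {
        public static void main(String[] args) {
            IndexedListBuilder<String> builder = new IndexedListBuilder<>();
            int a = builder.add("nodeA"); // 0
            int b = builder.add("nodeB"); // 1

            List<String> nodes = builder.build();
            System.out.println(nodes.get(b)); // "nodeB"
        }
    }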
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/SparkPlaceholder.java b/spark-common/src/main/java/me/lucko/spark/common/util/SparkPlaceholder.java
new file mode 100644
index 0000000..be5bbc2
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/SparkPlaceholder.java
@@ -0,0 +1,191 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.util;
+
+import me.lucko.spark.common.SparkPlatform;
+import me.lucko.spark.common.monitor.cpu.CpuMonitor;
+import me.lucko.spark.common.monitor.tick.TickStatistics;
+
+import net.kyori.adventure.text.Component;
+import net.kyori.adventure.text.TextComponent;
+import net.kyori.adventure.text.serializer.legacy.LegacyComponentSerializer;
+
+import java.util.Locale;
+import java.util.function.BiFunction;
+
+public enum SparkPlaceholder {
+
+ TPS((platform, arg) -> {
+ TickStatistics tickStatistics = platform.getTickStatistics();
+ if (tickStatistics == null) {
+ return null;
+ }
+
+ if (arg == null) {
+ return Component.text()
+ .append(StatisticFormatter.formatTps(tickStatistics.tps5Sec())).append(Component.text(", "))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps10Sec())).append(Component.text(", "))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps1Min())).append(Component.text(", "))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps5Min())).append(Component.text(", "))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps15Min()))
+ .build();
+ }
+
+ switch (arg) {
+ case "5s":
+ return StatisticFormatter.formatTps(tickStatistics.tps5Sec());
+ case "10s":
+ return StatisticFormatter.formatTps(tickStatistics.tps10Sec());
+ case "1m":
+ return StatisticFormatter.formatTps(tickStatistics.tps1Min());
+ case "5m":
+ return StatisticFormatter.formatTps(tickStatistics.tps5Min());
+ case "15m":
+ return StatisticFormatter.formatTps(tickStatistics.tps15Min());
+ }
+
+ return null;
+ }),
+
+ TICKDURATION((platform, arg) -> {
+ TickStatistics tickStatistics = platform.getTickStatistics();
+ if (tickStatistics == null || !tickStatistics.isDurationSupported()) {
+ return null;
+ }
+
+ if (arg == null) {
+ return Component.text()
+ .append(StatisticFormatter.formatTickDurations(tickStatistics.duration10Sec())).append(Component.text("; "))
+ .append(StatisticFormatter.formatTickDurations(tickStatistics.duration1Min()))
+ .build();
+ }
+
+ switch (arg) {
+ case "10s":
+ return StatisticFormatter.formatTickDurations(tickStatistics.duration10Sec());
+ case "1m":
+ return StatisticFormatter.formatTickDurations(tickStatistics.duration1Min());
+ }
+
+ return null;
+ }),
+
+ CPU_SYSTEM((platform, arg) -> {
+ if (arg == null) {
+ return Component.text()
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad10SecAvg())).append(Component.text(", "))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad1MinAvg())).append(Component.text(", "))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad15MinAvg()))
+ .build();
+ }
+
+ switch (arg) {
+ case "10s":
+ return StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad10SecAvg());
+ case "1m":
+ return StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad1MinAvg());
+ case "15m":
+ return StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad15MinAvg());
+ }
+
+ return null;
+ }),
+
+ CPU_PROCESS((platform, arg) -> {
+ if (arg == null) {
+ return Component.text()
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad10SecAvg())).append(Component.text(", "))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad1MinAvg())).append(Component.text(", "))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad15MinAvg()))
+ .build();
+ }
+
+ switch (arg) {
+ case "10s":
+ return StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad10SecAvg());
+ case "1m":
+ return StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad1MinAvg());
+ case "15m":
+ return StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad15MinAvg());
+ }
+
+ return null;
+ });
+
+ private final String name;
+ private final BiFunction<SparkPlatform, String, TextComponent> function;
+
+ SparkPlaceholder(BiFunction<SparkPlatform, String, TextComponent> function) {
+ this.name = name().toLowerCase(Locale.ROOT);
+ this.function = function;
+ }
+
+ public String getName() {
+ return this.name;
+ }
+
+ public TextComponent resolve(SparkPlatform platform, String arg) {
+ return this.function.apply(platform, arg);
+ }
+
+ public static TextComponent resolveComponent(SparkPlatform platform, String placeholder) {
+ String[] parts = placeholder.split("_");
+
+ if (parts.length == 0) {
+ return null;
+ }
+
+ String label = parts[0];
+
+ if (label.equals("tps")) {
+ String arg = parts.length < 2 ? null : parts[1];
+ return TPS.resolve(platform, arg);
+ }
+
+ if (label.equals("tickduration")) {
+ String arg = parts.length < 2 ? null : parts[1];
+ return TICKDURATION.resolve(platform, arg);
+ }
+
+ if (label.equals("cpu") && parts.length >= 2) {
+ String type = parts[1];
+ String arg = parts.length < 3 ? null : parts[2];
+
+ if (type.equals("system")) {
+ return CPU_SYSTEM.resolve(platform, arg);
+ }
+ if (type.equals("process")) {
+ return CPU_PROCESS.resolve(platform, arg);
+ }
+ }
+
+ return null;
+ }
+
+ public static String resolveFormattingCode(SparkPlatform platform, String placeholder) {
+ TextComponent result = resolveComponent(platform, placeholder);
+ if (result == null) {
+ return null;
+ }
+ return LegacyComponentSerializer.legacySection().serialize(result);
+ }
+
+}
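Hypothetical resolution examples for the placeholder parser above; the SparkPlatform instance would come from the running plugin.

    import me.lucko.spark.common.SparkPlatform;
    import me.lucko.spark.common.util.SparkPlaceholder;

    public class PlaceholderSketch {
        static void demo(SparkPlatform platform) {
            // "tps" with no argument: all five averages, comma-separated
            String all = SparkPlaceholder.resolveFormattingCode(platform, "tps");

            // "tps_5s": just the 5-second average
            String fiveSec = SparkPlaceholder.resolveFormattingCode(platform, "tps_5s");

            // "cpu_system_1m": system CPU load over the last minute
            String cpu = SparkPlaceholder.resolveFormattingCode(platform, "cpu_system_1m");

            System.out.println(all + " / " + fiveSec + " / " + cpu);
        }
    }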
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/TemporaryFiles.java b/spark-common/src/main/java/me/lucko/spark/common/util/TemporaryFiles.java
index 8a4a621..91a474c 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/util/TemporaryFiles.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/TemporaryFiles.java
@@ -20,10 +20,18 @@
package me.lucko.spark.common.util;
+import com.google.common.collect.ImmutableList;
+
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.Path;
+import java.nio.file.attribute.FileAttribute;
+import java.nio.file.attribute.PosixFilePermission;
+import java.nio.file.attribute.PosixFilePermissions;
import java.util.Collections;
+import java.util.EnumSet;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
@@ -32,23 +40,47 @@ import java.util.Set;
* Utility for handling temporary files.
*/
public final class TemporaryFiles {
- private TemporaryFiles() {}
- private static final Set<Path> DELETE_SET = Collections.synchronizedSet(new HashSet<>());
+ public static final FileAttribute<?>[] OWNER_ONLY_FILE_PERMISSIONS;
+
+ static {
+ boolean isPosix = FileSystems.getDefault().supportedFileAttributeViews().contains("posix");
+ if (isPosix) {
+ OWNER_ONLY_FILE_PERMISSIONS = new FileAttribute[]{PosixFilePermissions.asFileAttribute(EnumSet.of(
+ PosixFilePermission.OWNER_READ, PosixFilePermission.OWNER_WRITE
+ ))};
+ } else {
+ OWNER_ONLY_FILE_PERMISSIONS = new FileAttribute[0];
+ }
+ }
+
+ private final Path tmpDirectory;
+ private final Set<Path> files = Collections.synchronizedSet(new HashSet<>());
- public static Path create(String prefix, String suffix) throws IOException {
- return register(Files.createTempFile(prefix, suffix));
+ public TemporaryFiles(Path tmpDirectory) {
+ this.tmpDirectory = tmpDirectory;
}
- public static Path register(Path path) {
+ public Path create(String prefix, String suffix) throws IOException {
+ Path file;
+ if (ensureDirectoryIsReady()) {
+ String name = prefix + Long.toHexString(System.nanoTime()) + suffix;
+ file = Files.createFile(this.tmpDirectory.resolve(name), OWNER_ONLY_FILE_PERMISSIONS);
+ } else {
+ file = Files.createTempFile(prefix, suffix);
+ }
+ return register(file);
+ }
+
+ public Path register(Path path) {
path.toFile().deleteOnExit();
- DELETE_SET.add(path);
+ this.files.add(path);
return path;
}
- public static void deleteTemporaryFiles() {
- synchronized (DELETE_SET) {
- for (Iterator<Path> iterator = DELETE_SET.iterator(); iterator.hasNext(); ) {
+ public void deleteTemporaryFiles() {
+ synchronized (this.files) {
+ for (Iterator<Path> iterator = this.files.iterator(); iterator.hasNext(); ) {
Path path = iterator.next();
try {
Files.deleteIfExists(path);
@@ -60,4 +92,35 @@ public final class TemporaryFiles {
}
}
+ private boolean ensureDirectoryIsReady() {
+ if (Boolean.parseBoolean(System.getProperty("spark.useOsTmpDir", "false"))) {
+ return false;
+ }
+
+ if (Files.isDirectory(this.tmpDirectory)) {
+ return true;
+ }
+
+ try {
+ Files.createDirectories(this.tmpDirectory);
+
+ Files.write(this.tmpDirectory.resolve("about.txt"), ImmutableList.of(
+ "# What is this directory?",
+ "",
+ "* In order to perform certain functions, spark sometimes needs to write temporary data to the disk. ",
+ "* Previously, a temporary directory provided by the operating system was used for this purpose. ",
+ "* However, this proved to be unreliable in some circumstances, so spark now stores temporary data here instead!",
+ "",
+ "spark will automatically cleanup the contents of this directory. " ,
+ "(but if for some reason it doesn't, if the server is stopped, you can freely delete any files ending in .tmp)",
+ "",
+ "tl;dr: spark uses this folder to store some temporary data."
+ ), StandardCharsets.UTF_8);
+
+ return true;
+ } catch (IOException e) {
+ return false;
+ }
+ }
+
}
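A usage sketch of the now instance-based TemporaryFiles; the directory path is illustrative.

    import me.lucko.spark.common.util.TemporaryFiles;

    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;

    public class TemporaryFilesSketch {
        public static void main(String[] args) throws IOException {
            TemporaryFiles tmp = new TemporaryFiles(Paths.get("plugins", "spark", "tmp"));

            Path file = tmp.create("spark-", "-heap.tmp"); // owner-only perms on POSIX
            Files.write(file, new byte[]{1, 2, 3});

            // later, e.g. on plugin shutdown:
            tmp.deleteTemporaryFiles();
        }
    }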
diff --git a/spark-common/src/main/proto/spark/spark.proto b/spark-common/src/main/proto/spark/spark.proto
index ec0aa88..f61e585 100644
--- a/spark-common/src/main/proto/spark/spark.proto
+++ b/spark-common/src/main/proto/spark/spark.proto
@@ -94,7 +94,8 @@ message PlatformStatistics {
Tps tps = 4; // optional
Mspt mspt = 5; // optional
Ping ping = 6; // optional
- int64 player_count = 7;
+ int64 player_count = 7; // optional
+ WorldStatistics world = 8; // optional
message Memory {
MemoryPool heap = 1;
@@ -127,6 +128,45 @@ message PlatformStatistics {
}
}
+message WorldStatistics {
+ int32 total_entities = 1;
+ map<string, int32> entity_counts = 2;
+ repeated World worlds = 3;
+
+ message World {
+ string name = 1;
+ int32 total_entities = 2;
+ repeated Region regions = 3;
+ }
+
+ message Region {
+ int32 total_entities = 1;
+ repeated Chunk chunks = 2;
+ }
+
+ message Chunk {
+ int32 x = 1;
+ int32 z = 2;
+ int32 total_entities = 3;
+ map<string, int32> entity_counts = 4;
+ }
+}
+
+message WindowStatistics {
+ int32 ticks = 1;
+ double cpu_process = 2;
+ double cpu_system = 3;
+ double tps = 4;
+ double mspt_median = 5;
+ double mspt_max = 6;
+
+ // world
+ int32 players = 7;
+ int32 entities = 8;
+ int32 tile_entities = 9;
+ int32 chunks = 10;
+}
+
message RollingAverageValues {
double mean = 1;
double max = 2;
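To show how the new WindowStatistics message is populated from Java, a sketch mirroring WindowStatisticsCollector#measure earlier in this patch; the values are made up.

    import me.lucko.spark.proto.SparkProtos;

    public class WindowStatsSketch {
        public static void main(String[] args) {
            SparkProtos.WindowStatistics stats = SparkProtos.WindowStatistics.newBuilder()
                    .setTicks(1200)       // ticks counted in this window
                    .setCpuProcess(0.15)  // 1-minute process CPU average
                    .setCpuSystem(0.40)   // 1-minute system CPU average
                    .setTps(19.8)
                    .setMsptMedian(12.5)
                    .setMsptMax(48.0)
                    .setPlayers(17)
                    .setEntities(4213)
                    .setTileEntities(902)
                    .setChunks(1337)
                    .build();

            System.out.println(stats.getTps()); // 19.8
        }
    }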
diff --git a/spark-common/src/main/proto/spark/spark_sampler.proto b/spark-common/src/main/proto/spark/spark_sampler.proto
index 51bdd64..245da37 100644
--- a/spark-common/src/main/proto/spark/spark_sampler.proto
+++ b/spark-common/src/main/proto/spark/spark_sampler.proto
@@ -11,6 +11,10 @@ message SamplerData {
SamplerMetadata metadata = 1;
repeated ThreadNode threads = 2;
map<string, string> class_sources = 3; // optional
+ map<string, string> method_sources = 4; // optional
+ map<string, string> line_sources = 5; // optional
+ repeated int32 time_windows = 6;
+ map<int32, WindowStatistics> time_window_statistics = 7;
}
message SamplerMetadata {
@@ -25,6 +29,9 @@ message SamplerMetadata {
SystemStatistics system_statistics = 9;
map<string, string> server_configurations = 10;
int64 end_time = 11;
+ int32 number_of_ticks = 12;
+ map<string, SourceMetadata> sources = 13;
+ map<string, string> extra_platform_metadata = 14;
message ThreadDumper {
Type type = 1;
@@ -42,6 +49,7 @@ message SamplerMetadata {
Type type = 1;
ThreadGrouper thread_grouper = 2;
int64 tick_length_threshold = 3; // optional
+ int32 number_of_included_ticks = 4; // optional
enum Type {
SIMPLE = 0;
@@ -54,20 +62,35 @@ message SamplerMetadata {
AS_ONE = 2;
}
}
+
+ message SourceMetadata {
+ string name = 1;
+ string version = 2;
+ }
}
message ThreadNode {
string name = 1;
- double time = 2;
+
+ // replaced
+ reserved 2;
+ reserved "time";
+
repeated StackTraceNode children = 3;
+ repeated double times = 4;
+ repeated int32 children_refs = 5;
}
message StackTraceNode {
- double time = 1;
- repeated StackTraceNode children = 2;
+ // replaced
+ reserved 1, 2;
+ reserved "time", "children";
+
string class_name = 3;
string method_name = 4;
int32 parent_line_number = 5; // optional
int32 line_number = 6; // optional
string method_desc = 7; // optional
+ repeated double times = 8;
+ repeated int32 children_refs = 9;
}
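The reserved time/children fields are superseded by a flattened layout: every node is appended to one global list and parents store child indices (children_refs) rather than nesting messages. An illustrative sketch of that flattening, with invented node types:

    import java.util.ArrayList;
    import java.util.List;

    public class FlattenSketch {
        static final class Node { // nested form
            final String name;
            final List<Node> children = new ArrayList<>();
            Node(String name) { this.name = name; }
        }

        static final class FlatNode { // flattened form
            final String name;
            final List<Integer> childrenRefs = new ArrayList<>();
            FlatNode(String name) { this.name = name; }
        }

        // appends node to out, records child indices, returns node's own index
        static int flatten(Node node, List<FlatNode> out) {
            FlatNode flat = new FlatNode(node.name);
            int index = out.size();
            out.add(flat);
            for (Node child : node.children) {
                flat.childrenRefs.add(flatten(child, out));
            }
            return index;
        }

        public static void main(String[] args) {
            Node root = new Node("root");
            Node a = new Node("a");
            root.children.add(a);
            a.children.add(new Node("b"));

            List<FlatNode> flat = new ArrayList<>();
            flatten(root, flat);
            System.out.println(flat.get(0).childrenRefs); // [1] -> "a"
            System.out.println(flat.get(1).childrenRefs); // [2] -> "b"
        }
    }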
diff --git a/spark-common/src/main/resources/spark/linux/aarch64/libasyncProfiler.so b/spark-common/src/main/resources/spark/linux/aarch64/libasyncProfiler.so
index 35f83b2..800cf91 100755
--- a/spark-common/src/main/resources/spark/linux/aarch64/libasyncProfiler.so
+++ b/spark-common/src/main/resources/spark/linux/aarch64/libasyncProfiler.so
Binary files differ
diff --git a/spark-common/src/main/resources/spark/linux/amd64-musl/libasyncProfiler.so b/spark-common/src/main/resources/spark/linux/amd64-musl/libasyncProfiler.so
new file mode 100755
index 0000000..3c81d1c
--- /dev/null
+++ b/spark-common/src/main/resources/spark/linux/amd64-musl/libasyncProfiler.so
Binary files differ
diff --git a/spark-common/src/main/resources/spark/linux/amd64/libasyncProfiler.so b/spark-common/src/main/resources/spark/linux/amd64/libasyncProfiler.so
index edbf103..5af5071 100755
--- a/spark-common/src/main/resources/spark/linux/amd64/libasyncProfiler.so
+++ b/spark-common/src/main/resources/spark/linux/amd64/libasyncProfiler.so
Binary files differ
diff --git a/spark-common/src/main/resources/spark/macos/libasyncProfiler.so b/spark-common/src/main/resources/spark/macos/libasyncProfiler.so
index ab818e9..4930c67 100755
--- a/spark-common/src/main/resources/spark/macos/libasyncProfiler.so
+++ b/spark-common/src/main/resources/spark/macos/libasyncProfiler.so
Binary files differ
diff --git a/spark-fabric/build.gradle b/spark-fabric/build.gradle
index 9da8e01..8b6d030 100644
--- a/spark-fabric/build.gradle
+++ b/spark-fabric/build.gradle
@@ -28,9 +28,9 @@ configurations {
dependencies {
// https://modmuss50.me/fabric.html
- minecraft 'com.mojang:minecraft:1.19'
- mappings 'net.fabricmc:yarn:1.19+build.1:v2'
- modImplementation 'net.fabricmc:fabric-loader:0.14.7'
+ minecraft 'com.mojang:minecraft:1.19.3'
+ mappings 'net.fabricmc:yarn:1.19.3+build.2:v2'
+ modImplementation 'net.fabricmc:fabric-loader:0.14.11'
Set<String> apiModules = [
"fabric-api-base",
@@ -40,12 +40,12 @@ dependencies {
// Add each module as a dependency
apiModules.forEach {
- modImplementation(fabricApi.module(it, '0.55.3+1.19'))
+ modImplementation(fabricApi.module(it, '0.68.1+1.19.3'))
}
include(modImplementation('me.lucko:fabric-permissions-api:0.1-SNAPSHOT'))
- modImplementation('eu.pb4:placeholder-api:1.1.1+1.17.1')
+ modImplementation('eu.pb4:placeholder-api:2.0.0-beta.4+1.19')
shade project(':spark-common')
}
@@ -66,23 +66,28 @@ processResources {
}
}
+license {
+ exclude '**/smap/SourceMap.java'
+}
+
shadowJar {
archiveFileName = "spark-fabric-${project.pluginVersion}-dev.jar"
configurations = [project.configurations.shade]
- relocate 'okio', 'me.lucko.spark.lib.okio'
- relocate 'okhttp3', 'me.lucko.spark.lib.okhttp3'
relocate 'net.kyori.adventure', 'me.lucko.spark.lib.adventure'
relocate 'net.kyori.examination', 'me.lucko.spark.lib.adventure.examination'
relocate 'net.bytebuddy', 'me.lucko.spark.lib.bytebuddy'
- relocate 'org.tukaani.xz', 'me.lucko.spark.lib.xz'
relocate 'com.google.protobuf', 'me.lucko.spark.lib.protobuf'
- relocate 'org.objectweb.asm', 'me.lucko.spark.lib.asm'
+// relocate 'org.objectweb.asm', 'me.lucko.spark.lib.asm'
relocate 'one.profiler', 'me.lucko.spark.lib.asyncprofiler'
exclude 'module-info.class'
exclude 'META-INF/maven/**'
exclude 'META-INF/proguard/**'
+
+ dependencies {
+ exclude(dependency('org.ow2.asm::'))
+ }
}
task remappedShadowJar(type: RemapJarTask) {
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricClassSourceLookup.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricClassSourceLookup.java
index 7030680..51834fc 100644
--- a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricClassSourceLookup.java
+++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricClassSourceLookup.java
@@ -22,18 +22,35 @@ package me.lucko.spark.fabric;
import com.google.common.collect.ImmutableMap;
-import me.lucko.spark.common.util.ClassSourceLookup;
+import me.lucko.spark.common.sampler.source.ClassSourceLookup;
+import me.lucko.spark.common.util.ClassFinder;
+import me.lucko.spark.fabric.smap.MixinUtils;
+import me.lucko.spark.fabric.smap.SourceMap;
+import me.lucko.spark.fabric.smap.SourceMapProvider;
import net.fabricmc.loader.api.FabricLoader;
import net.fabricmc.loader.api.ModContainer;
+import org.checkerframework.checker.nullness.qual.Nullable;
+import org.objectweb.asm.Type;
+import org.spongepowered.asm.mixin.FabricUtil;
+import org.spongepowered.asm.mixin.extensibility.IMixinConfig;
+import org.spongepowered.asm.mixin.transformer.Config;
+import org.spongepowered.asm.mixin.transformer.meta.MixinMerged;
+
+import java.lang.reflect.Method;
+import java.net.URI;
import java.nio.file.Path;
import java.util.Collection;
import java.util.Map;
public class FabricClassSourceLookup extends ClassSourceLookup.ByCodeSource {
+
+ private final ClassFinder classFinder = new ClassFinder();
+ private final SourceMapProvider smapProvider = new SourceMapProvider();
+
private final Path modsDirectory;
- private final Map<Path, String> pathToModMap;
+ private final Map<String, String> pathToModMap;
public FabricClassSourceLookup() {
FabricLoader loader = FabricLoader.getInstance();
@@ -43,7 +60,7 @@ public class FabricClassSourceLookup extends ClassSourceLookup.ByCodeSource {
@Override
public String identifyFile(Path path) {
- String id = this.pathToModMap.get(path);
+ String id = this.pathToModMap.get(path.toAbsolutePath().normalize().toString());
if (id != null) {
return id;
}
@@ -55,11 +72,141 @@ public class FabricClassSourceLookup extends ClassSourceLookup.ByCodeSource {
return super.identifyFileName(this.modsDirectory.relativize(path).toString());
}
- private static Map<Path, String> constructPathToModIdMap(Collection<ModContainer> mods) {
- ImmutableMap.Builder<Path, String> builder = ImmutableMap.builder();
+ @Override
+ public @Nullable String identify(MethodCall methodCall) throws Exception {
+ String className = methodCall.getClassName();
+ String methodName = methodCall.getMethodName();
+ String methodDesc = methodCall.getMethodDescriptor();
+
+ if (className.equals("native") || methodName.equals("<init>") || methodName.equals("<clinit>")) {
+ return null;
+ }
+
+ Class<?> clazz = this.classFinder.findClass(className);
+ if (clazz == null) {
+ return null;
+ }
+
+ Class<?>[] params = getParameterTypesForMethodDesc(methodDesc);
+ Method reflectMethod = clazz.getDeclaredMethod(methodName, params);
+
+ MixinMerged mixinMarker = reflectMethod.getDeclaredAnnotation(MixinMerged.class);
+ if (mixinMarker == null) {
+ return null;
+ }
+
+ return modIdFromMixinClass(mixinMarker.mixin());
+ }
+
+ @Override
+ public @Nullable String identify(MethodCallByLine methodCall) throws Exception {
+ String className = methodCall.getClassName();
+ String methodName = methodCall.getMethodName();
+ int lineNumber = methodCall.getLineNumber();
+
+ if (className.equals("native") || methodName.equals("<init>") || methodName.equals("<clinit>")) {
+ return null;
+ }
+
+ SourceMap smap = this.smapProvider.getSourceMap(className);
+ if (smap == null) {
+ return null;
+ }
+
+ int[] inputLineInfo = smap.getReverseLineMapping().get(lineNumber);
+ if (inputLineInfo == null || inputLineInfo.length == 0) {
+ return null;
+ }
+
+ for (int fileInfoIds : inputLineInfo) {
+ SourceMap.FileInfo inputFileInfo = smap.getFileInfo().get(fileInfoIds);
+ if (inputFileInfo == null) {
+ continue;
+ }
+
+ String path = inputFileInfo.path();
+ if (path.endsWith(".java")) {
+ path = path.substring(0, path.length() - 5);
+ }
+
+ String possibleMixinClassName = path.replace('/', '.');
+ if (possibleMixinClassName.equals(className)) {
+ continue;
+ }
+
+ return modIdFromMixinClass(possibleMixinClassName);
+ }
+
+ return null;
+ }
+
+ private static String modIdFromMixinClass(String mixinClassName) {
+ for (Config config : MixinUtils.getMixinConfigs().values()) {
+ IMixinConfig mixinConfig = config.getConfig();
+ if (mixinClassName.startsWith(mixinConfig.getMixinPackage())) {
+ return mixinConfig.getDecoration(FabricUtil.KEY_MOD_ID);
+ }
+ }
+ return null;
+ }
+
+ private Class<?>[] getParameterTypesForMethodDesc(String methodDesc) {
+ Type methodType = Type.getMethodType(methodDesc);
+ Class<?>[] params = new Class[methodType.getArgumentTypes().length];
+ Type[] argumentTypes = methodType.getArgumentTypes();
+
+ for (int i = 0, argumentTypesLength = argumentTypes.length; i < argumentTypesLength; i++) {
+ Type argumentType = argumentTypes[i];
+ params[i] = getClassFromType(argumentType);
+ }
+
+ return params;
+ }
+
+ private Class<?> getClassFromType(Type type) {
+ return switch (type.getSort()) {
+ case Type.VOID -> void.class;
+ case Type.BOOLEAN -> boolean.class;
+ case Type.CHAR -> char.class;
+ case Type.BYTE -> byte.class;
+ case Type.SHORT -> short.class;
+ case Type.INT -> int.class;
+ case Type.FLOAT -> float.class;
+ case Type.LONG -> long.class;
+ case Type.DOUBLE -> double.class;
+ case Type.ARRAY -> {
+ final Class<?> classFromType = getClassFromType(type.getElementType());
+ Class<?> result = classFromType;
+ if (classFromType != null) {
+ for (int i = 0; i < type.getDimensions(); i++) {
+ result = result.arrayType();
+ }
+ }
+ yield result;
+ }
+ case Type.OBJECT -> this.classFinder.findClass(type.getClassName());
+ default -> null;
+ };
+ }
+
+ private static Map<String, String> constructPathToModIdMap(Collection<ModContainer> mods) {
+ ImmutableMap.Builder<String, String> builder = ImmutableMap.builder();
for (ModContainer mod : mods) {
- Path path = mod.getRootPath().toAbsolutePath().normalize();
- builder.put(path, mod.getMetadata().getId());
+ String modId = mod.getMetadata().getId();
+ if (modId.equals("java")) {
+ continue;
+ }
+
+ for (Path path : mod.getRootPaths()) {
+ URI uri = path.toUri();
+ if (uri.getScheme().equals("jar") && path.toString().equals("/")) { // ZipFileSystem
+ String zipFilePath = path.getFileSystem().toString();
+ builder.put(zipFilePath, modId);
+ } else {
+ builder.put(path.toAbsolutePath().normalize().toString(), modId);
+ }
+
+ }
}
return builder.build();
}
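A sketch of turning a JVM method descriptor into parameter classes with ASM's Type, as getParameterTypesForMethodDesc above does; only two sorts are handled here, for brevity.

    import org.objectweb.asm.Type;

    public class DescriptorSketch {
        public static void main(String[] args) throws ClassNotFoundException {
            String methodDesc = "(ILjava/lang/String;)V"; // (int, String) -> void
            Type methodType = Type.getMethodType(methodDesc);

            for (Type arg : methodType.getArgumentTypes()) {
                Class<?> clazz = switch (arg.getSort()) {
                    case Type.INT -> int.class;
                    case Type.OBJECT -> Class.forName(arg.getClassName());
                    default -> null; // other sorts omitted for brevity
                };
                System.out.println(arg.getClassName() + " -> " + clazz);
            }
        }
    }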
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricExtraMetadataProvider.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricExtraMetadataProvider.java
new file mode 100644
index 0000000..22794c2
--- /dev/null
+++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricExtraMetadataProvider.java
@@ -0,0 +1,75 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.fabric;
+
+import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
+
+import me.lucko.spark.common.platform.MetadataProvider;
+
+import net.minecraft.resource.ResourcePackManager;
+import net.minecraft.resource.ResourcePackProfile;
+import net.minecraft.resource.ResourcePackSource;
+
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+public class FabricExtraMetadataProvider implements MetadataProvider {
+
+ private final ResourcePackManager resourcePackManager;
+
+ public FabricExtraMetadataProvider(ResourcePackManager resourcePackManager) {
+ this.resourcePackManager = resourcePackManager;
+ }
+
+ @Override
+ public Map<String, JsonElement> get() {
+ Map<String, JsonElement> metadata = new LinkedHashMap<>();
+ metadata.put("datapacks", datapackMetadata());
+ return metadata;
+ }
+
+ private JsonElement datapackMetadata() {
+ JsonObject datapacks = new JsonObject();
+ for (ResourcePackProfile profile : this.resourcePackManager.getEnabledProfiles()) {
+ JsonObject obj = new JsonObject();
+ obj.addProperty("name", profile.getDisplayName().getString());
+ obj.addProperty("description", profile.getDescription().getString());
+ obj.addProperty("source", resourcePackSource(profile.getSource()));
+ datapacks.add(profile.getName(), obj);
+ }
+ return datapacks;
+ }
+
+ private static String resourcePackSource(ResourcePackSource source) {
+ if (source == ResourcePackSource.NONE) {
+ return "none";
+ } else if (source == ResourcePackSource.BUILTIN) {
+ return "builtin";
+ } else if (source == ResourcePackSource.WORLD) {
+ return "world";
+ } else if (source == ResourcePackSource.SERVER) {
+ return "server";
+ } else {
+ return "unknown";
+ }
+ }
+}
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricServerConfigProvider.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricServerConfigProvider.java
new file mode 100644
index 0000000..325a324
--- /dev/null
+++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricServerConfigProvider.java
@@ -0,0 +1,57 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.fabric;
+
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.ImmutableSet;
+
+import me.lucko.spark.common.platform.serverconfig.ConfigParser;
+import me.lucko.spark.common.platform.serverconfig.PropertiesConfigParser;
+import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider;
+
+import java.util.Collection;
+import java.util.Map;
+
+public class FabricServerConfigProvider extends ServerConfigProvider {
+
+ /** A map of provided files and their type */
+ private static final Map<String, ConfigParser> FILES;
+ /** A collection of paths to be excluded from the files */
+ private static final Collection<String> HIDDEN_PATHS;
+
+ public FabricServerConfigProvider() {
+ super(FILES, HIDDEN_PATHS);
+ }
+
+ static {
+ ImmutableSet.Builder<String> hiddenPaths = ImmutableSet.<String>builder()
+ .add("server-ip")
+ .add("motd")
+ .add("resource-pack")
+ .add("rcon<dot>password")
+ .add("level-seed")
+ .addAll(getSystemPropertyList("spark.serverconfigs.hiddenpaths"));
+
+ FILES = ImmutableMap.of("server.properties", PropertiesConfigParser.INSTANCE);
+ HIDDEN_PATHS = hiddenPaths.build();
+ }
+
+}
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricWorldInfoProvider.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricWorldInfoProvider.java
new file mode 100644
index 0000000..156db89
--- /dev/null
+++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricWorldInfoProvider.java
@@ -0,0 +1,178 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.fabric;
+
+import it.unimi.dsi.fastutil.longs.LongIterator;
+import it.unimi.dsi.fastutil.longs.LongSet;
+
+import me.lucko.spark.common.platform.world.AbstractChunkInfo;
+import me.lucko.spark.common.platform.world.CountMap;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
+import me.lucko.spark.fabric.mixin.ClientEntityManagerAccessor;
+import me.lucko.spark.fabric.mixin.ClientWorldAccessor;
+import me.lucko.spark.fabric.mixin.ServerEntityManagerAccessor;
+import me.lucko.spark.fabric.mixin.ServerWorldAccessor;
+
+import net.minecraft.client.MinecraftClient;
+import net.minecraft.client.world.ClientEntityManager;
+import net.minecraft.client.world.ClientWorld;
+import net.minecraft.entity.Entity;
+import net.minecraft.entity.EntityType;
+import net.minecraft.server.MinecraftServer;
+import net.minecraft.server.world.ServerEntityManager;
+import net.minecraft.server.world.ServerWorld;
+import net.minecraft.util.math.ChunkPos;
+import net.minecraft.world.entity.EntityIndex;
+import net.minecraft.world.entity.EntityTrackingSection;
+import net.minecraft.world.entity.SectionedEntityCache;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.stream.Stream;
+
+public abstract class FabricWorldInfoProvider implements WorldInfoProvider {
+
+ protected List<FabricChunkInfo> getChunksFromCache(SectionedEntityCache<Entity> cache) {
+ LongSet loadedChunks = cache.getChunkPositions();
+ List<FabricChunkInfo> list = new ArrayList<>(loadedChunks.size());
+
+ for (LongIterator iterator = loadedChunks.iterator(); iterator.hasNext(); ) {
+ long chunkPos = iterator.nextLong();
+ Stream<EntityTrackingSection<Entity>> sections = cache.getTrackingSections(chunkPos);
+
+ list.add(new FabricChunkInfo(chunkPos, sections));
+ }
+
+ return list;
+ }
+
+ public static final class Server extends FabricWorldInfoProvider {
+ private final MinecraftServer server;
+
+ public Server(MinecraftServer server) {
+ this.server = server;
+ }
+
+ @Override
+ public CountsResult pollCounts() {
+ int players = this.server.getCurrentPlayerCount();
+ int entities = 0;
+ int chunks = 0;
+
+ for (ServerWorld world : this.server.getWorlds()) {
+ ServerEntityManager<Entity> entityManager = ((ServerWorldAccessor) world).getEntityManager();
+ EntityIndex<?> entityIndex = ((ServerEntityManagerAccessor) entityManager).getIndex();
+
+ entities += entityIndex.size();
+ chunks += world.getChunkManager().getLoadedChunkCount();
+ }
+
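+ // the third CountsResult value (tile entities, assuming spark's ordering)
+ // is not tracked here, so -1 is reported to mean "unavailable"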
+ return new CountsResult(players, entities, -1, chunks);
+ }
+
+ @Override
+ public ChunksResult<FabricChunkInfo> pollChunks() {
+ ChunksResult<FabricChunkInfo> data = new ChunksResult<>();
+
+ for (ServerWorld world : this.server.getWorlds()) {
+ ServerEntityManager<Entity> entityManager = ((ServerWorldAccessor) world).getEntityManager();
+ SectionedEntityCache<Entity> cache = ((ServerEntityManagerAccessor) entityManager).getCache();
+
+ List<FabricChunkInfo> list = getChunksFromCache(cache);
+ data.put(world.getRegistryKey().getValue().getPath(), list);
+ }
+
+ return data;
+ }
+ }
+
+ public static final class Client extends FabricWorldInfoProvider {
+ private final MinecraftClient client;
+
+ public Client(MinecraftClient client) {
+ this.client = client;
+ }
+
+ @Override
+ public CountsResult pollCounts() {
+ ClientWorld world = this.client.world;
+ if (world == null) {
+ return null;
+ }
+
+ ClientEntityManager<Entity> entityManager = ((ClientWorldAccessor) world).getEntityManager();
+ EntityIndex<?> entityIndex = ((ClientEntityManagerAccessor) entityManager).getIndex();
+
+ int entities = entityIndex.size();
+ int chunks = world.getChunkManager().getLoadedChunkCount();
+
+ return new CountsResult(-1, entities, -1, chunks);
+ }
+
+ @Override
+ public ChunksResult<FabricChunkInfo> pollChunks() {
+ ChunksResult<FabricChunkInfo> data = new ChunksResult<>();
+
+ ClientWorld world = this.client.world;
+ if (world == null) {
+ return null;
+ }
+
+ ClientEntityManager<Entity> entityManager = ((ClientWorldAccessor) world).getEntityManager();
+ SectionedEntityCache<Entity> cache = ((ClientEntityManagerAccessor) entityManager).getCache();
+
+ List<FabricChunkInfo> list = getChunksFromCache(cache);
+ data.put(world.getRegistryKey().getValue().getPath(), list);
+
+ return data;
+ }
+ }
+
+ static final class FabricChunkInfo extends AbstractChunkInfo<EntityType<?>> {
+ private final CountMap<EntityType<?>> entityCounts;
+
+ FabricChunkInfo(long chunkPos, Stream<EntityTrackingSection<Entity>> entities) {
+ super(ChunkPos.getPackedX(chunkPos), ChunkPos.getPackedZ(chunkPos));
+
+ this.entityCounts = new CountMap.Simple<>(new HashMap<>());
+ entities.forEach(section -> {
+ if (section.getStatus().shouldTrack()) {
+ section.stream().forEach(entity ->
+ this.entityCounts.increment(entity.getType())
+ );
+ }
+ });
+ }
+
+ @Override
+ public CountMap<EntityType<?>> getEntityCounts() {
+ return this.entityCounts;
+ }
+
+ @Override
+ public String entityTypeName(EntityType<?> type) {
+ return EntityType.getId(type).toString();
+ }
+ }
+
+}
+
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientEntityManagerAccessor.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientEntityManagerAccessor.java
new file mode 100644
index 0000000..994c9a3
--- /dev/null
+++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientEntityManagerAccessor.java
@@ -0,0 +1,40 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.fabric.mixin;
+
+import net.minecraft.client.world.ClientEntityManager;
+import net.minecraft.entity.Entity;
+import net.minecraft.world.entity.EntityIndex;
+import net.minecraft.world.entity.SectionedEntityCache;
+
+import org.spongepowered.asm.mixin.Mixin;
+import org.spongepowered.asm.mixin.gen.Accessor;
+
+@Mixin(ClientEntityManager.class)
+public interface ClientEntityManagerAccessor {
+
+ @Accessor
+ SectionedEntityCache<Entity> getCache();
+
+ @Accessor
+ EntityIndex<?> getIndex();
+
+}
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientWorldAccessor.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientWorldAccessor.java
new file mode 100644
index 0000000..01562ef
--- /dev/null
+++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientWorldAccessor.java
@@ -0,0 +1,36 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.fabric.mixin;
+
+import net.minecraft.client.world.ClientEntityManager;
+import net.minecraft.client.world.ClientWorld;
+import net.minecraft.entity.Entity;
+
+import org.spongepowered.asm.mixin.Mixin;
+import org.spongepowered.asm.mixin.gen.Accessor;
+
+@Mixin(ClientWorld.class)
+public interface ClientWorldAccessor {
+
+ @Accessor
+ ClientEntityManager<Entity> getEntityManager();
+
+}
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/MinecraftClientAccessor.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/MinecraftClientAccessor.java
new file mode 100644
index 0000000..7a4fb78
--- /dev/null
+++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/MinecraftClientAccessor.java
@@ -0,0 +1,34 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.fabric.mixin;
+
+import net.minecraft.client.MinecraftClient;
+
+import org.spongepowered.asm.mixin.Mixin;
+import org.spongepowered.asm.mixin.gen.Accessor;
+
+@Mixin(MinecraftClient.class)
+public interface MinecraftClientAccessor {
+
+ @Accessor
+ Thread getThread();
+
+}
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerEntityManagerAccessor.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerEntityManagerAccessor.java
new file mode 100644
index 0000000..2c67502
--- /dev/null
+++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerEntityManagerAccessor.java
@@ -0,0 +1,40 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.fabric.mixin;
+
+import net.minecraft.entity.Entity;
+import net.minecraft.server.world.ServerEntityManager;
+import net.minecraft.world.entity.EntityIndex;
+import net.minecraft.world.entity.SectionedEntityCache;
+
+import org.spongepowered.asm.mixin.Mixin;
+import org.spongepowered.asm.mixin.gen.Accessor;
+
+@Mixin(ServerEntityManager.class)
+public interface ServerEntityManagerAccessor {
+
+ @Accessor
+ SectionedEntityCache<Entity> getCache();
+
+ @Accessor
+ EntityIndex<?> getIndex();
+
+}
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerWorldAccessor.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerWorldAccessor.java
new file mode 100644
index 0000000..cf2e7e8
--- /dev/null
+++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerWorldAccessor.java
@@ -0,0 +1,36 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.fabric.mixin;
+
+import net.minecraft.entity.Entity;
+import net.minecraft.server.world.ServerEntityManager;
+import net.minecraft.server.world.ServerWorld;
+
+import org.spongepowered.asm.mixin.Mixin;
+import org.spongepowered.asm.mixin.gen.Accessor;
+
+@Mixin(ServerWorld.class)
+public interface ServerWorldAccessor {
+
+ @Accessor
+ ServerEntityManager<Entity> getEntityManager();
+
+}
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/placeholder/SparkFabricPlaceholderApi.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/placeholder/SparkFabricPlaceholderApi.java
index dc2e7d9..69303e3 100644
--- a/spark-fabric/src/main/java/me/lucko/spark/fabric/placeholder/SparkFabricPlaceholderApi.java
+++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/placeholder/SparkFabricPlaceholderApi.java
@@ -20,169 +20,48 @@
package me.lucko.spark.fabric.placeholder;
-import eu.pb4.placeholders.PlaceholderAPI;
-import eu.pb4.placeholders.PlaceholderResult;
+import eu.pb4.placeholders.api.PlaceholderContext;
+import eu.pb4.placeholders.api.PlaceholderHandler;
+import eu.pb4.placeholders.api.PlaceholderResult;
+import eu.pb4.placeholders.api.Placeholders;
import me.lucko.spark.common.SparkPlatform;
-import me.lucko.spark.common.monitor.cpu.CpuMonitor;
-import me.lucko.spark.common.monitor.tick.TickStatistics;
-import me.lucko.spark.common.util.RollingAverage;
-import me.lucko.spark.common.util.StatisticFormatter;
+import me.lucko.spark.common.util.SparkPlaceholder;
import net.kyori.adventure.text.Component;
import net.kyori.adventure.text.serializer.gson.GsonComponentSerializer;
import net.minecraft.text.Text;
import net.minecraft.util.Identifier;
-public class SparkFabricPlaceholderApi {
- private final SparkPlatform platform;
+import org.jetbrains.annotations.Nullable;
- public SparkFabricPlaceholderApi(SparkPlatform platform) {
- this.platform = platform;
+public enum SparkFabricPlaceholderApi {
+ ;
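+ // an enum with no constants: a non-instantiable holder for static members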
- PlaceholderAPI.register(
- new Identifier("spark", "tps"),
- context -> {
- TickStatistics tickStatistics = platform.getTickStatistics();
- if (tickStatistics == null) {
- return PlaceholderResult.invalid();
- }
-
- if (context.hasArgument()) {
- Double tps = switch (context.getArgument()) {
- case "5s":
- yield tickStatistics.tps5Sec();
- case "10s":
- yield tickStatistics.tps10Sec();
- case "1m":
- yield tickStatistics.tps1Min();
- case "5m":
- yield tickStatistics.tps5Min();
- case "15m":
- yield tickStatistics.tps15Min();
- default:
- yield null;
- };
-
- if (tps == null) {
- return PlaceholderResult.invalid("Invalid argument");
- } else {
- return PlaceholderResult.value(toText(StatisticFormatter.formatTps(tps)));
- }
- } else {
- return PlaceholderResult.value(toText(
- Component.text()
- .append(StatisticFormatter.formatTps(tickStatistics.tps5Sec())).append(Component.text(", "))
- .append(StatisticFormatter.formatTps(tickStatistics.tps10Sec())).append(Component.text(", "))
- .append(StatisticFormatter.formatTps(tickStatistics.tps1Min())).append(Component.text(", "))
- .append(StatisticFormatter.formatTps(tickStatistics.tps5Min())).append(Component.text(", "))
- .append(StatisticFormatter.formatTps(tickStatistics.tps15Min()))
- .build()
- ));
- }
- }
- );
-
- PlaceholderAPI.register(
- new Identifier("spark", "tickduration"),
- context -> {
- TickStatistics tickStatistics = platform.getTickStatistics();
- if (tickStatistics == null || !tickStatistics.isDurationSupported()) {
- return PlaceholderResult.invalid();
- }
-
- if (context.hasArgument()) {
- RollingAverage duration = switch (context.getArgument()) {
- case "10s":
- yield tickStatistics.duration10Sec();
- case "1m":
- yield tickStatistics.duration1Min();
- default:
- yield null;
- };
-
- if (duration == null) {
- return PlaceholderResult.invalid("Invalid argument");
- } else {
- return PlaceholderResult.value(toText(StatisticFormatter.formatTickDurations(duration)));
- }
- } else {
- return PlaceholderResult.value(toText(
- Component.text()
- .append(StatisticFormatter.formatTickDurations(tickStatistics.duration10Sec())).append(Component.text("; "))
- .append(StatisticFormatter.formatTickDurations(tickStatistics.duration1Min()))
- .build()
- ));
- }
- }
- );
-
- PlaceholderAPI.register(
- new Identifier("spark", "cpu_system"),
- context -> {
- if (context.hasArgument()) {
- Double usage = switch (context.getArgument()) {
- case "10s":
- yield CpuMonitor.systemLoad10SecAvg();
- case "1m":
- yield CpuMonitor.systemLoad1MinAvg();
- case "15m":
- yield CpuMonitor.systemLoad15MinAvg();
- default:
- yield null;
- };
-
- if (usage == null) {
- return PlaceholderResult.invalid("Invalid argument");
- } else {
- return PlaceholderResult.value(toText(StatisticFormatter.formatCpuUsage(usage)));
- }
- } else {
- return PlaceholderResult.value(toText(
- Component.text()
- .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad10SecAvg())).append(Component.text(", "))
- .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad1MinAvg())).append(Component.text(", "))
- .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad15MinAvg()))
- .build()
- ));
- }
- }
- );
-
- PlaceholderAPI.register(
- new Identifier("spark", "cpu_process"),
- context -> {
- if (context.hasArgument()) {
- Double usage = switch (context.getArgument()) {
- case "10s":
- yield CpuMonitor.processLoad10SecAvg();
- case "1m":
- yield CpuMonitor.processLoad1MinAvg();
- case "15m":
- yield CpuMonitor.processLoad15MinAvg();
- default:
- yield null;
- };
-
- if (usage == null) {
- return PlaceholderResult.invalid("Invalid argument");
- } else {
- return PlaceholderResult.value(toText(StatisticFormatter.formatCpuUsage(usage)));
- }
- } else {
- return PlaceholderResult.value(toText(
- Component.text()
- .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad10SecAvg())).append(Component.text(", "))
- .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad1MinAvg())).append(Component.text(", "))
- .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad15MinAvg()))
- .build()
- ));
- }
- }
- );
+ public static void register(SparkPlatform platform) {
+ for (SparkPlaceholder placeholder : SparkPlaceholder.values()) {
+ Placeholders.register(
+ new Identifier("spark", placeholder.getName()),
+ new Handler(platform, placeholder)
+ );
+ }
}
- private Text toText(Component component) {
- return Text.Serializer.fromJson(GsonComponentSerializer.gson().serialize(component));
+ private record Handler(SparkPlatform platform, SparkPlaceholder placeholder) implements PlaceholderHandler {
+ @Override
+ public PlaceholderResult onPlaceholderRequest(PlaceholderContext context, @Nullable String argument) {
+ return toResult(this.placeholder.resolve(this.platform, argument));
+ }
+
+ private static PlaceholderResult toResult(Component component) {
+ return component == null
+ ? PlaceholderResult.invalid()
+ : PlaceholderResult.value(toText(component));
+ }
+
+ private static Text toText(Component component) {
+ return Text.Serializer.fromJson(GsonComponentSerializer.gson().serialize(component));
+ }
}
+
}
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricClientSparkPlugin.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricClientSparkPlugin.java
index e94d697..faf4eef 100644
--- a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricClientSparkPlugin.java
+++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricClientSparkPlugin.java
@@ -28,14 +28,20 @@ import com.mojang.brigadier.suggestion.SuggestionProvider;
import com.mojang.brigadier.suggestion.Suggestions;
import com.mojang.brigadier.suggestion.SuggestionsBuilder;
+import me.lucko.spark.common.platform.MetadataProvider;
import me.lucko.spark.common.platform.PlatformInfo;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
+import me.lucko.spark.common.sampler.ThreadDumper;
import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.common.tick.TickReporter;
import me.lucko.spark.fabric.FabricCommandSender;
+import me.lucko.spark.fabric.FabricExtraMetadataProvider;
import me.lucko.spark.fabric.FabricPlatformInfo;
import me.lucko.spark.fabric.FabricSparkMod;
import me.lucko.spark.fabric.FabricTickHook;
import me.lucko.spark.fabric.FabricTickReporter;
+import me.lucko.spark.fabric.FabricWorldInfoProvider;
+import me.lucko.spark.fabric.mixin.MinecraftClientAccessor;
import net.fabricmc.fabric.api.client.command.v2.ClientCommandRegistrationCallback;
import net.fabricmc.fabric.api.client.command.v2.FabricClientCommandSource;
@@ -55,10 +61,12 @@ public class FabricClientSparkPlugin extends FabricSparkPlugin implements Comman
}
private final MinecraftClient minecraft;
+ private final ThreadDumper.GameThread gameThreadDumper;
public FabricClientSparkPlugin(FabricSparkMod mod, MinecraftClient minecraft) {
super(mod);
this.minecraft = minecraft;
+ this.gameThreadDumper = new ThreadDumper.GameThread(() -> ((MinecraftClientAccessor) minecraft).getThread());
}
@Override
@@ -87,7 +95,6 @@ public class FabricClientSparkPlugin extends FabricSparkPlugin implements Comman
return 0;
}
- this.threadDumper.ensureSetup();
this.platform.executeCommand(new FabricCommandSender(context.getSource().getEntity(), this), args);
return Command.SINGLE_SUCCESS;
}
@@ -113,6 +120,16 @@ public class FabricClientSparkPlugin extends FabricSparkPlugin implements Comman
}
@Override
+ public void executeSync(Runnable task) {
+ this.minecraft.executeSync(task);
+ }
+
+ @Override
+ public ThreadDumper getDefaultThreadDumper() {
+ return this.gameThreadDumper.get();
+ }
+
+ @Override
public TickHook createTickHook() {
return new FabricTickHook.Client();
}
@@ -123,6 +140,16 @@ public class FabricClientSparkPlugin extends FabricSparkPlugin implements Comman
}
@Override
+ public MetadataProvider createExtraMetadataProvider() {
+ return new FabricExtraMetadataProvider(this.minecraft.getResourcePackManager());
+ }
+
+ @Override
+ public WorldInfoProvider createWorldInfoProvider() {
+ return new FabricWorldInfoProvider.Client(this.minecraft);
+ }
+
+ @Override
public PlatformInfo getPlatformInfo() {
return new FabricPlatformInfo(PlatformInfo.Type.CLIENT);
}
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java
index 428ac4c..1606d57 100644
--- a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java
+++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java
@@ -30,15 +30,22 @@ import com.mojang.brigadier.suggestion.SuggestionsBuilder;
import me.lucko.fabric.api.permissions.v0.Permissions;
import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
+import me.lucko.spark.common.platform.MetadataProvider;
import me.lucko.spark.common.platform.PlatformInfo;
+import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
+import me.lucko.spark.common.sampler.ThreadDumper;
import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.common.tick.TickReporter;
import me.lucko.spark.fabric.FabricCommandSender;
+import me.lucko.spark.fabric.FabricExtraMetadataProvider;
import me.lucko.spark.fabric.FabricPlatformInfo;
import me.lucko.spark.fabric.FabricPlayerPingProvider;
+import me.lucko.spark.fabric.FabricServerConfigProvider;
import me.lucko.spark.fabric.FabricSparkMod;
import me.lucko.spark.fabric.FabricTickHook;
import me.lucko.spark.fabric.FabricTickReporter;
+import me.lucko.spark.fabric.FabricWorldInfoProvider;
import me.lucko.spark.fabric.placeholder.SparkFabricPlaceholderApi;
import net.fabricmc.loader.api.FabricLoader;
@@ -59,10 +66,12 @@ public class FabricServerSparkPlugin extends FabricSparkPlugin implements Comman
}
private final MinecraftServer server;
+ private final ThreadDumper gameThreadDumper;
public FabricServerSparkPlugin(FabricSparkMod mod, MinecraftServer server) {
super(mod);
this.server = server;
+ this.gameThreadDumper = new ThreadDumper.Specific(server.getThread());
}
@Override
@@ -74,7 +83,11 @@ public class FabricServerSparkPlugin extends FabricSparkPlugin implements Comman
// placeholders
if (FabricLoader.getInstance().isModLoaded("placeholder-api")) {
- new SparkFabricPlaceholderApi(this.platform);
+ try {
+ SparkFabricPlaceholderApi.register(this.platform);
+ } catch (LinkageError e) {
+ // ignore - the mod is present but the loaded Placeholder API version is likely incompatible
+ }
}
}
@@ -89,7 +102,6 @@ public class FabricServerSparkPlugin extends FabricSparkPlugin implements Comman
return 0;
}
- this.threadDumper.ensureSetup();
CommandOutput source = context.getSource().getEntity() != null ? context.getSource().getEntity() : context.getSource().getServer();
this.platform.executeCommand(new FabricCommandSender(source, this), args);
return Command.SINGLE_SUCCESS;
@@ -107,8 +119,15 @@ public class FabricServerSparkPlugin extends FabricSparkPlugin implements Comman
@Override
public boolean hasPermission(CommandOutput sender, String permission) {
- if (sender instanceof PlayerEntity) {
- return Permissions.check(((PlayerEntity) sender), permission, 4);
+ if (sender instanceof PlayerEntity player) {
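+ // if no explicit permission value is set, fall back to allowing the
+ // integrated-server host, and otherwise require op permission level 4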
+ return Permissions.getPermissionValue(player, permission).orElseGet(() -> {
+ MinecraftServer server = player.getServer();
+ if (server != null && server.isHost(player.getGameProfile())) {
+ return true;
+ }
+
+ return player.hasPermissionLevel(4);
+ });
} else {
return true;
}
@@ -123,6 +142,16 @@ public class FabricServerSparkPlugin extends FabricSparkPlugin implements Comman
}
@Override
+ public void executeSync(Runnable task) {
+ this.server.executeSync(task);
+ }
+
+ @Override
+ public ThreadDumper getDefaultThreadDumper() {
+ return this.gameThreadDumper;
+ }
+
+ @Override
public TickHook createTickHook() {
return new FabricTickHook.Server();
}
@@ -138,6 +167,21 @@ public class FabricServerSparkPlugin extends FabricSparkPlugin implements Comman
}
@Override
+ public ServerConfigProvider createServerConfigProvider() {
+ return new FabricServerConfigProvider();
+ }
+
+ @Override
+ public MetadataProvider createExtraMetadataProvider() {
+ return new FabricExtraMetadataProvider(this.server.getDataPackManager());
+ }
+
+ @Override
+ public WorldInfoProvider createWorldInfoProvider() {
+ return new FabricWorldInfoProvider.Server(this.server);
+ }
+
+ @Override
public PlatformInfo getPlatformInfo() {
return new FabricPlatformInfo(PlatformInfo.Type.SERVER);
}
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricSparkMixinPlugin.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricSparkMixinPlugin.java
new file mode 100644
index 0000000..cfc8c95
--- /dev/null
+++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricSparkMixinPlugin.java
@@ -0,0 +1,71 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.fabric.plugin;
+
+import me.lucko.spark.fabric.smap.SourceDebugCache;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.objectweb.asm.tree.ClassNode;
+import org.spongepowered.asm.mixin.MixinEnvironment;
+import org.spongepowered.asm.mixin.extensibility.IMixinConfigPlugin;
+import org.spongepowered.asm.mixin.extensibility.IMixinInfo;
+import org.spongepowered.asm.mixin.transformer.IMixinTransformer;
+import org.spongepowered.asm.mixin.transformer.ext.Extensions;
+import org.spongepowered.asm.mixin.transformer.ext.IExtension;
+import org.spongepowered.asm.mixin.transformer.ext.ITargetClassContext;
+
+import java.util.List;
+import java.util.Set;
+
+public class FabricSparkMixinPlugin implements IMixinConfigPlugin, IExtension {
+
+ private static final Logger LOGGER = LogManager.getLogger("spark");
+
+ @Override
+ public void onLoad(String mixinPackage) {
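+ // register this plugin as a transformer extension so that export() below
+ // observes each class as the mixin transformer writes it out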
+ Object activeTransformer = MixinEnvironment.getCurrentEnvironment().getActiveTransformer();
+ if (activeTransformer instanceof IMixinTransformer transformer && transformer.getExtensions() instanceof Extensions extensions) {
+ extensions.add(this);
+ } else {
+ LOGGER.error(
+ "Failed to initialize SMAP parser for spark profiler. " +
+ "Mod information for mixin injected methods is now only available with the async-profiler engine."
+ );
+ }
+ }
+
+ @Override
+ public void export(MixinEnvironment env, String name, boolean force, ClassNode classNode) {
+ SourceDebugCache.put(name, classNode);
+ }
+
+ // noop
+ @Override public String getRefMapperConfig() { return null; }
+ @Override public boolean shouldApplyMixin(String targetClassName, String mixinClassName) { return true; }
+ @Override public void acceptTargets(Set<String> myTargets, Set<String> otherTargets) { }
+ @Override public List<String> getMixins() { return null; }
+ @Override public void preApply(String targetClassName, ClassNode targetClass, String mixinClassName, IMixinInfo mixinInfo) { }
+ @Override public void postApply(String targetClassName, ClassNode targetClass, String mixinClassName, IMixinInfo mixinInfo) { }
+ @Override public boolean checkActive(MixinEnvironment environment) { return true; }
+ @Override public void preApply(ITargetClassContext context) { }
+ @Override public void postApply(ITargetClassContext context) { }
+
+}
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricSparkPlugin.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricSparkPlugin.java
index b1392d4..9a03b4e 100644
--- a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricSparkPlugin.java
+++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricSparkPlugin.java
@@ -34,12 +34,14 @@ import com.mojang.brigadier.tree.LiteralCommandNode;
import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.SparkPlugin;
import me.lucko.spark.common.command.sender.CommandSender;
-import me.lucko.spark.common.sampler.ThreadDumper;
-import me.lucko.spark.common.util.ClassSourceLookup;
+import me.lucko.spark.common.sampler.source.ClassSourceLookup;
+import me.lucko.spark.common.sampler.source.SourceMetadata;
import me.lucko.spark.common.util.SparkThreadFactory;
import me.lucko.spark.fabric.FabricClassSourceLookup;
import me.lucko.spark.fabric.FabricSparkMod;
+import net.fabricmc.loader.api.FabricLoader;
+import net.fabricmc.loader.api.metadata.Person;
import net.minecraft.server.command.CommandOutput;
import org.apache.logging.log4j.LogManager;
@@ -47,10 +49,12 @@ import org.apache.logging.log4j.Logger;
import java.nio.file.Path;
import java.util.Arrays;
+import java.util.Collection;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.logging.Level;
+import java.util.stream.Collectors;
public abstract class FabricSparkPlugin implements SparkPlugin {
@@ -59,7 +63,6 @@ public abstract class FabricSparkPlugin implements SparkPlugin {
protected final ScheduledExecutorService scheduler;
protected SparkPlatform platform;
- protected final ThreadDumper.GameThread threadDumper = new ThreadDumper.GameThread();
protected FabricSparkPlugin(FabricSparkMod mod) {
this.mod = mod;
@@ -108,13 +111,20 @@ public abstract class FabricSparkPlugin implements SparkPlugin {
}
@Override
- public ThreadDumper getDefaultThreadDumper() {
- return this.threadDumper.get();
+ public ClassSourceLookup createClassSourceLookup() {
+ return new FabricClassSourceLookup();
}
@Override
- public ClassSourceLookup createClassSourceLookup() {
- return new FabricClassSourceLookup();
+ public Collection<SourceMetadata> getKnownSources() {
+ return SourceMetadata.gather(
+ FabricLoader.getInstance().getAllMods(),
+ mod -> mod.getMetadata().getId(),
+ mod -> mod.getMetadata().getVersion().getFriendlyString(),
+ mod -> mod.getMetadata().getAuthors().stream()
+ .map(Person::getName)
+ .collect(Collectors.joining(", "))
+ );
}
protected CompletableFuture<Suggestions> generateSuggestions(CommandSender sender, String[] args, SuggestionsBuilder builder) {
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/smap/MixinUtils.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/smap/MixinUtils.java
new file mode 100644
index 0000000..ebf2766
--- /dev/null
+++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/smap/MixinUtils.java
@@ -0,0 +1,52 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.fabric.smap;
+
+import org.spongepowered.asm.mixin.transformer.Config;
+
+import java.lang.reflect.Field;
+import java.util.HashMap;
+import java.util.Map;
+
+public enum MixinUtils {
+ ;
+
+ private static final Map<String, Config> MIXIN_CONFIGS;
+
+ static {
+ Map<String, Config> configs;
+ try {
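+ // Mixin does not expose its registered configs, so read the static
+ // Config.allConfigs field reflectively (best-effort; falls back to empty)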
+ Field allConfigsField = Config.class.getDeclaredField("allConfigs");
+ allConfigsField.setAccessible(true);
+
+ //noinspection unchecked
+ configs = (Map<String, Config>) allConfigsField.get(null);
+ } catch (Exception e) {
+ e.printStackTrace();
+ configs = new HashMap<>();
+ }
+ MIXIN_CONFIGS = configs;
+ }
+
+ public static Map<String, Config> getMixinConfigs() {
+ return MIXIN_CONFIGS;
+ }
+}
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/smap/SourceDebugCache.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/smap/SourceDebugCache.java
new file mode 100644
index 0000000..88adae6
--- /dev/null
+++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/smap/SourceDebugCache.java
@@ -0,0 +1,87 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.fabric.smap;
+
+import org.objectweb.asm.tree.ClassNode;
+import org.spongepowered.asm.service.IClassBytecodeProvider;
+import org.spongepowered.asm.service.MixinService;
+
+import java.util.Map;
+import java.util.Optional;
+import java.util.concurrent.ConcurrentHashMap;
+
+/**
+ * Caches the lookup of class -> source debug info for classes loaded on the JVM.
+ *
+ * The {@link me.lucko.spark.fabric.plugin.FabricSparkMixinPlugin} also supplements this cache with
+ * extra information as classes are exported.
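+ *
+ * <p>Typical lookup (illustrative): {@code String smap = SourceDebugCache.getSourceDebugInfo("org.example.Foo");}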
+ */
+public enum SourceDebugCache {
+ ;
+
+ // class name -> smap
+ private static final Map<String, SmapValue> CACHE = new ConcurrentHashMap<>();
+
+ public static void put(String className, ClassNode node) {
+ if (className == null || node == null) {
+ return;
+ }
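+ // normalise the JVM internal name (a/b/C) to a binary name (a.b.C)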
+ className = className.replace('/', '.');
+ CACHE.put(className, SmapValue.of(node.sourceDebug));
+ }
+
+ public static String getSourceDebugInfo(String className) {
+ SmapValue cached = CACHE.get(className);
+ if (cached != null) {
+ return cached.value();
+ }
+
+ try {
+ IClassBytecodeProvider provider = MixinService.getService().getBytecodeProvider();
+ ClassNode classNode = provider.getClassNode(className.replace('.', '/'));
+
+ if (classNode != null) {
+ put(className, classNode);
+ return classNode.sourceDebug;
+ }
+
+ } catch (Exception e) {
+ // ignore - the class node is unavailable from the bytecode provider; fall through and cache NULL
+ }
+
+ CACHE.put(className, SmapValue.NULL);
+ return null;
+ }
+
+ private record SmapValue(String value) {
+ static final SmapValue NULL = new SmapValue(null);
+
+ static SmapValue of(String value) {
+ if (value == null) {
+ return NULL;
+ } else {
+ return new SmapValue(value);
+ }
+ }
+
+ }
+
+}
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/smap/SourceMap.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/smap/SourceMap.java
new file mode 100644
index 0000000..5105a26
--- /dev/null
+++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/smap/SourceMap.java
@@ -0,0 +1,133 @@
+/*
+ * SMAPSourceDebugExtension.java - Parse source debug extensions and
+ * enhance stack traces.
+ *
+ * Copyright (c) 2012 Michael Schierl
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * - Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * - Neither name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND THE CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ * HOLDERS OR THE CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+ * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
+ * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+ * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
+ * TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
+ * USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package me.lucko.spark.fabric.smap;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+/**
+ * Utility class to parse "SMAP" (source map) information from loaded Java classes.
+ *
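+ * <p>An illustrative (not real) SMAP payload in the shape this parser expects:
+ * <pre>
+ * SMAP
+ * Foo.java
+ * Mixin
+ * *S Mixin
+ * *F
+ * + 1 FooMixin.java
+ * org/example/mixin/FooMixin.java
+ * *L
+ * 10#1,3:100,2
+ * *E
+ * </pre>
+ *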
+ * @author <a href="https://stackoverflow.com/a/11299757">Michael Schierl</a>
+ */
+public class SourceMap {
+
+ private final String generatedFileName;
+ private final String firstStratum;
+ private final Map<Integer, FileInfo> fileinfo = new HashMap<>();
+ private final Map<Integer, int[]> reverseLineMapping = new HashMap<>();
+
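+ // JSR-45 line-section entry: InputStartLine[#LineFileID][,RepeatCount]:OutputStartLine[,OutputLineIncrement]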
+ private static final Pattern LINE_INFO_PATTERN = Pattern.compile("([0-9]+)(?:#([0-9]+))?(?:,([0-9]+))?:([0-9]+)(?:,([0-9]+))?");
+
+ public SourceMap(String value) {
+ String[] lines = value.split("\n");
+ if (!lines[0].equals("SMAP") || !lines[3].startsWith("*S ") || !lines[4].equals("*F")) {
+ throw new IllegalArgumentException(value);
+ }
+
+ this.generatedFileName = lines[1];
+ this.firstStratum = lines[3].substring(3);
+
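+ // parse the *F (file) section: "<id> <name>", where a "+ " prefix means a
+ // full path follows on the next line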
+ int idx = 5;
+ while (!lines[idx].startsWith("*")) {
+ String infoline = lines[idx++];
+ String path = null;
+
+ if (infoline.startsWith("+ ")) {
+ path = lines[idx++];
+ infoline = infoline.substring(2);
+ }
+
+ int pos = infoline.indexOf(" ");
+ int filenum = Integer.parseInt(infoline.substring(0, pos));
+ String name = infoline.substring(pos + 1);
+
+ this.fileinfo.put(filenum, new FileInfo(name, path == null ? name : path));
+ }
+
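+ // parse the optional *L (line) section into a reverse output -> input mapping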
+ if (lines[idx].equals("*L")) {
+ idx++;
+ int lastLFI = 0;
+
+ while (!lines[idx].startsWith("*")) {
+ Matcher m = LINE_INFO_PATTERN.matcher(lines[idx++]);
+ if (!m.matches()) {
+ throw new IllegalArgumentException(lines[idx - 1]);
+ }
+
+ int inputStartLine = Integer.parseInt(m.group(1));
+ int lineFileID = m.group(2) == null ? lastLFI : Integer.parseInt(m.group(2));
+ int repeatCount = m.group(3) == null ? 1 : Integer.parseInt(m.group(3));
+ int outputStartLine = Integer.parseInt(m.group(4));
+ int outputLineIncrement = m.group(5) == null ? 1 : Integer.parseInt(m.group(5));
+
+ for (int i = 0; i < repeatCount; i++) {
+ int[] inputMapping = new int[] { lineFileID, inputStartLine + i };
+ int baseOL = outputStartLine + i * outputLineIncrement;
+
+ for (int ol = baseOL; ol < baseOL + outputLineIncrement; ol++) {
+ if (!this.reverseLineMapping.containsKey(ol)) {
+ this.reverseLineMapping.put(ol, inputMapping);
+ }
+ }
+ }
+
+ lastLFI = lineFileID;
+ }
+ }
+ }
+
+ public String getGeneratedFileName() {
+ return this.generatedFileName;
+ }
+
+ public String getFirstStratum() {
+ return this.firstStratum;
+ }
+
+ public Map<Integer, FileInfo> getFileInfo() {
+ return this.fileinfo;
+ }
+
+ public Map<Integer, int[]> getReverseLineMapping() {
+ return this.reverseLineMapping;
+ }
+
+ public record FileInfo(String name, String path) { }
+}
\ No newline at end of file
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/smap/SourceMapProvider.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/smap/SourceMapProvider.java
new file mode 100644
index 0000000..1a4f246
--- /dev/null
+++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/smap/SourceMapProvider.java
@@ -0,0 +1,53 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.fabric.smap;
+
+import org.jetbrains.annotations.Nullable;
+
+import java.util.HashMap;
+import java.util.Map;
+
+public class SourceMapProvider {
+ private final Map<String, SourceMap> cache = new HashMap<>();
+
+ public @Nullable SourceMap getSourceMap(String className) {
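+ // null results are cached too, so containsKey (rather than a null check on
+ // get) distinguishes "never looked up" from "looked up, no source map"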
+ if (this.cache.containsKey(className)) {
+ return this.cache.get(className);
+ }
+
+ SourceMap smap = null;
+ try {
+ String value = SourceDebugCache.getSourceDebugInfo(className);
+ if (value != null) {
+ value = value.replaceAll("\r\n?", "\n");
+ if (value.startsWith("SMAP\n")) {
+ smap = new SourceMap(value);
+ }
+ }
+ } catch (Exception e) {
+ // ignore
+ }
+
+ this.cache.put(className, smap);
+ return smap;
+ }
+
+}
diff --git a/spark-fabric/src/main/resources/fabric.mod.json b/spark-fabric/src/main/resources/fabric.mod.json
index e2e600d..f1f0ad4 100644
--- a/spark-fabric/src/main/resources/fabric.mod.json
+++ b/spark-fabric/src/main/resources/fabric.mod.json
@@ -23,6 +23,9 @@
"me.lucko.spark.fabric.FabricSparkMod::initializeClient"
]
},
+ "mixins": [
+ "spark.mixins.json"
+ ],
"depends": {
"fabricloader": ">=0.4.0",
"fabric-api-base": "*",
diff --git a/spark-fabric/src/main/resources/spark.mixins.json b/spark-fabric/src/main/resources/spark.mixins.json
new file mode 100644
index 0000000..beaca2f
--- /dev/null
+++ b/spark-fabric/src/main/resources/spark.mixins.json
@@ -0,0 +1,15 @@
+{
+ "required": true,
+ "package": "me.lucko.spark.fabric.mixin",
+ "compatibilityLevel": "JAVA_17",
+ "client": [
+ "ClientEntityManagerAccessor",
+ "ClientWorldAccessor",
+ "MinecraftClientAccessor"
+ ],
+ "mixins": [
+ "ServerEntityManagerAccessor",
+ "ServerWorldAccessor"
+ ],
+ "plugin": "me.lucko.spark.fabric.plugin.FabricSparkMixinPlugin"
+}
\ No newline at end of file
diff --git a/spark-forge/build.gradle b/spark-forge/build.gradle
index 210122b..ef20815 100644
--- a/spark-forge/build.gradle
+++ b/spark-forge/build.gradle
@@ -20,7 +20,8 @@ tasks.withType(JavaCompile) {
}
minecraft {
- mappings channel: 'official', version: '1.19'
+ mappings channel: 'official', version: '1.19.3'
+ accessTransformer = file('src/main/resources/META-INF/accesstransformer.cfg')
}
configurations {
@@ -29,7 +30,7 @@ configurations {
}
dependencies {
- minecraft 'net.minecraftforge:forge:1.19-41.0.11'
+ minecraft 'net.minecraftforge:forge:1.19.3-44.0.4'
shade project(':spark-common')
}
@@ -51,12 +52,9 @@ shadowJar {
archiveName = "spark-${project.pluginVersion}-forge.jar"
configurations = [project.configurations.shade]
- relocate 'okio', 'me.lucko.spark.lib.okio'
- relocate 'okhttp3', 'me.lucko.spark.lib.okhttp3'
relocate 'net.kyori.adventure', 'me.lucko.spark.lib.adventure'
relocate 'net.kyori.examination', 'me.lucko.spark.lib.adventure.examination'
relocate 'net.bytebuddy', 'me.lucko.spark.lib.bytebuddy'
- relocate 'org.tukaani.xz', 'me.lucko.spark.lib.xz'
relocate 'com.google.protobuf', 'me.lucko.spark.lib.protobuf'
relocate 'org.objectweb.asm', 'me.lucko.spark.lib.asm'
relocate 'one.profiler', 'me.lucko.spark.lib.asyncprofiler'
diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeClassSourceLookup.java b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeClassSourceLookup.java
index 7900bc3..82d66ca 100644
--- a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeClassSourceLookup.java
+++ b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeClassSourceLookup.java
@@ -20,7 +20,7 @@
package me.lucko.spark.forge;
-import me.lucko.spark.common.util.ClassSourceLookup;
+import me.lucko.spark.common.sampler.source.ClassSourceLookup;
import cpw.mods.modlauncher.TransformingClassLoader;
diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeExtraMetadataProvider.java b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeExtraMetadataProvider.java
new file mode 100644
index 0000000..cac2771
--- /dev/null
+++ b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeExtraMetadataProvider.java
@@ -0,0 +1,75 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.forge;
+
+import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
+
+import me.lucko.spark.common.platform.MetadataProvider;
+
+import net.minecraft.server.packs.repository.Pack;
+import net.minecraft.server.packs.repository.PackRepository;
+import net.minecraft.server.packs.repository.PackSource;
+
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+public class ForgeExtraMetadataProvider implements MetadataProvider {
+
+ private final PackRepository resourcePackManager;
+
+ public ForgeExtraMetadataProvider(PackRepository resourcePackManager) {
+ this.resourcePackManager = resourcePackManager;
+ }
+
+ @Override
+ public Map<String, JsonElement> get() {
+ Map<String, JsonElement> metadata = new LinkedHashMap<>();
+ metadata.put("datapacks", datapackMetadata());
+ return metadata;
+ }
+
+ private JsonElement datapackMetadata() {
+ JsonObject datapacks = new JsonObject();
+ for (Pack profile : this.resourcePackManager.getSelectedPacks()) {
+ JsonObject obj = new JsonObject();
+ obj.addProperty("name", profile.getTitle().getString());
+ obj.addProperty("description", profile.getDescription().getString());
+ obj.addProperty("source", resourcePackSource(profile.getPackSource()));
+ datapacks.add(profile.getId(), obj);
+ }
+ return datapacks;
+ }
+
+ private static String resourcePackSource(PackSource source) {
+ if (source == PackSource.DEFAULT) {
+ return "none";
+ } else if (source == PackSource.BUILT_IN) {
+ return "builtin";
+ } else if (source == PackSource.WORLD) {
+ return "world";
+ } else if (source == PackSource.SERVER) {
+ return "server";
+ } else {
+ return "unknown";
+ }
+ }
+}
diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeServerConfigProvider.java b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeServerConfigProvider.java
new file mode 100644
index 0000000..6feba52
--- /dev/null
+++ b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeServerConfigProvider.java
@@ -0,0 +1,57 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.forge;
+
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.ImmutableSet;
+
+import me.lucko.spark.common.platform.serverconfig.ConfigParser;
+import me.lucko.spark.common.platform.serverconfig.PropertiesConfigParser;
+import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider;
+
+import java.util.Collection;
+import java.util.Map;
+
+public class ForgeServerConfigProvider extends ServerConfigProvider {
+
+ /** A map of provided config files and the parser used to read them */
+ private static final Map<String, ConfigParser> FILES;
+ /** A collection of paths to be excluded from the files */
+ private static final Collection<String> HIDDEN_PATHS;
+
+ public ForgeServerConfigProvider() {
+ super(FILES, HIDDEN_PATHS);
+ }
+
+ static {
+ ImmutableSet.Builder<String> hiddenPaths = ImmutableSet.<String>builder()
+ .add("server-ip")
+ .add("motd")
+ .add("resource-pack")
+ .add("rcon<dot>password")
+ .add("level-seed")
+ .addAll(getSystemPropertyList("spark.serverconfigs.hiddenpaths"));
+
+ FILES = ImmutableMap.of("server.properties", PropertiesConfigParser.INSTANCE);
+ HIDDEN_PATHS = hiddenPaths.build();
+ }
+
+}
diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeWorldInfoProvider.java b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeWorldInfoProvider.java
new file mode 100644
index 0000000..4750c08
--- /dev/null
+++ b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeWorldInfoProvider.java
@@ -0,0 +1,174 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.forge;
+
+import it.unimi.dsi.fastutil.longs.LongIterator;
+import it.unimi.dsi.fastutil.longs.LongSet;
+
+import me.lucko.spark.common.platform.world.AbstractChunkInfo;
+import me.lucko.spark.common.platform.world.CountMap;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
+
+import net.minecraft.client.Minecraft;
+import net.minecraft.client.multiplayer.ClientLevel;
+import net.minecraft.server.MinecraftServer;
+import net.minecraft.server.level.ServerLevel;
+import net.minecraft.world.entity.Entity;
+import net.minecraft.world.entity.EntityType;
+import net.minecraft.world.level.ChunkPos;
+import net.minecraft.world.level.entity.EntityLookup;
+import net.minecraft.world.level.entity.EntitySection;
+import net.minecraft.world.level.entity.EntitySectionStorage;
+import net.minecraft.world.level.entity.PersistentEntitySectionManager;
+import net.minecraft.world.level.entity.TransientEntitySectionManager;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.stream.Stream;
+
+public abstract class ForgeWorldInfoProvider implements WorldInfoProvider {
+
+ protected List<ForgeChunkInfo> getChunksFromCache(EntitySectionStorage<Entity> cache) {
+ LongSet loadedChunks = cache.getAllChunksWithExistingSections();
+ List<ForgeChunkInfo> list = new ArrayList<>(loadedChunks.size());
+
+ for (LongIterator iterator = loadedChunks.iterator(); iterator.hasNext(); ) {
+ long chunkPos = iterator.nextLong();
+ Stream<EntitySection<Entity>> sections = cache.getExistingSectionsInChunk(chunkPos);
+
+ list.add(new ForgeChunkInfo(chunkPos, sections));
+ }
+
+ return list;
+ }
+
+ public static final class Server extends ForgeWorldInfoProvider {
+ private final MinecraftServer server;
+
+ public Server(MinecraftServer server) {
+ this.server = server;
+ }
+
+ @Override
+ public CountsResult pollCounts() {
+ int players = this.server.getPlayerCount();
+ int entities = 0;
+ int chunks = 0;
+
+ for (ServerLevel level : this.server.getAllLevels()) {
+ PersistentEntitySectionManager<Entity> entityManager = level.entityManager;
+ EntityLookup<Entity> entityIndex = entityManager.visibleEntityStorage;
+
+ entities += entityIndex.count();
+ chunks += level.getChunkSource().getLoadedChunksCount();
+ }
+
+ return new CountsResult(players, entities, -1, chunks);
+ }
+
+ @Override
+ public ChunksResult<ForgeChunkInfo> pollChunks() {
+ ChunksResult<ForgeChunkInfo> data = new ChunksResult<>();
+
+ for (ServerLevel level : this.server.getAllLevels()) {
+ PersistentEntitySectionManager<Entity> entityManager = level.entityManager;
+ EntitySectionStorage<Entity> cache = entityManager.sectionStorage;
+
+ List<ForgeChunkInfo> list = getChunksFromCache(cache);
+ data.put(level.dimension().location().getPath(), list);
+ }
+
+ return data;
+ }
+ }
+
+ public static final class Client extends ForgeWorldInfoProvider {
+ private final Minecraft client;
+
+ public Client(Minecraft client) {
+ this.client = client;
+ }
+
+ @Override
+ public CountsResult pollCounts() {
+ ClientLevel level = this.client.level;
+ if (level == null) {
+ return null;
+ }
+
+ TransientEntitySectionManager<Entity> entityManager = level.entityStorage;
+ EntityLookup<Entity> entityIndex = entityManager.entityStorage;
+
+ int entities = entityIndex.count();
+ int chunks = level.getChunkSource().getLoadedChunksCount();
+
+ return new CountsResult(-1, entities, -1, chunks);
+ }
+
+ @Override
+ public ChunksResult<ForgeChunkInfo> pollChunks() {
+ ChunksResult<ForgeChunkInfo> data = new ChunksResult<>();
+
+ ClientLevel level = this.client.level;
+ if (level == null) {
+ return null;
+ }
+
+ TransientEntitySectionManager<Entity> entityManager = level.entityStorage;
+ EntitySectionStorage<Entity> cache = entityManager.sectionStorage;
+
+ List<ForgeChunkInfo> list = getChunksFromCache(cache);
+ data.put(level.dimension().location().getPath(), list);
+
+ return data;
+ }
+ }
+
+ static final class ForgeChunkInfo extends AbstractChunkInfo<EntityType<?>> {
+ private final CountMap<EntityType<?>> entityCounts;
+
+ ForgeChunkInfo(long chunkPos, Stream<EntitySection<Entity>> entities) {
+ super(ChunkPos.getX(chunkPos), ChunkPos.getZ(chunkPos));
+
+ this.entityCounts = new CountMap.Simple<>(new HashMap<>());
+ entities.forEach(section -> {
+ if (section.getStatus().isAccessible()) {
+ section.getEntities().forEach(entity ->
+ this.entityCounts.increment(entity.getType())
+ );
+ }
+ });
+ }
+
+ @Override
+ public CountMap<EntityType<?>> getEntityCounts() {
+ return this.entityCounts;
+ }
+
+ @Override
+ public String entityTypeName(EntityType<?> type) {
+ return EntityType.getKey(type).toString();
+ }
+ }
+
+}
diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeClientSparkPlugin.java b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeClientSparkPlugin.java
index cf5c89b..a8c7c92 100644
--- a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeClientSparkPlugin.java
+++ b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeClientSparkPlugin.java
@@ -27,14 +27,19 @@ import com.mojang.brigadier.suggestion.SuggestionProvider;
import com.mojang.brigadier.suggestion.Suggestions;
import com.mojang.brigadier.suggestion.SuggestionsBuilder;
+import me.lucko.spark.common.platform.MetadataProvider;
import me.lucko.spark.common.platform.PlatformInfo;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
+import me.lucko.spark.common.sampler.ThreadDumper;
import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.common.tick.TickReporter;
import me.lucko.spark.forge.ForgeCommandSender;
+import me.lucko.spark.forge.ForgeExtraMetadataProvider;
import me.lucko.spark.forge.ForgePlatformInfo;
import me.lucko.spark.forge.ForgeSparkMod;
import me.lucko.spark.forge.ForgeTickHook;
import me.lucko.spark.forge.ForgeTickReporter;
+import me.lucko.spark.forge.ForgeWorldInfoProvider;
import net.minecraft.client.Minecraft;
import net.minecraft.commands.CommandSource;
@@ -56,10 +61,12 @@ public class ForgeClientSparkPlugin extends ForgeSparkPlugin implements Command<
}
private final Minecraft minecraft;
+ private final ThreadDumper gameThreadDumper;
public ForgeClientSparkPlugin(ForgeSparkMod mod, Minecraft minecraft) {
super(mod);
this.minecraft = minecraft;
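+ // Minecraft.gameThread is not public by default; the accesstransformer.cfg entry (f_91018_) added in this PR exposes it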
+ this.gameThreadDumper = new ThreadDumper.Specific(minecraft.gameThread);
}
@Override
@@ -82,7 +89,6 @@ public class ForgeClientSparkPlugin extends ForgeSparkPlugin implements Command<
return 0;
}
- this.threadDumper.ensureSetup();
this.platform.executeCommand(new ForgeCommandSender(context.getSource().getEntity(), this), args);
return Command.SINGLE_SUCCESS;
}
@@ -108,6 +114,16 @@ public class ForgeClientSparkPlugin extends ForgeSparkPlugin implements Command<
}
@Override
+ public void executeSync(Runnable task) {
+ this.minecraft.executeIfPossible(task);
+ }
+
+ @Override
+ public ThreadDumper getDefaultThreadDumper() {
+ return this.gameThreadDumper;
+ }
+
+ @Override
public TickHook createTickHook() {
return new ForgeTickHook(TickEvent.Type.CLIENT);
}
@@ -118,6 +134,16 @@ public class ForgeClientSparkPlugin extends ForgeSparkPlugin implements Command<
}
@Override
+ public WorldInfoProvider createWorldInfoProvider() {
+ return new ForgeWorldInfoProvider.Client(this.minecraft);
+ }
+
+ @Override
+ public MetadataProvider createExtraMetadataProvider() {
+ return new ForgeExtraMetadataProvider(this.minecraft.getResourcePackRepository());
+ }
+
+ @Override
public PlatformInfo getPlatformInfo() {
return new ForgePlatformInfo(PlatformInfo.Type.CLIENT);
}
diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java
index e341d6f..8737057 100644
--- a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java
+++ b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java
@@ -30,15 +30,22 @@ import com.mojang.brigadier.suggestion.Suggestions;
import com.mojang.brigadier.suggestion.SuggestionsBuilder;
import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
+import me.lucko.spark.common.platform.MetadataProvider;
import me.lucko.spark.common.platform.PlatformInfo;
+import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
+import me.lucko.spark.common.sampler.ThreadDumper;
import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.common.tick.TickReporter;
import me.lucko.spark.forge.ForgeCommandSender;
+import me.lucko.spark.forge.ForgeExtraMetadataProvider;
import me.lucko.spark.forge.ForgePlatformInfo;
import me.lucko.spark.forge.ForgePlayerPingProvider;
+import me.lucko.spark.forge.ForgeServerConfigProvider;
import me.lucko.spark.forge.ForgeSparkMod;
import me.lucko.spark.forge.ForgeTickHook;
import me.lucko.spark.forge.ForgeTickReporter;
+import me.lucko.spark.forge.ForgeWorldInfoProvider;
import net.minecraft.commands.CommandSource;
import net.minecraft.commands.CommandSourceStack;
@@ -60,6 +67,7 @@ import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
+import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
@@ -70,12 +78,27 @@ public class ForgeServerSparkPlugin extends ForgeSparkPlugin implements Command<
plugin.enable();
}
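+ // default permission check: allow the singleplayer/LAN owner, otherwise require permission level 4 (op)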
+ private static final PermissionResolver<Boolean> DEFAULT_PERMISSION_VALUE = (player, playerUUID, context) -> {
+ if (player == null) {
+ return false;
+ }
+
+ MinecraftServer server = player.getServer();
+ if (server != null && server.isSingleplayerOwner(player.getGameProfile())) {
+ return true;
+ }
+
+ return player.hasPermissions(4);
+ };
+
private final MinecraftServer server;
+ private final ThreadDumper gameThreadDumper;
private Map<String, PermissionNode<Boolean>> registeredPermissions = Collections.emptyMap();
public ForgeServerSparkPlugin(ForgeSparkMod mod, MinecraftServer server) {
super(mod);
this.server = server;
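+ // the server thread is known at construction time, so a ThreadDumper.Specific can be used directly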
+ this.gameThreadDumper = new ThreadDumper.Specific(server.getRunningThread());
}
@Override
@@ -106,8 +129,6 @@ public class ForgeServerSparkPlugin extends ForgeSparkPlugin implements Command<
@SubscribeEvent
public void onPermissionGather(PermissionGatherEvent.Nodes e) {
- PermissionResolver<Boolean> defaultValue = (player, playerUUID, context) -> player != null && player.hasPermissions(4);
-
// collect all possible permissions
List<String> permissions = this.platform.getCommands().stream()
.map(me.lucko.spark.common.command.Command::primaryAlias)
@@ -118,10 +139,24 @@ public class ForgeServerSparkPlugin extends ForgeSparkPlugin implements Command<
// register permissions with forge & keep a copy for lookup
ImmutableMap.Builder<String, PermissionNode<Boolean>> builder = ImmutableMap.builder();
+
+ Map<String, PermissionNode<?>> alreadyRegistered = e.getNodes().stream().collect(Collectors.toMap(PermissionNode::getNodeName, Function.identity()));
+
for (String permission : permissions) {
- PermissionNode<Boolean> node = new PermissionNode<>("spark", permission, PermissionTypes.BOOLEAN, defaultValue);
+ String permissionString = "spark." + permission;
+
+ // this listener can apparently be fired twice; registering the same nodes again would make
+ // the second call to e.addNodes throw an IllegalArgumentException, so skip duplicates.
+ PermissionNode<?> existing = alreadyRegistered.get(permissionString);
+ if (existing != null) {
+ //noinspection unchecked
+ builder.put(permissionString, (PermissionNode<Boolean>) existing);
+ continue;
+ }
+
+ PermissionNode<Boolean> node = new PermissionNode<>("spark", permission, PermissionTypes.BOOLEAN, DEFAULT_PERMISSION_VALUE);
e.addNodes(node);
- builder.put("spark." + permission, node);
+ builder.put(permissionString, node);
}
this.registeredPermissions = builder.build();
}
@@ -142,7 +177,6 @@ public class ForgeServerSparkPlugin extends ForgeSparkPlugin implements Command<
return 0;
}
- this.threadDumper.ensureSetup();
CommandSource source = context.getSource().getEntity() != null ? context.getSource().getEntity() : context.getSource().getServer();
this.platform.executeCommand(new ForgeCommandSender(source, this), args);
return Command.SINGLE_SUCCESS;
@@ -184,6 +218,16 @@ public class ForgeServerSparkPlugin extends ForgeSparkPlugin implements Command<
}
@Override
+ public void executeSync(Runnable task) {
+ this.server.executeIfPossible(task);
+ }
+
+ @Override
+ public ThreadDumper getDefaultThreadDumper() {
+ return this.gameThreadDumper;
+ }
+
+ @Override
public TickHook createTickHook() {
return new ForgeTickHook(TickEvent.Type.SERVER);
}
@@ -199,6 +243,21 @@ public class ForgeServerSparkPlugin extends ForgeSparkPlugin implements Command<
}
@Override
+ public ServerConfigProvider createServerConfigProvider() {
+ return new ForgeServerConfigProvider();
+ }
+
+ @Override
+ public MetadataProvider createExtraMetadataProvider() {
+ return new ForgeExtraMetadataProvider(this.server.getPackRepository());
+ }
+
+ @Override
+ public WorldInfoProvider createWorldInfoProvider() {
+ return new ForgeWorldInfoProvider.Server(this.server);
+ }
+
+ @Override
public PlatformInfo getPlatformInfo() {
return new ForgePlatformInfo(PlatformInfo.Type.SERVER);
}
diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeSparkPlugin.java b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeSparkPlugin.java
index f257e34..56061b9 100644
--- a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeSparkPlugin.java
+++ b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeSparkPlugin.java
@@ -34,19 +34,22 @@ import com.mojang.brigadier.tree.LiteralCommandNode;
import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.SparkPlugin;
import me.lucko.spark.common.command.sender.CommandSender;
-import me.lucko.spark.common.sampler.ThreadDumper;
-import me.lucko.spark.common.util.ClassSourceLookup;
+import me.lucko.spark.common.sampler.source.ClassSourceLookup;
+import me.lucko.spark.common.sampler.source.SourceMetadata;
import me.lucko.spark.common.util.SparkThreadFactory;
import me.lucko.spark.forge.ForgeClassSourceLookup;
import me.lucko.spark.forge.ForgeSparkMod;
import net.minecraft.commands.CommandSource;
+import net.minecraftforge.fml.ModList;
+import net.minecraftforge.forgespi.language.IModInfo;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.nio.file.Path;
import java.util.Arrays;
+import java.util.Collection;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
@@ -59,7 +62,6 @@ public abstract class ForgeSparkPlugin implements SparkPlugin {
protected final ScheduledExecutorService scheduler;
protected SparkPlatform platform;
- protected final ThreadDumper.GameThread threadDumper = new ThreadDumper.GameThread();
protected ForgeSparkPlugin(ForgeSparkMod mod) {
this.mod = mod;
@@ -108,13 +110,18 @@ public abstract class ForgeSparkPlugin implements SparkPlugin {
}
@Override
- public ThreadDumper getDefaultThreadDumper() {
- return this.threadDumper.get();
+ public ClassSourceLookup createClassSourceLookup() {
+ return new ForgeClassSourceLookup();
}
@Override
- public ClassSourceLookup createClassSourceLookup() {
- return new ForgeClassSourceLookup();
+ public Collection<SourceMetadata> getKnownSources() {
+ return SourceMetadata.gather(
+ ModList.get().getMods(),
+ IModInfo::getModId,
+ mod -> mod.getVersion().toString(),
+ mod -> null // author metadata is not directly exposed by IModInfo
+ );
}
protected CompletableFuture<Suggestions> generateSuggestions(CommandSender sender, String[] args, SuggestionsBuilder builder) {
diff --git a/spark-forge/src/main/resources/META-INF/accesstransformer.cfg b/spark-forge/src/main/resources/META-INF/accesstransformer.cfg
new file mode 100644
index 0000000..2699a0e
--- /dev/null
+++ b/spark-forge/src/main/resources/META-INF/accesstransformer.cfg
@@ -0,0 +1,7 @@
+public net.minecraft.server.level.ServerLevel f_143244_ # entityManager
+public net.minecraft.world.level.entity.PersistentEntitySectionManager f_157495_ # sectionStorage
+public net.minecraft.world.level.entity.PersistentEntitySectionManager f_157494_ # visibleEntityStorage
+public net.minecraft.client.multiplayer.ClientLevel f_171631_ # entityStorage
+public net.minecraft.world.level.entity.TransientEntitySectionManager f_157638_ # sectionStorage
+public net.minecraft.world.level.entity.TransientEntitySectionManager f_157637_ # entityStorage
+public net.minecraft.client.Minecraft f_91018_ # gameThread
diff --git a/spark-forge1710/build.gradle b/spark-forge1710/build.gradle
index 4b4f4e5..8fd166d 100644
--- a/spark-forge1710/build.gradle
+++ b/spark-forge1710/build.gradle
@@ -55,6 +55,12 @@ processResources {
}
}
+jar {
+ manifest {
+ attributes 'FMLAT': 'spark_at.cfg'
+ }
+}
+
shadowJar {
archiveName = 'spark-forge1710.jar'
configurations = [project.configurations.shade]
diff --git a/spark-forge1710/src/main/java/me/lucko/spark/forge/Forge1710PlayerPingProvider.java b/spark-forge1710/src/main/java/me/lucko/spark/forge/Forge1710PlayerPingProvider.java
new file mode 100644
index 0000000..11ee45a
--- /dev/null
+++ b/spark-forge1710/src/main/java/me/lucko/spark/forge/Forge1710PlayerPingProvider.java
@@ -0,0 +1,48 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.forge;
+
+import com.google.common.collect.ImmutableMap;
+
+import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
+
+import net.minecraft.entity.player.EntityPlayerMP;
+import net.minecraft.server.MinecraftServer;
+
+import java.util.List;
+import java.util.Map;
+
+public class Forge1710PlayerPingProvider implements PlayerPingProvider {
+ private final MinecraftServer server;
+
+ public Forge1710PlayerPingProvider(MinecraftServer server) {
+ this.server = server;
+ }
+
+ @Override
+ public Map<String, Integer> poll() {
+ ImmutableMap.Builder<String, Integer> builder = ImmutableMap.builder();
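+ // playerEntityList is declared as a raw List in 1.7.10, hence the unchecked cast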
+ for (EntityPlayerMP player : (List<EntityPlayerMP>) this.server.getConfigurationManager().playerEntityList) {
+ builder.put(player.getGameProfile().getName(), player.ping);
+ }
+ return builder.build();
+ }
+}
diff --git a/spark-forge1710/src/main/java/me/lucko/spark/forge/Forge1710WorldInfoProvider.java b/spark-forge1710/src/main/java/me/lucko/spark/forge/Forge1710WorldInfoProvider.java
new file mode 100644
index 0000000..7252d71
--- /dev/null
+++ b/spark-forge1710/src/main/java/me/lucko/spark/forge/Forge1710WorldInfoProvider.java
@@ -0,0 +1,150 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.forge;
+
+import cpw.mods.fml.relauncher.ReflectionHelper;
+import cpw.mods.fml.relauncher.Side;
+import cpw.mods.fml.relauncher.SideOnly;
+import me.lucko.spark.common.platform.world.AbstractChunkInfo;
+import me.lucko.spark.common.platform.world.CountMap;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
+
+import net.minecraft.client.Minecraft;
+import net.minecraft.client.multiplayer.ChunkProviderClient;
+import net.minecraft.client.multiplayer.WorldClient;
+import net.minecraft.entity.Entity;
+import net.minecraft.entity.EntityList;
+import net.minecraft.server.MinecraftServer;
+import net.minecraft.world.WorldServer;
+import net.minecraft.world.chunk.Chunk;
+import net.minecraft.world.chunk.IChunkProvider;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+
+public abstract class Forge1710WorldInfoProvider implements WorldInfoProvider {
+ public static final class Server extends Forge1710WorldInfoProvider {
+ private final MinecraftServer server;
+
+ public Server(MinecraftServer server) {
+ this.server = server;
+ }
+
+ @Override
+ public ChunksResult<ForgeChunkInfo> pollChunks() {
+ ChunksResult<ForgeChunkInfo> data = new ChunksResult<>();
+
+ for (WorldServer level : this.server.worldServers) {
+ ArrayList<ForgeChunkInfo> list = new ArrayList<>();
+ for (Chunk chunk : (List<Chunk>) level.theChunkProviderServer.loadedChunks) {
+ list.add(new ForgeChunkInfo(chunk));
+ }
+ data.put(level.provider.getDimensionName(), list);
+ }
+
+ return data;
+ }
+
+ @Override
+ public CountsResult pollCounts() {
+ int players = this.server.getCurrentPlayerCount();
+ int entities = 0;
+ int chunks = 0;
+
+ for (WorldServer level : this.server.worldServers) {
+ entities += level.loadedEntityList.size();
+ chunks += level.getChunkProvider().getLoadedChunkCount();
+ }
+
+ return new CountsResult(players, entities, -1, chunks);
+ }
+ }
+
+ @SideOnly(Side.CLIENT)
+ public static final class Client extends Forge1710WorldInfoProvider {
+ private final Minecraft client;
+
+ public Client(Minecraft client) {
+ this.client = client;
+ }
+
+ @Override
+ public ChunksResult<ForgeChunkInfo> pollChunks() {
+ ChunksResult<ForgeChunkInfo> data = new ChunksResult<>();
+
+ WorldClient level = this.client.theWorld;
+ if (level == null) {
+ return null;
+ }
+
+ ArrayList<ForgeChunkInfo> list = new ArrayList<>();
+ IChunkProvider provider = level.getChunkProvider();
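+ // ChunkProviderClient keeps its loaded chunks in a private field; read it via FML's ReflectionHelper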
+ if (provider instanceof ChunkProviderClient) {
+ List<Chunk> chunks = ReflectionHelper.getPrivateValue(ChunkProviderClient.class, (ChunkProviderClient) provider, "chunkMapping", "field_73236_b");
+ for (Chunk chunk : chunks) {
+ list.add(new ForgeChunkInfo(chunk));
+ }
+ }
+
+ data.put(level.provider.getDimensionName(), list);
+
+ return data;
+ }
+
+ @Override
+ public CountsResult pollCounts() {
+ WorldClient level = this.client.theWorld;
+ if (level == null) {
+ return null;
+ }
+
+ return new CountsResult(-1, level.loadedEntityList.size(), -1, level.getChunkProvider().getLoadedChunkCount());
+ }
+ }
+
+ static final class ForgeChunkInfo extends AbstractChunkInfo<Class<? extends Entity>> {
+ private final CountMap<Class<? extends Entity>> entityCounts;
+
+ ForgeChunkInfo(Chunk chunk) {
+ super(chunk.xPosition, chunk.zPosition);
+
+ this.entityCounts = new CountMap.Simple<>(new HashMap<>());
+ for (List<Entity> entityList : chunk.entityLists) {
+ entityList.forEach(entity -> {
+ this.entityCounts.increment(entity.getClass());
+ });
+ }
+ }
+
+ @Override
+ public CountMap<Class<? extends Entity>> getEntityCounts() {
+ return this.entityCounts;
+ }
+
+ @Override
+ public String entityTypeName(Class<? extends Entity> type) {
+ return (String)EntityList.classToStringMapping.get(type);
+ }
+ }
+
+}
diff --git a/spark-forge1710/src/main/java/me/lucko/spark/forge/plugin/Forge1710ClientSparkPlugin.java b/spark-forge1710/src/main/java/me/lucko/spark/forge/plugin/Forge1710ClientSparkPlugin.java
index 305b06e..446a0c9 100644
--- a/spark-forge1710/src/main/java/me/lucko/spark/forge/plugin/Forge1710ClientSparkPlugin.java
+++ b/spark-forge1710/src/main/java/me/lucko/spark/forge/plugin/Forge1710ClientSparkPlugin.java
@@ -22,6 +22,8 @@ package me.lucko.spark.forge.plugin;
import cpw.mods.fml.common.gameevent.TickEvent;
import me.lucko.spark.common.platform.PlatformInfo;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
+import me.lucko.spark.common.sampler.ThreadDumper;
import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.common.tick.TickReporter;
import me.lucko.spark.forge.*;
@@ -46,10 +48,17 @@ public class Forge1710ClientSparkPlugin extends Forge1710SparkPlugin {
}
private final Minecraft minecraft;
+ private final ThreadDumper gameThreadDumper;
public Forge1710ClientSparkPlugin(Forge1710SparkMod mod, Minecraft minecraft) {
super(mod);
this.minecraft = minecraft;
+ this.gameThreadDumper = new ThreadDumper.Specific(minecraft.mcThread);
+ }
+
+ @Override
+ public ThreadDumper getDefaultThreadDumper() {
+ return this.gameThreadDumper;
}
@Override
@@ -73,6 +82,16 @@ public class Forge1710ClientSparkPlugin extends Forge1710SparkPlugin {
}
@Override
+ public WorldInfoProvider createWorldInfoProvider() {
+ return new Forge1710WorldInfoProvider.Client(Minecraft.getMinecraft());
+ }
+
+ @Override
+ public void executeSync(Runnable task) {
+ this.minecraft.addScheduledTask(task);
+ }
+
+ @Override
public PlatformInfo getPlatformInfo() {
return new Forge1710PlatformInfo(PlatformInfo.Type.CLIENT);
}
diff --git a/spark-forge1710/src/main/java/me/lucko/spark/forge/plugin/Forge1710ServerSparkPlugin.java b/spark-forge1710/src/main/java/me/lucko/spark/forge/plugin/Forge1710ServerSparkPlugin.java
index 303739f..db9f299 100644
--- a/spark-forge1710/src/main/java/me/lucko/spark/forge/plugin/Forge1710ServerSparkPlugin.java
+++ b/spark-forge1710/src/main/java/me/lucko/spark/forge/plugin/Forge1710ServerSparkPlugin.java
@@ -20,9 +20,14 @@
package me.lucko.spark.forge.plugin;
+import com.google.common.collect.Queues;
+import cpw.mods.fml.common.FMLCommonHandler;
import cpw.mods.fml.common.event.FMLServerStartingEvent;
+import cpw.mods.fml.common.eventhandler.SubscribeEvent;
import cpw.mods.fml.common.gameevent.TickEvent;
import me.lucko.spark.common.platform.PlatformInfo;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
+import me.lucko.spark.common.sampler.ThreadDumper;
import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.common.tick.TickReporter;
import me.lucko.spark.forge.*;
@@ -32,32 +37,66 @@ import net.minecraft.entity.player.EntityPlayerMP;
import net.minecraft.server.MinecraftServer;
import java.util.List;
+import java.util.Queue;
+import java.util.concurrent.FutureTask;
import java.util.stream.Stream;
public class Forge1710ServerSparkPlugin extends Forge1710SparkPlugin {
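+ // tasks submitted via executeSync(), drained on the server thread at the start of each tick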
+ private final Queue<Runnable> scheduledServerTasks = Queues.newArrayDeque();
+
+ private final ThreadDumper.GameThread gameThreadDumper;
public static Forge1710ServerSparkPlugin register(Forge1710SparkMod mod, FMLServerStartingEvent event) {
Forge1710ServerSparkPlugin plugin = new Forge1710ServerSparkPlugin(mod, event.getServer());
plugin.enable();
+ FMLCommonHandler.instance().bus().register(plugin);
+
// register commands & permissions
event.registerServerCommand(plugin);
return plugin;
}
+ @SubscribeEvent
+ public void onServerTickStart(TickEvent.ServerTickEvent event) {
+ if (event.phase == TickEvent.Phase.START) {
+ synchronized (scheduledServerTasks) {
+ while (!scheduledServerTasks.isEmpty()) {
+ scheduledServerTasks.poll().run();
+ }
+ }
+ }
+ }
+
private final MinecraftServer server;
public Forge1710ServerSparkPlugin(Forge1710SparkMod mod, MinecraftServer server) {
super(mod);
this.server = server;
+ this.gameThreadDumper = new ThreadDumper.GameThread();
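+ // 1.7.10 offers no direct handle to the server thread here, so capture it from the first queued task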
+ this.scheduledServerTasks.add(() -> this.gameThreadDumper.setThread(Thread.currentThread()));
+ }
+
+ @Override
+ public ThreadDumper getDefaultThreadDumper() {
+ return this.gameThreadDumper.get();
}
@Override
public boolean hasPermission(ICommandSender sender, String permission) {
if (sender instanceof EntityPlayerMP) {
EntityPlayerMP player = (EntityPlayerMP)sender;
- return isOp(player) || player.mcServer.getServerOwner().equals(player.getGameProfile().getName());
+ if (isOp(player)) {
+ return true;
+ }
+ String serverOwner = MinecraftServer.getServer().getServerOwner();
+ return serverOwner != null && serverOwner.equals(player.getGameProfile().getName());
} else {
return true;
}
@@ -82,6 +121,18 @@ public class Forge1710ServerSparkPlugin extends Forge1710SparkPlugin {
}
@Override
+ public WorldInfoProvider createWorldInfoProvider() {
+ return new Forge1710WorldInfoProvider.Server(FMLCommonHandler.instance().getMinecraftServerInstance());
+ }
+
+ @Override
+ public void executeSync(Runnable task) {
+ synchronized (scheduledServerTasks) {
+ scheduledServerTasks.add(task);
+ }
+ }
+
+ @Override
public PlatformInfo getPlatformInfo() {
return new Forge1710PlatformInfo(PlatformInfo.Type.SERVER);
}
diff --git a/spark-forge1710/src/main/java/me/lucko/spark/forge/plugin/Forge1710SparkPlugin.java b/spark-forge1710/src/main/java/me/lucko/spark/forge/plugin/Forge1710SparkPlugin.java
index b67659f..26e1752 100644
--- a/spark-forge1710/src/main/java/me/lucko/spark/forge/plugin/Forge1710SparkPlugin.java
+++ b/spark-forge1710/src/main/java/me/lucko/spark/forge/plugin/Forge1710SparkPlugin.java
@@ -23,7 +23,6 @@ package me.lucko.spark.forge.plugin;
import cpw.mods.fml.common.FMLCommonHandler;
import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.SparkPlugin;
-import me.lucko.spark.common.sampler.ThreadDumper;
import me.lucko.spark.forge.Forge1710CommandSender;
import me.lucko.spark.forge.Forge1710SparkMod;
import net.minecraft.command.ICommand;
@@ -45,7 +44,6 @@ public abstract class Forge1710SparkPlugin implements SparkPlugin, ICommand {
private final Logger logger;
protected final ScheduledExecutorService scheduler;
protected final SparkPlatform platform;
- protected final ThreadDumper.GameThread threadDumper = new ThreadDumper.GameThread();
protected Forge1710SparkPlugin(Forge1710SparkMod mod) {
this.mod = mod;
@@ -98,11 +96,6 @@ public abstract class Forge1710SparkPlugin implements SparkPlugin, ICommand {
}
}
- @Override
- public ThreadDumper getDefaultThreadDumper() {
- return this.threadDumper.get();
- }
-
// implement ICommand
@Override
@@ -122,7 +115,6 @@ public abstract class Forge1710SparkPlugin implements SparkPlugin, ICommand {
@Override
public void processCommand(ICommandSender sender, String[] args) {
- this.threadDumper.ensureSetup();
this.platform.executeCommand(new Forge1710CommandSender(sender, this), args);
}
diff --git a/spark-forge1710/src/main/resources/META-INF/spark_at.cfg b/spark-forge1710/src/main/resources/META-INF/spark_at.cfg
new file mode 100644
index 0000000..ff29bd3
--- /dev/null
+++ b/spark-forge1710/src/main/resources/META-INF/spark_at.cfg
@@ -0,0 +1 @@
+public net.minecraft.client.Minecraft field_152352_aC # mcThread
\ No newline at end of file
diff --git a/spark-minestom/build.gradle b/spark-minestom/build.gradle
index 26cdc2c..d0b6928 100644
--- a/spark-minestom/build.gradle
+++ b/spark-minestom/build.gradle
@@ -9,7 +9,7 @@ tasks.withType(JavaCompile) {
dependencies {
implementation project(':spark-common')
- compileOnly 'com.github.Minestom:Minestom:367c389bc6'
+ compileOnly 'com.github.Minestom:Minestom:1a013728fd'
implementation 'com.google.guava:guava:19.0'
}
@@ -30,11 +30,8 @@ shadowJar {
exclude(dependency('net.kyori:^(?!adventure-text-feature-pagination).+$'))
}
- relocate 'okio', 'me.lucko.spark.lib.okio'
- relocate 'okhttp3', 'me.lucko.spark.lib.okhttp3'
relocate 'net.kyori.adventure.text.feature.pagination', 'me.lucko.spark.lib.adventure.pagination'
relocate 'net.bytebuddy', 'me.lucko.spark.lib.bytebuddy'
- relocate 'org.tukaani.xz', 'me.lucko.spark.lib.xz'
relocate 'com.google.protobuf', 'me.lucko.spark.lib.protobuf'
relocate 'org.objectweb.asm', 'me.lucko.spark.lib.asm'
relocate 'one.profiler', 'me.lucko.spark.lib.asyncprofiler'
diff --git a/spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomClassSourceLookup.java b/spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomClassSourceLookup.java
index 252060e..ca44eea 100644
--- a/spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomClassSourceLookup.java
+++ b/spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomClassSourceLookup.java
@@ -20,7 +20,7 @@
package me.lucko.spark.minestom;
-import me.lucko.spark.common.util.ClassSourceLookup;
+import me.lucko.spark.common.sampler.source.ClassSourceLookup;
import net.minestom.server.MinecraftServer;
import net.minestom.server.extensions.Extension;
diff --git a/spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomSparkPlugin.java b/spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomSparkPlugin.java
index 2b43cae..9014476 100644
--- a/spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomSparkPlugin.java
+++ b/spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomSparkPlugin.java
@@ -24,9 +24,10 @@ import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.SparkPlugin;
import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
import me.lucko.spark.common.platform.PlatformInfo;
+import me.lucko.spark.common.sampler.source.ClassSourceLookup;
+import me.lucko.spark.common.sampler.source.SourceMetadata;
import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.common.tick.TickReporter;
-import me.lucko.spark.common.util.ClassSourceLookup;
import net.minestom.server.MinecraftServer;
import net.minestom.server.command.CommandSender;
@@ -45,6 +46,7 @@ import org.jetbrains.annotations.NotNull;
import java.nio.file.Path;
import java.util.Arrays;
+import java.util.Collection;
import java.util.logging.Level;
import java.util.stream.Stream;
@@ -118,6 +120,16 @@ public class MinestomSparkPlugin extends Extension implements SparkPlugin {
}
@Override
+ public Collection<SourceMetadata> getKnownSources() {
+ return SourceMetadata.gather(
+ MinecraftServer.getExtensionManager().getExtensions(),
+ extension -> extension.getOrigin().getName(),
+ extension -> extension.getOrigin().getVersion(),
+ extension -> String.join(", ", extension.getOrigin().getAuthors())
+ );
+ }
+
+ @Override
public PlayerPingProvider createPlayerPingProvider() {
return new MinestomPlayerPingProvider();
}
diff --git a/spark-nukkit/build.gradle b/spark-nukkit/build.gradle
index 2e1ad55..9efb653 100644
--- a/spark-nukkit/build.gradle
+++ b/spark-nukkit/build.gradle
@@ -4,7 +4,7 @@ plugins {
dependencies {
implementation project(':spark-common')
- implementation 'net.kyori:adventure-text-serializer-legacy:4.4.0'
+ implementation 'net.kyori:adventure-text-serializer-legacy:4.12.0'
compileOnly 'cn.nukkit:nukkit:1.0-SNAPSHOT'
}
@@ -25,11 +25,8 @@ processResources {
shadowJar {
archiveName = "spark-${project.pluginVersion}-nukkit.jar"
- relocate 'okio', 'me.lucko.spark.lib.okio'
- relocate 'okhttp3', 'me.lucko.spark.lib.okhttp3'
relocate 'net.kyori.adventure', 'me.lucko.spark.lib.adventure'
relocate 'net.kyori.examination', 'me.lucko.spark.lib.adventure.examination'
- relocate 'org.tukaani.xz', 'me.lucko.spark.lib.xz'
relocate 'net.bytebuddy', 'me.lucko.spark.lib.bytebuddy'
relocate 'com.google.protobuf', 'me.lucko.spark.lib.protobuf'
relocate 'org.objectweb.asm', 'me.lucko.spark.lib.asm'
diff --git a/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitClassSourceLookup.java b/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitClassSourceLookup.java
index 4fed396..180e0af 100644
--- a/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitClassSourceLookup.java
+++ b/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitClassSourceLookup.java
@@ -20,7 +20,7 @@
package me.lucko.spark.nukkit;
-import me.lucko.spark.common.util.ClassSourceLookup;
+import me.lucko.spark.common.sampler.source.ClassSourceLookup;
import cn.nukkit.plugin.PluginClassLoader;
diff --git a/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitSparkPlugin.java b/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitSparkPlugin.java
index 18132c3..ae21241 100644
--- a/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitSparkPlugin.java
+++ b/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitSparkPlugin.java
@@ -25,13 +25,12 @@ import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.SparkPlugin;
import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
import me.lucko.spark.common.platform.PlatformInfo;
-import me.lucko.spark.common.util.ClassSourceLookup;
+import me.lucko.spark.common.sampler.source.ClassSourceLookup;
import cn.nukkit.command.Command;
import cn.nukkit.command.CommandSender;
import cn.nukkit.plugin.PluginBase;
import cn.nukkit.plugin.service.ServicePriority;
-import cn.nukkit.scheduler.AsyncTask;
import java.nio.file.Path;
import java.util.logging.Level;
@@ -82,12 +81,7 @@ public class NukkitSparkPlugin extends PluginBase implements SparkPlugin {
@Override
public void executeAsync(Runnable task) {
- getServer().getScheduler().scheduleAsyncTask(this, new AsyncTask() {
- @Override
- public void onRun() {
- task.run();
- }
- });
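+ // 'true' schedules the task on Nukkit's async pool, replacing the AsyncTask wrapper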
+ getServer().getScheduler().scheduleTask(this, task, true);
}
@Override
diff --git a/spark-sponge7/build.gradle b/spark-sponge7/build.gradle
index b6f8273..b06d3bd 100644
--- a/spark-sponge7/build.gradle
+++ b/spark-sponge7/build.gradle
@@ -22,12 +22,9 @@ blossom {
shadowJar {
archiveFileName = "spark-${project.pluginVersion}-sponge7.jar"
- relocate 'okio', 'me.lucko.spark.lib.okio'
- relocate 'okhttp3', 'me.lucko.spark.lib.okhttp3'
relocate 'net.kyori.adventure', 'me.lucko.spark.lib.adventure'
relocate 'net.kyori.examination', 'me.lucko.spark.lib.adventure.examination'
relocate 'net.bytebuddy', 'me.lucko.spark.lib.bytebuddy'
- relocate 'org.tukaani.xz', 'me.lucko.spark.lib.xz'
relocate 'com.google.protobuf', 'me.lucko.spark.lib.protobuf'
relocate 'org.objectweb.asm', 'me.lucko.spark.lib.asm'
relocate 'one.profiler', 'me.lucko.spark.lib.asyncprofiler'
diff --git a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7ClassSourceLookup.java b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7ClassSourceLookup.java
index 90f3b8f..899ce58 100644
--- a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7ClassSourceLookup.java
+++ b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7ClassSourceLookup.java
@@ -20,7 +20,7 @@
package me.lucko.spark.sponge;
-import me.lucko.spark.common.util.ClassSourceLookup;
+import me.lucko.spark.common.sampler.source.ClassSourceLookup;
import org.spongepowered.api.Game;
diff --git a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7SparkPlugin.java b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7SparkPlugin.java
index 670e0c5..0e3f4eb 100644
--- a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7SparkPlugin.java
+++ b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7SparkPlugin.java
@@ -27,9 +27,10 @@ import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.SparkPlugin;
import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
import me.lucko.spark.common.platform.PlatformInfo;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
import me.lucko.spark.common.sampler.ThreadDumper;
+import me.lucko.spark.common.sampler.source.ClassSourceLookup;
import me.lucko.spark.common.tick.TickHook;
-import me.lucko.spark.common.util.ClassSourceLookup;
import org.slf4j.Logger;
import org.spongepowered.api.Game;
@@ -44,6 +45,7 @@ import org.spongepowered.api.plugin.Plugin;
import org.spongepowered.api.plugin.PluginContainer;
import org.spongepowered.api.scheduler.AsynchronousExecutor;
import org.spongepowered.api.scheduler.SpongeExecutorService;
+import org.spongepowered.api.scheduler.SynchronousExecutor;
import org.spongepowered.api.text.Text;
import org.spongepowered.api.world.Location;
import org.spongepowered.api.world.World;
@@ -70,17 +72,21 @@ public class Sponge7SparkPlugin implements SparkPlugin {
private final Game game;
private final Path configDirectory;
private final SpongeExecutorService asyncExecutor;
+ private final SpongeExecutorService syncExecutor;
+ private final ThreadDumper.GameThread gameThreadDumper = new ThreadDumper.GameThread();
private SparkPlatform platform;
- private final ThreadDumper.GameThread threadDumper = new ThreadDumper.GameThread();
@Inject
- public Sponge7SparkPlugin(PluginContainer pluginContainer, Logger logger, Game game, @ConfigDir(sharedRoot = false) Path configDirectory, @AsynchronousExecutor SpongeExecutorService asyncExecutor) {
+ public Sponge7SparkPlugin(PluginContainer pluginContainer, Logger logger, Game game, @ConfigDir(sharedRoot = false) Path configDirectory, @AsynchronousExecutor SpongeExecutorService asyncExecutor, @SynchronousExecutor SpongeExecutorService syncExecutor) {
this.pluginContainer = pluginContainer;
this.logger = logger;
this.game = game;
this.configDirectory = configDirectory;
this.asyncExecutor = asyncExecutor;
+ this.syncExecutor = syncExecutor;
+
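+ // run a task on the sync executor to capture the main thread for the default thread dumper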
+ this.syncExecutor.execute(() -> this.gameThreadDumper.setThread(Thread.currentThread()));
}
@Listener
@@ -112,10 +118,14 @@ public class Sponge7SparkPlugin implements SparkPlugin {
@Override
public Stream<Sponge7CommandSender> getCommandSenders() {
- return Stream.concat(
- this.game.getServer().getOnlinePlayers().stream(),
- Stream.of(this.game.getServer().getConsole())
- ).map(Sponge7CommandSender::new);
+ if (this.game.isServerAvailable()) {
+ return Stream.concat(
+ this.game.getServer().getOnlinePlayers().stream(),
+ Stream.of(this.game.getServer().getConsole())
+ ).map(Sponge7CommandSender::new);
+ } else {
+ return Stream.of(this.game.getServer().getConsole()).map(Sponge7CommandSender::new);
+ }
}
@Override
@@ -124,6 +134,11 @@ public class Sponge7SparkPlugin implements SparkPlugin {
}
@Override
+ public void executeSync(Runnable task) {
+ this.syncExecutor.execute(task);
+ }
+
+ @Override
public void log(Level level, String msg) {
if (level == Level.INFO) {
this.logger.info(msg);
@@ -138,7 +153,7 @@ public class Sponge7SparkPlugin implements SparkPlugin {
@Override
public ThreadDumper getDefaultThreadDumper() {
- return this.threadDumper.get();
+ return this.gameThreadDumper.get();
}
@Override
@@ -161,6 +176,15 @@ public class Sponge7SparkPlugin implements SparkPlugin {
}
@Override
+ public WorldInfoProvider createWorldInfoProvider() {
+ if (this.game.isServerAvailable()) {
+ return new Sponge7WorldInfoProvider(this.game.getServer());
+ } else {
+ return WorldInfoProvider.NO_OP;
+ }
+ }
+
+ @Override
public PlatformInfo getPlatformInfo() {
return new Sponge7PlatformInfo(this.game);
}
@@ -179,7 +203,6 @@ public class Sponge7SparkPlugin implements SparkPlugin {
@Override
public CommandResult process(CommandSource source, String arguments) {
- this.plugin.threadDumper.ensureSetup();
this.plugin.platform.executeCommand(new Sponge7CommandSender(source), arguments.split(" "));
return CommandResult.empty();
}
diff --git a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7WorldInfoProvider.java b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7WorldInfoProvider.java
new file mode 100644
index 0000000..df58028
--- /dev/null
+++ b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7WorldInfoProvider.java
@@ -0,0 +1,104 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.sponge;
+
+import com.google.common.collect.Iterables;
+import com.google.common.collect.Lists;
+
+import me.lucko.spark.common.platform.world.AbstractChunkInfo;
+import me.lucko.spark.common.platform.world.CountMap;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
+
+import org.spongepowered.api.Server;
+import org.spongepowered.api.entity.Entity;
+import org.spongepowered.api.entity.EntityType;
+import org.spongepowered.api.world.Chunk;
+import org.spongepowered.api.world.World;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+
+public class Sponge7WorldInfoProvider implements WorldInfoProvider {
+ private final Server server;
+
+ public Sponge7WorldInfoProvider(Server server) {
+ this.server = server;
+ }
+
+ @Override
+ public CountsResult pollCounts() {
+ int players = this.server.getOnlinePlayers().size();
+ int entities = 0;
+ int tileEntities = 0;
+ int chunks = 0;
+
+ for (World world : this.server.getWorlds()) {
+ entities += world.getEntities().size();
+ tileEntities += world.getTileEntities().size();
+ chunks += Iterables.size(world.getLoadedChunks());
+ }
+
+ return new CountsResult(players, entities, tileEntities, chunks);
+ }
+
+ @Override
+ public ChunksResult<Sponge7ChunkInfo> pollChunks() {
+ ChunksResult<Sponge7ChunkInfo> data = new ChunksResult<>();
+
+ for (World world : this.server.getWorlds()) {
+ List<Chunk> chunks = Lists.newArrayList(world.getLoadedChunks());
+
+ List<Sponge7ChunkInfo> list = new ArrayList<>(chunks.size());
+ for (Chunk chunk : chunks) {
+ list.add(new Sponge7ChunkInfo(chunk));
+ }
+
+ data.put(world.getName(), list);
+ }
+
+ return data;
+ }
+
+ static final class Sponge7ChunkInfo extends AbstractChunkInfo<EntityType> {
+ private final CountMap<EntityType> entityCounts;
+
+ Sponge7ChunkInfo(Chunk chunk) {
+ super(chunk.getPosition().getX(), chunk.getPosition().getZ());
+
+ this.entityCounts = new CountMap.Simple<>(new HashMap<>());
+ for (Entity entity : chunk.getEntities()) {
+ this.entityCounts.increment(entity.getType());
+ }
+ }
+
+ @Override
+ public CountMap<EntityType> getEntityCounts() {
+ return this.entityCounts;
+ }
+
+ @Override
+ public String entityTypeName(EntityType type) {
+ return type.getName();
+ }
+
+ }
+}
diff --git a/spark-sponge8/build.gradle b/spark-sponge8/build.gradle
index 314ab18..202c308 100644
--- a/spark-sponge8/build.gradle
+++ b/spark-sponge8/build.gradle
@@ -28,11 +28,8 @@ shadowJar {
exclude(dependency('net.kyori:^(?!adventure-text-feature-pagination).+$'))
}
- relocate 'okio', 'me.lucko.spark.lib.okio'
- relocate 'okhttp3', 'me.lucko.spark.lib.okhttp3'
relocate 'net.kyori.adventure.text.feature.pagination', 'me.lucko.spark.lib.adventure.pagination'
relocate 'net.bytebuddy', 'me.lucko.spark.lib.bytebuddy'
- relocate 'org.tukaani.xz', 'me.lucko.spark.lib.xz'
relocate 'com.google.protobuf', 'me.lucko.spark.lib.protobuf'
relocate 'org.objectweb.asm', 'me.lucko.spark.lib.asm'
relocate 'one.profiler', 'me.lucko.spark.lib.asyncprofiler'
diff --git a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8ClassSourceLookup.java b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8ClassSourceLookup.java
index fa4ac45..7f02e75 100644
--- a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8ClassSourceLookup.java
+++ b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8ClassSourceLookup.java
@@ -22,7 +22,7 @@ package me.lucko.spark.sponge;
import com.google.common.collect.ImmutableMap;
-import me.lucko.spark.common.util.ClassSourceLookup;
+import me.lucko.spark.common.sampler.source.ClassSourceLookup;
import org.spongepowered.api.Game;
import org.spongepowered.plugin.PluginCandidate;
diff --git a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8SparkPlugin.java b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8SparkPlugin.java
index e867a75..b1d31e9 100644
--- a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8SparkPlugin.java
+++ b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8SparkPlugin.java
@@ -20,6 +20,7 @@
package me.lucko.spark.sponge;
+import com.google.common.base.Suppliers;
import com.google.inject.Inject;
import me.lucko.spark.common.SparkPlatform;
@@ -27,9 +28,11 @@ import me.lucko.spark.common.SparkPlugin;
import me.lucko.spark.common.command.sender.CommandSender;
import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
import me.lucko.spark.common.platform.PlatformInfo;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
import me.lucko.spark.common.sampler.ThreadDumper;
+import me.lucko.spark.common.sampler.source.ClassSourceLookup;
+import me.lucko.spark.common.sampler.source.SourceMetadata;
import me.lucko.spark.common.tick.TickHook;
-import me.lucko.spark.common.util.ClassSourceLookup;
import net.kyori.adventure.text.Component;
@@ -50,11 +53,14 @@ import org.spongepowered.api.event.lifecycle.StartedEngineEvent;
import org.spongepowered.api.event.lifecycle.StoppingEngineEvent;
import org.spongepowered.plugin.PluginContainer;
import org.spongepowered.plugin.builtin.jvm.Plugin;
+import org.spongepowered.plugin.metadata.model.PluginContributor;
import java.nio.file.Path;
+import java.util.Collection;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.ExecutorService;
+import java.util.function.Supplier;
import java.util.logging.Level;
import java.util.stream.Collectors;
import java.util.stream.Stream;
@@ -67,9 +73,10 @@ public class Sponge8SparkPlugin implements SparkPlugin {
private final Game game;
private final Path configDirectory;
private final ExecutorService asyncExecutor;
+ private final Supplier<ExecutorService> syncExecutor;
+ private final ThreadDumper.GameThread gameThreadDumper = new ThreadDumper.GameThread();
private SparkPlatform platform;
- private final ThreadDumper.GameThread threadDumper = new ThreadDumper.GameThread();
@Inject
public Sponge8SparkPlugin(PluginContainer pluginContainer, Logger logger, Game game, @ConfigDir(sharedRoot = false) Path configDirectory) {
@@ -78,6 +85,15 @@ public class Sponge8SparkPlugin implements SparkPlugin {
this.game = game;
this.configDirectory = configDirectory;
this.asyncExecutor = game.asyncScheduler().executor(pluginContainer);
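+ // neither the server nor the client engine exists yet, so resolve the sync executor lazily (memoized)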
+ this.syncExecutor = Suppliers.memoize(() -> {
+ if (this.game.isServerAvailable()) {
+ return this.game.server().scheduler().executor(this.pluginContainer);
+ } else if (this.game.isClientAvailable()) {
+ return this.game.client().scheduler().executor(this.pluginContainer);
+ } else {
+ throw new IllegalStateException("Server and client both unavailable");
+ }
+ });
}
@@ -88,6 +104,8 @@ public class Sponge8SparkPlugin implements SparkPlugin {
@Listener
public void onEnable(StartedEngineEvent<Server> event) {
+ executeSync(() -> this.gameThreadDumper.setThread(Thread.currentThread()));
+
this.platform = new SparkPlatform(this);
this.platform.enable();
}
@@ -114,10 +132,14 @@ public class Sponge8SparkPlugin implements SparkPlugin {
@Override
public Stream<CommandSender> getCommandSenders() {
- return Stream.concat(
- this.game.server().onlinePlayers().stream(),
- Stream.of(this.game.systemSubject())
- ).map(Sponge8CommandSender::new);
+ if (this.game.isServerAvailable()) {
+ return Stream.concat(
+ this.game.server().onlinePlayers().stream(),
+ Stream.of(this.game.systemSubject())
+ ).map(Sponge8CommandSender::new);
+ } else {
+ return Stream.of(this.game.systemSubject()).map(Sponge8CommandSender::new);
+ }
}
@Override
@@ -126,6 +148,11 @@ public class Sponge8SparkPlugin implements SparkPlugin {
}
@Override
+ public void executeSync(Runnable task) {
+ this.syncExecutor.get().execute(task);
+ }
+
+ @Override
public void log(Level level, String msg) {
if (level == Level.INFO) {
this.logger.info(msg);
@@ -140,7 +167,7 @@ public class Sponge8SparkPlugin implements SparkPlugin {
@Override
public ThreadDumper getDefaultThreadDumper() {
- return this.threadDumper.get();
+ return this.gameThreadDumper.get();
}
@Override
@@ -154,6 +181,18 @@ public class Sponge8SparkPlugin implements SparkPlugin {
}
@Override
+ public Collection<SourceMetadata> getKnownSources() {
+ return SourceMetadata.gather(
+ this.game.pluginManager().plugins(),
+ plugin -> plugin.metadata().id(),
+ plugin -> plugin.metadata().version().toString(),
+ plugin -> plugin.metadata().contributors().stream()
+ .map(PluginContributor::name)
+ .collect(Collectors.joining(", "))
+ );
+ }
+
+ @Override
public PlayerPingProvider createPlayerPingProvider() {
if (this.game.isServerAvailable()) {
return new Sponge8PlayerPingProvider(this.game.server());
@@ -163,6 +202,15 @@ public class Sponge8SparkPlugin implements SparkPlugin {
}
@Override
+ public WorldInfoProvider createWorldInfoProvider() {
+ if (this.game.isServerAvailable()) {
+ return new Sponge8WorldInfoProvider(this.game.server());
+ } else {
+ return WorldInfoProvider.NO_OP;
+ }
+ }
+
+ @Override
public PlatformInfo getPlatformInfo() {
return new Sponge8PlatformInfo(this.game);
}
@@ -176,7 +224,6 @@ public class Sponge8SparkPlugin implements SparkPlugin {
@Override
public CommandResult process(CommandCause cause, ArgumentReader.Mutable arguments) {
- this.plugin.threadDumper.ensureSetup();
this.plugin.platform.executeCommand(new Sponge8CommandSender(cause), arguments.input().split(" "));
return CommandResult.success();
}
diff --git a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8WorldInfoProvider.java b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8WorldInfoProvider.java
new file mode 100644
index 0000000..69b4515
--- /dev/null
+++ b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8WorldInfoProvider.java
@@ -0,0 +1,105 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.sponge;
+
+import com.google.common.collect.Iterables;
+import com.google.common.collect.Lists;
+
+import me.lucko.spark.common.platform.world.AbstractChunkInfo;
+import me.lucko.spark.common.platform.world.CountMap;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
+
+import org.spongepowered.api.Server;
+import org.spongepowered.api.entity.Entity;
+import org.spongepowered.api.entity.EntityType;
+import org.spongepowered.api.entity.EntityTypes;
+import org.spongepowered.api.world.chunk.WorldChunk;
+import org.spongepowered.api.world.server.ServerWorld;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+
+public class Sponge8WorldInfoProvider implements WorldInfoProvider {
+ private final Server server;
+
+ public Sponge8WorldInfoProvider(Server server) {
+ this.server = server;
+ }
+
+ @Override
+ public CountsResult pollCounts() {
+ int players = this.server.onlinePlayers().size();
+ int entities = 0;
+ int tileEntities = 0;
+ int chunks = 0;
+
+ for (ServerWorld world : this.server.worldManager().worlds()) {
+ entities += world.entities().size();
+ tileEntities += world.blockEntities().size();
+ chunks += Iterables.size(world.loadedChunks());
+ }
+
+ return new CountsResult(players, entities, tileEntities, chunks);
+ }
+
+ @Override
+ public ChunksResult<Sponge8ChunkInfo> pollChunks() {
+ ChunksResult<Sponge8ChunkInfo> data = new ChunksResult<>();
+
+ for (ServerWorld world : this.server.worldManager().worlds()) {
+ List<WorldChunk> chunks = Lists.newArrayList(world.loadedChunks());
+
+ List<Sponge8ChunkInfo> list = new ArrayList<>(chunks.size());
+ for (WorldChunk chunk : chunks) {
+ list.add(new Sponge8ChunkInfo(chunk));
+ }
+
+ data.put(world.key().value(), list);
+ }
+
+ return data;
+ }
+
+ static final class Sponge8ChunkInfo extends AbstractChunkInfo<EntityType<?>> {
+ private final CountMap<EntityType<?>> entityCounts;
+
+ Sponge8ChunkInfo(WorldChunk chunk) {
+ super(chunk.chunkPosition().x(), chunk.chunkPosition().z());
+
+ this.entityCounts = new CountMap.Simple<>(new HashMap<>());
+ for (Entity entity : chunk.entities()) {
+ this.entityCounts.increment(entity.type());
+ }
+ }
+
+ @Override
+ public CountMap<EntityType<?>> getEntityCounts() {
+ return this.entityCounts;
+ }
+
+ @Override
+ public String entityTypeName(EntityType<?> type) {
+ return EntityTypes.registry().valueKey(type).value();
+ }
+
+ }
+}
diff --git a/spark-velocity/build.gradle b/spark-velocity/build.gradle
index b2e938b..275d3df 100644
--- a/spark-velocity/build.gradle
+++ b/spark-velocity/build.gradle
@@ -26,11 +26,8 @@ shadowJar {
exclude(dependency('net.kyori:^(?!adventure-text-feature-pagination).+$'))
}
- relocate 'okio', 'me.lucko.spark.lib.okio'
- relocate 'okhttp3', 'me.lucko.spark.lib.okhttp3'
relocate 'net.kyori.adventure.text.feature.pagination', 'me.lucko.spark.lib.adventure.pagination'
relocate 'net.bytebuddy', 'me.lucko.spark.lib.bytebuddy'
- relocate 'org.tukaani.xz', 'me.lucko.spark.lib.xz'
relocate 'com.google.protobuf', 'me.lucko.spark.lib.protobuf'
relocate 'org.objectweb.asm', 'me.lucko.spark.lib.asm'
relocate 'one.profiler', 'me.lucko.spark.lib.asyncprofiler'
diff --git a/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocityClassSourceLookup.java b/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocityClassSourceLookup.java
index bcb8176..9b697c3 100644
--- a/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocityClassSourceLookup.java
+++ b/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocityClassSourceLookup.java
@@ -23,7 +23,7 @@ package me.lucko.spark.velocity;
import com.velocitypowered.api.plugin.PluginContainer;
import com.velocitypowered.api.plugin.PluginManager;
-import me.lucko.spark.common.util.ClassSourceLookup;
+import me.lucko.spark.common.sampler.source.ClassSourceLookup;
import org.checkerframework.checker.nullness.qual.Nullable;
diff --git a/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocitySparkPlugin.java b/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocitySparkPlugin.java
index 7d9ced8..4a89a4e 100644
--- a/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocitySparkPlugin.java
+++ b/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocitySparkPlugin.java
@@ -34,11 +34,13 @@ import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.SparkPlugin;
import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
import me.lucko.spark.common.platform.PlatformInfo;
-import me.lucko.spark.common.util.ClassSourceLookup;
+import me.lucko.spark.common.sampler.source.ClassSourceLookup;
+import me.lucko.spark.common.sampler.source.SourceMetadata;
import org.slf4j.Logger;
import java.nio.file.Path;
+import java.util.Collection;
import java.util.List;
import java.util.logging.Level;
import java.util.stream.Stream;
@@ -134,6 +136,16 @@ public class VelocitySparkPlugin implements SparkPlugin, SimpleCommand {
}
@Override
+ public Collection<SourceMetadata> getKnownSources() {
+ return SourceMetadata.gather(
+ this.proxy.getPluginManager().getPlugins(),
+ plugin -> plugin.getDescription().getId(),
+ plugin -> plugin.getDescription().getVersion().orElse("unspecified"),
+ plugin -> String.join(", ", plugin.getDescription().getAuthors())
+ );
+ }
+
+ @Override
public PlayerPingProvider createPlayerPingProvider() {
return new VelocityPlayerPingProvider(this.proxy);
}
diff --git a/spark-velocity4/build.gradle b/spark-velocity4/build.gradle
index 5bef80b..1f8e8ee 100644
--- a/spark-velocity4/build.gradle
+++ b/spark-velocity4/build.gradle
@@ -31,11 +31,8 @@ shadowJar {
exclude(dependency('net.kyori:^(?!adventure-text-feature-pagination).+$'))
}
- relocate 'okio', 'me.lucko.spark.lib.okio'
- relocate 'okhttp3', 'me.lucko.spark.lib.okhttp3'
relocate 'net.kyori.adventure.text.feature.pagination', 'me.lucko.spark.lib.adventure.pagination'
relocate 'net.bytebuddy', 'me.lucko.spark.lib.bytebuddy'
- relocate 'org.tukaani.xz', 'me.lucko.spark.lib.xz'
relocate 'com.google.protobuf', 'me.lucko.spark.lib.protobuf'
relocate 'org.objectweb.asm', 'me.lucko.spark.lib.asm'
relocate 'one.profiler', 'me.lucko.spark.lib.asyncprofiler'
diff --git a/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4ClassSourceLookup.java b/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4ClassSourceLookup.java
index c5c22c3..84840d2 100644
--- a/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4ClassSourceLookup.java
+++ b/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4ClassSourceLookup.java
@@ -23,7 +23,7 @@ package me.lucko.spark.velocity;
import com.velocitypowered.api.plugin.PluginContainer;
import com.velocitypowered.api.plugin.PluginManager;
-import me.lucko.spark.common.util.ClassSourceLookup;
+import me.lucko.spark.common.sampler.source.ClassSourceLookup;
import org.checkerframework.checker.nullness.qual.Nullable;
@@ -48,7 +48,7 @@ public class Velocity4ClassSourceLookup extends ClassSourceLookup.ByClassLoader
for (PluginContainer plugin : pluginManager.plugins()) {
Object instance = plugin.instance();
if (instance != null) {
- this.classLoadersToPlugin.put(instance.getClass().getClassLoader(), plugin.description().name());
+ this.classLoadersToPlugin.put(instance.getClass().getClassLoader(), plugin.description().id());
}
}
}
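
Switching the map value from description().name() to description().id() keys class-source attribution the same way as getKnownSources() below, which also reports the id. The id is the stable, always-present identifier; the display name is a human-facing label that may be absent. A hedged sketch of that distinction, with hypothetical types rather than the Velocity 4 API:

    import java.util.Optional;

    final class SourceLabelSketch {
        // Attribution should key on the stable id; an optional display name
        // is only a fallback-friendly label for humans.
        static String label(String id, Optional<String> displayName) {
            return displayName.filter(name -> !name.isEmpty()).orElse(id);
        }
    }
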
diff --git a/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4SparkPlugin.java b/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4SparkPlugin.java
index 0c57689..b638246 100644
--- a/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4SparkPlugin.java
+++ b/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4SparkPlugin.java
@@ -34,11 +34,13 @@ import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.SparkPlugin;
import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
import me.lucko.spark.common.platform.PlatformInfo;
-import me.lucko.spark.common.util.ClassSourceLookup;
+import me.lucko.spark.common.sampler.source.ClassSourceLookup;
+import me.lucko.spark.common.sampler.source.SourceMetadata;
import org.slf4j.Logger;
import java.nio.file.Path;
+import java.util.Collection;
import java.util.List;
import java.util.logging.Level;
import java.util.stream.Stream;
@@ -134,6 +136,16 @@ public class Velocity4SparkPlugin implements SparkPlugin, SimpleCommand {
}
@Override
+ public Collection<SourceMetadata> getKnownSources() {
+ return SourceMetadata.gather(
+ this.proxy.pluginManager().plugins(),
+ plugin -> plugin.description().id(),
+ plugin -> plugin.description().version(),
+ plugin -> String.join(", ", plugin.description().authors())
+ );
+ }
+
+ @Override
public PlayerPingProvider createPlayerPingProvider() {
return new Velocity4PlayerPingProvider(this.proxy);
}
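
The only substantive difference from the legacy Velocity implementation above is version handling: the older API exposes the version as an Optional (hence .orElse("unspecified")), while the Velocity 4 API returns it directly. A small normaliser keeps the two call sites uniform; the helper below is hypothetical, not part of spark:

    import java.util.Optional;

    final class VersionSketch {
        // Legacy API: the version may be absent.
        static String orUnspecified(Optional<String> version) {
            return version.orElse("unspecified");
        }

        // Newer API: the version is a plain string, but guard against blanks anyway.
        static String orUnspecified(String version) {
            return (version == null || version.isEmpty()) ? "unspecified" : version;
        }
    }
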
diff --git a/spark-waterdog/build.gradle b/spark-waterdog/build.gradle
index c11e3fb..9c1a4d3 100644
--- a/spark-waterdog/build.gradle
+++ b/spark-waterdog/build.gradle
@@ -9,7 +9,7 @@ tasks.withType(JavaCompile) {
dependencies {
implementation project(':spark-common')
- implementation 'net.kyori:adventure-text-serializer-legacy:4.4.0'
+ implementation 'net.kyori:adventure-text-serializer-legacy:4.12.0'
compileOnly 'dev.waterdog.waterdogpe:waterdog:1.1.3-SNAPSHOT'
}
@@ -30,11 +30,8 @@ processResources {
shadowJar {
archiveName = "spark-${project.pluginVersion}-waterdog.jar"
- relocate 'okio', 'me.lucko.spark.lib.okio'
- relocate 'okhttp3', 'me.lucko.spark.lib.okhttp3'
relocate 'net.kyori.adventure', 'me.lucko.spark.lib.adventure'
relocate 'net.kyori.examination', 'me.lucko.spark.lib.adventure.examination'
- relocate 'org.tukaani.xz', 'me.lucko.spark.lib.xz'
relocate 'net.bytebuddy', 'me.lucko.spark.lib.bytebuddy'
relocate 'com.google.protobuf', 'me.lucko.spark.lib.protobuf'
relocate 'org.objectweb.asm', 'me.lucko.spark.lib.asm'
diff --git a/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogClassSourceLookup.java b/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogClassSourceLookup.java
index 36e6a57..2207c9e 100644
--- a/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogClassSourceLookup.java
+++ b/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogClassSourceLookup.java
@@ -20,7 +20,7 @@
package me.lucko.spark.waterdog;
-import me.lucko.spark.common.util.ClassSourceLookup;
+import me.lucko.spark.common.sampler.source.ClassSourceLookup;
import dev.waterdog.waterdogpe.ProxyServer;
import dev.waterdog.waterdogpe.plugin.Plugin;
diff --git a/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogSparkPlugin.java b/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogSparkPlugin.java
index 07b153a..1a64a98 100644
--- a/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogSparkPlugin.java
+++ b/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogSparkPlugin.java
@@ -24,7 +24,8 @@ import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.SparkPlugin;
import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
import me.lucko.spark.common.platform.PlatformInfo;
-import me.lucko.spark.common.util.ClassSourceLookup;
+import me.lucko.spark.common.sampler.source.ClassSourceLookup;
+import me.lucko.spark.common.sampler.source.SourceMetadata;
import dev.waterdog.waterdogpe.ProxyServer;
import dev.waterdog.waterdogpe.command.Command;
@@ -32,6 +33,7 @@ import dev.waterdog.waterdogpe.command.CommandSender;
import dev.waterdog.waterdogpe.plugin.Plugin;
import java.nio.file.Path;
+import java.util.Collection;
import java.util.logging.Level;
import java.util.stream.Stream;
@@ -101,6 +103,16 @@ public class WaterdogSparkPlugin extends Plugin implements SparkPlugin {
}
@Override
+ public Collection<SourceMetadata> getKnownSources() {
+ return SourceMetadata.gather(
+ getProxy().getPluginManager().getPlugins(),
+ Plugin::getName,
+ plugin -> plugin.getDescription().getVersion(),
+ plugin -> plugin.getDescription().getAuthor()
+ );
+ }
+
+ @Override
public PlayerPingProvider createPlayerPingProvider() {
return new WaterdogPlayerPingProvider(getProxy());
}
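
Waterdog's plugin API is the simplest of the three: the plugin name comes straight from Plugin::getName and the description exposes a single author string, so no joining is needed. However each platform shapes its extractors, consumers see one uniform collection; a hedged usage sketch, with accessor names inferred from the extractors rather than confirmed spark API:

    import java.util.Collection;

    final class KnownSourcesSketch {
        // Illustrative view of a gathered record; the real SourceMetadata
        // accessors may differ.
        interface SourceView {
            String name();
            String version();
            String author();
        }

        static void print(Collection<? extends SourceView> sources) {
            for (SourceView source : sources) {
                System.out.printf("%s v%s by %s%n",
                        source.name(), source.version(), source.author());
            }
        }
    }
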