-rw-r--r--  spark-bukkit/build.gradle | 8
-rw-r--r--  spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java | 118
-rw-r--r--  spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java | 13
-rw-r--r--  spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitWorldInfoProvider.java | 87
-rw-r--r--  spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkMVdWPlaceholders.java | 3
-rw-r--r--  spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkPlaceholderApi.java | 3
-rw-r--r--  spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkPlaceholderProvider.java | 123
-rw-r--r--  spark-bungeecord/build.gradle | 8
-rw-r--r--  spark-common/build.gradle | 9
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java | 19
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/command/modules/ActivityLogModule.java | 1
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java | 2
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java | 15
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java | 95
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ConfigParser.java | 48
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ExcludedConfigFilter.java | 97
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesConfigParser.java (renamed from spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesFileReader.java) | 29
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java | 11
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/platform/world/AbstractChunkInfo.java | 55
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/platform/world/ChunkInfo.java | 44
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/platform/world/CountMap.java | 110
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldInfoProvider.java | 57
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java | 220
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java | 36
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java | 2
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java | 6
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java | 10
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java | 12
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java | 15
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/util/SparkPlaceholder.java | 191
-rw-r--r--  spark-common/src/main/proto/spark/spark.proto | 27
-rw-r--r--  spark-common/src/main/proto/spark/spark_sampler.proto | 2
-rw-r--r--  spark-fabric/build.gradle | 2
-rw-r--r--  spark-fabric/src/main/java/me/lucko/spark/fabric/FabricServerConfigProvider.java | 57
-rw-r--r--  spark-fabric/src/main/java/me/lucko/spark/fabric/FabricWorldInfoProvider.java | 144
-rw-r--r--  spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientEntityManagerAccessor.java | 36
-rw-r--r--  spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientWorldAccessor.java | 36
-rw-r--r--  spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerEntityManagerAccessor.java | 36
-rw-r--r--  spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerWorldAccessor.java | 36
-rw-r--r--  spark-fabric/src/main/java/me/lucko/spark/fabric/placeholder/SparkFabricPlaceholderApi.java | 183
-rw-r--r--  spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricClientSparkPlugin.java | 12
-rw-r--r--  spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java | 25
-rw-r--r--  spark-fabric/src/main/resources/fabric.mod.json | 3
-rw-r--r--  spark-fabric/src/main/resources/spark.mixins.json | 14
-rw-r--r--  spark-forge/build.gradle | 1
-rw-r--r--  spark-forge/src/main/java/me/lucko/spark/forge/ForgeServerConfigProvider.java | 57
-rw-r--r--  spark-forge/src/main/java/me/lucko/spark/forge/ForgeWorldInfoProvider.java | 140
-rw-r--r--  spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeClientSparkPlugin.java | 12
-rw-r--r--  spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java | 19
-rw-r--r--  spark-forge/src/main/resources/META-INF/accesstransformer.cfg | 4
-rw-r--r--  spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitSparkPlugin.java | 8
-rw-r--r--  spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7SparkPlugin.java | 32
-rw-r--r--  spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7WorldInfoProvider.java | 87
-rw-r--r--  spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8SparkPlugin.java | 36
-rw-r--r--  spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8WorldInfoProvider.java | 88
55 files changed, 2072 insertions, 472 deletions
diff --git a/spark-bukkit/build.gradle b/spark-bukkit/build.gradle
index 8e111e8..7144291 100644
--- a/spark-bukkit/build.gradle
+++ b/spark-bukkit/build.gradle
@@ -4,13 +4,7 @@ plugins {
dependencies {
implementation project(':spark-common')
- implementation('me.lucko:adventure-platform-bukkit:4.9.4') {
- exclude(module: 'adventure-api')
- exclude(module: 'checker-qual')
- exclude(module: 'annotations')
- exclude(module: 'adventure-text-serializer-gson')
- exclude(module: 'adventure-text-serializer-legacy')
- }
+ implementation 'net.kyori:adventure-platform-bukkit:4.1.1'
compileOnly 'com.destroystokyo.paper:paper-api:1.16.4-R0.1-SNAPSHOT'
// placeholders
diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java
index 953e171..4c587fb 100644
--- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java
+++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java
@@ -25,11 +25,16 @@ import com.google.common.collect.ImmutableSet;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
import com.google.gson.JsonSerializer;
import me.lucko.spark.common.platform.serverconfig.AbstractServerConfigProvider;
-import me.lucko.spark.common.platform.serverconfig.PropertiesFileReader;
+import me.lucko.spark.common.platform.serverconfig.ConfigParser;
+import me.lucko.spark.common.platform.serverconfig.ExcludedConfigFilter;
+import me.lucko.spark.common.platform.serverconfig.PropertiesConfigParser;
+import org.bukkit.Bukkit;
+import org.bukkit.World;
import org.bukkit.configuration.MemorySection;
import org.bukkit.configuration.file.YamlConfiguration;
@@ -37,23 +42,19 @@ import co.aikar.timings.TimingsManager;
import java.io.BufferedReader;
import java.io.IOException;
-import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
-import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
+import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
-public class BukkitServerConfigProvider extends AbstractServerConfigProvider<BukkitServerConfigProvider.FileType> {
- private static final Gson GSON = new GsonBuilder()
- .registerTypeAdapter(MemorySection.class, (JsonSerializer<MemorySection>) (obj, type, ctx) -> ctx.serialize(obj.getValues(false)))
- .create();
+public class BukkitServerConfigProvider extends AbstractServerConfigProvider {
/** A map of provided files and their type */
- private static final Map<String, FileType> FILES;
+ private static final Map<String, ConfigParser> FILES;
/** A collection of paths to be excluded from the files */
private static final Collection<String> HIDDEN_PATHS;
@@ -61,51 +62,89 @@ public class BukkitServerConfigProvider extends AbstractServerConfigProvider<Buk
super(FILES, HIDDEN_PATHS);
}
- @Override
- protected JsonElement load(String path, FileType type) throws IOException {
- Path filePath = Paths.get(path);
- if (!Files.exists(filePath)) {
- return null;
+ private static class YamlConfigParser implements ConfigParser {
+ public static final YamlConfigParser INSTANCE = new YamlConfigParser();
+ protected static final Gson GSON = new GsonBuilder()
+ .registerTypeAdapter(MemorySection.class, (JsonSerializer<MemorySection>) (obj, type, ctx) -> ctx.serialize(obj.getValues(false)))
+ .create();
+
+ @Override
+ public JsonElement load(String file, ExcludedConfigFilter filter) throws IOException {
+ Map<String, Object> values = this.parse(Paths.get(file));
+ if (values == null) {
+ return null;
+ }
+
+ return filter.apply(GSON.toJsonTree(values));
}
- try (BufferedReader reader = Files.newBufferedReader(filePath, StandardCharsets.UTF_8)) {
- Map<String, Object> values;
-
- if (type == FileType.PROPERTIES) {
- PropertiesFileReader propertiesReader = new PropertiesFileReader(reader);
- values = propertiesReader.readProperties();
- } else if (type == FileType.YAML) {
- YamlConfiguration config = YamlConfiguration.loadConfiguration(reader);
- values = config.getValues(false);
- } else {
- throw new IllegalArgumentException("Unknown file type: " + type);
+ @Override
+ public Map<String, Object> parse(BufferedReader reader) throws IOException {
+ YamlConfiguration config = YamlConfiguration.loadConfiguration(reader);
+ return config.getValues(false);
+ }
+ }
+
+ // Paper 1.19+ split config layout
+ private static class SplitYamlConfigParser extends YamlConfigParser {
+ public static final SplitYamlConfigParser INSTANCE = new SplitYamlConfigParser();
+
+ @Override
+ public JsonElement load(String group, ExcludedConfigFilter filter) throws IOException {
+ String prefix = group.replace("/", "");
+
+ Path configDir = Paths.get("config");
+ if (!Files.exists(configDir)) {
+ return null;
}
- return GSON.toJsonTree(values);
+ JsonObject root = new JsonObject();
+
+ for (Map.Entry<String, Path> entry : getNestedFiles(configDir, prefix).entrySet()) {
+ String fileName = entry.getKey();
+ Path path = entry.getValue();
+
+ Map<String, Object> values = this.parse(path);
+ if (values == null) {
+ continue;
+ }
+
+ // apply the filter individually to each nested file
+ root.add(fileName, filter.apply(GSON.toJsonTree(values)));
+ }
+
+ return root;
}
- }
- enum FileType {
- PROPERTIES,
- YAML
+ private static Map<String, Path> getNestedFiles(Path configDir, String prefix) {
+ Map<String, Path> files = new LinkedHashMap<>();
+ files.put("global.yml", configDir.resolve(prefix + "-global.yml"));
+ files.put("world-defaults.yml", configDir.resolve(prefix + "-world-defaults.yml"));
+ for (World world : Bukkit.getWorlds()) {
+ files.put(world.getName() + ".yml", world.getWorldFolder().toPath().resolve(prefix + "-world.yml"));
+ }
+ return files;
+ }
}
static {
- ImmutableMap.Builder<String, FileType> files = ImmutableMap.<String, FileType>builder()
- .put("server.properties", FileType.PROPERTIES)
- .put("bukkit.yml", FileType.YAML)
- .put("spigot.yml", FileType.YAML)
- .put("paper.yml", FileType.YAML)
- .put("purpur.yml", FileType.YAML);
+ ImmutableMap.Builder<String, ConfigParser> files = ImmutableMap.<String, ConfigParser>builder()
+ .put("server.properties", PropertiesConfigParser.INSTANCE)
+ .put("bukkit.yml", YamlConfigParser.INSTANCE)
+ .put("spigot.yml", YamlConfigParser.INSTANCE)
+ .put("paper.yml", YamlConfigParser.INSTANCE)
+ .put("paper/", SplitYamlConfigParser.INSTANCE)
+ .put("purpur.yml", YamlConfigParser.INSTANCE);
for (String config : getSystemPropertyList("spark.serverconfigs.extra")) {
- files.put(config, FileType.YAML);
+ files.put(config, YamlConfigParser.INSTANCE);
}
ImmutableSet.Builder<String> hiddenPaths = ImmutableSet.<String>builder()
.add("database")
.add("settings.bungeecord-addresses")
.add("settings.velocity-support.secret")
+ .add("proxies.velocity.secret")
.add("server-ip")
.add("motd")
.add("resource-pack")
@@ -120,13 +159,6 @@ public class BukkitServerConfigProvider extends AbstractServerConfigProvider<Buk
HIDDEN_PATHS = hiddenPaths.build();
}
- private static List<String> getSystemPropertyList(String property) {
- String value = System.getProperty(property);
- return value == null
- ? Collections.emptyList()
- : Arrays.asList(value.split(","));
- }
-
private static List<String> getTimingsHiddenConfigs() {
try {
return TimingsManager.hiddenConfigs;
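
The extra-configs hook above is driven by a plain comma-separated system property, parsed by the getSystemPropertyList helper (now hoisted into AbstractServerConfigProvider, see below). A minimal, self-contained sketch of the behaviour (the file names are illustrative only):

    import java.util.Arrays;
    import java.util.Collections;
    import java.util.List;

    public class ExtraConfigsDemo {
        // Same logic as AbstractServerConfigProvider#getSystemPropertyList in this commit.
        static List<String> getSystemPropertyList(String property) {
            String value = System.getProperty(property);
            return value == null
                    ? Collections.emptyList()
                    : Arrays.asList(value.split(","));
        }

        public static void main(String[] args) {
            // Equivalent to starting the server with:
            //   java -Dspark.serverconfigs.extra=myplugin.yml,other.yml ...
            System.setProperty("spark.serverconfigs.extra", "myplugin.yml,other.yml");
            System.out.println(getSystemPropertyList("spark.serverconfigs.extra"));
            // prints: [myplugin.yml, other.yml]
        }
    }

Each listed file is then parsed with YamlConfigParser, as the static initialiser above shows.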
diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java
index 9727277..fddd66b 100644
--- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java
+++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java
@@ -28,6 +28,7 @@ import me.lucko.spark.common.SparkPlugin;
import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
import me.lucko.spark.common.platform.PlatformInfo;
import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
import me.lucko.spark.common.sampler.ThreadDumper;
import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.common.tick.TickReporter;
@@ -136,7 +137,12 @@ public class BukkitSparkPlugin extends JavaPlugin implements SparkPlugin {
@Override
public void executeAsync(Runnable task) {
- getServer().getScheduler().runTaskAsynchronously(BukkitSparkPlugin.this, task);
+ getServer().getScheduler().runTaskAsynchronously(this, task);
+ }
+
+ @Override
+ public void executeSync(Runnable task) {
+ getServer().getScheduler().runTask(this, task);
}
@Override
@@ -188,6 +194,11 @@ public class BukkitSparkPlugin extends JavaPlugin implements SparkPlugin {
}
@Override
+ public WorldInfoProvider createWorldInfoProvider() {
+ return new BukkitWorldInfoProvider(getServer());
+ }
+
+ @Override
public PlatformInfo getPlatformInfo() {
return new BukkitPlatformInfo(getServer());
}
diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitWorldInfoProvider.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitWorldInfoProvider.java
new file mode 100644
index 0000000..f34899b
--- /dev/null
+++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitWorldInfoProvider.java
@@ -0,0 +1,87 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.bukkit;
+
+import me.lucko.spark.common.platform.world.AbstractChunkInfo;
+import me.lucko.spark.common.platform.world.CountMap;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
+
+import org.bukkit.Chunk;
+import org.bukkit.Server;
+import org.bukkit.World;
+import org.bukkit.entity.Entity;
+import org.bukkit.entity.EntityType;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+public class BukkitWorldInfoProvider implements WorldInfoProvider {
+ private final Server server;
+
+ public BukkitWorldInfoProvider(Server server) {
+ this.server = server;
+ }
+
+ @Override
+ public Result<BukkitChunkInfo> poll() {
+ Result<BukkitChunkInfo> data = new Result<>();
+
+ for (World world : this.server.getWorlds()) {
+ Chunk[] chunks = world.getLoadedChunks();
+
+ List<BukkitChunkInfo> list = new ArrayList<>(chunks.length);
+ for (Chunk chunk : chunks) {
+ list.add(new BukkitChunkInfo(chunk));
+ }
+
+ data.put(world.getName(), list);
+ }
+
+ return data;
+ }
+
+ static final class BukkitChunkInfo extends AbstractChunkInfo<EntityType> {
+ private final CountMap<EntityType> entityCounts;
+
+ BukkitChunkInfo(Chunk chunk) {
+ super(chunk.getX(), chunk.getZ());
+
+ this.entityCounts = new CountMap.EnumKeyed<>(EntityType.class);
+ for (Entity entity : chunk.getEntities()) {
+ this.entityCounts.increment(entity.getType());
+ }
+ }
+
+ @Override
+ public CountMap<EntityType> getEntityCounts() {
+ return this.entityCounts;
+ }
+
+ @SuppressWarnings("deprecation")
+ @Override
+ public String entityTypeName(EntityType type) {
+ return type.getName();
+ }
+
+ }
+
+}
diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkMVdWPlaceholders.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkMVdWPlaceholders.java
index 078d027..7fa6e02 100644
--- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkMVdWPlaceholders.java
+++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkMVdWPlaceholders.java
@@ -22,6 +22,7 @@ package me.lucko.spark.bukkit.placeholder;
import me.lucko.spark.bukkit.BukkitSparkPlugin;
import me.lucko.spark.common.SparkPlatform;
+import me.lucko.spark.common.util.SparkPlaceholder;
import be.maximvdw.placeholderapi.PlaceholderAPI;
import be.maximvdw.placeholderapi.PlaceholderReplaceEvent;
@@ -43,6 +44,6 @@ public class SparkMVdWPlaceholders implements PlaceholderReplacer {
}
String identifier = placeholder.substring("spark_".length());
- return SparkPlaceholderProvider.respond(this.platform, identifier);
+ return SparkPlaceholder.resolveFormattingCode(this.platform, identifier);
}
}
diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkPlaceholderApi.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkPlaceholderApi.java
index 69dca72..b3919dd 100644
--- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkPlaceholderApi.java
+++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkPlaceholderApi.java
@@ -23,6 +23,7 @@ package me.lucko.spark.bukkit.placeholder;
import me.clip.placeholderapi.expansion.PlaceholderExpansion;
import me.lucko.spark.bukkit.BukkitSparkPlugin;
import me.lucko.spark.common.SparkPlatform;
+import me.lucko.spark.common.util.SparkPlaceholder;
import org.bukkit.OfflinePlayer;
import org.bukkit.entity.Player;
@@ -44,7 +45,7 @@ public class SparkPlaceholderApi extends PlaceholderExpansion {
@Override
public String onRequest(OfflinePlayer p, String params) {
- return SparkPlaceholderProvider.respond(this.platform, params);
+ return SparkPlaceholder.resolveFormattingCode(this.platform, params);
}
@Override
diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkPlaceholderProvider.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkPlaceholderProvider.java
deleted file mode 100644
index 5b57857..0000000
--- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkPlaceholderProvider.java
+++ /dev/null
@@ -1,123 +0,0 @@
-/*
- * This file is part of spark.
- *
- * Copyright (c) lucko (Luck) <luck@lucko.me>
- * Copyright (c) contributors
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-package me.lucko.spark.bukkit.placeholder;
-
-import me.lucko.spark.common.SparkPlatform;
-import me.lucko.spark.common.monitor.cpu.CpuMonitor;
-import me.lucko.spark.common.monitor.tick.TickStatistics;
-import me.lucko.spark.common.util.StatisticFormatter;
-
-import net.kyori.adventure.text.Component;
-import net.kyori.adventure.text.TextComponent;
-import net.kyori.adventure.text.serializer.legacy.LegacyComponentSerializer;
-
-enum SparkPlaceholderProvider {
- ;
-
- public static TextComponent respondComponent(SparkPlatform platform, String placeholder) {
- if (placeholder.startsWith("tps")) {
- TickStatistics tickStatistics = platform.getTickStatistics();
- if (tickStatistics == null) {
- return null;
- }
-
- switch (placeholder) {
- case "tps":
- return Component.text()
- .append(StatisticFormatter.formatTps(tickStatistics.tps5Sec())).append(Component.text(", "))
- .append(StatisticFormatter.formatTps(tickStatistics.tps10Sec())).append(Component.text(", "))
- .append(StatisticFormatter.formatTps(tickStatistics.tps1Min())).append(Component.text(", "))
- .append(StatisticFormatter.formatTps(tickStatistics.tps5Min())).append(Component.text(", "))
- .append(StatisticFormatter.formatTps(tickStatistics.tps15Min()))
- .build();
- case "tps_5s":
- return StatisticFormatter.formatTps(tickStatistics.tps5Sec());
- case "tps_10s":
- return StatisticFormatter.formatTps(tickStatistics.tps10Sec());
- case "tps_1m":
- return StatisticFormatter.formatTps(tickStatistics.tps1Min());
- case "tps_5m":
- return StatisticFormatter.formatTps(tickStatistics.tps5Min());
- case "tps_15m":
- return StatisticFormatter.formatTps(tickStatistics.tps15Min());
- }
- }
-
- if (placeholder.startsWith("tickduration")) {
- TickStatistics tickStatistics = platform.getTickStatistics();
- if (tickStatistics == null || !tickStatistics.isDurationSupported()) {
- return null;
- }
-
- switch (placeholder) {
- case "tickduration":
- return Component.text()
- .append(StatisticFormatter.formatTickDurations(tickStatistics.duration10Sec())).append(Component.text("; "))
- .append(StatisticFormatter.formatTickDurations(tickStatistics.duration1Min()))
- .build();
- case "tickduration_10s":
- return StatisticFormatter.formatTickDurations(tickStatistics.duration10Sec());
- case "tickduration_1m":
- return StatisticFormatter.formatTickDurations(tickStatistics.duration1Min());
- }
- }
-
- if (placeholder.startsWith("cpu")) {
- switch (placeholder) {
- case "cpu_system":
- return Component.text()
- .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad10SecAvg())).append(Component.text(", "))
- .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad1MinAvg())).append(Component.text(", "))
- .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad15MinAvg()))
- .build();
- case "cpu_system_10s":
- return StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad10SecAvg());
- case "cpu_system_1m":
- return StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad1MinAvg());
- case "cpu_system_15m":
- return StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad15MinAvg());
- case "cpu_process":
- return Component.text()
- .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad10SecAvg())).append(Component.text(", "))
- .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad1MinAvg())).append(Component.text(", "))
- .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad15MinAvg()))
- .build();
- case "cpu_process_10s":
- return StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad10SecAvg());
- case "cpu_process_1m":
- return StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad1MinAvg());
- case "cpu_process_15m":
- return StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad15MinAvg());
- }
- }
-
- return null;
- }
-
- public static String respond(SparkPlatform platform, String placeholder) {
- TextComponent result = respondComponent(platform, placeholder);
- if (result == null) {
- return null;
- }
- return LegacyComponentSerializer.legacySection().serialize(result);
- }
-
-}
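
The placeholder logic deleted here moves to the shared me.lucko.spark.common.util.SparkPlaceholder class added by this commit (see the diffstat above); the Bukkit call sites now go through SparkPlaceholder.resolveFormattingCode. A hedged sketch of that call, inferred from the two updated call sites rather than from the new class itself:

    import me.lucko.spark.common.SparkPlatform;
    import me.lucko.spark.common.util.SparkPlaceholder;

    class PlaceholderUsageSketch {
        // Returns a legacy '§'-coded string for identifiers such as "tps", "tps_5s"
        // or "cpu_system", or null for unknown identifiers (the contract of the
        // deleted respond(...) method, which the new class presumably preserves).
        static String resolve(SparkPlatform platform, String identifier) {
            return SparkPlaceholder.resolveFormattingCode(platform, identifier);
        }
    }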
diff --git a/spark-bungeecord/build.gradle b/spark-bungeecord/build.gradle
index ccea89d..1e92621 100644
--- a/spark-bungeecord/build.gradle
+++ b/spark-bungeecord/build.gradle
@@ -4,13 +4,7 @@ plugins {
dependencies {
implementation project(':spark-common')
- implementation('me.lucko:adventure-platform-bungeecord:4.9.4') {
- exclude(module: 'adventure-api')
- exclude(module: 'checker-qual')
- exclude(module: 'annotations')
- exclude(module: 'adventure-text-serializer-gson')
- exclude(module: 'adventure-text-serializer-legacy')
- }
+ implementation 'net.kyori:adventure-platform-bungeecord:4.1.1'
compileOnly 'net.md-5:bungeecord-api:1.16-R0.4'
}
diff --git a/spark-common/build.gradle b/spark-common/build.gradle
index 554eec2..bc493f3 100644
--- a/spark-common/build.gradle
+++ b/spark-common/build.gradle
@@ -15,15 +15,18 @@ dependencies {
implementation 'com.squareup.okio:okio:1.17.3'
implementation 'net.bytebuddy:byte-buddy-agent:1.11.0'
implementation 'org.tukaani:xz:1.8'
- api('net.kyori:adventure-api:4.9.3') {
+ api('net.kyori:adventure-api:4.11.0') {
+ exclude(module: 'adventure-bom')
exclude(module: 'checker-qual')
exclude(module: 'annotations')
}
- api('net.kyori:adventure-text-serializer-gson:4.9.3') {
+ api('net.kyori:adventure-text-serializer-gson:4.11.0') {
+ exclude(module: 'adventure-bom')
exclude(module: 'adventure-api')
exclude(module: 'gson')
}
- api('net.kyori:adventure-text-serializer-legacy:4.9.3') {
+ api('net.kyori:adventure-text-serializer-legacy:4.11.0') {
+ exclude(module: 'adventure-bom')
exclude(module: 'adventure-api')
}
implementation('net.kyori:adventure-text-feature-pagination:4.0.0-SNAPSHOT') {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java
index b817df1..1116b04 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java
@@ -25,6 +25,7 @@ import me.lucko.spark.common.command.sender.CommandSender;
import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
import me.lucko.spark.common.platform.PlatformInfo;
import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
import me.lucko.spark.common.sampler.ThreadDumper;
import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.common.tick.TickReporter;
@@ -75,6 +76,15 @@ public interface SparkPlugin {
void executeAsync(Runnable task);
/**
+ * Executes the given {@link Runnable} on the server/client main thread.
+ *
+ * @param task the task
+ */
+ default void executeSync(Runnable task) {
+ throw new UnsupportedOperationException();
+ }
+
+ /**
* Print to the plugin logger.
*
* @param level the log level
@@ -143,6 +153,15 @@ public interface SparkPlugin {
}
/**
+ * Creates a world info provider.
+ *
+ * @return the world info provider function
+ */
+ default WorldInfoProvider createWorldInfoProvider() {
+ return WorldInfoProvider.NO_OP;
+ }
+
+ /**
* Gets information for the platform.
*
* @return information about the platform
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/ActivityLogModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/ActivityLogModule.java
index b777f3e..6252ac7 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/ActivityLogModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/ActivityLogModule.java
@@ -50,6 +50,7 @@ import static net.kyori.adventure.text.format.TextDecoration.BOLD;
public class ActivityLogModule implements CommandModule, RowRenderer<Activity> {
private final Pagination.Builder pagination = Pagination.builder()
+ .width(45)
.renderer(new Renderer() {
@Override
public Component renderEmpty() {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
index 970d062..fd5cd67 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
@@ -266,7 +266,7 @@ public class SamplerModule implements CommandModule {
if (this.activeSampler == null) {
resp.replyPrefixed(text("There isn't an active profiler running."));
} else {
- long timeout = this.activeSampler.getEndTime();
+ long timeout = this.activeSampler.getAutoEndTime();
if (timeout == -1) {
resp.replyPrefixed(text("There is an active profiler currently running, with no defined timeout."));
} else {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java
index f35bbbe..49cfed5 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java
@@ -30,8 +30,11 @@ import me.lucko.spark.common.monitor.net.NetworkInterfaceAverages;
import me.lucko.spark.common.monitor.net.NetworkMonitor;
import me.lucko.spark.common.monitor.ping.PingStatistics;
import me.lucko.spark.common.monitor.tick.TickStatistics;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
+import me.lucko.spark.common.platform.world.WorldStatisticsProvider;
import me.lucko.spark.proto.SparkProtos.PlatformStatistics;
import me.lucko.spark.proto.SparkProtos.SystemStatistics;
+import me.lucko.spark.proto.SparkProtos.WorldStatistics;
import java.lang.management.ManagementFactory;
import java.lang.management.MemoryUsage;
@@ -182,6 +185,18 @@ public class PlatformStatisticsProvider {
builder.setPlayerCount(playerCount);
}
+ try {
+ WorldInfoProvider worldInfo = this.platform.getPlugin().createWorldInfoProvider();
+ WorldStatisticsProvider worldStatisticsProvider = new WorldStatisticsProvider(this.platform, worldInfo);
+ WorldStatistics worldStatistics = worldStatisticsProvider.getWorldStatistics();
+ if (worldStatistics != null) {
+ builder.setWorld(worldStatistics);
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+
+
return builder.build();
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java
index ead2131..559ae95 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java
@@ -22,45 +22,39 @@ package me.lucko.spark.common.platform.serverconfig;
import com.google.common.collect.ImmutableMap;
import com.google.gson.JsonElement;
-import com.google.gson.JsonObject;
-import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
-import java.util.Deque;
-import java.util.LinkedList;
+import java.util.List;
import java.util.Map;
-import java.util.stream.Collectors;
/**
* Abstract implementation of {@link ServerConfigProvider}.
*
* <p>This implementation is able to delete hidden paths from
* the configurations before they are sent to the viewer.</p>
- *
- * @param <T> the file type
*/
-public abstract class AbstractServerConfigProvider<T extends Enum<T>> implements ServerConfigProvider {
- private final Map<String, T> files;
- private final Collection<String> hiddenPaths;
+public abstract class AbstractServerConfigProvider implements ServerConfigProvider {
+ private final Map<String, ConfigParser> files;
+ private final ExcludedConfigFilter hiddenPathFilters;
- protected AbstractServerConfigProvider(Map<String, T> files, Collection<String> hiddenPaths) {
+ protected AbstractServerConfigProvider(Map<String, ConfigParser> files, Collection<String> hiddenPaths) {
this.files = files;
- this.hiddenPaths = hiddenPaths;
+ this.hiddenPathFilters = new ExcludedConfigFilter(hiddenPaths);
}
@Override
public final Map<String, JsonElement> loadServerConfigurations() {
ImmutableMap.Builder<String, JsonElement> builder = ImmutableMap.builder();
- this.files.forEach((path, type) -> {
+ this.files.forEach((path, parser) -> {
try {
- JsonElement json = load(path, type);
- if (json != null) {
- delete(json, this.hiddenPaths);
- builder.put(path, json);
+ JsonElement json = parser.load(path, this.hiddenPathFilters);
+ if (json == null) {
+ return;
}
+ builder.put(path, json);
} catch (Exception e) {
e.printStackTrace();
}
@@ -69,68 +63,11 @@ public abstract class AbstractServerConfigProvider<T extends Enum<T>> implements
return builder.build();
}
- /**
- * Loads a file from the system.
- *
- * @param path the name of the file to load
- * @param type the type of the file
- * @return the loaded file
- * @throws IOException if an error occurs performing i/o
- */
- protected abstract JsonElement load(String path, T type) throws IOException;
-
- /**
- * Deletes the given paths from the json element.
- *
- * @param json the json element
- * @param paths the paths to delete
- */
- private static void delete(JsonElement json, Collection<String> paths) {
- for (String path : paths) {
- Deque<String> pathDeque = new LinkedList<>(Arrays.asList(path.split("\\.")));
- delete(json, pathDeque);
- }
- }
-
- private static void delete(JsonElement json, Deque<String> path) {
- if (path.isEmpty()) {
- return;
- }
- if (!json.isJsonObject()) {
- return;
- }
-
- JsonObject jsonObject = json.getAsJsonObject();
- String expected = path.removeFirst().replace("<dot>", ".");
-
- Collection<String> keys;
- if (expected.equals("*")) {
- keys = jsonObject.entrySet().stream()
- .map(Map.Entry::getKey)
- .collect(Collectors.toList());
- } else if (expected.endsWith("*")) {
- String pattern = expected.substring(0, expected.length() - 1);
- keys = jsonObject.entrySet().stream()
- .map(Map.Entry::getKey)
- .filter(key -> key.startsWith(pattern))
- .collect(Collectors.toList());
- } else if (jsonObject.has(expected)) {
- keys = Collections.singletonList(expected);
- } else {
- keys = Collections.emptyList();
- }
-
- for (String key : keys) {
- if (path.isEmpty()) {
- jsonObject.remove(key);
- } else {
- Deque<String> pathCopy = keys.size() > 1
- ? new LinkedList<>(path)
- : path;
-
- delete(jsonObject.get(key), pathCopy);
- }
- }
+ protected static List<String> getSystemPropertyList(String property) {
+ String value = System.getProperty(property);
+ return value == null
+ ? Collections.emptyList()
+ : Arrays.asList(value.split(","));
}
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ConfigParser.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ConfigParser.java
new file mode 100644
index 0000000..675a32e
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ConfigParser.java
@@ -0,0 +1,48 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform.serverconfig;
+
+import com.google.gson.JsonElement;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.Map;
+
+public interface ConfigParser {
+
+ JsonElement load(String file, ExcludedConfigFilter filter) throws IOException;
+
+ default Map<String, Object> parse(Path file) throws IOException {
+ if (!Files.exists(file)) {
+ return null;
+ }
+
+ try (BufferedReader reader = Files.newBufferedReader(file, StandardCharsets.UTF_8)) {
+ return this.parse(reader);
+ }
+ }
+
+ Map<String, Object> parse(BufferedReader reader) throws IOException;
+
+}
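
ConfigParser splits the work in two: parse turns a reader into a flat map, and load ties parsing to the exclusion filter. A hypothetical parser for .json configs (not part of this commit), following the same enum-singleton pattern as PropertiesConfigParser below:

    import com.google.gson.Gson;
    import com.google.gson.JsonElement;
    import com.google.gson.reflect.TypeToken;

    import me.lucko.spark.common.platform.serverconfig.ConfigParser;
    import me.lucko.spark.common.platform.serverconfig.ExcludedConfigFilter;

    import java.io.BufferedReader;
    import java.io.IOException;
    import java.nio.file.Paths;
    import java.util.Map;

    public enum JsonConfigParser implements ConfigParser {
        INSTANCE;

        private static final Gson GSON = new Gson();

        @Override
        public JsonElement load(String file, ExcludedConfigFilter filter) throws IOException {
            Map<String, Object> values = this.parse(Paths.get(file));
            if (values == null) {
                return null; // file does not exist
            }
            return filter.apply(GSON.toJsonTree(values));
        }

        @Override
        public Map<String, Object> parse(BufferedReader reader) throws IOException {
            // Gson's type token keeps the string-keyed map structure intact.
            return GSON.fromJson(reader, new TypeToken<Map<String, Object>>(){}.getType());
        }
    }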
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ExcludedConfigFilter.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ExcludedConfigFilter.java
new file mode 100644
index 0000000..c11c7f8
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ExcludedConfigFilter.java
@@ -0,0 +1,97 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform.serverconfig;
+
+import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
+
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Deque;
+import java.util.LinkedList;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+/**
+ * Filters excluded paths from {@link JsonElement}s (parsed configuration files).
+ */
+public class ExcludedConfigFilter {
+ private final Collection<String> pathsToExclude;
+
+ public ExcludedConfigFilter(Collection<String> pathsToExclude) {
+ this.pathsToExclude = pathsToExclude;
+ }
+
+ /**
+ * Deletes the excluded paths from the json element.
+ *
+ * @param json the json element
+ */
+ public JsonElement apply(JsonElement json) {
+ for (String path : this.pathsToExclude) {
+ Deque<String> pathDeque = new LinkedList<>(Arrays.asList(path.split("\\.")));
+ delete(json, pathDeque);
+ }
+ return json;
+ }
+
+ private static void delete(JsonElement json, Deque<String> path) {
+ if (path.isEmpty()) {
+ return;
+ }
+ if (!json.isJsonObject()) {
+ return;
+ }
+
+ JsonObject jsonObject = json.getAsJsonObject();
+ String expected = path.removeFirst().replace("<dot>", ".");
+
+ Collection<String> keys;
+ if (expected.equals("*")) {
+ keys = jsonObject.entrySet().stream()
+ .map(Map.Entry::getKey)
+ .collect(Collectors.toList());
+ } else if (expected.endsWith("*")) {
+ String pattern = expected.substring(0, expected.length() - 1);
+ keys = jsonObject.entrySet().stream()
+ .map(Map.Entry::getKey)
+ .filter(key -> key.startsWith(pattern))
+ .collect(Collectors.toList());
+ } else if (jsonObject.has(expected)) {
+ keys = Collections.singletonList(expected);
+ } else {
+ keys = Collections.emptyList();
+ }
+
+ for (String key : keys) {
+ if (path.isEmpty()) {
+ jsonObject.remove(key);
+ } else {
+ Deque<String> pathCopy = keys.size() > 1
+ ? new LinkedList<>(path)
+ : path;
+
+ delete(jsonObject.get(key), pathCopy);
+ }
+ }
+ }
+}
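
The filter understands four path forms: an exact dotted path, a bare "*" (every key at that level), a trailing wildcard such as "seed-*", and "<dot>" as an escape for a literal dot inside a key. A small runnable sketch of the semantics, with invented values (JsonParser.parseString assumes Gson 2.8.6 or newer):

    import com.google.gson.JsonElement;
    import com.google.gson.JsonParser;

    import me.lucko.spark.common.platform.serverconfig.ExcludedConfigFilter;

    import java.util.Arrays;

    public class FilterDemo {
        public static void main(String[] args) {
            JsonElement json = JsonParser.parseString(
                    "{\"settings\":{\"velocity-support\":{\"secret\":\"abc\",\"enabled\":true}},"
                    + "\"server-ip\":\"1.2.3.4\",\"seed-village\":10,\"seed-feature\":20}");

            ExcludedConfigFilter filter = new ExcludedConfigFilter(Arrays.asList(
                    "settings.velocity-support.secret", // exact nested path
                    "server-ip",                        // exact top-level key
                    "seed-*"                            // trailing wildcard
            ));

            System.out.println(filter.apply(json));
            // prints: {"settings":{"velocity-support":{"enabled":true}}}
        }
    }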
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesFileReader.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesConfigParser.java
index 8fc89d7..344ba1c 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesFileReader.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesConfigParser.java
@@ -20,25 +20,38 @@
package me.lucko.spark.common.platform.serverconfig;
-import java.io.FilterReader;
+import com.google.gson.Gson;
+import com.google.gson.JsonElement;
+
+import java.io.BufferedReader;
import java.io.IOException;
-import java.io.Reader;
+import java.nio.file.Paths;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
/**
- * A {@link Reader} that can parse a .properties file.
+ * A {@link ConfigParser} that can parse a .properties file.
*/
-public class PropertiesFileReader extends FilterReader {
+public enum PropertiesConfigParser implements ConfigParser {
+ INSTANCE;
+
+ private static final Gson GSON = new Gson();
+
+ @Override
+ public JsonElement load(String file, ExcludedConfigFilter filter) throws IOException {
+ Map<String, Object> values = this.parse(Paths.get(file));
+ if (values == null) {
+ return null;
+ }
- public PropertiesFileReader(Reader in) {
- super(in);
+ return filter.apply(GSON.toJsonTree(values));
}
- public Map<String, Object> readProperties() throws IOException {
+ @Override
+ public Map<String, Object> parse(BufferedReader reader) throws IOException {
Properties properties = new Properties();
- properties.load(this);
+ properties.load(reader);
Map<String, Object> values = new HashMap<>();
properties.forEach((k, v) -> {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java
index 1fc2391..c66305f 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java
@@ -23,8 +23,8 @@ package me.lucko.spark.common.platform.serverconfig;
import com.google.gson.JsonElement;
import java.util.Collections;
+import java.util.LinkedHashMap;
import java.util.Map;
-import java.util.stream.Collectors;
/**
* Function to export server configuration files for access within the spark viewer.
@@ -43,12 +43,9 @@ public interface ServerConfigProvider {
Map<String, JsonElement> loadServerConfigurations();
default Map<String, String> exportServerConfigurations() {
- return loadServerConfigurations().entrySet()
- .stream()
- .collect(Collectors.toMap(
- Map.Entry::getKey,
- e -> e.getValue().toString()
- ));
+ Map<String, String> map = new LinkedHashMap<>();
+ loadServerConfigurations().forEach((key, value) -> map.put(key, value.toString()));
+ return map;
}
/**
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/AbstractChunkInfo.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/AbstractChunkInfo.java
new file mode 100644
index 0000000..80026cd
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/AbstractChunkInfo.java
@@ -0,0 +1,55 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform.world;
+
+public abstract class AbstractChunkInfo<E> implements ChunkInfo<E> {
+ private final int x;
+ private final int z;
+
+ protected AbstractChunkInfo(int x, int z) {
+ this.x = x;
+ this.z = z;
+ }
+
+ @Override
+ public int getX() {
+ return this.x;
+ }
+
+ @Override
+ public int getZ() {
+ return this.z;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (obj == this) return true;
+ if (!(obj instanceof AbstractChunkInfo)) return false;
+ AbstractChunkInfo<?> that = (AbstractChunkInfo<?>) obj;
+ return this.x == that.x && this.z == that.z;
+ }
+
+ @Override
+ public int hashCode() {
+ return this.x ^ this.z;
+ }
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/ChunkInfo.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/ChunkInfo.java
new file mode 100644
index 0000000..2193a50
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/ChunkInfo.java
@@ -0,0 +1,44 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform.world;
+
+/**
+ * Information about a given chunk.
+ *
+ * @param <E> the type used to describe entities
+ */
+public interface ChunkInfo<E> {
+
+ int getX();
+
+ int getZ();
+
+ CountMap<E> getEntityCounts();
+
+ /**
+ * Converts entity type {@link E} to a string.
+ *
+ * @param type the entity type
+ * @return a string
+ */
+ String entityTypeName(E type);
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/CountMap.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/CountMap.java
new file mode 100644
index 0000000..3083266
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/CountMap.java
@@ -0,0 +1,110 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform.world;
+
+import java.util.EnumMap;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicInteger;
+
+/**
+ * A map of (key) -> count.
+ *
+ * @param <T> the key type
+ */
+public interface CountMap<T> {
+
+ /**
+ * Increment the counter for the given key
+ *
+ * @param key the key
+ */
+ void increment(T key);
+
+ /**
+ * Add to the counter for the given key
+ *
+ * @param key the key
+ */
+ void add(T key, int delta);
+
+ AtomicInteger total();
+
+ Map<T, AtomicInteger> asMap();
+
+ /**
+ * A simple {@link CountMap} backed by the provided {@link Map}
+ *
+ * @param <T> the key type
+ */
+ class Simple<T> implements CountMap<T> {
+ private final Map<T, AtomicInteger> counts;
+ private final AtomicInteger total;
+
+ public Simple(Map<T, AtomicInteger> counts) {
+ this.counts = counts;
+ this.total = new AtomicInteger();
+ }
+
+ @Override
+ public void increment(T key) {
+ AtomicInteger counter = this.counts.get(key);
+ if (counter == null) {
+ counter = new AtomicInteger();
+ this.counts.put(key, counter);
+ }
+ counter.incrementAndGet();
+ this.total.incrementAndGet();
+ }
+
+ @Override
+ public void add(T key, int delta) {
+ AtomicInteger counter = this.counts.get(key);
+ if (counter == null) {
+ counter = new AtomicInteger();
+ this.counts.put(key, counter);
+ }
+ counter.addAndGet(delta);
+ this.total.addAndGet(delta);
+ }
+
+ @Override
+ public AtomicInteger total() {
+ return this.total;
+ }
+
+ @Override
+ public Map<T, AtomicInteger> asMap() {
+ return this.counts;
+ }
+ }
+
+ /**
+ * A {@link CountMap} backed by an {@link EnumMap}.
+ *
+ * @param <T> the key type - must be an enum
+ */
+ class EnumKeyed<T extends Enum<T>> extends Simple<T> {
+ public EnumKeyed(Class<T> keyClass) {
+ super(new EnumMap<>(keyClass));
+ }
+ }
+
+}
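
A quick sketch of the two CountMap variants: Simple wraps any backing Map (here a HashMap), while EnumKeyed uses an EnumMap for enum keys, as the Bukkit provider above does with EntityType. The key names here are invented:

    import me.lucko.spark.common.platform.world.CountMap;

    import java.util.HashMap;

    public class CountMapDemo {
        enum Mob { ZOMBIE, COW }

        public static void main(String[] args) {
            CountMap<String> byName = new CountMap.Simple<>(new HashMap<>());
            byName.increment("zombie");
            byName.add("cow", 3);
            System.out.println(byName.total());  // 4
            System.out.println(byName.asMap());  // e.g. {cow=3, zombie=1} (HashMap order unspecified)

            CountMap<Mob> byType = new CountMap.EnumKeyed<>(Mob.class);
            byType.increment(Mob.ZOMBIE);
            System.out.println(byType.asMap());  // {ZOMBIE=1}, in enum declaration order
        }
    }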
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldInfoProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldInfoProvider.java
new file mode 100644
index 0000000..9494816
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldInfoProvider.java
@@ -0,0 +1,57 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform.world;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Provides information about worlds.
+ */
+public interface WorldInfoProvider {
+
+ WorldInfoProvider NO_OP = () -> null;
+
+ /**
+ * Polls for information.
+ *
+ * @return the information
+ */
+ Result<? extends ChunkInfo<?>> poll();
+
+ default boolean mustCallSync() {
+ return true;
+ }
+
+ final class Result<T> {
+ private final Map<String, List<T>> worlds = new HashMap<>();
+
+ public void put(String worldName, List<T> chunks) {
+ this.worlds.put(worldName, chunks);
+ }
+
+ public Map<String, List<T>> getWorlds() {
+ return this.worlds;
+ }
+ }
+
+}
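
A platform supplies one of these by overriding SparkPlugin#createWorldInfoProvider (see the SparkPlugin diff above); the Bukkit, Sponge, Fabric and Forge modules in this commit all do so. A minimal hypothetical provider with a single invented world, for illustration:

    import me.lucko.spark.common.platform.world.AbstractChunkInfo;
    import me.lucko.spark.common.platform.world.CountMap;
    import me.lucko.spark.common.platform.world.WorldInfoProvider;

    import java.util.Collections;
    import java.util.HashMap;

    public class DemoWorldInfoProvider implements WorldInfoProvider {

        @Override
        public Result<DemoChunkInfo> poll() {
            Result<DemoChunkInfo> result = new Result<>();
            DemoChunkInfo chunk = new DemoChunkInfo(0, 0);
            chunk.counts.add("minecraft:zombie", 5);
            result.put("world", Collections.singletonList(chunk));
            return result;
        }

        @Override
        public boolean mustCallSync() {
            return false; // this demo data is safe to read off-thread
        }

        static final class DemoChunkInfo extends AbstractChunkInfo<String> {
            final CountMap<String> counts = new CountMap.Simple<>(new HashMap<>());

            DemoChunkInfo(int x, int z) {
                super(x, z);
            }

            @Override
            public CountMap<String> getEntityCounts() {
                return this.counts;
            }

            @Override
            public String entityTypeName(String type) {
                return type; // keys are already strings here
            }
        }
    }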
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java
new file mode 100644
index 0000000..80c35a6
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java
@@ -0,0 +1,220 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform.world;
+
+import me.lucko.spark.common.SparkPlatform;
+import me.lucko.spark.common.SparkPlugin;
+import me.lucko.spark.proto.SparkProtos.WorldStatistics;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Set;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.logging.Level;
+
+public class WorldStatisticsProvider {
+ private final SparkPlatform platform;
+ private final WorldInfoProvider provider;
+
+ public WorldStatisticsProvider(SparkPlatform platform, WorldInfoProvider provider) {
+ this.platform = platform;
+ this.provider = provider;
+ }
+
+ public WorldStatistics getWorldStatistics() {
+ if (this.provider == WorldInfoProvider.NO_OP) {
+ return null;
+ }
+
+ CompletableFuture<WorldInfoProvider.Result<? extends ChunkInfo<?>>> future;
+
+ if (this.provider.mustCallSync()) {
+ SparkPlugin plugin = this.platform.getPlugin();
+ future = CompletableFuture.supplyAsync(this.provider::poll, plugin::executeSync);
+ } else {
+ future = CompletableFuture.completedFuture(this.provider.poll());
+ }
+
+ WorldInfoProvider.Result<? extends ChunkInfo<?>> result;
+ try {
+ result = future.get(5, TimeUnit.SECONDS);
+ } catch (InterruptedException | ExecutionException e) {
+ throw new RuntimeException(e);
+ } catch (TimeoutException e) {
+ this.platform.getPlugin().log(Level.WARNING, "Timed out waiting for world statistics");
+ return null;
+ }
+
+ if (result == null) {
+ return null;
+ }
+
+ WorldStatistics.Builder stats = WorldStatistics.newBuilder();
+
+ AtomicInteger combinedTotal = new AtomicInteger();
+ CountMap<String> combined = new CountMap.Simple<>(new HashMap<>());
+
+ result.getWorlds().forEach((worldName, chunks) -> {
+ WorldStatistics.World.Builder builder = WorldStatistics.World.newBuilder();
+ builder.setName(worldName);
+
+ List<Region> regions = groupIntoRegions(chunks);
+
+ int total = 0;
+
+ for (Region region : regions) {
+ builder.addRegions(regionToProto(region, combined));
+ total += region.getTotalEntities().get();
+ }
+
+ builder.setTotalEntities(total);
+ combinedTotal.addAndGet(total);
+
+ stats.addWorlds(builder.build());
+ });
+
+ stats.setTotalEntities(combinedTotal.get());
+ combined.asMap().forEach((key, value) -> stats.putEntityCounts(key, value.get()));
+
+ return stats.build();
+ }
+
+ private static WorldStatistics.Region regionToProto(Region region, CountMap<String> combined) {
+ WorldStatistics.Region.Builder builder = WorldStatistics.Region.newBuilder();
+ builder.setTotalEntities(region.getTotalEntities().get());
+ for (ChunkInfo<?> chunk : region.getChunks()) {
+ builder.addChunks(chunkToProto(chunk, combined));
+ }
+ return builder.build();
+ }
+
+ private static <E> WorldStatistics.Chunk chunkToProto(ChunkInfo<E> chunk, CountMap<String> combined) {
+ WorldStatistics.Chunk.Builder builder = WorldStatistics.Chunk.newBuilder();
+ builder.setX(chunk.getX());
+ builder.setZ(chunk.getZ());
+ builder.setTotalEntities(chunk.getEntityCounts().total().get());
+ chunk.getEntityCounts().asMap().forEach((key, value) -> {
+ String name = chunk.entityTypeName(key);
+ int count = value.get();
+
+ if (name == null) {
+ name = "unknown[" + key.toString() + "]";
+ }
+
+ builder.putEntityCounts(name, count);
+ combined.add(name, count);
+ });
+ return builder.build();
+ }
+
+ private static List<Region> groupIntoRegions(List<? extends ChunkInfo<?>> chunks) {
+ List<Region> regions = new ArrayList<>();
+
+ for (ChunkInfo<?> chunk : chunks) {
+ CountMap<?> counts = chunk.getEntityCounts();
+ if (counts.total().get() == 0) {
+ continue;
+ }
+
+ boolean found = false;
+
+ for (Region region : regions) {
+ if (region.isAdjacent(chunk)) {
+ found = true;
+ region.add(chunk);
+
+ // if the chunk is adjacent to more than one region, merge the regions together
+ for (Iterator<Region> iterator = regions.iterator(); iterator.hasNext(); ) {
+ Region otherRegion = iterator.next();
+ if (region != otherRegion && otherRegion.isAdjacent(chunk)) {
+ iterator.remove();
+ region.merge(otherRegion);
+ }
+ }
+
+ break;
+ }
+ }
+
+ if (!found) {
+ regions.add(new Region(chunk));
+ }
+ }
+
+ return regions;
+ }
+
+ /**
+ * A group of nearby chunks, clustered together by squared Euclidean distance.
+ */
+ private static final class Region {
+ private static final int DISTANCE_THRESHOLD = 2;
+ private final Set<ChunkInfo<?>> chunks;
+ private final AtomicInteger totalEntities;
+
+ private Region(ChunkInfo<?> initial) {
+ this.chunks = new HashSet<>();
+ this.chunks.add(initial);
+ this.totalEntities = new AtomicInteger(initial.getEntityCounts().total().get());
+ }
+
+ public Set<ChunkInfo<?>> getChunks() {
+ return this.chunks;
+ }
+
+ public AtomicInteger getTotalEntities() {
+ return this.totalEntities;
+ }
+
+ public boolean isAdjacent(ChunkInfo<?> chunk) {
+ for (ChunkInfo<?> el : this.chunks) {
+ if (squaredEuclideanDistance(el, chunk) <= DISTANCE_THRESHOLD) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ public void add(ChunkInfo<?> chunk) {
+ this.chunks.add(chunk);
+ this.totalEntities.addAndGet(chunk.getEntityCounts().total().get());
+ }
+
+ public void merge(Region group) {
+ this.chunks.addAll(group.getChunks());
+ this.totalEntities.addAndGet(group.getTotalEntities().get());
+ }
+
+ private static long squaredEuclideanDistance(ChunkInfo<?> a, ChunkInfo<?> b) {
+ long dx = a.getX() - b.getX();
+ long dz = a.getZ() - b.getZ();
+ return (dx * dx) + (dz * dz);
+ }
+ }
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java
index ce466a0..1c217db 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java
@@ -27,6 +27,7 @@ import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider;
import me.lucko.spark.common.sampler.aggregator.DataAggregator;
import me.lucko.spark.common.sampler.node.MergeMode;
import me.lucko.spark.common.sampler.node.ThreadNode;
+import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.common.util.ClassSourceLookup;
import me.lucko.spark.proto.SparkSamplerProtos.SamplerData;
import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata;
@@ -41,6 +42,9 @@ import java.util.concurrent.CompletableFuture;
*/
public abstract class AbstractSampler implements Sampler {
+ /** The spark platform instance */
+ protected final SparkPlatform platform;
+
/** The interval to wait between sampling, in microseconds */
protected final int interval;
@@ -50,8 +54,11 @@ public abstract class AbstractSampler implements Sampler {
/** The time when sampling first began */
protected long startTime = -1;
+ /** The game tick when sampling first began */
+ protected int startTick = -1;
+
/** The unix timestamp (in millis) when this sampler should automatically complete. */
- protected final long endTime; // -1 for nothing
+ protected final long autoEndTime; // -1 for nothing
/** A future to encapsulate the completion of this sampler instance */
protected final CompletableFuture<Sampler> future = new CompletableFuture<>();
@@ -59,10 +66,11 @@ public abstract class AbstractSampler implements Sampler {
/** The garbage collector statistics when profiling started */
protected Map<String, GarbageCollectorStatistics> initialGcStats;
- protected AbstractSampler(int interval, ThreadDumper threadDumper, long endTime) {
+ protected AbstractSampler(SparkPlatform platform, int interval, ThreadDumper threadDumper, long autoEndTime) {
+ this.platform = platform;
this.interval = interval;
this.threadDumper = threadDumper;
- this.endTime = endTime;
+ this.autoEndTime = autoEndTime;
}
@Override
@@ -74,8 +82,8 @@ public abstract class AbstractSampler implements Sampler {
}
@Override
- public long getEndTime() {
- return this.endTime;
+ public long getAutoEndTime() {
+ return this.autoEndTime;
}
@Override
@@ -91,6 +99,16 @@ public abstract class AbstractSampler implements Sampler {
return this.initialGcStats;
}
+ @Override
+ public void start() {
+ this.startTime = System.currentTimeMillis();
+
+ TickHook tickHook = this.platform.getTickHook();
+ if (tickHook != null) {
+ this.startTick = tickHook.getCurrentTick();
+ }
+ }
+
protected void writeMetadataToProto(SamplerData.Builder proto, SparkPlatform platform, CommandSender creator, String comment, DataAggregator dataAggregator) {
SamplerMetadata.Builder metadata = SamplerMetadata.newBuilder()
.setPlatformMetadata(platform.getPlugin().getPlatformInfo().toData().toProto())
@@ -105,6 +123,14 @@ public abstract class AbstractSampler implements Sampler {
metadata.setComment(comment);
}
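+        // if a tick hook was available at startup, record how many ticks elapsed while sampling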
+ if (this.startTick != -1) {
+ TickHook tickHook = this.platform.getTickHook();
+ if (tickHook != null) {
+ int numberOfTicks = tickHook.getCurrentTick() - this.startTick;
+ metadata.setNumberOfTicks(numberOfTicks);
+ }
+ }
+
try {
metadata.setPlatformStatistics(platform.getStatisticsProvider().getPlatformStatistics(getInitialGcStats()));
} catch (Exception e) {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
index 845043f..84f2da1 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
@@ -57,7 +57,7 @@ public interface Sampler {
*
* @return the end time, or -1 if undefined
*/
- long getEndTime();
+ long getAutoEndTime();
/**
* Gets a future to encapsulate the completion of the sampler
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java
index 88cf018..88b9d91 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java
@@ -97,11 +97,11 @@ public class SamplerBuilder {
Sampler sampler;
if (this.ticksOver != -1 && this.tickHook != null) {
- sampler = new JavaSampler(intervalMicros, this.threadDumper, this.threadGrouper, this.timeout, this.ignoreSleeping, this.ignoreNative, this.tickHook, this.ticksOver);
+ sampler = new JavaSampler(platform, intervalMicros, this.threadDumper, this.threadGrouper, this.timeout, this.ignoreSleeping, this.ignoreNative, this.tickHook, this.ticksOver);
} else if (this.useAsyncProfiler && !(this.threadDumper instanceof ThreadDumper.Regex) && AsyncProfilerAccess.INSTANCE.checkSupported(platform)) {
- sampler = new AsyncSampler(intervalMicros, this.threadDumper, this.threadGrouper, this.timeout);
+ sampler = new AsyncSampler(platform, intervalMicros, this.threadDumper, this.threadGrouper, this.timeout);
} else {
- sampler = new JavaSampler(intervalMicros, this.threadDumper, this.threadGrouper, this.timeout, this.ignoreSleeping, this.ignoreNative);
+ sampler = new JavaSampler(platform, intervalMicros, this.threadDumper, this.threadGrouper, this.timeout, this.ignoreSleeping, this.ignoreNative);
}
sampler.start();
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java
index 5cb7fdc..d8288da 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java
@@ -65,8 +65,8 @@ public class AsyncSampler extends AbstractSampler {
/** The executor used for timeouts */
private ScheduledExecutorService timeoutExecutor;
- public AsyncSampler(int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime) {
- super(interval, threadDumper, endTime);
+ public AsyncSampler(SparkPlatform platform, int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime) {
+ super(platform, interval, threadDumper, endTime);
this.profiler = AsyncProfilerAccess.INSTANCE.getProfiler();
this.dataAggregator = new AsyncDataAggregator(threadGrouper);
}
@@ -90,7 +90,7 @@ public class AsyncSampler extends AbstractSampler {
*/
@Override
public void start() {
- this.startTime = System.currentTimeMillis();
+ super.start();
try {
this.outputFile = TemporaryFiles.create("spark-profile-", ".jfr.tmp");
@@ -120,11 +120,11 @@ public class AsyncSampler extends AbstractSampler {
}
private void scheduleTimeout() {
- if (this.endTime == -1) {
+ if (this.autoEndTime == -1) {
return;
}
- long delay = this.endTime - System.currentTimeMillis();
+ long delay = this.autoEndTime - System.currentTimeMillis();
if (delay <= 0) {
return;
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java
index cfa0a0f..913faee 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java
@@ -63,19 +63,19 @@ public class JavaSampler extends AbstractSampler implements Runnable {
/** Responsible for aggregating and then outputting collected sampling data */
private final JavaDataAggregator dataAggregator;
- public JavaSampler(int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime, boolean ignoreSleeping, boolean ignoreNative) {
- super(interval, threadDumper, endTime);
+ public JavaSampler(SparkPlatform platform, int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime, boolean ignoreSleeping, boolean ignoreNative) {
+ super(platform, interval, threadDumper, endTime);
this.dataAggregator = new SimpleDataAggregator(this.workerPool, threadGrouper, interval, ignoreSleeping, ignoreNative);
}
- public JavaSampler(int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime, boolean ignoreSleeping, boolean ignoreNative, TickHook tickHook, int tickLengthThreshold) {
- super(interval, threadDumper, endTime);
+ public JavaSampler(SparkPlatform platform, int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime, boolean ignoreSleeping, boolean ignoreNative, TickHook tickHook, int tickLengthThreshold) {
+ super(platform, interval, threadDumper, endTime);
this.dataAggregator = new TickedDataAggregator(this.workerPool, threadGrouper, interval, ignoreSleeping, ignoreNative, tickHook, tickLengthThreshold);
}
@Override
public void start() {
- this.startTime = System.currentTimeMillis();
+ super.start();
this.task = this.workerPool.scheduleAtFixedRate(this, 0, this.interval, TimeUnit.MICROSECONDS);
}
@@ -89,7 +89,7 @@ public class JavaSampler extends AbstractSampler implements Runnable {
// this is effectively synchronized, the worker pool will not allow this task
// to concurrently execute.
try {
- if (this.endTime != -1 && this.endTime <= System.currentTimeMillis()) {
+ if (this.autoEndTime != -1 && this.autoEndTime <= System.currentTimeMillis()) {
this.future.complete(this);
stop();
return;
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java
index e817828..e062f31 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java
@@ -31,6 +31,7 @@ import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicInteger;
/**
* Implementation of {@link DataAggregator} which supports only including sampling data from "ticks"
@@ -47,6 +48,9 @@ public class TickedDataAggregator extends JavaDataAggregator {
/** The expected number of samples in each tick */
private final int expectedSize;
+ /** The number of ticks aggregated so far */
+ private final AtomicInteger numberOfTicks = new AtomicInteger();
+
private final Object mutex = new Object();
// state
@@ -64,10 +68,16 @@ public class TickedDataAggregator extends JavaDataAggregator {
@Override
public SamplerMetadata.DataAggregator getMetadata() {
+ // push the current tick (so numberOfTicks is accurate)
+ synchronized (this.mutex) {
+ pushCurrentTick();
+ }
+
return SamplerMetadata.DataAggregator.newBuilder()
.setType(SamplerMetadata.DataAggregator.Type.TICKED)
.setThreadGrouper(this.threadGrouper.asProto())
.setTickLengthThreshold(this.tickLengthThreshold)
+ .setNumberOfIncludedTicks(this.numberOfTicks.get())
.build();
}
@@ -97,6 +107,7 @@ public class TickedDataAggregator extends JavaDataAggregator {
return;
}
+ this.numberOfTicks.incrementAndGet();
this.workerPool.submit(currentData);
}
@@ -110,6 +121,10 @@ public class TickedDataAggregator extends JavaDataAggregator {
return super.exportData();
}
+ public int getNumberOfTicks() {
+ return this.numberOfTicks.get();
+ }
+
private final class TickList implements Runnable {
private final List<ThreadInfo> list;
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/SparkPlaceholder.java b/spark-common/src/main/java/me/lucko/spark/common/util/SparkPlaceholder.java
new file mode 100644
index 0000000..be5bbc2
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/SparkPlaceholder.java
@@ -0,0 +1,191 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.util;
+
+import me.lucko.spark.common.SparkPlatform;
+import me.lucko.spark.common.monitor.cpu.CpuMonitor;
+import me.lucko.spark.common.monitor.tick.TickStatistics;
+
+import net.kyori.adventure.text.Component;
+import net.kyori.adventure.text.TextComponent;
+import net.kyori.adventure.text.serializer.legacy.LegacyComponentSerializer;
+
+import java.util.Locale;
+import java.util.function.BiFunction;
+
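+/**
+ * Shared placeholder definitions used by the platform placeholder integrations.
+ * Each placeholder resolves to a formatted text component, or null if the
+ * argument is not recognised.
+ */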
+public enum SparkPlaceholder {
+
+ TPS((platform, arg) -> {
+ TickStatistics tickStatistics = platform.getTickStatistics();
+ if (tickStatistics == null) {
+ return null;
+ }
+
+ if (arg == null) {
+ return Component.text()
+ .append(StatisticFormatter.formatTps(tickStatistics.tps5Sec())).append(Component.text(", "))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps10Sec())).append(Component.text(", "))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps1Min())).append(Component.text(", "))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps5Min())).append(Component.text(", "))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps15Min()))
+ .build();
+ }
+
+ switch (arg) {
+ case "5s":
+ return StatisticFormatter.formatTps(tickStatistics.tps5Sec());
+ case "10s":
+ return StatisticFormatter.formatTps(tickStatistics.tps10Sec());
+ case "1m":
+ return StatisticFormatter.formatTps(tickStatistics.tps1Min());
+ case "5m":
+ return StatisticFormatter.formatTps(tickStatistics.tps5Min());
+ case "15m":
+ return StatisticFormatter.formatTps(tickStatistics.tps15Min());
+ }
+
+ return null;
+ }),
+
+ TICKDURATION((platform, arg) -> {
+ TickStatistics tickStatistics = platform.getTickStatistics();
+ if (tickStatistics == null || !tickStatistics.isDurationSupported()) {
+ return null;
+ }
+
+ if (arg == null) {
+ return Component.text()
+ .append(StatisticFormatter.formatTickDurations(tickStatistics.duration10Sec())).append(Component.text("; "))
+ .append(StatisticFormatter.formatTickDurations(tickStatistics.duration1Min()))
+ .build();
+ }
+
+ switch (arg) {
+ case "10s":
+ return StatisticFormatter.formatTickDurations(tickStatistics.duration10Sec());
+ case "1m":
+ return StatisticFormatter.formatTickDurations(tickStatistics.duration1Min());
+ }
+
+ return null;
+ }),
+
+ CPU_SYSTEM((platform, arg) -> {
+ if (arg == null) {
+ return Component.text()
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad10SecAvg())).append(Component.text(", "))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad1MinAvg())).append(Component.text(", "))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad15MinAvg()))
+ .build();
+ }
+
+ switch (arg) {
+ case "10s":
+ return StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad10SecAvg());
+ case "1m":
+ return StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad1MinAvg());
+ case "15m":
+ return StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad15MinAvg());
+ }
+
+ return null;
+ }),
+
+ CPU_PROCESS((platform, arg) -> {
+ if (arg == null) {
+ return Component.text()
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad10SecAvg())).append(Component.text(", "))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad1MinAvg())).append(Component.text(", "))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad15MinAvg()))
+ .build();
+ }
+
+ switch (arg) {
+ case "10s":
+ return StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad10SecAvg());
+ case "1m":
+ return StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad1MinAvg());
+ case "15m":
+ return StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad15MinAvg());
+ }
+
+ return null;
+ });
+
+ private final String name;
+ private final BiFunction<SparkPlatform, String, TextComponent> function;
+
+ SparkPlaceholder(BiFunction<SparkPlatform, String, TextComponent> function) {
+ this.name = name().toLowerCase(Locale.ROOT);
+ this.function = function;
+ }
+
+ public String getName() {
+ return this.name;
+ }
+
+ public TextComponent resolve(SparkPlatform platform, String arg) {
+ return this.function.apply(platform, arg);
+ }
+
+ public static TextComponent resolveComponent(SparkPlatform platform, String placeholder) {
+ String[] parts = placeholder.split("_");
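+        // e.g. "tps_5s" -> TPS with arg "5s"; "cpu_system_1m" -> CPU_SYSTEM with arg "1m"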
+
+ if (parts.length == 0) {
+ return null;
+ }
+
+ String label = parts[0];
+
+ if (label.equals("tps")) {
+ String arg = parts.length < 2 ? null : parts[1];
+ return TPS.resolve(platform, arg);
+ }
+
+ if (label.equals("tickduration")) {
+ String arg = parts.length < 2 ? null : parts[1];
+ return TICKDURATION.resolve(platform, arg);
+ }
+
+ if (label.equals("cpu") && parts.length >= 2) {
+ String type = parts[1];
+ String arg = parts.length < 3 ? null : parts[2];
+
+ if (type.equals("system")) {
+ return CPU_SYSTEM.resolve(platform, arg);
+ }
+ if (type.equals("process")) {
+ return CPU_PROCESS.resolve(platform, arg);
+ }
+ }
+
+ return null;
+ }
+
+ public static String resolveFormattingCode(SparkPlatform platform, String placeholder) {
+ TextComponent result = resolveComponent(platform, placeholder);
+ if (result == null) {
+ return null;
+ }
+ return LegacyComponentSerializer.legacySection().serialize(result);
+ }
+
+}
diff --git a/spark-common/src/main/proto/spark/spark.proto b/spark-common/src/main/proto/spark/spark.proto
index ec0aa88..2ea341f 100644
--- a/spark-common/src/main/proto/spark/spark.proto
+++ b/spark-common/src/main/proto/spark/spark.proto
@@ -94,7 +94,8 @@ message PlatformStatistics {
Tps tps = 4; // optional
Mspt mspt = 5; // optional
Ping ping = 6; // optional
- int64 player_count = 7;
+ int64 player_count = 7; // optional
+ WorldStatistics world = 8; // optional
message Memory {
MemoryPool heap = 1;
@@ -127,6 +128,30 @@ message PlatformStatistics {
}
}
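+// Entity statistics for loaded worlds, aggregated per world, per region
+// (a cluster of nearby chunks) and per chunk.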
+message WorldStatistics {
+ int32 total_entities = 1;
+ map<string, int32> entity_counts = 2;
+ repeated World worlds = 3;
+
+ message World {
+ string name = 1;
+ int32 total_entities = 2;
+ repeated Region regions = 3;
+ }
+
+ message Region {
+ int32 total_entities = 1;
+ repeated Chunk chunks = 2;
+ }
+
+ message Chunk {
+ int32 x = 1;
+ int32 z = 2;
+ int32 total_entities = 3;
+ map<string, int32> entity_counts = 4;
+ }
+}
+
message RollingAverageValues {
double mean = 1;
double max = 2;
diff --git a/spark-common/src/main/proto/spark/spark_sampler.proto b/spark-common/src/main/proto/spark/spark_sampler.proto
index 51bdd64..8d9512a 100644
--- a/spark-common/src/main/proto/spark/spark_sampler.proto
+++ b/spark-common/src/main/proto/spark/spark_sampler.proto
@@ -25,6 +25,7 @@ message SamplerMetadata {
SystemStatistics system_statistics = 9;
map<string, string> server_configurations = 10;
int64 end_time = 11;
+ int32 number_of_ticks = 12;
message ThreadDumper {
Type type = 1;
@@ -42,6 +43,7 @@ message SamplerMetadata {
Type type = 1;
ThreadGrouper thread_grouper = 2;
int64 tick_length_threshold = 3; // optional
+ int32 number_of_included_ticks = 4; // optional
enum Type {
SIMPLE = 0;
diff --git a/spark-fabric/build.gradle b/spark-fabric/build.gradle
index 9da8e01..31008bb 100644
--- a/spark-fabric/build.gradle
+++ b/spark-fabric/build.gradle
@@ -45,7 +45,7 @@ dependencies {
include(modImplementation('me.lucko:fabric-permissions-api:0.1-SNAPSHOT'))
- modImplementation('eu.pb4:placeholder-api:1.1.1+1.17.1')
+ modImplementation('eu.pb4:placeholder-api:2.0.0-beta.4+1.19')
shade project(':spark-common')
}
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricServerConfigProvider.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricServerConfigProvider.java
new file mode 100644
index 0000000..18079d3
--- /dev/null
+++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricServerConfigProvider.java
@@ -0,0 +1,57 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.fabric;
+
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.ImmutableSet;
+
+import me.lucko.spark.common.platform.serverconfig.AbstractServerConfigProvider;
+import me.lucko.spark.common.platform.serverconfig.ConfigParser;
+import me.lucko.spark.common.platform.serverconfig.PropertiesConfigParser;
+
+import java.util.Collection;
+import java.util.Map;
+
+public class FabricServerConfigProvider extends AbstractServerConfigProvider {
+
+    /** A map of the config files to provide, and the parsers used to read them */
+ private static final Map<String, ConfigParser> FILES;
+ /** A collection of paths to be excluded from the files */
+ private static final Collection<String> HIDDEN_PATHS;
+
+ public FabricServerConfigProvider() {
+ super(FILES, HIDDEN_PATHS);
+ }
+
+ static {
+ ImmutableSet.Builder<String> hiddenPaths = ImmutableSet.<String>builder()
+ .add("server-ip")
+ .add("motd")
+ .add("resource-pack")
+ .add("rcon<dot>password")
+ .add("level-seed")
+ .addAll(getSystemPropertyList("spark.serverconfigs.hiddenpaths"));
+
+ FILES = ImmutableMap.of("server.properties", PropertiesConfigParser.INSTANCE);
+ HIDDEN_PATHS = hiddenPaths.build();
+ }
+
+}
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricWorldInfoProvider.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricWorldInfoProvider.java
new file mode 100644
index 0000000..f2f7b96
--- /dev/null
+++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricWorldInfoProvider.java
@@ -0,0 +1,144 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.fabric;
+
+import it.unimi.dsi.fastutil.longs.LongIterator;
+import it.unimi.dsi.fastutil.longs.LongSet;
+
+import me.lucko.spark.common.platform.world.AbstractChunkInfo;
+import me.lucko.spark.common.platform.world.CountMap;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
+import me.lucko.spark.fabric.mixin.ClientEntityManagerAccessor;
+import me.lucko.spark.fabric.mixin.ClientWorldAccessor;
+import me.lucko.spark.fabric.mixin.ServerEntityManagerAccessor;
+import me.lucko.spark.fabric.mixin.ServerWorldAccessor;
+
+import net.minecraft.client.MinecraftClient;
+import net.minecraft.client.world.ClientEntityManager;
+import net.minecraft.client.world.ClientWorld;
+import net.minecraft.entity.Entity;
+import net.minecraft.entity.EntityType;
+import net.minecraft.server.MinecraftServer;
+import net.minecraft.server.world.ServerEntityManager;
+import net.minecraft.server.world.ServerWorld;
+import net.minecraft.util.math.ChunkPos;
+import net.minecraft.world.entity.EntityTrackingSection;
+import net.minecraft.world.entity.SectionedEntityCache;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.stream.Stream;
+
+public abstract class FabricWorldInfoProvider implements WorldInfoProvider {
+
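+    /** Builds a FabricChunkInfo snapshot for each chunk position in the entity cache */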
+ protected List<FabricChunkInfo> getChunksFromCache(SectionedEntityCache<Entity> cache) {
+ LongSet loadedChunks = cache.getChunkPositions();
+ List<FabricChunkInfo> list = new ArrayList<>(loadedChunks.size());
+
+ for (LongIterator iterator = loadedChunks.iterator(); iterator.hasNext(); ) {
+ long chunkPos = iterator.nextLong();
+ Stream<EntityTrackingSection<Entity>> sections = cache.getTrackingSections(chunkPos);
+
+ list.add(new FabricChunkInfo(chunkPos, sections));
+ }
+
+ return list;
+ }
+
+ public static final class Server extends FabricWorldInfoProvider {
+ private final MinecraftServer server;
+
+ public Server(MinecraftServer server) {
+ this.server = server;
+ }
+
+ @Override
+ public Result<FabricChunkInfo> poll() {
+ Result<FabricChunkInfo> data = new Result<>();
+
+ for (ServerWorld world : this.server.getWorlds()) {
+ ServerEntityManager<Entity> entityManager = ((ServerWorldAccessor) world).getEntityManager();
+ SectionedEntityCache<Entity> cache = ((ServerEntityManagerAccessor) entityManager).getCache();
+
+ List<FabricChunkInfo> list = getChunksFromCache(cache);
+ data.put(world.getRegistryKey().getValue().getPath(), list);
+ }
+
+ return data;
+ }
+ }
+
+ public static final class Client extends FabricWorldInfoProvider {
+ private final MinecraftClient client;
+
+ public Client(MinecraftClient client) {
+ this.client = client;
+ }
+
+ @Override
+ public Result<FabricChunkInfo> poll() {
+            ClientWorld world = this.client.world;
+            if (world == null) {
+                return null;
+            }
+
+            Result<FabricChunkInfo> data = new Result<>();
+
+ ClientEntityManager<Entity> entityManager = ((ClientWorldAccessor) world).getEntityManager();
+ SectionedEntityCache<Entity> cache = ((ClientEntityManagerAccessor) entityManager).getCache();
+
+ List<FabricChunkInfo> list = getChunksFromCache(cache);
+ data.put(world.getRegistryKey().getValue().getPath(), list);
+
+ return data;
+ }
+ }
+
+ static final class FabricChunkInfo extends AbstractChunkInfo<EntityType<?>> {
+ private final CountMap<EntityType<?>> entityCounts;
+
+ FabricChunkInfo(long chunkPos, Stream<EntityTrackingSection<Entity>> entities) {
+ super(ChunkPos.getPackedX(chunkPos), ChunkPos.getPackedZ(chunkPos));
+
+ this.entityCounts = new CountMap.Simple<>(new HashMap<>());
+ entities.forEach(section -> {
+ if (section.getStatus().shouldTrack()) {
+ section.stream().forEach(entity ->
+ this.entityCounts.increment(entity.getType())
+ );
+ }
+ });
+ }
+
+ @Override
+ public CountMap<EntityType<?>> getEntityCounts() {
+ return this.entityCounts;
+ }
+
+ @Override
+ public String entityTypeName(EntityType<?> type) {
+ return EntityType.getId(type).toString();
+ }
+ }
+
+}
+
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientEntityManagerAccessor.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientEntityManagerAccessor.java
new file mode 100644
index 0000000..88c9521
--- /dev/null
+++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientEntityManagerAccessor.java
@@ -0,0 +1,36 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.fabric.mixin;
+
+import net.minecraft.client.world.ClientEntityManager;
+import net.minecraft.entity.Entity;
+import net.minecraft.world.entity.SectionedEntityCache;
+
+import org.spongepowered.asm.mixin.Mixin;
+import org.spongepowered.asm.mixin.gen.Accessor;
+
+@Mixin(ClientEntityManager.class)
+public interface ClientEntityManagerAccessor {
+
+ @Accessor
+ SectionedEntityCache<Entity> getCache();
+
+}
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientWorldAccessor.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientWorldAccessor.java
new file mode 100644
index 0000000..01562ef
--- /dev/null
+++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientWorldAccessor.java
@@ -0,0 +1,36 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.fabric.mixin;
+
+import net.minecraft.client.world.ClientEntityManager;
+import net.minecraft.client.world.ClientWorld;
+import net.minecraft.entity.Entity;
+
+import org.spongepowered.asm.mixin.Mixin;
+import org.spongepowered.asm.mixin.gen.Accessor;
+
+@Mixin(ClientWorld.class)
+public interface ClientWorldAccessor {
+
+ @Accessor
+ ClientEntityManager<Entity> getEntityManager();
+
+}
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerEntityManagerAccessor.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerEntityManagerAccessor.java
new file mode 100644
index 0000000..160a12b
--- /dev/null
+++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerEntityManagerAccessor.java
@@ -0,0 +1,36 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.fabric.mixin;
+
+import net.minecraft.entity.Entity;
+import net.minecraft.server.world.ServerEntityManager;
+import net.minecraft.world.entity.SectionedEntityCache;
+
+import org.spongepowered.asm.mixin.Mixin;
+import org.spongepowered.asm.mixin.gen.Accessor;
+
+@Mixin(ServerEntityManager.class)
+public interface ServerEntityManagerAccessor {
+
+ @Accessor
+ SectionedEntityCache<Entity> getCache();
+
+}
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerWorldAccessor.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerWorldAccessor.java
new file mode 100644
index 0000000..cf2e7e8
--- /dev/null
+++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerWorldAccessor.java
@@ -0,0 +1,36 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.fabric.mixin;
+
+import net.minecraft.entity.Entity;
+import net.minecraft.server.world.ServerEntityManager;
+import net.minecraft.server.world.ServerWorld;
+
+import org.spongepowered.asm.mixin.Mixin;
+import org.spongepowered.asm.mixin.gen.Accessor;
+
+@Mixin(ServerWorld.class)
+public interface ServerWorldAccessor {
+
+ @Accessor
+ ServerEntityManager<Entity> getEntityManager();
+
+}
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/placeholder/SparkFabricPlaceholderApi.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/placeholder/SparkFabricPlaceholderApi.java
index dc2e7d9..69303e3 100644
--- a/spark-fabric/src/main/java/me/lucko/spark/fabric/placeholder/SparkFabricPlaceholderApi.java
+++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/placeholder/SparkFabricPlaceholderApi.java
@@ -20,169 +20,48 @@
package me.lucko.spark.fabric.placeholder;
-import eu.pb4.placeholders.PlaceholderAPI;
-import eu.pb4.placeholders.PlaceholderResult;
+import eu.pb4.placeholders.api.PlaceholderContext;
+import eu.pb4.placeholders.api.PlaceholderHandler;
+import eu.pb4.placeholders.api.PlaceholderResult;
+import eu.pb4.placeholders.api.Placeholders;
import me.lucko.spark.common.SparkPlatform;
-import me.lucko.spark.common.monitor.cpu.CpuMonitor;
-import me.lucko.spark.common.monitor.tick.TickStatistics;
-import me.lucko.spark.common.util.RollingAverage;
-import me.lucko.spark.common.util.StatisticFormatter;
+import me.lucko.spark.common.util.SparkPlaceholder;
import net.kyori.adventure.text.Component;
import net.kyori.adventure.text.serializer.gson.GsonComponentSerializer;
import net.minecraft.text.Text;
import net.minecraft.util.Identifier;
-public class SparkFabricPlaceholderApi {
- private final SparkPlatform platform;
+import org.jetbrains.annotations.Nullable;
- public SparkFabricPlaceholderApi(SparkPlatform platform) {
- this.platform = platform;
+public enum SparkFabricPlaceholderApi {
+ ;
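+    // no constants - this enum serves as a non-instantiable holder for the static register method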
- PlaceholderAPI.register(
- new Identifier("spark", "tps"),
- context -> {
- TickStatistics tickStatistics = platform.getTickStatistics();
- if (tickStatistics == null) {
- return PlaceholderResult.invalid();
- }
-
- if (context.hasArgument()) {
- Double tps = switch (context.getArgument()) {
- case "5s":
- yield tickStatistics.tps5Sec();
- case "10s":
- yield tickStatistics.tps10Sec();
- case "1m":
- yield tickStatistics.tps1Min();
- case "5m":
- yield tickStatistics.tps5Min();
- case "15m":
- yield tickStatistics.tps15Min();
- default:
- yield null;
- };
-
- if (tps == null) {
- return PlaceholderResult.invalid("Invalid argument");
- } else {
- return PlaceholderResult.value(toText(StatisticFormatter.formatTps(tps)));
- }
- } else {
- return PlaceholderResult.value(toText(
- Component.text()
- .append(StatisticFormatter.formatTps(tickStatistics.tps5Sec())).append(Component.text(", "))
- .append(StatisticFormatter.formatTps(tickStatistics.tps10Sec())).append(Component.text(", "))
- .append(StatisticFormatter.formatTps(tickStatistics.tps1Min())).append(Component.text(", "))
- .append(StatisticFormatter.formatTps(tickStatistics.tps5Min())).append(Component.text(", "))
- .append(StatisticFormatter.formatTps(tickStatistics.tps15Min()))
- .build()
- ));
- }
- }
- );
-
- PlaceholderAPI.register(
- new Identifier("spark", "tickduration"),
- context -> {
- TickStatistics tickStatistics = platform.getTickStatistics();
- if (tickStatistics == null || !tickStatistics.isDurationSupported()) {
- return PlaceholderResult.invalid();
- }
-
- if (context.hasArgument()) {
- RollingAverage duration = switch (context.getArgument()) {
- case "10s":
- yield tickStatistics.duration10Sec();
- case "1m":
- yield tickStatistics.duration1Min();
- default:
- yield null;
- };
-
- if (duration == null) {
- return PlaceholderResult.invalid("Invalid argument");
- } else {
- return PlaceholderResult.value(toText(StatisticFormatter.formatTickDurations(duration)));
- }
- } else {
- return PlaceholderResult.value(toText(
- Component.text()
- .append(StatisticFormatter.formatTickDurations(tickStatistics.duration10Sec())).append(Component.text("; "))
- .append(StatisticFormatter.formatTickDurations(tickStatistics.duration1Min()))
- .build()
- ));
- }
- }
- );
-
- PlaceholderAPI.register(
- new Identifier("spark", "cpu_system"),
- context -> {
- if (context.hasArgument()) {
- Double usage = switch (context.getArgument()) {
- case "10s":
- yield CpuMonitor.systemLoad10SecAvg();
- case "1m":
- yield CpuMonitor.systemLoad1MinAvg();
- case "15m":
- yield CpuMonitor.systemLoad15MinAvg();
- default:
- yield null;
- };
-
- if (usage == null) {
- return PlaceholderResult.invalid("Invalid argument");
- } else {
- return PlaceholderResult.value(toText(StatisticFormatter.formatCpuUsage(usage)));
- }
- } else {
- return PlaceholderResult.value(toText(
- Component.text()
- .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad10SecAvg())).append(Component.text(", "))
- .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad1MinAvg())).append(Component.text(", "))
- .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad15MinAvg()))
- .build()
- ));
- }
- }
- );
-
- PlaceholderAPI.register(
- new Identifier("spark", "cpu_process"),
- context -> {
- if (context.hasArgument()) {
- Double usage = switch (context.getArgument()) {
- case "10s":
- yield CpuMonitor.processLoad10SecAvg();
- case "1m":
- yield CpuMonitor.processLoad1MinAvg();
- case "15m":
- yield CpuMonitor.processLoad15MinAvg();
- default:
- yield null;
- };
-
- if (usage == null) {
- return PlaceholderResult.invalid("Invalid argument");
- } else {
- return PlaceholderResult.value(toText(StatisticFormatter.formatCpuUsage(usage)));
- }
- } else {
- return PlaceholderResult.value(toText(
- Component.text()
- .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad10SecAvg())).append(Component.text(", "))
- .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad1MinAvg())).append(Component.text(", "))
- .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad15MinAvg()))
- .build()
- ));
- }
- }
- );
+ public static void register(SparkPlatform platform) {
+ for (SparkPlaceholder placeholder : SparkPlaceholder.values()) {
+ Placeholders.register(
+ new Identifier("spark", placeholder.getName()),
+ new Handler(platform, placeholder)
+ );
+ }
}
- private Text toText(Component component) {
- return Text.Serializer.fromJson(GsonComponentSerializer.gson().serialize(component));
+ private record Handler(SparkPlatform platform, SparkPlaceholder placeholder) implements PlaceholderHandler {
+ @Override
+ public PlaceholderResult onPlaceholderRequest(PlaceholderContext context, @Nullable String argument) {
+ return toResult(this.placeholder.resolve(this.platform, argument));
+ }
+
+ private static PlaceholderResult toResult(Component component) {
+ return component == null
+ ? PlaceholderResult.invalid()
+ : PlaceholderResult.value(toText(component));
+ }
+
+ private static Text toText(Component component) {
+ return Text.Serializer.fromJson(GsonComponentSerializer.gson().serialize(component));
+ }
}
+
}
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricClientSparkPlugin.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricClientSparkPlugin.java
index e94d697..1876658 100644
--- a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricClientSparkPlugin.java
+++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricClientSparkPlugin.java
@@ -29,6 +29,7 @@ import com.mojang.brigadier.suggestion.Suggestions;
import com.mojang.brigadier.suggestion.SuggestionsBuilder;
import me.lucko.spark.common.platform.PlatformInfo;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.common.tick.TickReporter;
import me.lucko.spark.fabric.FabricCommandSender;
@@ -36,6 +37,7 @@ import me.lucko.spark.fabric.FabricPlatformInfo;
import me.lucko.spark.fabric.FabricSparkMod;
import me.lucko.spark.fabric.FabricTickHook;
import me.lucko.spark.fabric.FabricTickReporter;
+import me.lucko.spark.fabric.FabricWorldInfoProvider;
import net.fabricmc.fabric.api.client.command.v2.ClientCommandRegistrationCallback;
import net.fabricmc.fabric.api.client.command.v2.FabricClientCommandSource;
@@ -113,6 +115,11 @@ public class FabricClientSparkPlugin extends FabricSparkPlugin implements Comman
}
@Override
+ public void executeSync(Runnable task) {
+ this.minecraft.executeSync(task);
+ }
+
+ @Override
public TickHook createTickHook() {
return new FabricTickHook.Client();
}
@@ -123,6 +130,11 @@ public class FabricClientSparkPlugin extends FabricSparkPlugin implements Comman
}
@Override
+ public WorldInfoProvider createWorldInfoProvider() {
+ return new FabricWorldInfoProvider.Client(this.minecraft);
+ }
+
+ @Override
public PlatformInfo getPlatformInfo() {
return new FabricPlatformInfo(PlatformInfo.Type.CLIENT);
}
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java
index 428ac4c..bb1d68c 100644
--- a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java
+++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java
@@ -31,14 +31,18 @@ import com.mojang.brigadier.suggestion.SuggestionsBuilder;
import me.lucko.fabric.api.permissions.v0.Permissions;
import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
import me.lucko.spark.common.platform.PlatformInfo;
+import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.common.tick.TickReporter;
import me.lucko.spark.fabric.FabricCommandSender;
import me.lucko.spark.fabric.FabricPlatformInfo;
import me.lucko.spark.fabric.FabricPlayerPingProvider;
+import me.lucko.spark.fabric.FabricServerConfigProvider;
import me.lucko.spark.fabric.FabricSparkMod;
import me.lucko.spark.fabric.FabricTickHook;
import me.lucko.spark.fabric.FabricTickReporter;
+import me.lucko.spark.fabric.FabricWorldInfoProvider;
import me.lucko.spark.fabric.placeholder.SparkFabricPlaceholderApi;
import net.fabricmc.loader.api.FabricLoader;
@@ -74,7 +78,11 @@ public class FabricServerSparkPlugin extends FabricSparkPlugin implements Comman
// placeholders
if (FabricLoader.getInstance().isModLoaded("placeholder-api")) {
- new SparkFabricPlaceholderApi(this.platform);
+ try {
+ SparkFabricPlaceholderApi.register(this.platform);
+ } catch (LinkageError e) {
+                // ignore - an incompatible placeholder-api version is present
+ }
}
}
@@ -123,6 +131,11 @@ public class FabricServerSparkPlugin extends FabricSparkPlugin implements Comman
}
@Override
+ public void executeSync(Runnable task) {
+ this.server.executeSync(task);
+ }
+
+ @Override
public TickHook createTickHook() {
return new FabricTickHook.Server();
}
@@ -138,6 +151,16 @@ public class FabricServerSparkPlugin extends FabricSparkPlugin implements Comman
}
@Override
+ public ServerConfigProvider createServerConfigProvider() {
+ return new FabricServerConfigProvider();
+ }
+
+ @Override
+ public WorldInfoProvider createWorldInfoProvider() {
+ return new FabricWorldInfoProvider.Server(this.server);
+ }
+
+ @Override
public PlatformInfo getPlatformInfo() {
return new FabricPlatformInfo(PlatformInfo.Type.SERVER);
}
diff --git a/spark-fabric/src/main/resources/fabric.mod.json b/spark-fabric/src/main/resources/fabric.mod.json
index e2e600d..f1f0ad4 100644
--- a/spark-fabric/src/main/resources/fabric.mod.json
+++ b/spark-fabric/src/main/resources/fabric.mod.json
@@ -23,6 +23,9 @@
"me.lucko.spark.fabric.FabricSparkMod::initializeClient"
]
},
+ "mixins": [
+ "spark.mixins.json"
+ ],
"depends": {
"fabricloader": ">=0.4.0",
"fabric-api-base": "*",
diff --git a/spark-fabric/src/main/resources/spark.mixins.json b/spark-fabric/src/main/resources/spark.mixins.json
new file mode 100644
index 0000000..09587fe
--- /dev/null
+++ b/spark-fabric/src/main/resources/spark.mixins.json
@@ -0,0 +1,14 @@
+{
+ "required": true,
+ "package": "me.lucko.spark.fabric.mixin",
+ "compatibilityLevel": "JAVA_17",
+ "mixins": [],
+ "client": [
+ "ClientEntityManagerAccessor",
+ "ClientWorldAccessor"
+ ],
+ "server": [
+ "ServerEntityManagerAccessor",
+ "ServerWorldAccessor"
+ ]
+}
\ No newline at end of file
diff --git a/spark-forge/build.gradle b/spark-forge/build.gradle
index 210122b..3f46b95 100644
--- a/spark-forge/build.gradle
+++ b/spark-forge/build.gradle
@@ -21,6 +21,7 @@ tasks.withType(JavaCompile) {
minecraft {
mappings channel: 'official', version: '1.19'
+ accessTransformer = file('src/main/resources/META-INF/accesstransformer.cfg')
}
configurations {
diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeServerConfigProvider.java b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeServerConfigProvider.java
new file mode 100644
index 0000000..baa1358
--- /dev/null
+++ b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeServerConfigProvider.java
@@ -0,0 +1,57 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.forge;
+
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.ImmutableSet;
+
+import me.lucko.spark.common.platform.serverconfig.AbstractServerConfigProvider;
+import me.lucko.spark.common.platform.serverconfig.ConfigParser;
+import me.lucko.spark.common.platform.serverconfig.PropertiesConfigParser;
+
+import java.util.Collection;
+import java.util.Map;
+
+public class ForgeServerConfigProvider extends AbstractServerConfigProvider {
+
+    /** A map of the config files to provide, and the parsers used to read them */
+ private static final Map<String, ConfigParser> FILES;
+ /** A collection of paths to be excluded from the files */
+ private static final Collection<String> HIDDEN_PATHS;
+
+ public ForgeServerConfigProvider() {
+ super(FILES, HIDDEN_PATHS);
+ }
+
+ static {
+ ImmutableSet.Builder<String> hiddenPaths = ImmutableSet.<String>builder()
+ .add("server-ip")
+ .add("motd")
+ .add("resource-pack")
+ .add("rcon<dot>password")
+ .add("level-seed")
+ .addAll(getSystemPropertyList("spark.serverconfigs.hiddenpaths"));
+
+ FILES = ImmutableMap.of("server.properties", PropertiesConfigParser.INSTANCE);
+ HIDDEN_PATHS = hiddenPaths.build();
+ }
+
+}
diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeWorldInfoProvider.java b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeWorldInfoProvider.java
new file mode 100644
index 0000000..1d65d6a
--- /dev/null
+++ b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeWorldInfoProvider.java
@@ -0,0 +1,140 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.forge;
+
+import it.unimi.dsi.fastutil.longs.LongIterator;
+import it.unimi.dsi.fastutil.longs.LongSet;
+
+import me.lucko.spark.common.platform.world.AbstractChunkInfo;
+import me.lucko.spark.common.platform.world.CountMap;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
+
+import net.minecraft.client.Minecraft;
+import net.minecraft.client.multiplayer.ClientLevel;
+import net.minecraft.server.MinecraftServer;
+import net.minecraft.server.level.ServerLevel;
+import net.minecraft.world.entity.Entity;
+import net.minecraft.world.entity.EntityType;
+import net.minecraft.world.level.ChunkPos;
+import net.minecraft.world.level.entity.EntitySection;
+import net.minecraft.world.level.entity.EntitySectionStorage;
+import net.minecraft.world.level.entity.PersistentEntitySectionManager;
+import net.minecraft.world.level.entity.TransientEntitySectionManager;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.stream.Stream;
+
+public abstract class ForgeWorldInfoProvider implements WorldInfoProvider {
+
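+    /** Builds a ForgeChunkInfo snapshot for each chunk position in the entity section storage */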
+ protected List<ForgeChunkInfo> getChunksFromCache(EntitySectionStorage<Entity> cache) {
+ LongSet loadedChunks = cache.getAllChunksWithExistingSections();
+ List<ForgeChunkInfo> list = new ArrayList<>(loadedChunks.size());
+
+ for (LongIterator iterator = loadedChunks.iterator(); iterator.hasNext(); ) {
+ long chunkPos = iterator.nextLong();
+ Stream<EntitySection<Entity>> sections = cache.getExistingSectionsInChunk(chunkPos);
+
+ list.add(new ForgeChunkInfo(chunkPos, sections));
+ }
+
+ return list;
+ }
+
+ public static final class Server extends ForgeWorldInfoProvider {
+ private final MinecraftServer server;
+
+ public Server(MinecraftServer server) {
+ this.server = server;
+ }
+
+ @Override
+ public Result<ForgeChunkInfo> poll() {
+ Result<ForgeChunkInfo> data = new Result<>();
+
+ for (ServerLevel level : this.server.getAllLevels()) {
+ PersistentEntitySectionManager<Entity> entityManager = level.entityManager;
+ EntitySectionStorage<Entity> cache = entityManager.sectionStorage;
+
+ List<ForgeChunkInfo> list = getChunksFromCache(cache);
+ data.put(level.dimension().location().getPath(), list);
+ }
+
+ return data;
+ }
+ }
+
+ public static final class Client extends ForgeWorldInfoProvider {
+ private final Minecraft client;
+
+ public Client(Minecraft client) {
+ this.client = client;
+ }
+
+ @Override
+ public Result<ForgeChunkInfo> poll() {
+            ClientLevel level = this.client.level;
+            if (level == null) {
+                return null;
+            }
+
+            Result<ForgeChunkInfo> data = new Result<>();
+
+ TransientEntitySectionManager<Entity> entityManager = level.entityStorage;
+ EntitySectionStorage<Entity> cache = entityManager.sectionStorage;
+
+ List<ForgeChunkInfo> list = getChunksFromCache(cache);
+ data.put(level.dimension().location().getPath(), list);
+
+ return data;
+ }
+ }
+
+ static final class ForgeChunkInfo extends AbstractChunkInfo<EntityType<?>> {
+ private final CountMap<EntityType<?>> entityCounts;
+
+ ForgeChunkInfo(long chunkPos, Stream<EntitySection<Entity>> entities) {
+ super(ChunkPos.getX(chunkPos), ChunkPos.getZ(chunkPos));
+
+ this.entityCounts = new CountMap.Simple<>(new HashMap<>());
+ entities.forEach(section -> {
+ if (section.getStatus().isAccessible()) {
+ section.getEntities().forEach(entity ->
+ this.entityCounts.increment(entity.getType())
+ );
+ }
+ });
+ }
+
+ @Override
+ public CountMap<EntityType<?>> getEntityCounts() {
+ return this.entityCounts;
+ }
+
+ @Override
+ public String entityTypeName(EntityType<?> type) {
+ return EntityType.getKey(type).toString();
+ }
+ }
+
+}
diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeClientSparkPlugin.java b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeClientSparkPlugin.java
index cf5c89b..04c8785 100644
--- a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeClientSparkPlugin.java
+++ b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeClientSparkPlugin.java
@@ -28,6 +28,7 @@ import com.mojang.brigadier.suggestion.Suggestions;
import com.mojang.brigadier.suggestion.SuggestionsBuilder;
import me.lucko.spark.common.platform.PlatformInfo;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.common.tick.TickReporter;
import me.lucko.spark.forge.ForgeCommandSender;
@@ -35,6 +36,7 @@ import me.lucko.spark.forge.ForgePlatformInfo;
import me.lucko.spark.forge.ForgeSparkMod;
import me.lucko.spark.forge.ForgeTickHook;
import me.lucko.spark.forge.ForgeTickReporter;
+import me.lucko.spark.forge.ForgeWorldInfoProvider;
import net.minecraft.client.Minecraft;
import net.minecraft.commands.CommandSource;
@@ -108,6 +110,11 @@ public class ForgeClientSparkPlugin extends ForgeSparkPlugin implements Command<
}
@Override
+ public void executeSync(Runnable task) {
+ this.minecraft.executeIfPossible(task);
+ }
+
+ @Override
public TickHook createTickHook() {
return new ForgeTickHook(TickEvent.Type.CLIENT);
}
@@ -118,6 +125,11 @@ public class ForgeClientSparkPlugin extends ForgeSparkPlugin implements Command<
}
@Override
+ public WorldInfoProvider createWorldInfoProvider() {
+ return new ForgeWorldInfoProvider.Client(this.minecraft);
+ }
+
+ @Override
public PlatformInfo getPlatformInfo() {
return new ForgePlatformInfo(PlatformInfo.Type.CLIENT);
}
diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java
index e341d6f..f4a51e0 100644
--- a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java
+++ b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java
@@ -31,14 +31,18 @@ import com.mojang.brigadier.suggestion.SuggestionsBuilder;
import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
import me.lucko.spark.common.platform.PlatformInfo;
+import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.common.tick.TickReporter;
import me.lucko.spark.forge.ForgeCommandSender;
import me.lucko.spark.forge.ForgePlatformInfo;
import me.lucko.spark.forge.ForgePlayerPingProvider;
+import me.lucko.spark.forge.ForgeServerConfigProvider;
import me.lucko.spark.forge.ForgeSparkMod;
import me.lucko.spark.forge.ForgeTickHook;
import me.lucko.spark.forge.ForgeTickReporter;
+import me.lucko.spark.forge.ForgeWorldInfoProvider;
import net.minecraft.commands.CommandSource;
import net.minecraft.commands.CommandSourceStack;
@@ -184,6 +188,11 @@ public class ForgeServerSparkPlugin extends ForgeSparkPlugin implements Command<
}
@Override
+ public void executeSync(Runnable task) {
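+ // hand the task to the main server thread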
+ this.server.executeIfPossible(task);
+ }
+
+ @Override
public TickHook createTickHook() {
return new ForgeTickHook(TickEvent.Type.SERVER);
}
@@ -199,6 +208,16 @@ public class ForgeServerSparkPlugin extends ForgeSparkPlugin implements Command<
}
@Override
+ public ServerConfigProvider createServerConfigProvider() {
+ return new ForgeServerConfigProvider();
+ }
+
+ @Override
+ public WorldInfoProvider createWorldInfoProvider() {
+ return new ForgeWorldInfoProvider.Server(this.server);
+ }
+
+ @Override
public PlatformInfo getPlatformInfo() {
return new ForgePlatformInfo(PlatformInfo.Type.SERVER);
}
diff --git a/spark-forge/src/main/resources/META-INF/accesstransformer.cfg b/spark-forge/src/main/resources/META-INF/accesstransformer.cfg
new file mode 100644
index 0000000..1e418b8
--- /dev/null
+++ b/spark-forge/src/main/resources/META-INF/accesstransformer.cfg
@@ -0,0 +1,4 @@
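+# exposes the entity section storage fields read by ForgeWorldInfoProvider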
+public net.minecraft.server.level.ServerLevel f_143244_ # entityManager
+public net.minecraft.world.level.entity.PersistentEntitySectionManager f_157495_ # sectionStorage
+public net.minecraft.client.multiplayer.ClientLevel f_171631_ # entityStorage
+public net.minecraft.world.level.entity.TransientEntitySectionManager f_157638_ # sectionStorage
diff --git a/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitSparkPlugin.java b/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitSparkPlugin.java
index 18132c3..87d9f09 100644
--- a/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitSparkPlugin.java
+++ b/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitSparkPlugin.java
@@ -31,7 +31,6 @@ import cn.nukkit.command.Command;
import cn.nukkit.command.CommandSender;
import cn.nukkit.plugin.PluginBase;
import cn.nukkit.plugin.service.ServicePriority;
-import cn.nukkit.scheduler.AsyncTask;
import java.nio.file.Path;
import java.util.logging.Level;
@@ -82,12 +81,7 @@ public class NukkitSparkPlugin extends PluginBase implements SparkPlugin {
@Override
public void executeAsync(Runnable task) {
- getServer().getScheduler().scheduleAsyncTask(this, new AsyncTask() {
- @Override
- public void onRun() {
- task.run();
- }
- });
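+ // the 'true' flag schedules the task asynchronously, replacing the AsyncTask subclass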
+ getServer().getScheduler().scheduleTask(this, task, true);
}
@Override
diff --git a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7SparkPlugin.java b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7SparkPlugin.java
index 670e0c5..324e242 100644
--- a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7SparkPlugin.java
+++ b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7SparkPlugin.java
@@ -27,6 +27,7 @@ import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.SparkPlugin;
import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
import me.lucko.spark.common.platform.PlatformInfo;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
import me.lucko.spark.common.sampler.ThreadDumper;
import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.common.util.ClassSourceLookup;
@@ -44,6 +45,7 @@ import org.spongepowered.api.plugin.Plugin;
import org.spongepowered.api.plugin.PluginContainer;
import org.spongepowered.api.scheduler.AsynchronousExecutor;
import org.spongepowered.api.scheduler.SpongeExecutorService;
+import org.spongepowered.api.scheduler.SynchronousExecutor;
import org.spongepowered.api.text.Text;
import org.spongepowered.api.world.Location;
import org.spongepowered.api.world.World;
@@ -70,17 +72,19 @@ public class Sponge7SparkPlugin implements SparkPlugin {
private final Game game;
private final Path configDirectory;
private final SpongeExecutorService asyncExecutor;
+ private final SpongeExecutorService syncExecutor;
private SparkPlatform platform;
private final ThreadDumper.GameThread threadDumper = new ThreadDumper.GameThread();
@Inject
- public Sponge7SparkPlugin(PluginContainer pluginContainer, Logger logger, Game game, @ConfigDir(sharedRoot = false) Path configDirectory, @AsynchronousExecutor SpongeExecutorService asyncExecutor) {
+ public Sponge7SparkPlugin(PluginContainer pluginContainer, Logger logger, Game game, @ConfigDir(sharedRoot = false) Path configDirectory, @AsynchronousExecutor SpongeExecutorService asyncExecutor, @SynchronousExecutor SpongeExecutorService syncExecutor) {
this.pluginContainer = pluginContainer;
this.logger = logger;
this.game = game;
this.configDirectory = configDirectory;
this.asyncExecutor = asyncExecutor;
+ this.syncExecutor = syncExecutor;
}
@Listener
@@ -112,10 +116,14 @@ public class Sponge7SparkPlugin implements SparkPlugin {
@Override
public Stream<Sponge7CommandSender> getCommandSenders() {
- return Stream.concat(
- this.game.getServer().getOnlinePlayers().stream(),
- Stream.of(this.game.getServer().getConsole())
- ).map(Sponge7CommandSender::new);
+ if (this.game.isServerAvailable()) {
+ return Stream.concat(
+ this.game.getServer().getOnlinePlayers().stream(),
+ Stream.of(this.game.getServer().getConsole())
+ ).map(Sponge7CommandSender::new);
+ } else {
+ return Stream.of(this.game.getServer().getConsole()).map(Sponge7CommandSender::new);
+ }
}
@Override
@@ -124,6 +132,11 @@ public class Sponge7SparkPlugin implements SparkPlugin {
}
@Override
+ public void executeSync(Runnable task) {
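+ // the injected @SynchronousExecutor service runs tasks on the main server thread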
+ this.syncExecutor.execute(task);
+ }
+
+ @Override
public void log(Level level, String msg) {
if (level == Level.INFO) {
this.logger.info(msg);
@@ -161,6 +174,15 @@ public class Sponge7SparkPlugin implements SparkPlugin {
}
@Override
+ public WorldInfoProvider createWorldInfoProvider() {
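+ // world info requires a running server; fall back to the no-op provider during early startup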
+ if (this.game.isServerAvailable()) {
+ return new Sponge7WorldInfoProvider(this.game.getServer());
+ } else {
+ return WorldInfoProvider.NO_OP;
+ }
+ }
+
+ @Override
public PlatformInfo getPlatformInfo() {
return new Sponge7PlatformInfo(this.game);
}
diff --git a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7WorldInfoProvider.java b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7WorldInfoProvider.java
new file mode 100644
index 0000000..fa6fa6b
--- /dev/null
+++ b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7WorldInfoProvider.java
@@ -0,0 +1,87 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.sponge;
+
+import com.google.common.collect.Lists;
+
+import me.lucko.spark.common.platform.world.AbstractChunkInfo;
+import me.lucko.spark.common.platform.world.CountMap;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
+
+import org.spongepowered.api.Server;
+import org.spongepowered.api.entity.Entity;
+import org.spongepowered.api.entity.EntityType;
+import org.spongepowered.api.world.Chunk;
+import org.spongepowered.api.world.World;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+
+public class Sponge7WorldInfoProvider implements WorldInfoProvider {
+ private final Server server;
+
+ public Sponge7WorldInfoProvider(Server server) {
+ this.server = server;
+ }
+
+ @Override
+ public Result<Sponge7ChunkInfo> poll() {
+ Result<Sponge7ChunkInfo> data = new Result<>();
+
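+ // snapshot each world's loaded chunks and count the entities in every chunk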
+ for (World world : this.server.getWorlds()) {
+ List<Chunk> chunks = Lists.newArrayList(world.getLoadedChunks());
+
+ List<Sponge7ChunkInfo> list = new ArrayList<>(chunks.size());
+ for (Chunk chunk : chunks) {
+ list.add(new Sponge7ChunkInfo(chunk));
+ }
+
+ data.put(world.getName(), list);
+ }
+
+ return data;
+ }
+
+ static final class Sponge7ChunkInfo extends AbstractChunkInfo<EntityType> {
+ private final CountMap<EntityType> entityCounts;
+
+ Sponge7ChunkInfo(Chunk chunk) {
+ super(chunk.getPosition().getX(), chunk.getPosition().getZ());
+
+ this.entityCounts = new CountMap.Simple<>(new HashMap<>());
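+ // tally this chunk's entities by type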
+ for (Entity entity : chunk.getEntities()) {
+ this.entityCounts.increment(entity.getType());
+ }
+ }
+
+ @Override
+ public CountMap<EntityType> getEntityCounts() {
+ return this.entityCounts;
+ }
+
+ @Override
+ public String entityTypeName(EntityType type) {
+ return type.getName();
+ }
+
+ }
+}
diff --git a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8SparkPlugin.java b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8SparkPlugin.java
index e867a75..68e47e3 100644
--- a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8SparkPlugin.java
+++ b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8SparkPlugin.java
@@ -27,6 +27,7 @@ import me.lucko.spark.common.SparkPlugin;
import me.lucko.spark.common.command.sender.CommandSender;
import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
import me.lucko.spark.common.platform.PlatformInfo;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
import me.lucko.spark.common.sampler.ThreadDumper;
import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.common.util.ClassSourceLookup;
@@ -67,6 +68,7 @@ public class Sponge8SparkPlugin implements SparkPlugin {
private final Game game;
private final Path configDirectory;
private final ExecutorService asyncExecutor;
+ private final ExecutorService syncExecutor;
private SparkPlatform platform;
private final ThreadDumper.GameThread threadDumper = new ThreadDumper.GameThread();
@@ -78,6 +80,14 @@ public class Sponge8SparkPlugin implements SparkPlugin {
this.game = game;
this.configDirectory = configDirectory;
this.asyncExecutor = game.asyncScheduler().executor(pluginContainer);
+
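+ // pick the main-thread executor: the server scheduler when a server is running, otherwise the client scheduler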
+ if (game.isServerAvailable()) {
+ this.syncExecutor = game.server().scheduler().executor(pluginContainer);
+ } else if (game.isClientAvailable()) {
+ this.syncExecutor = game.client().scheduler().executor(pluginContainer);
+ } else {
+ throw new IllegalStateException("Server and client both unavailable");
+ }
}
@@ -114,10 +124,14 @@ public class Sponge8SparkPlugin implements SparkPlugin {
@Override
public Stream<CommandSender> getCommandSenders() {
- return Stream.concat(
- this.game.server().onlinePlayers().stream(),
- Stream.of(this.game.systemSubject())
- ).map(Sponge8CommandSender::new);
+ if (this.game.isServerAvailable()) {
+ return Stream.concat(
+ this.game.server().onlinePlayers().stream(),
+ Stream.of(this.game.systemSubject())
+ ).map(Sponge8CommandSender::new);
+ } else {
+ return Stream.of(this.game.systemSubject()).map(Sponge8CommandSender::new);
+ }
}
@Override
@@ -126,6 +140,11 @@ public class Sponge8SparkPlugin implements SparkPlugin {
}
@Override
+ public void executeSync(Runnable task) {
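+ // executes on the main thread, via the scheduler selected in the constructor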
+ this.syncExecutor.execute(task);
+ }
+
+ @Override
public void log(Level level, String msg) {
if (level == Level.INFO) {
this.logger.info(msg);
@@ -163,6 +182,15 @@ public class Sponge8SparkPlugin implements SparkPlugin {
}
@Override
+ public WorldInfoProvider createWorldInfoProvider() {
+ if (this.game.isServerAvailable()) {
+ return new Sponge8WorldInfoProvider(this.game.server());
+ } else {
+ return WorldInfoProvider.NO_OP;
+ }
+ }
+
+ @Override
public PlatformInfo getPlatformInfo() {
return new Sponge8PlatformInfo(this.game);
}
diff --git a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8WorldInfoProvider.java b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8WorldInfoProvider.java
new file mode 100644
index 0000000..bff4d6e
--- /dev/null
+++ b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8WorldInfoProvider.java
@@ -0,0 +1,88 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.sponge;
+
+import com.google.common.collect.Lists;
+
+import me.lucko.spark.common.platform.world.AbstractChunkInfo;
+import me.lucko.spark.common.platform.world.CountMap;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
+
+import org.spongepowered.api.Server;
+import org.spongepowered.api.entity.Entity;
+import org.spongepowered.api.entity.EntityType;
+import org.spongepowered.api.entity.EntityTypes;
+import org.spongepowered.api.world.chunk.WorldChunk;
+import org.spongepowered.api.world.server.ServerWorld;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+
+public class Sponge8WorldInfoProvider implements WorldInfoProvider {
+ private final Server server;
+
+ public Sponge8WorldInfoProvider(Server server) {
+ this.server = server;
+ }
+
+ @Override
+ public Result<Sponge8ChunkInfo> poll() {
+ Result<Sponge8ChunkInfo> data = new Result<>();
+
+ for (ServerWorld world : this.server.worldManager().worlds()) {
+ List<WorldChunk> chunks = Lists.newArrayList(world.loadedChunks());
+
+ List<Sponge8ChunkInfo> list = new ArrayList<>(chunks.size());
+ for (WorldChunk chunk : chunks) {
+ list.add(new Sponge8ChunkInfo(chunk));
+ }
+
+ data.put(world.key().value(), list);
+ }
+
+ return data;
+ }
+
+ static final class Sponge8ChunkInfo extends AbstractChunkInfo<EntityType<?>> {
+ private final CountMap<EntityType<?>> entityCounts;
+
+ Sponge8ChunkInfo(WorldChunk chunk) {
+ super(chunk.chunkPosition().x(), chunk.chunkPosition().z());
+
+ this.entityCounts = new CountMap.Simple<>(new HashMap<>());
+ for (Entity entity : chunk.entities()) {
+ this.entityCounts.increment(entity.type());
+ }
+ }
+
+ @Override
+ public CountMap<EntityType<?>> getEntityCounts() {
+ return this.entityCounts;
+ }
+
+ @Override
+ public String entityTypeName(EntityType<?> type) {
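+ // the value (path) part of the type's registry key, without the namespace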
+ return EntityTypes.registry().valueKey(type).value();
+ }
+
+ }
+}