From a551317a0acc0f6ccb2d1bb66e8475b42387e59c Mon Sep 17 00:00:00 2001 From: Luck Date: Sat, 11 Jun 2022 21:05:08 +0100 Subject: Tidy up placeholder handling Co-authored-by: Caden Kriese --- .../lucko/spark/common/util/SparkPlaceholder.java | 191 +++++++++++++++++++++ 1 file changed, 191 insertions(+) create mode 100644 spark-common/src/main/java/me/lucko/spark/common/util/SparkPlaceholder.java (limited to 'spark-common') diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/SparkPlaceholder.java b/spark-common/src/main/java/me/lucko/spark/common/util/SparkPlaceholder.java new file mode 100644 index 0000000..be5bbc2 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/util/SparkPlaceholder.java @@ -0,0 +1,191 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.util; + +import me.lucko.spark.common.SparkPlatform; +import me.lucko.spark.common.monitor.cpu.CpuMonitor; +import me.lucko.spark.common.monitor.tick.TickStatistics; + +import net.kyori.adventure.text.Component; +import net.kyori.adventure.text.TextComponent; +import net.kyori.adventure.text.serializer.legacy.LegacyComponentSerializer; + +import java.util.Locale; +import java.util.function.BiFunction; + +public enum SparkPlaceholder { + + TPS((platform, arg) -> { + TickStatistics tickStatistics = platform.getTickStatistics(); + if (tickStatistics == null) { + return null; + } + + if (arg == null) { + return Component.text() + .append(StatisticFormatter.formatTps(tickStatistics.tps5Sec())).append(Component.text(", ")) + .append(StatisticFormatter.formatTps(tickStatistics.tps10Sec())).append(Component.text(", ")) + .append(StatisticFormatter.formatTps(tickStatistics.tps1Min())).append(Component.text(", ")) + .append(StatisticFormatter.formatTps(tickStatistics.tps5Min())).append(Component.text(", ")) + .append(StatisticFormatter.formatTps(tickStatistics.tps15Min())) + .build(); + } + + switch (arg) { + case "5s": + return StatisticFormatter.formatTps(tickStatistics.tps5Sec()); + case "10s": + return StatisticFormatter.formatTps(tickStatistics.tps10Sec()); + case "1m": + return StatisticFormatter.formatTps(tickStatistics.tps1Min()); + case "5m": + return StatisticFormatter.formatTps(tickStatistics.tps5Min()); + case "15m": + return StatisticFormatter.formatTps(tickStatistics.tps15Min()); + } + + return null; + }), + + TICKDURATION((platform, arg) -> { + TickStatistics tickStatistics = platform.getTickStatistics(); + if (tickStatistics == null || !tickStatistics.isDurationSupported()) { + return null; + } + + if (arg == null) { + return Component.text() + .append(StatisticFormatter.formatTickDurations(tickStatistics.duration10Sec())).append(Component.text("; ")) + .append(StatisticFormatter.formatTickDurations(tickStatistics.duration1Min())) + .build(); + } + + switch (arg) { + case "10s": + return 
StatisticFormatter.formatTickDurations(tickStatistics.duration10Sec()); + case "1m": + return StatisticFormatter.formatTickDurations(tickStatistics.duration1Min()); + } + + return null; + }), + + CPU_SYSTEM((platform, arg) -> { + if (arg == null) { + return Component.text() + .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad10SecAvg())).append(Component.text(", ")) + .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad1MinAvg())).append(Component.text(", ")) + .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad15MinAvg())) + .build(); + } + + switch (arg) { + case "10s": + return StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad10SecAvg()); + case "1m": + return StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad1MinAvg()); + case "15m": + return StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad15MinAvg()); + } + + return null; + }), + + CPU_PROCESS((platform, arg) -> { + if (arg == null) { + return Component.text() + .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad10SecAvg())).append(Component.text(", ")) + .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad1MinAvg())).append(Component.text(", ")) + .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad15MinAvg())) + .build(); + } + + switch (arg) { + case "10s": + return StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad10SecAvg()); + case "1m": + return StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad1MinAvg()); + case "15m": + return StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad15MinAvg()); + } + + return null; + }); + + private final String name; + private final BiFunction function; + + SparkPlaceholder(BiFunction function) { + this.name = name().toLowerCase(Locale.ROOT); + this.function = function; + } + + public String getName() { + return this.name; + } + + public TextComponent resolve(SparkPlatform platform, String arg) { + return this.function.apply(platform, arg); + } + + public static TextComponent resolveComponent(SparkPlatform platform, String placeholder) { + String[] parts = placeholder.split("_"); + + if (parts.length == 0) { + return null; + } + + String label = parts[0]; + + if (label.equals("tps")) { + String arg = parts.length < 2 ? null : parts[1]; + return TPS.resolve(platform, arg); + } + + if (label.equals("tickduration")) { + String arg = parts.length < 2 ? null : parts[1]; + return TICKDURATION.resolve(platform, arg); + } + + if (label.equals("cpu") && parts.length >= 2) { + String type = parts[1]; + String arg = parts.length < 3 ? 
null : parts[2]; + + if (type.equals("system")) { + return CPU_SYSTEM.resolve(platform, arg); + } + if (type.equals("process")) { + return CPU_PROCESS.resolve(platform, arg); + } + } + + return null; + } + + public static String resolveFormattingCode(SparkPlatform platform, String placeholder) { + TextComponent result = resolveComponent(platform, placeholder); + if (result == null) { + return null; + } + return LegacyComponentSerializer.legacySection().serialize(result); + } + +} -- cgit From 28cf3185c1374c4b5af277ef28482299694209a3 Mon Sep 17 00:00:00 2001 From: Luck Date: Mon, 20 Jun 2022 22:21:16 +0100 Subject: New paper config location (#217) --- .../spark/bukkit/BukkitServerConfigProvider.java | 70 +++++++++------------- .../serverconfig/AbstractServerConfigProvider.java | 63 +++++++++++++------ .../common/platform/serverconfig/ConfigParser.java | 31 ++++++++++ .../serverconfig/PropertiesConfigParser.java | 61 +++++++++++++++++++ .../serverconfig/PropertiesFileReader.java | 64 -------------------- 5 files changed, 165 insertions(+), 124 deletions(-) create mode 100644 spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ConfigParser.java create mode 100644 spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesConfigParser.java delete mode 100644 spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesFileReader.java (limited to 'spark-common') diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java index 953e171..ff1b55f 100644 --- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java +++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java @@ -22,13 +22,12 @@ package me.lucko.spark.bukkit; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; -import com.google.gson.Gson; import com.google.gson.GsonBuilder; -import com.google.gson.JsonElement; import com.google.gson.JsonSerializer; import me.lucko.spark.common.platform.serverconfig.AbstractServerConfigProvider; -import me.lucko.spark.common.platform.serverconfig.PropertiesFileReader; +import me.lucko.spark.common.platform.serverconfig.ConfigParser; +import me.lucko.spark.common.platform.serverconfig.PropertiesConfigParser; import org.bukkit.configuration.MemorySection; import org.bukkit.configuration.file.YamlConfiguration; @@ -37,23 +36,16 @@ import co.aikar.timings.TimingsManager; import java.io.BufferedReader; import java.io.IOException; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; -public class BukkitServerConfigProvider extends AbstractServerConfigProvider { - private static final Gson GSON = new GsonBuilder() - .registerTypeAdapter(MemorySection.class, (JsonSerializer) (obj, type, ctx) -> ctx.serialize(obj.getValues(false))) - .create(); +public class BukkitServerConfigProvider extends AbstractServerConfigProvider { /** A map of provided files and their type */ - private static final Map FILES; + private static final Map FILES; /** A collection of paths to be excluded from the files */ private static final Collection HIDDEN_PATHS; @@ -62,50 +54,46 @@ public class BukkitServerConfigProvider extends AbstractServerConfigProvider) (obj, 
type, ctx) -> ctx.serialize(obj.getValues(false))); + } - try (BufferedReader reader = Files.newBufferedReader(filePath, StandardCharsets.UTF_8)) { - Map values; + @Override + protected String rewriteConfigPath(String path) { + return path.startsWith("config/") + ? path.substring("config/".length()) + : path; + } - if (type == FileType.PROPERTIES) { - PropertiesFileReader propertiesReader = new PropertiesFileReader(reader); - values = propertiesReader.readProperties(); - } else if (type == FileType.YAML) { - YamlConfiguration config = YamlConfiguration.loadConfiguration(reader); - values = config.getValues(false); - } else { - throw new IllegalArgumentException("Unknown file type: " + type); - } + private enum YamlConfigParser implements ConfigParser { + INSTANCE; - return GSON.toJsonTree(values); + @Override + public Map parse(BufferedReader reader) throws IOException { + YamlConfiguration config = YamlConfiguration.loadConfiguration(reader); + return config.getValues(false); } } - enum FileType { - PROPERTIES, - YAML - } - static { - ImmutableMap.Builder files = ImmutableMap.builder() - .put("server.properties", FileType.PROPERTIES) - .put("bukkit.yml", FileType.YAML) - .put("spigot.yml", FileType.YAML) - .put("paper.yml", FileType.YAML) - .put("purpur.yml", FileType.YAML); + ImmutableMap.Builder files = ImmutableMap.builder() + .put("server.properties", PropertiesConfigParser.INSTANCE) + .put("bukkit.yml", YamlConfigParser.INSTANCE) + .put("spigot.yml", YamlConfigParser.INSTANCE) + .put("paper.yml", YamlConfigParser.INSTANCE) + .put("config/paper-global.yml", YamlConfigParser.INSTANCE) + .put("config/paper-world-defaults.yml", YamlConfigParser.INSTANCE) + .put("purpur.yml", YamlConfigParser.INSTANCE); for (String config : getSystemPropertyList("spark.serverconfigs.extra")) { - files.put(config, FileType.YAML); + files.put(config, YamlConfigParser.INSTANCE); } ImmutableSet.Builder hiddenPaths = ImmutableSet.builder() .add("database") .add("settings.bungeecord-addresses") .add("settings.velocity-support.secret") + .add("proxies.velocity.secret") .add("server-ip") .add("motd") .add("resource-pack") diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java index ead2131..0eef111 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java @@ -21,10 +21,17 @@ package me.lucko.spark.common.platform.serverconfig; import com.google.common.collect.ImmutableMap; +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; import com.google.gson.JsonElement; import com.google.gson.JsonObject; +import java.io.BufferedReader; import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; import java.util.Arrays; import java.util.Collection; import java.util.Collections; @@ -38,29 +45,37 @@ import java.util.stream.Collectors; * *
This implementation is able to delete hidden paths from * the configurations before they are sent to the viewer.
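* <p>For example (illustrative): with {@code settings.velocity-support.secret} listed as a hidden path, that entry is removed from the parsed JSON tree by {@code delete(...)} before the configuration is sent to the viewer.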
- * - * @param the file type */ -public abstract class AbstractServerConfigProvider> implements ServerConfigProvider { - private final Map files; +public abstract class AbstractServerConfigProvider implements ServerConfigProvider { + private final Map files; private final Collection hiddenPaths; - protected AbstractServerConfigProvider(Map files, Collection hiddenPaths) { + private final Gson gson; + + protected AbstractServerConfigProvider(Map files, Collection hiddenPaths) { this.files = files; this.hiddenPaths = hiddenPaths; + + GsonBuilder gson = new GsonBuilder(); + customiseGson(gson); + this.gson = gson.create(); } @Override public final Map loadServerConfigurations() { ImmutableMap.Builder builder = ImmutableMap.builder(); - this.files.forEach((path, type) -> { + this.files.forEach((path, reader) -> { try { - JsonElement json = load(path, type); - if (json != null) { - delete(json, this.hiddenPaths); - builder.put(path, json); + JsonElement json = load(path, reader); + if (json == null) { + return; } + + delete(json, this.hiddenPaths); + + String name = rewriteConfigPath(path); + builder.put(name, json); } catch (Exception e) { e.printStackTrace(); } @@ -69,15 +84,25 @@ public abstract class AbstractServerConfigProvider> implements return builder.build(); } - /** - * Loads a file from the system. - * - * @param path the name of the file to load - * @param type the type of the file - * @return the loaded file - * @throws IOException if an error occurs performing i/o - */ - protected abstract JsonElement load(String path, T type) throws IOException; + private JsonElement load(String path, ConfigParser parser) throws IOException { + Path filePath = Paths.get(path); + if (!Files.exists(filePath)) { + return null; + } + + try (BufferedReader reader = Files.newBufferedReader(filePath, StandardCharsets.UTF_8)) { + Map values = parser.parse(reader); + return this.gson.toJsonTree(values); + } + } + + protected void customiseGson(GsonBuilder gson) { + + } + + protected String rewriteConfigPath(String path) { + return path; + } /** * Deletes the given paths from the json element. diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ConfigParser.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ConfigParser.java new file mode 100644 index 0000000..2dd15fe --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ConfigParser.java @@ -0,0 +1,31 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +package me.lucko.spark.common.platform.serverconfig; + +import java.io.BufferedReader; +import java.io.IOException; +import java.util.Map; + +public interface ConfigParser { + + Map parse(BufferedReader reader) throws IOException; + +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesConfigParser.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesConfigParser.java new file mode 100644 index 0000000..4c7c2c1 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesConfigParser.java @@ -0,0 +1,61 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.platform.serverconfig; + +import java.io.BufferedReader; +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import java.util.Properties; + +/** + * A {@link ConfigParser} that can parse a .properties file. + */ +public enum PropertiesConfigParser implements ConfigParser { + INSTANCE; + + @Override + public Map parse(BufferedReader reader) throws IOException { + Properties properties = new Properties(); + properties.load(reader); + + Map values = new HashMap<>(); + properties.forEach((k, v) -> { + String key = k.toString(); + String value = v.toString(); + + if ("true".equals(value) || "false".equals(value)) { + values.put(key, Boolean.parseBoolean(value)); + } else if (value.matches("\\d+")) { + try { + values.put(key, Long.parseLong(value)); + } catch (NumberFormatException e) { + values.put(key, value); + } + } else { + values.put(key, value); + } + }); + + return values; + } + +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesFileReader.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesFileReader.java deleted file mode 100644 index 8fc89d7..0000000 --- a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesFileReader.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * This file is part of spark. - * - * Copyright (c) lucko (Luck) - * Copyright (c) contributors - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
- */ - -package me.lucko.spark.common.platform.serverconfig; - -import java.io.FilterReader; -import java.io.IOException; -import java.io.Reader; -import java.util.HashMap; -import java.util.Map; -import java.util.Properties; - -/** - * A {@link Reader} that can parse a .properties file. - */ -public class PropertiesFileReader extends FilterReader { - - public PropertiesFileReader(Reader in) { - super(in); - } - - public Map readProperties() throws IOException { - Properties properties = new Properties(); - properties.load(this); - - Map values = new HashMap<>(); - properties.forEach((k, v) -> { - String key = k.toString(); - String value = v.toString(); - - if ("true".equals(value) || "false".equals(value)) { - values.put(key, Boolean.parseBoolean(value)); - } else if (value.matches("\\d+")) { - try { - values.put(key, Long.parseLong(value)); - } catch (NumberFormatException e) { - values.put(key, value); - } - } else { - values.put(key, value); - } - }); - - return values; - } - -} -- cgit From 4d45579d2bf57b417d5d3eca041c2131177183e4 Mon Sep 17 00:00:00 2001 From: Luck Date: Sat, 25 Jun 2022 22:48:55 +0100 Subject: Add providers for world (entity/chunk) statistics --- .../me/lucko/spark/bukkit/BukkitSparkPlugin.java | 13 +- .../spark/bukkit/BukkitWorldInfoProvider.java | 87 +++++++++ .../java/me/lucko/spark/common/SparkPlugin.java | 19 ++ .../platform/PlatformStatisticsProvider.java | 15 ++ .../common/platform/world/AbstractChunkInfo.java | 55 ++++++ .../spark/common/platform/world/ChunkInfo.java | 44 +++++ .../spark/common/platform/world/CountMap.java | 110 +++++++++++ .../common/platform/world/WorldInfoProvider.java | 57 ++++++ .../platform/world/WorldStatisticsProvider.java | 216 +++++++++++++++++++++ spark-common/src/main/proto/spark/spark.proto | 27 ++- .../spark/fabric/FabricWorldInfoProvider.java | 145 ++++++++++++++ .../fabric/mixin/ClientEntityManagerAccessor.java | 36 ++++ .../spark/fabric/mixin/ClientWorldAccessor.java | 36 ++++ .../fabric/mixin/ServerEntityManagerAccessor.java | 36 ++++ .../spark/fabric/mixin/ServerWorldAccessor.java | 36 ++++ .../fabric/plugin/FabricClientSparkPlugin.java | 12 ++ .../fabric/plugin/FabricServerSparkPlugin.java | 12 ++ spark-fabric/src/main/resources/fabric.mod.json | 3 + spark-fabric/src/main/resources/spark.mixins.json | 14 ++ spark-forge/build.gradle | 1 + .../lucko/spark/forge/ForgeWorldInfoProvider.java | 141 ++++++++++++++ .../spark/forge/plugin/ForgeClientSparkPlugin.java | 12 ++ .../spark/forge/plugin/ForgeServerSparkPlugin.java | 12 ++ .../main/resources/META-INF/accesstransformer.cfg | 4 + .../me/lucko/spark/nukkit/NukkitSparkPlugin.java | 8 +- .../me/lucko/spark/sponge/Sponge7SparkPlugin.java | 32 ++- .../spark/sponge/Sponge7WorldInfoProvider.java | 87 +++++++++ .../me/lucko/spark/sponge/Sponge8SparkPlugin.java | 36 +++- .../spark/sponge/Sponge8WorldInfoProvider.java | 88 +++++++++ 29 files changed, 1376 insertions(+), 18 deletions(-) create mode 100644 spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitWorldInfoProvider.java create mode 100644 spark-common/src/main/java/me/lucko/spark/common/platform/world/AbstractChunkInfo.java create mode 100644 spark-common/src/main/java/me/lucko/spark/common/platform/world/ChunkInfo.java create mode 100644 spark-common/src/main/java/me/lucko/spark/common/platform/world/CountMap.java create mode 100644 spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldInfoProvider.java create mode 100644 
spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java create mode 100644 spark-fabric/src/main/java/me/lucko/spark/fabric/FabricWorldInfoProvider.java create mode 100644 spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientEntityManagerAccessor.java create mode 100644 spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientWorldAccessor.java create mode 100644 spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerEntityManagerAccessor.java create mode 100644 spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerWorldAccessor.java create mode 100644 spark-fabric/src/main/resources/spark.mixins.json create mode 100644 spark-forge/src/main/java/me/lucko/spark/forge/ForgeWorldInfoProvider.java create mode 100644 spark-forge/src/main/resources/META-INF/accesstransformer.cfg create mode 100644 spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7WorldInfoProvider.java create mode 100644 spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8WorldInfoProvider.java (limited to 'spark-common') diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java index 9727277..fddd66b 100644 --- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java +++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java @@ -28,6 +28,7 @@ import me.lucko.spark.common.SparkPlugin; import me.lucko.spark.common.monitor.ping.PlayerPingProvider; import me.lucko.spark.common.platform.PlatformInfo; import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider; +import me.lucko.spark.common.platform.world.WorldInfoProvider; import me.lucko.spark.common.sampler.ThreadDumper; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.tick.TickReporter; @@ -136,7 +137,12 @@ public class BukkitSparkPlugin extends JavaPlugin implements SparkPlugin { @Override public void executeAsync(Runnable task) { - getServer().getScheduler().runTaskAsynchronously(BukkitSparkPlugin.this, task); + getServer().getScheduler().runTaskAsynchronously(this, task); + } + + @Override + public void executeSync(Runnable task) { + getServer().getScheduler().runTask(this, task); } @Override @@ -187,6 +193,11 @@ public class BukkitSparkPlugin extends JavaPlugin implements SparkPlugin { return new BukkitServerConfigProvider(); } + @Override + public WorldInfoProvider createWorldInfoProvider() { + return new BukkitWorldInfoProvider(getServer()); + } + @Override public PlatformInfo getPlatformInfo() { return new BukkitPlatformInfo(getServer()); diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitWorldInfoProvider.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitWorldInfoProvider.java new file mode 100644 index 0000000..f34899b --- /dev/null +++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitWorldInfoProvider.java @@ -0,0 +1,87 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.bukkit; + +import me.lucko.spark.common.platform.world.AbstractChunkInfo; +import me.lucko.spark.common.platform.world.CountMap; +import me.lucko.spark.common.platform.world.WorldInfoProvider; + +import org.bukkit.Chunk; +import org.bukkit.Server; +import org.bukkit.World; +import org.bukkit.entity.Entity; +import org.bukkit.entity.EntityType; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +public class BukkitWorldInfoProvider implements WorldInfoProvider { + private final Server server; + + public BukkitWorldInfoProvider(Server server) { + this.server = server; + } + + @Override + public Result poll() { + Result data = new Result<>(); + + for (World world : this.server.getWorlds()) { + Chunk[] chunks = world.getLoadedChunks(); + + List list = new ArrayList<>(chunks.length); + for (Chunk chunk : chunks) { + list.add(new BukkitChunkInfo(chunk)); + } + + data.put(world.getName(), list); + } + + return data; + } + + static final class BukkitChunkInfo extends AbstractChunkInfo { + private final CountMap entityCounts; + + BukkitChunkInfo(Chunk chunk) { + super(chunk.getX(), chunk.getZ()); + + this.entityCounts = new CountMap.EnumKeyed<>(EntityType.class); + for (Entity entity : chunk.getEntities()) { + this.entityCounts.increment(entity.getType()); + } + } + + @Override + public CountMap getEntityCounts() { + return this.entityCounts; + } + + @SuppressWarnings("deprecation") + @Override + public String entityTypeName(EntityType type) { + return type.getName(); + } + + } + +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java index b817df1..1116b04 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java +++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java @@ -25,6 +25,7 @@ import me.lucko.spark.common.command.sender.CommandSender; import me.lucko.spark.common.monitor.ping.PlayerPingProvider; import me.lucko.spark.common.platform.PlatformInfo; import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider; +import me.lucko.spark.common.platform.world.WorldInfoProvider; import me.lucko.spark.common.sampler.ThreadDumper; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.tick.TickReporter; @@ -74,6 +75,15 @@ public interface SparkPlugin { */ void executeAsync(Runnable task); + /** + * Executes the given {@link Runnable} on the server/client main thread. + * + * @param task the task + */ + default void executeSync(Runnable task) { + throw new UnsupportedOperationException(); + } + /** * Print to the plugin logger. * @@ -142,6 +152,15 @@ public interface SparkPlugin { return ServerConfigProvider.NO_OP; } + /** + * Creates a world info provider. + * + * @return the world info provider function + */ + default WorldInfoProvider createWorldInfoProvider() { + return WorldInfoProvider.NO_OP; + } + /** * Gets information for the platform. 
* diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java index f35bbbe..49cfed5 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java @@ -30,8 +30,11 @@ import me.lucko.spark.common.monitor.net.NetworkInterfaceAverages; import me.lucko.spark.common.monitor.net.NetworkMonitor; import me.lucko.spark.common.monitor.ping.PingStatistics; import me.lucko.spark.common.monitor.tick.TickStatistics; +import me.lucko.spark.common.platform.world.WorldInfoProvider; +import me.lucko.spark.common.platform.world.WorldStatisticsProvider; import me.lucko.spark.proto.SparkProtos.PlatformStatistics; import me.lucko.spark.proto.SparkProtos.SystemStatistics; +import me.lucko.spark.proto.SparkProtos.WorldStatistics; import java.lang.management.ManagementFactory; import java.lang.management.MemoryUsage; @@ -182,6 +185,18 @@ public class PlatformStatisticsProvider { builder.setPlayerCount(playerCount); } + try { + WorldInfoProvider worldInfo = this.platform.getPlugin().createWorldInfoProvider(); + WorldStatisticsProvider worldStatisticsProvider = new WorldStatisticsProvider(this.platform, worldInfo); + WorldStatistics worldStatistics = worldStatisticsProvider.getWorldStatistics(); + if (worldStatistics != null) { + builder.setWorld(worldStatistics); + } + } catch (Exception e) { + e.printStackTrace(); + } + + return builder.build(); } diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/AbstractChunkInfo.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/AbstractChunkInfo.java new file mode 100644 index 0000000..80026cd --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/AbstractChunkInfo.java @@ -0,0 +1,55 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +package me.lucko.spark.common.platform.world; + +public abstract class AbstractChunkInfo implements ChunkInfo { + private final int x; + private final int z; + + protected AbstractChunkInfo(int x, int z) { + this.x = x; + this.z = z; + } + + @Override + public int getX() { + return this.x; + } + + @Override + public int getZ() { + return this.z; + } + + @Override + public boolean equals(Object obj) { + if (obj == this) return true; + if (!(obj instanceof AbstractChunkInfo)) return false; + AbstractChunkInfo that = (AbstractChunkInfo) obj; + return this.x == that.x && this.z == that.z; + } + + @Override + public int hashCode() { + return this.x ^ this.z; + } + +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/ChunkInfo.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/ChunkInfo.java new file mode 100644 index 0000000..2193a50 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/ChunkInfo.java @@ -0,0 +1,44 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.platform.world; + +/** + * Information about a given chunk. + * + * @param the type used to describe entities + */ +public interface ChunkInfo { + + int getX(); + + int getZ(); + + CountMap getEntityCounts(); + + /** + * Converts entity type {@link E} to a string. + * + * @param type the entity type + * @return a string + */ + String entityTypeName(E type); + +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/CountMap.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/CountMap.java new file mode 100644 index 0000000..3083266 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/CountMap.java @@ -0,0 +1,110 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.platform.world; + +import java.util.EnumMap; +import java.util.Map; +import java.util.concurrent.atomic.AtomicInteger; + +/** + * A map of (key) -> count. 
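+ * <p>A minimal usage sketch (illustrative):
+ * <pre>
+ * CountMap<String> counts = new CountMap.Simple<>(new HashMap<>());
+ * counts.increment("zombie");
+ * counts.add("zombie", 2);
+ * counts.total().get(); // 3
+ * </pre>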
+ * + * @param the key type + */ +public interface CountMap { + + /** + * Increment the counter for the given key + * + * @param key the key + */ + void increment(T key); + + /** + * Add to the counter for the given key + * + * @param key the key + */ + void add(T key, int delta); + + AtomicInteger total(); + + Map asMap(); + + /** + * A simple {@link CountMap} backed by the provided {@link Map} + * + * @param the key type + */ + class Simple implements CountMap { + private final Map counts; + private final AtomicInteger total; + + public Simple(Map counts) { + this.counts = counts; + this.total = new AtomicInteger(); + } + + @Override + public void increment(T key) { + AtomicInteger counter = this.counts.get(key); + if (counter == null) { + counter = new AtomicInteger(); + this.counts.put(key, counter); + } + counter.incrementAndGet(); + this.total.incrementAndGet(); + } + + @Override + public void add(T key, int delta) { + AtomicInteger counter = this.counts.get(key); + if (counter == null) { + counter = new AtomicInteger(); + this.counts.put(key, counter); + } + counter.addAndGet(delta); + this.total.addAndGet(delta); + } + + @Override + public AtomicInteger total() { + return this.total; + } + + @Override + public Map asMap() { + return this.counts; + } + } + + /** + * A {@link CountMap} backed by an {@link EnumMap}. + * + * @param the key type - must be an enum + */ + class EnumKeyed> extends Simple { + public EnumKeyed(Class keyClass) { + super(new EnumMap<>(keyClass)); + } + } + +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldInfoProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldInfoProvider.java new file mode 100644 index 0000000..9494816 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldInfoProvider.java @@ -0,0 +1,57 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.platform.world; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** + * Provides information about worlds. + */ +public interface WorldInfoProvider { + + WorldInfoProvider NO_OP = () -> null; + + /** + * Polls for information. 
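+ * <p>If {@link #mustCallSync()} returns {@code true} (the default), callers such as {@code WorldStatisticsProvider} dispatch this poll onto the main thread via {@code SparkPlugin#executeSync(Runnable)} rather than calling it directly.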
+ * + * @return the information + */ + Result> poll(); + + default boolean mustCallSync() { + return true; + } + + final class Result { + private final Map> worlds = new HashMap<>(); + + public void put(String worldName, List chunks) { + this.worlds.put(worldName, chunks); + } + + public Map> getWorlds() { + return this.worlds; + } + } + +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java new file mode 100644 index 0000000..864a296 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java @@ -0,0 +1,216 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.platform.world; + +import me.lucko.spark.common.SparkPlatform; +import me.lucko.spark.common.SparkPlugin; +import me.lucko.spark.proto.SparkProtos.WorldStatistics; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Set; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.logging.Level; + +public class WorldStatisticsProvider { + private final SparkPlatform platform; + private final WorldInfoProvider provider; + + public WorldStatisticsProvider(SparkPlatform platform, WorldInfoProvider provider) { + this.platform = platform; + this.provider = provider; + } + + public WorldStatistics getWorldStatistics() { + if (this.provider == WorldInfoProvider.NO_OP) { + return null; + } + + CompletableFuture>> future; + + if (this.provider.mustCallSync()) { + SparkPlugin plugin = this.platform.getPlugin(); + future = CompletableFuture.supplyAsync(this.provider::poll, plugin::executeSync); + } else { + future = CompletableFuture.completedFuture(this.provider.poll()); + } + + WorldInfoProvider.Result> result; + try { + result = future.get(5, TimeUnit.SECONDS); + } catch (InterruptedException | ExecutionException e) { + throw new RuntimeException(e); + } catch (TimeoutException e) { + this.platform.getPlugin().log(Level.WARNING, "Timed out waiting for world statistics"); + return null; + } + + if (result == null) { + return null; + } + + WorldStatistics.Builder stats = WorldStatistics.newBuilder(); + + AtomicInteger combinedTotal = new AtomicInteger(); + CountMap combined = new CountMap.Simple<>(new HashMap<>()); + + result.getWorlds().forEach((worldName, chunks) -> { + WorldStatistics.World.Builder builder = WorldStatistics.World.newBuilder(); + builder.setName(worldName); + + List regions = groupIntoRegions(chunks); + + int total = 0; + + for 
(Region region : regions) { + builder.addRegions(regionToProto(region, combined)); + total += region.getTotalEntities().get(); + } + + builder.setTotalEntities(total); + combinedTotal.addAndGet(total); + + stats.addWorlds(builder.build()); + }); + + stats.setTotalEntities(combinedTotal.get()); + combined.asMap().forEach((key, value) -> stats.putEntityCounts(key, value.get())); + + return stats.build(); + } + + private static WorldStatistics.Region regionToProto(Region region, CountMap combined) { + WorldStatistics.Region.Builder builder = WorldStatistics.Region.newBuilder(); + builder.setTotalEntities(region.getTotalEntities().get()); + for (ChunkInfo chunk : region.getChunks()) { + builder.addChunks(chunkToProto(chunk, combined)); + } + return builder.build(); + } + + private static WorldStatistics.Chunk chunkToProto(ChunkInfo chunk, CountMap combined) { + WorldStatistics.Chunk.Builder builder = WorldStatistics.Chunk.newBuilder(); + builder.setX(chunk.getX()); + builder.setZ(chunk.getZ()); + builder.setTotalEntities(chunk.getEntityCounts().total().get()); + chunk.getEntityCounts().asMap().forEach((key, value) -> { + String name = chunk.entityTypeName(key); + int count = value.get(); + + builder.putEntityCounts(name, count); + combined.add(name, count); + }); + return builder.build(); + } + + private static List groupIntoRegions(List> chunks) { + List regions = new ArrayList<>(); + + for (ChunkInfo chunk : chunks) { + CountMap counts = chunk.getEntityCounts(); + if (counts.total().get() == 0) { + continue; + } + + boolean found = false; + + for (Region region : regions) { + if (region.isAdjacent(chunk)) { + found = true; + region.add(chunk); + + // if the chunk is adjacent to more than one region, merge the regions together + for (Iterator iterator = regions.iterator(); iterator.hasNext(); ) { + Region otherRegion = iterator.next(); + if (region != otherRegion && otherRegion.isAdjacent(chunk)) { + iterator.remove(); + region.merge(otherRegion); + } + } + + break; + } + } + + if (!found) { + regions.add(new Region(chunk)); + } + } + + return regions; + } + + /** + * A map of nearby chunks grouped together by Euclidean distance. 
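+ * <p>Chunks join a region while the squared distance to any chunk already in it is at most {@code DISTANCE_THRESHOLD} (2), i.e. neighbours touching horizontally, vertically or diagonally: chunk (10, 10) merges with (11, 11) because dx*dx + dz*dz = 1 + 1 = 2.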
+ */ + private static final class Region { + private static final int DISTANCE_THRESHOLD = 2; + private final Set> chunks; + private final AtomicInteger totalEntities; + + private Region(ChunkInfo initial) { + this.chunks = new HashSet<>(); + this.chunks.add(initial); + this.totalEntities = new AtomicInteger(initial.getEntityCounts().total().get()); + } + + public Set> getChunks() { + return this.chunks; + } + + public AtomicInteger getTotalEntities() { + return this.totalEntities; + } + + public boolean isAdjacent(ChunkInfo chunk) { + for (ChunkInfo el : this.chunks) { + if (squaredEuclideanDistance(el, chunk) <= DISTANCE_THRESHOLD) { + return true; + } + } + return false; + } + + public void add(ChunkInfo chunk) { + this.chunks.add(chunk); + this.totalEntities.addAndGet(chunk.getEntityCounts().total().get()); + } + + public void merge(Region group) { + this.chunks.addAll(group.getChunks()); + this.totalEntities.addAndGet(group.getTotalEntities().get()); + } + + private static long squaredEuclideanDistance(ChunkInfo a, ChunkInfo b) { + long dx = a.getX() - b.getX(); + long dz = a.getZ() - b.getZ(); + return (dx * dx) + (dz * dz); + } + } + +} diff --git a/spark-common/src/main/proto/spark/spark.proto b/spark-common/src/main/proto/spark/spark.proto index ec0aa88..2ea341f 100644 --- a/spark-common/src/main/proto/spark/spark.proto +++ b/spark-common/src/main/proto/spark/spark.proto @@ -94,7 +94,8 @@ message PlatformStatistics { Tps tps = 4; // optional Mspt mspt = 5; // optional Ping ping = 6; // optional - int64 player_count = 7; + int64 player_count = 7; // optional + WorldStatistics world = 8; // optional message Memory { MemoryPool heap = 1; @@ -127,6 +128,30 @@ message PlatformStatistics { } } +message WorldStatistics { + int32 total_entities = 1; + map entity_counts = 2; + repeated World worlds = 3; + + message World { + string name = 1; + int32 total_entities = 2; + repeated Region regions = 3; + } + + message Region { + int32 total_entities = 1; + repeated Chunk chunks = 2; + } + + message Chunk { + int32 x = 1; + int32 z = 2; + int32 total_entities = 3; + map entity_counts = 4; + } +} + message RollingAverageValues { double mean = 1; double max = 2; diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricWorldInfoProvider.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricWorldInfoProvider.java new file mode 100644 index 0000000..fddcf58 --- /dev/null +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricWorldInfoProvider.java @@ -0,0 +1,145 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +package me.lucko.spark.fabric; + +import it.unimi.dsi.fastutil.longs.LongIterator; +import it.unimi.dsi.fastutil.longs.LongSet; + +import me.lucko.spark.common.platform.world.AbstractChunkInfo; +import me.lucko.spark.common.platform.world.CountMap; +import me.lucko.spark.common.platform.world.WorldInfoProvider; +import me.lucko.spark.fabric.mixin.ClientEntityManagerAccessor; +import me.lucko.spark.fabric.mixin.ClientWorldAccessor; +import me.lucko.spark.fabric.mixin.ServerEntityManagerAccessor; +import me.lucko.spark.fabric.mixin.ServerWorldAccessor; + +import net.minecraft.client.MinecraftClient; +import net.minecraft.client.world.ClientEntityManager; +import net.minecraft.client.world.ClientWorld; +import net.minecraft.entity.Entity; +import net.minecraft.entity.EntityType; +import net.minecraft.server.MinecraftServer; +import net.minecraft.server.world.ServerEntityManager; +import net.minecraft.server.world.ServerWorld; +import net.minecraft.util.math.ChunkPos; +import net.minecraft.world.entity.EntityTrackingSection; +import net.minecraft.world.entity.SectionedEntityCache; +import net.minecraft.world.level.ServerWorldProperties; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.stream.Stream; + +public abstract class FabricWorldInfoProvider implements WorldInfoProvider { + + protected List getChunksFromCache(SectionedEntityCache cache) { + LongSet loadedChunks = cache.getChunkPositions(); + List list = new ArrayList<>(loadedChunks.size()); + + for (LongIterator iterator = loadedChunks.iterator(); iterator.hasNext(); ) { + long chunkPos = iterator.nextLong(); + Stream> sections = cache.getTrackingSections(chunkPos); + + list.add(new FabricChunkInfo(chunkPos, sections)); + } + + return list; + } + + public static final class Server extends FabricWorldInfoProvider { + private final MinecraftServer server; + + public Server(MinecraftServer server) { + this.server = server; + } + + @Override + public Result poll() { + Result data = new Result<>(); + + for (ServerWorld world : this.server.getWorlds()) { + ServerEntityManager entityManager = ((ServerWorldAccessor) world).getEntityManager(); + SectionedEntityCache cache = ((ServerEntityManagerAccessor) entityManager).getCache(); + + List list = getChunksFromCache(cache); + data.put(((ServerWorldProperties) world.getLevelProperties()).getLevelName(), list); + } + + return data; + } + } + + public static final class Client extends FabricWorldInfoProvider { + private final MinecraftClient client; + + public Client(MinecraftClient client) { + this.client = client; + } + + @Override + public Result poll() { + Result data = new Result<>(); + + ClientWorld world = this.client.world; + if (world == null) { + return null; + } + + ClientEntityManager entityManager = ((ClientWorldAccessor) world).getEntityManager(); + SectionedEntityCache cache = ((ClientEntityManagerAccessor) entityManager).getCache(); + + List list = getChunksFromCache(cache); + data.put(world.getDimensionKey().getValue().getPath(), list); + + return data; + } + } + + static final class FabricChunkInfo extends AbstractChunkInfo> { + private final CountMap> entityCounts; + + FabricChunkInfo(long chunkPos, Stream> entities) { + super(ChunkPos.getPackedX(chunkPos), ChunkPos.getPackedZ(chunkPos)); + + this.entityCounts = new CountMap.Simple<>(new HashMap<>()); + entities.forEach(section -> { + if (section.getStatus().shouldTrack()) { + section.stream().forEach(entity -> + this.entityCounts.increment(entity.getType()) + ); + } + }); 
+ } + + @Override + public CountMap> getEntityCounts() { + return this.entityCounts; + } + + @Override + public String entityTypeName(EntityType type) { + return EntityType.getId(type).toString(); + } + } + +} + diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientEntityManagerAccessor.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientEntityManagerAccessor.java new file mode 100644 index 0000000..88c9521 --- /dev/null +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientEntityManagerAccessor.java @@ -0,0 +1,36 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.fabric.mixin; + +import net.minecraft.client.world.ClientEntityManager; +import net.minecraft.entity.Entity; +import net.minecraft.world.entity.SectionedEntityCache; + +import org.spongepowered.asm.mixin.Mixin; +import org.spongepowered.asm.mixin.gen.Accessor; + +@Mixin(ClientEntityManager.class) +public interface ClientEntityManagerAccessor { + + @Accessor + SectionedEntityCache getCache(); + +} diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientWorldAccessor.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientWorldAccessor.java new file mode 100644 index 0000000..01562ef --- /dev/null +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientWorldAccessor.java @@ -0,0 +1,36 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +package me.lucko.spark.fabric.mixin; + +import net.minecraft.client.world.ClientEntityManager; +import net.minecraft.client.world.ClientWorld; +import net.minecraft.entity.Entity; + +import org.spongepowered.asm.mixin.Mixin; +import org.spongepowered.asm.mixin.gen.Accessor; + +@Mixin(ClientWorld.class) +public interface ClientWorldAccessor { + + @Accessor + ClientEntityManager getEntityManager(); + +} diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerEntityManagerAccessor.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerEntityManagerAccessor.java new file mode 100644 index 0000000..160a12b --- /dev/null +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerEntityManagerAccessor.java @@ -0,0 +1,36 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.fabric.mixin; + +import net.minecraft.entity.Entity; +import net.minecraft.server.world.ServerEntityManager; +import net.minecraft.world.entity.SectionedEntityCache; + +import org.spongepowered.asm.mixin.Mixin; +import org.spongepowered.asm.mixin.gen.Accessor; + +@Mixin(ServerEntityManager.class) +public interface ServerEntityManagerAccessor { + + @Accessor + SectionedEntityCache getCache(); + +} diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerWorldAccessor.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerWorldAccessor.java new file mode 100644 index 0000000..cf2e7e8 --- /dev/null +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerWorldAccessor.java @@ -0,0 +1,36 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricClientSparkPlugin.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricClientSparkPlugin.java
index e94d697..1876658 100644
--- a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricClientSparkPlugin.java
+++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricClientSparkPlugin.java
@@ -29,6 +29,7 @@ import com.mojang.brigadier.suggestion.Suggestions;
 import com.mojang.brigadier.suggestion.SuggestionsBuilder;
 
 import me.lucko.spark.common.platform.PlatformInfo;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
 import me.lucko.spark.common.tick.TickHook;
 import me.lucko.spark.common.tick.TickReporter;
 import me.lucko.spark.fabric.FabricCommandSender;
@@ -36,6 +37,7 @@ import me.lucko.spark.fabric.FabricPlatformInfo;
 import me.lucko.spark.fabric.FabricSparkMod;
 import me.lucko.spark.fabric.FabricTickHook;
 import me.lucko.spark.fabric.FabricTickReporter;
+import me.lucko.spark.fabric.FabricWorldInfoProvider;
 
 import net.fabricmc.fabric.api.client.command.v2.ClientCommandRegistrationCallback;
 import net.fabricmc.fabric.api.client.command.v2.FabricClientCommandSource;
@@ -112,6 +114,11 @@ public class FabricClientSparkPlugin extends FabricSparkPlugin implements Command<FabricClientCommandSource>
         return Stream.of(new FabricCommandSender(this.minecraft.player, this));
     }
 
+    @Override
+    public void executeSync(Runnable task) {
+        this.minecraft.executeSync(task);
+    }
+
     @Override
     public TickHook createTickHook() {
         return new FabricTickHook.Client();
@@ -122,6 +129,11 @@ public class FabricClientSparkPlugin extends FabricSparkPlugin implements Command<FabricClientCommandSource>
         return new FabricTickReporter.Client();
     }
 
+    @Override
+    public WorldInfoProvider createWorldInfoProvider() {
+        return new FabricWorldInfoProvider.Client(this.minecraft);
+    }
+
     @Override
     public PlatformInfo getPlatformInfo() {
         return new FabricPlatformInfo(PlatformInfo.Type.CLIENT);
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java
index 3d1a0e7..2283a84 100644
--- a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java
+++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java
@@ -31,6 +31,7 @@ import com.mojang.brigadier.suggestion.SuggestionsBuilder;
 import me.lucko.fabric.api.permissions.v0.Permissions;
 import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
 import me.lucko.spark.common.platform.PlatformInfo;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
 import me.lucko.spark.common.tick.TickHook;
 import me.lucko.spark.common.tick.TickReporter;
 import me.lucko.spark.fabric.FabricCommandSender;
@@ -39,6 +40,7 @@ import me.lucko.spark.fabric.FabricPlayerPingProvider;
 import me.lucko.spark.fabric.FabricSparkMod;
 import me.lucko.spark.fabric.FabricTickHook;
 import me.lucko.spark.fabric.FabricTickReporter;
+import me.lucko.spark.fabric.FabricWorldInfoProvider;
 import me.lucko.spark.fabric.placeholder.SparkFabricPlaceholderApi;
 
 import net.fabricmc.loader.api.FabricLoader;
@@ -126,6 +128,11 @@ public class FabricServerSparkPlugin extends FabricSparkPlugin implements Command<ServerCommandSource>
         ).map(sender -> new FabricCommandSender(sender, this));
     }
 
+    @Override
+    public void executeSync(Runnable task) {
+        this.server.executeSync(task);
+    }
+
     @Override
     public TickHook createTickHook() {
         return new FabricTickHook.Server();
@@ -141,6 +148,11 @@ public class FabricServerSparkPlugin extends FabricSparkPlugin implements Command<ServerCommandSource>
         return new FabricPlayerPingProvider(this.server);
     }
 
+    @Override
+    public WorldInfoProvider createWorldInfoProvider() {
+        return new FabricWorldInfoProvider.Server(this.server);
+    }
+
     @Override
     public PlatformInfo getPlatformInfo() {
         return new FabricPlatformInfo(PlatformInfo.Type.SERVER);
diff --git a/spark-fabric/src/main/resources/fabric.mod.json b/spark-fabric/src/main/resources/fabric.mod.json
index e2e600d..f1f0ad4 100644
--- a/spark-fabric/src/main/resources/fabric.mod.json
+++ b/spark-fabric/src/main/resources/fabric.mod.json
@@ -23,6 +23,9 @@
       "me.lucko.spark.fabric.FabricSparkMod::initializeClient"
     ]
   },
+  "mixins": [
+    "spark.mixins.json"
+  ],
   "depends": {
     "fabricloader": ">=0.4.0",
     "fabric-api-base": "*",
diff --git a/spark-fabric/src/main/resources/spark.mixins.json b/spark-fabric/src/main/resources/spark.mixins.json
new file mode 100644
index 0000000..09587fe
--- /dev/null
+++ b/spark-fabric/src/main/resources/spark.mixins.json
@@ -0,0 +1,14 @@
+{
+    "required": true,
+    "package": "me.lucko.spark.fabric.mixin",
+    "compatibilityLevel": "JAVA_17",
+    "mixins": [],
+    "client": [
+        "ClientEntityManagerAccessor",
+        "ClientWorldAccessor"
+    ],
+    "server": [
+        "ServerEntityManagerAccessor",
+        "ServerWorldAccessor"
+    ]
+}
\ No newline at end of file
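Both Fabric plugins now expose executeSync alongside the existing executeAsync; the point is to let spark hop onto the game thread before touching world state. A hedged sketch of the intended call pattern (the future wiring is illustrative; executeSync, createWorldInfoProvider and poll are the real methods added in this series):

CompletableFuture<WorldInfoProvider.Result<?>> future = new CompletableFuture<>();
plugin.executeSync(() -> {
    // runs on the server/client main thread, where world access is safe
    future.complete(plugin.createWorldInfoProvider().poll());
});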
diff --git a/spark-forge/build.gradle b/spark-forge/build.gradle
index 210122b..3f46b95 100644
--- a/spark-forge/build.gradle
+++ b/spark-forge/build.gradle
@@ -21,6 +21,7 @@ tasks.withType(JavaCompile) {
 
 minecraft {
     mappings channel: 'official', version: '1.19'
+    accessTransformer = file('src/main/resources/META-INF/accesstransformer.cfg')
 }
 
 configurations {
diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeWorldInfoProvider.java b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeWorldInfoProvider.java
new file mode 100644
index 0000000..b17dab5
--- /dev/null
+++ b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeWorldInfoProvider.java
@@ -0,0 +1,141 @@
+/*
+ * This file is part of spark.
+ *
+ *  Copyright (c) lucko (Luck) <luck@lucko.me>
+ *  Copyright (c) contributors
+ *
+ *  This program is free software: you can redistribute it and/or modify
+ *  it under the terms of the GNU General Public License as published by
+ *  the Free Software Foundation, either version 3 of the License, or
+ *  (at your option) any later version.
+ *
+ *  This program is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU General Public License for more details.
+ *
+ *  You should have received a copy of the GNU General Public License
+ *  along with this program.  If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.forge;
+
+import it.unimi.dsi.fastutil.longs.LongIterator;
+import it.unimi.dsi.fastutil.longs.LongSet;
+
+import me.lucko.spark.common.platform.world.AbstractChunkInfo;
+import me.lucko.spark.common.platform.world.CountMap;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
+
+import net.minecraft.client.Minecraft;
+import net.minecraft.client.multiplayer.ClientLevel;
+import net.minecraft.server.MinecraftServer;
+import net.minecraft.server.level.ServerLevel;
+import net.minecraft.world.entity.Entity;
+import net.minecraft.world.entity.EntityType;
+import net.minecraft.world.level.ChunkPos;
+import net.minecraft.world.level.entity.EntitySection;
+import net.minecraft.world.level.entity.EntitySectionStorage;
+import net.minecraft.world.level.entity.PersistentEntitySectionManager;
+import net.minecraft.world.level.entity.TransientEntitySectionManager;
+import net.minecraft.world.level.storage.ServerLevelData;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.stream.Stream;
+
+public abstract class ForgeWorldInfoProvider implements WorldInfoProvider {
+
+    protected List<ForgeChunkInfo> getChunksFromCache(EntitySectionStorage<Entity> cache) {
+        LongSet loadedChunks = cache.getAllChunksWithExistingSections();
+        List<ForgeChunkInfo> list = new ArrayList<>(loadedChunks.size());
+
+        for (LongIterator iterator = loadedChunks.iterator(); iterator.hasNext(); ) {
+            long chunkPos = iterator.nextLong();
+            Stream<EntitySection<Entity>> sections = cache.getExistingSectionsInChunk(chunkPos);
+
+            list.add(new ForgeChunkInfo(chunkPos, sections));
+        }
+
+        return list;
+    }
+
+    public static final class Server extends ForgeWorldInfoProvider {
+        private final MinecraftServer server;
+
+        public Server(MinecraftServer server) {
+            this.server = server;
+        }
+
+        @Override
+        public Result<ForgeChunkInfo> poll() {
+            Result<ForgeChunkInfo> data = new Result<>();
+
+            for (ServerLevel level : this.server.getAllLevels()) {
+                PersistentEntitySectionManager<Entity> entityManager = level.entityManager;
+                EntitySectionStorage<Entity> cache = entityManager.sectionStorage;
+
+                List<ForgeChunkInfo> list = getChunksFromCache(cache);
+                data.put(((ServerLevelData) level.getLevelData()).getLevelName(), list);
+            }
+
+            return data;
+        }
+    }
+
+    public static final class Client extends ForgeWorldInfoProvider {
+        private final Minecraft client;
+
+        public Client(Minecraft client) {
+            this.client = client;
+        }
+
+        @Override
+        public Result<ForgeChunkInfo> poll() {
+            Result<ForgeChunkInfo> data = new Result<>();
+
+            ClientLevel level = this.client.level;
+            if (level == null) {
+                return null;
+            }
+
+            TransientEntitySectionManager<Entity> entityManager = level.entityStorage;
+            EntitySectionStorage<Entity> cache = entityManager.sectionStorage;
+
+            List<ForgeChunkInfo> list = getChunksFromCache(cache);
+            data.put(level.dimensionTypeId().location().getPath(), list);
+
+            return data;
+        }
+    }
+
+    static final class ForgeChunkInfo extends AbstractChunkInfo<EntityType<?>> {
+        private final CountMap<EntityType<?>> entityCounts;
+
+        ForgeChunkInfo(long chunkPos, Stream<EntitySection<Entity>> entities) {
+            super(ChunkPos.getX(chunkPos), ChunkPos.getZ(chunkPos));
+
+            this.entityCounts = new CountMap.Simple<>(new HashMap<>());
+            entities.forEach(section -> {
+                if (section.getStatus().isAccessible()) {
+                    section.getEntities().forEach(entity ->
+                            this.entityCounts.increment(entity.getType())
+                    );
+                }
+            });
+        }
+
+        @Override
+        public CountMap<EntityType<?>> getEntityCounts() {
+            return this.entityCounts;
+        }
+
+        @Override
+        public String entityTypeName(EntityType<?> type) {
+            return EntityType.getKey(type).toString();
+        }
+    }
+
+
+}
diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeClientSparkPlugin.java b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeClientSparkPlugin.java
index cf5c89b..04c8785 100644
--- a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeClientSparkPlugin.java
+++ b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeClientSparkPlugin.java
@@ -28,6 +28,7 @@ import com.mojang.brigadier.suggestion.Suggestions;
 import com.mojang.brigadier.suggestion.SuggestionsBuilder;
 
 import me.lucko.spark.common.platform.PlatformInfo;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
 import me.lucko.spark.common.tick.TickHook;
 import me.lucko.spark.common.tick.TickReporter;
 import me.lucko.spark.forge.ForgeCommandSender;
@@ -35,6 +36,7 @@ import me.lucko.spark.forge.ForgePlatformInfo;
 import me.lucko.spark.forge.ForgeSparkMod;
 import me.lucko.spark.forge.ForgeTickHook;
 import me.lucko.spark.forge.ForgeTickReporter;
+import me.lucko.spark.forge.ForgeWorldInfoProvider;
 
 import net.minecraft.client.Minecraft;
 import net.minecraft.commands.CommandSource;
@@ -107,6 +109,11 @@ public class ForgeClientSparkPlugin extends ForgeSparkPlugin implements Command<CommandSourceStack>
         return Stream.of(new ForgeCommandSender(this.minecraft.player, this));
     }
 
+    @Override
+    public void executeSync(Runnable task) {
+        this.minecraft.executeIfPossible(task);
+    }
+
     @Override
     public TickHook createTickHook() {
         return new ForgeTickHook(TickEvent.Type.CLIENT);
@@ -117,6 +124,11 @@ public class ForgeClientSparkPlugin extends ForgeSparkPlugin implements Command<CommandSourceStack>
         return new ForgeTickReporter(TickEvent.Type.CLIENT);
     }
 
+    @Override
+    public WorldInfoProvider createWorldInfoProvider() {
+        return new ForgeWorldInfoProvider.Client(this.minecraft);
+    }
+
     @Override
     public PlatformInfo getPlatformInfo() {
         return new ForgePlatformInfo(PlatformInfo.Type.CLIENT);
diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java
index e341d6f..03f9952 100644
--- a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java
+++ b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java
@@ -31,6 +31,7 @@ import com.mojang.brigadier.suggestion.SuggestionsBuilder;
 
 import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
 import me.lucko.spark.common.platform.PlatformInfo;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
 import me.lucko.spark.common.tick.TickHook;
 import me.lucko.spark.common.tick.TickReporter;
 import me.lucko.spark.forge.ForgeCommandSender;
@@ -39,6 +40,7 @@ import me.lucko.spark.forge.ForgePlayerPingProvider;
 import me.lucko.spark.forge.ForgeSparkMod;
 import me.lucko.spark.forge.ForgeTickHook;
 import me.lucko.spark.forge.ForgeTickReporter;
+import me.lucko.spark.forge.ForgeWorldInfoProvider;
 
 import net.minecraft.commands.CommandSource;
 import net.minecraft.commands.CommandSourceStack;
@@ -183,6 +185,11 @@ public class ForgeServerSparkPlugin extends ForgeSparkPlugin implements Command<CommandSourceStack>
         ).map(sender -> new ForgeCommandSender(sender, this));
     }
 
+    @Override
+    public void executeSync(Runnable task) {
+        this.server.executeIfPossible(task);
+    }
+
     @Override
     public TickHook createTickHook() {
         return new ForgeTickHook(TickEvent.Type.SERVER);
@@ -198,6 +205,11 @@ public class ForgeServerSparkPlugin extends ForgeSparkPlugin implements Command<CommandSourceStack>
         return new ForgePlayerPingProvider(this.server);
     }
 
+    @Override
+    public WorldInfoProvider createWorldInfoProvider() {
+        return new ForgeWorldInfoProvider.Server(this.server);
+    }
+
     @Override
     public PlatformInfo getPlatformInfo() {
         return new ForgePlatformInfo(PlatformInfo.Type.SERVER);
diff --git a/spark-forge/src/main/resources/META-INF/accesstransformer.cfg b/spark-forge/src/main/resources/META-INF/accesstransformer.cfg
new file mode 100644
index 0000000..1e418b8
--- /dev/null
+++ b/spark-forge/src/main/resources/META-INF/accesstransformer.cfg
@@ -0,0 +1,4 @@
+public net.minecraft.server.level.ServerLevel f_143244_ # entityManager
+public net.minecraft.world.level.entity.PersistentEntitySectionManager f_157495_ # sectionStorage
+public net.minecraft.client.multiplayer.ClientLevel f_171631_ # entityStorage
+public net.minecraft.world.level.entity.TransientEntitySectionManager f_157638_ # sectionStorage
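Each line in the access transformer above widens a private field to public: the f_..._ tokens are the obfuscated SRG field names for Minecraft 1.19, and the trailing comments record their readable Mojang-mapped names. This is what permits the direct field reads in ForgeWorldInfoProvider, instead of the mixin-accessor approach used on Fabric. For example:

// These reads compile only because the AT made the members public
// (the same accesses appear in ForgeWorldInfoProvider.Server above):
PersistentEntitySectionManager<Entity> entityManager = level.entityManager; // ServerLevel field
EntitySectionStorage<Entity> cache = entityManager.sectionStorage;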
diff --git a/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitSparkPlugin.java b/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitSparkPlugin.java
index 18132c3..87d9f09 100644
--- a/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitSparkPlugin.java
+++ b/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitSparkPlugin.java
@@ -31,7 +31,6 @@ import cn.nukkit.command.Command;
 import cn.nukkit.command.CommandSender;
 import cn.nukkit.plugin.PluginBase;
 import cn.nukkit.plugin.service.ServicePriority;
-import cn.nukkit.scheduler.AsyncTask;
 
 import java.nio.file.Path;
 import java.util.logging.Level;
@@ -82,12 +81,7 @@ public class NukkitSparkPlugin extends PluginBase implements SparkPlugin {
 
     @Override
     public void executeAsync(Runnable task) {
-        getServer().getScheduler().scheduleAsyncTask(this, new AsyncTask() {
-            @Override
-            public void onRun() {
-                task.run();
-            }
-        });
+        getServer().getScheduler().scheduleTask(this, task, true);
     }
 
     @Override
diff --git a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7SparkPlugin.java b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7SparkPlugin.java
index 670e0c5..324e242 100644
--- a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7SparkPlugin.java
+++ b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7SparkPlugin.java
@@ -27,6 +27,7 @@ import me.lucko.spark.common.SparkPlatform;
 import me.lucko.spark.common.SparkPlugin;
 import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
 import me.lucko.spark.common.platform.PlatformInfo;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
 import me.lucko.spark.common.sampler.ThreadDumper;
 import me.lucko.spark.common.tick.TickHook;
 import me.lucko.spark.common.util.ClassSourceLookup;
@@ -44,6 +45,7 @@ import org.spongepowered.api.plugin.Plugin;
 import org.spongepowered.api.plugin.PluginContainer;
 import org.spongepowered.api.scheduler.AsynchronousExecutor;
 import org.spongepowered.api.scheduler.SpongeExecutorService;
+import org.spongepowered.api.scheduler.SynchronousExecutor;
 import org.spongepowered.api.text.Text;
 import org.spongepowered.api.world.Location;
 import org.spongepowered.api.world.World;
@@ -70,17 +72,19 @@ public class Sponge7SparkPlugin implements SparkPlugin {
     private final Game game;
     private final Path configDirectory;
     private final SpongeExecutorService asyncExecutor;
+    private final SpongeExecutorService syncExecutor;
 
     private SparkPlatform platform;
     private final ThreadDumper.GameThread threadDumper = new ThreadDumper.GameThread();
 
     @Inject
-    public Sponge7SparkPlugin(PluginContainer pluginContainer, Logger logger, Game game, @ConfigDir(sharedRoot = false) Path configDirectory, @AsynchronousExecutor SpongeExecutorService asyncExecutor) {
+    public Sponge7SparkPlugin(PluginContainer pluginContainer, Logger logger, Game game, @ConfigDir(sharedRoot = false) Path configDirectory, @AsynchronousExecutor SpongeExecutorService asyncExecutor, @SynchronousExecutor SpongeExecutorService syncExecutor) {
         this.pluginContainer = pluginContainer;
         this.logger = logger;
         this.game = game;
         this.configDirectory = configDirectory;
         this.asyncExecutor = asyncExecutor;
+        this.syncExecutor = syncExecutor;
     }
 
     @Listener
@@ -112,10 +116,14 @@ public class Sponge7SparkPlugin implements SparkPlugin {
 
     @Override
     public Stream<Sponge7CommandSender> getCommandSenders() {
-        return Stream.concat(
-                this.game.getServer().getOnlinePlayers().stream(),
-                Stream.of(this.game.getServer().getConsole())
-        ).map(Sponge7CommandSender::new);
+        if (this.game.isServerAvailable()) {
+            return Stream.concat(
+                    this.game.getServer().getOnlinePlayers().stream(),
+                    Stream.of(this.game.getServer().getConsole())
+            ).map(Sponge7CommandSender::new);
+        } else {
+            return Stream.of(this.game.getServer().getConsole()).map(Sponge7CommandSender::new);
+        }
     }
 
     @Override
@@ -123,6 +131,11 @@ public class Sponge7SparkPlugin implements SparkPlugin {
         this.asyncExecutor.execute(task);
     }
 
+    @Override
+    public void executeSync(Runnable task) {
+        this.syncExecutor.execute(task);
+    }
+
     @Override
     public void log(Level level, String msg) {
         if (level == Level.INFO) {
@@ -160,6 +173,15 @@ public class Sponge7SparkPlugin implements SparkPlugin {
         }
     }
 
+    @Override
+    public WorldInfoProvider createWorldInfoProvider() {
+        if (this.game.isServerAvailable()) {
+            return new Sponge7WorldInfoProvider(this.game.getServer());
+        } else {
+            return WorldInfoProvider.NO_OP;
+        }
+    }
+
     @Override
     public PlatformInfo getPlatformInfo() {
         return new Sponge7PlatformInfo(this.game);
diff --git a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7WorldInfoProvider.java b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7WorldInfoProvider.java
new file mode 100644
index 0000000..fa6fa6b
--- /dev/null
+++ b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7WorldInfoProvider.java
@@ -0,0 +1,87 @@
+/*
+ * This file is part of spark.
+ *
+ *  Copyright (c) lucko (Luck) <luck@lucko.me>
+ *  Copyright (c) contributors
+ *
+ *  This program is free software: you can redistribute it and/or modify
+ *  it under the terms of the GNU General Public License as published by
+ *  the Free Software Foundation, either version 3 of the License, or
+ *  (at your option) any later version.
+ *
+ *  This program is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU General Public License for more details.
+ *
+ *  You should have received a copy of the GNU General Public License
+ *  along with this program.  If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.sponge;
+
+import com.google.common.collect.Lists;
+
+import me.lucko.spark.common.platform.world.AbstractChunkInfo;
+import me.lucko.spark.common.platform.world.CountMap;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
+
+import org.spongepowered.api.Server;
+import org.spongepowered.api.entity.Entity;
+import org.spongepowered.api.entity.EntityType;
+import org.spongepowered.api.world.Chunk;
+import org.spongepowered.api.world.World;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+
+public class Sponge7WorldInfoProvider implements WorldInfoProvider {
+    private final Server server;
+
+    public Sponge7WorldInfoProvider(Server server) {
+        this.server = server;
+    }
+
+    @Override
+    public Result<Sponge7ChunkInfo> poll() {
+        Result<Sponge7ChunkInfo> data = new Result<>();
+
+        for (World world : this.server.getWorlds()) {
+            List<Chunk> chunks = Lists.newArrayList(world.getLoadedChunks());
+
+            List<Sponge7ChunkInfo> list = new ArrayList<>(chunks.size());
+            for (Chunk chunk : chunks) {
+                list.add(new Sponge7ChunkInfo(chunk));
+            }
+
+            data.put(world.getName(), list);
+        }
+
+        return data;
+    }
+
+    static final class Sponge7ChunkInfo extends AbstractChunkInfo<EntityType> {
+        private final CountMap<EntityType> entityCounts;
+
+        Sponge7ChunkInfo(Chunk chunk) {
+            super(chunk.getPosition().getX(), chunk.getPosition().getZ());
+
+            this.entityCounts = new CountMap.Simple<>(new HashMap<>());
+            for (Entity entity : chunk.getEntities()) {
+                this.entityCounts.increment(entity.getType());
+            }
+        }
+
+        @Override
+        public CountMap<EntityType> getEntityCounts() {
+            return this.entityCounts;
+        }
+
+        @Override
+        public String entityTypeName(EntityType type) {
+            return type.getName();
+        }
+
+    }
+}
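The same availability guard appears in both Sponge modules; condensed, the wiring is simply (every name below appears verbatim in the diffs):

WorldInfoProvider provider = game.isServerAvailable()
        ? new Sponge7WorldInfoProvider(game.getServer())
        : WorldInfoProvider.NO_OP;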
diff --git a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8SparkPlugin.java b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8SparkPlugin.java
index e867a75..68e47e3 100644
--- a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8SparkPlugin.java
+++ b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8SparkPlugin.java
@@ -27,6 +27,7 @@ import me.lucko.spark.common.SparkPlugin;
 import me.lucko.spark.common.command.sender.CommandSender;
 import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
 import me.lucko.spark.common.platform.PlatformInfo;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
 import me.lucko.spark.common.sampler.ThreadDumper;
 import me.lucko.spark.common.tick.TickHook;
 import me.lucko.spark.common.util.ClassSourceLookup;
@@ -67,6 +68,7 @@ public class Sponge8SparkPlugin implements SparkPlugin {
     private final Game game;
     private final Path configDirectory;
     private final ExecutorService asyncExecutor;
+    private final ExecutorService syncExecutor;
 
     private SparkPlatform platform;
     private final ThreadDumper.GameThread threadDumper = new ThreadDumper.GameThread();
@@ -78,6 +80,14 @@ public class Sponge8SparkPlugin implements SparkPlugin {
         this.game = game;
         this.configDirectory = configDirectory;
         this.asyncExecutor = game.asyncScheduler().executor(pluginContainer);
+
+        if (game.isServerAvailable()) {
+            this.syncExecutor = game.server().scheduler().executor(pluginContainer);
+        } else if (game.isClientAvailable()) {
+            this.syncExecutor = game.client().scheduler().executor(pluginContainer);
+        } else {
+            throw new IllegalStateException("Server and client both unavailable");
+        }
     }
 
 
@@ -114,10 +124,14 @@ public class Sponge8SparkPlugin implements SparkPlugin {
 
     @Override
     public Stream<Sponge8CommandSender> getCommandSenders() {
-        return Stream.concat(
-                this.game.server().onlinePlayers().stream(),
-                Stream.of(this.game.systemSubject())
-        ).map(Sponge8CommandSender::new);
+        if (this.game.isServerAvailable()) {
+            return Stream.concat(
+                    this.game.server().onlinePlayers().stream(),
+                    Stream.of(this.game.systemSubject())
+            ).map(Sponge8CommandSender::new);
+        } else {
+            return Stream.of(this.game.systemSubject()).map(Sponge8CommandSender::new);
+        }
     }
 
     @Override
@@ -125,6 +139,11 @@ public class Sponge8SparkPlugin implements SparkPlugin {
         this.asyncExecutor.execute(task);
     }
 
+    @Override
+    public void executeSync(Runnable task) {
+        this.syncExecutor.execute(task);
+    }
+
     @Override
     public void log(Level level, String msg) {
         if (level == Level.INFO) {
@@ -162,6 +181,15 @@ public class Sponge8SparkPlugin implements SparkPlugin {
         }
     }
 
+    @Override
+    public WorldInfoProvider createWorldInfoProvider() {
+        if (this.game.isServerAvailable()) {
+            return new Sponge8WorldInfoProvider(this.game.server());
+        } else {
+            return WorldInfoProvider.NO_OP;
+        }
+    }
+
     @Override
     public PlatformInfo getPlatformInfo() {
         return new Sponge8PlatformInfo(this.game);
diff --git a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8WorldInfoProvider.java b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8WorldInfoProvider.java
new file mode 100644
index 0000000..bff4d6e
--- /dev/null
+++ b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8WorldInfoProvider.java
@@ -0,0 +1,88 @@
+/*
+ * This file is part of spark.
+ *
+ *  Copyright (c) lucko (Luck) <luck@lucko.me>
+ *  Copyright (c) contributors
+ *
+ *  This program is free software: you can redistribute it and/or modify
+ *  it under the terms of the GNU General Public License as published by
+ *  the Free Software Foundation, either version 3 of the License, or
+ *  (at your option) any later version.
+ *
+ *  This program is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU General Public License for more details.
+ *
+ *  You should have received a copy of the GNU General Public License
+ *  along with this program.  If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.sponge;
+
+import com.google.common.collect.Lists;
+
+import me.lucko.spark.common.platform.world.AbstractChunkInfo;
+import me.lucko.spark.common.platform.world.CountMap;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
+
+import org.spongepowered.api.Server;
+import org.spongepowered.api.entity.Entity;
+import org.spongepowered.api.entity.EntityType;
+import org.spongepowered.api.entity.EntityTypes;
+import org.spongepowered.api.world.chunk.WorldChunk;
+import org.spongepowered.api.world.server.ServerWorld;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+
+public class Sponge8WorldInfoProvider implements WorldInfoProvider {
+    private final Server server;
+
+    public Sponge8WorldInfoProvider(Server server) {
+        this.server = server;
+    }
+
+    @Override
+    public Result<Sponge7ChunkInfo> poll() {
+        Result<Sponge7ChunkInfo> data = new Result<>();
+
+        for (ServerWorld world : this.server.worldManager().worlds()) {
+            List<WorldChunk> chunks = Lists.newArrayList(world.loadedChunks());
+
+            List<Sponge7ChunkInfo> list = new ArrayList<>(chunks.size());
+            for (WorldChunk chunk : chunks) {
+                list.add(new Sponge7ChunkInfo(chunk));
+            }
+
+            data.put(world.key().value(), list);
+        }
+
+        return data;
+    }
+
+    static final class Sponge7ChunkInfo extends AbstractChunkInfo<EntityType<?>> {
+        private final CountMap<EntityType<?>> entityCounts;
+
+        Sponge7ChunkInfo(WorldChunk chunk) {
+            super(chunk.chunkPosition().x(), chunk.chunkPosition().z());
+
+            this.entityCounts = new CountMap.Simple<>(new HashMap<>());
+            for (Entity entity : chunk.entities()) {
+                this.entityCounts.increment(entity.type());
+            }
+        }
+
+        @Override
+        public CountMap<EntityType<?>> getEntityCounts() {
+            return this.entityCounts;
+        }
+
+        @Override
+        public String entityTypeName(EntityType<?> type) {
+            return EntityTypes.registry().valueKey(type).value();
+        }
+
+    }
+}
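For orientation, the per-platform plumbing added throughout this commit implements two hooks on spark's common SparkPlugin interface; roughly (a sketch only: the method names match the overrides above, but the common interface is not shown in this patch, so the default body here is an assumption):

public interface SparkPlugin {
    /** Runs a task on the main game thread (server or client). */
    void executeSync(Runnable task);

    /** Creates a provider for world, chunk and entity statistics. */
    default WorldInfoProvider createWorldInfoProvider() {
        return WorldInfoProvider.NO_OP; // platforms without worlds keep the no-op
    }
}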
--
cgit

From fed6a350a80f6daa8c170770e5f300a0d5aa0894 Mon Sep 17 00:00:00 2001
From: Luck
Date: Sun, 26 Jun 2022 18:59:02 +0100
Subject: Include info about number of ticks in a profile

---
 .../common/command/modules/SamplerModule.java      |  2 +-
 .../spark/common/sampler/AbstractSampler.java      | 36 +++++++++++++++++++---
 .../me/lucko/spark/common/sampler/Sampler.java     |  2 +-
 .../lucko/spark/common/sampler/SamplerBuilder.java |  6 ++--
 .../spark/common/sampler/async/AsyncSampler.java   | 10 +++---
 .../spark/common/sampler/java/JavaSampler.java     | 12 ++++----
 .../common/sampler/java/TickedDataAggregator.java  | 15 +++++++++
 .../src/main/proto/spark/spark_sampler.proto       |  2 ++
 8 files changed, 64 insertions(+), 21 deletions(-)

(limited to 'spark-common')

diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
index 970d062..fd5cd67 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
@@ -266,7 +266,7 @@ public class SamplerModule implements CommandModule {
         if (this.activeSampler == null) {
             resp.replyPrefixed(text("There isn't an active profiler running."));
         } else {
-            long timeout = this.activeSampler.getEndTime();
+            long timeout = this.activeSampler.getAutoEndTime();
             if (timeout == -1) {
                 resp.replyPrefixed(text("There is an active profiler currently running, with no defined timeout."));
             } else {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java
index ce466a0..1c217db 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java
@@ -27,6 +27,7 @@ import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider;
 import me.lucko.spark.common.sampler.aggregator.DataAggregator;
 import me.lucko.spark.common.sampler.node.MergeMode;
 import me.lucko.spark.common.sampler.node.ThreadNode;
+import me.lucko.spark.common.tick.TickHook;
 import me.lucko.spark.common.util.ClassSourceLookup;
 
 import me.lucko.spark.proto.SparkSamplerProtos.SamplerData;
 import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata;
@@ -41,6 +42,9 @@ import java.util.concurrent.CompletableFuture;
  */
 public abstract class AbstractSampler implements Sampler {
 
+    /** The spark platform instance */
+    protected final SparkPlatform platform;
+
     /** The interval to wait between sampling, in microseconds */
     protected final int interval;
 
@@ -50,8 +54,11 @@ public abstract class AbstractSampler implements Sampler {
     /** The time when sampling first began */
     protected long startTime = -1;
 
+    /** The game tick when sampling first began */
+    protected int startTick = -1;
+
     /** The unix timestamp (in millis) when this sampler should automatically complete. */
-    protected final long endTime; // -1 for nothing
+    protected final long autoEndTime; // -1 for nothing
 
     /** A future to encapsulate the completion of this sampler instance */
     protected final CompletableFuture<Sampler> future = new CompletableFuture<>();
@@ -59,10 +66,11 @@ public abstract class AbstractSampler implements Sampler {
     /** The garbage collector statistics when profiling started */
     protected Map<String, GarbageCollectorStatistics> initialGcStats;
 
-    protected AbstractSampler(int interval, ThreadDumper threadDumper, long endTime) {
+    protected AbstractSampler(SparkPlatform platform, int interval, ThreadDumper threadDumper, long autoEndTime) {
+        this.platform = platform;
         this.interval = interval;
         this.threadDumper = threadDumper;
-        this.endTime = endTime;
+        this.autoEndTime = autoEndTime;
     }
 
     @Override
@@ -74,8 +82,8 @@ public abstract class AbstractSampler implements Sampler {
     }
 
     @Override
-    public long getEndTime() {
-        return this.endTime;
+    public long getAutoEndTime() {
+        return this.autoEndTime;
     }
 
     @Override
@@ -91,6 +99,16 @@ public abstract class AbstractSampler implements Sampler {
         return this.initialGcStats;
     }
 
+    @Override
+    public void start() {
+        this.startTime = System.currentTimeMillis();
+
+        TickHook tickHook = this.platform.getTickHook();
+        if (tickHook != null) {
+            this.startTick = tickHook.getCurrentTick();
+        }
+    }
+
     protected void writeMetadataToProto(SamplerData.Builder proto, SparkPlatform platform, CommandSender creator, String comment, DataAggregator dataAggregator) {
         SamplerMetadata.Builder metadata = SamplerMetadata.newBuilder()
                 .setPlatformMetadata(platform.getPlugin().getPlatformInfo().toData().toProto())
@@ -105,6 +123,14 @@ public abstract class AbstractSampler implements Sampler {
             metadata.setComment(comment);
         }
 
+        if (this.startTick != -1) {
+            TickHook tickHook = this.platform.getTickHook();
+            if (tickHook != null) {
+                int numberOfTicks = tickHook.getCurrentTick() - this.startTick;
+                metadata.setNumberOfTicks(numberOfTicks);
+            }
+        }
+
         try {
             metadata.setPlatformStatistics(platform.getStatisticsProvider().getPlatformStatistics(getInitialGcStats()));
         } catch (Exception e) {
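In effect, the tick count recorded in the metadata is just the difference between two readings of the platform's tick counter; for example (TickHook is the real interface used above, the numbers are illustrative):

int startTick = tickHook.getCurrentTick();  // e.g. 12000, captured in start()
// ... the profile runs for 30 seconds at a steady 20 TPS ...
int numberOfTicks = tickHook.getCurrentTick() - startTick;  // 12600 - 12000 = 600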
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
index 845043f..84f2da1 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
@@ -57,7 +57,7 @@ public interface Sampler {
      *
      * @return the end time, or -1 if undefined
      */
-    long getEndTime();
+    long getAutoEndTime();
 
     /**
      * Gets a future to encapsulate the completion of the sampler
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java
index 88cf018..88b9d91 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java
@@ -97,11 +97,11 @@ public class SamplerBuilder {
 
         Sampler sampler;
         if (this.ticksOver != -1 && this.tickHook != null) {
-            sampler = new JavaSampler(intervalMicros, this.threadDumper, this.threadGrouper, this.timeout, this.ignoreSleeping, this.ignoreNative, this.tickHook, this.ticksOver);
+            sampler = new JavaSampler(platform, intervalMicros, this.threadDumper, this.threadGrouper, this.timeout, this.ignoreSleeping, this.ignoreNative, this.tickHook, this.ticksOver);
         } else if (this.useAsyncProfiler && !(this.threadDumper instanceof ThreadDumper.Regex) && AsyncProfilerAccess.INSTANCE.checkSupported(platform)) {
-            sampler = new AsyncSampler(intervalMicros, this.threadDumper, this.threadGrouper, this.timeout);
+            sampler = new AsyncSampler(platform, intervalMicros, this.threadDumper, this.threadGrouper, this.timeout);
         } else {
-            sampler = new JavaSampler(intervalMicros, this.threadDumper, this.threadGrouper, this.timeout, this.ignoreSleeping, this.ignoreNative);
+            sampler = new JavaSampler(platform, intervalMicros, this.threadDumper, this.threadGrouper, this.timeout, this.ignoreSleeping, this.ignoreNative);
         }
 
         sampler.start();
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java
index 5cb7fdc..d8288da 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java
@@ -65,8 +65,8 @@ public class AsyncSampler extends AbstractSampler {
     /** The executor used for timeouts */
     private ScheduledExecutorService timeoutExecutor;
 
-    public AsyncSampler(int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime) {
-        super(interval, threadDumper, endTime);
+    public AsyncSampler(SparkPlatform platform, int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime) {
+        super(platform, interval, threadDumper, endTime);
         this.profiler = AsyncProfilerAccess.INSTANCE.getProfiler();
         this.dataAggregator = new AsyncDataAggregator(threadGrouper);
     }
@@ -90,7 +90,7 @@ public class AsyncSampler extends AbstractSampler {
      */
     @Override
     public void start() {
-        this.startTime = System.currentTimeMillis();
+        super.start();
 
         try {
             this.outputFile = TemporaryFiles.create("spark-profile-", ".jfr.tmp");
@@ -120,11 +120,11 @@ public class AsyncSampler extends AbstractSampler {
     }
 
     private void scheduleTimeout() {
-        if (this.endTime == -1) {
+        if (this.autoEndTime == -1) {
             return;
         }
 
-        long delay = this.endTime - System.currentTimeMillis();
+        long delay = this.autoEndTime - System.currentTimeMillis();
         if (delay <= 0) {
             return;
         }
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java
index cfa0a0f..913faee 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java
@@ -63,19 +63,19 @@ public class JavaSampler extends AbstractSampler implements Runnable {
     /** Responsible for aggregating and then outputting collected sampling data */
     private final JavaDataAggregator dataAggregator;
 
-    public JavaSampler(int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime, boolean ignoreSleeping, boolean ignoreNative) {
-        super(interval, threadDumper, endTime);
+    public JavaSampler(SparkPlatform platform, int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime, boolean ignoreSleeping, boolean ignoreNative) {
+        super(platform, interval, threadDumper, endTime);
         this.dataAggregator = new SimpleDataAggregator(this.workerPool, threadGrouper, interval, ignoreSleeping, ignoreNative);
     }
 
-    public JavaSampler(int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime, boolean ignoreSleeping, boolean ignoreNative, TickHook tickHook, int tickLengthThreshold) {
-        super(interval, threadDumper, endTime);
+    public JavaSampler(SparkPlatform platform, int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime, boolean ignoreSleeping, boolean ignoreNative, TickHook tickHook, int tickLengthThreshold) {
+        super(platform, interval, threadDumper, endTime);
         this.dataAggregator = new TickedDataAggregator(this.workerPool, threadGrouper, interval, ignoreSleeping, ignoreNative, tickHook, tickLengthThreshold);
     }
 
     @Override
     public void start() {
-        this.startTime = System.currentTimeMillis();
+        super.start();
         this.task = this.workerPool.scheduleAtFixedRate(this, 0, this.interval, TimeUnit.MICROSECONDS);
     }
 
@@ -89,7 +89,7 @@ public class JavaSampler extends AbstractSampler implements Runnable {
         // this is effectively synchronized, the worker pool will not allow this task
         // to concurrently execute.
         try {
-            if (this.endTime != -1 && this.endTime <= System.currentTimeMillis()) {
+            if (this.autoEndTime != -1 && this.autoEndTime <= System.currentTimeMillis()) {
                 this.future.complete(this);
                 stop();
                 return;
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java
index e817828..e062f31 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java
@@ -31,6 +31,7 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicInteger;
 
 /**
  * Implementation of {@link DataAggregator} which supports only including sampling data from "ticks"
@@ -47,6 +48,9 @@ public class TickedDataAggregator extends JavaDataAggregator {
     /** The expected number of samples in each tick */
     private final int expectedSize;
 
+    /** The number of ticks aggregated so far */
+    private final AtomicInteger numberOfTicks = new AtomicInteger();
+
     private final Object mutex = new Object();
 
     // state
@@ -64,10 +68,16 @@ public class TickedDataAggregator extends JavaDataAggregator {
 
     @Override
    public SamplerMetadata.DataAggregator getMetadata() {
+        // push the current tick (so numberOfTicks is accurate)
+        synchronized (this.mutex) {
+            pushCurrentTick();
+        }
+
         return SamplerMetadata.DataAggregator.newBuilder()
                 .setType(SamplerMetadata.DataAggregator.Type.TICKED)
                 .setThreadGrouper(this.threadGrouper.asProto())
                 .setTickLengthThreshold(this.tickLengthThreshold)
+                .setNumberOfIncludedTicks(this.numberOfTicks.get())
                 .build();
     }
 
@@ -97,6 +107,7 @@ public class TickedDataAggregator extends JavaDataAggregator {
             return;
         }
 
+        this.numberOfTicks.incrementAndGet();
         this.workerPool.submit(currentData);
     }
 
@@ -110,6 +121,10 @@ public class TickedDataAggregator extends JavaDataAggregator {
         return super.exportData();
     }
 
+    public int getNumberOfTicks() {
+        return this.numberOfTicks.get();
+    }
+
     private final class TickList implements Runnable {
         private final List<ThreadInfo> list;
diff --git a/spark-common/src/main/proto/spark/spark_sampler.proto b/spark-common/src/main/proto/spark/spark_sampler.proto
index 51bdd64..8d9512a 100644
--- a/spark-common/src/main/proto/spark/spark_sampler.proto
+++ b/spark-common/src/main/proto/spark/spark_sampler.proto
@@ -25,6 +25,7 @@ message SamplerMetadata {
   SystemStatistics system_statistics = 9;
   map<string, string> server_configurations = 10;
   int64 end_time = 11;
+  int32 number_of_ticks = 12;
 
   message ThreadDumper {
     Type type = 1;
@@ -42,6 +43,7 @@ message SamplerMetadata {
     Type type = 1;
     ThreadGrouper thread_grouper = 2;
     int64 tick_length_threshold = 3; // optional
+    int32 number_of_included_ticks = 4; // optional
 
     enum Type {
       SIMPLE = 0;
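For viewers consuming the profile, the two new fields surface through the generated protobuf accessors; a hedged sketch (standard protobuf-Java getter naming is assumed, and `data` stands for an already-parsed SamplerData message):

SamplerMetadata metadata = data.getMetadata();
int totalTicks = metadata.getNumberOfTicks();           // new field 12; 0 when the platform had no tick hook
int included = metadata.getDataAggregator().getNumberOfIncludedTicks();  // new field 4; ticked aggregator only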
--
cgit

From d5d5cb10714b0993ec91d6a2b523b661c1314917 Mon Sep 17 00:00:00 2001
From: Luck
Date: Sun, 26 Jun 2022 19:19:49 +0100
Subject: Update adventure version

---
 spark-bukkit/build.gradle                                        | 8 +-------
 spark-bungeecord/build.gradle                                    | 8 +-------
 spark-common/build.gradle                                        | 9 ++++++---
 .../me/lucko/spark/common/command/modules/ActivityLogModule.java | 1 +
 4 files changed, 9 insertions(+), 17 deletions(-)

(limited to 'spark-common')

diff --git a/spark-bukkit/build.gradle b/spark-bukkit/build.gradle
index 8e111e8..7144291 100644
--- a/spark-bukkit/build.gradle
+++ b/spark-bukkit/build.gradle
@@ -4,13 +4,7 @@ plugins {
 
 dependencies {
     implementation project(':spark-common')
-    implementation('me.lucko:adventure-platform-bukkit:4.9.4') {
-        exclude(module: 'adventure-api')
-        exclude(module: 'checker-qual')
-        exclude(module: 'annotations')
-        exclude(module: 'adventure-text-serializer-gson')
-        exclude(module: 'adventure-text-serializer-legacy')
-    }
+    implementation 'net.kyori:adventure-platform-bukkit:4.1.1'
 
     compileOnly 'com.destroystokyo.paper:paper-api:1.16.4-R0.1-SNAPSHOT'
 
     // placeholders
diff --git a/spark-bungeecord/build.gradle b/spark-bungeecord/build.gradle
index ccea89d..1e92621 100644
--- a/spark-bungeecord/build.gradle
+++ b/spark-bungeecord/build.gradle
@@ -4,13 +4,7 @@ plugins {
 
 dependencies {
     implementation project(':spark-common')
-    implementation('me.lucko:adventure-platform-bungeecord:4.9.4') {
-        exclude(module: 'adventure-api')
-        exclude(module: 'checker-qual')
-        exclude(module: 'annotations')
-        exclude(module: 'adventure-text-serializer-gson')
-        exclude(module: 'adventure-text-serializer-legacy')
-    }
+    implementation 'net.kyori:adventure-platform-bungeecord:4.1.1'
 
     compileOnly 'net.md-5:bungeecord-api:1.16-R0.4'
 }
diff --git a/spark-common/build.gradle b/spark-common/build.gradle
index 554eec2..bc493f3 100644
--- a/spark-common/build.gradle
+++ b/spark-common/build.gradle
@@ -15,15 +15,18 @@ dependencies {
     implementation 'com.squareup.okio:okio:1.17.3'
     implementation 'net.bytebuddy:byte-buddy-agent:1.11.0'
     implementation 'org.tukaani:xz:1.8'
-    api('net.kyori:adventure-api:4.9.3') {
+    api('net.kyori:adventure-api:4.11.0') {
+        exclude(module: 'adventure-bom')
         exclude(module: 'checker-qual')
         exclude(module: 'annotations')
     }
-    api('net.kyori:adventure-text-serializer-gson:4.9.3') {
+    api('net.kyori:adventure-text-serializer-gson:4.11.0') {
+        exclude(module: 'adventure-bom')
         exclude(module: 'adventure-api')
         exclude(module: 'gson')
     }
-    api('net.kyori:adventure-text-serializer-legacy:4.9.3') {
+    api('net.kyori:adventure-text-serializer-legacy:4.11.0') {
+        exclude(module: 'adventure-bom')
         exclude(module: 'adventure-api')
     }
     implementation('net.kyori:adventure-text-feature-pagination:4.0.0-SNAPSHOT') {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/ActivityLogModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/ActivityLogModule.java
index b777f3e..6252ac7 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/ActivityLogModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/ActivityLogModule.java
@@ -50,6 +50,7 @@ import static net.kyori.adventure.text.format.TextDecoration.BOLD;
 public class ActivityLogModule implements CommandModule, RowRenderer<Activity> {
 
     private final Pagination.Builder pagination = Pagination.builder()
+            .width(45)
             .renderer(new Renderer() {
                 @Override
                 public Component renderEmpty() {
--
cgit

From 7f422948755c2988180c32fda9554ea1531949c2 Mon Sep 17 00:00:00 2001
From: Ben Kerllenevich
Date: Tue, 28 Jun 2022 14:57:36 -0400
Subject: Better handling of Paper's split config system (#218)

---
 .../spark/bukkit/BukkitServerConfigProvider.java   | 59 ++++++++++++++++++----
 .../serverconfig/AbstractServerConfigProvider.java | 17 ++-----
 .../common/platform/serverconfig/ConfigParser.java | 20 +++++++-
 3 files changed, 71 insertions(+), 25 deletions(-)

(limited to 'spark-common')

diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java
index ff1b55f..bc1bdf8 100644
--- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java
+++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java
@@ -22,6 +22,7 @@ package me.lucko.spark.bukkit;
 
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSet;
+import com.google.common.collect.Maps;
 
 import com.google.gson.GsonBuilder;
 import com.google.gson.JsonSerializer;
@@ -29,6 +30,8 @@ import me.lucko.spark.common.platform.serverconfig.AbstractServerConfigProvider;
 import me.lucko.spark.common.platform.serverconfig.ConfigParser;
 import me.lucko.spark.common.platform.serverconfig.PropertiesConfigParser;
 
+import org.bukkit.Bukkit;
+import org.bukkit.World;
 import org.bukkit.configuration.MemorySection;
 import org.bukkit.configuration.file.YamlConfiguration;
 
@@ -36,6 +39,9 @@ import co.aikar.timings.TimingsManager;
 
 import java.io.BufferedReader;
 import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
@@ -58,15 +64,8 @@ public class BukkitServerConfigProvider extends AbstractServerConfigProvider {
         gson.registerTypeAdapter(MemorySection.class, (JsonSerializer<MemorySection>) (obj, type, ctx) -> ctx.serialize(obj.getValues(false)));
     }
 
-    @Override
-    protected String rewriteConfigPath(String path) {
-        return path.startsWith("config/")
-                ? path.substring("config/".length())
-                : path;
-    }
-
-    private enum YamlConfigParser implements ConfigParser {
-        INSTANCE;
+    private static class YamlConfigParser implements ConfigParser {
+        public static final YamlConfigParser INSTANCE = new YamlConfigParser();
 
         @Override
         public Map<String, Object> parse(BufferedReader reader) throws IOException {
@@ -75,14 +74,52 @@ public class BukkitServerConfigProvider extends AbstractServerConfigProvider {
         }
     }
 
+    // Paper 1.19+ split config layout
+    private static class SplitYamlConfigParser extends YamlConfigParser {
+        public static final SplitYamlConfigParser INSTANCE = new SplitYamlConfigParser();
+
+        @Override
+        public Map<String, Object> parse(String prefix) throws IOException {
+            Path configDir = Paths.get("config");
+            if (!Files.exists(configDir)) {
+                return null;
+            }
+
+            Map<String, Object> configs = Maps.newHashMap();
+
+            parseIfExists(configs,
+                    "global.yml",
+                    configDir.resolve(prefix + "-global.yml")
+            );
+            parseIfExists(configs,
+                    "world-defaults.yml",
+                    configDir.resolve(prefix + "-world-defaults.yml")
+            );
+            for (World world : Bukkit.getWorlds()) {
+                parseIfExists(configs,
+                        world.getName() + ".yml",
+                        world.getWorldFolder().toPath().resolve(prefix + "-world.yml")
+                );
+            }
+
+            return configs;
+        }
+
+        private void parseIfExists(Map<String, Object> configs, String name, Path path) throws IOException {
+            Map<String, Object> values = parse(path);
+            if (values != null) {
+                configs.put(name, values);
+            }
+        }
+    }
+
     static {
         ImmutableMap.Builder<String, ConfigParser> files = ImmutableMap.builder()
                 .put("server.properties", PropertiesConfigParser.INSTANCE)
                 .put("bukkit.yml", YamlConfigParser.INSTANCE)
                 .put("spigot.yml", YamlConfigParser.INSTANCE)
                 .put("paper.yml", YamlConfigParser.INSTANCE)
-                .put("config/paper-global.yml", YamlConfigParser.INSTANCE)
-                .put("config/paper-world-defaults.yml", YamlConfigParser.INSTANCE)
+                .put("paper", SplitYamlConfigParser.INSTANCE)
                 .put("purpur.yml", YamlConfigParser.INSTANCE);
 
         for (String config : getSystemPropertyList("spark.serverconfigs.extra")) {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java
index 0eef111..5a14382 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java
@@ -73,9 +73,7 @@ public abstract class AbstractServerConfigProvider implements ServerConfigProvider
             }
 
             delete(json, this.hiddenPaths);
-
-            String name = rewriteConfigPath(path);
-            builder.put(name, json);
+            builder.put(path, json);
         } catch (Exception e) {
             e.printStackTrace();
         }
@@ -85,25 +83,18 @@ public abstract class AbstractServerConfigProvider implements ServerConfigProvider
     }
 
     private JsonElement load(String path, ConfigParser parser) throws IOException {
-        Path filePath = Paths.get(path);
-        if (!Files.exists(filePath)) {
+        Map<String, Object> values = parser.parse(path);
+        if (values == null) {
             return null;
         }
 
-        try (BufferedReader reader = Files.newBufferedReader(filePath, StandardCharsets.UTF_8)) {
-            Map<String, Object> values = parser.parse(reader);
-            return this.gson.toJsonTree(values);
-        }
+        return this.gson.toJsonTree(values);
     }
 
     protected void customiseGson(GsonBuilder gson) {
 
     }
 
-    protected String rewriteConfigPath(String path) {
-        return path;
-    }
-
     /**
      * Deletes the given paths from the json element.
     *
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ConfigParser.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ConfigParser.java
index 2dd15fe..dfbf816 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ConfigParser.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ConfigParser.java
@@ -22,10 +22,28 @@ package me.lucko.spark.common.platform.serverconfig;
 
 import java.io.BufferedReader;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
 import java.util.Map;
 
 public interface ConfigParser {
 
-    Map<String, Object> parse(BufferedReader reader) throws IOException;
+    default Map<String, Object> parse(String file) throws IOException {
+        return parse(Paths.get(file));
+    }
+
+    default Map<String, Object> parse(Path file) throws IOException {
+        if (!Files.exists(file)) {
+            return null;
+        }
+
+        try (BufferedReader reader = Files.newBufferedReader(file, StandardCharsets.UTF_8)) {
+            return this.parse(reader);
+        }
+    }
+
+    Map<String, Object> parse(BufferedReader reader) throws IOException;
 
 }
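With this parser registered for the "paper" entry (renamed to "paper/" in the follow-up commit below), polling the server configuration fans out to several files rather than one; roughly (a sketch, the world names are illustrative):

Map<String, Object> configs = SplitYamlConfigParser.INSTANCE.parse("paper");
// -> keys such as "global.yml", "world-defaults.yml", "world.yml", "world_nether.yml"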
b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java @@ -22,12 +22,15 @@ package me.lucko.spark.bukkit; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; -import com.google.common.collect.Maps; +import com.google.gson.Gson; import com.google.gson.GsonBuilder; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; import com.google.gson.JsonSerializer; import me.lucko.spark.common.platform.serverconfig.AbstractServerConfigProvider; import me.lucko.spark.common.platform.serverconfig.ConfigParser; +import me.lucko.spark.common.platform.serverconfig.ExcludedConfigFilter; import me.lucko.spark.common.platform.serverconfig.PropertiesConfigParser; import org.bukkit.Bukkit; @@ -45,6 +48,7 @@ import java.nio.file.Paths; import java.util.Arrays; import java.util.Collection; import java.util.Collections; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -59,13 +63,21 @@ public class BukkitServerConfigProvider extends AbstractServerConfigProvider { super(FILES, HIDDEN_PATHS); } - @Override - protected void customiseGson(GsonBuilder gson) { - gson.registerTypeAdapter(MemorySection.class, (JsonSerializer) (obj, type, ctx) -> ctx.serialize(obj.getValues(false))); - } - private static class YamlConfigParser implements ConfigParser { public static final YamlConfigParser INSTANCE = new YamlConfigParser(); + protected static final Gson GSON = new GsonBuilder() + .registerTypeAdapter(MemorySection.class, (JsonSerializer) (obj, type, ctx) -> ctx.serialize(obj.getValues(false))) + .create(); + + @Override + public JsonElement load(String file, ExcludedConfigFilter filter) throws IOException { + Map values = this.parse(Paths.get(file)); + if (values == null) { + return null; + } + + return filter.apply(GSON.toJsonTree(values)); + } @Override public Map parse(BufferedReader reader) throws IOException { @@ -79,37 +91,40 @@ public class BukkitServerConfigProvider extends AbstractServerConfigProvider { public static final SplitYamlConfigParser INSTANCE = new SplitYamlConfigParser(); @Override - public Map parse(String prefix) throws IOException { + public JsonElement load(String group, ExcludedConfigFilter filter) throws IOException { + String prefix = group.replace("/", ""); + Path configDir = Paths.get("config"); if (!Files.exists(configDir)) { return null; } - Map configs = Maps.newHashMap(); + JsonObject root = new JsonObject(); - parseIfExists(configs, - "global.yml", - configDir.resolve(prefix + "-global.yml") - ); - parseIfExists(configs, - "world-defaults.yml", - configDir.resolve(prefix + "-world-defaults.yml") - ); - for (World world : Bukkit.getWorlds()) { - parseIfExists(configs, - world.getName() + ".yml", - world.getWorldFolder().toPath().resolve(prefix + "-world.yml") - ); + for (Map.Entry entry : getNestedFiles(configDir, prefix).entrySet()) { + String fileName = entry.getKey(); + Path path = entry.getValue(); + + Map values = this.parse(path); + if (values == null) { + continue; + } + + // apply the filter individually to each nested file + root.add(fileName, filter.apply(GSON.toJsonTree(values))); } - return configs; + return root; } - private void parseIfExists(Map configs, String name, Path path) throws IOException { - Map values = parse(path); - if (values != null) { - configs.put(name, values); + private static Map getNestedFiles(Path configDir, String prefix) { + Map files = new LinkedHashMap<>(); + files.put("global.yml", configDir.resolve(prefix + "-global.yml")); + 
files.put("world-defaults.yml", configDir.resolve(prefix + "-world-defaults.yml")); + for (World world : Bukkit.getWorlds()) { + files.put(world.getName() + ".yml", world.getWorldFolder().toPath().resolve(prefix + "-world.yml")); } + return files; } } @@ -119,7 +134,7 @@ public class BukkitServerConfigProvider extends AbstractServerConfigProvider { .put("bukkit.yml", YamlConfigParser.INSTANCE) .put("spigot.yml", YamlConfigParser.INSTANCE) .put("paper.yml", YamlConfigParser.INSTANCE) - .put("paper", SplitYamlConfigParser.INSTANCE) + .put("paper/", SplitYamlConfigParser.INSTANCE) .put("purpur.yml", YamlConfigParser.INSTANCE); for (String config : getSystemPropertyList("spark.serverconfigs.extra")) { diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java index 5a14382..501851a 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java @@ -21,24 +21,10 @@ package me.lucko.spark.common.platform.serverconfig; import com.google.common.collect.ImmutableMap; -import com.google.gson.Gson; -import com.google.gson.GsonBuilder; import com.google.gson.JsonElement; -import com.google.gson.JsonObject; -import java.io.BufferedReader; -import java.io.IOException; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.util.Arrays; import java.util.Collection; -import java.util.Collections; -import java.util.Deque; -import java.util.LinkedList; import java.util.Map; -import java.util.stream.Collectors; /** * Abstract implementation of {@link ServerConfigProvider}. @@ -48,31 +34,23 @@ import java.util.stream.Collectors; */ public abstract class AbstractServerConfigProvider implements ServerConfigProvider { private final Map files; - private final Collection hiddenPaths; - - private final Gson gson; + private final ExcludedConfigFilter hiddenPathFilters; protected AbstractServerConfigProvider(Map files, Collection hiddenPaths) { this.files = files; - this.hiddenPaths = hiddenPaths; - - GsonBuilder gson = new GsonBuilder(); - customiseGson(gson); - this.gson = gson.create(); + this.hiddenPathFilters = new ExcludedConfigFilter(hiddenPaths); } @Override public final Map loadServerConfigurations() { ImmutableMap.Builder builder = ImmutableMap.builder(); - this.files.forEach((path, reader) -> { + this.files.forEach((path, parser) -> { try { - JsonElement json = load(path, reader); + JsonElement json = parser.load(path, this.hiddenPathFilters); if (json == null) { return; } - - delete(json, this.hiddenPaths); builder.put(path, json); } catch (Exception e) { e.printStackTrace(); @@ -82,71 +60,4 @@ public abstract class AbstractServerConfigProvider implements ServerConfigProvid return builder.build(); } - private JsonElement load(String path, ConfigParser parser) throws IOException { - Map values = parser.parse(path); - if (values == null) { - return null; - } - - return this.gson.toJsonTree(values); - } - - protected void customiseGson(GsonBuilder gson) { - - } - - /** - * Deletes the given paths from the json element. 
-     *
-     * @param json the json element
-     * @param paths the paths to delete
-     */
-    private static void delete(JsonElement json, Collection<String> paths) {
-        for (String path : paths) {
-            Deque<String> pathDeque = new LinkedList<>(Arrays.asList(path.split("\\.")));
-            delete(json, pathDeque);
-        }
-    }
-
-    private static void delete(JsonElement json, Deque<String> path) {
-        if (path.isEmpty()) {
-            return;
-        }
-        if (!json.isJsonObject()) {
-            return;
-        }
-
-        JsonObject jsonObject = json.getAsJsonObject();
-        String expected = path.removeFirst().replace("<dot>", ".");
-
-        Collection<String> keys;
-        if (expected.equals("*")) {
-            keys = jsonObject.entrySet().stream()
-                    .map(Map.Entry::getKey)
-                    .collect(Collectors.toList());
-        } else if (expected.endsWith("*")) {
-            String pattern = expected.substring(0, expected.length() - 1);
-            keys = jsonObject.entrySet().stream()
-                    .map(Map.Entry::getKey)
-                    .filter(key -> key.startsWith(pattern))
-                    .collect(Collectors.toList());
-        } else if (jsonObject.has(expected)) {
-            keys = Collections.singletonList(expected);
-        } else {
-            keys = Collections.emptyList();
-        }
-
-        for (String key : keys) {
-            if (path.isEmpty()) {
-                jsonObject.remove(key);
-            } else {
-                Deque<String> pathCopy = keys.size() > 1
-                        ? new LinkedList<>(path)
-                        : path;
-
-                delete(jsonObject.get(key), pathCopy);
-            }
-        }
-    }
-
 }
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ConfigParser.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ConfigParser.java
index dfbf816..675a32e 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ConfigParser.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ConfigParser.java
@@ -20,19 +20,18 @@ package me.lucko.spark.common.platform.serverconfig;

+import com.google.gson.JsonElement;
+
 import java.io.BufferedReader;
 import java.io.IOException;
 import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Path;
-import java.nio.file.Paths;
 import java.util.Map;

 public interface ConfigParser {

-    default Map<String, Object> parse(String file) throws IOException {
-        return parse(Paths.get(file));
-    }
+    JsonElement load(String file, ExcludedConfigFilter filter) throws IOException;

     default Map<String, Object> parse(Path file) throws IOException {
         if (!Files.exists(file)) {
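
The interface change above inverts the old flow: instead of the provider parsing to a raw map and serialising afterwards, each parser now returns an already-filtered JsonElement itself. A minimal self-contained analogue of the new contract, with the filter stubbed out as the identity (names here are illustrative, not spark API):

import com.google.gson.Gson;
import com.google.gson.JsonElement;

import java.io.IOException;
import java.io.StringReader;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

public class LoadContractSketch {
    private static final Gson GSON = new Gson();

    // Stand-in for ExcludedConfigFilter#apply; the real filter removes
    // excluded paths, this one just passes the tree through.
    static JsonElement filter(JsonElement json) {
        return json;
    }

    // parse(): raw key/value map, as the old contract produced.
    static Map<String, Object> parse(String contents) throws IOException {
        Properties properties = new Properties();
        properties.load(new StringReader(contents));
        Map<String, Object> values = new HashMap<>();
        properties.forEach((k, v) -> values.put(k.toString(), v));
        return values;
    }

    public static void main(String[] args) throws IOException {
        // load(): parse first, then convert and filter before returning.
        JsonElement json = filter(GSON.toJsonTree(parse("max-players=20")));
        System.out.println(json); // {"max-players":"20"}
    }
}
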
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ExcludedConfigFilter.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ExcludedConfigFilter.java
new file mode 100644
index 0000000..c11c7f8
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ExcludedConfigFilter.java
@@ -0,0 +1,97 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck)
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform.serverconfig;
+
+import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
+
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Deque;
+import java.util.LinkedList;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+/**
+ * Filters excluded paths from {@link JsonElement}s (parsed configuration files).
+ */
+public class ExcludedConfigFilter {
+    private final Collection<String> pathsToExclude;
+
+    public ExcludedConfigFilter(Collection<String> pathsToExclude) {
+        this.pathsToExclude = pathsToExclude;
+    }
+
+    /**
+     * Deletes the excluded paths from the json element.
+     *
+     * @param json the json element
+     */
+    public JsonElement apply(JsonElement json) {
+        for (String path : this.pathsToExclude) {
+            Deque<String> pathDeque = new LinkedList<>(Arrays.asList(path.split("\\.")));
+            delete(json, pathDeque);
+        }
+        return json;
+    }
+
+    private static void delete(JsonElement json, Deque<String> path) {
+        if (path.isEmpty()) {
+            return;
+        }
+        if (!json.isJsonObject()) {
+            return;
+        }
+
+        JsonObject jsonObject = json.getAsJsonObject();
+        String expected = path.removeFirst().replace("<dot>", ".");
+
+        Collection<String> keys;
+        if (expected.equals("*")) {
+            keys = jsonObject.entrySet().stream()
+                    .map(Map.Entry::getKey)
+                    .collect(Collectors.toList());
+        } else if (expected.endsWith("*")) {
+            String pattern = expected.substring(0, expected.length() - 1);
+            keys = jsonObject.entrySet().stream()
+                    .map(Map.Entry::getKey)
+                    .filter(key -> key.startsWith(pattern))
+                    .collect(Collectors.toList());
+        } else if (jsonObject.has(expected)) {
+            keys = Collections.singletonList(expected);
+        } else {
+            keys = Collections.emptyList();
+        }
+
+        for (String key : keys) {
+            if (path.isEmpty()) {
+                jsonObject.remove(key);
+            } else {
+                Deque<String> pathCopy = keys.size() > 1
+                        ? new LinkedList<>(path)
+                        : path;
+
+                delete(jsonObject.get(key), pathCopy);
+            }
+        }
+    }
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesConfigParser.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesConfigParser.java
index 4c7c2c1..344ba1c 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesConfigParser.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesConfigParser.java
@@ -20,8 +20,12 @@ package me.lucko.spark.common.platform.serverconfig;

+import com.google.gson.Gson;
+import com.google.gson.JsonElement;
+
 import java.io.BufferedReader;
 import java.io.IOException;
+import java.nio.file.Paths;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Properties;
@@ -32,6 +36,18 @@ import java.util.Properties;
 public enum PropertiesConfigParser implements ConfigParser {
     INSTANCE;

+    private static final Gson GSON = new Gson();
+
+    @Override
+    public JsonElement load(String file, ExcludedConfigFilter filter) throws IOException {
+        Map<String, Object> values = this.parse(Paths.get(file));
+        if (values == null) {
+            return null;
+        }
+
+        return filter.apply(GSON.toJsonTree(values));
+    }
+
     @Override
     public Map<String, Object> parse(BufferedReader reader) throws IOException {
         Properties properties = new Properties();
-- cgit
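
That completes the refactor: the provider-level delete() pass is now a reusable ExcludedConfigFilter that parsers apply themselves. A path segment written as "rcon<dot>password" matches the literal key "rcon.password", "*" removes every key at a level, and a trailing "*" removes keys by prefix. A usage sketch against the class added above (assumes spark-common and Gson 2.8.6+ on the classpath; the sample JSON is invented):

import com.google.gson.JsonElement;
import com.google.gson.JsonParser;

import me.lucko.spark.common.platform.serverconfig.ExcludedConfigFilter;

import java.util.Arrays;

public class FilterDemo {
    public static void main(String[] args) {
        JsonElement json = JsonParser.parseString(
                "{\"settings\":{\"bungeecord\":true,\"allow-end\":true},"
                + "\"database\":{\"username\":\"root\",\"password\":\"hunter2\"}}");

        // "settings.bungeecord" removes one nested key; "database" removes
        // the whole object.
        ExcludedConfigFilter filter = new ExcludedConfigFilter(
                Arrays.asList("settings.bungeecord", "database"));

        System.out.println(filter.apply(json));
        // {"settings":{"allow-end":true}}
    }
}
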
From e5fc5ab0ac0016b8f4018f7f88d4603098d022c6 Mon Sep 17 00:00:00 2001
From: Luck
Date: Tue, 28 Jun 2022 21:13:07 +0100
Subject: Maintain order of exported config files

---
 .../common/platform/serverconfig/ServerConfigProvider.java | 11 ++++-------
 1 file changed, 4 insertions(+), 7 deletions(-)
(limited to 'spark-common')

diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java
index 1fc2391..c66305f 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java
@@ -23,8 +23,8 @@ package me.lucko.spark.common.platform.serverconfig;
 import com.google.gson.JsonElement;

 import java.util.Collections;
+import java.util.LinkedHashMap;
 import java.util.Map;
-import java.util.stream.Collectors;

 /**
  * Function to export server configuration files for access within the spark viewer.
@@ -43,12 +43,9 @@ public interface ServerConfigProvider {
     Map<String, JsonElement> loadServerConfigurations();

     default Map<String, String> exportServerConfigurations() {
-        return loadServerConfigurations().entrySet()
-                .stream()
-                .collect(Collectors.toMap(
-                        Map.Entry::getKey,
-                        e -> e.getValue().toString()
-                ));
+        Map<String, String> map = new LinkedHashMap<>();
+        loadServerConfigurations().forEach((key, value) -> map.put(key, value.toString()));
+        return map;
     }

     /**
-- cgit
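
The replacement works because LinkedHashMap preserves insertion order, whereas Collectors.toMap makes no ordering promise for the map it collects into. A quick illustration (the file names mirror the FILES map; the values are dummies):

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.stream.Collectors;

public class OrderSketch {
    public static void main(String[] args) {
        Map<String, String> source = new LinkedHashMap<>();
        source.put("server.properties", "{}");
        source.put("bukkit.yml", "{}");
        source.put("spigot.yml", "{}");

        // The old approach: no ordering guarantee on the collected map.
        Map<String, String> unordered = source.entrySet().stream()
                .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));

        // The new approach: copying into a LinkedHashMap keeps source order.
        Map<String, String> ordered = new LinkedHashMap<>();
        source.forEach(ordered::put);

        System.out.println(unordered.keySet()); // order unspecified
        System.out.println(ordered.keySet());   // [server.properties, bukkit.yml, spigot.yml]
    }
}
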
From 2b24d42ab2b9a9c28a5929520d9ca565f07724f9 Mon Sep 17 00:00:00 2001
From: Luck
Date: Wed, 29 Jun 2022 22:41:42 +0100
Subject: Add server config providers for Fabric/Forge

---
 .../spark/bukkit/BukkitServerConfigProvider.java   |  8 ---
 .../serverconfig/AbstractServerConfigProvider.java | 10 ++++
 .../spark/fabric/FabricServerConfigProvider.java   | 57 ++++++++++++++++++++++
 .../fabric/plugin/FabricServerSparkPlugin.java     |  7 +++
 .../spark/forge/ForgeServerConfigProvider.java     | 57 ++++++++++++++++++++++
 .../spark/forge/plugin/ForgeServerSparkPlugin.java |  7 +++
 6 files changed, 138 insertions(+), 8 deletions(-)
 create mode 100644 spark-fabric/src/main/java/me/lucko/spark/fabric/FabricServerConfigProvider.java
 create mode 100644 spark-forge/src/main/java/me/lucko/spark/forge/ForgeServerConfigProvider.java
(limited to 'spark-common')

diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java
index f822015..4c587fb 100644
--- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java
+++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java
@@ -45,7 +45,6 @@ import java.io.IOException;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.Paths;
-import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.LinkedHashMap;
@@ -160,13 +159,6 @@ public class BukkitServerConfigProvider extends AbstractServerConfigProvider {
         HIDDEN_PATHS = hiddenPaths.build();
     }

-    private static List<String> getSystemPropertyList(String property) {
-        String value = System.getProperty(property);
-        return value == null
-                ? Collections.emptyList()
-                : Arrays.asList(value.split(","));
-    }
-
     private static List<String> getTimingsHiddenConfigs() {
         try {
             return TimingsManager.hiddenConfigs;
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java
index 501851a..559ae95 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java
@@ -23,7 +23,10 @@ package me.lucko.spark.common.platform.serverconfig;
 import com.google.common.collect.ImmutableMap;
 import com.google.gson.JsonElement;

+import java.util.Arrays;
 import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
 import java.util.Map;

 /**
@@ -60,4 +63,11 @@ public abstract class AbstractServerConfigProvid
         return builder.build();
     }

+    protected static List<String> getSystemPropertyList(String property) {
+        String value = System.getProperty(property);
+        return value == null
+                ? Collections.emptyList()
+                : Arrays.asList(value.split(","));
+    }
+
 }
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricServerConfigProvider.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricServerConfigProvider.java
new file mode 100644
index 0000000..18079d3
--- /dev/null
+++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricServerConfigProvider.java
@@ -0,0 +1,57 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck)
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.fabric;
+
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.ImmutableSet;
+
+import me.lucko.spark.common.platform.serverconfig.AbstractServerConfigProvider;
+import me.lucko.spark.common.platform.serverconfig.ConfigParser;
+import me.lucko.spark.common.platform.serverconfig.PropertiesConfigParser;
+
+import java.util.Collection;
+import java.util.Map;
+
+public class FabricServerConfigProvider extends AbstractServerConfigProvider {
+
+    /** A map of provided files and their type */
+    private static final Map<String, ConfigParser> FILES;
+    /** A collection of paths to be excluded from the files */
+    private static final Collection<String> HIDDEN_PATHS;
+
+    public FabricServerConfigProvider() {
+        super(FILES, HIDDEN_PATHS);
+    }
+
+    static {
+        ImmutableSet.Builder<String> hiddenPaths = ImmutableSet.<String>builder()
+                .add("server-ip")
+                .add("motd")
+                .add("resource-pack")
+                .add("rcon<dot>password")
+                .add("level-seed")
+                .addAll(getSystemPropertyList("spark.serverconfigs.hiddenpaths"));
+
+        FILES = ImmutableMap.of("server.properties", PropertiesConfigParser.INSTANCE);
+        HIDDEN_PATHS = hiddenPaths.build();
+    }
+
+}
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java
index 2283a84..bb1d68c 100644
--- a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java
+++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java
@@ -31,12 +31,14 @@ import com.mojang.brigadier.suggestion.SuggestionsBuilder;
 import me.lucko.fabric.api.permissions.v0.Permissions;
 import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
 import me.lucko.spark.common.platform.PlatformInfo;
+import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider;
 import me.lucko.spark.common.platform.world.WorldInfoProvider;
 import me.lucko.spark.common.tick.TickHook;
 import me.lucko.spark.common.tick.TickReporter;
 import me.lucko.spark.fabric.FabricCommandSender;
 import me.lucko.spark.fabric.FabricPlatformInfo;
 import me.lucko.spark.fabric.FabricPlayerPingProvider;
+import me.lucko.spark.fabric.FabricServerConfigProvider;
 import me.lucko.spark.fabric.FabricSparkMod;
 import me.lucko.spark.fabric.FabricTickHook;
 import me.lucko.spark.fabric.FabricTickReporter;
@@ -148,6 +150,11 @@ public class FabricServerSparkPlugin extends FabricSparkPlugin implements Comman
         return new FabricPlayerPingProvider(this.server);
     }

+    @Override
+    public ServerConfigProvider createServerConfigProvider() {
+        return new FabricServerConfigProvider();
+    }
+
     @Override
     public WorldInfoProvider createWorldInfoProvider() {
         return new FabricWorldInfoProvider.Server(this.server);
diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeServerConfigProvider.java b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeServerConfigProvider.java
new file mode 100644
index 0000000..baa1358
--- /dev/null
+++ b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeServerConfigProvider.java
@@ -0,0 +1,57 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck)
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.forge;
+
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.ImmutableSet;
+
+import me.lucko.spark.common.platform.serverconfig.AbstractServerConfigProvider;
+import me.lucko.spark.common.platform.serverconfig.ConfigParser;
+import me.lucko.spark.common.platform.serverconfig.PropertiesConfigParser;
+
+import java.util.Collection;
+import java.util.Map;
+
+public class ForgeServerConfigProvider extends AbstractServerConfigProvider {
+
+    /** A map of provided files and their type */
+    private static final Map<String, ConfigParser> FILES;
+    /** A collection of paths to be excluded from the files */
+    private static final Collection<String> HIDDEN_PATHS;
+
+    public ForgeServerConfigProvider() {
+        super(FILES, HIDDEN_PATHS);
+    }
+
+    static {
+        ImmutableSet.Builder<String> hiddenPaths = ImmutableSet.<String>builder()
+                .add("server-ip")
+                .add("motd")
+                .add("resource-pack")
+                .add("rcon<dot>password")
+                .add("level-seed")
+                .addAll(getSystemPropertyList("spark.serverconfigs.hiddenpaths"));
+
+        FILES = ImmutableMap.of("server.properties", PropertiesConfigParser.INSTANCE);
+        HIDDEN_PATHS = hiddenPaths.build();
+    }
+
+}
diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java
index 03f9952..f4a51e0 100644
--- a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java
+++ b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java
@@ -31,12 +31,14 @@ import com.mojang.brigadier.suggestion.SuggestionsBuilder;
 import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
 import me.lucko.spark.common.platform.PlatformInfo;
+import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider;
 import me.lucko.spark.common.platform.world.WorldInfoProvider;
 import me.lucko.spark.common.tick.TickHook;
 import me.lucko.spark.common.tick.TickReporter;
 import me.lucko.spark.forge.ForgeCommandSender;
 import me.lucko.spark.forge.ForgePlatformInfo;
 import me.lucko.spark.forge.ForgePlayerPingProvider;
+import me.lucko.spark.forge.ForgeServerConfigProvider;
 import me.lucko.spark.forge.ForgeSparkMod;
 import me.lucko.spark.forge.ForgeTickHook;
 import me.lucko.spark.forge.ForgeTickReporter;
@@ -205,6 +207,11 @@ public class ForgeServerSparkPlugin extends ForgeSparkPlugin implements Command<
         return new ForgePlayerPingProvider(this.server);
     }

+    @Override
+    public ServerConfigProvider createServerConfigProvider() {
+        return new ForgeServerConfigProvider();
+    }
+
     @Override
     public WorldInfoProvider createWorldInfoProvider() {
         return new ForgeWorldInfoProvider.Server(this.server);
-- cgit
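
Both new providers pull extra hidden paths from the spark.serverconfigs.hiddenpaths system property through the getSystemPropertyList helper promoted into AbstractServerConfigProvider above. A sketch of the helper's behaviour (the property value is an example, not a default):

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

public class HiddenPathsSketch {
    // Mirrors AbstractServerConfigProvider#getSystemPropertyList.
    static List<String> getSystemPropertyList(String property) {
        String value = System.getProperty(property);
        return value == null
                ? Collections.emptyList()
                : Arrays.asList(value.split(","));
    }

    public static void main(String[] args) {
        // Equivalent to launching the server with
        // -Dspark.serverconfigs.hiddenpaths=level-seed,rcon<dot>password
        System.setProperty("spark.serverconfigs.hiddenpaths", "level-seed,rcon<dot>password");
        System.out.println(getSystemPropertyList("spark.serverconfigs.hiddenpaths"));
        // [level-seed, rcon<dot>password]
    }
}
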
From a10f966a443d56845a5efb1e65232e6b87eabb96 Mon Sep 17 00:00:00 2001
From: Luck
Date: Wed, 29 Jun 2022 22:44:30 +0100
Subject: Fix NPE from null entity type name

---
 .../me/lucko/spark/common/platform/world/WorldStatisticsProvider.java | 4 ++++
 1 file changed, 4 insertions(+)
(limited to 'spark-common')

diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java
index 864a296..80c35a6 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java
@@ -122,6 +122,10 @@ public class WorldStatisticsProvider {
             String name = chunk.entityTypeName(key);
             int count = value.get();

+            if (name == null) {
+                name = "unknown[" + key.toString() + "]";
+            }
+
             builder.putEntityCounts(name, count);
             combined.add(name, count);
         });
-- cgit
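
The guard substitutes a stable placeholder rather than letting a null name reach the per-type counters, where it previously caused a NullPointerException. A stand-alone sketch of the pattern (entityTypeName here is a hypothetical lookup standing in for chunk.entityTypeName(key)):

import java.util.HashMap;
import java.util.Map;

public class NullNameSketch {
    // Hypothetical registry; unknown (e.g. modded) keys yield null,
    // just as chunk.entityTypeName(key) can in the diff above.
    static final Map<String, String> NAMES = new HashMap<>();
    static { NAMES.put("minecraft:cow", "cow"); }

    static String entityTypeName(String key) {
        return NAMES.get(key);
    }

    public static void main(String[] args) {
        String key = "somemod:custom_mob";
        String name = entityTypeName(key);

        // The fix: never pass null onwards as a map key.
        if (name == null) {
            name = "unknown[" + key + "]";
        }
        System.out.println(name); // unknown[somemod:custom_mob]
    }
}
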