From a551317a0acc0f6ccb2d1bb66e8475b42387e59c Mon Sep 17 00:00:00 2001 From: Luck Date: Sat, 11 Jun 2022 21:05:08 +0100 Subject: Tidy up placeholder handling Co-authored-by: Caden Kriese --- .../lucko/spark/common/util/SparkPlaceholder.java | 191 +++++++++++++++++++++ 1 file changed, 191 insertions(+) create mode 100644 spark-common/src/main/java/me/lucko/spark/common/util/SparkPlaceholder.java (limited to 'spark-common/src/main/java/me/lucko') diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/SparkPlaceholder.java b/spark-common/src/main/java/me/lucko/spark/common/util/SparkPlaceholder.java new file mode 100644 index 0000000..be5bbc2 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/util/SparkPlaceholder.java @@ -0,0 +1,191 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.util; + +import me.lucko.spark.common.SparkPlatform; +import me.lucko.spark.common.monitor.cpu.CpuMonitor; +import me.lucko.spark.common.monitor.tick.TickStatistics; + +import net.kyori.adventure.text.Component; +import net.kyori.adventure.text.TextComponent; +import net.kyori.adventure.text.serializer.legacy.LegacyComponentSerializer; + +import java.util.Locale; +import java.util.function.BiFunction; + +public enum SparkPlaceholder { + + TPS((platform, arg) -> { + TickStatistics tickStatistics = platform.getTickStatistics(); + if (tickStatistics == null) { + return null; + } + + if (arg == null) { + return Component.text() + .append(StatisticFormatter.formatTps(tickStatistics.tps5Sec())).append(Component.text(", ")) + .append(StatisticFormatter.formatTps(tickStatistics.tps10Sec())).append(Component.text(", ")) + .append(StatisticFormatter.formatTps(tickStatistics.tps1Min())).append(Component.text(", ")) + .append(StatisticFormatter.formatTps(tickStatistics.tps5Min())).append(Component.text(", ")) + .append(StatisticFormatter.formatTps(tickStatistics.tps15Min())) + .build(); + } + + switch (arg) { + case "5s": + return StatisticFormatter.formatTps(tickStatistics.tps5Sec()); + case "10s": + return StatisticFormatter.formatTps(tickStatistics.tps10Sec()); + case "1m": + return StatisticFormatter.formatTps(tickStatistics.tps1Min()); + case "5m": + return StatisticFormatter.formatTps(tickStatistics.tps5Min()); + case "15m": + return StatisticFormatter.formatTps(tickStatistics.tps15Min()); + } + + return null; + }), + + TICKDURATION((platform, arg) -> { + TickStatistics tickStatistics = platform.getTickStatistics(); + if (tickStatistics == null || !tickStatistics.isDurationSupported()) { + return null; + } + + if (arg == null) { + return Component.text() + .append(StatisticFormatter.formatTickDurations(tickStatistics.duration10Sec())).append(Component.text("; ")) + .append(StatisticFormatter.formatTickDurations(tickStatistics.duration1Min())) + .build(); + } + + switch (arg) { + 
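+            // A usage sketch (hypothetical caller): TICKDURATION.resolve(platform, "10s")
+            // matches the first case below, while an unrecognised arg such as "2m"
+            // falls through the switch and resolves to null.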
case "10s": + return StatisticFormatter.formatTickDurations(tickStatistics.duration10Sec()); + case "1m": + return StatisticFormatter.formatTickDurations(tickStatistics.duration1Min()); + } + + return null; + }), + + CPU_SYSTEM((platform, arg) -> { + if (arg == null) { + return Component.text() + .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad10SecAvg())).append(Component.text(", ")) + .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad1MinAvg())).append(Component.text(", ")) + .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad15MinAvg())) + .build(); + } + + switch (arg) { + case "10s": + return StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad10SecAvg()); + case "1m": + return StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad1MinAvg()); + case "15m": + return StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad15MinAvg()); + } + + return null; + }), + + CPU_PROCESS((platform, arg) -> { + if (arg == null) { + return Component.text() + .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad10SecAvg())).append(Component.text(", ")) + .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad1MinAvg())).append(Component.text(", ")) + .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad15MinAvg())) + .build(); + } + + switch (arg) { + case "10s": + return StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad10SecAvg()); + case "1m": + return StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad1MinAvg()); + case "15m": + return StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad15MinAvg()); + } + + return null; + }); + + private final String name; + private final BiFunction function; + + SparkPlaceholder(BiFunction function) { + this.name = name().toLowerCase(Locale.ROOT); + this.function = function; + } + + public String getName() { + return this.name; + } + + public TextComponent resolve(SparkPlatform platform, String arg) { + return this.function.apply(platform, arg); + } + + public static TextComponent resolveComponent(SparkPlatform platform, String placeholder) { + String[] parts = placeholder.split("_"); + + if (parts.length == 0) { + return null; + } + + String label = parts[0]; + + if (label.equals("tps")) { + String arg = parts.length < 2 ? null : parts[1]; + return TPS.resolve(platform, arg); + } + + if (label.equals("tickduration")) { + String arg = parts.length < 2 ? null : parts[1]; + return TICKDURATION.resolve(platform, arg); + } + + if (label.equals("cpu") && parts.length >= 2) { + String type = parts[1]; + String arg = parts.length < 3 ? 
null : parts[2]; + + if (type.equals("system")) { + return CPU_SYSTEM.resolve(platform, arg); + } + if (type.equals("process")) { + return CPU_PROCESS.resolve(platform, arg); + } + } + + return null; + } + + public static String resolveFormattingCode(SparkPlatform platform, String placeholder) { + TextComponent result = resolveComponent(platform, placeholder); + if (result == null) { + return null; + } + return LegacyComponentSerializer.legacySection().serialize(result); + } + +} -- cgit From 28cf3185c1374c4b5af277ef28482299694209a3 Mon Sep 17 00:00:00 2001 From: Luck Date: Mon, 20 Jun 2022 22:21:16 +0100 Subject: New paper config location (#217) --- .../serverconfig/AbstractServerConfigProvider.java | 63 ++++++++++++++------- .../common/platform/serverconfig/ConfigParser.java | 31 +++++++++++ .../serverconfig/PropertiesConfigParser.java | 61 +++++++++++++++++++++ .../serverconfig/PropertiesFileReader.java | 64 ---------------------- 4 files changed, 136 insertions(+), 83 deletions(-) create mode 100644 spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ConfigParser.java create mode 100644 spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesConfigParser.java delete mode 100644 spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesFileReader.java (limited to 'spark-common/src/main/java/me/lucko') diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java index ead2131..0eef111 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java @@ -21,10 +21,17 @@ package me.lucko.spark.common.platform.serverconfig; import com.google.common.collect.ImmutableMap; +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; import com.google.gson.JsonElement; import com.google.gson.JsonObject; +import java.io.BufferedReader; import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; import java.util.Arrays; import java.util.Collection; import java.util.Collections; @@ -38,29 +45,37 @@ import java.util.stream.Collectors; * *

 * This implementation is able to delete hidden paths from
 * the configurations before they are sent to the viewer.
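 *
 * <p>For example, a hidden path of {@code "database"} removes that whole
 * section, while a wildcard entry such as {@code "web.*.password"} removes
 * the {@code password} key from every child object of {@code web} (both
 * paths are hypothetical examples).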

- * - * @param the file type */ -public abstract class AbstractServerConfigProvider> implements ServerConfigProvider { - private final Map files; +public abstract class AbstractServerConfigProvider implements ServerConfigProvider { + private final Map files; private final Collection hiddenPaths; - protected AbstractServerConfigProvider(Map files, Collection hiddenPaths) { + private final Gson gson; + + protected AbstractServerConfigProvider(Map files, Collection hiddenPaths) { this.files = files; this.hiddenPaths = hiddenPaths; + + GsonBuilder gson = new GsonBuilder(); + customiseGson(gson); + this.gson = gson.create(); } @Override public final Map loadServerConfigurations() { ImmutableMap.Builder builder = ImmutableMap.builder(); - this.files.forEach((path, type) -> { + this.files.forEach((path, reader) -> { try { - JsonElement json = load(path, type); - if (json != null) { - delete(json, this.hiddenPaths); - builder.put(path, json); + JsonElement json = load(path, reader); + if (json == null) { + return; } + + delete(json, this.hiddenPaths); + + String name = rewriteConfigPath(path); + builder.put(name, json); } catch (Exception e) { e.printStackTrace(); } @@ -69,15 +84,25 @@ public abstract class AbstractServerConfigProvider> implements return builder.build(); } - /** - * Loads a file from the system. - * - * @param path the name of the file to load - * @param type the type of the file - * @return the loaded file - * @throws IOException if an error occurs performing i/o - */ - protected abstract JsonElement load(String path, T type) throws IOException; + private JsonElement load(String path, ConfigParser parser) throws IOException { + Path filePath = Paths.get(path); + if (!Files.exists(filePath)) { + return null; + } + + try (BufferedReader reader = Files.newBufferedReader(filePath, StandardCharsets.UTF_8)) { + Map values = parser.parse(reader); + return this.gson.toJsonTree(values); + } + } + + protected void customiseGson(GsonBuilder gson) { + + } + + protected String rewriteConfigPath(String path) { + return path; + } /** * Deletes the given paths from the json element. diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ConfigParser.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ConfigParser.java new file mode 100644 index 0000000..2dd15fe --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ConfigParser.java @@ -0,0 +1,31 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +package me.lucko.spark.common.platform.serverconfig; + +import java.io.BufferedReader; +import java.io.IOException; +import java.util.Map; + +public interface ConfigParser { + + Map parse(BufferedReader reader) throws IOException; + +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesConfigParser.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesConfigParser.java new file mode 100644 index 0000000..4c7c2c1 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesConfigParser.java @@ -0,0 +1,61 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.platform.serverconfig; + +import java.io.BufferedReader; +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import java.util.Properties; + +/** + * A {@link ConfigParser} that can parse a .properties file. + */ +public enum PropertiesConfigParser implements ConfigParser { + INSTANCE; + + @Override + public Map parse(BufferedReader reader) throws IOException { + Properties properties = new Properties(); + properties.load(reader); + + Map values = new HashMap<>(); + properties.forEach((k, v) -> { + String key = k.toString(); + String value = v.toString(); + + if ("true".equals(value) || "false".equals(value)) { + values.put(key, Boolean.parseBoolean(value)); + } else if (value.matches("\\d+")) { + try { + values.put(key, Long.parseLong(value)); + } catch (NumberFormatException e) { + values.put(key, value); + } + } else { + values.put(key, value); + } + }); + + return values; + } + +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesFileReader.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesFileReader.java deleted file mode 100644 index 8fc89d7..0000000 --- a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesFileReader.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * This file is part of spark. - * - * Copyright (c) lucko (Luck) - * Copyright (c) contributors - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
- */ - -package me.lucko.spark.common.platform.serverconfig; - -import java.io.FilterReader; -import java.io.IOException; -import java.io.Reader; -import java.util.HashMap; -import java.util.Map; -import java.util.Properties; - -/** - * A {@link Reader} that can parse a .properties file. - */ -public class PropertiesFileReader extends FilterReader { - - public PropertiesFileReader(Reader in) { - super(in); - } - - public Map readProperties() throws IOException { - Properties properties = new Properties(); - properties.load(this); - - Map values = new HashMap<>(); - properties.forEach((k, v) -> { - String key = k.toString(); - String value = v.toString(); - - if ("true".equals(value) || "false".equals(value)) { - values.put(key, Boolean.parseBoolean(value)); - } else if (value.matches("\\d+")) { - try { - values.put(key, Long.parseLong(value)); - } catch (NumberFormatException e) { - values.put(key, value); - } - } else { - values.put(key, value); - } - }); - - return values; - } - -} -- cgit From 4d45579d2bf57b417d5d3eca041c2131177183e4 Mon Sep 17 00:00:00 2001 From: Luck Date: Sat, 25 Jun 2022 22:48:55 +0100 Subject: Add providers for world (entity/chunk) statistics --- .../java/me/lucko/spark/common/SparkPlugin.java | 19 ++ .../platform/PlatformStatisticsProvider.java | 15 ++ .../common/platform/world/AbstractChunkInfo.java | 55 ++++++ .../spark/common/platform/world/ChunkInfo.java | 44 +++++ .../spark/common/platform/world/CountMap.java | 110 +++++++++++ .../common/platform/world/WorldInfoProvider.java | 57 ++++++ .../platform/world/WorldStatisticsProvider.java | 216 +++++++++++++++++++++ 7 files changed, 516 insertions(+) create mode 100644 spark-common/src/main/java/me/lucko/spark/common/platform/world/AbstractChunkInfo.java create mode 100644 spark-common/src/main/java/me/lucko/spark/common/platform/world/ChunkInfo.java create mode 100644 spark-common/src/main/java/me/lucko/spark/common/platform/world/CountMap.java create mode 100644 spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldInfoProvider.java create mode 100644 spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java (limited to 'spark-common/src/main/java/me/lucko') diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java index b817df1..1116b04 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java +++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java @@ -25,6 +25,7 @@ import me.lucko.spark.common.command.sender.CommandSender; import me.lucko.spark.common.monitor.ping.PlayerPingProvider; import me.lucko.spark.common.platform.PlatformInfo; import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider; +import me.lucko.spark.common.platform.world.WorldInfoProvider; import me.lucko.spark.common.sampler.ThreadDumper; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.tick.TickReporter; @@ -74,6 +75,15 @@ public interface SparkPlugin { */ void executeAsync(Runnable task); + /** + * Executes the given {@link Runnable} on the server/client main thread. + * + * @param task the task + */ + default void executeSync(Runnable task) { + throw new UnsupportedOperationException(); + } + /** * Print to the plugin logger. * @@ -142,6 +152,15 @@ public interface SparkPlugin { return ServerConfigProvider.NO_OP; } + /** + * Creates a world info provider. 
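+     *
+     * <p>A minimal sketch of an implementation (the world name and empty chunk
+     * list are purely illustrative):
+     * <pre>{@code
+     * return () -> {
+     *     WorldInfoProvider.Result<ChunkInfo<String>> result = new WorldInfoProvider.Result<>();
+     *     result.put("world", Collections.emptyList());
+     *     return result;
+     * };
+     * }</pre>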
+ * + * @return the world info provider function + */ + default WorldInfoProvider createWorldInfoProvider() { + return WorldInfoProvider.NO_OP; + } + /** * Gets information for the platform. * diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java index f35bbbe..49cfed5 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java @@ -30,8 +30,11 @@ import me.lucko.spark.common.monitor.net.NetworkInterfaceAverages; import me.lucko.spark.common.monitor.net.NetworkMonitor; import me.lucko.spark.common.monitor.ping.PingStatistics; import me.lucko.spark.common.monitor.tick.TickStatistics; +import me.lucko.spark.common.platform.world.WorldInfoProvider; +import me.lucko.spark.common.platform.world.WorldStatisticsProvider; import me.lucko.spark.proto.SparkProtos.PlatformStatistics; import me.lucko.spark.proto.SparkProtos.SystemStatistics; +import me.lucko.spark.proto.SparkProtos.WorldStatistics; import java.lang.management.ManagementFactory; import java.lang.management.MemoryUsage; @@ -182,6 +185,18 @@ public class PlatformStatisticsProvider { builder.setPlayerCount(playerCount); } + try { + WorldInfoProvider worldInfo = this.platform.getPlugin().createWorldInfoProvider(); + WorldStatisticsProvider worldStatisticsProvider = new WorldStatisticsProvider(this.platform, worldInfo); + WorldStatistics worldStatistics = worldStatisticsProvider.getWorldStatistics(); + if (worldStatistics != null) { + builder.setWorld(worldStatistics); + } + } catch (Exception e) { + e.printStackTrace(); + } + + return builder.build(); } diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/AbstractChunkInfo.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/AbstractChunkInfo.java new file mode 100644 index 0000000..80026cd --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/AbstractChunkInfo.java @@ -0,0 +1,55 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +package me.lucko.spark.common.platform.world; + +public abstract class AbstractChunkInfo implements ChunkInfo { + private final int x; + private final int z; + + protected AbstractChunkInfo(int x, int z) { + this.x = x; + this.z = z; + } + + @Override + public int getX() { + return this.x; + } + + @Override + public int getZ() { + return this.z; + } + + @Override + public boolean equals(Object obj) { + if (obj == this) return true; + if (!(obj instanceof AbstractChunkInfo)) return false; + AbstractChunkInfo that = (AbstractChunkInfo) obj; + return this.x == that.x && this.z == that.z; + } + + @Override + public int hashCode() { + return this.x ^ this.z; + } + +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/ChunkInfo.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/ChunkInfo.java new file mode 100644 index 0000000..2193a50 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/ChunkInfo.java @@ -0,0 +1,44 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.platform.world; + +/** + * Information about a given chunk. + * + * @param the type used to describe entities + */ +public interface ChunkInfo { + + int getX(); + + int getZ(); + + CountMap getEntityCounts(); + + /** + * Converts entity type {@link E} to a string. + * + * @param type the entity type + * @return a string + */ + String entityTypeName(E type); + +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/CountMap.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/CountMap.java new file mode 100644 index 0000000..3083266 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/CountMap.java @@ -0,0 +1,110 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.platform.world; + +import java.util.EnumMap; +import java.util.Map; +import java.util.concurrent.atomic.AtomicInteger; + +/** + * A map of (key) -> count. 
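+ *
+ * <p>Usage sketch:
+ * <pre>{@code
+ * CountMap<String> counts = new CountMap.Simple<>(new HashMap<>());
+ * counts.increment("zombie");
+ * counts.add("zombie", 2);
+ * counts.total().get(); // returns 3
+ * }</pre>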
+ * + * @param the key type + */ +public interface CountMap { + + /** + * Increment the counter for the given key + * + * @param key the key + */ + void increment(T key); + + /** + * Add to the counter for the given key + * + * @param key the key + */ + void add(T key, int delta); + + AtomicInteger total(); + + Map asMap(); + + /** + * A simple {@link CountMap} backed by the provided {@link Map} + * + * @param the key type + */ + class Simple implements CountMap { + private final Map counts; + private final AtomicInteger total; + + public Simple(Map counts) { + this.counts = counts; + this.total = new AtomicInteger(); + } + + @Override + public void increment(T key) { + AtomicInteger counter = this.counts.get(key); + if (counter == null) { + counter = new AtomicInteger(); + this.counts.put(key, counter); + } + counter.incrementAndGet(); + this.total.incrementAndGet(); + } + + @Override + public void add(T key, int delta) { + AtomicInteger counter = this.counts.get(key); + if (counter == null) { + counter = new AtomicInteger(); + this.counts.put(key, counter); + } + counter.addAndGet(delta); + this.total.addAndGet(delta); + } + + @Override + public AtomicInteger total() { + return this.total; + } + + @Override + public Map asMap() { + return this.counts; + } + } + + /** + * A {@link CountMap} backed by an {@link EnumMap}. + * + * @param the key type - must be an enum + */ + class EnumKeyed> extends Simple { + public EnumKeyed(Class keyClass) { + super(new EnumMap<>(keyClass)); + } + } + +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldInfoProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldInfoProvider.java new file mode 100644 index 0000000..9494816 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldInfoProvider.java @@ -0,0 +1,57 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.platform.world; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** + * Provides information about worlds. + */ +public interface WorldInfoProvider { + + WorldInfoProvider NO_OP = () -> null; + + /** + * Polls for information. 
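+     *
+     * <p>Callers should check {@link #mustCallSync()} first; the statistics
+     * provider, for example, polls on the main thread via
+     * {@code CompletableFuture.supplyAsync(provider::poll, plugin::executeSync)}.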
+ * + * @return the information + */ + Result> poll(); + + default boolean mustCallSync() { + return true; + } + + final class Result { + private final Map> worlds = new HashMap<>(); + + public void put(String worldName, List chunks) { + this.worlds.put(worldName, chunks); + } + + public Map> getWorlds() { + return this.worlds; + } + } + +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java new file mode 100644 index 0000000..864a296 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java @@ -0,0 +1,216 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.platform.world; + +import me.lucko.spark.common.SparkPlatform; +import me.lucko.spark.common.SparkPlugin; +import me.lucko.spark.proto.SparkProtos.WorldStatistics; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Set; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.logging.Level; + +public class WorldStatisticsProvider { + private final SparkPlatform platform; + private final WorldInfoProvider provider; + + public WorldStatisticsProvider(SparkPlatform platform, WorldInfoProvider provider) { + this.platform = platform; + this.provider = provider; + } + + public WorldStatistics getWorldStatistics() { + if (this.provider == WorldInfoProvider.NO_OP) { + return null; + } + + CompletableFuture>> future; + + if (this.provider.mustCallSync()) { + SparkPlugin plugin = this.platform.getPlugin(); + future = CompletableFuture.supplyAsync(this.provider::poll, plugin::executeSync); + } else { + future = CompletableFuture.completedFuture(this.provider.poll()); + } + + WorldInfoProvider.Result> result; + try { + result = future.get(5, TimeUnit.SECONDS); + } catch (InterruptedException | ExecutionException e) { + throw new RuntimeException(e); + } catch (TimeoutException e) { + this.platform.getPlugin().log(Level.WARNING, "Timed out waiting for world statistics"); + return null; + } + + if (result == null) { + return null; + } + + WorldStatistics.Builder stats = WorldStatistics.newBuilder(); + + AtomicInteger combinedTotal = new AtomicInteger(); + CountMap combined = new CountMap.Simple<>(new HashMap<>()); + + result.getWorlds().forEach((worldName, chunks) -> { + WorldStatistics.World.Builder builder = WorldStatistics.World.newBuilder(); + builder.setName(worldName); + + List regions = groupIntoRegions(chunks); + + int total = 0; + + for 
(Region region : regions) { + builder.addRegions(regionToProto(region, combined)); + total += region.getTotalEntities().get(); + } + + builder.setTotalEntities(total); + combinedTotal.addAndGet(total); + + stats.addWorlds(builder.build()); + }); + + stats.setTotalEntities(combinedTotal.get()); + combined.asMap().forEach((key, value) -> stats.putEntityCounts(key, value.get())); + + return stats.build(); + } + + private static WorldStatistics.Region regionToProto(Region region, CountMap combined) { + WorldStatistics.Region.Builder builder = WorldStatistics.Region.newBuilder(); + builder.setTotalEntities(region.getTotalEntities().get()); + for (ChunkInfo chunk : region.getChunks()) { + builder.addChunks(chunkToProto(chunk, combined)); + } + return builder.build(); + } + + private static WorldStatistics.Chunk chunkToProto(ChunkInfo chunk, CountMap combined) { + WorldStatistics.Chunk.Builder builder = WorldStatistics.Chunk.newBuilder(); + builder.setX(chunk.getX()); + builder.setZ(chunk.getZ()); + builder.setTotalEntities(chunk.getEntityCounts().total().get()); + chunk.getEntityCounts().asMap().forEach((key, value) -> { + String name = chunk.entityTypeName(key); + int count = value.get(); + + builder.putEntityCounts(name, count); + combined.add(name, count); + }); + return builder.build(); + } + + private static List groupIntoRegions(List> chunks) { + List regions = new ArrayList<>(); + + for (ChunkInfo chunk : chunks) { + CountMap counts = chunk.getEntityCounts(); + if (counts.total().get() == 0) { + continue; + } + + boolean found = false; + + for (Region region : regions) { + if (region.isAdjacent(chunk)) { + found = true; + region.add(chunk); + + // if the chunk is adjacent to more than one region, merge the regions together + for (Iterator iterator = regions.iterator(); iterator.hasNext(); ) { + Region otherRegion = iterator.next(); + if (region != otherRegion && otherRegion.isAdjacent(chunk)) { + iterator.remove(); + region.merge(otherRegion); + } + } + + break; + } + } + + if (!found) { + regions.add(new Region(chunk)); + } + } + + return regions; + } + + /** + * A map of nearby chunks grouped together by Euclidean distance. 
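+     *
+     * <p>A worked example: with {@code DISTANCE_THRESHOLD = 2} checked against
+     * the squared distance, chunk (10, 10) is adjacent to (11, 11), since
+     * {@code dx*dx + dz*dz == 2}, but not to (12, 10), where the squared
+     * distance is 4 and a separate region would be started.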
+ */ + private static final class Region { + private static final int DISTANCE_THRESHOLD = 2; + private final Set> chunks; + private final AtomicInteger totalEntities; + + private Region(ChunkInfo initial) { + this.chunks = new HashSet<>(); + this.chunks.add(initial); + this.totalEntities = new AtomicInteger(initial.getEntityCounts().total().get()); + } + + public Set> getChunks() { + return this.chunks; + } + + public AtomicInteger getTotalEntities() { + return this.totalEntities; + } + + public boolean isAdjacent(ChunkInfo chunk) { + for (ChunkInfo el : this.chunks) { + if (squaredEuclideanDistance(el, chunk) <= DISTANCE_THRESHOLD) { + return true; + } + } + return false; + } + + public void add(ChunkInfo chunk) { + this.chunks.add(chunk); + this.totalEntities.addAndGet(chunk.getEntityCounts().total().get()); + } + + public void merge(Region group) { + this.chunks.addAll(group.getChunks()); + this.totalEntities.addAndGet(group.getTotalEntities().get()); + } + + private static long squaredEuclideanDistance(ChunkInfo a, ChunkInfo b) { + long dx = a.getX() - b.getX(); + long dz = a.getZ() - b.getZ(); + return (dx * dx) + (dz * dz); + } + } + +} -- cgit From fed6a350a80f6daa8c170770e5f300a0d5aa0894 Mon Sep 17 00:00:00 2001 From: Luck Date: Sun, 26 Jun 2022 18:59:02 +0100 Subject: Include info about number of ticks in a profile --- .../common/command/modules/SamplerModule.java | 2 +- .../spark/common/sampler/AbstractSampler.java | 36 +++++++++++++++++++--- .../me/lucko/spark/common/sampler/Sampler.java | 2 +- .../lucko/spark/common/sampler/SamplerBuilder.java | 6 ++-- .../spark/common/sampler/async/AsyncSampler.java | 10 +++--- .../spark/common/sampler/java/JavaSampler.java | 12 ++++---- .../common/sampler/java/TickedDataAggregator.java | 15 +++++++++ 7 files changed, 62 insertions(+), 21 deletions(-) (limited to 'spark-common/src/main/java/me/lucko') diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java index 970d062..fd5cd67 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java @@ -266,7 +266,7 @@ public class SamplerModule implements CommandModule { if (this.activeSampler == null) { resp.replyPrefixed(text("There isn't an active profiler running.")); } else { - long timeout = this.activeSampler.getEndTime(); + long timeout = this.activeSampler.getAutoEndTime(); if (timeout == -1) { resp.replyPrefixed(text("There is an active profiler currently running, with no defined timeout.")); } else { diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java index ce466a0..1c217db 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java @@ -27,6 +27,7 @@ import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider; import me.lucko.spark.common.sampler.aggregator.DataAggregator; import me.lucko.spark.common.sampler.node.MergeMode; import me.lucko.spark.common.sampler.node.ThreadNode; +import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.util.ClassSourceLookup; import me.lucko.spark.proto.SparkSamplerProtos.SamplerData; import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata; @@ 
-41,6 +42,9 @@ import java.util.concurrent.CompletableFuture; */ public abstract class AbstractSampler implements Sampler { + /** The spark platform instance */ + protected final SparkPlatform platform; + /** The interval to wait between sampling, in microseconds */ protected final int interval; @@ -50,8 +54,11 @@ public abstract class AbstractSampler implements Sampler { /** The time when sampling first began */ protected long startTime = -1; + /** The game tick when sampling first began */ + protected int startTick = -1; + /** The unix timestamp (in millis) when this sampler should automatically complete. */ - protected final long endTime; // -1 for nothing + protected final long autoEndTime; // -1 for nothing /** A future to encapsulate the completion of this sampler instance */ protected final CompletableFuture future = new CompletableFuture<>(); @@ -59,10 +66,11 @@ public abstract class AbstractSampler implements Sampler { /** The garbage collector statistics when profiling started */ protected Map initialGcStats; - protected AbstractSampler(int interval, ThreadDumper threadDumper, long endTime) { + protected AbstractSampler(SparkPlatform platform, int interval, ThreadDumper threadDumper, long autoEndTime) { + this.platform = platform; this.interval = interval; this.threadDumper = threadDumper; - this.endTime = endTime; + this.autoEndTime = autoEndTime; } @Override @@ -74,8 +82,8 @@ public abstract class AbstractSampler implements Sampler { } @Override - public long getEndTime() { - return this.endTime; + public long getAutoEndTime() { + return this.autoEndTime; } @Override @@ -91,6 +99,16 @@ public abstract class AbstractSampler implements Sampler { return this.initialGcStats; } + @Override + public void start() { + this.startTime = System.currentTimeMillis(); + + TickHook tickHook = this.platform.getTickHook(); + if (tickHook != null) { + this.startTick = tickHook.getCurrentTick(); + } + } + protected void writeMetadataToProto(SamplerData.Builder proto, SparkPlatform platform, CommandSender creator, String comment, DataAggregator dataAggregator) { SamplerMetadata.Builder metadata = SamplerMetadata.newBuilder() .setPlatformMetadata(platform.getPlugin().getPlatformInfo().toData().toProto()) @@ -105,6 +123,14 @@ public abstract class AbstractSampler implements Sampler { metadata.setComment(comment); } + if (this.startTick != -1) { + TickHook tickHook = this.platform.getTickHook(); + if (tickHook != null) { + int numberOfTicks = tickHook.getCurrentTick() - this.startTick; + metadata.setNumberOfTicks(numberOfTicks); + } + } + try { metadata.setPlatformStatistics(platform.getStatisticsProvider().getPlatformStatistics(getInitialGcStats())); } catch (Exception e) { diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java index 845043f..84f2da1 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java @@ -57,7 +57,7 @@ public interface Sampler { * * @return the end time, or -1 if undefined */ - long getEndTime(); + long getAutoEndTime(); /** * Gets a future to encapsulate the completion of the sampler diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java index 88cf018..88b9d91 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java +++ 
b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java @@ -97,11 +97,11 @@ public class SamplerBuilder { Sampler sampler; if (this.ticksOver != -1 && this.tickHook != null) { - sampler = new JavaSampler(intervalMicros, this.threadDumper, this.threadGrouper, this.timeout, this.ignoreSleeping, this.ignoreNative, this.tickHook, this.ticksOver); + sampler = new JavaSampler(platform, intervalMicros, this.threadDumper, this.threadGrouper, this.timeout, this.ignoreSleeping, this.ignoreNative, this.tickHook, this.ticksOver); } else if (this.useAsyncProfiler && !(this.threadDumper instanceof ThreadDumper.Regex) && AsyncProfilerAccess.INSTANCE.checkSupported(platform)) { - sampler = new AsyncSampler(intervalMicros, this.threadDumper, this.threadGrouper, this.timeout); + sampler = new AsyncSampler(platform, intervalMicros, this.threadDumper, this.threadGrouper, this.timeout); } else { - sampler = new JavaSampler(intervalMicros, this.threadDumper, this.threadGrouper, this.timeout, this.ignoreSleeping, this.ignoreNative); + sampler = new JavaSampler(platform, intervalMicros, this.threadDumper, this.threadGrouper, this.timeout, this.ignoreSleeping, this.ignoreNative); } sampler.start(); diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java index 5cb7fdc..d8288da 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java @@ -65,8 +65,8 @@ public class AsyncSampler extends AbstractSampler { /** The executor used for timeouts */ private ScheduledExecutorService timeoutExecutor; - public AsyncSampler(int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime) { - super(interval, threadDumper, endTime); + public AsyncSampler(SparkPlatform platform, int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime) { + super(platform, interval, threadDumper, endTime); this.profiler = AsyncProfilerAccess.INSTANCE.getProfiler(); this.dataAggregator = new AsyncDataAggregator(threadGrouper); } @@ -90,7 +90,7 @@ public class AsyncSampler extends AbstractSampler { */ @Override public void start() { - this.startTime = System.currentTimeMillis(); + super.start(); try { this.outputFile = TemporaryFiles.create("spark-profile-", ".jfr.tmp"); @@ -120,11 +120,11 @@ public class AsyncSampler extends AbstractSampler { } private void scheduleTimeout() { - if (this.endTime == -1) { + if (this.autoEndTime == -1) { return; } - long delay = this.endTime - System.currentTimeMillis(); + long delay = this.autoEndTime - System.currentTimeMillis(); if (delay <= 0) { return; } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java index cfa0a0f..913faee 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java @@ -63,19 +63,19 @@ public class JavaSampler extends AbstractSampler implements Runnable { /** Responsible for aggregating and then outputting collected sampling data */ private final JavaDataAggregator dataAggregator; - public JavaSampler(int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime, boolean ignoreSleeping, boolean ignoreNative) { - super(interval, 
threadDumper, endTime); + public JavaSampler(SparkPlatform platform, int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime, boolean ignoreSleeping, boolean ignoreNative) { + super(platform, interval, threadDumper, endTime); this.dataAggregator = new SimpleDataAggregator(this.workerPool, threadGrouper, interval, ignoreSleeping, ignoreNative); } - public JavaSampler(int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime, boolean ignoreSleeping, boolean ignoreNative, TickHook tickHook, int tickLengthThreshold) { - super(interval, threadDumper, endTime); + public JavaSampler(SparkPlatform platform, int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime, boolean ignoreSleeping, boolean ignoreNative, TickHook tickHook, int tickLengthThreshold) { + super(platform, interval, threadDumper, endTime); this.dataAggregator = new TickedDataAggregator(this.workerPool, threadGrouper, interval, ignoreSleeping, ignoreNative, tickHook, tickLengthThreshold); } @Override public void start() { - this.startTime = System.currentTimeMillis(); + super.start(); this.task = this.workerPool.scheduleAtFixedRate(this, 0, this.interval, TimeUnit.MICROSECONDS); } @@ -89,7 +89,7 @@ public class JavaSampler extends AbstractSampler implements Runnable { // this is effectively synchronized, the worker pool will not allow this task // to concurrently execute. try { - if (this.endTime != -1 && this.endTime <= System.currentTimeMillis()) { + if (this.autoEndTime != -1 && this.autoEndTime <= System.currentTimeMillis()) { this.future.complete(this); stop(); return; diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java index e817828..e062f31 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java @@ -31,6 +31,7 @@ import java.util.ArrayList; import java.util.List; import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicInteger; /** * Implementation of {@link DataAggregator} which supports only including sampling data from "ticks" @@ -47,6 +48,9 @@ public class TickedDataAggregator extends JavaDataAggregator { /** The expected number of samples in each tick */ private final int expectedSize; + /** The number of ticks aggregated so far */ + private final AtomicInteger numberOfTicks = new AtomicInteger(); + private final Object mutex = new Object(); // state @@ -64,10 +68,16 @@ public class TickedDataAggregator extends JavaDataAggregator { @Override public SamplerMetadata.DataAggregator getMetadata() { + // push the current tick (so numberOfTicks is accurate) + synchronized (this.mutex) { + pushCurrentTick(); + } + return SamplerMetadata.DataAggregator.newBuilder() .setType(SamplerMetadata.DataAggregator.Type.TICKED) .setThreadGrouper(this.threadGrouper.asProto()) .setTickLengthThreshold(this.tickLengthThreshold) + .setNumberOfIncludedTicks(this.numberOfTicks.get()) .build(); } @@ -97,6 +107,7 @@ public class TickedDataAggregator extends JavaDataAggregator { return; } + this.numberOfTicks.incrementAndGet(); this.workerPool.submit(currentData); } @@ -110,6 +121,10 @@ public class TickedDataAggregator extends JavaDataAggregator { return super.exportData(); } + public int getNumberOfTicks() { + return 
this.numberOfTicks.get(); + } + private final class TickList implements Runnable { private final List list; -- cgit From d5d5cb10714b0993ec91d6a2b523b661c1314917 Mon Sep 17 00:00:00 2001 From: Luck Date: Sun, 26 Jun 2022 19:19:49 +0100 Subject: Update adventure version --- .../java/me/lucko/spark/common/command/modules/ActivityLogModule.java | 1 + 1 file changed, 1 insertion(+) (limited to 'spark-common/src/main/java/me/lucko') diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/ActivityLogModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/ActivityLogModule.java index b777f3e..6252ac7 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/ActivityLogModule.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/ActivityLogModule.java @@ -50,6 +50,7 @@ import static net.kyori.adventure.text.format.TextDecoration.BOLD; public class ActivityLogModule implements CommandModule, RowRenderer { private final Pagination.Builder pagination = Pagination.builder() + .width(45) .renderer(new Renderer() { @Override public Component renderEmpty() { -- cgit From 7f422948755c2988180c32fda9554ea1531949c2 Mon Sep 17 00:00:00 2001 From: Ben Kerllenevich Date: Tue, 28 Jun 2022 14:57:36 -0400 Subject: Better handling of Paper's split config system (#218) --- .../serverconfig/AbstractServerConfigProvider.java | 17 ++++------------- .../common/platform/serverconfig/ConfigParser.java | 20 +++++++++++++++++++- 2 files changed, 23 insertions(+), 14 deletions(-) (limited to 'spark-common/src/main/java/me/lucko') diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java index 0eef111..5a14382 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java @@ -73,9 +73,7 @@ public abstract class AbstractServerConfigProvider implements ServerConfigProvid } delete(json, this.hiddenPaths); - - String name = rewriteConfigPath(path); - builder.put(name, json); + builder.put(path, json); } catch (Exception e) { e.printStackTrace(); } @@ -85,25 +83,18 @@ public abstract class AbstractServerConfigProvider implements ServerConfigProvid } private JsonElement load(String path, ConfigParser parser) throws IOException { - Path filePath = Paths.get(path); - if (!Files.exists(filePath)) { + Map values = parser.parse(path); + if (values == null) { return null; } - try (BufferedReader reader = Files.newBufferedReader(filePath, StandardCharsets.UTF_8)) { - Map values = parser.parse(reader); - return this.gson.toJsonTree(values); - } + return this.gson.toJsonTree(values); } protected void customiseGson(GsonBuilder gson) { } - protected String rewriteConfigPath(String path) { - return path; - } - /** * Deletes the given paths from the json element. 
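 *
 * <p>For instance, deleting the (hypothetical) path {@code "web.*.password"}
 * steps into {@code web}, matches each of its children via the {@code *}
 * wildcard, and removes every {@code password} entry found beneath them.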
* diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ConfigParser.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ConfigParser.java index 2dd15fe..dfbf816 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ConfigParser.java +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ConfigParser.java @@ -22,10 +22,28 @@ package me.lucko.spark.common.platform.serverconfig; import java.io.BufferedReader; import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; import java.util.Map; public interface ConfigParser { - Map parse(BufferedReader reader) throws IOException; + default Map parse(String file) throws IOException { + return parse(Paths.get(file)); + } + + default Map parse(Path file) throws IOException { + if (!Files.exists(file)) { + return null; + } + + try (BufferedReader reader = Files.newBufferedReader(file, StandardCharsets.UTF_8)) { + return this.parse(reader); + } + } + + Map parse(BufferedReader reader) throws IOException; } -- cgit From 3b93798a312a0b63b7d80a6b9a7ae3150f8fdaca Mon Sep 17 00:00:00 2001 From: Luck Date: Tue, 28 Jun 2022 20:24:46 +0100 Subject: Fix config filtering on nested files --- .../serverconfig/AbstractServerConfigProvider.java | 97 +--------------------- .../common/platform/serverconfig/ConfigParser.java | 7 +- .../serverconfig/ExcludedConfigFilter.java | 97 ++++++++++++++++++++++ .../serverconfig/PropertiesConfigParser.java | 16 ++++ 4 files changed, 120 insertions(+), 97 deletions(-) create mode 100644 spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ExcludedConfigFilter.java (limited to 'spark-common/src/main/java/me/lucko') diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java index 5a14382..501851a 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java @@ -21,24 +21,10 @@ package me.lucko.spark.common.platform.serverconfig; import com.google.common.collect.ImmutableMap; -import com.google.gson.Gson; -import com.google.gson.GsonBuilder; import com.google.gson.JsonElement; -import com.google.gson.JsonObject; -import java.io.BufferedReader; -import java.io.IOException; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.util.Arrays; import java.util.Collection; -import java.util.Collections; -import java.util.Deque; -import java.util.LinkedList; import java.util.Map; -import java.util.stream.Collectors; /** * Abstract implementation of {@link ServerConfigProvider}. 
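 *
 * <p>Each file is loaded via {@code parser.load(path, this.hiddenPathFilters)},
 * delegating hidden-path removal to {@code ExcludedConfigFilter} as part of
 * the load step.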
@@ -48,31 +34,23 @@ import java.util.stream.Collectors; */ public abstract class AbstractServerConfigProvider implements ServerConfigProvider { private final Map files; - private final Collection hiddenPaths; - - private final Gson gson; + private final ExcludedConfigFilter hiddenPathFilters; protected AbstractServerConfigProvider(Map files, Collection hiddenPaths) { this.files = files; - this.hiddenPaths = hiddenPaths; - - GsonBuilder gson = new GsonBuilder(); - customiseGson(gson); - this.gson = gson.create(); + this.hiddenPathFilters = new ExcludedConfigFilter(hiddenPaths); } @Override public final Map loadServerConfigurations() { ImmutableMap.Builder builder = ImmutableMap.builder(); - this.files.forEach((path, reader) -> { + this.files.forEach((path, parser) -> { try { - JsonElement json = load(path, reader); + JsonElement json = parser.load(path, this.hiddenPathFilters); if (json == null) { return; } - - delete(json, this.hiddenPaths); builder.put(path, json); } catch (Exception e) { e.printStackTrace(); @@ -82,71 +60,4 @@ public abstract class AbstractServerConfigProvider implements ServerConfigProvid return builder.build(); } - private JsonElement load(String path, ConfigParser parser) throws IOException { - Map values = parser.parse(path); - if (values == null) { - return null; - } - - return this.gson.toJsonTree(values); - } - - protected void customiseGson(GsonBuilder gson) { - - } - - /** - * Deletes the given paths from the json element. - * - * @param json the json element - * @param paths the paths to delete - */ - private static void delete(JsonElement json, Collection paths) { - for (String path : paths) { - Deque pathDeque = new LinkedList<>(Arrays.asList(path.split("\\."))); - delete(json, pathDeque); - } - } - - private static void delete(JsonElement json, Deque path) { - if (path.isEmpty()) { - return; - } - if (!json.isJsonObject()) { - return; - } - - JsonObject jsonObject = json.getAsJsonObject(); - String expected = path.removeFirst().replace("", "."); - - Collection keys; - if (expected.equals("*")) { - keys = jsonObject.entrySet().stream() - .map(Map.Entry::getKey) - .collect(Collectors.toList()); - } else if (expected.endsWith("*")) { - String pattern = expected.substring(0, expected.