Diffstat (limited to 'spark-bukkit/src/main/java/me/lucko')
-rw-r--r--  spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitClassSourceLookup.java  2
-rw-r--r--  spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java  122
-rw-r--r--  spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java  36
-rw-r--r--  spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitWorldInfoProvider.java  130
-rw-r--r--  spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkMVdWPlaceholders.java  3
-rw-r--r--  spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkPlaceholderApi.java  3
-rw-r--r--  spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkPlaceholderProvider.java  123
7 files changed, 244 insertions, 175 deletions
diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitClassSourceLookup.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitClassSourceLookup.java
index 6d8afda..f9c0c0b 100644
--- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitClassSourceLookup.java
+++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitClassSourceLookup.java
@@ -20,7 +20,7 @@
package me.lucko.spark.bukkit;
-import me.lucko.spark.common.util.ClassSourceLookup;
+import me.lucko.spark.common.sampler.source.ClassSourceLookup;
import org.bukkit.plugin.java.JavaPlugin;
diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java
index 953e171..5db1b38 100644
--- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java
+++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java
@@ -25,11 +25,16 @@ import com.google.common.collect.ImmutableSet;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
import com.google.gson.JsonSerializer;
-import me.lucko.spark.common.platform.serverconfig.AbstractServerConfigProvider;
-import me.lucko.spark.common.platform.serverconfig.PropertiesFileReader;
+import me.lucko.spark.common.platform.serverconfig.ConfigParser;
+import me.lucko.spark.common.platform.serverconfig.ExcludedConfigFilter;
+import me.lucko.spark.common.platform.serverconfig.PropertiesConfigParser;
+import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider;
+import org.bukkit.Bukkit;
+import org.bukkit.World;
import org.bukkit.configuration.MemorySection;
import org.bukkit.configuration.file.YamlConfiguration;
@@ -37,23 +42,19 @@ import co.aikar.timings.TimingsManager;
import java.io.BufferedReader;
import java.io.IOException;
-import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
-import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
+import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
-public class BukkitServerConfigProvider extends AbstractServerConfigProvider<BukkitServerConfigProvider.FileType> {
- private static final Gson GSON = new GsonBuilder()
- .registerTypeAdapter(MemorySection.class, (JsonSerializer<MemorySection>) (obj, type, ctx) -> ctx.serialize(obj.getValues(false)))
- .create();
+public class BukkitServerConfigProvider extends ServerConfigProvider {
/** A map of provided files and their type */
- private static final Map<String, FileType> FILES;
+ private static final Map<String, ConfigParser> FILES;
/** A collection of paths to be excluded from the files */
private static final Collection<String> HIDDEN_PATHS;
@@ -61,51 +62,89 @@ public class BukkitServerConfigProvider extends AbstractServerConfigProvider<Buk
super(FILES, HIDDEN_PATHS);
}
- @Override
- protected JsonElement load(String path, FileType type) throws IOException {
- Path filePath = Paths.get(path);
- if (!Files.exists(filePath)) {
- return null;
+ private static class YamlConfigParser implements ConfigParser {
+ public static final YamlConfigParser INSTANCE = new YamlConfigParser();
+ protected static final Gson GSON = new GsonBuilder()
+ .registerTypeAdapter(MemorySection.class, (JsonSerializer<MemorySection>) (obj, type, ctx) -> ctx.serialize(obj.getValues(false)))
+ .create();
+
+ @Override
+ public JsonElement load(String file, ExcludedConfigFilter filter) throws IOException {
+ Map<String, Object> values = this.parse(Paths.get(file));
+ if (values == null) {
+ return null;
+ }
+
+ return filter.apply(GSON.toJsonTree(values));
}
- try (BufferedReader reader = Files.newBufferedReader(filePath, StandardCharsets.UTF_8)) {
- Map<String, Object> values;
-
- if (type == FileType.PROPERTIES) {
- PropertiesFileReader propertiesReader = new PropertiesFileReader(reader);
- values = propertiesReader.readProperties();
- } else if (type == FileType.YAML) {
- YamlConfiguration config = YamlConfiguration.loadConfiguration(reader);
- values = config.getValues(false);
- } else {
- throw new IllegalArgumentException("Unknown file type: " + type);
+ @Override
+ public Map<String, Object> parse(BufferedReader reader) throws IOException {
+ YamlConfiguration config = YamlConfiguration.loadConfiguration(reader);
+ return config.getValues(false);
+ }
+ }
+
+ // Paper 1.19+ split config layout
+ private static class SplitYamlConfigParser extends YamlConfigParser {
+ public static final SplitYamlConfigParser INSTANCE = new SplitYamlConfigParser();
+
+ @Override
+ public JsonElement load(String group, ExcludedConfigFilter filter) throws IOException {
+ String prefix = group.replace("/", "");
+
+ Path configDir = Paths.get("config");
+ if (!Files.exists(configDir)) {
+ return null;
}
- return GSON.toJsonTree(values);
+ JsonObject root = new JsonObject();
+
+ for (Map.Entry<String, Path> entry : getNestedFiles(configDir, prefix).entrySet()) {
+ String fileName = entry.getKey();
+ Path path = entry.getValue();
+
+ Map<String, Object> values = this.parse(path);
+ if (values == null) {
+ continue;
+ }
+
+ // apply the filter individually to each nested file
+ root.add(fileName, filter.apply(GSON.toJsonTree(values)));
+ }
+
+ return root;
}
- }
- enum FileType {
- PROPERTIES,
- YAML
+ private static Map<String, Path> getNestedFiles(Path configDir, String prefix) {
+ Map<String, Path> files = new LinkedHashMap<>();
+ files.put("global.yml", configDir.resolve(prefix + "-global.yml"));
+ files.put("world-defaults.yml", configDir.resolve(prefix + "-world-defaults.yml"));
+ for (World world : Bukkit.getWorlds()) {
+ files.put(world.getName() + ".yml", world.getWorldFolder().toPath().resolve(prefix + "-world.yml"));
+ }
+ return files;
+ }
}
static {
- ImmutableMap.Builder<String, FileType> files = ImmutableMap.<String, FileType>builder()
- .put("server.properties", FileType.PROPERTIES)
- .put("bukkit.yml", FileType.YAML)
- .put("spigot.yml", FileType.YAML)
- .put("paper.yml", FileType.YAML)
- .put("purpur.yml", FileType.YAML);
+ ImmutableMap.Builder<String, ConfigParser> files = ImmutableMap.<String, ConfigParser>builder()
+ .put("server.properties", PropertiesConfigParser.INSTANCE)
+ .put("bukkit.yml", YamlConfigParser.INSTANCE)
+ .put("spigot.yml", YamlConfigParser.INSTANCE)
+ .put("paper.yml", YamlConfigParser.INSTANCE)
+ .put("paper/", SplitYamlConfigParser.INSTANCE)
+ .put("purpur.yml", YamlConfigParser.INSTANCE);
for (String config : getSystemPropertyList("spark.serverconfigs.extra")) {
- files.put(config, FileType.YAML);
+ files.put(config, YamlConfigParser.INSTANCE);
}
ImmutableSet.Builder<String> hiddenPaths = ImmutableSet.<String>builder()
.add("database")
.add("settings.bungeecord-addresses")
.add("settings.velocity-support.secret")
+ .add("proxies.velocity.secret")
.add("server-ip")
.add("motd")
.add("resource-pack")
@@ -113,6 +152,8 @@ public class BukkitServerConfigProvider extends AbstractServerConfigProvider<Buk
.add("level-seed")
.add("world-settings.*.feature-seeds")
.add("world-settings.*.seed-*")
+ .add("feature-seeds")
+ .add("seed-*")
.addAll(getTimingsHiddenConfigs())
.addAll(getSystemPropertyList("spark.serverconfigs.hiddenpaths"));
@@ -120,13 +161,6 @@ public class BukkitServerConfigProvider extends AbstractServerConfigProvider<Buk
HIDDEN_PATHS = hiddenPaths.build();
}
- private static List<String> getSystemPropertyList(String property) {
- String value = System.getProperty(property);
- return value == null
- ? Collections.emptyList()
- : Arrays.asList(value.split(","));
- }
-
private static List<String> getTimingsHiddenConfigs() {
try {
return TimingsManager.hiddenConfigs;
diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java
index 9727277..87490ea 100644
--- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java
+++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java
@@ -28,10 +28,12 @@ import me.lucko.spark.common.SparkPlugin;
import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
import me.lucko.spark.common.platform.PlatformInfo;
import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
import me.lucko.spark.common.sampler.ThreadDumper;
+import me.lucko.spark.common.sampler.source.ClassSourceLookup;
+import me.lucko.spark.common.sampler.source.SourceMetadata;
import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.common.tick.TickReporter;
-import me.lucko.spark.common.util.ClassSourceLookup;
import net.kyori.adventure.platform.bukkit.BukkitAudiences;
@@ -39,24 +41,29 @@ import org.bukkit.ChatColor;
import org.bukkit.command.Command;
import org.bukkit.command.CommandExecutor;
import org.bukkit.command.CommandSender;
+import org.bukkit.plugin.Plugin;
import org.bukkit.plugin.ServicePriority;
import org.bukkit.plugin.java.JavaPlugin;
import java.nio.file.Path;
+import java.util.Arrays;
+import java.util.Collection;
import java.util.List;
import java.util.logging.Level;
import java.util.stream.Stream;
public class BukkitSparkPlugin extends JavaPlugin implements SparkPlugin {
private BukkitAudiences audienceFactory;
+ private ThreadDumper gameThreadDumper;
+
private SparkPlatform platform;
private CommandExecutor tpsCommand = null;
- private final ThreadDumper.GameThread threadDumper = new ThreadDumper.GameThread();
@Override
public void onEnable() {
this.audienceFactory = BukkitAudiences.create(this);
+ this.gameThreadDumper = new ThreadDumper.Specific(Thread.currentThread());
this.platform = new SparkPlatform(this);
this.platform.enable();
@@ -101,7 +108,6 @@ public class BukkitSparkPlugin extends JavaPlugin implements SparkPlugin {
@Override
public boolean onCommand(CommandSender sender, Command command, String label, String[] args) {
- this.threadDumper.ensureSetup();
this.platform.executeCommand(new BukkitCommandSender(sender, this.audienceFactory), args);
return true;
}
@@ -136,7 +142,12 @@ public class BukkitSparkPlugin extends JavaPlugin implements SparkPlugin {
@Override
public void executeAsync(Runnable task) {
- getServer().getScheduler().runTaskAsynchronously(BukkitSparkPlugin.this, task);
+ getServer().getScheduler().runTaskAsynchronously(this, task);
+ }
+
+ @Override
+ public void executeSync(Runnable task) {
+ getServer().getScheduler().runTask(this, task);
}
@Override
@@ -146,7 +157,7 @@ public class BukkitSparkPlugin extends JavaPlugin implements SparkPlugin {
@Override
public ThreadDumper getDefaultThreadDumper() {
- return this.threadDumper.get();
+ return this.gameThreadDumper;
}
@Override
@@ -174,6 +185,16 @@ public class BukkitSparkPlugin extends JavaPlugin implements SparkPlugin {
}
@Override
+ public Collection<SourceMetadata> getKnownSources() {
+ return SourceMetadata.gather(
+ Arrays.asList(getServer().getPluginManager().getPlugins()),
+ Plugin::getName,
+ plugin -> plugin.getDescription().getVersion(),
+ plugin -> String.join(", ", plugin.getDescription().getAuthors())
+ );
+ }
+
+ @Override
public PlayerPingProvider createPlayerPingProvider() {
if (BukkitPlayerPingProvider.isSupported()) {
return new BukkitPlayerPingProvider(getServer());
@@ -188,6 +209,11 @@ public class BukkitSparkPlugin extends JavaPlugin implements SparkPlugin {
}
@Override
+ public WorldInfoProvider createWorldInfoProvider() {
+ return new BukkitWorldInfoProvider(getServer());
+ }
+
+ @Override
public PlatformInfo getPlatformInfo() {
return new BukkitPlatformInfo(getServer());
}
diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitWorldInfoProvider.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitWorldInfoProvider.java
new file mode 100644
index 0000000..8f876cf
--- /dev/null
+++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitWorldInfoProvider.java
@@ -0,0 +1,130 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.bukkit;
+
+import me.lucko.spark.common.platform.world.AbstractChunkInfo;
+import me.lucko.spark.common.platform.world.CountMap;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
+
+import org.bukkit.Chunk;
+import org.bukkit.Server;
+import org.bukkit.World;
+import org.bukkit.entity.Entity;
+import org.bukkit.entity.EntityType;
+
+import java.util.ArrayList;
+import java.util.List;
+
+public class BukkitWorldInfoProvider implements WorldInfoProvider {
+ private static final boolean SUPPORTS_PAPER_COUNT_METHODS;
+
+ static {
+ boolean supportsPaperCountMethods = false;
+ try {
+ World.class.getMethod("getEntityCount");
+ World.class.getMethod("getTileEntityCount");
+ World.class.getMethod("getChunkCount");
+ supportsPaperCountMethods = true;
+ } catch (Exception e) {
+ // ignored
+ }
+ SUPPORTS_PAPER_COUNT_METHODS = supportsPaperCountMethods;
+ }
+
+ private final Server server;
+
+ public BukkitWorldInfoProvider(Server server) {
+ this.server = server;
+ }
+
+ @Override
+ public CountsResult pollCounts() {
+ int players = this.server.getOnlinePlayers().size();
+ int entities = 0;
+ int tileEntities = 0;
+ int chunks = 0;
+
+ for (World world : this.server.getWorlds()) {
+ if (SUPPORTS_PAPER_COUNT_METHODS) {
+ entities += world.getEntityCount();
+ tileEntities += world.getTileEntityCount();
+ chunks += world.getChunkCount();
+ } else {
+ entities += world.getEntities().size();
+ Chunk[] chunksArray = world.getLoadedChunks();
+ for (Chunk chunk : chunksArray) {
+ tileEntities += chunk.getTileEntities().length;
+ }
+ chunks += chunksArray.length;
+ }
+ }
+
+ return new CountsResult(players, entities, tileEntities, chunks);
+ }
+
+ @Override
+ public ChunksResult<BukkitChunkInfo> pollChunks() {
+ ChunksResult<BukkitChunkInfo> data = new ChunksResult<>();
+
+ for (World world : this.server.getWorlds()) {
+ Chunk[] chunks = world.getLoadedChunks();
+
+ List<BukkitChunkInfo> list = new ArrayList<>(chunks.length);
+ for (Chunk chunk : chunks) {
+ if (chunk != null) {
+ list.add(new BukkitChunkInfo(chunk));
+ }
+ }
+
+ data.put(world.getName(), list);
+ }
+
+ return data;
+ }
+
+ static final class BukkitChunkInfo extends AbstractChunkInfo<EntityType> {
+ private final CountMap<EntityType> entityCounts;
+
+ BukkitChunkInfo(Chunk chunk) {
+ super(chunk.getX(), chunk.getZ());
+
+ this.entityCounts = new CountMap.EnumKeyed<>(EntityType.class);
+ for (Entity entity : chunk.getEntities()) {
+ if (entity != null) {
+ this.entityCounts.increment(entity.getType());
+ }
+ }
+ }
+
+ @Override
+ public CountMap<EntityType> getEntityCounts() {
+ return this.entityCounts;
+ }
+
+ @SuppressWarnings("deprecation")
+ @Override
+ public String entityTypeName(EntityType type) {
+ return type.getName();
+ }
+
+ }
+
+}
diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkMVdWPlaceholders.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkMVdWPlaceholders.java
index 078d027..7fa6e02 100644
--- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkMVdWPlaceholders.java
+++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkMVdWPlaceholders.java
@@ -22,6 +22,7 @@ package me.lucko.spark.bukkit.placeholder;
import me.lucko.spark.bukkit.BukkitSparkPlugin;
import me.lucko.spark.common.SparkPlatform;
+import me.lucko.spark.common.util.SparkPlaceholder;
import be.maximvdw.placeholderapi.PlaceholderAPI;
import be.maximvdw.placeholderapi.PlaceholderReplaceEvent;
@@ -43,6 +44,6 @@ public class SparkMVdWPlaceholders implements PlaceholderReplacer {
}
String identifier = placeholder.substring("spark_".length());
- return SparkPlaceholderProvider.respond(this.platform, identifier);
+ return SparkPlaceholder.resolveFormattingCode(this.platform, identifier);
}
}
diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkPlaceholderApi.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkPlaceholderApi.java
index 69dca72..b3919dd 100644
--- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkPlaceholderApi.java
+++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkPlaceholderApi.java
@@ -23,6 +23,7 @@ package me.lucko.spark.bukkit.placeholder;
import me.clip.placeholderapi.expansion.PlaceholderExpansion;
import me.lucko.spark.bukkit.BukkitSparkPlugin;
import me.lucko.spark.common.SparkPlatform;
+import me.lucko.spark.common.util.SparkPlaceholder;
import org.bukkit.OfflinePlayer;
import org.bukkit.entity.Player;
@@ -44,7 +45,7 @@ public class SparkPlaceholderApi extends PlaceholderExpansion {
@Override
public String onRequest(OfflinePlayer p, String params) {
- return SparkPlaceholderProvider.respond(this.platform, params);
+ return SparkPlaceholder.resolveFormattingCode(this.platform, params);
}
@Override
diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkPlaceholderProvider.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkPlaceholderProvider.java
deleted file mode 100644
index 5b57857..0000000
--- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/placeholder/SparkPlaceholderProvider.java
+++ /dev/null
@@ -1,123 +0,0 @@
-/*
- * This file is part of spark.
- *
- * Copyright (c) lucko (Luck) <luck@lucko.me>
- * Copyright (c) contributors
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-package me.lucko.spark.bukkit.placeholder;
-
-import me.lucko.spark.common.SparkPlatform;
-import me.lucko.spark.common.monitor.cpu.CpuMonitor;
-import me.lucko.spark.common.monitor.tick.TickStatistics;
-import me.lucko.spark.common.util.StatisticFormatter;
-
-import net.kyori.adventure.text.Component;
-import net.kyori.adventure.text.TextComponent;
-import net.kyori.adventure.text.serializer.legacy.LegacyComponentSerializer;
-
-enum SparkPlaceholderProvider {
- ;
-
- public static TextComponent respondComponent(SparkPlatform platform, String placeholder) {
- if (placeholder.startsWith("tps")) {
- TickStatistics tickStatistics = platform.getTickStatistics();
- if (tickStatistics == null) {
- return null;
- }
-
- switch (placeholder) {
- case "tps":
- return Component.text()
- .append(StatisticFormatter.formatTps(tickStatistics.tps5Sec())).append(Component.text(", "))
- .append(StatisticFormatter.formatTps(tickStatistics.tps10Sec())).append(Component.text(", "))
- .append(StatisticFormatter.formatTps(tickStatistics.tps1Min())).append(Component.text(", "))
- .append(StatisticFormatter.formatTps(tickStatistics.tps5Min())).append(Component.text(", "))
- .append(StatisticFormatter.formatTps(tickStatistics.tps15Min()))
- .build();
- case "tps_5s":
- return StatisticFormatter.formatTps(tickStatistics.tps5Sec());
- case "tps_10s":
- return StatisticFormatter.formatTps(tickStatistics.tps10Sec());
- case "tps_1m":
- return StatisticFormatter.formatTps(tickStatistics.tps1Min());
- case "tps_5m":
- return StatisticFormatter.formatTps(tickStatistics.tps5Min());
- case "tps_15m":
- return StatisticFormatter.formatTps(tickStatistics.tps15Min());
- }
- }
-
- if (placeholder.startsWith("tickduration")) {
- TickStatistics tickStatistics = platform.getTickStatistics();
- if (tickStatistics == null || !tickStatistics.isDurationSupported()) {
- return null;
- }
-
- switch (placeholder) {
- case "tickduration":
- return Component.text()
- .append(StatisticFormatter.formatTickDurations(tickStatistics.duration10Sec())).append(Component.text("; "))
- .append(StatisticFormatter.formatTickDurations(tickStatistics.duration1Min()))
- .build();
- case "tickduration_10s":
- return StatisticFormatter.formatTickDurations(tickStatistics.duration10Sec());
- case "tickduration_1m":
- return StatisticFormatter.formatTickDurations(tickStatistics.duration1Min());
- }
- }
-
- if (placeholder.startsWith("cpu")) {
- switch (placeholder) {
- case "cpu_system":
- return Component.text()
- .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad10SecAvg())).append(Component.text(", "))
- .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad1MinAvg())).append(Component.text(", "))
- .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad15MinAvg()))
- .build();
- case "cpu_system_10s":
- return StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad10SecAvg());
- case "cpu_system_1m":
- return StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad1MinAvg());
- case "cpu_system_15m":
- return StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad15MinAvg());
- case "cpu_process":
- return Component.text()
- .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad10SecAvg())).append(Component.text(", "))
- .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad1MinAvg())).append(Component.text(", "))
- .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad15MinAvg()))
- .build();
- case "cpu_process_10s":
- return StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad10SecAvg());
- case "cpu_process_1m":
- return StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad1MinAvg());
- case "cpu_process_15m":
- return StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad15MinAvg());
- }
- }
-
- return null;
- }
-
- public static String respond(SparkPlatform platform, String placeholder) {
- TextComponent result = respondComponent(platform, placeholder);
- if (result == null) {
- return null;
- }
- return LegacyComponentSerializer.legacySection().serialize(result);
- }
-
-}