From a551317a0acc0f6ccb2d1bb66e8475b42387e59c Mon Sep 17 00:00:00 2001 From: Luck Date: Sat, 11 Jun 2022 21:05:08 +0100 Subject: Tidy up placeholder handling Co-authored-by: Caden Kriese --- .../lucko/spark/common/util/SparkPlaceholder.java | 191 +++++++++++++++++++++ 1 file changed, 191 insertions(+) create mode 100644 spark-common/src/main/java/me/lucko/spark/common/util/SparkPlaceholder.java (limited to 'spark-common/src') diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/SparkPlaceholder.java b/spark-common/src/main/java/me/lucko/spark/common/util/SparkPlaceholder.java new file mode 100644 index 0000000..be5bbc2 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/util/SparkPlaceholder.java @@ -0,0 +1,191 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.util; + +import me.lucko.spark.common.SparkPlatform; +import me.lucko.spark.common.monitor.cpu.CpuMonitor; +import me.lucko.spark.common.monitor.tick.TickStatistics; + +import net.kyori.adventure.text.Component; +import net.kyori.adventure.text.TextComponent; +import net.kyori.adventure.text.serializer.legacy.LegacyComponentSerializer; + +import java.util.Locale; +import java.util.function.BiFunction; + +public enum SparkPlaceholder { + + TPS((platform, arg) -> { + TickStatistics tickStatistics = platform.getTickStatistics(); + if (tickStatistics == null) { + return null; + } + + if (arg == null) { + return Component.text() + .append(StatisticFormatter.formatTps(tickStatistics.tps5Sec())).append(Component.text(", ")) + .append(StatisticFormatter.formatTps(tickStatistics.tps10Sec())).append(Component.text(", ")) + .append(StatisticFormatter.formatTps(tickStatistics.tps1Min())).append(Component.text(", ")) + .append(StatisticFormatter.formatTps(tickStatistics.tps5Min())).append(Component.text(", ")) + .append(StatisticFormatter.formatTps(tickStatistics.tps15Min())) + .build(); + } + + switch (arg) { + case "5s": + return StatisticFormatter.formatTps(tickStatistics.tps5Sec()); + case "10s": + return StatisticFormatter.formatTps(tickStatistics.tps10Sec()); + case "1m": + return StatisticFormatter.formatTps(tickStatistics.tps1Min()); + case "5m": + return StatisticFormatter.formatTps(tickStatistics.tps5Min()); + case "15m": + return StatisticFormatter.formatTps(tickStatistics.tps15Min()); + } + + return null; + }), + + TICKDURATION((platform, arg) -> { + TickStatistics tickStatistics = platform.getTickStatistics(); + if (tickStatistics == null || !tickStatistics.isDurationSupported()) { + return null; + } + + if (arg == null) { + return Component.text() + .append(StatisticFormatter.formatTickDurations(tickStatistics.duration10Sec())).append(Component.text("; ")) + .append(StatisticFormatter.formatTickDurations(tickStatistics.duration1Min())) + .build(); + } + + switch (arg) { + case "10s": + return 
StatisticFormatter.formatTickDurations(tickStatistics.duration10Sec()); + case "1m": + return StatisticFormatter.formatTickDurations(tickStatistics.duration1Min()); + } + + return null; + }), + + CPU_SYSTEM((platform, arg) -> { + if (arg == null) { + return Component.text() + .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad10SecAvg())).append(Component.text(", ")) + .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad1MinAvg())).append(Component.text(", ")) + .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad15MinAvg())) + .build(); + } + + switch (arg) { + case "10s": + return StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad10SecAvg()); + case "1m": + return StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad1MinAvg()); + case "15m": + return StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad15MinAvg()); + } + + return null; + }), + + CPU_PROCESS((platform, arg) -> { + if (arg == null) { + return Component.text() + .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad10SecAvg())).append(Component.text(", ")) + .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad1MinAvg())).append(Component.text(", ")) + .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad15MinAvg())) + .build(); + } + + switch (arg) { + case "10s": + return StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad10SecAvg()); + case "1m": + return StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad1MinAvg()); + case "15m": + return StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad15MinAvg()); + } + + return null; + }); + + private final String name; + private final BiFunction function; + + SparkPlaceholder(BiFunction function) { + this.name = name().toLowerCase(Locale.ROOT); + this.function = function; + } + + public String getName() { + return this.name; + } + + public TextComponent resolve(SparkPlatform platform, String arg) { + return this.function.apply(platform, arg); + } + + public static TextComponent resolveComponent(SparkPlatform platform, String placeholder) { + String[] parts = placeholder.split("_"); + + if (parts.length == 0) { + return null; + } + + String label = parts[0]; + + if (label.equals("tps")) { + String arg = parts.length < 2 ? null : parts[1]; + return TPS.resolve(platform, arg); + } + + if (label.equals("tickduration")) { + String arg = parts.length < 2 ? null : parts[1]; + return TICKDURATION.resolve(platform, arg); + } + + if (label.equals("cpu") && parts.length >= 2) { + String type = parts[1]; + String arg = parts.length < 3 ? 
null : parts[2]; + + if (type.equals("system")) { + return CPU_SYSTEM.resolve(platform, arg); + } + if (type.equals("process")) { + return CPU_PROCESS.resolve(platform, arg); + } + } + + return null; + } + + public static String resolveFormattingCode(SparkPlatform platform, String placeholder) { + TextComponent result = resolveComponent(platform, placeholder); + if (result == null) { + return null; + } + return LegacyComponentSerializer.legacySection().serialize(result); + } + +} -- cgit From 28cf3185c1374c4b5af277ef28482299694209a3 Mon Sep 17 00:00:00 2001 From: Luck Date: Mon, 20 Jun 2022 22:21:16 +0100 Subject: New paper config location (#217) --- .../spark/bukkit/BukkitServerConfigProvider.java | 70 +++++++++------------- .../serverconfig/AbstractServerConfigProvider.java | 63 +++++++++++++------ .../common/platform/serverconfig/ConfigParser.java | 31 ++++++++++ .../serverconfig/PropertiesConfigParser.java | 61 +++++++++++++++++++ .../serverconfig/PropertiesFileReader.java | 64 -------------------- 5 files changed, 165 insertions(+), 124 deletions(-) create mode 100644 spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ConfigParser.java create mode 100644 spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesConfigParser.java delete mode 100644 spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesFileReader.java (limited to 'spark-common/src') diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java index 953e171..ff1b55f 100644 --- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java +++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java @@ -22,13 +22,12 @@ package me.lucko.spark.bukkit; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; -import com.google.gson.Gson; import com.google.gson.GsonBuilder; -import com.google.gson.JsonElement; import com.google.gson.JsonSerializer; import me.lucko.spark.common.platform.serverconfig.AbstractServerConfigProvider; -import me.lucko.spark.common.platform.serverconfig.PropertiesFileReader; +import me.lucko.spark.common.platform.serverconfig.ConfigParser; +import me.lucko.spark.common.platform.serverconfig.PropertiesConfigParser; import org.bukkit.configuration.MemorySection; import org.bukkit.configuration.file.YamlConfiguration; @@ -37,23 +36,16 @@ import co.aikar.timings.TimingsManager; import java.io.BufferedReader; import java.io.IOException; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; -public class BukkitServerConfigProvider extends AbstractServerConfigProvider { - private static final Gson GSON = new GsonBuilder() - .registerTypeAdapter(MemorySection.class, (JsonSerializer) (obj, type, ctx) -> ctx.serialize(obj.getValues(false))) - .create(); +public class BukkitServerConfigProvider extends AbstractServerConfigProvider { /** A map of provided files and their type */ - private static final Map FILES; + private static final Map FILES; /** A collection of paths to be excluded from the files */ private static final Collection HIDDEN_PATHS; @@ -62,50 +54,46 @@ public class BukkitServerConfigProvider extends AbstractServerConfigProvider) 
(obj, type, ctx) -> ctx.serialize(obj.getValues(false))); + } - try (BufferedReader reader = Files.newBufferedReader(filePath, StandardCharsets.UTF_8)) { - Map values; + @Override + protected String rewriteConfigPath(String path) { + return path.startsWith("config/") + ? path.substring("config/".length()) + : path; + } - if (type == FileType.PROPERTIES) { - PropertiesFileReader propertiesReader = new PropertiesFileReader(reader); - values = propertiesReader.readProperties(); - } else if (type == FileType.YAML) { - YamlConfiguration config = YamlConfiguration.loadConfiguration(reader); - values = config.getValues(false); - } else { - throw new IllegalArgumentException("Unknown file type: " + type); - } + private enum YamlConfigParser implements ConfigParser { + INSTANCE; - return GSON.toJsonTree(values); + @Override + public Map parse(BufferedReader reader) throws IOException { + YamlConfiguration config = YamlConfiguration.loadConfiguration(reader); + return config.getValues(false); } } - enum FileType { - PROPERTIES, - YAML - } - static { - ImmutableMap.Builder files = ImmutableMap.builder() - .put("server.properties", FileType.PROPERTIES) - .put("bukkit.yml", FileType.YAML) - .put("spigot.yml", FileType.YAML) - .put("paper.yml", FileType.YAML) - .put("purpur.yml", FileType.YAML); + ImmutableMap.Builder files = ImmutableMap.builder() + .put("server.properties", PropertiesConfigParser.INSTANCE) + .put("bukkit.yml", YamlConfigParser.INSTANCE) + .put("spigot.yml", YamlConfigParser.INSTANCE) + .put("paper.yml", YamlConfigParser.INSTANCE) + .put("config/paper-global.yml", YamlConfigParser.INSTANCE) + .put("config/paper-world-defaults.yml", YamlConfigParser.INSTANCE) + .put("purpur.yml", YamlConfigParser.INSTANCE); for (String config : getSystemPropertyList("spark.serverconfigs.extra")) { - files.put(config, FileType.YAML); + files.put(config, YamlConfigParser.INSTANCE); } ImmutableSet.Builder hiddenPaths = ImmutableSet.builder() .add("database") .add("settings.bungeecord-addresses") .add("settings.velocity-support.secret") + .add("proxies.velocity.secret") .add("server-ip") .add("motd") .add("resource-pack") diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java index ead2131..0eef111 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java @@ -21,10 +21,17 @@ package me.lucko.spark.common.platform.serverconfig; import com.google.common.collect.ImmutableMap; +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; import com.google.gson.JsonElement; import com.google.gson.JsonObject; +import java.io.BufferedReader; import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; import java.util.Arrays; import java.util.Collection; import java.util.Collections; @@ -38,29 +45,37 @@ import java.util.stream.Collectors; * *

<p>This implementation is able to delete hidden paths from * the configurations before they are sent to the viewer.</p>
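+ * + * <p>Subclasses may override {@code customiseGson(GsonBuilder)} to register extra type adapters, and {@code rewriteConfigPath(String)} to rename config files before they are reported (both hooks are introduced further down in this patch).</p>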

- * - * @param the file type */ -public abstract class AbstractServerConfigProvider> implements ServerConfigProvider { - private final Map files; +public abstract class AbstractServerConfigProvider implements ServerConfigProvider { + private final Map files; private final Collection hiddenPaths; - protected AbstractServerConfigProvider(Map files, Collection hiddenPaths) { + private final Gson gson; + + protected AbstractServerConfigProvider(Map files, Collection hiddenPaths) { this.files = files; this.hiddenPaths = hiddenPaths; + + GsonBuilder gson = new GsonBuilder(); + customiseGson(gson); + this.gson = gson.create(); } @Override public final Map loadServerConfigurations() { ImmutableMap.Builder builder = ImmutableMap.builder(); - this.files.forEach((path, type) -> { + this.files.forEach((path, reader) -> { try { - JsonElement json = load(path, type); - if (json != null) { - delete(json, this.hiddenPaths); - builder.put(path, json); + JsonElement json = load(path, reader); + if (json == null) { + return; } + + delete(json, this.hiddenPaths); + + String name = rewriteConfigPath(path); + builder.put(name, json); } catch (Exception e) { e.printStackTrace(); } @@ -69,15 +84,25 @@ public abstract class AbstractServerConfigProvider> implements return builder.build(); } - /** - * Loads a file from the system. - * - * @param path the name of the file to load - * @param type the type of the file - * @return the loaded file - * @throws IOException if an error occurs performing i/o - */ - protected abstract JsonElement load(String path, T type) throws IOException; + private JsonElement load(String path, ConfigParser parser) throws IOException { + Path filePath = Paths.get(path); + if (!Files.exists(filePath)) { + return null; + } + + try (BufferedReader reader = Files.newBufferedReader(filePath, StandardCharsets.UTF_8)) { + Map values = parser.parse(reader); + return this.gson.toJsonTree(values); + } + } + + protected void customiseGson(GsonBuilder gson) { + + } + + protected String rewriteConfigPath(String path) { + return path; + } /** * Deletes the given paths from the json element. diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ConfigParser.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ConfigParser.java new file mode 100644 index 0000000..2dd15fe --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ConfigParser.java @@ -0,0 +1,31 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +package me.lucko.spark.common.platform.serverconfig; + +import java.io.BufferedReader; +import java.io.IOException; +import java.util.Map; + +public interface ConfigParser { + + Map parse(BufferedReader reader) throws IOException; + +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesConfigParser.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesConfigParser.java new file mode 100644 index 0000000..4c7c2c1 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesConfigParser.java @@ -0,0 +1,61 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.platform.serverconfig; + +import java.io.BufferedReader; +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import java.util.Properties; + +/** + * A {@link ConfigParser} that can parse a .properties file. + */ +public enum PropertiesConfigParser implements ConfigParser { + INSTANCE; + + @Override + public Map parse(BufferedReader reader) throws IOException { + Properties properties = new Properties(); + properties.load(reader); + + Map values = new HashMap<>(); + properties.forEach((k, v) -> { + String key = k.toString(); + String value = v.toString(); + + if ("true".equals(value) || "false".equals(value)) { + values.put(key, Boolean.parseBoolean(value)); + } else if (value.matches("\\d+")) { + try { + values.put(key, Long.parseLong(value)); + } catch (NumberFormatException e) { + values.put(key, value); + } + } else { + values.put(key, value); + } + }); + + return values; + } + +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesFileReader.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesFileReader.java deleted file mode 100644 index 8fc89d7..0000000 --- a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesFileReader.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * This file is part of spark. - * - * Copyright (c) lucko (Luck) - * Copyright (c) contributors - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
- */ - -package me.lucko.spark.common.platform.serverconfig; - -import java.io.FilterReader; -import java.io.IOException; -import java.io.Reader; -import java.util.HashMap; -import java.util.Map; -import java.util.Properties; - -/** - * A {@link Reader} that can parse a .properties file. - */ -public class PropertiesFileReader extends FilterReader { - - public PropertiesFileReader(Reader in) { - super(in); - } - - public Map readProperties() throws IOException { - Properties properties = new Properties(); - properties.load(this); - - Map values = new HashMap<>(); - properties.forEach((k, v) -> { - String key = k.toString(); - String value = v.toString(); - - if ("true".equals(value) || "false".equals(value)) { - values.put(key, Boolean.parseBoolean(value)); - } else if (value.matches("\\d+")) { - try { - values.put(key, Long.parseLong(value)); - } catch (NumberFormatException e) { - values.put(key, value); - } - } else { - values.put(key, value); - } - }); - - return values; - } - -} -- cgit From 4d45579d2bf57b417d5d3eca041c2131177183e4 Mon Sep 17 00:00:00 2001 From: Luck Date: Sat, 25 Jun 2022 22:48:55 +0100 Subject: Add providers for world (entity/chunk) statistics --- .../me/lucko/spark/bukkit/BukkitSparkPlugin.java | 13 +- .../spark/bukkit/BukkitWorldInfoProvider.java | 87 +++++++++ .../java/me/lucko/spark/common/SparkPlugin.java | 19 ++ .../platform/PlatformStatisticsProvider.java | 15 ++ .../common/platform/world/AbstractChunkInfo.java | 55 ++++++ .../spark/common/platform/world/ChunkInfo.java | 44 +++++ .../spark/common/platform/world/CountMap.java | 110 +++++++++++ .../common/platform/world/WorldInfoProvider.java | 57 ++++++ .../platform/world/WorldStatisticsProvider.java | 216 +++++++++++++++++++++ spark-common/src/main/proto/spark/spark.proto | 27 ++- .../spark/fabric/FabricWorldInfoProvider.java | 145 ++++++++++++++ .../fabric/mixin/ClientEntityManagerAccessor.java | 36 ++++ .../spark/fabric/mixin/ClientWorldAccessor.java | 36 ++++ .../fabric/mixin/ServerEntityManagerAccessor.java | 36 ++++ .../spark/fabric/mixin/ServerWorldAccessor.java | 36 ++++ .../fabric/plugin/FabricClientSparkPlugin.java | 12 ++ .../fabric/plugin/FabricServerSparkPlugin.java | 12 ++ spark-fabric/src/main/resources/fabric.mod.json | 3 + spark-fabric/src/main/resources/spark.mixins.json | 14 ++ spark-forge/build.gradle | 1 + .../lucko/spark/forge/ForgeWorldInfoProvider.java | 141 ++++++++++++++ .../spark/forge/plugin/ForgeClientSparkPlugin.java | 12 ++ .../spark/forge/plugin/ForgeServerSparkPlugin.java | 12 ++ .../main/resources/META-INF/accesstransformer.cfg | 4 + .../me/lucko/spark/nukkit/NukkitSparkPlugin.java | 8 +- .../me/lucko/spark/sponge/Sponge7SparkPlugin.java | 32 ++- .../spark/sponge/Sponge7WorldInfoProvider.java | 87 +++++++++ .../me/lucko/spark/sponge/Sponge8SparkPlugin.java | 36 +++- .../spark/sponge/Sponge8WorldInfoProvider.java | 88 +++++++++ 29 files changed, 1376 insertions(+), 18 deletions(-) create mode 100644 spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitWorldInfoProvider.java create mode 100644 spark-common/src/main/java/me/lucko/spark/common/platform/world/AbstractChunkInfo.java create mode 100644 spark-common/src/main/java/me/lucko/spark/common/platform/world/ChunkInfo.java create mode 100644 spark-common/src/main/java/me/lucko/spark/common/platform/world/CountMap.java create mode 100644 spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldInfoProvider.java create mode 100644 
spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java create mode 100644 spark-fabric/src/main/java/me/lucko/spark/fabric/FabricWorldInfoProvider.java create mode 100644 spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientEntityManagerAccessor.java create mode 100644 spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientWorldAccessor.java create mode 100644 spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerEntityManagerAccessor.java create mode 100644 spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerWorldAccessor.java create mode 100644 spark-fabric/src/main/resources/spark.mixins.json create mode 100644 spark-forge/src/main/java/me/lucko/spark/forge/ForgeWorldInfoProvider.java create mode 100644 spark-forge/src/main/resources/META-INF/accesstransformer.cfg create mode 100644 spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7WorldInfoProvider.java create mode 100644 spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8WorldInfoProvider.java (limited to 'spark-common/src') diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java index 9727277..fddd66b 100644 --- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java +++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java @@ -28,6 +28,7 @@ import me.lucko.spark.common.SparkPlugin; import me.lucko.spark.common.monitor.ping.PlayerPingProvider; import me.lucko.spark.common.platform.PlatformInfo; import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider; +import me.lucko.spark.common.platform.world.WorldInfoProvider; import me.lucko.spark.common.sampler.ThreadDumper; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.tick.TickReporter; @@ -136,7 +137,12 @@ public class BukkitSparkPlugin extends JavaPlugin implements SparkPlugin { @Override public void executeAsync(Runnable task) { - getServer().getScheduler().runTaskAsynchronously(BukkitSparkPlugin.this, task); + getServer().getScheduler().runTaskAsynchronously(this, task); + } + + @Override + public void executeSync(Runnable task) { + getServer().getScheduler().runTask(this, task); } @Override @@ -187,6 +193,11 @@ public class BukkitSparkPlugin extends JavaPlugin implements SparkPlugin { return new BukkitServerConfigProvider(); } + @Override + public WorldInfoProvider createWorldInfoProvider() { + return new BukkitWorldInfoProvider(getServer()); + } + @Override public PlatformInfo getPlatformInfo() { return new BukkitPlatformInfo(getServer()); diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitWorldInfoProvider.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitWorldInfoProvider.java new file mode 100644 index 0000000..f34899b --- /dev/null +++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitWorldInfoProvider.java @@ -0,0 +1,87 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.bukkit; + +import me.lucko.spark.common.platform.world.AbstractChunkInfo; +import me.lucko.spark.common.platform.world.CountMap; +import me.lucko.spark.common.platform.world.WorldInfoProvider; + +import org.bukkit.Chunk; +import org.bukkit.Server; +import org.bukkit.World; +import org.bukkit.entity.Entity; +import org.bukkit.entity.EntityType; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +public class BukkitWorldInfoProvider implements WorldInfoProvider { + private final Server server; + + public BukkitWorldInfoProvider(Server server) { + this.server = server; + } + + @Override + public Result poll() { + Result data = new Result<>(); + + for (World world : this.server.getWorlds()) { + Chunk[] chunks = world.getLoadedChunks(); + + List list = new ArrayList<>(chunks.length); + for (Chunk chunk : chunks) { + list.add(new BukkitChunkInfo(chunk)); + } + + data.put(world.getName(), list); + } + + return data; + } + + static final class BukkitChunkInfo extends AbstractChunkInfo { + private final CountMap entityCounts; + + BukkitChunkInfo(Chunk chunk) { + super(chunk.getX(), chunk.getZ()); + + this.entityCounts = new CountMap.EnumKeyed<>(EntityType.class); + for (Entity entity : chunk.getEntities()) { + this.entityCounts.increment(entity.getType()); + } + } + + @Override + public CountMap getEntityCounts() { + return this.entityCounts; + } + + @SuppressWarnings("deprecation") + @Override + public String entityTypeName(EntityType type) { + return type.getName(); + } + + } + +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java index b817df1..1116b04 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java +++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java @@ -25,6 +25,7 @@ import me.lucko.spark.common.command.sender.CommandSender; import me.lucko.spark.common.monitor.ping.PlayerPingProvider; import me.lucko.spark.common.platform.PlatformInfo; import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider; +import me.lucko.spark.common.platform.world.WorldInfoProvider; import me.lucko.spark.common.sampler.ThreadDumper; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.tick.TickReporter; @@ -74,6 +75,15 @@ public interface SparkPlugin { */ void executeAsync(Runnable task); + /** + * Executes the given {@link Runnable} on the server/client main thread. + * + * @param task the task + */ + default void executeSync(Runnable task) { + throw new UnsupportedOperationException(); + } + /** * Print to the plugin logger. * @@ -142,6 +152,15 @@ public interface SparkPlugin { return ServerConfigProvider.NO_OP; } + /** + * Creates a world info provider. + * + * @return the world info provider function + */ + default WorldInfoProvider createWorldInfoProvider() { + return WorldInfoProvider.NO_OP; + } + /** * Gets information for the platform. 
* diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java index f35bbbe..49cfed5 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java @@ -30,8 +30,11 @@ import me.lucko.spark.common.monitor.net.NetworkInterfaceAverages; import me.lucko.spark.common.monitor.net.NetworkMonitor; import me.lucko.spark.common.monitor.ping.PingStatistics; import me.lucko.spark.common.monitor.tick.TickStatistics; +import me.lucko.spark.common.platform.world.WorldInfoProvider; +import me.lucko.spark.common.platform.world.WorldStatisticsProvider; import me.lucko.spark.proto.SparkProtos.PlatformStatistics; import me.lucko.spark.proto.SparkProtos.SystemStatistics; +import me.lucko.spark.proto.SparkProtos.WorldStatistics; import java.lang.management.ManagementFactory; import java.lang.management.MemoryUsage; @@ -182,6 +185,18 @@ public class PlatformStatisticsProvider { builder.setPlayerCount(playerCount); } + try { + WorldInfoProvider worldInfo = this.platform.getPlugin().createWorldInfoProvider(); + WorldStatisticsProvider worldStatisticsProvider = new WorldStatisticsProvider(this.platform, worldInfo); + WorldStatistics worldStatistics = worldStatisticsProvider.getWorldStatistics(); + if (worldStatistics != null) { + builder.setWorld(worldStatistics); + } + } catch (Exception e) { + e.printStackTrace(); + } + + return builder.build(); } diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/AbstractChunkInfo.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/AbstractChunkInfo.java new file mode 100644 index 0000000..80026cd --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/AbstractChunkInfo.java @@ -0,0 +1,55 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +package me.lucko.spark.common.platform.world; + +public abstract class AbstractChunkInfo implements ChunkInfo { + private final int x; + private final int z; + + protected AbstractChunkInfo(int x, int z) { + this.x = x; + this.z = z; + } + + @Override + public int getX() { + return this.x; + } + + @Override + public int getZ() { + return this.z; + } + + @Override + public boolean equals(Object obj) { + if (obj == this) return true; + if (!(obj instanceof AbstractChunkInfo)) return false; + AbstractChunkInfo that = (AbstractChunkInfo) obj; + return this.x == that.x && this.z == that.z; + } + + @Override + public int hashCode() { + return this.x ^ this.z; + } + +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/ChunkInfo.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/ChunkInfo.java new file mode 100644 index 0000000..2193a50 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/ChunkInfo.java @@ -0,0 +1,44 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.platform.world; + +/** + * Information about a given chunk. + * + * @param the type used to describe entities + */ +public interface ChunkInfo { + + int getX(); + + int getZ(); + + CountMap getEntityCounts(); + + /** + * Converts entity type {@link E} to a string. + * + * @param type the entity type + * @return a string + */ + String entityTypeName(E type); + +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/CountMap.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/CountMap.java new file mode 100644 index 0000000..3083266 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/CountMap.java @@ -0,0 +1,110 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.platform.world; + +import java.util.EnumMap; +import java.util.Map; +import java.util.concurrent.atomic.AtomicInteger; + +/** + * A map of (key) -> count. 
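+ * + * <p>Illustrative example: a {@code CountMap.Simple<EntityType>} backed by a {@code HashMap} keeps one {@code AtomicInteger} per key, and {@code increment(key)} bumps both that per-key counter and the shared {@code total()}.</p>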
+ * + * @param the key type + */ +public interface CountMap { + + /** + * Increment the counter for the given key + * + * @param key the key + */ + void increment(T key); + + /** + * Add to the counter for the given key + * + * @param key the key + */ + void add(T key, int delta); + + AtomicInteger total(); + + Map asMap(); + + /** + * A simple {@link CountMap} backed by the provided {@link Map} + * + * @param the key type + */ + class Simple implements CountMap { + private final Map counts; + private final AtomicInteger total; + + public Simple(Map counts) { + this.counts = counts; + this.total = new AtomicInteger(); + } + + @Override + public void increment(T key) { + AtomicInteger counter = this.counts.get(key); + if (counter == null) { + counter = new AtomicInteger(); + this.counts.put(key, counter); + } + counter.incrementAndGet(); + this.total.incrementAndGet(); + } + + @Override + public void add(T key, int delta) { + AtomicInteger counter = this.counts.get(key); + if (counter == null) { + counter = new AtomicInteger(); + this.counts.put(key, counter); + } + counter.addAndGet(delta); + this.total.addAndGet(delta); + } + + @Override + public AtomicInteger total() { + return this.total; + } + + @Override + public Map asMap() { + return this.counts; + } + } + + /** + * A {@link CountMap} backed by an {@link EnumMap}. + * + * @param the key type - must be an enum + */ + class EnumKeyed> extends Simple { + public EnumKeyed(Class keyClass) { + super(new EnumMap<>(keyClass)); + } + } + +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldInfoProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldInfoProvider.java new file mode 100644 index 0000000..9494816 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldInfoProvider.java @@ -0,0 +1,57 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.platform.world; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** + * Provides information about worlds. + */ +public interface WorldInfoProvider { + + WorldInfoProvider NO_OP = () -> null; + + /** + * Polls for information. 
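+ * + * <p>Providers that read live world state are expected to be polled on the main thread: when {@link #mustCallSync()} returns {@code true} (the default), {@code WorldStatisticsProvider} schedules this call via {@code SparkPlugin#executeSync}.</p>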
+ * + * @return the information + */ + Result> poll(); + + default boolean mustCallSync() { + return true; + } + + final class Result { + private final Map> worlds = new HashMap<>(); + + public void put(String worldName, List chunks) { + this.worlds.put(worldName, chunks); + } + + public Map> getWorlds() { + return this.worlds; + } + } + +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java new file mode 100644 index 0000000..864a296 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java @@ -0,0 +1,216 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.platform.world; + +import me.lucko.spark.common.SparkPlatform; +import me.lucko.spark.common.SparkPlugin; +import me.lucko.spark.proto.SparkProtos.WorldStatistics; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Set; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.logging.Level; + +public class WorldStatisticsProvider { + private final SparkPlatform platform; + private final WorldInfoProvider provider; + + public WorldStatisticsProvider(SparkPlatform platform, WorldInfoProvider provider) { + this.platform = platform; + this.provider = provider; + } + + public WorldStatistics getWorldStatistics() { + if (this.provider == WorldInfoProvider.NO_OP) { + return null; + } + + CompletableFuture>> future; + + if (this.provider.mustCallSync()) { + SparkPlugin plugin = this.platform.getPlugin(); + future = CompletableFuture.supplyAsync(this.provider::poll, plugin::executeSync); + } else { + future = CompletableFuture.completedFuture(this.provider.poll()); + } + + WorldInfoProvider.Result> result; + try { + result = future.get(5, TimeUnit.SECONDS); + } catch (InterruptedException | ExecutionException e) { + throw new RuntimeException(e); + } catch (TimeoutException e) { + this.platform.getPlugin().log(Level.WARNING, "Timed out waiting for world statistics"); + return null; + } + + if (result == null) { + return null; + } + + WorldStatistics.Builder stats = WorldStatistics.newBuilder(); + + AtomicInteger combinedTotal = new AtomicInteger(); + CountMap combined = new CountMap.Simple<>(new HashMap<>()); + + result.getWorlds().forEach((worldName, chunks) -> { + WorldStatistics.World.Builder builder = WorldStatistics.World.newBuilder(); + builder.setName(worldName); + + List regions = groupIntoRegions(chunks); + + int total = 0; + + for 
(Region region : regions) { + builder.addRegions(regionToProto(region, combined)); + total += region.getTotalEntities().get(); + } + + builder.setTotalEntities(total); + combinedTotal.addAndGet(total); + + stats.addWorlds(builder.build()); + }); + + stats.setTotalEntities(combinedTotal.get()); + combined.asMap().forEach((key, value) -> stats.putEntityCounts(key, value.get())); + + return stats.build(); + } + + private static WorldStatistics.Region regionToProto(Region region, CountMap combined) { + WorldStatistics.Region.Builder builder = WorldStatistics.Region.newBuilder(); + builder.setTotalEntities(region.getTotalEntities().get()); + for (ChunkInfo chunk : region.getChunks()) { + builder.addChunks(chunkToProto(chunk, combined)); + } + return builder.build(); + } + + private static WorldStatistics.Chunk chunkToProto(ChunkInfo chunk, CountMap combined) { + WorldStatistics.Chunk.Builder builder = WorldStatistics.Chunk.newBuilder(); + builder.setX(chunk.getX()); + builder.setZ(chunk.getZ()); + builder.setTotalEntities(chunk.getEntityCounts().total().get()); + chunk.getEntityCounts().asMap().forEach((key, value) -> { + String name = chunk.entityTypeName(key); + int count = value.get(); + + builder.putEntityCounts(name, count); + combined.add(name, count); + }); + return builder.build(); + } + + private static List groupIntoRegions(List> chunks) { + List regions = new ArrayList<>(); + + for (ChunkInfo chunk : chunks) { + CountMap counts = chunk.getEntityCounts(); + if (counts.total().get() == 0) { + continue; + } + + boolean found = false; + + for (Region region : regions) { + if (region.isAdjacent(chunk)) { + found = true; + region.add(chunk); + + // if the chunk is adjacent to more than one region, merge the regions together + for (Iterator iterator = regions.iterator(); iterator.hasNext(); ) { + Region otherRegion = iterator.next(); + if (region != otherRegion && otherRegion.isAdjacent(chunk)) { + iterator.remove(); + region.merge(otherRegion); + } + } + + break; + } + } + + if (!found) { + regions.add(new Region(chunk)); + } + } + + return regions; + } + + /** + * A map of nearby chunks grouped together by Euclidean distance. 
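+ * + * <p>Two chunks belong to the same region when their squared Euclidean distance is at most {@code DISTANCE_THRESHOLD}; a chunk adjacent to several existing regions causes those regions to be merged into one.</p>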
+ */ + private static final class Region { + private static final int DISTANCE_THRESHOLD = 2; + private final Set> chunks; + private final AtomicInteger totalEntities; + + private Region(ChunkInfo initial) { + this.chunks = new HashSet<>(); + this.chunks.add(initial); + this.totalEntities = new AtomicInteger(initial.getEntityCounts().total().get()); + } + + public Set> getChunks() { + return this.chunks; + } + + public AtomicInteger getTotalEntities() { + return this.totalEntities; + } + + public boolean isAdjacent(ChunkInfo chunk) { + for (ChunkInfo el : this.chunks) { + if (squaredEuclideanDistance(el, chunk) <= DISTANCE_THRESHOLD) { + return true; + } + } + return false; + } + + public void add(ChunkInfo chunk) { + this.chunks.add(chunk); + this.totalEntities.addAndGet(chunk.getEntityCounts().total().get()); + } + + public void merge(Region group) { + this.chunks.addAll(group.getChunks()); + this.totalEntities.addAndGet(group.getTotalEntities().get()); + } + + private static long squaredEuclideanDistance(ChunkInfo a, ChunkInfo b) { + long dx = a.getX() - b.getX(); + long dz = a.getZ() - b.getZ(); + return (dx * dx) + (dz * dz); + } + } + +} diff --git a/spark-common/src/main/proto/spark/spark.proto b/spark-common/src/main/proto/spark/spark.proto index ec0aa88..2ea341f 100644 --- a/spark-common/src/main/proto/spark/spark.proto +++ b/spark-common/src/main/proto/spark/spark.proto @@ -94,7 +94,8 @@ message PlatformStatistics { Tps tps = 4; // optional Mspt mspt = 5; // optional Ping ping = 6; // optional - int64 player_count = 7; + int64 player_count = 7; // optional + WorldStatistics world = 8; // optional message Memory { MemoryPool heap = 1; @@ -127,6 +128,30 @@ message PlatformStatistics { } } +message WorldStatistics { + int32 total_entities = 1; + map entity_counts = 2; + repeated World worlds = 3; + + message World { + string name = 1; + int32 total_entities = 2; + repeated Region regions = 3; + } + + message Region { + int32 total_entities = 1; + repeated Chunk chunks = 2; + } + + message Chunk { + int32 x = 1; + int32 z = 2; + int32 total_entities = 3; + map entity_counts = 4; + } +} + message RollingAverageValues { double mean = 1; double max = 2; diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricWorldInfoProvider.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricWorldInfoProvider.java new file mode 100644 index 0000000..fddcf58 --- /dev/null +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricWorldInfoProvider.java @@ -0,0 +1,145 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +package me.lucko.spark.fabric; + +import it.unimi.dsi.fastutil.longs.LongIterator; +import it.unimi.dsi.fastutil.longs.LongSet; + +import me.lucko.spark.common.platform.world.AbstractChunkInfo; +import me.lucko.spark.common.platform.world.CountMap; +import me.lucko.spark.common.platform.world.WorldInfoProvider; +import me.lucko.spark.fabric.mixin.ClientEntityManagerAccessor; +import me.lucko.spark.fabric.mixin.ClientWorldAccessor; +import me.lucko.spark.fabric.mixin.ServerEntityManagerAccessor; +import me.lucko.spark.fabric.mixin.ServerWorldAccessor; + +import net.minecraft.client.MinecraftClient; +import net.minecraft.client.world.ClientEntityManager; +import net.minecraft.client.world.ClientWorld; +import net.minecraft.entity.Entity; +import net.minecraft.entity.EntityType; +import net.minecraft.server.MinecraftServer; +import net.minecraft.server.world.ServerEntityManager; +import net.minecraft.server.world.ServerWorld; +import net.minecraft.util.math.ChunkPos; +import net.minecraft.world.entity.EntityTrackingSection; +import net.minecraft.world.entity.SectionedEntityCache; +import net.minecraft.world.level.ServerWorldProperties; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.stream.Stream; + +public abstract class FabricWorldInfoProvider implements WorldInfoProvider { + + protected List getChunksFromCache(SectionedEntityCache cache) { + LongSet loadedChunks = cache.getChunkPositions(); + List list = new ArrayList<>(loadedChunks.size()); + + for (LongIterator iterator = loadedChunks.iterator(); iterator.hasNext(); ) { + long chunkPos = iterator.nextLong(); + Stream> sections = cache.getTrackingSections(chunkPos); + + list.add(new FabricChunkInfo(chunkPos, sections)); + } + + return list; + } + + public static final class Server extends FabricWorldInfoProvider { + private final MinecraftServer server; + + public Server(MinecraftServer server) { + this.server = server; + } + + @Override + public Result poll() { + Result data = new Result<>(); + + for (ServerWorld world : this.server.getWorlds()) { + ServerEntityManager entityManager = ((ServerWorldAccessor) world).getEntityManager(); + SectionedEntityCache cache = ((ServerEntityManagerAccessor) entityManager).getCache(); + + List list = getChunksFromCache(cache); + data.put(((ServerWorldProperties) world.getLevelProperties()).getLevelName(), list); + } + + return data; + } + } + + public static final class Client extends FabricWorldInfoProvider { + private final MinecraftClient client; + + public Client(MinecraftClient client) { + this.client = client; + } + + @Override + public Result poll() { + Result data = new Result<>(); + + ClientWorld world = this.client.world; + if (world == null) { + return null; + } + + ClientEntityManager entityManager = ((ClientWorldAccessor) world).getEntityManager(); + SectionedEntityCache cache = ((ClientEntityManagerAccessor) entityManager).getCache(); + + List list = getChunksFromCache(cache); + data.put(world.getDimensionKey().getValue().getPath(), list); + + return data; + } + } + + static final class FabricChunkInfo extends AbstractChunkInfo> { + private final CountMap> entityCounts; + + FabricChunkInfo(long chunkPos, Stream> entities) { + super(ChunkPos.getPackedX(chunkPos), ChunkPos.getPackedZ(chunkPos)); + + this.entityCounts = new CountMap.Simple<>(new HashMap<>()); + entities.forEach(section -> { + if (section.getStatus().shouldTrack()) { + section.stream().forEach(entity -> + this.entityCounts.increment(entity.getType()) + ); + } + }); 
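+ // only sections whose status says they should be tracked were counted above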
+ } + + @Override + public CountMap> getEntityCounts() { + return this.entityCounts; + } + + @Override + public String entityTypeName(EntityType type) { + return EntityType.getId(type).toString(); + } + } + +} + diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientEntityManagerAccessor.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientEntityManagerAccessor.java new file mode 100644 index 0000000..88c9521 --- /dev/null +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientEntityManagerAccessor.java @@ -0,0 +1,36 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.fabric.mixin; + +import net.minecraft.client.world.ClientEntityManager; +import net.minecraft.entity.Entity; +import net.minecraft.world.entity.SectionedEntityCache; + +import org.spongepowered.asm.mixin.Mixin; +import org.spongepowered.asm.mixin.gen.Accessor; + +@Mixin(ClientEntityManager.class) +public interface ClientEntityManagerAccessor { + + @Accessor + SectionedEntityCache getCache(); + +} diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientWorldAccessor.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientWorldAccessor.java new file mode 100644 index 0000000..01562ef --- /dev/null +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientWorldAccessor.java @@ -0,0 +1,36 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +package me.lucko.spark.fabric.mixin; + +import net.minecraft.client.world.ClientEntityManager; +import net.minecraft.client.world.ClientWorld; +import net.minecraft.entity.Entity; + +import org.spongepowered.asm.mixin.Mixin; +import org.spongepowered.asm.mixin.gen.Accessor; + +@Mixin(ClientWorld.class) +public interface ClientWorldAccessor { + + @Accessor + ClientEntityManager getEntityManager(); + +} diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerEntityManagerAccessor.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerEntityManagerAccessor.java new file mode 100644 index 0000000..160a12b --- /dev/null +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerEntityManagerAccessor.java @@ -0,0 +1,36 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.fabric.mixin; + +import net.minecraft.entity.Entity; +import net.minecraft.server.world.ServerEntityManager; +import net.minecraft.world.entity.SectionedEntityCache; + +import org.spongepowered.asm.mixin.Mixin; +import org.spongepowered.asm.mixin.gen.Accessor; + +@Mixin(ServerEntityManager.class) +public interface ServerEntityManagerAccessor { + + @Accessor + SectionedEntityCache getCache(); + +} diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerWorldAccessor.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerWorldAccessor.java new file mode 100644 index 0000000..cf2e7e8 --- /dev/null +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerWorldAccessor.java @@ -0,0 +1,36 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricClientSparkPlugin.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricClientSparkPlugin.java
index e94d697..1876658 100644
--- a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricClientSparkPlugin.java
+++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricClientSparkPlugin.java
@@ -29,6 +29,7 @@ import com.mojang.brigadier.suggestion.Suggestions;
 import com.mojang.brigadier.suggestion.SuggestionsBuilder;
 
 import me.lucko.spark.common.platform.PlatformInfo;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
 import me.lucko.spark.common.tick.TickHook;
 import me.lucko.spark.common.tick.TickReporter;
 import me.lucko.spark.fabric.FabricCommandSender;
@@ -36,6 +37,7 @@ import me.lucko.spark.fabric.FabricPlatformInfo;
 import me.lucko.spark.fabric.FabricSparkMod;
 import me.lucko.spark.fabric.FabricTickHook;
 import me.lucko.spark.fabric.FabricTickReporter;
+import me.lucko.spark.fabric.FabricWorldInfoProvider;
 
 import net.fabricmc.fabric.api.client.command.v2.ClientCommandRegistrationCallback;
 import net.fabricmc.fabric.api.client.command.v2.FabricClientCommandSource;
@@ -112,6 +114,11 @@ public class FabricClientSparkPlugin extends FabricSparkPlugin implements Comman
         return Stream.of(new FabricCommandSender(this.minecraft.player, this));
     }
 
+    @Override
+    public void executeSync(Runnable task) {
+        this.minecraft.executeSync(task);
+    }
+
     @Override
     public TickHook createTickHook() {
         return new FabricTickHook.Client();
@@ -122,6 +129,11 @@ public class FabricClientSparkPlugin extends FabricSparkPlugin implements Comman
         return new FabricTickReporter.Client();
     }
 
+    @Override
+    public WorldInfoProvider createWorldInfoProvider() {
+        return new FabricWorldInfoProvider.Client(this.minecraft);
+    }
+
     @Override
     public PlatformInfo getPlatformInfo() {
         return new FabricPlatformInfo(PlatformInfo.Type.CLIENT);
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java
index 3d1a0e7..2283a84 100644
--- a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java
+++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java
@@ -31,6 +31,7 @@ import com.mojang.brigadier.suggestion.SuggestionsBuilder;
 import me.lucko.fabric.api.permissions.v0.Permissions;
 import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
 import me.lucko.spark.common.platform.PlatformInfo;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
 import me.lucko.spark.common.tick.TickHook;
 import me.lucko.spark.common.tick.TickReporter;
 import me.lucko.spark.fabric.FabricCommandSender;
@@ -39,6 +40,7 @@ import me.lucko.spark.fabric.FabricPlayerPingProvider;
 import me.lucko.spark.fabric.FabricSparkMod;
 import me.lucko.spark.fabric.FabricTickHook;
 import me.lucko.spark.fabric.FabricTickReporter;
+import me.lucko.spark.fabric.FabricWorldInfoProvider;
 import me.lucko.spark.fabric.placeholder.SparkFabricPlaceholderApi;
 
 import
net.fabricmc.loader.api.FabricLoader; @@ -126,6 +128,11 @@ public class FabricServerSparkPlugin extends FabricSparkPlugin implements Comman ).map(sender -> new FabricCommandSender(sender, this)); } + @Override + public void executeSync(Runnable task) { + this.server.executeSync(task); + } + @Override public TickHook createTickHook() { return new FabricTickHook.Server(); @@ -141,6 +148,11 @@ public class FabricServerSparkPlugin extends FabricSparkPlugin implements Comman return new FabricPlayerPingProvider(this.server); } + @Override + public WorldInfoProvider createWorldInfoProvider() { + return new FabricWorldInfoProvider.Server(this.server); + } + @Override public PlatformInfo getPlatformInfo() { return new FabricPlatformInfo(PlatformInfo.Type.SERVER); diff --git a/spark-fabric/src/main/resources/fabric.mod.json b/spark-fabric/src/main/resources/fabric.mod.json index e2e600d..f1f0ad4 100644 --- a/spark-fabric/src/main/resources/fabric.mod.json +++ b/spark-fabric/src/main/resources/fabric.mod.json @@ -23,6 +23,9 @@ "me.lucko.spark.fabric.FabricSparkMod::initializeClient" ] }, + "mixins": [ + "spark.mixins.json" + ], "depends": { "fabricloader": ">=0.4.0", "fabric-api-base": "*", diff --git a/spark-fabric/src/main/resources/spark.mixins.json b/spark-fabric/src/main/resources/spark.mixins.json new file mode 100644 index 0000000..09587fe --- /dev/null +++ b/spark-fabric/src/main/resources/spark.mixins.json @@ -0,0 +1,14 @@ +{ + "required": true, + "package": "me.lucko.spark.fabric.mixin", + "compatibilityLevel": "JAVA_17", + "mixins": [], + "client": [ + "ClientEntityManagerAccessor", + "ClientWorldAccessor" + ], + "server": [ + "ServerEntityManagerAccessor", + "ServerWorldAccessor" + ] +} \ No newline at end of file diff --git a/spark-forge/build.gradle b/spark-forge/build.gradle index 210122b..3f46b95 100644 --- a/spark-forge/build.gradle +++ b/spark-forge/build.gradle @@ -21,6 +21,7 @@ tasks.withType(JavaCompile) { minecraft { mappings channel: 'official', version: '1.19' + accessTransformer = file('src/main/resources/META-INF/accesstransformer.cfg') } configurations { diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeWorldInfoProvider.java b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeWorldInfoProvider.java new file mode 100644 index 0000000..b17dab5 --- /dev/null +++ b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeWorldInfoProvider.java @@ -0,0 +1,141 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */
+
+package me.lucko.spark.forge;
+
+import it.unimi.dsi.fastutil.longs.LongIterator;
+import it.unimi.dsi.fastutil.longs.LongSet;
+
+import me.lucko.spark.common.platform.world.AbstractChunkInfo;
+import me.lucko.spark.common.platform.world.CountMap;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
+
+import net.minecraft.client.Minecraft;
+import net.minecraft.client.multiplayer.ClientLevel;
+import net.minecraft.server.MinecraftServer;
+import net.minecraft.server.level.ServerLevel;
+import net.minecraft.world.entity.Entity;
+import net.minecraft.world.entity.EntityType;
+import net.minecraft.world.level.ChunkPos;
+import net.minecraft.world.level.entity.EntitySection;
+import net.minecraft.world.level.entity.EntitySectionStorage;
+import net.minecraft.world.level.entity.PersistentEntitySectionManager;
+import net.minecraft.world.level.entity.TransientEntitySectionManager;
+import net.minecraft.world.level.storage.ServerLevelData;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.stream.Stream;
+
+public abstract class ForgeWorldInfoProvider implements WorldInfoProvider {
+
+    protected List<ForgeChunkInfo> getChunksFromCache(EntitySectionStorage<Entity> cache) {
+        LongSet loadedChunks = cache.getAllChunksWithExistingSections();
+        List<ForgeChunkInfo> list = new ArrayList<>(loadedChunks.size());
+
+        for (LongIterator iterator = loadedChunks.iterator(); iterator.hasNext(); ) {
+            long chunkPos = iterator.nextLong();
+            Stream<EntitySection<Entity>> sections = cache.getExistingSectionsInChunk(chunkPos);
+
+            list.add(new ForgeChunkInfo(chunkPos, sections));
+        }
+
+        return list;
+    }
+
+    public static final class Server extends ForgeWorldInfoProvider {
+        private final MinecraftServer server;
+
+        public Server(MinecraftServer server) {
+            this.server = server;
+        }
+
+        @Override
+        public Result<ForgeChunkInfo> poll() {
+            Result<ForgeChunkInfo> data = new Result<>();
+
+            for (ServerLevel level : this.server.getAllLevels()) {
+                PersistentEntitySectionManager<Entity> entityManager = level.entityManager;
+                EntitySectionStorage<Entity> cache = entityManager.sectionStorage;
+
+                List<ForgeChunkInfo> list = getChunksFromCache(cache);
+                data.put(((ServerLevelData) level.getLevelData()).getLevelName(), list);
+            }
+
+            return data;
+        }
+    }
+
+    public static final class Client extends ForgeWorldInfoProvider {
+        private final Minecraft client;
+
+        public Client(Minecraft client) {
+            this.client = client;
+        }
+
+        @Override
+        public Result<ForgeChunkInfo> poll() {
+            Result<ForgeChunkInfo> data = new Result<>();
+
+            ClientLevel level = this.client.level;
+            if (level == null) {
+                return null;
+            }
+
+            TransientEntitySectionManager<Entity> entityManager = level.entityStorage;
+            EntitySectionStorage<Entity> cache = entityManager.sectionStorage;
+
+            List<ForgeChunkInfo> list = getChunksFromCache(cache);
+            data.put(level.dimensionTypeId().location().getPath(), list);
+
+            return data;
+        }
+    }
+
+    static final class ForgeChunkInfo extends AbstractChunkInfo<EntityType<?>> {
+        private final CountMap<EntityType<?>> entityCounts;
+
+        ForgeChunkInfo(long chunkPos, Stream<EntitySection<Entity>> entities) {
+            super(ChunkPos.getX(chunkPos), ChunkPos.getZ(chunkPos));
+
+            this.entityCounts = new CountMap.Simple<>(new HashMap<>());
+            entities.forEach(section -> {
+                if (section.getStatus().isAccessible()) {
+                    section.getEntities().forEach(entity ->
+                            this.entityCounts.increment(entity.getType())
+                    );
+                }
+            });
+        }
+
+        @Override
+        public CountMap<EntityType<?>> getEntityCounts() {
+            return this.entityCounts;
+        }
+
+        @Override
+        public String entityTypeName(EntityType<?> type) {
+            return EntityType.getKey(type).toString();
+        }
+    }
+
+
+}
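[Note: the long values iterated in getChunksFromCache above are chunk positions packed
into a single long each, which is why the cache can expose its loaded chunks as a
primitive LongSet. A sketch of the packing, as vanilla's ChunkPos effectively
implements it; illustration only, not part of the patch:

    long key = ((long) z << 32) | (x & 0xFFFFFFFFL); // x in the low 32 bits, z in the high 32 bits
    int chunkX = (int) key;                          // what ChunkPos.getX(key) returns
    int chunkZ = (int) (key >> 32);                  // what ChunkPos.getZ(key) returns
]
diff --git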
a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeClientSparkPlugin.java b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeClientSparkPlugin.java index cf5c89b..04c8785 100644 --- a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeClientSparkPlugin.java +++ b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeClientSparkPlugin.java @@ -28,6 +28,7 @@ import com.mojang.brigadier.suggestion.Suggestions; import com.mojang.brigadier.suggestion.SuggestionsBuilder; import me.lucko.spark.common.platform.PlatformInfo; +import me.lucko.spark.common.platform.world.WorldInfoProvider; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.tick.TickReporter; import me.lucko.spark.forge.ForgeCommandSender; @@ -35,6 +36,7 @@ import me.lucko.spark.forge.ForgePlatformInfo; import me.lucko.spark.forge.ForgeSparkMod; import me.lucko.spark.forge.ForgeTickHook; import me.lucko.spark.forge.ForgeTickReporter; +import me.lucko.spark.forge.ForgeWorldInfoProvider; import net.minecraft.client.Minecraft; import net.minecraft.commands.CommandSource; @@ -107,6 +109,11 @@ public class ForgeClientSparkPlugin extends ForgeSparkPlugin implements Command< return Stream.of(new ForgeCommandSender(this.minecraft.player, this)); } + @Override + public void executeSync(Runnable task) { + this.minecraft.executeIfPossible(task); + } + @Override public TickHook createTickHook() { return new ForgeTickHook(TickEvent.Type.CLIENT); @@ -117,6 +124,11 @@ public class ForgeClientSparkPlugin extends ForgeSparkPlugin implements Command< return new ForgeTickReporter(TickEvent.Type.CLIENT); } + @Override + public WorldInfoProvider createWorldInfoProvider() { + return new ForgeWorldInfoProvider.Client(this.minecraft); + } + @Override public PlatformInfo getPlatformInfo() { return new ForgePlatformInfo(PlatformInfo.Type.CLIENT); diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java index e341d6f..03f9952 100644 --- a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java +++ b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java @@ -31,6 +31,7 @@ import com.mojang.brigadier.suggestion.SuggestionsBuilder; import me.lucko.spark.common.monitor.ping.PlayerPingProvider; import me.lucko.spark.common.platform.PlatformInfo; +import me.lucko.spark.common.platform.world.WorldInfoProvider; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.tick.TickReporter; import me.lucko.spark.forge.ForgeCommandSender; @@ -39,6 +40,7 @@ import me.lucko.spark.forge.ForgePlayerPingProvider; import me.lucko.spark.forge.ForgeSparkMod; import me.lucko.spark.forge.ForgeTickHook; import me.lucko.spark.forge.ForgeTickReporter; +import me.lucko.spark.forge.ForgeWorldInfoProvider; import net.minecraft.commands.CommandSource; import net.minecraft.commands.CommandSourceStack; @@ -183,6 +185,11 @@ public class ForgeServerSparkPlugin extends ForgeSparkPlugin implements Command< ).map(sender -> new ForgeCommandSender(sender, this)); } + @Override + public void executeSync(Runnable task) { + this.server.executeIfPossible(task); + } + @Override public TickHook createTickHook() { return new ForgeTickHook(TickEvent.Type.SERVER); @@ -198,6 +205,11 @@ public class ForgeServerSparkPlugin extends ForgeSparkPlugin implements Command< return new ForgePlayerPingProvider(this.server); } + @Override + public WorldInfoProvider 
createWorldInfoProvider() { + return new ForgeWorldInfoProvider.Server(this.server); + } + @Override public PlatformInfo getPlatformInfo() { return new ForgePlatformInfo(PlatformInfo.Type.SERVER); diff --git a/spark-forge/src/main/resources/META-INF/accesstransformer.cfg b/spark-forge/src/main/resources/META-INF/accesstransformer.cfg new file mode 100644 index 0000000..1e418b8 --- /dev/null +++ b/spark-forge/src/main/resources/META-INF/accesstransformer.cfg @@ -0,0 +1,4 @@ +public net.minecraft.server.level.ServerLevel f_143244_ # entityManager +public net.minecraft.world.level.entity.PersistentEntitySectionManager f_157495_ # sectionStorage +public net.minecraft.client.multiplayer.ClientLevel f_171631_ # entityStorage +public net.minecraft.world.level.entity.TransientEntitySectionManager f_157638_ # sectionStorage diff --git a/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitSparkPlugin.java b/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitSparkPlugin.java index 18132c3..87d9f09 100644 --- a/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitSparkPlugin.java +++ b/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitSparkPlugin.java @@ -31,7 +31,6 @@ import cn.nukkit.command.Command; import cn.nukkit.command.CommandSender; import cn.nukkit.plugin.PluginBase; import cn.nukkit.plugin.service.ServicePriority; -import cn.nukkit.scheduler.AsyncTask; import java.nio.file.Path; import java.util.logging.Level; @@ -82,12 +81,7 @@ public class NukkitSparkPlugin extends PluginBase implements SparkPlugin { @Override public void executeAsync(Runnable task) { - getServer().getScheduler().scheduleAsyncTask(this, new AsyncTask() { - @Override - public void onRun() { - task.run(); - } - }); + getServer().getScheduler().scheduleTask(this, task, true); } @Override diff --git a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7SparkPlugin.java b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7SparkPlugin.java index 670e0c5..324e242 100644 --- a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7SparkPlugin.java +++ b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7SparkPlugin.java @@ -27,6 +27,7 @@ import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.SparkPlugin; import me.lucko.spark.common.monitor.ping.PlayerPingProvider; import me.lucko.spark.common.platform.PlatformInfo; +import me.lucko.spark.common.platform.world.WorldInfoProvider; import me.lucko.spark.common.sampler.ThreadDumper; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.util.ClassSourceLookup; @@ -44,6 +45,7 @@ import org.spongepowered.api.plugin.Plugin; import org.spongepowered.api.plugin.PluginContainer; import org.spongepowered.api.scheduler.AsynchronousExecutor; import org.spongepowered.api.scheduler.SpongeExecutorService; +import org.spongepowered.api.scheduler.SynchronousExecutor; import org.spongepowered.api.text.Text; import org.spongepowered.api.world.Location; import org.spongepowered.api.world.World; @@ -70,17 +72,19 @@ public class Sponge7SparkPlugin implements SparkPlugin { private final Game game; private final Path configDirectory; private final SpongeExecutorService asyncExecutor; + private final SpongeExecutorService syncExecutor; private SparkPlatform platform; private final ThreadDumper.GameThread threadDumper = new ThreadDumper.GameThread(); @Inject - public Sponge7SparkPlugin(PluginContainer pluginContainer, Logger logger, Game game, @ConfigDir(sharedRoot = false) Path configDirectory, @AsynchronousExecutor SpongeExecutorService 
asyncExecutor) { + public Sponge7SparkPlugin(PluginContainer pluginContainer, Logger logger, Game game, @ConfigDir(sharedRoot = false) Path configDirectory, @AsynchronousExecutor SpongeExecutorService asyncExecutor, @SynchronousExecutor SpongeExecutorService syncExecutor) { this.pluginContainer = pluginContainer; this.logger = logger; this.game = game; this.configDirectory = configDirectory; this.asyncExecutor = asyncExecutor; + this.syncExecutor = syncExecutor; } @Listener @@ -112,10 +116,14 @@ public class Sponge7SparkPlugin implements SparkPlugin { @Override public Stream getCommandSenders() { - return Stream.concat( - this.game.getServer().getOnlinePlayers().stream(), - Stream.of(this.game.getServer().getConsole()) - ).map(Sponge7CommandSender::new); + if (this.game.isServerAvailable()) { + return Stream.concat( + this.game.getServer().getOnlinePlayers().stream(), + Stream.of(this.game.getServer().getConsole()) + ).map(Sponge7CommandSender::new); + } else { + return Stream.of(this.game.getServer().getConsole()).map(Sponge7CommandSender::new); + } } @Override @@ -123,6 +131,11 @@ public class Sponge7SparkPlugin implements SparkPlugin { this.asyncExecutor.execute(task); } + @Override + public void executeSync(Runnable task) { + this.syncExecutor.execute(task); + } + @Override public void log(Level level, String msg) { if (level == Level.INFO) { @@ -160,6 +173,15 @@ public class Sponge7SparkPlugin implements SparkPlugin { } } + @Override + public WorldInfoProvider createWorldInfoProvider() { + if (this.game.isServerAvailable()) { + return new Sponge7WorldInfoProvider(this.game.getServer()); + } else { + return WorldInfoProvider.NO_OP; + } + } + @Override public PlatformInfo getPlatformInfo() { return new Sponge7PlatformInfo(this.game); diff --git a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7WorldInfoProvider.java b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7WorldInfoProvider.java new file mode 100644 index 0000000..fa6fa6b --- /dev/null +++ b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7WorldInfoProvider.java @@ -0,0 +1,87 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */
+
+package me.lucko.spark.sponge;
+
+import com.google.common.collect.Lists;
+
+import me.lucko.spark.common.platform.world.AbstractChunkInfo;
+import me.lucko.spark.common.platform.world.CountMap;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
+
+import org.spongepowered.api.Server;
+import org.spongepowered.api.entity.Entity;
+import org.spongepowered.api.entity.EntityType;
+import org.spongepowered.api.world.Chunk;
+import org.spongepowered.api.world.World;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+
+public class Sponge7WorldInfoProvider implements WorldInfoProvider {
+    private final Server server;
+
+    public Sponge7WorldInfoProvider(Server server) {
+        this.server = server;
+    }
+
+    @Override
+    public Result<Sponge7ChunkInfo> poll() {
+        Result<Sponge7ChunkInfo> data = new Result<>();
+
+        for (World world : this.server.getWorlds()) {
+            List<Chunk> chunks = Lists.newArrayList(world.getLoadedChunks());
+
+            List<Sponge7ChunkInfo> list = new ArrayList<>(chunks.size());
+            for (Chunk chunk : chunks) {
+                list.add(new Sponge7ChunkInfo(chunk));
+            }
+
+            data.put(world.getName(), list);
+        }
+
+        return data;
+    }
+
+    static final class Sponge7ChunkInfo extends AbstractChunkInfo<EntityType> {
+        private final CountMap<EntityType> entityCounts;
+
+        Sponge7ChunkInfo(Chunk chunk) {
+            super(chunk.getPosition().getX(), chunk.getPosition().getZ());
+
+            this.entityCounts = new CountMap.Simple<>(new HashMap<>());
+            for (Entity entity : chunk.getEntities()) {
+                this.entityCounts.increment(entity.getType());
+            }
+        }
+
+        @Override
+        public CountMap<EntityType> getEntityCounts() {
+            return this.entityCounts;
+        }
+
+        @Override
+        public String entityTypeName(EntityType type) {
+            return type.getName();
+        }
+
+    }
+}
diff --git a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8SparkPlugin.java b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8SparkPlugin.java
index e867a75..68e47e3 100644
--- a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8SparkPlugin.java
+++ b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8SparkPlugin.java
@@ -27,6 +27,7 @@ import me.lucko.spark.common.SparkPlugin;
 import me.lucko.spark.common.command.sender.CommandSender;
 import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
 import me.lucko.spark.common.platform.PlatformInfo;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
 import me.lucko.spark.common.sampler.ThreadDumper;
 import me.lucko.spark.common.tick.TickHook;
 import me.lucko.spark.common.util.ClassSourceLookup;
@@ -67,6 +68,7 @@ public class Sponge8SparkPlugin implements SparkPlugin {
     private final Game game;
     private final Path configDirectory;
     private final ExecutorService asyncExecutor;
+    private final ExecutorService syncExecutor;
 
     private SparkPlatform platform;
     private final ThreadDumper.GameThread threadDumper = new ThreadDumper.GameThread();
@@ -78,6 +80,14 @@ public class Sponge8SparkPlugin implements SparkPlugin {
         this.game = game;
         this.configDirectory = configDirectory;
         this.asyncExecutor = game.asyncScheduler().executor(pluginContainer);
+
+        if (game.isServerAvailable()) {
+            this.syncExecutor = game.server().scheduler().executor(pluginContainer);
+        } else if (game.isClientAvailable()) {
+            this.syncExecutor = game.client().scheduler().executor(pluginContainer);
+        } else {
+            throw new IllegalStateException("Server and client both unavailable");
+        }
     }
 
 
@@ -114,10 +124,14 @@ public class Sponge8SparkPlugin implements SparkPlugin {
 
     @Override
     public Stream<CommandSender> getCommandSenders() {
-        return Stream.concat(
-                this.game.server().onlinePlayers().stream(),
-
Stream.of(this.game.systemSubject()) - ).map(Sponge8CommandSender::new); + if (this.game.isServerAvailable()) { + return Stream.concat( + this.game.server().onlinePlayers().stream(), + Stream.of(this.game.systemSubject()) + ).map(Sponge8CommandSender::new); + } else { + return Stream.of(this.game.systemSubject()).map(Sponge8CommandSender::new); + } } @Override @@ -125,6 +139,11 @@ public class Sponge8SparkPlugin implements SparkPlugin { this.asyncExecutor.execute(task); } + @Override + public void executeSync(Runnable task) { + this.syncExecutor.execute(task); + } + @Override public void log(Level level, String msg) { if (level == Level.INFO) { @@ -162,6 +181,15 @@ public class Sponge8SparkPlugin implements SparkPlugin { } } + @Override + public WorldInfoProvider createWorldInfoProvider() { + if (this.game.isServerAvailable()) { + return new Sponge8WorldInfoProvider(this.game.server()); + } else { + return WorldInfoProvider.NO_OP; + } + } + @Override public PlatformInfo getPlatformInfo() { return new Sponge8PlatformInfo(this.game); diff --git a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8WorldInfoProvider.java b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8WorldInfoProvider.java new file mode 100644 index 0000000..bff4d6e --- /dev/null +++ b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8WorldInfoProvider.java @@ -0,0 +1,88 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */
+
+package me.lucko.spark.sponge;
+
+import com.google.common.collect.Lists;
+
+import me.lucko.spark.common.platform.world.AbstractChunkInfo;
+import me.lucko.spark.common.platform.world.CountMap;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
+
+import org.spongepowered.api.Server;
+import org.spongepowered.api.entity.Entity;
+import org.spongepowered.api.entity.EntityType;
+import org.spongepowered.api.entity.EntityTypes;
+import org.spongepowered.api.world.chunk.WorldChunk;
+import org.spongepowered.api.world.server.ServerWorld;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+
+public class Sponge8WorldInfoProvider implements WorldInfoProvider {
+    private final Server server;
+
+    public Sponge8WorldInfoProvider(Server server) {
+        this.server = server;
+    }
+
+    @Override
+    public Result<Sponge7ChunkInfo> poll() {
+        Result<Sponge7ChunkInfo> data = new Result<>();
+
+        for (ServerWorld world : this.server.worldManager().worlds()) {
+            List<WorldChunk> chunks = Lists.newArrayList(world.loadedChunks());
+
+            List<Sponge7ChunkInfo> list = new ArrayList<>(chunks.size());
+            for (WorldChunk chunk : chunks) {
+                list.add(new Sponge7ChunkInfo(chunk));
+            }
+
+            data.put(world.key().value(), list);
+        }
+
+        return data;
+    }
+
+    static final class Sponge7ChunkInfo extends AbstractChunkInfo<EntityType<?>> {
+        private final CountMap<EntityType<?>> entityCounts;
+
+        Sponge7ChunkInfo(WorldChunk chunk) {
+            super(chunk.chunkPosition().x(), chunk.chunkPosition().z());
+
+            this.entityCounts = new CountMap.Simple<>(new HashMap<>());
+            for (Entity entity : chunk.entities()) {
+                this.entityCounts.increment(entity.type());
+            }
+        }
+
+        @Override
+        public CountMap<EntityType<?>> getEntityCounts() {
+            return this.entityCounts;
+        }
+
+        @Override
+        public String entityTypeName(EntityType<?> type) {
+            return EntityTypes.registry().valueKey(type).value();
+        }
+
+    }
+}
--
cgit

From fed6a350a80f6daa8c170770e5f300a0d5aa0894 Mon Sep 17 00:00:00 2001
From: Luck <git@lucko.me>
Date: Sun, 26 Jun 2022 18:59:02 +0100
Subject: Include info about number of ticks in a profile

---
 .../common/command/modules/SamplerModule.java      |  2 +-
 .../spark/common/sampler/AbstractSampler.java      | 36 +++++++++++++++++++---
 .../me/lucko/spark/common/sampler/Sampler.java     |  2 +-
 .../lucko/spark/common/sampler/SamplerBuilder.java |  6 ++--
 .../spark/common/sampler/async/AsyncSampler.java   | 10 +++---
 .../spark/common/sampler/java/JavaSampler.java     | 12 ++++----
 .../common/sampler/java/TickedDataAggregator.java  | 15 +++++++++
 .../src/main/proto/spark/spark_sampler.proto       |  2 ++
 8 files changed, 64 insertions(+), 21 deletions(-)

(limited to 'spark-common/src')

diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
index 970d062..fd5cd67 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
@@ -266,7 +266,7 @@ public class SamplerModule implements CommandModule {
         if (this.activeSampler == null) {
             resp.replyPrefixed(text("There isn't an active profiler running."));
         } else {
-            long timeout = this.activeSampler.getEndTime();
+            long timeout = this.activeSampler.getAutoEndTime();
             if (timeout == -1) {
                 resp.replyPrefixed(text("There is an active profiler currently running, with no defined timeout."));
             } else {
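[Note: the commit below records the game tick when sampling starts and exports a
number_of_ticks count in the profiler metadata. A sketch of what a viewer consuming
the proto could derive from it, assuming the existing start_time field alongside the
end_time shown further down; hypothetical helper, not part of the patch:

    // Average ticks-per-second across the whole profiling window.
    static double averageTps(SamplerMetadata metadata) {
        double durationSeconds = (metadata.getEndTime() - metadata.getStartTime()) / 1000.0d;
        return metadata.getNumberOfTicks() / durationSeconds;
    }
]
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java
index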
ce466a0..1c217db 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java @@ -27,6 +27,7 @@ import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider; import me.lucko.spark.common.sampler.aggregator.DataAggregator; import me.lucko.spark.common.sampler.node.MergeMode; import me.lucko.spark.common.sampler.node.ThreadNode; +import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.util.ClassSourceLookup; import me.lucko.spark.proto.SparkSamplerProtos.SamplerData; import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata; @@ -41,6 +42,9 @@ import java.util.concurrent.CompletableFuture; */ public abstract class AbstractSampler implements Sampler { + /** The spark platform instance */ + protected final SparkPlatform platform; + /** The interval to wait between sampling, in microseconds */ protected final int interval; @@ -50,8 +54,11 @@ public abstract class AbstractSampler implements Sampler { /** The time when sampling first began */ protected long startTime = -1; + /** The game tick when sampling first began */ + protected int startTick = -1; + /** The unix timestamp (in millis) when this sampler should automatically complete. */ - protected final long endTime; // -1 for nothing + protected final long autoEndTime; // -1 for nothing /** A future to encapsulate the completion of this sampler instance */ protected final CompletableFuture future = new CompletableFuture<>(); @@ -59,10 +66,11 @@ public abstract class AbstractSampler implements Sampler { /** The garbage collector statistics when profiling started */ protected Map initialGcStats; - protected AbstractSampler(int interval, ThreadDumper threadDumper, long endTime) { + protected AbstractSampler(SparkPlatform platform, int interval, ThreadDumper threadDumper, long autoEndTime) { + this.platform = platform; this.interval = interval; this.threadDumper = threadDumper; - this.endTime = endTime; + this.autoEndTime = autoEndTime; } @Override @@ -74,8 +82,8 @@ public abstract class AbstractSampler implements Sampler { } @Override - public long getEndTime() { - return this.endTime; + public long getAutoEndTime() { + return this.autoEndTime; } @Override @@ -91,6 +99,16 @@ public abstract class AbstractSampler implements Sampler { return this.initialGcStats; } + @Override + public void start() { + this.startTime = System.currentTimeMillis(); + + TickHook tickHook = this.platform.getTickHook(); + if (tickHook != null) { + this.startTick = tickHook.getCurrentTick(); + } + } + protected void writeMetadataToProto(SamplerData.Builder proto, SparkPlatform platform, CommandSender creator, String comment, DataAggregator dataAggregator) { SamplerMetadata.Builder metadata = SamplerMetadata.newBuilder() .setPlatformMetadata(platform.getPlugin().getPlatformInfo().toData().toProto()) @@ -105,6 +123,14 @@ public abstract class AbstractSampler implements Sampler { metadata.setComment(comment); } + if (this.startTick != -1) { + TickHook tickHook = this.platform.getTickHook(); + if (tickHook != null) { + int numberOfTicks = tickHook.getCurrentTick() - this.startTick; + metadata.setNumberOfTicks(numberOfTicks); + } + } + try { metadata.setPlatformStatistics(platform.getStatisticsProvider().getPlatformStatistics(getInitialGcStats())); } catch (Exception e) { diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java index 
845043f..84f2da1 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java @@ -57,7 +57,7 @@ public interface Sampler { * * @return the end time, or -1 if undefined */ - long getEndTime(); + long getAutoEndTime(); /** * Gets a future to encapsulate the completion of the sampler diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java index 88cf018..88b9d91 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java @@ -97,11 +97,11 @@ public class SamplerBuilder { Sampler sampler; if (this.ticksOver != -1 && this.tickHook != null) { - sampler = new JavaSampler(intervalMicros, this.threadDumper, this.threadGrouper, this.timeout, this.ignoreSleeping, this.ignoreNative, this.tickHook, this.ticksOver); + sampler = new JavaSampler(platform, intervalMicros, this.threadDumper, this.threadGrouper, this.timeout, this.ignoreSleeping, this.ignoreNative, this.tickHook, this.ticksOver); } else if (this.useAsyncProfiler && !(this.threadDumper instanceof ThreadDumper.Regex) && AsyncProfilerAccess.INSTANCE.checkSupported(platform)) { - sampler = new AsyncSampler(intervalMicros, this.threadDumper, this.threadGrouper, this.timeout); + sampler = new AsyncSampler(platform, intervalMicros, this.threadDumper, this.threadGrouper, this.timeout); } else { - sampler = new JavaSampler(intervalMicros, this.threadDumper, this.threadGrouper, this.timeout, this.ignoreSleeping, this.ignoreNative); + sampler = new JavaSampler(platform, intervalMicros, this.threadDumper, this.threadGrouper, this.timeout, this.ignoreSleeping, this.ignoreNative); } sampler.start(); diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java index 5cb7fdc..d8288da 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java @@ -65,8 +65,8 @@ public class AsyncSampler extends AbstractSampler { /** The executor used for timeouts */ private ScheduledExecutorService timeoutExecutor; - public AsyncSampler(int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime) { - super(interval, threadDumper, endTime); + public AsyncSampler(SparkPlatform platform, int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime) { + super(platform, interval, threadDumper, endTime); this.profiler = AsyncProfilerAccess.INSTANCE.getProfiler(); this.dataAggregator = new AsyncDataAggregator(threadGrouper); } @@ -90,7 +90,7 @@ public class AsyncSampler extends AbstractSampler { */ @Override public void start() { - this.startTime = System.currentTimeMillis(); + super.start(); try { this.outputFile = TemporaryFiles.create("spark-profile-", ".jfr.tmp"); @@ -120,11 +120,11 @@ public class AsyncSampler extends AbstractSampler { } private void scheduleTimeout() { - if (this.endTime == -1) { + if (this.autoEndTime == -1) { return; } - long delay = this.endTime - System.currentTimeMillis(); + long delay = this.autoEndTime - System.currentTimeMillis(); if (delay <= 0) { return; } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java 
b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java index cfa0a0f..913faee 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java @@ -63,19 +63,19 @@ public class JavaSampler extends AbstractSampler implements Runnable { /** Responsible for aggregating and then outputting collected sampling data */ private final JavaDataAggregator dataAggregator; - public JavaSampler(int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime, boolean ignoreSleeping, boolean ignoreNative) { - super(interval, threadDumper, endTime); + public JavaSampler(SparkPlatform platform, int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime, boolean ignoreSleeping, boolean ignoreNative) { + super(platform, interval, threadDumper, endTime); this.dataAggregator = new SimpleDataAggregator(this.workerPool, threadGrouper, interval, ignoreSleeping, ignoreNative); } - public JavaSampler(int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime, boolean ignoreSleeping, boolean ignoreNative, TickHook tickHook, int tickLengthThreshold) { - super(interval, threadDumper, endTime); + public JavaSampler(SparkPlatform platform, int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime, boolean ignoreSleeping, boolean ignoreNative, TickHook tickHook, int tickLengthThreshold) { + super(platform, interval, threadDumper, endTime); this.dataAggregator = new TickedDataAggregator(this.workerPool, threadGrouper, interval, ignoreSleeping, ignoreNative, tickHook, tickLengthThreshold); } @Override public void start() { - this.startTime = System.currentTimeMillis(); + super.start(); this.task = this.workerPool.scheduleAtFixedRate(this, 0, this.interval, TimeUnit.MICROSECONDS); } @@ -89,7 +89,7 @@ public class JavaSampler extends AbstractSampler implements Runnable { // this is effectively synchronized, the worker pool will not allow this task // to concurrently execute. 
try { - if (this.endTime != -1 && this.endTime <= System.currentTimeMillis()) { + if (this.autoEndTime != -1 && this.autoEndTime <= System.currentTimeMillis()) { this.future.complete(this); stop(); return; diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java index e817828..e062f31 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java @@ -31,6 +31,7 @@ import java.util.ArrayList; import java.util.List; import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicInteger; /** * Implementation of {@link DataAggregator} which supports only including sampling data from "ticks" @@ -47,6 +48,9 @@ public class TickedDataAggregator extends JavaDataAggregator { /** The expected number of samples in each tick */ private final int expectedSize; + /** The number of ticks aggregated so far */ + private final AtomicInteger numberOfTicks = new AtomicInteger(); + private final Object mutex = new Object(); // state @@ -64,10 +68,16 @@ public class TickedDataAggregator extends JavaDataAggregator { @Override public SamplerMetadata.DataAggregator getMetadata() { + // push the current tick (so numberOfTicks is accurate) + synchronized (this.mutex) { + pushCurrentTick(); + } + return SamplerMetadata.DataAggregator.newBuilder() .setType(SamplerMetadata.DataAggregator.Type.TICKED) .setThreadGrouper(this.threadGrouper.asProto()) .setTickLengthThreshold(this.tickLengthThreshold) + .setNumberOfIncludedTicks(this.numberOfTicks.get()) .build(); } @@ -97,6 +107,7 @@ public class TickedDataAggregator extends JavaDataAggregator { return; } + this.numberOfTicks.incrementAndGet(); this.workerPool.submit(currentData); } @@ -110,6 +121,10 @@ public class TickedDataAggregator extends JavaDataAggregator { return super.exportData(); } + public int getNumberOfTicks() { + return this.numberOfTicks.get(); + } + private final class TickList implements Runnable { private final List list; diff --git a/spark-common/src/main/proto/spark/spark_sampler.proto b/spark-common/src/main/proto/spark/spark_sampler.proto index 51bdd64..8d9512a 100644 --- a/spark-common/src/main/proto/spark/spark_sampler.proto +++ b/spark-common/src/main/proto/spark/spark_sampler.proto @@ -25,6 +25,7 @@ message SamplerMetadata { SystemStatistics system_statistics = 9; map server_configurations = 10; int64 end_time = 11; + int32 number_of_ticks = 12; message ThreadDumper { Type type = 1; @@ -42,6 +43,7 @@ message SamplerMetadata { Type type = 1; ThreadGrouper thread_grouper = 2; int64 tick_length_threshold = 3; // optional + int32 number_of_included_ticks = 4; // optional enum Type { SIMPLE = 0; -- cgit From d5d5cb10714b0993ec91d6a2b523b661c1314917 Mon Sep 17 00:00:00 2001 From: Luck Date: Sun, 26 Jun 2022 19:19:49 +0100 Subject: Update adventure version --- spark-bukkit/build.gradle | 8 +------- spark-bungeecord/build.gradle | 8 +------- spark-common/build.gradle | 9 ++++++--- .../me/lucko/spark/common/command/modules/ActivityLogModule.java | 1 + 4 files changed, 9 insertions(+), 17 deletions(-) (limited to 'spark-common/src') diff --git a/spark-bukkit/build.gradle b/spark-bukkit/build.gradle index 8e111e8..7144291 100644 --- a/spark-bukkit/build.gradle +++ b/spark-bukkit/build.gradle @@ -4,13 +4,7 @@ plugins { dependencies { 
implementation project(':spark-common') - implementation('me.lucko:adventure-platform-bukkit:4.9.4') { - exclude(module: 'adventure-api') - exclude(module: 'checker-qual') - exclude(module: 'annotations') - exclude(module: 'adventure-text-serializer-gson') - exclude(module: 'adventure-text-serializer-legacy') - } + implementation 'net.kyori:adventure-platform-bukkit:4.1.1' compileOnly 'com.destroystokyo.paper:paper-api:1.16.4-R0.1-SNAPSHOT' // placeholders diff --git a/spark-bungeecord/build.gradle b/spark-bungeecord/build.gradle index ccea89d..1e92621 100644 --- a/spark-bungeecord/build.gradle +++ b/spark-bungeecord/build.gradle @@ -4,13 +4,7 @@ plugins { dependencies { implementation project(':spark-common') - implementation('me.lucko:adventure-platform-bungeecord:4.9.4') { - exclude(module: 'adventure-api') - exclude(module: 'checker-qual') - exclude(module: 'annotations') - exclude(module: 'adventure-text-serializer-gson') - exclude(module: 'adventure-text-serializer-legacy') - } + implementation 'net.kyori:adventure-platform-bungeecord:4.1.1' compileOnly 'net.md-5:bungeecord-api:1.16-R0.4' } diff --git a/spark-common/build.gradle b/spark-common/build.gradle index 554eec2..bc493f3 100644 --- a/spark-common/build.gradle +++ b/spark-common/build.gradle @@ -15,15 +15,18 @@ dependencies { implementation 'com.squareup.okio:okio:1.17.3' implementation 'net.bytebuddy:byte-buddy-agent:1.11.0' implementation 'org.tukaani:xz:1.8' - api('net.kyori:adventure-api:4.9.3') { + api('net.kyori:adventure-api:4.11.0') { + exclude(module: 'adventure-bom') exclude(module: 'checker-qual') exclude(module: 'annotations') } - api('net.kyori:adventure-text-serializer-gson:4.9.3') { + api('net.kyori:adventure-text-serializer-gson:4.11.0') { + exclude(module: 'adventure-bom') exclude(module: 'adventure-api') exclude(module: 'gson') } - api('net.kyori:adventure-text-serializer-legacy:4.9.3') { + api('net.kyori:adventure-text-serializer-legacy:4.11.0') { + exclude(module: 'adventure-bom') exclude(module: 'adventure-api') } implementation('net.kyori:adventure-text-feature-pagination:4.0.0-SNAPSHOT') { diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/ActivityLogModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/ActivityLogModule.java index b777f3e..6252ac7 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/ActivityLogModule.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/ActivityLogModule.java @@ -50,6 +50,7 @@ import static net.kyori.adventure.text.format.TextDecoration.BOLD; public class ActivityLogModule implements CommandModule, RowRenderer { private final Pagination.Builder pagination = Pagination.builder() + .width(45) .renderer(new Renderer() { @Override public Component renderEmpty() { -- cgit From 7f422948755c2988180c32fda9554ea1531949c2 Mon Sep 17 00:00:00 2001 From: Ben Kerllenevich Date: Tue, 28 Jun 2022 14:57:36 -0400 Subject: Better handling of Paper's split config system (#218) --- .../spark/bukkit/BukkitServerConfigProvider.java | 59 ++++++++++++++++++---- .../serverconfig/AbstractServerConfigProvider.java | 17 ++----- .../common/platform/serverconfig/ConfigParser.java | 20 +++++++- 3 files changed, 71 insertions(+), 25 deletions(-) (limited to 'spark-common/src') diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java index ff1b55f..bc1bdf8 100644 --- 
a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java +++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java @@ -22,6 +22,7 @@ package me.lucko.spark.bukkit; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Maps; import com.google.gson.GsonBuilder; import com.google.gson.JsonSerializer; @@ -29,6 +30,8 @@ import me.lucko.spark.common.platform.serverconfig.AbstractServerConfigProvider; import me.lucko.spark.common.platform.serverconfig.ConfigParser; import me.lucko.spark.common.platform.serverconfig.PropertiesConfigParser; +import org.bukkit.Bukkit; +import org.bukkit.World; import org.bukkit.configuration.MemorySection; import org.bukkit.configuration.file.YamlConfiguration; @@ -36,6 +39,9 @@ import co.aikar.timings.TimingsManager; import java.io.BufferedReader; import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; import java.util.Arrays; import java.util.Collection; import java.util.Collections; @@ -58,15 +64,8 @@ public class BukkitServerConfigProvider extends AbstractServerConfigProvider { gson.registerTypeAdapter(MemorySection.class, (JsonSerializer) (obj, type, ctx) -> ctx.serialize(obj.getValues(false))); } - @Override - protected String rewriteConfigPath(String path) { - return path.startsWith("config/") - ? path.substring("config/".length()) - : path; - } - - private enum YamlConfigParser implements ConfigParser { - INSTANCE; + private static class YamlConfigParser implements ConfigParser { + public static final YamlConfigParser INSTANCE = new YamlConfigParser(); @Override public Map parse(BufferedReader reader) throws IOException { @@ -75,14 +74,52 @@ public class BukkitServerConfigProvider extends AbstractServerConfigProvider { } } + // Paper 1.19+ split config layout + private static class SplitYamlConfigParser extends YamlConfigParser { + public static final SplitYamlConfigParser INSTANCE = new SplitYamlConfigParser(); + + @Override + public Map parse(String prefix) throws IOException { + Path configDir = Paths.get("config"); + if (!Files.exists(configDir)) { + return null; + } + + Map configs = Maps.newHashMap(); + + parseIfExists(configs, + "global.yml", + configDir.resolve(prefix + "-global.yml") + ); + parseIfExists(configs, + "world-defaults.yml", + configDir.resolve(prefix + "-world-defaults.yml") + ); + for (World world : Bukkit.getWorlds()) { + parseIfExists(configs, + world.getName() + ".yml", + world.getWorldFolder().toPath().resolve(prefix + "-world.yml") + ); + } + + return configs; + } + + private void parseIfExists(Map configs, String name, Path path) throws IOException { + Map values = parse(path); + if (values != null) { + configs.put(name, values); + } + } + } + static { ImmutableMap.Builder files = ImmutableMap.builder() .put("server.properties", PropertiesConfigParser.INSTANCE) .put("bukkit.yml", YamlConfigParser.INSTANCE) .put("spigot.yml", YamlConfigParser.INSTANCE) .put("paper.yml", YamlConfigParser.INSTANCE) - .put("config/paper-global.yml", YamlConfigParser.INSTANCE) - .put("config/paper-world-defaults.yml", YamlConfigParser.INSTANCE) + .put("paper", SplitYamlConfigParser.INSTANCE) .put("purpur.yml", YamlConfigParser.INSTANCE); for (String config : getSystemPropertyList("spark.serverconfigs.extra")) { diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java 
b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java index 0eef111..5a14382 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java @@ -73,9 +73,7 @@ public abstract class AbstractServerConfigProvider implements ServerConfigProvid } delete(json, this.hiddenPaths); - - String name = rewriteConfigPath(path); - builder.put(name, json); + builder.put(path, json); } catch (Exception e) { e.printStackTrace(); } @@ -85,25 +83,18 @@ public abstract class AbstractServerConfigProvider implements ServerConfigProvid } private JsonElement load(String path, ConfigParser parser) throws IOException { - Path filePath = Paths.get(path); - if (!Files.exists(filePath)) { + Map values = parser.parse(path); + if (values == null) { return null; } - try (BufferedReader reader = Files.newBufferedReader(filePath, StandardCharsets.UTF_8)) { - Map values = parser.parse(reader); - return this.gson.toJsonTree(values); - } + return this.gson.toJsonTree(values); } protected void customiseGson(GsonBuilder gson) { } - protected String rewriteConfigPath(String path) { - return path; - } - /** * Deletes the given paths from the json element. * diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ConfigParser.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ConfigParser.java index 2dd15fe..dfbf816 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ConfigParser.java +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ConfigParser.java @@ -22,10 +22,28 @@ package me.lucko.spark.common.platform.serverconfig; import java.io.BufferedReader; import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; import java.util.Map; public interface ConfigParser { - Map parse(BufferedReader reader) throws IOException; + default Map parse(String file) throws IOException { + return parse(Paths.get(file)); + } + + default Map parse(Path file) throws IOException { + if (!Files.exists(file)) { + return null; + } + + try (BufferedReader reader = Files.newBufferedReader(file, StandardCharsets.UTF_8)) { + return this.parse(reader); + } + } + + Map parse(BufferedReader reader) throws IOException; } -- cgit From 3b93798a312a0b63b7d80a6b9a7ae3150f8fdaca Mon Sep 17 00:00:00 2001 From: Luck Date: Tue, 28 Jun 2022 20:24:46 +0100 Subject: Fix config filtering on nested files --- .../spark/bukkit/BukkitServerConfigProvider.java | 69 +++++++++------ .../serverconfig/AbstractServerConfigProvider.java | 97 +--------------------- .../common/platform/serverconfig/ConfigParser.java | 7 +- .../serverconfig/ExcludedConfigFilter.java | 97 ++++++++++++++++++++++ .../serverconfig/PropertiesConfigParser.java | 16 ++++ 5 files changed, 162 insertions(+), 124 deletions(-) create mode 100644 spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ExcludedConfigFilter.java (limited to 'spark-common/src') diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java index bc1bdf8..f822015 100644 --- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java +++ 
b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java @@ -22,12 +22,15 @@ package me.lucko.spark.bukkit; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; -import com.google.common.collect.Maps; +import com.google.gson.Gson; import com.google.gson.GsonBuilder; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; import com.google.gson.JsonSerializer; import me.lucko.spark.common.platform.serverconfig.AbstractServerConfigProvider; import me.lucko.spark.common.platform.serverconfig.ConfigParser; +import me.lucko.spark.common.platform.serverconfig.ExcludedConfigFilter; import me.lucko.spark.common.platform.serverconfig.PropertiesConfigParser; import org.bukkit.Bukkit; @@ -45,6 +48,7 @@ import java.nio.file.Paths; import java.util.Arrays; import java.util.Collection; import java.util.Collections; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -59,13 +63,21 @@ public class BukkitServerConfigProvider extends AbstractServerConfigProvider { super(FILES, HIDDEN_PATHS); } - @Override - protected void customiseGson(GsonBuilder gson) { - gson.registerTypeAdapter(MemorySection.class, (JsonSerializer) (obj, type, ctx) -> ctx.serialize(obj.getValues(false))); - } - private static class YamlConfigParser implements ConfigParser { public static final YamlConfigParser INSTANCE = new YamlConfigParser(); + protected static final Gson GSON = new GsonBuilder() + .registerTypeAdapter(MemorySection.class, (JsonSerializer) (obj, type, ctx) -> ctx.serialize(obj.getValues(false))) + .create(); + + @Override + public JsonElement load(String file, ExcludedConfigFilter filter) throws IOException { + Map values = this.parse(Paths.get(file)); + if (values == null) { + return null; + } + + return filter.apply(GSON.toJsonTree(values)); + } @Override public Map parse(BufferedReader reader) throws IOException { @@ -79,37 +91,40 @@ public class BukkitServerConfigProvider extends AbstractServerConfigProvider { public static final SplitYamlConfigParser INSTANCE = new SplitYamlConfigParser(); @Override - public Map parse(String prefix) throws IOException { + public JsonElement load(String group, ExcludedConfigFilter filter) throws IOException { + String prefix = group.replace("/", ""); + Path configDir = Paths.get("config"); if (!Files.exists(configDir)) { return null; } - Map configs = Maps.newHashMap(); + JsonObject root = new JsonObject(); - parseIfExists(configs, - "global.yml", - configDir.resolve(prefix + "-global.yml") - ); - parseIfExists(configs, - "world-defaults.yml", - configDir.resolve(prefix + "-world-defaults.yml") - ); - for (World world : Bukkit.getWorlds()) { - parseIfExists(configs, - world.getName() + ".yml", - world.getWorldFolder().toPath().resolve(prefix + "-world.yml") - ); + for (Map.Entry entry : getNestedFiles(configDir, prefix).entrySet()) { + String fileName = entry.getKey(); + Path path = entry.getValue(); + + Map values = this.parse(path); + if (values == null) { + continue; + } + + // apply the filter individually to each nested file + root.add(fileName, filter.apply(GSON.toJsonTree(values))); } - return configs; + return root; } - private void parseIfExists(Map configs, String name, Path path) throws IOException { - Map values = parse(path); - if (values != null) { - configs.put(name, values); + private static Map getNestedFiles(Path configDir, String prefix) { + Map files = new LinkedHashMap<>(); + files.put("global.yml", configDir.resolve(prefix + "-global.yml")); + 
files.put("world-defaults.yml", configDir.resolve(prefix + "-world-defaults.yml")); + for (World world : Bukkit.getWorlds()) { + files.put(world.getName() + ".yml", world.getWorldFolder().toPath().resolve(prefix + "-world.yml")); } + return files; } } @@ -119,7 +134,7 @@ public class BukkitServerConfigProvider extends AbstractServerConfigProvider { .put("bukkit.yml", YamlConfigParser.INSTANCE) .put("spigot.yml", YamlConfigParser.INSTANCE) .put("paper.yml", YamlConfigParser.INSTANCE) - .put("paper", SplitYamlConfigParser.INSTANCE) + .put("paper/", SplitYamlConfigParser.INSTANCE) .put("purpur.yml", YamlConfigParser.INSTANCE); for (String config : getSystemPropertyList("spark.serverconfigs.extra")) { diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java index 5a14382..501851a 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java @@ -21,24 +21,10 @@ package me.lucko.spark.common.platform.serverconfig; import com.google.common.collect.ImmutableMap; -import com.google.gson.Gson; -import com.google.gson.GsonBuilder; import com.google.gson.JsonElement; -import com.google.gson.JsonObject; -import java.io.BufferedReader; -import java.io.IOException; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.util.Arrays; import java.util.Collection; -import java.util.Collections; -import java.util.Deque; -import java.util.LinkedList; import java.util.Map; -import java.util.stream.Collectors; /** * Abstract implementation of {@link ServerConfigProvider}. @@ -48,31 +34,23 @@ import java.util.stream.Collectors; */ public abstract class AbstractServerConfigProvider implements ServerConfigProvider { private final Map files; - private final Collection hiddenPaths; - - private final Gson gson; + private final ExcludedConfigFilter hiddenPathFilters; protected AbstractServerConfigProvider(Map files, Collection hiddenPaths) { this.files = files; - this.hiddenPaths = hiddenPaths; - - GsonBuilder gson = new GsonBuilder(); - customiseGson(gson); - this.gson = gson.create(); + this.hiddenPathFilters = new ExcludedConfigFilter(hiddenPaths); } @Override public final Map loadServerConfigurations() { ImmutableMap.Builder builder = ImmutableMap.builder(); - this.files.forEach((path, reader) -> { + this.files.forEach((path, parser) -> { try { - JsonElement json = load(path, reader); + JsonElement json = parser.load(path, this.hiddenPathFilters); if (json == null) { return; } - - delete(json, this.hiddenPaths); builder.put(path, json); } catch (Exception e) { e.printStackTrace(); @@ -82,71 +60,4 @@ public abstract class AbstractServerConfigProvider implements ServerConfigProvid return builder.build(); } - private JsonElement load(String path, ConfigParser parser) throws IOException { - Map values = parser.parse(path); - if (values == null) { - return null; - } - - return this.gson.toJsonTree(values); - } - - protected void customiseGson(GsonBuilder gson) { - - } - - /** - * Deletes the given paths from the json element. 
- * - * @param json the json element - * @param paths the paths to delete - */ - private static void delete(JsonElement json, Collection paths) { - for (String path : paths) { - Deque pathDeque = new LinkedList<>(Arrays.asList(path.split("\\."))); - delete(json, pathDeque); - } - } - - private static void delete(JsonElement json, Deque path) { - if (path.isEmpty()) { - return; - } - if (!json.isJsonObject()) { - return; - } - - JsonObject jsonObject = json.getAsJsonObject(); - String expected = path.removeFirst().replace("", "."); - - Collection keys; - if (expected.equals("*")) { - keys = jsonObject.entrySet().stream() - .map(Map.Entry::getKey) - .collect(Collectors.toList()); - } else if (expected.endsWith("*")) { - String pattern = expected.substring(0, expected.length() - 1); - keys = jsonObject.entrySet().stream() - .map(Map.Entry::getKey) - .filter(key -> key.startsWith(pattern)) - .collect(Collectors.toList()); - } else if (jsonObject.has(expected)) { - keys = Collections.singletonList(expected); - } else { - keys = Collections.emptyList(); - } - - for (String key : keys) { - if (path.isEmpty()) { - jsonObject.remove(key); - } else { - Deque pathCopy = keys.size() > 1 - ? new LinkedList<>(path) - : path; - - delete(jsonObject.get(key), pathCopy); - } - } - } - } diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ConfigParser.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ConfigParser.java index dfbf816..675a32e 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ConfigParser.java +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ConfigParser.java @@ -20,19 +20,18 @@ package me.lucko.spark.common.platform.serverconfig; +import com.google.gson.JsonElement; + import java.io.BufferedReader; import java.io.IOException; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; -import java.nio.file.Paths; import java.util.Map; public interface ConfigParser { - default Map parse(String file) throws IOException { - return parse(Paths.get(file)); - } + JsonElement load(String file, ExcludedConfigFilter filter) throws IOException; default Map parse(Path file) throws IOException { if (!Files.exists(file)) { diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ExcludedConfigFilter.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ExcludedConfigFilter.java new file mode 100644 index 0000000..c11c7f8 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ExcludedConfigFilter.java @@ -0,0 +1,97 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */
+
+package me.lucko.spark.common.platform.serverconfig;
+
+import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
+
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Deque;
+import java.util.LinkedList;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+/**
+ * Filters excluded paths from {@link JsonElement}s (parsed configuration files).
+ */
+public class ExcludedConfigFilter {
+    private final Collection<String> pathsToExclude;
+
+    public ExcludedConfigFilter(Collection<String> pathsToExclude) {
+        this.pathsToExclude = pathsToExclude;
+    }
+
+    /**
+     * Deletes the excluded paths from the json element.
+     *
+     * @param json the json element
+     */
+    public JsonElement apply(JsonElement json) {
+        for (String path : this.pathsToExclude) {
+            Deque<String> pathDeque = new LinkedList<>(Arrays.asList(path.split("\\.")));
+            delete(json, pathDeque);
+        }
+        return json;
+    }
+
+    private static void delete(JsonElement json, Deque<String> path) {
+        if (path.isEmpty()) {
+            return;
+        }
+        if (!json.isJsonObject()) {
+            return;
+        }
+
+        JsonObject jsonObject = json.getAsJsonObject();
+        String expected = path.removeFirst().replace("<dot>", ".");
+
+        Collection<String> keys;
+        if (expected.equals("*")) {
+            keys = jsonObject.entrySet().stream()
+                    .map(Map.Entry::getKey)
+                    .collect(Collectors.toList());
+        } else if (expected.endsWith("*")) {
+            String pattern = expected.substring(0, expected.length() - 1);
+            keys = jsonObject.entrySet().stream()
+                    .map(Map.Entry::getKey)
+                    .filter(key -> key.startsWith(pattern))
+                    .collect(Collectors.toList());
+        } else if (jsonObject.has(expected)) {
+            keys = Collections.singletonList(expected);
+        } else {
+            keys = Collections.emptyList();
+        }
+
+        for (String key : keys) {
+            if (path.isEmpty()) {
+                jsonObject.remove(key);
+            } else {
+                Deque<String> pathCopy = keys.size() > 1
+                        ? new LinkedList<>(path)
+                        : path;
+
+                delete(jsonObject.get(key), pathCopy);
+            }
+        }
+    }
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesConfigParser.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesConfigParser.java
index 4c7c2c1..344ba1c 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesConfigParser.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesConfigParser.java
@@ -20,8 +20,12 @@
 
 package me.lucko.spark.common.platform.serverconfig;
 
+import com.google.gson.Gson;
+import com.google.gson.JsonElement;
+
 import java.io.BufferedReader;
 import java.io.IOException;
+import java.nio.file.Paths;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Properties;
@@ -32,6 +36,18 @@
 public enum PropertiesConfigParser implements ConfigParser {
     INSTANCE;
 
+    private static final Gson GSON = new Gson();
+
+    @Override
+    public JsonElement load(String file, ExcludedConfigFilter filter) throws IOException {
+        Map<String, Object> values = this.parse(Paths.get(file));
+        if (values == null) {
+            return null;
+        }
+
+        return filter.apply(GSON.toJsonTree(values));
+    }
+
     @Override
     public Map<String, Object> parse(BufferedReader reader) throws IOException {
         Properties properties = new Properties();
-- cgit 

From e5fc5ab0ac0016b8f4018f7f88d4603098d022c6 Mon Sep 17 00:00:00 2001
From: Luck
Date: Tue, 28 Jun 2022 21:13:07 +0100
Subject: Maintain order of exported config files

---
 .../common/platform/serverconfig/ServerConfigProvider.java | 11 ++++-------
 1 file changed, 4 insertions(+), 7 deletions(-)

(limited to 'spark-common/src')

diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java
index 1fc2391..c66305f 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java
@@ -23,8 +23,8 @@ package me.lucko.spark.common.platform.serverconfig;
 import com.google.gson.JsonElement;
 
 import java.util.Collections;
+import java.util.LinkedHashMap;
 import java.util.Map;
-import java.util.stream.Collectors;
 
 /**
  * Function to export server configuration files for access within the spark viewer.
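As an aside, the ExcludedConfigFilter introduced above is easiest to understand by example: a path is split on dots, a lone "*" segment matches every key at that level, a trailing "*" matches keys by prefix, and "<dot>" escapes a literal dot inside a key. The pathCopy in delete() exists because when a wildcard matches several keys, each recursive branch must consume the remaining path independently. A minimal, hypothetical usage sketch (the example class, sample JSON, and excluded paths are invented for illustration; only gson is assumed on the classpath):

import com.google.gson.JsonObject;
import com.google.gson.JsonParser;

import java.util.Arrays;

public class ExcludedConfigFilterExample {
    public static void main(String[] args) {
        // a stand-in for a parsed configuration file
        JsonObject config = JsonParser.parseString(
                "{\"settings\": {\"seed\": 123, \"motd\": \"hi\"},"
                + " \"database\": {\"password\": \"secret\", \"host\": \"localhost\"}}"
        ).getAsJsonObject();

        // "settings.seed" removes one exact key; "database.pass*" removes every
        // key under "database" whose name starts with "pass"
        ExcludedConfigFilter filter = new ExcludedConfigFilter(
                Arrays.asList("settings.seed", "database.pass*"));

        System.out.println(filter.apply(config));
        // prints: {"settings":{"motd":"hi"},"database":{"host":"localhost"}}
    }
}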
@@ -43,12 +43,9 @@ public interface ServerConfigProvider { Map loadServerConfigurations(); default Map exportServerConfigurations() { - return loadServerConfigurations().entrySet() - .stream() - .collect(Collectors.toMap( - Map.Entry::getKey, - e -> e.getValue().toString() - )); + Map map = new LinkedHashMap<>(); + loadServerConfigurations().forEach((key, value) -> map.put(key, value.toString())); + return map; } /** -- cgit From 2b24d42ab2b9a9c28a5929520d9ca565f07724f9 Mon Sep 17 00:00:00 2001 From: Luck Date: Wed, 29 Jun 2022 22:41:42 +0100 Subject: Add server config providers for Fabric/Forge --- .../spark/bukkit/BukkitServerConfigProvider.java | 8 --- .../serverconfig/AbstractServerConfigProvider.java | 10 ++++ .../spark/fabric/FabricServerConfigProvider.java | 57 ++++++++++++++++++++++ .../fabric/plugin/FabricServerSparkPlugin.java | 7 +++ .../spark/forge/ForgeServerConfigProvider.java | 57 ++++++++++++++++++++++ .../spark/forge/plugin/ForgeServerSparkPlugin.java | 7 +++ 6 files changed, 138 insertions(+), 8 deletions(-) create mode 100644 spark-fabric/src/main/java/me/lucko/spark/fabric/FabricServerConfigProvider.java create mode 100644 spark-forge/src/main/java/me/lucko/spark/forge/ForgeServerConfigProvider.java (limited to 'spark-common/src') diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java index f822015..4c587fb 100644 --- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java +++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java @@ -45,7 +45,6 @@ import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; -import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.LinkedHashMap; @@ -160,13 +159,6 @@ public class BukkitServerConfigProvider extends AbstractServerConfigProvider { HIDDEN_PATHS = hiddenPaths.build(); } - private static List getSystemPropertyList(String property) { - String value = System.getProperty(property); - return value == null - ? Collections.emptyList() - : Arrays.asList(value.split(",")); - } - private static List getTimingsHiddenConfigs() { try { return TimingsManager.hiddenConfigs; diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java index 501851a..559ae95 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java @@ -23,7 +23,10 @@ package me.lucko.spark.common.platform.serverconfig; import com.google.common.collect.ImmutableMap; import com.google.gson.JsonElement; +import java.util.Arrays; import java.util.Collection; +import java.util.Collections; +import java.util.List; import java.util.Map; /** @@ -60,4 +63,11 @@ public abstract class AbstractServerConfigProvider implements ServerConfigProvid return builder.build(); } + protected static List getSystemPropertyList(String property) { + String value = System.getProperty(property); + return value == null + ? 
Collections.emptyList() + : Arrays.asList(value.split(",")); + } + } diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricServerConfigProvider.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricServerConfigProvider.java new file mode 100644 index 0000000..18079d3 --- /dev/null +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricServerConfigProvider.java @@ -0,0 +1,57 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.fabric; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; + +import me.lucko.spark.common.platform.serverconfig.AbstractServerConfigProvider; +import me.lucko.spark.common.platform.serverconfig.ConfigParser; +import me.lucko.spark.common.platform.serverconfig.PropertiesConfigParser; + +import java.util.Collection; +import java.util.Map; + +public class FabricServerConfigProvider extends AbstractServerConfigProvider { + + /** A map of provided files and their type */ + private static final Map FILES; + /** A collection of paths to be excluded from the files */ + private static final Collection HIDDEN_PATHS; + + public FabricServerConfigProvider() { + super(FILES, HIDDEN_PATHS); + } + + static { + ImmutableSet.Builder hiddenPaths = ImmutableSet.builder() + .add("server-ip") + .add("motd") + .add("resource-pack") + .add("rconpassword") + .add("level-seed") + .addAll(getSystemPropertyList("spark.serverconfigs.hiddenpaths")); + + FILES = ImmutableMap.of("server.properties", PropertiesConfigParser.INSTANCE); + HIDDEN_PATHS = hiddenPaths.build(); + } + +} diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java index 2283a84..bb1d68c 100644 --- a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java @@ -31,12 +31,14 @@ import com.mojang.brigadier.suggestion.SuggestionsBuilder; import me.lucko.fabric.api.permissions.v0.Permissions; import me.lucko.spark.common.monitor.ping.PlayerPingProvider; import me.lucko.spark.common.platform.PlatformInfo; +import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider; import me.lucko.spark.common.platform.world.WorldInfoProvider; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.tick.TickReporter; import me.lucko.spark.fabric.FabricCommandSender; import me.lucko.spark.fabric.FabricPlatformInfo; import me.lucko.spark.fabric.FabricPlayerPingProvider; +import me.lucko.spark.fabric.FabricServerConfigProvider; import me.lucko.spark.fabric.FabricSparkMod; import me.lucko.spark.fabric.FabricTickHook; import me.lucko.spark.fabric.FabricTickReporter; @@ -148,6 +150,11 @@ public class 
FabricServerSparkPlugin extends FabricSparkPlugin implements Comman return new FabricPlayerPingProvider(this.server); } + @Override + public ServerConfigProvider createServerConfigProvider() { + return new FabricServerConfigProvider(); + } + @Override public WorldInfoProvider createWorldInfoProvider() { return new FabricWorldInfoProvider.Server(this.server); diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeServerConfigProvider.java b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeServerConfigProvider.java new file mode 100644 index 0000000..baa1358 --- /dev/null +++ b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeServerConfigProvider.java @@ -0,0 +1,57 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.forge; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; + +import me.lucko.spark.common.platform.serverconfig.AbstractServerConfigProvider; +import me.lucko.spark.common.platform.serverconfig.ConfigParser; +import me.lucko.spark.common.platform.serverconfig.PropertiesConfigParser; + +import java.util.Collection; +import java.util.Map; + +public class ForgeServerConfigProvider extends AbstractServerConfigProvider { + + /** A map of provided files and their type */ + private static final Map FILES; + /** A collection of paths to be excluded from the files */ + private static final Collection HIDDEN_PATHS; + + public ForgeServerConfigProvider() { + super(FILES, HIDDEN_PATHS); + } + + static { + ImmutableSet.Builder hiddenPaths = ImmutableSet.builder() + .add("server-ip") + .add("motd") + .add("resource-pack") + .add("rconpassword") + .add("level-seed") + .addAll(getSystemPropertyList("spark.serverconfigs.hiddenpaths")); + + FILES = ImmutableMap.of("server.properties", PropertiesConfigParser.INSTANCE); + HIDDEN_PATHS = hiddenPaths.build(); + } + +} diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java index 03f9952..f4a51e0 100644 --- a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java +++ b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java @@ -31,12 +31,14 @@ import com.mojang.brigadier.suggestion.SuggestionsBuilder; import me.lucko.spark.common.monitor.ping.PlayerPingProvider; import me.lucko.spark.common.platform.PlatformInfo; +import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider; import me.lucko.spark.common.platform.world.WorldInfoProvider; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.tick.TickReporter; import me.lucko.spark.forge.ForgeCommandSender; import me.lucko.spark.forge.ForgePlatformInfo; import me.lucko.spark.forge.ForgePlayerPingProvider; +import 
me.lucko.spark.forge.ForgeServerConfigProvider; import me.lucko.spark.forge.ForgeSparkMod; import me.lucko.spark.forge.ForgeTickHook; import me.lucko.spark.forge.ForgeTickReporter; @@ -205,6 +207,11 @@ public class ForgeServerSparkPlugin extends ForgeSparkPlugin implements Command< return new ForgePlayerPingProvider(this.server); } + @Override + public ServerConfigProvider createServerConfigProvider() { + return new ForgeServerConfigProvider(); + } + @Override public WorldInfoProvider createWorldInfoProvider() { return new ForgeWorldInfoProvider.Server(this.server); -- cgit From a10f966a443d56845a5efb1e65232e6b87eabb96 Mon Sep 17 00:00:00 2001 From: Luck Date: Wed, 29 Jun 2022 22:44:30 +0100 Subject: Fix NPE from null entity type name --- .../me/lucko/spark/common/platform/world/WorldStatisticsProvider.java | 4 ++++ 1 file changed, 4 insertions(+) (limited to 'spark-common/src') diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java index 864a296..80c35a6 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java @@ -122,6 +122,10 @@ public class WorldStatisticsProvider { String name = chunk.entityTypeName(key); int count = value.get(); + if (name == null) { + name = "unknown[" + key.toString() + "]"; + } + builder.putEntityCounts(name, count); combined.add(name, count); }); -- cgit From 73dd214ae66bab483ee8b4f0ed03881466da92e8 Mon Sep 17 00:00:00 2001 From: Luck Date: Tue, 12 Jul 2022 22:37:59 +0100 Subject: Improve/fix game thread dumper --- .../me/lucko/spark/bukkit/BukkitSparkPlugin.java | 7 +++-- .../spark/bukkit/BukkitWorldInfoProvider.java | 1 - .../lucko/spark/common/sampler/ThreadDumper.java | 10 ++++--- .../fabric/mixin/MinecraftClientAccessor.java | 34 ++++++++++++++++++++++ .../fabric/plugin/FabricClientSparkPlugin.java | 10 ++++++- .../fabric/plugin/FabricServerSparkPlugin.java | 9 +++++- .../spark/fabric/plugin/FabricSparkPlugin.java | 7 ----- spark-fabric/src/main/resources/spark.mixins.json | 3 +- .../spark/forge/plugin/ForgeClientSparkPlugin.java | 9 +++++- .../spark/forge/plugin/ForgeServerSparkPlugin.java | 9 +++++- .../lucko/spark/forge/plugin/ForgeSparkPlugin.java | 7 ----- .../main/resources/META-INF/accesstransformer.cfg | 1 + .../me/lucko/spark/sponge/Sponge7SparkPlugin.java | 7 +++-- .../me/lucko/spark/sponge/Sponge8SparkPlugin.java | 7 +++-- 14 files changed, 88 insertions(+), 33 deletions(-) create mode 100644 spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/MinecraftClientAccessor.java (limited to 'spark-common/src') diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java index fddd66b..5737d3d 100644 --- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java +++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java @@ -50,14 +50,16 @@ import java.util.stream.Stream; public class BukkitSparkPlugin extends JavaPlugin implements SparkPlugin { private BukkitAudiences audienceFactory; + private ThreadDumper gameThreadDumper; + private SparkPlatform platform; private CommandExecutor tpsCommand = null; - private final ThreadDumper.GameThread threadDumper = new ThreadDumper.GameThread(); @Override public void onEnable() { 
this.audienceFactory = BukkitAudiences.create(this); + this.gameThreadDumper = new ThreadDumper.Specific(Thread.currentThread()); this.platform = new SparkPlatform(this); this.platform.enable(); @@ -102,7 +104,6 @@ public class BukkitSparkPlugin extends JavaPlugin implements SparkPlugin { @Override public boolean onCommand(CommandSender sender, Command command, String label, String[] args) { - this.threadDumper.ensureSetup(); this.platform.executeCommand(new BukkitCommandSender(sender, this.audienceFactory), args); return true; } @@ -152,7 +153,7 @@ public class BukkitSparkPlugin extends JavaPlugin implements SparkPlugin { @Override public ThreadDumper getDefaultThreadDumper() { - return this.threadDumper.get(); + return this.gameThreadDumper; } @Override diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitWorldInfoProvider.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitWorldInfoProvider.java index f34899b..5d50eeb 100644 --- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitWorldInfoProvider.java +++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitWorldInfoProvider.java @@ -31,7 +31,6 @@ import org.bukkit.entity.Entity; import org.bukkit.entity.EntityType; import java.util.ArrayList; -import java.util.Arrays; import java.util.List; public class BukkitWorldInfoProvider implements WorldInfoProvider { diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java index 9d54f50..fe3a6a7 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java @@ -83,10 +83,8 @@ public interface ThreadDumper { return Objects.requireNonNull(this.dumper, "dumper"); } - public void ensureSetup() { - if (this.dumper == null) { - this.dumper = new Specific(new long[]{Thread.currentThread().getId()}); - } + public void setThread(Thread thread) { + this.dumper = new Specific(new long[]{thread.getId()}); } } @@ -98,6 +96,10 @@ public interface ThreadDumper { private Set threads; private Set threadNamesLowerCase; + public Specific(Thread thread) { + this.ids = new long[]{thread.getId()}; + } + public Specific(long[] ids) { this.ids = ids; } diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/MinecraftClientAccessor.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/MinecraftClientAccessor.java new file mode 100644 index 0000000..7a4fb78 --- /dev/null +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/MinecraftClientAccessor.java @@ -0,0 +1,34 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +package me.lucko.spark.fabric.mixin; + +import net.minecraft.client.MinecraftClient; + +import org.spongepowered.asm.mixin.Mixin; +import org.spongepowered.asm.mixin.gen.Accessor; + +@Mixin(MinecraftClient.class) +public interface MinecraftClientAccessor { + + @Accessor + Thread getThread(); + +} diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricClientSparkPlugin.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricClientSparkPlugin.java index 1876658..19d0707 100644 --- a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricClientSparkPlugin.java +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricClientSparkPlugin.java @@ -30,6 +30,7 @@ import com.mojang.brigadier.suggestion.SuggestionsBuilder; import me.lucko.spark.common.platform.PlatformInfo; import me.lucko.spark.common.platform.world.WorldInfoProvider; +import me.lucko.spark.common.sampler.ThreadDumper; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.tick.TickReporter; import me.lucko.spark.fabric.FabricCommandSender; @@ -38,6 +39,7 @@ import me.lucko.spark.fabric.FabricSparkMod; import me.lucko.spark.fabric.FabricTickHook; import me.lucko.spark.fabric.FabricTickReporter; import me.lucko.spark.fabric.FabricWorldInfoProvider; +import me.lucko.spark.fabric.mixin.MinecraftClientAccessor; import net.fabricmc.fabric.api.client.command.v2.ClientCommandRegistrationCallback; import net.fabricmc.fabric.api.client.command.v2.FabricClientCommandSource; @@ -57,10 +59,12 @@ public class FabricClientSparkPlugin extends FabricSparkPlugin implements Comman } private final MinecraftClient minecraft; + private final ThreadDumper gameThreadDumper; public FabricClientSparkPlugin(FabricSparkMod mod, MinecraftClient minecraft) { super(mod); this.minecraft = minecraft; + this.gameThreadDumper = new ThreadDumper.Specific(((MinecraftClientAccessor) minecraft).getThread()); } @Override @@ -89,7 +93,6 @@ public class FabricClientSparkPlugin extends FabricSparkPlugin implements Comman return 0; } - this.threadDumper.ensureSetup(); this.platform.executeCommand(new FabricCommandSender(context.getSource().getEntity(), this), args); return Command.SINGLE_SUCCESS; } @@ -119,6 +122,11 @@ public class FabricClientSparkPlugin extends FabricSparkPlugin implements Comman this.minecraft.executeSync(task); } + @Override + public ThreadDumper getDefaultThreadDumper() { + return this.gameThreadDumper; + } + @Override public TickHook createTickHook() { return new FabricTickHook.Client(); diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java index bb1d68c..f840f5e 100644 --- a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java @@ -33,6 +33,7 @@ import me.lucko.spark.common.monitor.ping.PlayerPingProvider; import me.lucko.spark.common.platform.PlatformInfo; import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider; import me.lucko.spark.common.platform.world.WorldInfoProvider; +import me.lucko.spark.common.sampler.ThreadDumper; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.tick.TickReporter; import me.lucko.spark.fabric.FabricCommandSender; @@ -63,10 +64,12 @@ public class FabricServerSparkPlugin extends FabricSparkPlugin implements Comman } private final MinecraftServer server; + private 
final ThreadDumper gameThreadDumper; public FabricServerSparkPlugin(FabricSparkMod mod, MinecraftServer server) { super(mod); this.server = server; + this.gameThreadDumper = new ThreadDumper.Specific(server.getThread()); } @Override @@ -97,7 +100,6 @@ public class FabricServerSparkPlugin extends FabricSparkPlugin implements Comman return 0; } - this.threadDumper.ensureSetup(); CommandOutput source = context.getSource().getEntity() != null ? context.getSource().getEntity() : context.getSource().getServer(); this.platform.executeCommand(new FabricCommandSender(source, this), args); return Command.SINGLE_SUCCESS; @@ -135,6 +137,11 @@ public class FabricServerSparkPlugin extends FabricSparkPlugin implements Comman this.server.executeSync(task); } + @Override + public ThreadDumper getDefaultThreadDumper() { + return this.gameThreadDumper; + } + @Override public TickHook createTickHook() { return new FabricTickHook.Server(); diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricSparkPlugin.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricSparkPlugin.java index b1392d4..3126f28 100644 --- a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricSparkPlugin.java +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricSparkPlugin.java @@ -34,7 +34,6 @@ import com.mojang.brigadier.tree.LiteralCommandNode; import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.SparkPlugin; import me.lucko.spark.common.command.sender.CommandSender; -import me.lucko.spark.common.sampler.ThreadDumper; import me.lucko.spark.common.util.ClassSourceLookup; import me.lucko.spark.common.util.SparkThreadFactory; import me.lucko.spark.fabric.FabricClassSourceLookup; @@ -59,7 +58,6 @@ public abstract class FabricSparkPlugin implements SparkPlugin { protected final ScheduledExecutorService scheduler; protected SparkPlatform platform; - protected final ThreadDumper.GameThread threadDumper = new ThreadDumper.GameThread(); protected FabricSparkPlugin(FabricSparkMod mod) { this.mod = mod; @@ -107,11 +105,6 @@ public abstract class FabricSparkPlugin implements SparkPlugin { } } - @Override - public ThreadDumper getDefaultThreadDumper() { - return this.threadDumper.get(); - } - @Override public ClassSourceLookup createClassSourceLookup() { return new FabricClassSourceLookup(); diff --git a/spark-fabric/src/main/resources/spark.mixins.json b/spark-fabric/src/main/resources/spark.mixins.json index 09587fe..e75b34f 100644 --- a/spark-fabric/src/main/resources/spark.mixins.json +++ b/spark-fabric/src/main/resources/spark.mixins.json @@ -5,7 +5,8 @@ "mixins": [], "client": [ "ClientEntityManagerAccessor", - "ClientWorldAccessor" + "ClientWorldAccessor", + "MinecraftClientAccessor" ], "server": [ "ServerEntityManagerAccessor", diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeClientSparkPlugin.java b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeClientSparkPlugin.java index 04c8785..a4c6bd1 100644 --- a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeClientSparkPlugin.java +++ b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeClientSparkPlugin.java @@ -29,6 +29,7 @@ import com.mojang.brigadier.suggestion.SuggestionsBuilder; import me.lucko.spark.common.platform.PlatformInfo; import me.lucko.spark.common.platform.world.WorldInfoProvider; +import me.lucko.spark.common.sampler.ThreadDumper; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.tick.TickReporter; import 
me.lucko.spark.forge.ForgeCommandSender; @@ -58,10 +59,12 @@ public class ForgeClientSparkPlugin extends ForgeSparkPlugin implements Command< } private final Minecraft minecraft; + private final ThreadDumper gameThreadDumper; public ForgeClientSparkPlugin(ForgeSparkMod mod, Minecraft minecraft) { super(mod); this.minecraft = minecraft; + this.gameThreadDumper = new ThreadDumper.Specific(minecraft.gameThread); } @Override @@ -84,7 +87,6 @@ public class ForgeClientSparkPlugin extends ForgeSparkPlugin implements Command< return 0; } - this.threadDumper.ensureSetup(); this.platform.executeCommand(new ForgeCommandSender(context.getSource().getEntity(), this), args); return Command.SINGLE_SUCCESS; } @@ -114,6 +116,11 @@ public class ForgeClientSparkPlugin extends ForgeSparkPlugin implements Command< this.minecraft.executeIfPossible(task); } + @Override + public ThreadDumper getDefaultThreadDumper() { + return this.gameThreadDumper; + } + @Override public TickHook createTickHook() { return new ForgeTickHook(TickEvent.Type.CLIENT); diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java index f4a51e0..1aeb2b1 100644 --- a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java +++ b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java @@ -33,6 +33,7 @@ import me.lucko.spark.common.monitor.ping.PlayerPingProvider; import me.lucko.spark.common.platform.PlatformInfo; import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider; import me.lucko.spark.common.platform.world.WorldInfoProvider; +import me.lucko.spark.common.sampler.ThreadDumper; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.tick.TickReporter; import me.lucko.spark.forge.ForgeCommandSender; @@ -75,11 +76,13 @@ public class ForgeServerSparkPlugin extends ForgeSparkPlugin implements Command< } private final MinecraftServer server; + private final ThreadDumper gameThreadDumper; private Map> registeredPermissions = Collections.emptyMap(); public ForgeServerSparkPlugin(ForgeSparkMod mod, MinecraftServer server) { super(mod); this.server = server; + this.gameThreadDumper = new ThreadDumper.Specific(server.getRunningThread()); } @Override @@ -146,7 +149,6 @@ public class ForgeServerSparkPlugin extends ForgeSparkPlugin implements Command< return 0; } - this.threadDumper.ensureSetup(); CommandSource source = context.getSource().getEntity() != null ? 
context.getSource().getEntity() : context.getSource().getServer(); this.platform.executeCommand(new ForgeCommandSender(source, this), args); return Command.SINGLE_SUCCESS; @@ -192,6 +194,11 @@ public class ForgeServerSparkPlugin extends ForgeSparkPlugin implements Command< this.server.executeIfPossible(task); } + @Override + public ThreadDumper getDefaultThreadDumper() { + return this.gameThreadDumper; + } + @Override public TickHook createTickHook() { return new ForgeTickHook(TickEvent.Type.SERVER); diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeSparkPlugin.java b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeSparkPlugin.java index f257e34..36a7ce8 100644 --- a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeSparkPlugin.java +++ b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeSparkPlugin.java @@ -34,7 +34,6 @@ import com.mojang.brigadier.tree.LiteralCommandNode; import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.SparkPlugin; import me.lucko.spark.common.command.sender.CommandSender; -import me.lucko.spark.common.sampler.ThreadDumper; import me.lucko.spark.common.util.ClassSourceLookup; import me.lucko.spark.common.util.SparkThreadFactory; import me.lucko.spark.forge.ForgeClassSourceLookup; @@ -59,7 +58,6 @@ public abstract class ForgeSparkPlugin implements SparkPlugin { protected final ScheduledExecutorService scheduler; protected SparkPlatform platform; - protected final ThreadDumper.GameThread threadDumper = new ThreadDumper.GameThread(); protected ForgeSparkPlugin(ForgeSparkMod mod) { this.mod = mod; @@ -107,11 +105,6 @@ public abstract class ForgeSparkPlugin implements SparkPlugin { } } - @Override - public ThreadDumper getDefaultThreadDumper() { - return this.threadDumper.get(); - } - @Override public ClassSourceLookup createClassSourceLookup() { return new ForgeClassSourceLookup(); diff --git a/spark-forge/src/main/resources/META-INF/accesstransformer.cfg b/spark-forge/src/main/resources/META-INF/accesstransformer.cfg index 1e418b8..39e9c1a 100644 --- a/spark-forge/src/main/resources/META-INF/accesstransformer.cfg +++ b/spark-forge/src/main/resources/META-INF/accesstransformer.cfg @@ -2,3 +2,4 @@ public net.minecraft.server.level.ServerLevel f_143244_ # entityManager public net.minecraft.world.level.entity.PersistentEntitySectionManager f_157495_ # sectionStorage public net.minecraft.client.multiplayer.ClientLevel f_171631_ # entityStorage public net.minecraft.world.level.entity.TransientEntitySectionManager f_157638_ # sectionStorage +public net.minecraft.client.Minecraft f_91018_ # gameThread \ No newline at end of file diff --git a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7SparkPlugin.java b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7SparkPlugin.java index 324e242..e6c9a04 100644 --- a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7SparkPlugin.java +++ b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7SparkPlugin.java @@ -73,9 +73,9 @@ public class Sponge7SparkPlugin implements SparkPlugin { private final Path configDirectory; private final SpongeExecutorService asyncExecutor; private final SpongeExecutorService syncExecutor; + private final ThreadDumper.GameThread gameThreadDumper = new ThreadDumper.GameThread(); private SparkPlatform platform; - private final ThreadDumper.GameThread threadDumper = new ThreadDumper.GameThread(); @Inject public Sponge7SparkPlugin(PluginContainer pluginContainer, Logger logger, Game game, @ConfigDir(sharedRoot = 
false) Path configDirectory, @AsynchronousExecutor SpongeExecutorService asyncExecutor, @SynchronousExecutor SpongeExecutorService syncExecutor) { @@ -85,6 +85,8 @@ public class Sponge7SparkPlugin implements SparkPlugin { this.configDirectory = configDirectory; this.asyncExecutor = asyncExecutor; this.syncExecutor = syncExecutor; + + this.syncExecutor.execute(() -> this.gameThreadDumper.setThread(Thread.currentThread())); } @Listener @@ -151,7 +153,7 @@ public class Sponge7SparkPlugin implements SparkPlugin { @Override public ThreadDumper getDefaultThreadDumper() { - return this.threadDumper.get(); + return this.gameThreadDumper.get(); } @Override @@ -201,7 +203,6 @@ public class Sponge7SparkPlugin implements SparkPlugin { @Override public CommandResult process(CommandSource source, String arguments) { - this.plugin.threadDumper.ensureSetup(); this.plugin.platform.executeCommand(new Sponge7CommandSender(source), arguments.split(" ")); return CommandResult.empty(); } diff --git a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8SparkPlugin.java b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8SparkPlugin.java index 68e47e3..70e73b9 100644 --- a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8SparkPlugin.java +++ b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8SparkPlugin.java @@ -69,9 +69,9 @@ public class Sponge8SparkPlugin implements SparkPlugin { private final Path configDirectory; private final ExecutorService asyncExecutor; private final ExecutorService syncExecutor; + private final ThreadDumper.GameThread gameThreadDumper = new ThreadDumper.GameThread(); private SparkPlatform platform; - private final ThreadDumper.GameThread threadDumper = new ThreadDumper.GameThread(); @Inject public Sponge8SparkPlugin(PluginContainer pluginContainer, Logger logger, Game game, @ConfigDir(sharedRoot = false) Path configDirectory) { @@ -88,6 +88,8 @@ public class Sponge8SparkPlugin implements SparkPlugin { } else { throw new IllegalStateException("Server and client both unavailable"); } + + this.syncExecutor.execute(() -> this.gameThreadDumper.setThread(Thread.currentThread())); } @@ -159,7 +161,7 @@ public class Sponge8SparkPlugin implements SparkPlugin { @Override public ThreadDumper getDefaultThreadDumper() { - return this.threadDumper.get(); + return this.gameThreadDumper.get(); } @Override @@ -204,7 +206,6 @@ public class Sponge8SparkPlugin implements SparkPlugin { @Override public CommandResult process(CommandCause cause, ArgumentReader.Mutable arguments) { - this.plugin.threadDumper.ensureSetup(); this.plugin.platform.executeCommand(new Sponge8CommandSender(cause), arguments.input().split(" ")); return CommandResult.success(); } -- cgit From 06de991f44f3f0f33eed21fb92224a395a2a92ff Mon Sep 17 00:00:00 2001 From: Luck Date: Wed, 13 Jul 2022 21:10:28 +0100 Subject: Support linux x64 musl --- spark-common/build.gradle | 9 +------- .../common/sampler/async/AsyncProfilerAccess.java | 24 +++++++++++++++++++++ .../spark/linux/aarch64/libasyncProfiler.so | Bin 328432 -> 333864 bytes .../spark/linux/amd64-musl/libasyncProfiler.so | Bin 0 -> 304568 bytes .../spark/linux/amd64/libasyncProfiler.so | Bin 342239 -> 347712 bytes .../main/resources/spark/macos/libasyncProfiler.so | Bin 688400 -> 690128 bytes 6 files changed, 25 insertions(+), 8 deletions(-) create mode 100755 spark-common/src/main/resources/spark/linux/amd64-musl/libasyncProfiler.so (limited to 'spark-common/src') diff --git a/spark-common/build.gradle b/spark-common/build.gradle index bc493f3..fbd0db2 
100644 --- a/spark-common/build.gradle +++ b/spark-common/build.gradle @@ -8,7 +8,7 @@ license { dependencies { api project(':spark-api') - implementation 'com.github.jvm-profiling-tools:async-profiler:v2.7' + implementation 'com.github.jvm-profiling-tools:async-profiler:v2.8.1' implementation 'org.ow2.asm:asm:9.1' implementation 'com.google.protobuf:protobuf-javalite:3.15.6' implementation 'com.squareup.okhttp3:okhttp:3.14.1' @@ -37,13 +37,6 @@ dependencies { compileOnly 'org.checkerframework:checker-qual:3.8.0' } -processResources { - from(sourceSets.main.resources.srcDirs) { - include 'spark/linux/libasyncProfiler.so' - include 'spark/macosx/libasyncProfiler.so' - } -} - protobuf { protoc { if (System.getProperty("os.name") == "Mac OS X" && System.getProperty("os.arch") == "aarch64") { diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java index d642a53..ef2c035 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java @@ -29,13 +29,16 @@ import me.lucko.spark.common.util.TemporaryFiles; import one.profiler.AsyncProfiler; import one.profiler.Events; +import java.io.BufferedReader; import java.io.InputStream; +import java.io.InputStreamReader; import java.net.URL; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.StandardCopyOption; import java.util.Locale; import java.util.logging.Level; +import java.util.stream.Collectors; /** * Provides a bridge between spark and async-profiler. @@ -108,8 +111,13 @@ public enum AsyncProfilerAccess { String os = System.getProperty("os.name").toLowerCase(Locale.ROOT).replace(" ", ""); String arch = System.getProperty("os.arch").toLowerCase(Locale.ROOT); + if (os.equals("linux") && arch.equals("amd64") && isLinuxMusl()) { + arch = "amd64-musl"; + } + Table supported = ImmutableTable.builder() .put("linux", "amd64", "linux/amd64") + .put("linux", "amd64-musl", "linux/amd64-musl") .put("linux", "aarch64", "linux/aarch64") .put("macosx", "amd64", "macos") .put("macosx", "aarch64", "macos") @@ -190,4 +198,20 @@ public enum AsyncProfilerAccess { super("A runtime error occurred whilst loading the native library", cause); } } + + // Checks if the system is using musl instead of glibc + private static boolean isLinuxMusl() { + try { + InputStream stream = new ProcessBuilder("sh", "-c", "ldd `which ls`") + .start() + .getInputStream(); + + BufferedReader reader = new BufferedReader(new InputStreamReader(stream)); + String output = reader.lines().collect(Collectors.joining()); + return output.contains("musl"); // shrug + } catch (Throwable e) { + // ignore + return false; + } + } } diff --git a/spark-common/src/main/resources/spark/linux/aarch64/libasyncProfiler.so b/spark-common/src/main/resources/spark/linux/aarch64/libasyncProfiler.so index 35f83b2..c3c2eb2 100755 Binary files a/spark-common/src/main/resources/spark/linux/aarch64/libasyncProfiler.so and b/spark-common/src/main/resources/spark/linux/aarch64/libasyncProfiler.so differ diff --git a/spark-common/src/main/resources/spark/linux/amd64-musl/libasyncProfiler.so b/spark-common/src/main/resources/spark/linux/amd64-musl/libasyncProfiler.so new file mode 100755 index 0000000..4c69ab8 Binary files /dev/null and b/spark-common/src/main/resources/spark/linux/amd64-musl/libasyncProfiler.so differ diff --git 
a/spark-common/src/main/resources/spark/linux/amd64/libasyncProfiler.so b/spark-common/src/main/resources/spark/linux/amd64/libasyncProfiler.so index edbf103..5612ad9 100755 Binary files a/spark-common/src/main/resources/spark/linux/amd64/libasyncProfiler.so and b/spark-common/src/main/resources/spark/linux/amd64/libasyncProfiler.so differ diff --git a/spark-common/src/main/resources/spark/macos/libasyncProfiler.so b/spark-common/src/main/resources/spark/macos/libasyncProfiler.so index ab818e9..1fc6ba3 100755 Binary files a/spark-common/src/main/resources/spark/macos/libasyncProfiler.so and b/spark-common/src/main/resources/spark/macos/libasyncProfiler.so differ -- cgit From 319aae27ad290338a5558ac53517e144254a86ce Mon Sep 17 00:00:00 2001 From: Luck Date: Sun, 17 Jul 2022 12:19:03 +0100 Subject: Fix fabric client startup error --- .../java/me/lucko/spark/common/sampler/ThreadDumper.java | 14 ++++++++++++++ .../lucko/spark/fabric/plugin/FabricClientSparkPlugin.java | 6 +++--- 2 files changed, 17 insertions(+), 3 deletions(-) (limited to 'spark-common/src') diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java index fe3a6a7..fd0c413 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java @@ -76,10 +76,24 @@ public interface ThreadDumper { * the game (server/client) thread. */ final class GameThread implements Supplier { + private Supplier threadSupplier; private Specific dumper = null; + public GameThread() { + + } + + public GameThread(Supplier threadSupplier) { + this.threadSupplier = threadSupplier; + } + @Override public ThreadDumper get() { + if (this.dumper == null) { + setThread(this.threadSupplier.get()); + this.threadSupplier = null; + } + return Objects.requireNonNull(this.dumper, "dumper"); } diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricClientSparkPlugin.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricClientSparkPlugin.java index 19d0707..0ef6620 100644 --- a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricClientSparkPlugin.java +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricClientSparkPlugin.java @@ -59,12 +59,12 @@ public class FabricClientSparkPlugin extends FabricSparkPlugin implements Comman } private final MinecraftClient minecraft; - private final ThreadDumper gameThreadDumper; + private final ThreadDumper.GameThread gameThreadDumper; public FabricClientSparkPlugin(FabricSparkMod mod, MinecraftClient minecraft) { super(mod); this.minecraft = minecraft; - this.gameThreadDumper = new ThreadDumper.Specific(((MinecraftClientAccessor) minecraft).getThread()); + this.gameThreadDumper = new ThreadDumper.GameThread(() -> ((MinecraftClientAccessor) minecraft).getThread()); } @Override @@ -124,7 +124,7 @@ public class FabricClientSparkPlugin extends FabricSparkPlugin implements Comman @Override public ThreadDumper getDefaultThreadDumper() { - return this.gameThreadDumper; + return this.gameThreadDumper.get(); } @Override -- cgit From 768bf7a338da8e5daaebc9580ff3b289092c28ee Mon Sep 17 00:00:00 2001 From: Luck Date: Sun, 17 Jul 2022 14:05:06 +0100 Subject: Remove some unnecessary dependencies --- spark-bukkit/build.gradle | 3 - spark-bungeecord/build.gradle | 3 - spark-common/build.gradle | 3 - .../java/me/lucko/spark/common/SparkPlatform.java | 12 +--- 
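The "Fix fabric client startup error" change above boils down to deferred thread capture: resolving the client thread eagerly in the plugin constructor apparently ran too early in client startup, so GameThread now holds a Supplier<Thread> and only resolves it on the first get() call. A stripped-down sketch of the same lazy-capture pattern (hypothetical class name, not spark's API):

import java.util.Objects;
import java.util.function.Supplier;

public class LazyGameThread {
    private Supplier<Thread> supplier;
    private Thread thread;

    public LazyGameThread(Supplier<Thread> supplier) {
        // nothing is resolved yet, so constructing this is safe at any point in startup
        this.supplier = supplier;
    }

    public Thread get() {
        if (this.thread == null) {
            // the first call resolves the thread; drop the supplier afterwards
            this.thread = this.supplier.get();
            this.supplier = null;
        }
        return Objects.requireNonNull(this.thread, "thread");
    }

    public static void main(String[] args) {
        LazyGameThread holder = new LazyGameThread(Thread::currentThread);
        System.out.println(holder.get().getName()); // e.g. "main"
    }
}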
.../common/command/modules/HeapAnalysisModule.java | 4 +- .../common/command/modules/SamplerModule.java | 4 +- .../spark/common/util/AbstractHttpClient.java | 46 ------------- .../me/lucko/spark/common/util/BytebinClient.java | 75 ++++++++++------------ .../me/lucko/spark/common/util/Compression.java | 60 ++++++++--------- spark-fabric/build.gradle | 3 - spark-forge/build.gradle | 3 - spark-minestom/build.gradle | 3 - spark-nukkit/build.gradle | 3 - spark-sponge7/build.gradle | 3 - spark-sponge8/build.gradle | 3 - spark-velocity/build.gradle | 3 - spark-velocity4/build.gradle | 3 - spark-waterdog/build.gradle | 3 - 18 files changed, 65 insertions(+), 172 deletions(-) delete mode 100644 spark-common/src/main/java/me/lucko/spark/common/util/AbstractHttpClient.java (limited to 'spark-common/src') diff --git a/spark-bukkit/build.gradle b/spark-bukkit/build.gradle index 7144291..917fb55 100644 --- a/spark-bukkit/build.gradle +++ b/spark-bukkit/build.gradle @@ -31,12 +31,9 @@ processResources { shadowJar { archiveName = "spark-${project.pluginVersion}-bukkit.jar" - relocate 'okio', 'me.lucko.spark.lib.okio' - relocate 'okhttp3', 'me.lucko.spark.lib.okhttp3' relocate 'net.kyori.adventure', 'me.lucko.spark.lib.adventure' relocate 'net.kyori.examination', 'me.lucko.spark.lib.adventure.examination' relocate 'net.bytebuddy', 'me.lucko.spark.lib.bytebuddy' - relocate 'org.tukaani.xz', 'me.lucko.spark.lib.xz' relocate 'com.google.protobuf', 'me.lucko.spark.lib.protobuf' relocate 'org.objectweb.asm', 'me.lucko.spark.lib.asm' relocate 'one.profiler', 'me.lucko.spark.lib.asyncprofiler' diff --git a/spark-bungeecord/build.gradle b/spark-bungeecord/build.gradle index 1e92621..d96d589 100644 --- a/spark-bungeecord/build.gradle +++ b/spark-bungeecord/build.gradle @@ -21,12 +21,9 @@ processResources { shadowJar { archiveName = "spark-${project.pluginVersion}-bungeecord.jar" - relocate 'okio', 'me.lucko.spark.lib.okio' - relocate 'okhttp3', 'me.lucko.spark.lib.okhttp3' relocate 'net.kyori.adventure', 'me.lucko.spark.lib.adventure' relocate 'net.kyori.examination', 'me.lucko.spark.lib.adventure.examination' relocate 'net.bytebuddy', 'me.lucko.spark.lib.bytebuddy' - relocate 'org.tukaani.xz', 'me.lucko.spark.lib.xz' relocate 'com.google.protobuf', 'me.lucko.spark.lib.protobuf' relocate 'org.objectweb.asm', 'me.lucko.spark.lib.asm' relocate 'one.profiler', 'me.lucko.spark.lib.asyncprofiler' diff --git a/spark-common/build.gradle b/spark-common/build.gradle index fbd0db2..ce09d51 100644 --- a/spark-common/build.gradle +++ b/spark-common/build.gradle @@ -11,10 +11,7 @@ dependencies { implementation 'com.github.jvm-profiling-tools:async-profiler:v2.8.1' implementation 'org.ow2.asm:asm:9.1' implementation 'com.google.protobuf:protobuf-javalite:3.15.6' - implementation 'com.squareup.okhttp3:okhttp:3.14.1' - implementation 'com.squareup.okio:okio:1.17.3' implementation 'net.bytebuddy:byte-buddy-agent:1.11.0' - implementation 'org.tukaani:xz:1.8' api('net.kyori:adventure-api:4.11.0') { exclude(module: 'adventure-bom') exclude(module: 'checker-qual') diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java index 0ef4556..f92abf3 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java +++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java @@ -54,8 +54,6 @@ import me.lucko.spark.common.util.TemporaryFiles; import net.kyori.adventure.text.event.ClickEvent; -import okhttp3.OkHttpClient; - 
import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; @@ -93,7 +91,6 @@ public class SparkPlatform { private final SparkPlugin plugin; private final Configuration configuration; private final String viewerUrl; - private final OkHttpClient httpClient; private final BytebinClient bytebinClient; private final boolean disableResponseBroadcast; private final List commandModules; @@ -116,9 +113,7 @@ public class SparkPlatform { this.viewerUrl = this.configuration.getString("viewerUrl", "https://spark.lucko.me/"); String bytebinUrl = this.configuration.getString("bytebinUrl", "https://bytebin.lucko.me/"); - - this.httpClient = new OkHttpClient(); - this.bytebinClient = new BytebinClient(this.httpClient, bytebinUrl, "spark-plugin"); + this.bytebinClient = new BytebinClient(bytebinUrl, "spark-plugin"); this.disableResponseBroadcast = this.configuration.getBoolean("disableResponseBroadcast", false); @@ -198,11 +193,6 @@ public class SparkPlatform { SparkApi.unregister(); TemporaryFiles.deleteTemporaryFiles(); - - // shutdown okhttp - // see: https://github.com/square/okhttp/issues/4029 - this.httpClient.dispatcher().executorService().shutdown(); - this.httpClient.connectionPool().evictAll(); } public SparkPlugin getPlugin() { diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java index 1030f35..5bd62a8 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java @@ -36,8 +36,6 @@ import me.lucko.spark.proto.SparkHeapProtos; import net.kyori.adventure.text.event.ClickEvent; -import okhttp3.MediaType; - import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; @@ -54,7 +52,7 @@ import static net.kyori.adventure.text.format.NamedTextColor.GREEN; import static net.kyori.adventure.text.format.NamedTextColor.RED; public class HeapAnalysisModule implements CommandModule { - private static final MediaType SPARK_HEAP_MEDIA_TYPE = MediaType.parse("application/x-spark-heap"); + private static final String SPARK_HEAP_MEDIA_TYPE = "application/x-spark-heap"; @Override public void registerCommands(Consumer consumer) { diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java index fd5cd67..0a80c31 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java @@ -44,8 +44,6 @@ import me.lucko.spark.proto.SparkSamplerProtos; import net.kyori.adventure.text.event.ClickEvent; -import okhttp3.MediaType; - import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; @@ -66,7 +64,7 @@ import static net.kyori.adventure.text.format.NamedTextColor.GRAY; import static net.kyori.adventure.text.format.NamedTextColor.RED; public class SamplerModule implements CommandModule { - private static final MediaType SPARK_SAMPLER_MEDIA_TYPE = MediaType.parse("application/x-spark-sampler"); + private static final String SPARK_SAMPLER_MEDIA_TYPE = "application/x-spark-sampler"; /** The sampler instance currently running, if any */ private Sampler activeSampler = null; diff --git 
a/spark-common/src/main/java/me/lucko/spark/common/util/AbstractHttpClient.java b/spark-common/src/main/java/me/lucko/spark/common/util/AbstractHttpClient.java deleted file mode 100644 index 8ece3d4..0000000 --- a/spark-common/src/main/java/me/lucko/spark/common/util/AbstractHttpClient.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * This file is part of spark. - * - * Copyright (c) lucko (Luck) - * Copyright (c) contributors - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - */ - -package me.lucko.spark.common.util; - -import okhttp3.OkHttpClient; -import okhttp3.Request; -import okhttp3.Response; - -import java.io.IOException; - -public class AbstractHttpClient { - - /** The http client */ - protected final OkHttpClient okHttp; - - public AbstractHttpClient(OkHttpClient okHttp) { - this.okHttp = okHttp; - } - - protected Response makeHttpRequest(Request request) throws IOException { - Response response = this.okHttp.newCall(request).execute(); - if (!response.isSuccessful()) { - response.close(); - throw new RuntimeException("Request was unsuccessful: " + response.code() + " - " + response.message()); - } - return response; - } -} diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/BytebinClient.java b/spark-common/src/main/java/me/lucko/spark/common/util/BytebinClient.java index c2ca1b1..e69b94e 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/util/BytebinClient.java +++ b/spark-common/src/main/java/me/lucko/spark/common/util/BytebinClient.java @@ -22,73 +22,66 @@ package me.lucko.spark.common.util; import com.google.protobuf.AbstractMessageLite; -import okhttp3.MediaType; -import okhttp3.OkHttpClient; -import okhttp3.Request; -import okhttp3.RequestBody; -import okhttp3.Response; - -import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.OutputStream; +import java.net.HttpURLConnection; +import java.net.URL; +import java.util.concurrent.TimeUnit; +import java.util.function.Consumer; import java.util.zip.GZIPOutputStream; /** * Utility for posting content to bytebin. */ -public class BytebinClient extends AbstractHttpClient { +public class BytebinClient { /** The bytebin URL */ private final String url; /** The client user agent */ private final String userAgent; - /** - * Creates a new bytebin instance - * - * @param url the bytebin url - * @param userAgent the client user agent string - */ - public BytebinClient(OkHttpClient okHttpClient, String url, String userAgent) { - super(okHttpClient); + public BytebinClient(String url, String userAgent) { this.url = url + (url.endsWith("/") ? "" : "/"); this.userAgent = userAgent; } - /** - * POSTs GZIP compressed content to bytebin. 
- * - * @param buf the compressed content - * @param contentType the type of the content - * @return the key of the resultant content - * @throws IOException if an error occurs - */ - public Content postContent(byte[] buf, MediaType contentType) throws IOException { - RequestBody body = RequestBody.create(contentType, buf); + private Content postContent(String contentType, Consumer consumer) throws IOException { + URL url = new URL(this.url + "post"); + HttpURLConnection connection = (HttpURLConnection) url.openConnection(); + try { + connection.setConnectTimeout((int) TimeUnit.SECONDS.toMillis(10)); + connection.setReadTimeout((int) TimeUnit.SECONDS.toMillis(10)); + + connection.setDoOutput(true); + connection.setRequestMethod("POST"); + connection.setRequestProperty("Content-Type", contentType); + connection.setRequestProperty("User-Agent", this.userAgent); + connection.setRequestProperty("Content-Encoding", "gzip"); - Request.Builder requestBuilder = new Request.Builder() - .url(this.url + "post") - .header("User-Agent", this.userAgent) - .header("Content-Encoding", "gzip"); + connection.connect(); + try (OutputStream output = connection.getOutputStream()) { + consumer.accept(output); + } - Request request = requestBuilder.post(body).build(); - try (Response response = makeHttpRequest(request)) { - String key = response.header("Location"); + String key = connection.getHeaderField("Location"); if (key == null) { throw new IllegalStateException("Key not returned"); } return new Content(key); + } finally { + connection.getInputStream().close(); + connection.disconnect(); } } - public Content postContent(AbstractMessageLite proto, MediaType contentType) throws IOException { - ByteArrayOutputStream byteOut = new ByteArrayOutputStream(); - try (OutputStream out = new GZIPOutputStream(byteOut)) { - proto.writeTo(out); - } catch (IOException e) { - throw new RuntimeException(e); - } - return postContent(byteOut.toByteArray(), contentType); + public Content postContent(AbstractMessageLite proto, String contentType) throws IOException { + return postContent(contentType, outputStream -> { + try (OutputStream out = new GZIPOutputStream(outputStream)) { + proto.writeTo(out); + } catch (IOException e) { + throw new RuntimeException(e); + } + }); } public static final class Content { diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/Compression.java b/spark-common/src/main/java/me/lucko/spark/common/util/Compression.java index 9295c25..c8100e1 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/util/Compression.java +++ b/spark-common/src/main/java/me/lucko/spark/common/util/Compression.java @@ -20,10 +20,6 @@ package me.lucko.spark.common.util; -import org.tukaani.xz.LZMA2Options; -import org.tukaani.xz.LZMAOutputStream; -import org.tukaani.xz.XZOutputStream; - import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; @@ -46,35 +42,35 @@ public enum Compression { } return compressedFile; } - }, - XZ { - @Override - public Path compress(Path file, LongConsumer progressHandler) throws IOException { - Path compressedFile = file.getParent().resolve(file.getFileName().toString() + ".xz"); - try (InputStream in = Files.newInputStream(file)) { - try (OutputStream out = Files.newOutputStream(compressedFile)) { - try (XZOutputStream compressionOut = new XZOutputStream(out, new LZMA2Options())) { - copy(in, compressionOut, progressHandler); - } - } - } - return compressedFile; - } - }, - LZMA { - @Override - public Path compress(Path file, LongConsumer 
progressHandler) throws IOException { - Path compressedFile = file.getParent().resolve(file.getFileName().toString() + ".lzma"); - try (InputStream in = Files.newInputStream(file)) { - try (OutputStream out = Files.newOutputStream(compressedFile)) { - try (LZMAOutputStream compressionOut = new LZMAOutputStream(out, new LZMA2Options(), true)) { - copy(in, compressionOut, progressHandler); - } - } - } - return compressedFile; - } }; + // XZ { + // @Override + // public Path compress(Path file, LongConsumer progressHandler) throws IOException { + // Path compressedFile = file.getParent().resolve(file.getFileName().toString() + ".xz"); + // try (InputStream in = Files.newInputStream(file)) { + // try (OutputStream out = Files.newOutputStream(compressedFile)) { + // try (XZOutputStream compressionOut = new XZOutputStream(out, new LZMA2Options())) { + // copy(in, compressionOut, progressHandler); + // } + // } + // } + // return compressedFile; + // } + // }, + // LZMA { + // @Override + // public Path compress(Path file, LongConsumer progressHandler) throws IOException { + // Path compressedFile = file.getParent().resolve(file.getFileName().toString() + ".lzma"); + // try (InputStream in = Files.newInputStream(file)) { + // try (OutputStream out = Files.newOutputStream(compressedFile)) { + // try (LZMAOutputStream compressionOut = new LZMAOutputStream(out, new LZMA2Options(), true)) { + // copy(in, compressionOut, progressHandler); + // } + // } + // } + // return compressedFile; + // } + // }; public abstract Path compress(Path file, LongConsumer progressHandler) throws IOException; diff --git a/spark-fabric/build.gradle b/spark-fabric/build.gradle index 31008bb..35b7a86 100644 --- a/spark-fabric/build.gradle +++ b/spark-fabric/build.gradle @@ -70,12 +70,9 @@ shadowJar { archiveFileName = "spark-fabric-${project.pluginVersion}-dev.jar" configurations = [project.configurations.shade] - relocate 'okio', 'me.lucko.spark.lib.okio' - relocate 'okhttp3', 'me.lucko.spark.lib.okhttp3' relocate 'net.kyori.adventure', 'me.lucko.spark.lib.adventure' relocate 'net.kyori.examination', 'me.lucko.spark.lib.adventure.examination' relocate 'net.bytebuddy', 'me.lucko.spark.lib.bytebuddy' - relocate 'org.tukaani.xz', 'me.lucko.spark.lib.xz' relocate 'com.google.protobuf', 'me.lucko.spark.lib.protobuf' relocate 'org.objectweb.asm', 'me.lucko.spark.lib.asm' relocate 'one.profiler', 'me.lucko.spark.lib.asyncprofiler' diff --git a/spark-forge/build.gradle b/spark-forge/build.gradle index 3f46b95..46ac08c 100644 --- a/spark-forge/build.gradle +++ b/spark-forge/build.gradle @@ -52,12 +52,9 @@ shadowJar { archiveName = "spark-${project.pluginVersion}-forge.jar" configurations = [project.configurations.shade] - relocate 'okio', 'me.lucko.spark.lib.okio' - relocate 'okhttp3', 'me.lucko.spark.lib.okhttp3' relocate 'net.kyori.adventure', 'me.lucko.spark.lib.adventure' relocate 'net.kyori.examination', 'me.lucko.spark.lib.adventure.examination' relocate 'net.bytebuddy', 'me.lucko.spark.lib.bytebuddy' - relocate 'org.tukaani.xz', 'me.lucko.spark.lib.xz' relocate 'com.google.protobuf', 'me.lucko.spark.lib.protobuf' relocate 'org.objectweb.asm', 'me.lucko.spark.lib.asm' relocate 'one.profiler', 'me.lucko.spark.lib.asyncprofiler' diff --git a/spark-minestom/build.gradle b/spark-minestom/build.gradle index 26cdc2c..04e5f25 100644 --- a/spark-minestom/build.gradle +++ b/spark-minestom/build.gradle @@ -30,11 +30,8 @@ shadowJar { exclude(dependency('net.kyori:^(?!adventure-text-feature-pagination).+$')) } - relocate 'okio', 
'me.lucko.spark.lib.okio' - relocate 'okhttp3', 'me.lucko.spark.lib.okhttp3' relocate 'net.kyori.adventure.text.feature.pagination', 'me.lucko.spark.lib.adventure.pagination' relocate 'net.bytebuddy', 'me.lucko.spark.lib.bytebuddy' - relocate 'org.tukaani.xz', 'me.lucko.spark.lib.xz' relocate 'com.google.protobuf', 'me.lucko.spark.lib.protobuf' relocate 'org.objectweb.asm', 'me.lucko.spark.lib.asm' relocate 'one.profiler', 'me.lucko.spark.lib.asyncprofiler' diff --git a/spark-nukkit/build.gradle b/spark-nukkit/build.gradle index 2e1ad55..b15009a 100644 --- a/spark-nukkit/build.gradle +++ b/spark-nukkit/build.gradle @@ -25,11 +25,8 @@ processResources { shadowJar { archiveName = "spark-${project.pluginVersion}-nukkit.jar" - relocate 'okio', 'me.lucko.spark.lib.okio' - relocate 'okhttp3', 'me.lucko.spark.lib.okhttp3' relocate 'net.kyori.adventure', 'me.lucko.spark.lib.adventure' relocate 'net.kyori.examination', 'me.lucko.spark.lib.adventure.examination' - relocate 'org.tukaani.xz', 'me.lucko.spark.lib.xz' relocate 'net.bytebuddy', 'me.lucko.spark.lib.bytebuddy' relocate 'com.google.protobuf', 'me.lucko.spark.lib.protobuf' relocate 'org.objectweb.asm', 'me.lucko.spark.lib.asm' diff --git a/spark-sponge7/build.gradle b/spark-sponge7/build.gradle index b6f8273..b06d3bd 100644 --- a/spark-sponge7/build.gradle +++ b/spark-sponge7/build.gradle @@ -22,12 +22,9 @@ blossom { shadowJar { archiveFileName = "spark-${project.pluginVersion}-sponge7.jar" - relocate 'okio', 'me.lucko.spark.lib.okio' - relocate 'okhttp3', 'me.lucko.spark.lib.okhttp3' relocate 'net.kyori.adventure', 'me.lucko.spark.lib.adventure' relocate 'net.kyori.examination', 'me.lucko.spark.lib.adventure.examination' relocate 'net.bytebuddy', 'me.lucko.spark.lib.bytebuddy' - relocate 'org.tukaani.xz', 'me.lucko.spark.lib.xz' relocate 'com.google.protobuf', 'me.lucko.spark.lib.protobuf' relocate 'org.objectweb.asm', 'me.lucko.spark.lib.asm' relocate 'one.profiler', 'me.lucko.spark.lib.asyncprofiler' diff --git a/spark-sponge8/build.gradle b/spark-sponge8/build.gradle index 314ab18..202c308 100644 --- a/spark-sponge8/build.gradle +++ b/spark-sponge8/build.gradle @@ -28,11 +28,8 @@ shadowJar { exclude(dependency('net.kyori:^(?!adventure-text-feature-pagination).+$')) } - relocate 'okio', 'me.lucko.spark.lib.okio' - relocate 'okhttp3', 'me.lucko.spark.lib.okhttp3' relocate 'net.kyori.adventure.text.feature.pagination', 'me.lucko.spark.lib.adventure.pagination' relocate 'net.bytebuddy', 'me.lucko.spark.lib.bytebuddy' - relocate 'org.tukaani.xz', 'me.lucko.spark.lib.xz' relocate 'com.google.protobuf', 'me.lucko.spark.lib.protobuf' relocate 'org.objectweb.asm', 'me.lucko.spark.lib.asm' relocate 'one.profiler', 'me.lucko.spark.lib.asyncprofiler' diff --git a/spark-velocity/build.gradle b/spark-velocity/build.gradle index b2e938b..275d3df 100644 --- a/spark-velocity/build.gradle +++ b/spark-velocity/build.gradle @@ -26,11 +26,8 @@ shadowJar { exclude(dependency('net.kyori:^(?!adventure-text-feature-pagination).+$')) } - relocate 'okio', 'me.lucko.spark.lib.okio' - relocate 'okhttp3', 'me.lucko.spark.lib.okhttp3' relocate 'net.kyori.adventure.text.feature.pagination', 'me.lucko.spark.lib.adventure.pagination' relocate 'net.bytebuddy', 'me.lucko.spark.lib.bytebuddy' - relocate 'org.tukaani.xz', 'me.lucko.spark.lib.xz' relocate 'com.google.protobuf', 'me.lucko.spark.lib.protobuf' relocate 'org.objectweb.asm', 'me.lucko.spark.lib.asm' relocate 'one.profiler', 'me.lucko.spark.lib.asyncprofiler' diff --git a/spark-velocity4/build.gradle 
b/spark-velocity4/build.gradle index 5bef80b..1f8e8ee 100644 --- a/spark-velocity4/build.gradle +++ b/spark-velocity4/build.gradle @@ -31,11 +31,8 @@ shadowJar { exclude(dependency('net.kyori:^(?!adventure-text-feature-pagination).+$')) } - relocate 'okio', 'me.lucko.spark.lib.okio' - relocate 'okhttp3', 'me.lucko.spark.lib.okhttp3' relocate 'net.kyori.adventure.text.feature.pagination', 'me.lucko.spark.lib.adventure.pagination' relocate 'net.bytebuddy', 'me.lucko.spark.lib.bytebuddy' - relocate 'org.tukaani.xz', 'me.lucko.spark.lib.xz' relocate 'com.google.protobuf', 'me.lucko.spark.lib.protobuf' relocate 'org.objectweb.asm', 'me.lucko.spark.lib.asm' relocate 'one.profiler', 'me.lucko.spark.lib.asyncprofiler' diff --git a/spark-waterdog/build.gradle b/spark-waterdog/build.gradle index c11e3fb..d0c7de7 100644 --- a/spark-waterdog/build.gradle +++ b/spark-waterdog/build.gradle @@ -30,11 +30,8 @@ processResources { shadowJar { archiveName = "spark-${project.pluginVersion}-waterdog.jar" - relocate 'okio', 'me.lucko.spark.lib.okio' - relocate 'okhttp3', 'me.lucko.spark.lib.okhttp3' relocate 'net.kyori.adventure', 'me.lucko.spark.lib.adventure' relocate 'net.kyori.examination', 'me.lucko.spark.lib.adventure.examination' - relocate 'org.tukaani.xz', 'me.lucko.spark.lib.xz' relocate 'net.bytebuddy', 'me.lucko.spark.lib.bytebuddy' relocate 'com.google.protobuf', 'me.lucko.spark.lib.protobuf' relocate 'org.objectweb.asm', 'me.lucko.spark.lib.asm' -- cgit From f9c898caf921c47f9c6d94f9c1e7d19905b55c07 Mon Sep 17 00:00:00 2001 From: Paint_Ninja Date: Sun, 7 Aug 2022 19:03:02 +0100 Subject: Support getting the CPU model name on Windows (#237) --- .../me/lucko/spark/common/monitor/cpu/CpuInfo.java | 27 ++++++++++++++++++---- 1 file changed, 22 insertions(+), 5 deletions(-) (limited to 'spark-common/src') diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuInfo.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuInfo.java index 9bbe0f8..fcb70c0 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuInfo.java +++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuInfo.java @@ -22,10 +22,13 @@ package me.lucko.spark.common.monitor.cpu; import me.lucko.spark.common.util.LinuxProc; +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStreamReader; import java.util.regex.Pattern; /** - * Small utility to query the CPU model on Linux systems. + * Small utility to query the CPU model on Linux and Windows systems. 
*/ public enum CpuInfo { ; @@ -38,11 +41,25 @@ public enum CpuInfo { * @return the cpu model */ public static String queryCpuModel() { - for (String line : LinuxProc.CPUINFO.read()) { - String[] splitLine = SPACE_COLON_SPACE_PATTERN.split(line); + if (System.getProperty("os.name").startsWith("Windows")) { + final String[] args = { "wmic", "cpu", "get", "name", "/FORMAT:list" }; + try (final BufferedReader buf = new BufferedReader(new InputStreamReader(new ProcessBuilder(args).redirectErrorStream(true).start().getInputStream()))) { + String line; + while ((line = buf.readLine()) != null) { + if (line.startsWith("Name")) { + return line.substring(5).trim(); + } + } + } catch (final IOException e) { + return ""; + } + } else { + for (String line : LinuxProc.CPUINFO.read()) { + String[] splitLine = SPACE_COLON_SPACE_PATTERN.split(line); - if (splitLine[0].equals("model name") || splitLine[0].equals("Processor")) { - return splitLine[1]; + if (splitLine[0].equals("model name") || splitLine[0].equals("Processor")) { + return splitLine[1]; + } } } return ""; -- cgit From 0d3b71f3163d34a35506668066f94fcad8d5c6f8 Mon Sep 17 00:00:00 2001 From: Paint_Ninja Date: Sun, 7 Aug 2022 19:04:33 +0100 Subject: Improve operating system info (#238) --- .../common/monitor/os/OperatingSystemInfo.java | 78 ++++++++++++++++++++++ .../platform/PlatformStatisticsProvider.java | 5 +- .../java/me/lucko/spark/common/util/LinuxProc.java | 7 +- 3 files changed, 87 insertions(+), 3 deletions(-) create mode 100644 spark-common/src/main/java/me/lucko/spark/common/monitor/os/OperatingSystemInfo.java (limited to 'spark-common/src') diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/os/OperatingSystemInfo.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/os/OperatingSystemInfo.java new file mode 100644 index 0000000..4eeebd1 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/os/OperatingSystemInfo.java @@ -0,0 +1,78 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
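The /FORMAT:list flag makes wmic print one Key=Value pair per line, which is why the lookup above only needs a prefix check and substring(5), the length of "Name=". A tiny self-contained illustration of that parsing, with the process output replaced by a hard-coded sample:

import java.util.List;

public class WmicListFormatExample {
    public static void main(String[] args) {
        // example output of: wmic cpu get name /FORMAT:list
        List<String> lines = List.of("", "Name=AMD Ryzen 7 5800X 8-Core Processor", "");

        String model = "";
        for (String line : lines) {
            if (line.startsWith("Name")) {
                model = line.substring("Name=".length()).trim();
            }
        }
        System.out.println(model); // AMD Ryzen 7 5800X 8-Core Processor
    }
}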
+ */ + +package me.lucko.spark.common.monitor.os; + +import me.lucko.spark.common.util.LinuxProc; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStreamReader; + +public enum OperatingSystemInfo { + ; + + private static String name = null; + private static String version = null; + + static { + final String osNameJavaProp = System.getProperty("os.name"); + + if (osNameJavaProp.startsWith("Windows")) { + final String[] args = { "wmic", "os", "get", "caption,version", "/FORMAT:list" }; + try (final BufferedReader buf = new BufferedReader(new InputStreamReader(new ProcessBuilder(args).redirectErrorStream(true).start().getInputStream()))) { + String line; + while ((line = buf.readLine()) != null) { + if (line.startsWith("Caption")) { + name = line.substring(18).trim(); + } else if (line.startsWith("Version")) { + version = line.substring(8).trim(); + } + } + } catch (final IOException | IndexOutOfBoundsException e) { + // ignore + } + } else { + for (final String line : LinuxProc.OSINFO.read()) { + if (line.startsWith("PRETTY_NAME")) { + try { + name = line.substring(13).replace('"', ' ').trim(); + } catch (final IndexOutOfBoundsException e) { + // ignore + } + } + } + } + + if (name == null) + name = osNameJavaProp; + + if (version == null) + version = System.getProperty("os.version"); + } + + public static String getName() { + return name; + } + + public static String getVersion() { + return version; + } +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java index 49cfed5..e5be647 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java @@ -28,6 +28,7 @@ import me.lucko.spark.common.monitor.memory.GarbageCollectorStatistics; import me.lucko.spark.common.monitor.memory.MemoryInfo; import me.lucko.spark.common.monitor.net.NetworkInterfaceAverages; import me.lucko.spark.common.monitor.net.NetworkMonitor; +import me.lucko.spark.common.monitor.os.OperatingSystemInfo; import me.lucko.spark.common.monitor.ping.PingStatistics; import me.lucko.spark.common.monitor.tick.TickStatistics; import me.lucko.spark.common.platform.world.WorldInfoProvider; @@ -87,8 +88,8 @@ public class PlatformStatisticsProvider { ) .setOs(SystemStatistics.Os.newBuilder() .setArch(System.getProperty("os.arch")) - .setName(System.getProperty("os.name")) - .setVersion(System.getProperty("os.version")) + .setName(OperatingSystemInfo.getName()) + .setVersion(OperatingSystemInfo.getVersion()) .build() ) .setJava(SystemStatistics.Java.newBuilder() diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/LinuxProc.java b/spark-common/src/main/java/me/lucko/spark/common/util/LinuxProc.java index 7d688d7..44fd3f9 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/util/LinuxProc.java +++ b/spark-common/src/main/java/me/lucko/spark/common/util/LinuxProc.java @@ -49,7 +49,12 @@ public enum LinuxProc { /** * Information about the system network usage. */ - NET_DEV("/proc/net/dev"); + NET_DEV("/proc/net/dev"), + + /** + * Information about the operating system distro. 
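/etc/os-release holds shell-style KEY=value assignments; the distro's display name sits in PRETTY_NAME, quoted, hence the substring(13) above (the length of the prefix PRETTY_NAME=") followed by quote stripping. A sketch of the same logic against a sample line:

public class OsReleaseExample {
    public static void main(String[] args) {
        // a typical line from /etc/os-release
        String line = "PRETTY_NAME=\"Debian GNU/Linux 11 (bullseye)\"";

        String name = null;
        if (line.startsWith("PRETTY_NAME") && line.length() > 13) {
            // skip the 13-char prefix PRETTY_NAME=" and drop the trailing quote
            name = line.substring(13).replace('"', ' ').trim();
        }
        System.out.println(name); // Debian GNU/Linux 11 (bullseye)
    }
}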
+ */ + OSINFO("/etc/os-release"); private final Path path; -- cgit From c96f88f7c125109edcae382b8d10011e87125102 Mon Sep 17 00:00:00 2001 From: Luck Date: Sun, 7 Aug 2022 21:53:11 +0100 Subject: Refactor Windows wmic utility --- .../me/lucko/spark/common/monitor/LinuxProc.java | 89 +++++++++++++++++++++ .../me/lucko/spark/common/monitor/WindowsWmic.java | 74 ++++++++++++++++++ .../me/lucko/spark/common/monitor/cpu/CpuInfo.java | 32 +++----- .../spark/common/monitor/memory/MemoryInfo.java | 2 +- .../common/monitor/net/NetworkInterfaceInfo.java | 2 +- .../common/monitor/os/OperatingSystemInfo.java | 90 ++++++++++++---------- .../platform/PlatformStatisticsProvider.java | 7 +- .../java/me/lucko/spark/common/util/LinuxProc.java | 89 --------------------- 8 files changed, 229 insertions(+), 156 deletions(-) create mode 100644 spark-common/src/main/java/me/lucko/spark/common/monitor/LinuxProc.java create mode 100644 spark-common/src/main/java/me/lucko/spark/common/monitor/WindowsWmic.java delete mode 100644 spark-common/src/main/java/me/lucko/spark/common/util/LinuxProc.java (limited to 'spark-common/src') diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/LinuxProc.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/LinuxProc.java new file mode 100644 index 0000000..563e247 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/LinuxProc.java @@ -0,0 +1,89 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.monitor; + +import org.checkerframework.checker.nullness.qual.NonNull; +import org.checkerframework.checker.nullness.qual.Nullable; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.Collections; +import java.util.List; + +/** + * Utility for reading from /proc/ on Linux systems. + */ +public enum LinuxProc { + + /** + * Information about the system CPU. + */ + CPUINFO("/proc/cpuinfo"), + + /** + * Information about the system memory. + */ + MEMINFO("/proc/meminfo"), + + /** + * Information about the system network usage. + */ + NET_DEV("/proc/net/dev"), + + /** + * Information about the operating system distro. 
+ */ + OSINFO("/etc/os-release"); + + private final Path path; + + LinuxProc(String path) { + this.path = resolvePath(path); + } + + private static @Nullable Path resolvePath(String path) { + try { + Path p = Paths.get(path); + if (Files.isReadable(p)) { + return p; + } + } catch (Exception e) { + // ignore + } + return null; + } + + public @NonNull List<String> read() { + if (this.path != null) { + try { + return Files.readAllLines(this.path, StandardCharsets.UTF_8); + } catch (IOException e) { + // ignore + } + } + + return Collections.emptyList(); + } + +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/WindowsWmic.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/WindowsWmic.java new file mode 100644 index 0000000..6b602d9 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/WindowsWmic.java @@ -0,0 +1,74 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <https://www.gnu.org/licenses/>. + */ + +package me.lucko.spark.common.monitor; + +import org.checkerframework.checker.nullness.qual.NonNull; + +import java.io.BufferedReader; +import java.io.InputStreamReader; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +/** + * Utility for reading from wmic (Windows Management Instrumentation Commandline) on Windows systems. + */ +public enum WindowsWmic { + + /** + * Gets the CPU name + */ + CPU_GET_NAME("wmic", "cpu", "get", "name", "/FORMAT:list"), + + /** + * Gets the operating system name (caption) and version. + */ + OS_GET_CAPTION_AND_VERSION("wmic", "os", "get", "caption,version", "/FORMAT:list"); + + private static final boolean SUPPORTED = System.getProperty("os.name").startsWith("Windows"); + + private final String[] cmdArgs; + + WindowsWmic(String...
cmdArgs) { + this.cmdArgs = cmdArgs; + } + + public @NonNull List<String> read() { + if (SUPPORTED) { + ProcessBuilder process = new ProcessBuilder(this.cmdArgs).redirectErrorStream(true); + try (BufferedReader buf = new BufferedReader(new InputStreamReader(process.start().getInputStream()))) { + List<String> lines = new ArrayList<>(); + + String line; + while ((line = buf.readLine()) != null) { + lines.add(line); + } + + return lines; + } catch (Exception e) { + // ignore + } + } + + return Collections.emptyList(); + } +} + diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuInfo.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuInfo.java index fcb70c0..9954bd5 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuInfo.java +++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuInfo.java @@ -20,11 +20,9 @@ package me.lucko.spark.common.monitor.cpu; -import me.lucko.spark.common.util.LinuxProc; +import me.lucko.spark.common.monitor.LinuxProc; +import me.lucko.spark.common.monitor.WindowsWmic; -import java.io.BufferedReader; -import java.io.IOException; -import java.io.InputStreamReader; import java.util.regex.Pattern; /** @@ -41,27 +39,19 @@ public enum CpuInfo { * @return the cpu model */ public static String queryCpuModel() { - if (System.getProperty("os.name").startsWith("Windows")) { - final String[] args = { "wmic", "cpu", "get", "name", "/FORMAT:list" }; - try (final BufferedReader buf = new BufferedReader(new InputStreamReader(new ProcessBuilder(args).redirectErrorStream(true).start().getInputStream()))) { - String line; - while ((line = buf.readLine()) != null) { - if (line.startsWith("Name")) { - return line.substring(5).trim(); - } - } - } catch (final IOException e) { - return ""; + for (String line : LinuxProc.CPUINFO.read()) { + String[] splitLine = SPACE_COLON_SPACE_PATTERN.split(line); + if (splitLine[0].equals("model name") || splitLine[0].equals("Processor")) { + return splitLine[1]; } - } else { - for (String line : LinuxProc.CPUINFO.read()) { - String[] splitLine = SPACE_COLON_SPACE_PATTERN.split(line); + } - if (splitLine[0].equals("model name") || splitLine[0].equals("Processor")) { - return splitLine[1]; - } + for (String line : WindowsWmic.CPU_GET_NAME.read()) { + if (line.startsWith("Name")) { + return line.substring(5).trim(); } } + return ""; } diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/MemoryInfo.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/MemoryInfo.java index 226f75b..8f63f71 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/MemoryInfo.java +++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/MemoryInfo.java @@ -20,7 +20,7 @@ package me.lucko.spark.common.monitor.memory; -import me.lucko.spark.common.util.LinuxProc; +import me.lucko.spark.common.monitor.LinuxProc; import java.lang.management.ManagementFactory; import java.util.regex.Matcher; diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkInterfaceInfo.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkInterfaceInfo.java index bd9e187..332077a 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkInterfaceInfo.java +++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkInterfaceInfo.java @@ -22,7 +22,7 @@ package me.lucko.spark.common.monitor.net; import com.google.common.collect.ImmutableMap; -import me.lucko.spark.common.util.LinuxProc;
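Note how the refactored queryCpuModel() no longer branches on os.name at all: each reader returns an empty list on platforms it does not support, so both loops can run unconditionally and the first match wins. A trimmed sketch of that fall-through shape, with the two readers replaced by stand-ins:

import java.util.Collections;
import java.util.List;

public class FallThroughProbeExample {
    // stand-ins for LinuxProc.CPUINFO.read() / WindowsWmic.CPU_GET_NAME.read()
    static List<String> readLinux() { return Collections.emptyList(); } // empty off-Linux
    static List<String> readWindows() { return List.of("Name=Example CPU"); }

    public static String queryCpuModel() {
        for (String line : readLinux()) {
            String[] split = line.split("\\s+:\\s+", 2);
            if (split.length == 2 && (split[0].equals("model name") || split[0].equals("Processor"))) {
                return split[1];
            }
        }
        for (String line : readWindows()) {
            if (line.startsWith("Name")) {
                return line.substring(5).trim();
            }
        }
        return ""; // unknown platform
    }

    public static void main(String[] args) {
        System.out.println(queryCpuModel());
    }
}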
+import me.lucko.spark.common.monitor.LinuxProc; import org.checkerframework.checker.nullness.qual.NonNull; diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/os/OperatingSystemInfo.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/os/OperatingSystemInfo.java index 4eeebd1..1c2732c 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/monitor/os/OperatingSystemInfo.java +++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/os/OperatingSystemInfo.java @@ -20,59 +20,67 @@ package me.lucko.spark.common.monitor.os; -import me.lucko.spark.common.util.LinuxProc; +import me.lucko.spark.common.monitor.LinuxProc; +import me.lucko.spark.common.monitor.WindowsWmic; -import java.io.BufferedReader; -import java.io.IOException; -import java.io.InputStreamReader; +/** + * Small utility to query the operating system name & version. + */ +public final class OperatingSystemInfo { + private final String name; + private final String version; + private final String arch; + + public OperatingSystemInfo(String name, String version, String arch) { + this.name = name; + this.version = version; + this.arch = arch; + } -public enum OperatingSystemInfo { - ; + public String name() { + return this.name; + } - private static String name = null; - private static String version = null; + public String version() { + return this.version; + } + + public String arch() { + return this.arch; + } - static { - final String osNameJavaProp = System.getProperty("os.name"); + public static OperatingSystemInfo poll() { + String name = null; + String version = null; - if (osNameJavaProp.startsWith("Windows")) { - final String[] args = { "wmic", "os", "get", "caption,version", "/FORMAT:list" }; - try (final BufferedReader buf = new BufferedReader(new InputStreamReader(new ProcessBuilder(args).redirectErrorStream(true).start().getInputStream()))) { - String line; - while ((line = buf.readLine()) != null) { - if (line.startsWith("Caption")) { - name = line.substring(18).trim(); - } else if (line.startsWith("Version")) { - version = line.substring(8).trim(); - } - } - } catch (final IOException | IndexOutOfBoundsException e) { - // ignore + for (String line : LinuxProc.OSINFO.read()) { + if (line.startsWith("PRETTY_NAME") && line.length() > 13) { + name = line.substring(13).replace('"', ' ').trim(); } - } else { - for (final String line : LinuxProc.OSINFO.read()) { - if (line.startsWith("PRETTY_NAME")) { - try { - name = line.substring(13).replace('"', ' ').trim(); - } catch (final IndexOutOfBoundsException e) { - // ignore - } - } + } + + for (String line : WindowsWmic.OS_GET_CAPTION_AND_VERSION.read()) { + if (line.startsWith("Caption") && line.length() > 18) { + // Caption=Microsoft Windows something + // \----------------/ = 18 chars + name = line.substring(18).trim(); + } else if (line.startsWith("Version")) { + // Version=10.0.something + // \------/ = 8 chars + version = line.substring(8).trim(); } } - if (name == null) - name = osNameJavaProp; + if (name == null) { + name = System.getProperty("os.name"); + } - if (version == null) + if (version == null) { version = System.getProperty("os.version"); - } + } - public static String getName() { - return name; - } + String arch = System.getProperty("os.arch"); - public static String getVersion() { - return version; + return new OperatingSystemInfo(name, version, arch); } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java 
b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java index e5be647..1eb9753 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java @@ -51,6 +51,7 @@ public class PlatformStatisticsProvider { public SystemStatistics getSystemStatistics() { RuntimeMXBean runtimeBean = ManagementFactory.getRuntimeMXBean(); + OperatingSystemInfo osInfo = OperatingSystemInfo.poll(); SystemStatistics.Builder builder = SystemStatistics.newBuilder() .setCpu(SystemStatistics.Cpu.newBuilder() @@ -87,9 +88,9 @@ public class PlatformStatisticsProvider { .build() ) .setOs(SystemStatistics.Os.newBuilder() - .setArch(System.getProperty("os.arch")) - .setName(OperatingSystemInfo.getName()) - .setVersion(OperatingSystemInfo.getVersion()) + .setArch(osInfo.arch()) + .setName(osInfo.name()) + .setVersion(osInfo.version()) .build() ) .setJava(SystemStatistics.Java.newBuilder() diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/LinuxProc.java b/spark-common/src/main/java/me/lucko/spark/common/util/LinuxProc.java deleted file mode 100644 index 44fd3f9..0000000 --- a/spark-common/src/main/java/me/lucko/spark/common/util/LinuxProc.java +++ /dev/null @@ -1,89 +0,0 @@ -/* - * This file is part of spark. - * - * Copyright (c) lucko (Luck) - * Copyright (c) contributors - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - */ - -package me.lucko.spark.common.util; - -import org.checkerframework.checker.nullness.qual.NonNull; -import org.checkerframework.checker.nullness.qual.Nullable; - -import java.io.IOException; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.util.Collections; -import java.util.List; - -/** - * Utility for reading from /proc/ on Linux systems. - */ -public enum LinuxProc { - - /** - * Information about the system CPU. - */ - CPUINFO("/proc/cpuinfo"), - - /** - * Information about the system memory. - */ - MEMINFO("/proc/meminfo"), - - /** - * Information about the system network usage. - */ - NET_DEV("/proc/net/dev"), - - /** - * Information about the operating system distro. 
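With the poll() refactor, callers capture one snapshot and read plain accessors instead of touching statics initialised in a class initialiser; PlatformStatisticsProvider above is the in-tree consumer. A hypothetical caller, assuming the refactored class is on the classpath, would look like:

import me.lucko.spark.common.monitor.os.OperatingSystemInfo;

public class OsInfoExample {
    public static void main(String[] args) {
        OperatingSystemInfo osInfo = OperatingSystemInfo.poll(); // runs the probes once
        System.out.println(osInfo.name() + " " + osInfo.version() + " (" + osInfo.arch() + ")");
    }
}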
- */ - OSINFO("/etc/os-release"); - - private final Path path; - - LinuxProc(String path) { - this.path = resolvePath(path); - } - - private static @Nullable Path resolvePath(String path) { - try { - Path p = Paths.get(path); - if (Files.isReadable(p)) { - return p; - } - } catch (Exception e) { - // ignore - } - return null; - } - - public @NonNull List read() { - if (this.path != null) { - try { - return Files.readAllLines(this.path, StandardCharsets.UTF_8); - } catch (IOException e) { - // ignore - } - } - - return Collections.emptyList(); - } - -} -- cgit From 7dce92ac6f577e04b99a1da555edfbc801efa049 Mon Sep 17 00:00:00 2001 From: Luck Date: Sun, 4 Sep 2022 10:34:02 +0100 Subject: Update async-profiler --- spark-common/build.gradle | 2 +- .../spark/linux/aarch64/libasyncProfiler.so | Bin 333864 -> 334176 bytes .../spark/linux/amd64-musl/libasyncProfiler.so | Bin 304568 -> 308800 bytes .../spark/linux/amd64/libasyncProfiler.so | Bin 347712 -> 352112 bytes .../main/resources/spark/macos/libasyncProfiler.so | Bin 690128 -> 690208 bytes 5 files changed, 1 insertion(+), 1 deletion(-) (limited to 'spark-common/src') diff --git a/spark-common/build.gradle b/spark-common/build.gradle index ce09d51..4a20142 100644 --- a/spark-common/build.gradle +++ b/spark-common/build.gradle @@ -8,7 +8,7 @@ license { dependencies { api project(':spark-api') - implementation 'com.github.jvm-profiling-tools:async-profiler:v2.8.1' + implementation 'com.github.jvm-profiling-tools:async-profiler:v2.8.3' implementation 'org.ow2.asm:asm:9.1' implementation 'com.google.protobuf:protobuf-javalite:3.15.6' implementation 'net.bytebuddy:byte-buddy-agent:1.11.0' diff --git a/spark-common/src/main/resources/spark/linux/aarch64/libasyncProfiler.so b/spark-common/src/main/resources/spark/linux/aarch64/libasyncProfiler.so index c3c2eb2..cf6c48b 100755 Binary files a/spark-common/src/main/resources/spark/linux/aarch64/libasyncProfiler.so and b/spark-common/src/main/resources/spark/linux/aarch64/libasyncProfiler.so differ diff --git a/spark-common/src/main/resources/spark/linux/amd64-musl/libasyncProfiler.so b/spark-common/src/main/resources/spark/linux/amd64-musl/libasyncProfiler.so index 4c69ab8..0a08f7c 100755 Binary files a/spark-common/src/main/resources/spark/linux/amd64-musl/libasyncProfiler.so and b/spark-common/src/main/resources/spark/linux/amd64-musl/libasyncProfiler.so differ diff --git a/spark-common/src/main/resources/spark/linux/amd64/libasyncProfiler.so b/spark-common/src/main/resources/spark/linux/amd64/libasyncProfiler.so index 5612ad9..0deb9e0 100755 Binary files a/spark-common/src/main/resources/spark/linux/amd64/libasyncProfiler.so and b/spark-common/src/main/resources/spark/linux/amd64/libasyncProfiler.so differ diff --git a/spark-common/src/main/resources/spark/macos/libasyncProfiler.so b/spark-common/src/main/resources/spark/macos/libasyncProfiler.so index 1fc6ba3..65b4aed 100755 Binary files a/spark-common/src/main/resources/spark/macos/libasyncProfiler.so and b/spark-common/src/main/resources/spark/macos/libasyncProfiler.so differ -- cgit From 618230b958d7822985e2702cd9528f1b4567e59c Mon Sep 17 00:00:00 2001 From: Luck Date: Sun, 4 Sep 2022 10:53:06 +0100 Subject: Improve debug output when JFR parsing fails --- .../spark/common/sampler/async/AsyncSampler.java | 21 +++++++++++++++++++-- 1 file changed, 19 insertions(+), 2 deletions(-) (limited to 'spark-common/src') diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java 
b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java index d8288da..dae3852 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java @@ -185,8 +185,19 @@ public class AsyncSampler extends AbstractSampler { // read the jfr file produced by async-profiler try (JfrReader reader = new JfrReader(this.outputFile)) { readSegments(reader, threadFilter); - } catch (IOException e) { - throw new RuntimeException("Read error", e); + } catch (Exception e) { + boolean fileExists; + try { + fileExists = Files.exists(this.outputFile) && Files.size(this.outputFile) != 0; + } catch (IOException ex) { + fileExists = false; + } + + if (fileExists) { + throw new JfrParsingException("Error parsing JFR data from profiler output", e); + } else { + throw new JfrParsingException("Error parsing JFR data from profiler output - file " + this.outputFile + " does not exist!", e); + } } // delete the output file after reading @@ -268,4 +279,10 @@ public class AsyncSampler extends AbstractSampler { reader.stackFrames.put(methodId, result); return result; } + + private static final class JfrParsingException extends RuntimeException { + public JfrParsingException(String message, Throwable cause) { + super(message, cause); + } + } } -- cgit From 7ef9b6281135ce0a24f3c14c2255d9a2c2eca969 Mon Sep 17 00:00:00 2001 From: ishland Date: Mon, 19 Sep 2022 21:48:28 +0800 Subject: Display source info for mixin injected methods (#249) Co-authored-by: Luck --- .../spark/common/sampler/AbstractSampler.java | 12 +- .../lucko/spark/common/util/ClassSourceLookup.java | 257 +++++++++++++++++++-- .../src/main/proto/spark/spark_sampler.proto | 2 + spark-fabric/build.gradle | 10 +- .../spark/fabric/FabricClassSourceLookup.java | 159 ++++++++++++- .../fabric/plugin/FabricSparkMixinPlugin.java | 71 ++++++ .../me/lucko/spark/fabric/smap/MixinUtils.java | 52 +++++ .../lucko/spark/fabric/smap/SourceDebugCache.java | 87 +++++++ .../java/me/lucko/spark/fabric/smap/SourceMap.java | 133 +++++++++++ .../lucko/spark/fabric/smap/SourceMapProvider.java | 53 +++++ spark-fabric/src/main/resources/spark.mixins.json | 3 +- 11 files changed, 806 insertions(+), 33 deletions(-) create mode 100644 spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricSparkMixinPlugin.java create mode 100644 spark-fabric/src/main/java/me/lucko/spark/fabric/smap/MixinUtils.java create mode 100644 spark-fabric/src/main/java/me/lucko/spark/fabric/smap/SourceDebugCache.java create mode 100644 spark-fabric/src/main/java/me/lucko/spark/fabric/smap/SourceMap.java create mode 100644 spark-fabric/src/main/java/me/lucko/spark/fabric/smap/SourceMapProvider.java (limited to 'spark-common/src') diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java index 1c217db..3cfef0b 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java @@ -164,8 +164,16 @@ public abstract class AbstractSampler implements Sampler { classSourceVisitor.visit(entry); } - if (classSourceVisitor.hasMappings()) { - proto.putAllClassSources(classSourceVisitor.getMapping()); + if (classSourceVisitor.hasClassSourceMappings()) { + proto.putAllClassSources(classSourceVisitor.getClassSourceMapping()); + } + + if 
(classSourceVisitor.hasMethodSourceMappings()) { + proto.putAllMethodSources(classSourceVisitor.getMethodSourceMapping()); + } + + if (classSourceVisitor.hasLineSourceMappings()) { + proto.putAllLineSources(classSourceVisitor.getLineSourceMapping()); } } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/ClassSourceLookup.java b/spark-common/src/main/java/me/lucko/spark/common/util/ClassSourceLookup.java index bd9ec37..668f31a 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/util/ClassSourceLookup.java +++ b/spark-common/src/main/java/me/lucko/spark/common/util/ClassSourceLookup.java @@ -38,9 +38,11 @@ import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.Objects; +import java.util.function.Function; +import java.util.stream.Collectors; /** - * A function which defines the source of given {@link Class}es. + * A function which defines the source of given {@link Class}es or (Mixin) method calls. */ public interface ClassSourceLookup { @@ -52,6 +54,26 @@ public interface ClassSourceLookup { */ @Nullable String identify(Class clazz) throws Exception; + /** + * Identify the given method call. + * + * @param methodCall the method call info + * @return the source of the method call + */ + default @Nullable String identify(MethodCall methodCall) throws Exception { + return null; + } + + /** + * Identify the given method call. + * + * @param methodCall the method call info + * @return the source of the method call + */ + default @Nullable String identify(MethodCallByLine methodCall) throws Exception { + return null; + } + /** * A no-operation {@link ClassSourceLookup}. */ @@ -156,9 +178,17 @@ public interface ClassSourceLookup { interface Visitor { void visit(ThreadNode node); - boolean hasMappings(); + boolean hasClassSourceMappings(); + + Map getClassSourceMapping(); + + boolean hasMethodSourceMappings(); + + Map getMethodSourceMapping(); + + boolean hasLineSourceMappings(); - Map getMapping(); + Map getLineSourceMapping(); } static Visitor createVisitor(ClassSourceLookup lookup) { @@ -177,25 +207,46 @@ public interface ClassSourceLookup { } @Override - public boolean hasMappings() { + public boolean hasClassSourceMappings() { + return false; + } + + @Override + public Map getClassSourceMapping() { + return Collections.emptyMap(); + } + + @Override + public boolean hasMethodSourceMappings() { + return false; + } + + @Override + public Map getMethodSourceMapping() { + return Collections.emptyMap(); + } + + @Override + public boolean hasLineSourceMappings() { return false; } @Override - public Map getMapping() { + public Map getLineSourceMapping() { return Collections.emptyMap(); } } /** - * Visitor which scans {@link StackTraceNode}s and accumulates class identities. + * Visitor which scans {@link StackTraceNode}s and accumulates class/method call identities. 
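Both new identify overloads are default methods, so existing ClassSourceLookup implementations keep compiling unchanged; only platforms that can attribute injected or merged methods need to override them. A hypothetical implementation, with the package prefix and mod name invented purely for illustration:

import me.lucko.spark.common.util.ClassSourceLookup;

// hypothetical lookup: attributes classes by package prefix; the method-call
// overloads are left as stubs showing where richer attribution would plug in
public class ExampleClassSourceLookup implements ClassSourceLookup {
    @Override
    public String identify(Class<?> clazz) {
        String name = clazz.getName();
        return name.startsWith("com.example.mymod.") ? "mymod" : null;
    }

    @Override
    public String identify(MethodCall methodCall) {
        // e.g. inspect methodCall.getClassName() / getMethodDescriptor()
        return null; // null = source unknown
    }

    @Override
    public String identify(MethodCallByLine methodCall) {
        // e.g. consult a line-number table for methodCall.getLineNumber()
        return null;
    }
}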
*/ class VisitorImpl implements Visitor { private final ClassSourceLookup lookup; private final ClassFinder classFinder = new ClassFinder(); - // class name --> identifier (plugin name) - private final Map<String, String> map = new HashMap<>(); + private final SourcesMap<String> classSources = new SourcesMap<>(Function.identity()); + private final SourcesMap<MethodCall> methodSources = new SourcesMap<>(MethodCall::toString); + private final SourcesMap<MethodCallByLine> lineSources = new SourcesMap<>(MethodCallByLine::toString); VisitorImpl(ClassSourceLookup lookup) { this.lookup = lookup; } @@ -208,34 +259,194 @@ public interface ClassSourceLookup { } } + private void visitStackNode(StackTraceNode node) { + this.classSources.computeIfAbsent( + node.getClassName(), + className -> { + Class<?> clazz = this.classFinder.findClass(className); + if (clazz == null) { + return null; + } + return this.lookup.identify(clazz); + }); + + if (node.getMethodDescription() != null) { + MethodCall methodCall = new MethodCall(node.getClassName(), node.getMethodName(), node.getMethodDescription()); + this.methodSources.computeIfAbsent(methodCall, this.lookup::identify); + } else { + MethodCallByLine methodCall = new MethodCallByLine(node.getClassName(), node.getMethodName(), node.getLineNumber()); + this.lineSources.computeIfAbsent(methodCall, this.lookup::identify); + } + + // recursively + for (StackTraceNode child : node.getChildren()) { + visitStackNode(child); + } + } + @Override - public boolean hasMappings() { - return !this.map.isEmpty(); + public boolean hasClassSourceMappings() { + return this.classSources.hasMappings(); } @Override - public Map<String, String> getMapping() { - this.map.values().removeIf(Objects::isNull); - return this.map; + public Map<String, String> getClassSourceMapping() { + return this.classSources.export(); } - private void visitStackNode(StackTraceNode node) { - String className = node.getClassName(); - if (!this.map.containsKey(className)) { + @Override + public boolean hasMethodSourceMappings() { + return this.methodSources.hasMappings(); + } + + @Override + public Map<String, String> getMethodSourceMapping() { + return this.methodSources.export(); + } + + @Override + public boolean hasLineSourceMappings() { + return this.lineSources.hasMappings(); + } + + @Override + public Map<String, String> getLineSourceMapping() { + return this.lineSources.export(); + } + } + + final class SourcesMap<T> { + // <key> --> identifier (plugin name) + private final Map<T, String> map = new HashMap<>(); + private final Function<T, String> keyToStringFunction; + + private SourcesMap(Function<T, String> keyToStringFunction) { + this.keyToStringFunction = keyToStringFunction; + } + + public void computeIfAbsent(T key, ComputeSourceFunction<T> function) { + if (!this.map.containsKey(key)) { try { - Class<?> clazz = this.classFinder.findClass(className); - Objects.requireNonNull(clazz); - this.map.put(className, this.lookup.identify(clazz)); + this.map.put(key, function.compute(key)); } catch (Throwable e) { - this.map.put(className, null); + this.map.put(key, null); } } + } - // recursively - for (StackTraceNode child : node.getChildren()) { - visitStackNode(child); + public boolean hasMappings() { + this.map.values().removeIf(Objects::isNull); + return !this.map.isEmpty(); + } + + public Map<String, String> export() { + this.map.values().removeIf(Objects::isNull); + if (this.keyToStringFunction.equals(Function.identity())) { + //noinspection unchecked + return (Map<String, String>) this.map; + } else { + return this.map.entrySet().stream().collect(Collectors.toMap( + e -> this.keyToStringFunction.apply(e.getKey()), + Map.Entry::getValue + )); } } + + private interface ComputeSourceFunction<T> {
+ String compute(T key) throws Exception; + } + } + + /** + * Encapsulates information about a given method call using the name + method description. + */ + final class MethodCall { + private final String className; + private final String methodName; + private final String methodDescriptor; + + public MethodCall(String className, String methodName, String methodDescriptor) { + this.className = className; + this.methodName = methodName; + this.methodDescriptor = methodDescriptor; + } + + public String getClassName() { + return this.className; + } + + public String getMethodName() { + return this.methodName; + } + + public String getMethodDescriptor() { + return this.methodDescriptor; + } + + @Override + public String toString() { + return this.className + ";" + this.methodName + ";" + this.methodDescriptor; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (!(o instanceof MethodCall)) return false; + MethodCall that = (MethodCall) o; + return this.className.equals(that.className) && + this.methodName.equals(that.methodName) && + this.methodDescriptor.equals(that.methodDescriptor); + } + + @Override + public int hashCode() { + return Objects.hash(this.className, this.methodName, this.methodDescriptor); + } + } + + /** + * Encapsulates information about a given method call using the name + line number. + */ + final class MethodCallByLine { + private final String className; + private final String methodName; + private final int lineNumber; + + public MethodCallByLine(String className, String methodName, int lineNumber) { + this.className = className; + this.methodName = methodName; + this.lineNumber = lineNumber; + } + + public String getClassName() { + return this.className; + } + + public String getMethodName() { + return this.methodName; + } + + public int getLineNumber() { + return this.lineNumber; + } + + @Override + public String toString() { + return this.className + ";" + this.lineNumber; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (!(o instanceof MethodCallByLine)) return false; + MethodCallByLine that = (MethodCallByLine) o; + return this.lineNumber == that.lineNumber && this.className.equals(that.className); + } + + @Override + public int hashCode() { + return Objects.hash(this.className, this.lineNumber); + } } } diff --git a/spark-common/src/main/proto/spark/spark_sampler.proto b/spark-common/src/main/proto/spark/spark_sampler.proto index 8d9512a..f670ddf 100644 --- a/spark-common/src/main/proto/spark/spark_sampler.proto +++ b/spark-common/src/main/proto/spark/spark_sampler.proto @@ -11,6 +11,8 @@ message SamplerData { SamplerMetadata metadata = 1; repeated ThreadNode threads = 2; map class_sources = 3; // optional + map method_sources = 4; // optional + map line_sources = 5; // optional } message SamplerMetadata { diff --git a/spark-fabric/build.gradle b/spark-fabric/build.gradle index 30b1ff6..fce859a 100644 --- a/spark-fabric/build.gradle +++ b/spark-fabric/build.gradle @@ -66,6 +66,10 @@ processResources { } } +license { + exclude '**/smap/SourceMap.java' +} + shadowJar { archiveFileName = "spark-fabric-${project.pluginVersion}-dev.jar" configurations = [project.configurations.shade] @@ -74,12 +78,16 @@ shadowJar { relocate 'net.kyori.examination', 'me.lucko.spark.lib.adventure.examination' relocate 'net.bytebuddy', 'me.lucko.spark.lib.bytebuddy' relocate 'com.google.protobuf', 'me.lucko.spark.lib.protobuf' - relocate 'org.objectweb.asm', 'me.lucko.spark.lib.asm' +// relocate 
'org.objectweb.asm', 'me.lucko.spark.lib.asm' relocate 'one.profiler', 'me.lucko.spark.lib.asyncprofiler' exclude 'module-info.class' exclude 'META-INF/maven/**' exclude 'META-INF/proguard/**' + + dependencies { + exclude(dependency('org.ow2.asm::')) + } } task remappedShadowJar(type: RemapJarTask) { diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricClassSourceLookup.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricClassSourceLookup.java index 7030680..9ffac18 100644 --- a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricClassSourceLookup.java +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricClassSourceLookup.java @@ -22,18 +22,35 @@ package me.lucko.spark.fabric; import com.google.common.collect.ImmutableMap; +import me.lucko.spark.common.util.ClassFinder; import me.lucko.spark.common.util.ClassSourceLookup; +import me.lucko.spark.fabric.smap.MixinUtils; +import me.lucko.spark.fabric.smap.SourceMap; +import me.lucko.spark.fabric.smap.SourceMapProvider; import net.fabricmc.loader.api.FabricLoader; import net.fabricmc.loader.api.ModContainer; +import org.checkerframework.checker.nullness.qual.Nullable; +import org.objectweb.asm.Type; +import org.spongepowered.asm.mixin.FabricUtil; +import org.spongepowered.asm.mixin.extensibility.IMixinConfig; +import org.spongepowered.asm.mixin.transformer.Config; +import org.spongepowered.asm.mixin.transformer.meta.MixinMerged; + +import java.lang.reflect.Method; +import java.net.URI; import java.nio.file.Path; import java.util.Collection; import java.util.Map; public class FabricClassSourceLookup extends ClassSourceLookup.ByCodeSource { + + private final ClassFinder classFinder = new ClassFinder(); + private final SourceMapProvider smapProvider = new SourceMapProvider(); + private final Path modsDirectory; - private final Map pathToModMap; + private final Map pathToModMap; public FabricClassSourceLookup() { FabricLoader loader = FabricLoader.getInstance(); @@ -43,7 +60,7 @@ public class FabricClassSourceLookup extends ClassSourceLookup.ByCodeSource { @Override public String identifyFile(Path path) { - String id = this.pathToModMap.get(path); + String id = this.pathToModMap.get(path.toAbsolutePath().normalize().toString()); if (id != null) { return id; } @@ -55,11 +72,141 @@ public class FabricClassSourceLookup extends ClassSourceLookup.ByCodeSource { return super.identifyFileName(this.modsDirectory.relativize(path).toString()); } - private static Map constructPathToModIdMap(Collection mods) { - ImmutableMap.Builder builder = ImmutableMap.builder(); + @Override + public @Nullable String identify(MethodCall methodCall) throws Exception { + String className = methodCall.getClassName(); + String methodName = methodCall.getMethodName(); + String methodDesc = methodCall.getMethodDescriptor(); + + if (className.equals("native") || methodName.equals("") || methodName.equals("")) { + return null; + } + + Class clazz = this.classFinder.findClass(className); + if (clazz == null) { + return null; + } + + Class[] params = getParameterTypesForMethodDesc(methodDesc); + Method reflectMethod = clazz.getDeclaredMethod(methodName, params); + + MixinMerged mixinMarker = reflectMethod.getDeclaredAnnotation(MixinMerged.class); + if (mixinMarker == null) { + return null; + } + + return modIdFromMixinClass(mixinMarker.mixin()); + } + + @Override + public @Nullable String identify(MethodCallByLine methodCall) throws Exception { + String className = methodCall.getClassName(); + String methodName = methodCall.getMethodName(); + int 
lineNumber = methodCall.getLineNumber(); + + if (className.equals("native") || methodName.equals("") || methodName.equals("")) { + return null; + } + + SourceMap smap = this.smapProvider.getSourceMap(className); + if (smap == null) { + return null; + } + + int[] inputLineInfo = smap.getReverseLineMapping().get(lineNumber); + if (inputLineInfo == null || inputLineInfo.length == 0) { + return null; + } + + for (int fileInfoIds : inputLineInfo) { + SourceMap.FileInfo inputFileInfo = smap.getFileInfo().get(fileInfoIds); + if (inputFileInfo == null) { + continue; + } + + String path = inputFileInfo.path(); + if (path.endsWith(".java")) { + path = path.substring(0, path.length() - 5); + } + + String possibleMixinClassName = path.replace('/', '.'); + if (possibleMixinClassName.equals(className)) { + continue; + } + + return modIdFromMixinClass(possibleMixinClassName); + } + + return null; + } + + private static String modIdFromMixinClass(String mixinClassName) { + for (Config config : MixinUtils.getMixinConfigs().values()) { + IMixinConfig mixinConfig = config.getConfig(); + if (mixinClassName.startsWith(mixinConfig.getMixinPackage())) { + return mixinConfig.getDecoration(FabricUtil.KEY_MOD_ID); + } + } + return null; + } + + private Class[] getParameterTypesForMethodDesc(String methodDesc) { + Type methodType = Type.getMethodType(methodDesc); + Class[] params = new Class[methodType.getArgumentTypes().length]; + Type[] argumentTypes = methodType.getArgumentTypes(); + + for (int i = 0, argumentTypesLength = argumentTypes.length; i < argumentTypesLength; i++) { + Type argumentType = argumentTypes[i]; + params[i] = getClassFromType(argumentType); + } + + return params; + } + + private Class getClassFromType(Type type) { + return switch (type.getSort()) { + case Type.VOID -> void.class; + case Type.BOOLEAN -> boolean.class; + case Type.CHAR -> char.class; + case Type.BYTE -> byte.class; + case Type.SHORT -> short.class; + case Type.INT -> int.class; + case Type.FLOAT -> float.class; + case Type.LONG -> long.class; + case Type.DOUBLE -> double.class; + case Type.ARRAY -> { + final Class classFromType = getClassFromType(type.getElementType()); + Class result = classFromType; + if (classFromType != null) { + for (int i = 0; i < type.getDimensions(); i++) { + result = result.arrayType(); + } + } + yield result; + } + case Type.OBJECT -> this.classFinder.findClass(type.getClassName()); + default -> null; + }; + } + + private static Map constructPathToModIdMap(Collection mods) { + ImmutableMap.Builder builder = ImmutableMap.builder(); for (ModContainer mod : mods) { - Path path = mod.getRootPath().toAbsolutePath().normalize(); - builder.put(path, mod.getMetadata().getId()); + String modId = mod.getMetadata().getId(); + if (modId.equals("java")) { + continue; + } + + for (Path path : mod.getRootPaths()) { + URI uri = path.toUri(); + if (uri.getScheme().equals("jar") && path.toString().equals("/")) { // ZipFileSystem + String zipFilePath = path.getFileSystem().toString(); + builder.put(zipFilePath, modId); + } else { + builder.put(path.toAbsolutePath().normalize().toString(), modId); + } + + } } return builder.build(); } diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricSparkMixinPlugin.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricSparkMixinPlugin.java new file mode 100644 index 0000000..cfc8c95 --- /dev/null +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricSparkMixinPlugin.java @@ -0,0 +1,71 @@ +/* + * This file is part of spark. 
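The identify(MethodCallByLine) path above relies on SMAPs (JSR-45 SourceDebugExtension data): they map each line number in the transformed class back to line numbers in the original input files, and Mixin records one file entry per contributing mixin source. A toy illustration of that reverse lookup, with data structures invented to mirror SourceMap.FileInfo; the class and line values are made up:

import java.util.Map;

public class SmapLookupExample {
    // minimal stand-in for SourceMap.FileInfo: id -> source path
    record FileInfo(int id, String path) {}

    public static void main(String[] args) {
        Map<Integer, FileInfo> fileInfo = Map.of(
                1, new FileInfo(1, "net/minecraft/server/MinecraftServer.java"),
                2, new FileInfo(2, "com/example/mixin/MinecraftServerMixin.java")); // hypothetical mixin

        // reverse mapping: output line number -> ids of the input files it came from
        Map<Integer, int[]> reverseLineMapping = Map.of(1234, new int[]{2});

        int[] inputs = reverseLineMapping.get(1234);
        if (inputs != null) {
            for (int id : inputs) {
                System.out.println("line 1234 was injected from " + fileInfo.get(id).path());
            }
        }
    }
}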
+ * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <https://www.gnu.org/licenses/>. + */ + +package me.lucko.spark.fabric.plugin; + +import me.lucko.spark.fabric.smap.SourceDebugCache; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.objectweb.asm.tree.ClassNode; +import org.spongepowered.asm.mixin.MixinEnvironment; +import org.spongepowered.asm.mixin.extensibility.IMixinConfigPlugin; +import org.spongepowered.asm.mixin.extensibility.IMixinInfo; +import org.spongepowered.asm.mixin.transformer.IMixinTransformer; +import org.spongepowered.asm.mixin.transformer.ext.Extensions; +import org.spongepowered.asm.mixin.transformer.ext.IExtension; +import org.spongepowered.asm.mixin.transformer.ext.ITargetClassContext; + +import java.util.List; +import java.util.Set; + +public class FabricSparkMixinPlugin implements IMixinConfigPlugin, IExtension { + + private static final Logger LOGGER = LogManager.getLogger("spark"); + + @Override + public void onLoad(String mixinPackage) { + Object activeTransformer = MixinEnvironment.getCurrentEnvironment().getActiveTransformer(); + if (activeTransformer instanceof IMixinTransformer transformer && transformer.getExtensions() instanceof Extensions extensions) { + extensions.add(this); + } else { + LOGGER.error( + "Failed to initialize SMAP parser for spark profiler. " + + "Mod information for mixin injected methods is now only available with the async-profiler engine." + ); + } + } + + @Override + public void export(MixinEnvironment env, String name, boolean force, ClassNode classNode) { + SourceDebugCache.put(name, classNode); + } + + // noop + @Override public String getRefMapperConfig() { return null; } + @Override public boolean shouldApplyMixin(String targetClassName, String mixinClassName) { return true; } + @Override public void acceptTargets(Set<String> myTargets, Set<String> otherTargets) { } + @Override public List<String> getMixins() { return null; } + @Override public void preApply(String targetClassName, ClassNode targetClass, String mixinClassName, IMixinInfo mixinInfo) { } + @Override public void postApply(String targetClassName, ClassNode targetClass, String mixinClassName, IMixinInfo mixinInfo) { } + @Override public boolean checkActive(MixinEnvironment environment) { return true; } + @Override public void preApply(ITargetClassContext context) { } + @Override public void postApply(ITargetClassContext context) { } + +} diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/smap/MixinUtils.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/smap/MixinUtils.java new file mode 100644 index 0000000..ebf2766 --- /dev/null +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/smap/MixinUtils.java @@ -0,0 +1,52 @@ +/* + * This file is part of spark.
+ * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <https://www.gnu.org/licenses/>. + */ + +package me.lucko.spark.fabric.smap; + +import org.spongepowered.asm.mixin.transformer.Config; + +import java.lang.reflect.Field; +import java.util.HashMap; +import java.util.Map; + +public enum MixinUtils { + ; + + private static final Map<String, Config> MIXIN_CONFIGS; + + static { + Map<String, Config> configs; + try { + Field allConfigsField = Config.class.getDeclaredField("allConfigs"); + allConfigsField.setAccessible(true); + + //noinspection unchecked + configs = (Map<String, Config>) allConfigsField.get(null); + } catch (Exception e) { + e.printStackTrace(); + configs = new HashMap<>(); + } + MIXIN_CONFIGS = configs; + } + + public static Map<String, Config> getMixinConfigs() { + return MIXIN_CONFIGS; + } +} diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/smap/SourceDebugCache.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/smap/SourceDebugCache.java new file mode 100644 index 0000000..88adae6 --- /dev/null +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/smap/SourceDebugCache.java @@ -0,0 +1,87 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <https://www.gnu.org/licenses/>. + */ + +package me.lucko.spark.fabric.smap; + +import org.objectweb.asm.tree.ClassNode; +import org.spongepowered.asm.service.IClassBytecodeProvider; +import org.spongepowered.asm.service.MixinService; + +import java.util.Map; +import java.util.Optional; +import java.util.concurrent.ConcurrentHashMap; + +/** + * Caches the lookup of class -> source debug info for classes loaded on the JVM. + * + * The {@link me.lucko.spark.fabric.plugin.FabricSparkMixinPlugin} also supplements this cache with + * extra information as classes are exported.
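+ * + * <p>A rough sketch of the intended use (the class name below is invented for illustration; {@code getSourceDebugInfo} is the real entry point defined in this file):</p> + * <pre> + * String smap = SourceDebugCache.getSourceDebugInfo("com.example.mixin.TargetClass"); + * if (smap != null && smap.startsWith("SMAP")) { + *     // raw SourceDebugExtension text, ready for the SourceMap parser in this package + * } + * </pre>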
+ */ +public enum SourceDebugCache { + ; + + // class name -> smap + private static final Map<String, SmapValue> CACHE = new ConcurrentHashMap<>(); + + public static void put(String className, ClassNode node) { + if (className == null || node == null) { + return; + } + className = className.replace('/', '.'); + CACHE.put(className, SmapValue.of(node.sourceDebug)); + } + + public static String getSourceDebugInfo(String className) { + SmapValue cached = CACHE.get(className); + if (cached != null) { + return cached.value(); + } + + try { + IClassBytecodeProvider provider = MixinService.getService().getBytecodeProvider(); + ClassNode classNode = provider.getClassNode(className.replace('.', '/')); + + if (classNode != null) { + put(className, classNode); + return classNode.sourceDebug; + } + + } catch (Exception e) { + // ignore + } + + CACHE.put(className, SmapValue.NULL); + return null; + } + + private record SmapValue(String value) { + static final SmapValue NULL = new SmapValue(null); + + static SmapValue of(String value) { + if (value == null) { + return NULL; + } else { + return new SmapValue(value); + } + } + + } + +} diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/smap/SourceMap.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/smap/SourceMap.java new file mode 100644 index 0000000..5105a26 --- /dev/null +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/smap/SourceMap.java @@ -0,0 +1,133 @@ +/* + * SMAPSourceDebugExtension.java - Parse source debug extensions and + * enhance stack traces. + * + * Copyright (c) 2012 Michael Schierl + * + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * + * - Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * - Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * + * - Neither name of the copyright holders nor the names of its + * contributors may be used to endorse or promote products derived from + * this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND THE CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + * HOLDERS OR THE CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, + * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, + * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS + * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND + * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR + * TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE + * USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +package me.lucko.spark.fabric.smap; + +import java.util.HashMap; +import java.util.Map; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** + * Utility class to parse "SMAP" (source map) information from loaded Java classes.
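+ * + * <p>For orientation, a minimal invented SMAP of the shape this parser accepts: the {@code *F} section declares file id 1 and its path, and the {@code *L} entry {@code 1#1,20:100,2} maps input lines 1-20 of file 1 onto output lines starting at 100, two output lines per input line (all values are made up):</p> + * <pre> + * SMAP + * EntityMixin.java + * Mixin + * *S Mixin + * *F + * + 1 EntityMixin.java + * org/example/mixin/EntityMixin.java + * *L + * 1#1,20:100,2 + * *E + * </pre> + * <p>Feeding that text to {@code new SourceMap(text)} and querying {@code getReverseLineMapping().get(104)} yields {@code [1, 3]}: file id 1, input line 3.</p>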
+ * + * @author Michael Schierl + */ +public class SourceMap { + + private final String generatedFileName; + private final String firstStratum; + private final Map<Integer, FileInfo> fileinfo = new HashMap<>(); + private final Map<Integer, int[]> reverseLineMapping = new HashMap<>(); + + private static final Pattern LINE_INFO_PATTERN = Pattern.compile("([0-9]+)(?:#([0-9]+))?(?:,([0-9]+))?:([0-9]+)(?:,([0-9]+))?"); + + public SourceMap(String value) { + String[] lines = value.split("\n"); + if (!lines[0].equals("SMAP") || !lines[3].startsWith("*S ") || !lines[4].equals("*F")) { + throw new IllegalArgumentException(value); + } + + this.generatedFileName = lines[1]; + this.firstStratum = lines[3].substring(3); + + int idx = 5; + while (!lines[idx].startsWith("*")) { + String infoline = lines[idx++]; + String path = null; + + if (infoline.startsWith("+ ")) { + path = lines[idx++]; + infoline = infoline.substring(2); + } + + int pos = infoline.indexOf(" "); + int filenum = Integer.parseInt(infoline.substring(0, pos)); + String name = infoline.substring(pos + 1); + + this.fileinfo.put(filenum, new FileInfo(name, path == null ? name : path)); + } + + if (lines[idx].equals("*L")) { + idx++; + int lastLFI = 0; + + while (!lines[idx].startsWith("*")) { + Matcher m = LINE_INFO_PATTERN.matcher(lines[idx++]); + if (!m.matches()) { + throw new IllegalArgumentException(lines[idx - 1]); + } + + int inputStartLine = Integer.parseInt(m.group(1)); + int lineFileID = m.group(2) == null ? lastLFI : Integer.parseInt(m.group(2)); + int repeatCount = m.group(3) == null ? 1 : Integer.parseInt(m.group(3)); + int outputStartLine = Integer.parseInt(m.group(4)); + int outputLineIncrement = m.group(5) == null ? 1 : Integer.parseInt(m.group(5)); + + for (int i = 0; i < repeatCount; i++) { + int[] inputMapping = new int[] { lineFileID, inputStartLine + i }; + int baseOL = outputStartLine + i * outputLineIncrement; + + for (int ol = baseOL; ol < baseOL + outputLineIncrement; ol++) { + if (!this.reverseLineMapping.containsKey(ol)) { + this.reverseLineMapping.put(ol, inputMapping); + } + } + } + + lastLFI = lineFileID; + } + } + } + + public String getGeneratedFileName() { + return this.generatedFileName; + } + + public String getFirstStratum() { + return this.firstStratum; + } + + public Map<Integer, FileInfo> getFileInfo() { + return this.fileinfo; + } + + public Map<Integer, int[]> getReverseLineMapping() { + return this.reverseLineMapping; + } + + public record FileInfo(String name, String path) { } +} \ No newline at end of file diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/smap/SourceMapProvider.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/smap/SourceMapProvider.java new file mode 100644 index 0000000..1a4f246 --- /dev/null +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/smap/SourceMapProvider.java @@ -0,0 +1,53 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <https://www.gnu.org/licenses/>.
+ */ + +package me.lucko.spark.fabric.smap; + +import org.jetbrains.annotations.Nullable; + +import java.util.HashMap; +import java.util.Map; + +public class SourceMapProvider { + private final Map<String, SourceMap> cache = new HashMap<>(); + + public @Nullable SourceMap getSourceMap(String className) { + if (this.cache.containsKey(className)) { + return this.cache.get(className); + } + + SourceMap smap = null; + try { + String value = SourceDebugCache.getSourceDebugInfo(className); + if (value != null) { + value = value.replaceAll("\r\n?", "\n"); + if (value.startsWith("SMAP\n")) { + smap = new SourceMap(value); + } + } + } catch (Exception e) { + // ignore + } + + this.cache.put(className, smap); + return smap; + } + +} diff --git a/spark-fabric/src/main/resources/spark.mixins.json b/spark-fabric/src/main/resources/spark.mixins.json index e75b34f..63c1078 100644 --- a/spark-fabric/src/main/resources/spark.mixins.json +++ b/spark-fabric/src/main/resources/spark.mixins.json @@ -11,5 +11,6 @@ "server": [ "ServerEntityManagerAccessor", "ServerWorldAccessor" - ] + ], + "plugin": "me.lucko.spark.fabric.plugin.FabricSparkMixinPlugin" } \ No newline at end of file -- cgit From 7079484d428321c9b3db09394577efda4d591a4e Mon Sep 17 00:00:00 2001 From: Luck Date: Mon, 19 Sep 2022 18:57:02 +0100 Subject: Provide extra metadata about sources in sampler data --- .../spark/bukkit/BukkitClassSourceLookup.java | 2 +- .../me/lucko/spark/bukkit/BukkitSparkPlugin.java | 16 +- .../bungeecord/BungeeCordClassSourceLookup.java | 2 +- .../spark/bungeecord/BungeeCordSparkPlugin.java | 14 +- .../java/me/lucko/spark/common/SparkPlatform.java | 2 +- .../java/me/lucko/spark/common/SparkPlugin.java | 14 +- .../common/command/modules/SamplerModule.java | 3 +- .../spark/common/sampler/AbstractSampler.java | 10 +- .../me/lucko/spark/common/sampler/Sampler.java | 2 +- .../spark/common/sampler/async/AsyncSampler.java | 2 +- .../spark/common/sampler/java/JavaSampler.java | 2 +- .../common/sampler/source/ClassSourceLookup.java | 463 +++++++++++++++++++++ .../common/sampler/source/SourceMetadata.java | 81 ++++ .../lucko/spark/common/util/ClassSourceLookup.java | 452 -------------------- .../src/main/proto/spark/spark_sampler.proto | 6 + .../spark/fabric/FabricClassSourceLookup.java | 2 +- .../spark/fabric/plugin/FabricSparkPlugin.java | 19 +- .../lucko/spark/forge/ForgeClassSourceLookup.java | 2 +- .../lucko/spark/forge/plugin/ForgeSparkPlugin.java | 16 +- .../spark/minestom/MinestomClassSourceLookup.java | 2 +- .../lucko/spark/minestom/MinestomSparkPlugin.java | 14 +- .../spark/nukkit/NukkitClassSourceLookup.java | 2 +- .../me/lucko/spark/nukkit/NukkitSparkPlugin.java | 2 +- .../spark/sponge/Sponge7ClassSourceLookup.java | 2 +- .../me/lucko/spark/sponge/Sponge7SparkPlugin.java | 2 +- .../spark/sponge/Sponge8ClassSourceLookup.java | 2 +- .../me/lucko/spark/sponge/Sponge8SparkPlugin.java | 17 +- .../spark/velocity/VelocityClassSourceLookup.java | 2 +- .../lucko/spark/velocity/VelocitySparkPlugin.java | 14 +- .../spark/velocity/Velocity4ClassSourceLookup.java | 4 +- .../lucko/spark/velocity/Velocity4SparkPlugin.java | 14 +- .../spark/waterdog/WaterdogClassSourceLookup.java | 2 +- .../lucko/spark/waterdog/WaterdogSparkPlugin.java | 14 +- 33 files changed, 721 insertions(+), 482 deletions(-) create mode 100644 spark-common/src/main/java/me/lucko/spark/common/sampler/source/ClassSourceLookup.java create mode 100644 spark-common/src/main/java/me/lucko/spark/common/sampler/source/SourceMetadata.java delete mode 100644
spark-common/src/main/java/me/lucko/spark/common/util/ClassSourceLookup.java (limited to 'spark-common/src') diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitClassSourceLookup.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitClassSourceLookup.java index 6d8afda..f9c0c0b 100644 --- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitClassSourceLookup.java +++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitClassSourceLookup.java @@ -20,7 +20,7 @@ package me.lucko.spark.bukkit; -import me.lucko.spark.common.util.ClassSourceLookup; +import me.lucko.spark.common.sampler.source.ClassSourceLookup; import org.bukkit.plugin.java.JavaPlugin; diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java index 5737d3d..87490ea 100644 --- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java +++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java @@ -30,9 +30,10 @@ import me.lucko.spark.common.platform.PlatformInfo; import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider; import me.lucko.spark.common.platform.world.WorldInfoProvider; import me.lucko.spark.common.sampler.ThreadDumper; +import me.lucko.spark.common.sampler.source.ClassSourceLookup; +import me.lucko.spark.common.sampler.source.SourceMetadata; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.tick.TickReporter; -import me.lucko.spark.common.util.ClassSourceLookup; import net.kyori.adventure.platform.bukkit.BukkitAudiences; @@ -40,10 +41,13 @@ import org.bukkit.ChatColor; import org.bukkit.command.Command; import org.bukkit.command.CommandExecutor; import org.bukkit.command.CommandSender; +import org.bukkit.plugin.Plugin; import org.bukkit.plugin.ServicePriority; import org.bukkit.plugin.java.JavaPlugin; import java.nio.file.Path; +import java.util.Arrays; +import java.util.Collection; import java.util.List; import java.util.logging.Level; import java.util.stream.Stream; @@ -180,6 +184,16 @@ public class BukkitSparkPlugin extends JavaPlugin implements SparkPlugin { return new BukkitClassSourceLookup(); } + @Override + public Collection getKnownSources() { + return SourceMetadata.gather( + Arrays.asList(getServer().getPluginManager().getPlugins()), + Plugin::getName, + plugin -> plugin.getDescription().getVersion(), + plugin -> String.join(", ", plugin.getDescription().getAuthors()) + ); + } + @Override public PlayerPingProvider createPlayerPingProvider() { if (BukkitPlayerPingProvider.isSupported()) { diff --git a/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordClassSourceLookup.java b/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordClassSourceLookup.java index e601f87..2024d54 100644 --- a/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordClassSourceLookup.java +++ b/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordClassSourceLookup.java @@ -20,7 +20,7 @@ package me.lucko.spark.bungeecord; -import me.lucko.spark.common.util.ClassSourceLookup; +import me.lucko.spark.common.sampler.source.ClassSourceLookup; import net.md_5.bungee.api.plugin.PluginDescription; diff --git a/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordSparkPlugin.java b/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordSparkPlugin.java index e259adc..71beddb 100644 --- 
a/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordSparkPlugin.java +++ b/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordSparkPlugin.java @@ -24,7 +24,8 @@ import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.SparkPlugin; import me.lucko.spark.common.monitor.ping.PlayerPingProvider; import me.lucko.spark.common.platform.PlatformInfo; -import me.lucko.spark.common.util.ClassSourceLookup; +import me.lucko.spark.common.sampler.source.ClassSourceLookup; +import me.lucko.spark.common.sampler.source.SourceMetadata; import net.kyori.adventure.platform.bungeecord.BungeeAudiences; import net.md_5.bungee.api.CommandSender; @@ -33,6 +34,7 @@ import net.md_5.bungee.api.plugin.Plugin; import net.md_5.bungee.api.plugin.TabExecutor; import java.nio.file.Path; +import java.util.Collection; import java.util.logging.Level; import java.util.stream.Stream; @@ -91,6 +93,16 @@ public class BungeeCordSparkPlugin extends Plugin implements SparkPlugin { return new BungeeCordClassSourceLookup(); } + @Override + public Collection getKnownSources() { + return SourceMetadata.gather( + getProxy().getPluginManager().getPlugins(), + plugin -> plugin.getDescription().getName(), + plugin -> plugin.getDescription().getVersion(), + plugin -> plugin.getDescription().getAuthor() + ); + } + @Override public PlayerPingProvider createPlayerPingProvider() { return new BungeeCordPlayerPingProvider(getProxy()); diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java index f92abf3..1969206 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java +++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java @@ -45,10 +45,10 @@ import me.lucko.spark.common.monitor.ping.PingStatistics; import me.lucko.spark.common.monitor.ping.PlayerPingProvider; import me.lucko.spark.common.monitor.tick.TickStatistics; import me.lucko.spark.common.platform.PlatformStatisticsProvider; +import me.lucko.spark.common.sampler.source.ClassSourceLookup; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.tick.TickReporter; import me.lucko.spark.common.util.BytebinClient; -import me.lucko.spark.common.util.ClassSourceLookup; import me.lucko.spark.common.util.Configuration; import me.lucko.spark.common.util.TemporaryFiles; diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java index 1116b04..e2a2dbd 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java +++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java @@ -27,11 +27,14 @@ import me.lucko.spark.common.platform.PlatformInfo; import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider; import me.lucko.spark.common.platform.world.WorldInfoProvider; import me.lucko.spark.common.sampler.ThreadDumper; +import me.lucko.spark.common.sampler.source.ClassSourceLookup; +import me.lucko.spark.common.sampler.source.SourceMetadata; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.tick.TickReporter; -import me.lucko.spark.common.util.ClassSourceLookup; import java.nio.file.Path; +import java.util.Collection; +import java.util.Collections; import java.util.logging.Level; import java.util.stream.Stream; @@ -132,6 +135,15 @@ public interface SparkPlugin { return ClassSourceLookup.NO_OP; } + /** + * Gets a list of known sources (plugins/mods) on the 
platform. + * + * @return a list of sources + */ + default Collection getKnownSources() { + return Collections.emptyList(); + } + /** * Creates a player ping provider function. * diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java index 0a80c31..2afed64 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java @@ -38,6 +38,7 @@ import me.lucko.spark.common.sampler.ThreadGrouper; import me.lucko.spark.common.sampler.ThreadNodeOrder; import me.lucko.spark.common.sampler.async.AsyncSampler; import me.lucko.spark.common.sampler.node.MergeMode; +import me.lucko.spark.common.sampler.source.ClassSourceLookup; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.util.MethodDisambiguator; import me.lucko.spark.proto.SparkSamplerProtos; @@ -303,7 +304,7 @@ public class SamplerModule implements CommandModule { } private void handleUpload(SparkPlatform platform, CommandResponseHandler resp, Sampler sampler, ThreadNodeOrder threadOrder, String comment, MergeMode mergeMode, boolean saveToFileFlag) { - SparkSamplerProtos.SamplerData output = sampler.toProto(platform, resp.sender(), threadOrder, comment, mergeMode, platform.createClassSourceLookup()); + SparkSamplerProtos.SamplerData output = sampler.toProto(platform, resp.sender(), threadOrder, comment, mergeMode, ClassSourceLookup.create(platform)); boolean saveToFile = false; if (saveToFileFlag) { diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java index 3cfef0b..7b57504 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java @@ -27,13 +27,16 @@ import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider; import me.lucko.spark.common.sampler.aggregator.DataAggregator; import me.lucko.spark.common.sampler.node.MergeMode; import me.lucko.spark.common.sampler.node.ThreadNode; +import me.lucko.spark.common.sampler.source.ClassSourceLookup; +import me.lucko.spark.common.sampler.source.SourceMetadata; import me.lucko.spark.common.tick.TickHook; -import me.lucko.spark.common.util.ClassSourceLookup; import me.lucko.spark.proto.SparkSamplerProtos.SamplerData; import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata; +import java.util.Collection; import java.util.Comparator; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.concurrent.CompletableFuture; @@ -150,6 +153,11 @@ public abstract class AbstractSampler implements Sampler { e.printStackTrace(); } + Collection knownSources = platform.getPlugin().getKnownSources(); + for (SourceMetadata source : knownSources) { + metadata.putSources(source.getName().toLowerCase(Locale.ROOT), source.toProto()); + } + proto.setMetadata(metadata); } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java index 84f2da1..98281de 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java @@ -24,7 +24,7 @@ import me.lucko.spark.common.SparkPlatform; import 
me.lucko.spark.common.command.sender.CommandSender; import me.lucko.spark.common.sampler.node.MergeMode; import me.lucko.spark.common.sampler.node.ThreadNode; -import me.lucko.spark.common.util.ClassSourceLookup; +import me.lucko.spark.common.sampler.source.ClassSourceLookup; import me.lucko.spark.proto.SparkSamplerProtos.SamplerData; import java.util.Comparator; diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java index dae3852..37ccd96 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java @@ -30,7 +30,7 @@ import me.lucko.spark.common.sampler.ThreadGrouper; import me.lucko.spark.common.sampler.async.jfr.JfrReader; import me.lucko.spark.common.sampler.node.MergeMode; import me.lucko.spark.common.sampler.node.ThreadNode; -import me.lucko.spark.common.util.ClassSourceLookup; +import me.lucko.spark.common.sampler.source.ClassSourceLookup; import me.lucko.spark.common.util.TemporaryFiles; import me.lucko.spark.proto.SparkSamplerProtos.SamplerData; diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java index 913faee..0f73a9f 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java @@ -29,8 +29,8 @@ import me.lucko.spark.common.sampler.ThreadDumper; import me.lucko.spark.common.sampler.ThreadGrouper; import me.lucko.spark.common.sampler.node.MergeMode; import me.lucko.spark.common.sampler.node.ThreadNode; +import me.lucko.spark.common.sampler.source.ClassSourceLookup; import me.lucko.spark.common.tick.TickHook; -import me.lucko.spark.common.util.ClassSourceLookup; import me.lucko.spark.proto.SparkSamplerProtos.SamplerData; import java.lang.management.ManagementFactory; diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/source/ClassSourceLookup.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/source/ClassSourceLookup.java new file mode 100644 index 0000000..66b41d2 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/source/ClassSourceLookup.java @@ -0,0 +1,463 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +package me.lucko.spark.common.sampler.source; + +import me.lucko.spark.common.SparkPlatform; +import me.lucko.spark.common.sampler.node.StackTraceNode; +import me.lucko.spark.common.sampler.node.ThreadNode; +import me.lucko.spark.common.util.ClassFinder; + +import org.checkerframework.checker.nullness.qual.Nullable; + +import java.io.IOException; +import java.net.MalformedURLException; +import java.net.URISyntaxException; +import java.net.URL; +import java.net.URLClassLoader; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.security.CodeSource; +import java.security.ProtectionDomain; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; +import java.util.function.Function; +import java.util.stream.Collectors; + +/** + * A function which defines the source of given {@link Class}es or (Mixin) method calls. + */ +public interface ClassSourceLookup { + + /** + * Identify the given class. + * + * @param clazz the class + * @return the source of the class + */ + @Nullable String identify(Class clazz) throws Exception; + + /** + * Identify the given method call. + * + * @param methodCall the method call info + * @return the source of the method call + */ + default @Nullable String identify(MethodCall methodCall) throws Exception { + return null; + } + + /** + * Identify the given method call. + * + * @param methodCall the method call info + * @return the source of the method call + */ + default @Nullable String identify(MethodCallByLine methodCall) throws Exception { + return null; + } + + /** + * A no-operation {@link ClassSourceLookup}. + */ + ClassSourceLookup NO_OP = new ClassSourceLookup() { + @Override + public @Nullable String identify(Class clazz) { + return null; + } + }; + + static ClassSourceLookup create(SparkPlatform platform) { + try { + return platform.createClassSourceLookup(); + } catch (Exception e) { + e.printStackTrace(); + return NO_OP; + } + } + + /** + * A {@link ClassSourceLookup} which identifies classes based on their {@link ClassLoader}. + */ + abstract class ByClassLoader implements ClassSourceLookup { + + public abstract @Nullable String identify(ClassLoader loader) throws Exception; + + @Override + public final @Nullable String identify(Class clazz) throws Exception { + ClassLoader loader = clazz.getClassLoader(); + while (loader != null) { + String source = identify(loader); + if (source != null) { + return source; + } + loader = loader.getParent(); + } + return null; + } + } + + /** + * A {@link ClassSourceLookup} which identifies classes based on URL. + */ + interface ByUrl extends ClassSourceLookup { + + default String identifyUrl(URL url) throws URISyntaxException, MalformedURLException { + Path path = null; + + String protocol = url.getProtocol(); + if (protocol.equals("file")) { + path = Paths.get(url.toURI()); + } else if (protocol.equals("jar")) { + URL innerUrl = new URL(url.getPath()); + path = Paths.get(innerUrl.getPath().split("!")[0]); + } + + if (path != null) { + return identifyFile(path.toAbsolutePath().normalize()); + } + + return null; + } + + default String identifyFile(Path path) { + return identifyFileName(path.getFileName().toString()); + } + + default String identifyFileName(String fileName) { + return fileName.endsWith(".jar") ? fileName.substring(0, fileName.length() - 4) : null; + } + } + + /** + * A {@link ClassSourceLookup} which identifies classes based on the first URL in a {@link URLClassLoader}. 
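+ * + * <p>The practical effect of the {@code ByUrl} defaults above, sketched with an invented jar name: a class served from {@code file:/plugins/CoolPlugin.jar} resolves through {@code identifyUrl} and {@code identifyFile} down to {@code identifyFileName("CoolPlugin.jar")}, which strips the extension (shown here via the {@code ByCodeSource} implementation defined below):</p> + * <pre> + * new ClassSourceLookup.ByCodeSource().identifyFileName("CoolPlugin.jar"); // -> "CoolPlugin" + * </pre>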
+ */ + class ByFirstUrlSource extends ClassSourceLookup.ByClassLoader implements ClassSourceLookup.ByUrl { + @Override + public @Nullable String identify(ClassLoader loader) throws IOException, URISyntaxException { + if (loader instanceof URLClassLoader) { + URLClassLoader urlClassLoader = (URLClassLoader) loader; + URL[] urls = urlClassLoader.getURLs(); + if (urls.length == 0) { + return null; + } + return identifyUrl(urls[0]); + } + return null; + } + } + + /** + * A {@link ClassSourceLookup} which identifies classes based on their {@link ProtectionDomain#getCodeSource()}. + */ + class ByCodeSource implements ClassSourceLookup, ClassSourceLookup.ByUrl { + @Override + public @Nullable String identify(Class clazz) throws URISyntaxException, MalformedURLException { + ProtectionDomain protectionDomain = clazz.getProtectionDomain(); + if (protectionDomain == null) { + return null; + } + CodeSource codeSource = protectionDomain.getCodeSource(); + if (codeSource == null) { + return null; + } + + URL url = codeSource.getLocation(); + return url == null ? null : identifyUrl(url); + } + } + + interface Visitor { + void visit(ThreadNode node); + + boolean hasClassSourceMappings(); + + Map getClassSourceMapping(); + + boolean hasMethodSourceMappings(); + + Map getMethodSourceMapping(); + + boolean hasLineSourceMappings(); + + Map getLineSourceMapping(); + } + + static Visitor createVisitor(ClassSourceLookup lookup) { + if (lookup == ClassSourceLookup.NO_OP) { + return NoOpVisitor.INSTANCE; // don't bother! + } + return new VisitorImpl(lookup); + } + + enum NoOpVisitor implements Visitor { + INSTANCE; + + @Override + public void visit(ThreadNode node) { + + } + + @Override + public boolean hasClassSourceMappings() { + return false; + } + + @Override + public Map getClassSourceMapping() { + return Collections.emptyMap(); + } + + @Override + public boolean hasMethodSourceMappings() { + return false; + } + + @Override + public Map getMethodSourceMapping() { + return Collections.emptyMap(); + } + + @Override + public boolean hasLineSourceMappings() { + return false; + } + + @Override + public Map getLineSourceMapping() { + return Collections.emptyMap(); + } + } + + /** + * Visitor which scans {@link StackTraceNode}s and accumulates class/method call identities. 
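+ * + * <p>Expected call pattern, as a sketch ({@code lookup} and {@code threadNodes} are assumed inputs, supplied by the platform and the profiler respectively):</p> + * <pre> + * ClassSourceLookup.Visitor visitor = ClassSourceLookup.createVisitor(lookup); + * for (ThreadNode node : threadNodes) { + *     visitor.visit(node); // walks each stack tree, resolving sources along the way + * } + * Map<String, String> classSources = visitor.getClassSourceMapping(); // class name -> plugin/mod id + * </pre>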
+ */ + class VisitorImpl implements Visitor { + private final ClassSourceLookup lookup; + private final ClassFinder classFinder = new ClassFinder(); + + private final SourcesMap<String> classSources = new SourcesMap<>(Function.identity()); + private final SourcesMap<MethodCall> methodSources = new SourcesMap<>(MethodCall::toString); + private final SourcesMap<MethodCallByLine> lineSources = new SourcesMap<>(MethodCallByLine::toString); + + VisitorImpl(ClassSourceLookup lookup) { + this.lookup = lookup; + } + + @Override + public void visit(ThreadNode node) { + for (StackTraceNode child : node.getChildren()) { + visitStackNode(child); + } + } + + private void visitStackNode(StackTraceNode node) { + this.classSources.computeIfAbsent( + node.getClassName(), + className -> { + Class<?> clazz = this.classFinder.findClass(className); + if (clazz == null) { + return null; + } + return this.lookup.identify(clazz); + }); + + if (node.getMethodDescription() != null) { + MethodCall methodCall = new MethodCall(node.getClassName(), node.getMethodName(), node.getMethodDescription()); + this.methodSources.computeIfAbsent(methodCall, this.lookup::identify); + } else { + MethodCallByLine methodCall = new MethodCallByLine(node.getClassName(), node.getMethodName(), node.getLineNumber()); + this.lineSources.computeIfAbsent(methodCall, this.lookup::identify); + } + + // recursively + for (StackTraceNode child : node.getChildren()) { + visitStackNode(child); + } + } + + @Override + public boolean hasClassSourceMappings() { + return this.classSources.hasMappings(); + } + + @Override + public Map<String, String> getClassSourceMapping() { + return this.classSources.export(); + } + + @Override + public boolean hasMethodSourceMappings() { + return this.methodSources.hasMappings(); + } + + @Override + public Map<String, String> getMethodSourceMapping() { + return this.methodSources.export(); + } + + @Override + public boolean hasLineSourceMappings() { + return this.lineSources.hasMappings(); + } + + @Override + public Map<String, String> getLineSourceMapping() { + return this.lineSources.export(); + } + } + + final class SourcesMap<T> { + // key --> identifier (plugin name) + private final Map<T, String> map = new HashMap<>(); + private final Function<? super T, String> keyToStringFunction; + + private SourcesMap(Function<? super T, String> keyToStringFunction) { + this.keyToStringFunction = keyToStringFunction; + } + + public void computeIfAbsent(T key, ComputeSourceFunction<T> function) { + if (!this.map.containsKey(key)) { + try { + this.map.put(key, function.compute(key)); + } catch (Throwable e) { + this.map.put(key, null); + } + } + } + + public boolean hasMappings() { + this.map.values().removeIf(Objects::isNull); + return !this.map.isEmpty(); + } + + public Map<String, String> export() { + this.map.values().removeIf(Objects::isNull); + if (this.keyToStringFunction.equals(Function.identity())) { + //noinspection unchecked + return (Map<String, String>) this.map; + } else { + return this.map.entrySet().stream().collect(Collectors.toMap( + e -> this.keyToStringFunction.apply(e.getKey()), + Map.Entry::getValue + )); + } + } + + private interface ComputeSourceFunction<T> { + String compute(T key) throws Exception; + } + } + + /** + * Encapsulates information about a given method call using the name + method description.
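+ * + * <p>The serialised key formats, following the {@code toString()} implementations below (example values invented): a described call keys on three semicolon-joined parts, while a line-based call keys on the class name and line number only:</p> + * <pre> + * new MethodCall("com.example.Foo", "tick", "(I)V").toString();      // -> "com.example.Foo;tick;(I)V" + * new MethodCallByLine("com.example.Foo", "tick", 42).toString();    // -> "com.example.Foo;42" + * </pre>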
+ */ + final class MethodCall { + private final String className; + private final String methodName; + private final String methodDescriptor; + + public MethodCall(String className, String methodName, String methodDescriptor) { + this.className = className; + this.methodName = methodName; + this.methodDescriptor = methodDescriptor; + } + + public String getClassName() { + return this.className; + } + + public String getMethodName() { + return this.methodName; + } + + public String getMethodDescriptor() { + return this.methodDescriptor; + } + + @Override + public String toString() { + return this.className + ";" + this.methodName + ";" + this.methodDescriptor; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (!(o instanceof MethodCall)) return false; + MethodCall that = (MethodCall) o; + return this.className.equals(that.className) && + this.methodName.equals(that.methodName) && + this.methodDescriptor.equals(that.methodDescriptor); + } + + @Override + public int hashCode() { + return Objects.hash(this.className, this.methodName, this.methodDescriptor); + } + } + + /** + * Encapsulates information about a given method call using the name + line number. + */ + final class MethodCallByLine { + private final String className; + private final String methodName; + private final int lineNumber; + + public MethodCallByLine(String className, String methodName, int lineNumber) { + this.className = className; + this.methodName = methodName; + this.lineNumber = lineNumber; + } + + public String getClassName() { + return this.className; + } + + public String getMethodName() { + return this.methodName; + } + + public int getLineNumber() { + return this.lineNumber; + } + + @Override + public String toString() { + return this.className + ";" + this.lineNumber; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (!(o instanceof MethodCallByLine)) return false; + MethodCallByLine that = (MethodCallByLine) o; + return this.lineNumber == that.lineNumber && this.className.equals(that.className); + } + + @Override + public int hashCode() { + return Objects.hash(this.className, this.lineNumber); + } + } + +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/source/SourceMetadata.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/source/SourceMetadata.java new file mode 100644 index 0000000..0808d66 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/source/SourceMetadata.java @@ -0,0 +1,81 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +package me.lucko.spark.common.sampler.source; + +import com.google.common.collect.ImmutableList; + +import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata; + +import java.util.Collection; +import java.util.List; +import java.util.function.Function; + +/** + * A "source" is a plugin or mod on the platform that may be identified + * as a source of a method call in a profile. + */ +public class SourceMetadata { + + public static <T> List<SourceMetadata> gather(Collection<T> sources, Function<? super T, String> nameFunction, Function<? super T, String> versionFunction, Function<? super T, String> authorFunction) { + ImmutableList.Builder<SourceMetadata> builder = ImmutableList.builder(); + + for (T source : sources) { + String name = nameFunction.apply(source); + String version = versionFunction.apply(source); + String author = authorFunction.apply(source); + + SourceMetadata metadata = new SourceMetadata(name, version, author); + builder.add(metadata); + } + + return builder.build(); + } + + private final String name; + private final String version; + private final String author; + + public SourceMetadata(String name, String version, String author) { + this.name = name; + this.version = version; + this.author = author; + } + + public String getName() { + return this.name; + } + + public String getVersion() { + return this.version; + } + + public String getAuthor() { + return this.author; + } + + public SamplerMetadata.SourceMetadata toProto() { + return SamplerMetadata.SourceMetadata.newBuilder() + .setName(this.name) + .setVersion(this.version) + .build(); + } + +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/ClassSourceLookup.java b/spark-common/src/main/java/me/lucko/spark/common/util/ClassSourceLookup.java deleted file mode 100644 index 668f31a..0000000 --- a/spark-common/src/main/java/me/lucko/spark/common/util/ClassSourceLookup.java +++ /dev/null @@ -1,452 +0,0 @@ -/* - * This file is part of spark. - * - * Copyright (c) lucko (Luck) - * Copyright (c) contributors - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see <https://www.gnu.org/licenses/>. - */ - -package me.lucko.spark.common.util; - -import me.lucko.spark.common.sampler.node.StackTraceNode; -import me.lucko.spark.common.sampler.node.ThreadNode; - -import org.checkerframework.checker.nullness.qual.Nullable; - -import java.io.IOException; -import java.net.MalformedURLException; -import java.net.URISyntaxException; -import java.net.URL; -import java.net.URLClassLoader; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.security.CodeSource; -import java.security.ProtectionDomain; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; -import java.util.Objects; -import java.util.function.Function; -import java.util.stream.Collectors; - -/** - * A function which defines the source of given {@link Class}es or (Mixin) method calls. - */ -public interface ClassSourceLookup { - - /** - * Identify the given class.
- * - * @param clazz the class - * @return the source of the class - */ - @Nullable String identify(Class clazz) throws Exception; - - /** - * Identify the given method call. - * - * @param methodCall the method call info - * @return the source of the method call - */ - default @Nullable String identify(MethodCall methodCall) throws Exception { - return null; - } - - /** - * Identify the given method call. - * - * @param methodCall the method call info - * @return the source of the method call - */ - default @Nullable String identify(MethodCallByLine methodCall) throws Exception { - return null; - } - - /** - * A no-operation {@link ClassSourceLookup}. - */ - ClassSourceLookup NO_OP = new ClassSourceLookup() { - @Override - public @Nullable String identify(Class clazz) { - return null; - } - }; - - /** - * A {@link ClassSourceLookup} which identifies classes based on their {@link ClassLoader}. - */ - abstract class ByClassLoader implements ClassSourceLookup { - - public abstract @Nullable String identify(ClassLoader loader) throws Exception; - - @Override - public final @Nullable String identify(Class clazz) throws Exception { - ClassLoader loader = clazz.getClassLoader(); - while (loader != null) { - String source = identify(loader); - if (source != null) { - return source; - } - loader = loader.getParent(); - } - return null; - } - } - - /** - * A {@link ClassSourceLookup} which identifies classes based on URL. - */ - interface ByUrl extends ClassSourceLookup { - - default String identifyUrl(URL url) throws URISyntaxException, MalformedURLException { - Path path = null; - - String protocol = url.getProtocol(); - if (protocol.equals("file")) { - path = Paths.get(url.toURI()); - } else if (protocol.equals("jar")) { - URL innerUrl = new URL(url.getPath()); - path = Paths.get(innerUrl.getPath().split("!")[0]); - } - - if (path != null) { - return identifyFile(path.toAbsolutePath().normalize()); - } - - return null; - } - - default String identifyFile(Path path) { - return identifyFileName(path.getFileName().toString()); - } - - default String identifyFileName(String fileName) { - return fileName.endsWith(".jar") ? fileName.substring(0, fileName.length() - 4) : null; - } - } - - /** - * A {@link ClassSourceLookup} which identifies classes based on the first URL in a {@link URLClassLoader}. - */ - class ByFirstUrlSource extends ByClassLoader implements ByUrl { - @Override - public @Nullable String identify(ClassLoader loader) throws IOException, URISyntaxException { - if (loader instanceof URLClassLoader) { - URLClassLoader urlClassLoader = (URLClassLoader) loader; - URL[] urls = urlClassLoader.getURLs(); - if (urls.length == 0) { - return null; - } - return identifyUrl(urls[0]); - } - return null; - } - } - - /** - * A {@link ClassSourceLookup} which identifies classes based on their {@link ProtectionDomain#getCodeSource()}. - */ - class ByCodeSource implements ClassSourceLookup, ByUrl { - @Override - public @Nullable String identify(Class clazz) throws URISyntaxException, MalformedURLException { - ProtectionDomain protectionDomain = clazz.getProtectionDomain(); - if (protectionDomain == null) { - return null; - } - CodeSource codeSource = protectionDomain.getCodeSource(); - if (codeSource == null) { - return null; - } - - URL url = codeSource.getLocation(); - return url == null ? 
null : identifyUrl(url); - } - } - - interface Visitor { - void visit(ThreadNode node); - - boolean hasClassSourceMappings(); - - Map getClassSourceMapping(); - - boolean hasMethodSourceMappings(); - - Map getMethodSourceMapping(); - - boolean hasLineSourceMappings(); - - Map getLineSourceMapping(); - } - - static Visitor createVisitor(ClassSourceLookup lookup) { - if (lookup == ClassSourceLookup.NO_OP) { - return NoOpVisitor.INSTANCE; // don't bother! - } - return new VisitorImpl(lookup); - } - - enum NoOpVisitor implements Visitor { - INSTANCE; - - @Override - public void visit(ThreadNode node) { - - } - - @Override - public boolean hasClassSourceMappings() { - return false; - } - - @Override - public Map getClassSourceMapping() { - return Collections.emptyMap(); - } - - @Override - public boolean hasMethodSourceMappings() { - return false; - } - - @Override - public Map getMethodSourceMapping() { - return Collections.emptyMap(); - } - - @Override - public boolean hasLineSourceMappings() { - return false; - } - - @Override - public Map getLineSourceMapping() { - return Collections.emptyMap(); - } - } - - /** - * Visitor which scans {@link StackTraceNode}s and accumulates class/method call identities. - */ - class VisitorImpl implements Visitor { - private final ClassSourceLookup lookup; - private final ClassFinder classFinder = new ClassFinder(); - - private final SourcesMap classSources = new SourcesMap<>(Function.identity()); - private final SourcesMap methodSources = new SourcesMap<>(MethodCall::toString); - private final SourcesMap lineSources = new SourcesMap<>(MethodCallByLine::toString); - - VisitorImpl(ClassSourceLookup lookup) { - this.lookup = lookup; - } - - @Override - public void visit(ThreadNode node) { - for (StackTraceNode child : node.getChildren()) { - visitStackNode(child); - } - } - - private void visitStackNode(StackTraceNode node) { - this.classSources.computeIfAbsent( - node.getClassName(), - className -> { - Class clazz = this.classFinder.findClass(className); - if (clazz == null) { - return null; - } - return this.lookup.identify(clazz); - }); - - if (node.getMethodDescription() != null) { - MethodCall methodCall = new MethodCall(node.getClassName(), node.getMethodName(), node.getMethodDescription()); - this.methodSources.computeIfAbsent(methodCall, this.lookup::identify); - } else { - MethodCallByLine methodCall = new MethodCallByLine(node.getClassName(), node.getMethodName(), node.getLineNumber()); - this.lineSources.computeIfAbsent(methodCall, this.lookup::identify); - } - - // recursively - for (StackTraceNode child : node.getChildren()) { - visitStackNode(child); - } - } - - @Override - public boolean hasClassSourceMappings() { - return this.classSources.hasMappings(); - } - - @Override - public Map getClassSourceMapping() { - return this.classSources.export(); - } - - @Override - public boolean hasMethodSourceMappings() { - return this.methodSources.hasMappings(); - } - - @Override - public Map getMethodSourceMapping() { - return this.methodSources.export(); - } - - @Override - public boolean hasLineSourceMappings() { - return this.lineSources.hasMappings(); - } - - @Override - public Map getLineSourceMapping() { - return this.lineSources.export(); - } - } - - final class SourcesMap { - // --> identifier (plugin name) - private final Map map = new HashMap<>(); - private final Function keyToStringFunction; - - private SourcesMap(Function keyToStringFunction) { - this.keyToStringFunction = keyToStringFunction; - } - - public void computeIfAbsent(T key, 
ComputeSourceFunction function) { - if (!this.map.containsKey(key)) { - try { - this.map.put(key, function.compute(key)); - } catch (Throwable e) { - this.map.put(key, null); - } - } - } - - public boolean hasMappings() { - this.map.values().removeIf(Objects::isNull); - return !this.map.isEmpty(); - } - - public Map export() { - this.map.values().removeIf(Objects::isNull); - if (this.keyToStringFunction.equals(Function.identity())) { - //noinspection unchecked - return (Map) this.map; - } else { - return this.map.entrySet().stream().collect(Collectors.toMap( - e -> this.keyToStringFunction.apply(e.getKey()), - Map.Entry::getValue - )); - } - } - - private interface ComputeSourceFunction { - String compute(T key) throws Exception; - } - } - - /** - * Encapsulates information about a given method call using the name + method description. - */ - final class MethodCall { - private final String className; - private final String methodName; - private final String methodDescriptor; - - public MethodCall(String className, String methodName, String methodDescriptor) { - this.className = className; - this.methodName = methodName; - this.methodDescriptor = methodDescriptor; - } - - public String getClassName() { - return this.className; - } - - public String getMethodName() { - return this.methodName; - } - - public String getMethodDescriptor() { - return this.methodDescriptor; - } - - @Override - public String toString() { - return this.className + ";" + this.methodName + ";" + this.methodDescriptor; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (!(o instanceof MethodCall)) return false; - MethodCall that = (MethodCall) o; - return this.className.equals(that.className) && - this.methodName.equals(that.methodName) && - this.methodDescriptor.equals(that.methodDescriptor); - } - - @Override - public int hashCode() { - return Objects.hash(this.className, this.methodName, this.methodDescriptor); - } - } - - /** - * Encapsulates information about a given method call using the name + line number. 
- */ - final class MethodCallByLine { - private final String className; - private final String methodName; - private final int lineNumber; - - public MethodCallByLine(String className, String methodName, int lineNumber) { - this.className = className; - this.methodName = methodName; - this.lineNumber = lineNumber; - } - - public String getClassName() { - return this.className; - } - - public String getMethodName() { - return this.methodName; - } - - public int getLineNumber() { - return this.lineNumber; - } - - @Override - public String toString() { - return this.className + ";" + this.lineNumber; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (!(o instanceof MethodCallByLine)) return false; - MethodCallByLine that = (MethodCallByLine) o; - return this.lineNumber == that.lineNumber && this.className.equals(that.className); - } - - @Override - public int hashCode() { - return Objects.hash(this.className, this.lineNumber); - } - } - -} diff --git a/spark-common/src/main/proto/spark/spark_sampler.proto b/spark-common/src/main/proto/spark/spark_sampler.proto index f670ddf..e4c2481 100644 --- a/spark-common/src/main/proto/spark/spark_sampler.proto +++ b/spark-common/src/main/proto/spark/spark_sampler.proto @@ -28,6 +28,7 @@ message SamplerMetadata { map<string, string> server_configurations = 10; int64 end_time = 11; int32 number_of_ticks = 12; + map<string, SourceMetadata> sources = 13; message ThreadDumper { Type type = 1; @@ -58,6 +59,11 @@ message SamplerMetadata { AS_ONE = 2; } } + + message SourceMetadata { + string name = 1; + string version = 2; + } } message ThreadNode { diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricClassSourceLookup.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricClassSourceLookup.java index 9ffac18..51834fc 100644 --- a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricClassSourceLookup.java +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricClassSourceLookup.java @@ -22,8 +22,8 @@ package me.lucko.spark.fabric; import com.google.common.collect.ImmutableMap; +import me.lucko.spark.common.sampler.source.ClassSourceLookup; import me.lucko.spark.common.util.ClassFinder; -import me.lucko.spark.common.util.ClassSourceLookup; import me.lucko.spark.fabric.smap.MixinUtils; import me.lucko.spark.fabric.smap.SourceMap; import me.lucko.spark.fabric.smap.SourceMapProvider; diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricSparkPlugin.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricSparkPlugin.java index 3126f28..9a03b4e 100644 --- a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricSparkPlugin.java +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricSparkPlugin.java @@ -34,11 +34,14 @@ import com.mojang.brigadier.tree.LiteralCommandNode; import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.SparkPlugin; import me.lucko.spark.common.command.sender.CommandSender; -import me.lucko.spark.common.util.ClassSourceLookup; +import me.lucko.spark.common.sampler.source.ClassSourceLookup; +import me.lucko.spark.common.sampler.source.SourceMetadata; import me.lucko.spark.common.util.SparkThreadFactory; import me.lucko.spark.fabric.FabricClassSourceLookup; import me.lucko.spark.fabric.FabricSparkMod; +import net.fabricmc.loader.api.FabricLoader; +import net.fabricmc.loader.api.metadata.Person; import net.minecraft.server.command.CommandOutput; import org.apache.logging.log4j.LogManager; @@ -46,10 +49,12 @@ import org.apache.logging.log4j.Logger;
import java.nio.file.Path; import java.util.Arrays; +import java.util.Collection; import java.util.concurrent.CompletableFuture; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.logging.Level; +import java.util.stream.Collectors; public abstract class FabricSparkPlugin implements SparkPlugin { @@ -110,6 +115,18 @@ public abstract class FabricSparkPlugin implements SparkPlugin { return new FabricClassSourceLookup(); } + @Override + public Collection getKnownSources() { + return SourceMetadata.gather( + FabricLoader.getInstance().getAllMods(), + mod -> mod.getMetadata().getId(), + mod -> mod.getMetadata().getVersion().getFriendlyString(), + mod -> mod.getMetadata().getAuthors().stream() + .map(Person::getName) + .collect(Collectors.joining(", ")) + ); + } + protected CompletableFuture generateSuggestions(CommandSender sender, String[] args, SuggestionsBuilder builder) { SuggestionsBuilder suggestions; diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeClassSourceLookup.java b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeClassSourceLookup.java index 7900bc3..82d66ca 100644 --- a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeClassSourceLookup.java +++ b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeClassSourceLookup.java @@ -20,7 +20,7 @@ package me.lucko.spark.forge; -import me.lucko.spark.common.util.ClassSourceLookup; +import me.lucko.spark.common.sampler.source.ClassSourceLookup; import cpw.mods.modlauncher.TransformingClassLoader; diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeSparkPlugin.java b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeSparkPlugin.java index 36a7ce8..56061b9 100644 --- a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeSparkPlugin.java +++ b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeSparkPlugin.java @@ -34,18 +34,22 @@ import com.mojang.brigadier.tree.LiteralCommandNode; import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.SparkPlugin; import me.lucko.spark.common.command.sender.CommandSender; -import me.lucko.spark.common.util.ClassSourceLookup; +import me.lucko.spark.common.sampler.source.ClassSourceLookup; +import me.lucko.spark.common.sampler.source.SourceMetadata; import me.lucko.spark.common.util.SparkThreadFactory; import me.lucko.spark.forge.ForgeClassSourceLookup; import me.lucko.spark.forge.ForgeSparkMod; import net.minecraft.commands.CommandSource; +import net.minecraftforge.fml.ModList; +import net.minecraftforge.forgespi.language.IModInfo; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import java.nio.file.Path; import java.util.Arrays; +import java.util.Collection; import java.util.concurrent.CompletableFuture; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; @@ -110,6 +114,16 @@ public abstract class ForgeSparkPlugin implements SparkPlugin { return new ForgeClassSourceLookup(); } + @Override + public Collection getKnownSources() { + return SourceMetadata.gather( + ModList.get().getMods(), + IModInfo::getModId, + mod -> mod.getVersion().toString(), + mod -> null // ? 
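            /* SourceMetadata.gather(...) itself is not part of this excerpt. Judging only from
             * the call sites in this patch, it is assumed to look roughly like the following
             * (hypothetical implementation, not taken from the commit):
             *
             *   static <T> List<SourceMetadata> gather(Collection<T> sources,
             *                                          Function<? super T, String> nameFunction,
             *                                          Function<? super T, String> versionFunction,
             *                                          Function<? super T, String> authorFunction) {
             *       List<SourceMetadata> list = new ArrayList<>(sources.size());
             *       for (T source : sources) {
             *           list.add(new SourceMetadata(
             *                   nameFunction.apply(source),
             *                   versionFunction.apply(source),
             *                   authorFunction.apply(source)));
             *       }
             *       return list;
             *   }
             *
             * The null author argument above reflects that Forge's IModInfo seemingly exposes no
             * typed author accessor at this point - hence the "?" marker left in the commit.
             */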
+ ); + } + protected CompletableFuture generateSuggestions(CommandSender sender, String[] args, SuggestionsBuilder builder) { SuggestionsBuilder suggestions; diff --git a/spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomClassSourceLookup.java b/spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomClassSourceLookup.java index 252060e..ca44eea 100644 --- a/spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomClassSourceLookup.java +++ b/spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomClassSourceLookup.java @@ -20,7 +20,7 @@ package me.lucko.spark.minestom; -import me.lucko.spark.common.util.ClassSourceLookup; +import me.lucko.spark.common.sampler.source.ClassSourceLookup; import net.minestom.server.MinecraftServer; import net.minestom.server.extensions.Extension; diff --git a/spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomSparkPlugin.java b/spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomSparkPlugin.java index 2b43cae..9014476 100644 --- a/spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomSparkPlugin.java +++ b/spark-minestom/src/main/java/me/lucko/spark/minestom/MinestomSparkPlugin.java @@ -24,9 +24,10 @@ import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.SparkPlugin; import me.lucko.spark.common.monitor.ping.PlayerPingProvider; import me.lucko.spark.common.platform.PlatformInfo; +import me.lucko.spark.common.sampler.source.ClassSourceLookup; +import me.lucko.spark.common.sampler.source.SourceMetadata; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.tick.TickReporter; -import me.lucko.spark.common.util.ClassSourceLookup; import net.minestom.server.MinecraftServer; import net.minestom.server.command.CommandSender; @@ -45,6 +46,7 @@ import org.jetbrains.annotations.NotNull; import java.nio.file.Path; import java.util.Arrays; +import java.util.Collection; import java.util.logging.Level; import java.util.stream.Stream; @@ -117,6 +119,16 @@ public class MinestomSparkPlugin extends Extension implements SparkPlugin { return new MinestomClassSourceLookup(); } + @Override + public Collection getKnownSources() { + return SourceMetadata.gather( + MinecraftServer.getExtensionManager().getExtensions(), + extension -> extension.getOrigin().getName(), + extension -> extension.getOrigin().getVersion(), + extension -> String.join(", ", extension.getOrigin().getAuthors()) + ); + } + @Override public PlayerPingProvider createPlayerPingProvider() { return new MinestomPlayerPingProvider(); diff --git a/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitClassSourceLookup.java b/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitClassSourceLookup.java index 4fed396..180e0af 100644 --- a/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitClassSourceLookup.java +++ b/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitClassSourceLookup.java @@ -20,7 +20,7 @@ package me.lucko.spark.nukkit; -import me.lucko.spark.common.util.ClassSourceLookup; +import me.lucko.spark.common.sampler.source.ClassSourceLookup; import cn.nukkit.plugin.PluginClassLoader; diff --git a/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitSparkPlugin.java b/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitSparkPlugin.java index 87d9f09..ae21241 100644 --- a/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitSparkPlugin.java +++ b/spark-nukkit/src/main/java/me/lucko/spark/nukkit/NukkitSparkPlugin.java @@ -25,7 +25,7 @@ import me.lucko.spark.common.SparkPlatform; import 
me.lucko.spark.common.SparkPlugin; import me.lucko.spark.common.monitor.ping.PlayerPingProvider; import me.lucko.spark.common.platform.PlatformInfo; -import me.lucko.spark.common.util.ClassSourceLookup; +import me.lucko.spark.common.sampler.source.ClassSourceLookup; import cn.nukkit.command.Command; import cn.nukkit.command.CommandSender; diff --git a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7ClassSourceLookup.java b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7ClassSourceLookup.java index 90f3b8f..899ce58 100644 --- a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7ClassSourceLookup.java +++ b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7ClassSourceLookup.java @@ -20,7 +20,7 @@ package me.lucko.spark.sponge; -import me.lucko.spark.common.util.ClassSourceLookup; +import me.lucko.spark.common.sampler.source.ClassSourceLookup; import org.spongepowered.api.Game; diff --git a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7SparkPlugin.java b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7SparkPlugin.java index e6c9a04..0e3f4eb 100644 --- a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7SparkPlugin.java +++ b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7SparkPlugin.java @@ -29,8 +29,8 @@ import me.lucko.spark.common.monitor.ping.PlayerPingProvider; import me.lucko.spark.common.platform.PlatformInfo; import me.lucko.spark.common.platform.world.WorldInfoProvider; import me.lucko.spark.common.sampler.ThreadDumper; +import me.lucko.spark.common.sampler.source.ClassSourceLookup; import me.lucko.spark.common.tick.TickHook; -import me.lucko.spark.common.util.ClassSourceLookup; import org.slf4j.Logger; import org.spongepowered.api.Game; diff --git a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8ClassSourceLookup.java b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8ClassSourceLookup.java index fa4ac45..7f02e75 100644 --- a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8ClassSourceLookup.java +++ b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8ClassSourceLookup.java @@ -22,7 +22,7 @@ package me.lucko.spark.sponge; import com.google.common.collect.ImmutableMap; -import me.lucko.spark.common.util.ClassSourceLookup; +import me.lucko.spark.common.sampler.source.ClassSourceLookup; import org.spongepowered.api.Game; import org.spongepowered.plugin.PluginCandidate; diff --git a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8SparkPlugin.java b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8SparkPlugin.java index 83b2ec2..b1d31e9 100644 --- a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8SparkPlugin.java +++ b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8SparkPlugin.java @@ -30,8 +30,9 @@ import me.lucko.spark.common.monitor.ping.PlayerPingProvider; import me.lucko.spark.common.platform.PlatformInfo; import me.lucko.spark.common.platform.world.WorldInfoProvider; import me.lucko.spark.common.sampler.ThreadDumper; +import me.lucko.spark.common.sampler.source.ClassSourceLookup; +import me.lucko.spark.common.sampler.source.SourceMetadata; import me.lucko.spark.common.tick.TickHook; -import me.lucko.spark.common.util.ClassSourceLookup; import net.kyori.adventure.text.Component; @@ -52,8 +53,10 @@ import org.spongepowered.api.event.lifecycle.StartedEngineEvent; import org.spongepowered.api.event.lifecycle.StoppingEngineEvent; import org.spongepowered.plugin.PluginContainer; import org.spongepowered.plugin.builtin.jvm.Plugin; +import 
org.spongepowered.plugin.metadata.model.PluginContributor; import java.nio.file.Path; +import java.util.Collection; import java.util.List; import java.util.Optional; import java.util.concurrent.ExecutorService; @@ -177,6 +180,18 @@ public class Sponge8SparkPlugin implements SparkPlugin { return new Sponge8ClassSourceLookup(this.game); } + @Override + public Collection getKnownSources() { + return SourceMetadata.gather( + this.game.pluginManager().plugins(), + plugin -> plugin.metadata().id(), + plugin -> plugin.metadata().version().toString(), + plugin -> plugin.metadata().contributors().stream() + .map(PluginContributor::name) + .collect(Collectors.joining(", ")) + ); + } + @Override public PlayerPingProvider createPlayerPingProvider() { if (this.game.isServerAvailable()) { diff --git a/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocityClassSourceLookup.java b/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocityClassSourceLookup.java index bcb8176..9b697c3 100644 --- a/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocityClassSourceLookup.java +++ b/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocityClassSourceLookup.java @@ -23,7 +23,7 @@ package me.lucko.spark.velocity; import com.velocitypowered.api.plugin.PluginContainer; import com.velocitypowered.api.plugin.PluginManager; -import me.lucko.spark.common.util.ClassSourceLookup; +import me.lucko.spark.common.sampler.source.ClassSourceLookup; import org.checkerframework.checker.nullness.qual.Nullable; diff --git a/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocitySparkPlugin.java b/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocitySparkPlugin.java index 7d9ced8..4a89a4e 100644 --- a/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocitySparkPlugin.java +++ b/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocitySparkPlugin.java @@ -34,11 +34,13 @@ import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.SparkPlugin; import me.lucko.spark.common.monitor.ping.PlayerPingProvider; import me.lucko.spark.common.platform.PlatformInfo; -import me.lucko.spark.common.util.ClassSourceLookup; +import me.lucko.spark.common.sampler.source.ClassSourceLookup; +import me.lucko.spark.common.sampler.source.SourceMetadata; import org.slf4j.Logger; import java.nio.file.Path; +import java.util.Collection; import java.util.List; import java.util.logging.Level; import java.util.stream.Stream; @@ -133,6 +135,16 @@ public class VelocitySparkPlugin implements SparkPlugin, SimpleCommand { return new VelocityClassSourceLookup(this.proxy.getPluginManager()); } + @Override + public Collection getKnownSources() { + return SourceMetadata.gather( + this.proxy.getPluginManager().getPlugins(), + plugin -> plugin.getDescription().getId(), + plugin -> plugin.getDescription().getVersion().orElse("unspecified"), + plugin -> String.join(", ", plugin.getDescription().getAuthors()) + ); + } + @Override public PlayerPingProvider createPlayerPingProvider() { return new VelocityPlayerPingProvider(this.proxy); diff --git a/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4ClassSourceLookup.java b/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4ClassSourceLookup.java index c5c22c3..84840d2 100644 --- a/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4ClassSourceLookup.java +++ b/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4ClassSourceLookup.java @@ -23,7 +23,7 @@ package me.lucko.spark.velocity; import 
com.velocitypowered.api.plugin.PluginContainer; import com.velocitypowered.api.plugin.PluginManager; -import me.lucko.spark.common.util.ClassSourceLookup; +import me.lucko.spark.common.sampler.source.ClassSourceLookup; import org.checkerframework.checker.nullness.qual.Nullable; @@ -48,7 +48,7 @@ public class Velocity4ClassSourceLookup extends ClassSourceLookup.ByClassLoader for (PluginContainer plugin : pluginManager.plugins()) { Object instance = plugin.instance(); if (instance != null) { - this.classLoadersToPlugin.put(instance.getClass().getClassLoader(), plugin.description().name()); + this.classLoadersToPlugin.put(instance.getClass().getClassLoader(), plugin.description().id()); } } } diff --git a/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4SparkPlugin.java b/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4SparkPlugin.java index 0c57689..b638246 100644 --- a/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4SparkPlugin.java +++ b/spark-velocity4/src/main/java/me/lucko/spark/velocity/Velocity4SparkPlugin.java @@ -34,11 +34,13 @@ import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.SparkPlugin; import me.lucko.spark.common.monitor.ping.PlayerPingProvider; import me.lucko.spark.common.platform.PlatformInfo; -import me.lucko.spark.common.util.ClassSourceLookup; +import me.lucko.spark.common.sampler.source.ClassSourceLookup; +import me.lucko.spark.common.sampler.source.SourceMetadata; import org.slf4j.Logger; import java.nio.file.Path; +import java.util.Collection; import java.util.List; import java.util.logging.Level; import java.util.stream.Stream; @@ -133,6 +135,16 @@ public class Velocity4SparkPlugin implements SparkPlugin, SimpleCommand { return new Velocity4ClassSourceLookup(this.proxy.pluginManager()); } + @Override + public Collection getKnownSources() { + return SourceMetadata.gather( + this.proxy.pluginManager().plugins(), + plugin -> plugin.description().id(), + plugin -> plugin.description().version(), + plugin -> String.join(", ", plugin.description().authors()) + ); + } + @Override public PlayerPingProvider createPlayerPingProvider() { return new Velocity4PlayerPingProvider(this.proxy); diff --git a/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogClassSourceLookup.java b/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogClassSourceLookup.java index 36e6a57..2207c9e 100644 --- a/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogClassSourceLookup.java +++ b/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogClassSourceLookup.java @@ -20,7 +20,7 @@ package me.lucko.spark.waterdog; -import me.lucko.spark.common.util.ClassSourceLookup; +import me.lucko.spark.common.sampler.source.ClassSourceLookup; import dev.waterdog.waterdogpe.ProxyServer; import dev.waterdog.waterdogpe.plugin.Plugin; diff --git a/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogSparkPlugin.java b/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogSparkPlugin.java index 07b153a..1a64a98 100644 --- a/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogSparkPlugin.java +++ b/spark-waterdog/src/main/java/me/lucko/spark/waterdog/WaterdogSparkPlugin.java @@ -24,7 +24,8 @@ import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.SparkPlugin; import me.lucko.spark.common.monitor.ping.PlayerPingProvider; import me.lucko.spark.common.platform.PlatformInfo; -import me.lucko.spark.common.util.ClassSourceLookup; +import 
me.lucko.spark.common.sampler.source.ClassSourceLookup; +import me.lucko.spark.common.sampler.source.SourceMetadata; import dev.waterdog.waterdogpe.ProxyServer; import dev.waterdog.waterdogpe.command.Command; @@ -32,6 +33,7 @@ import dev.waterdog.waterdogpe.command.CommandSender; import dev.waterdog.waterdogpe.plugin.Plugin; import java.nio.file.Path; +import java.util.Collection; import java.util.logging.Level; import java.util.stream.Stream; @@ -100,6 +102,16 @@ public class WaterdogSparkPlugin extends Plugin implements SparkPlugin { return new WaterdogClassSourceLookup(getProxy()); } + @Override + public Collection getKnownSources() { + return SourceMetadata.gather( + getProxy().getPluginManager().getPlugins(), + Plugin::getName, + plugin -> plugin.getDescription().getVersion(), + plugin -> plugin.getDescription().getAuthor() + ); + } + @Override public PlayerPingProvider createPlayerPingProvider() { return new WaterdogPlayerPingProvider(getProxy()); -- cgit From a42dda9eebdc8db6c310978d138708c367f95096 Mon Sep 17 00:00:00 2001 From: Luck Date: Mon, 19 Sep 2022 20:08:10 +0100 Subject: Fix issues with temporary files going missing (#225) --- .../java/me/lucko/spark/common/SparkPlatform.java | 8 ++- .../lucko/spark/common/sampler/SamplerBuilder.java | 2 +- .../common/sampler/async/AsyncProfilerAccess.java | 30 +++++--- .../spark/common/sampler/async/AsyncSampler.java | 7 +- .../me/lucko/spark/common/util/TemporaryFiles.java | 81 +++++++++++++++++++--- 5 files changed, 103 insertions(+), 25 deletions(-) (limited to 'spark-common/src') diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java index 1969206..2790a3c 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java +++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java @@ -89,6 +89,7 @@ public class SparkPlatform { private static final DateTimeFormatter DATE_TIME_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd_HH.mm.ss"); private final SparkPlugin plugin; + private final TemporaryFiles temporaryFiles; private final Configuration configuration; private final String viewerUrl; private final BytebinClient bytebinClient; @@ -109,6 +110,7 @@ public class SparkPlatform { public SparkPlatform(SparkPlugin plugin) { this.plugin = plugin; + this.temporaryFiles = new TemporaryFiles(this.plugin.getPluginDirectory().resolve("tmp")); this.configuration = new Configuration(this.plugin.getPluginDirectory().resolve("config.json")); this.viewerUrl = this.configuration.getString("viewerUrl", "https://spark.lucko.me/"); @@ -192,13 +194,17 @@ public class SparkPlatform { SparkApi.unregister(); - TemporaryFiles.deleteTemporaryFiles(); + this.temporaryFiles.deleteTemporaryFiles(); } public SparkPlugin getPlugin() { return this.plugin; } + public TemporaryFiles getTemporaryFiles() { + return this.temporaryFiles; + } + public Configuration getConfiguration() { return this.configuration; } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java index 88b9d91..52a7387 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java @@ -98,7 +98,7 @@ public class SamplerBuilder { Sampler sampler; if (this.ticksOver != -1 && this.tickHook != null) { sampler = new JavaSampler(platform, intervalMicros, 
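        /* Sampler selection, as the if/else chain around this call implements it:
         *   1. tick-monitoring mode (ticksOver set and a TickHook available) -> ticked JavaSampler
         *   2. async-profiler in use, supported on this platform, and the thread dumper is not a
         *      ThreadDumper.Regex -> AsyncSampler
         *   3. otherwise -> plain JavaSampler
         */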
this.threadDumper, this.threadGrouper, this.timeout, this.ignoreSleeping, this.ignoreNative, this.tickHook, this.ticksOver); - } else if (this.useAsyncProfiler && !(this.threadDumper instanceof ThreadDumper.Regex) && AsyncProfilerAccess.INSTANCE.checkSupported(platform)) { + } else if (this.useAsyncProfiler && !(this.threadDumper instanceof ThreadDumper.Regex) && AsyncProfilerAccess.getInstance(platform).checkSupported(platform)) { sampler = new AsyncSampler(platform, intervalMicros, this.threadDumper, this.threadGrouper, this.timeout); } else { sampler = new JavaSampler(platform, intervalMicros, this.threadDumper, this.threadGrouper, this.timeout, this.ignoreSleeping, this.ignoreNative); diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java index ef2c035..abde21d 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java @@ -22,9 +22,9 @@ package me.lucko.spark.common.sampler.async; import com.google.common.collect.ImmutableTable; import com.google.common.collect.Table; +import com.google.common.io.ByteStreams; import me.lucko.spark.common.SparkPlatform; -import me.lucko.spark.common.util.TemporaryFiles; import one.profiler.AsyncProfiler; import one.profiler.Events; @@ -32,19 +32,29 @@ import one.profiler.Events; import java.io.BufferedReader; import java.io.InputStream; import java.io.InputStreamReader; +import java.io.OutputStream; import java.net.URL; import java.nio.file.Files; import java.nio.file.Path; -import java.nio.file.StandardCopyOption; import java.util.Locale; +import java.util.Objects; import java.util.logging.Level; import java.util.stream.Collectors; /** * Provides a bridge between spark and async-profiler. */ -public enum AsyncProfilerAccess { - INSTANCE; +public class AsyncProfilerAccess { + private static AsyncProfilerAccess instance; + + // singleton, needs a SparkPlatform for first init + public static synchronized AsyncProfilerAccess getInstance(SparkPlatform platform) { + if (instance == null) { + Objects.requireNonNull(platform, "platform"); + instance = new AsyncProfilerAccess(platform); + } + return instance; + } /** An instance of the async-profiler Java API. 
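     *
     * <p>Bound to the bundled native library, which {@code load(platform)} below extracts into
     * spark's own temporary directory first. {@code null} if extraction or loading failed.</p>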
*/ private final AsyncProfiler profiler; @@ -55,13 +65,13 @@ public enum AsyncProfilerAccess { /** If profiler is null, contains the reason why setup failed */ private final Exception setupException; - AsyncProfilerAccess() { + AsyncProfilerAccess(SparkPlatform platform) { AsyncProfiler profiler; ProfilingEvent profilingEvent = null; Exception setupException = null; try { - profiler = load(); + profiler = load(platform); if (isEventSupported(profiler, ProfilingEvent.CPU, false)) { profilingEvent = ProfilingEvent.CPU; } else if (isEventSupported(profiler, ProfilingEvent.WALL, true)) { @@ -106,7 +116,7 @@ public enum AsyncProfilerAccess { return this.profiler != null; } - private static AsyncProfiler load() throws Exception { + private static AsyncProfiler load(SparkPlatform platform) throws Exception { // check compatibility String os = System.getProperty("os.name").toLowerCase(Locale.ROOT).replace(" ", ""); String arch = System.getProperty("os.arch").toLowerCase(Locale.ROOT); @@ -135,10 +145,10 @@ public enum AsyncProfilerAccess { throw new IllegalStateException("Could not find " + resource + " in spark jar file"); } - Path extractPath = TemporaryFiles.create("spark-", "-libasyncProfiler.so.tmp"); + Path extractPath = platform.getTemporaryFiles().create("spark-", "-libasyncProfiler.so.tmp"); - try (InputStream in = profilerResource.openStream()) { - Files.copy(in, extractPath, StandardCopyOption.REPLACE_EXISTING); + try (InputStream in = profilerResource.openStream(); OutputStream out = Files.newOutputStream(extractPath)) { + ByteStreams.copy(in, out); } // get an instance of async-profiler diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java index 37ccd96..7d9cb81 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java @@ -31,7 +31,6 @@ import me.lucko.spark.common.sampler.async.jfr.JfrReader; import me.lucko.spark.common.sampler.node.MergeMode; import me.lucko.spark.common.sampler.node.ThreadNode; import me.lucko.spark.common.sampler.source.ClassSourceLookup; -import me.lucko.spark.common.util.TemporaryFiles; import me.lucko.spark.proto.SparkSamplerProtos.SamplerData; import one.profiler.AsyncProfiler; @@ -67,7 +66,7 @@ public class AsyncSampler extends AbstractSampler { public AsyncSampler(SparkPlatform platform, int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime) { super(platform, interval, threadDumper, endTime); - this.profiler = AsyncProfilerAccess.INSTANCE.getProfiler(); + this.profiler = AsyncProfilerAccess.getInstance(platform).getProfiler(); this.dataAggregator = new AsyncDataAggregator(threadGrouper); } @@ -93,12 +92,12 @@ public class AsyncSampler extends AbstractSampler { super.start(); try { - this.outputFile = TemporaryFiles.create("spark-profile-", ".jfr.tmp"); + this.outputFile = this.platform.getTemporaryFiles().create("spark-", "-profile-data.jfr.tmp"); } catch (IOException e) { throw new RuntimeException("Unable to create temporary output file", e); } - String command = "start,event=" + AsyncProfilerAccess.INSTANCE.getProfilingEvent() + ",interval=" + this.interval + "us,threads,jfr,file=" + this.outputFile.toString(); + String command = "start,event=" + AsyncProfilerAccess.getInstance(this.platform).getProfilingEvent() + ",interval=" + this.interval + "us,threads,jfr,file=" + 
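        /* The assembled string follows async-profiler's comma-separated argument grammar:
         *   start,event=<event>,interval=<n>us,threads,jfr,file=<path>
         * e.g. "start,event=cpu,interval=4000us,threads,jfr,file=/.../spark-...-profile-data.jfr.tmp"
         * (",filter" is appended just below when only specific threads are profiled.)
         */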
this.outputFile.toString(); if (this.threadDumper instanceof ThreadDumper.Specific) { command += ",filter"; } diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/TemporaryFiles.java b/spark-common/src/main/java/me/lucko/spark/common/util/TemporaryFiles.java index 8a4a621..91a474c 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/util/TemporaryFiles.java +++ b/spark-common/src/main/java/me/lucko/spark/common/util/TemporaryFiles.java @@ -20,10 +20,18 @@ package me.lucko.spark.common.util; +import com.google.common.collect.ImmutableList; + import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.FileSystems; import java.nio.file.Files; import java.nio.file.Path; +import java.nio.file.attribute.FileAttribute; +import java.nio.file.attribute.PosixFilePermission; +import java.nio.file.attribute.PosixFilePermissions; import java.util.Collections; +import java.util.EnumSet; import java.util.HashSet; import java.util.Iterator; import java.util.Set; @@ -32,23 +40,47 @@ import java.util.Set; * Utility for handling temporary files. */ public final class TemporaryFiles { - private TemporaryFiles() {} - private static final Set DELETE_SET = Collections.synchronizedSet(new HashSet<>()); + public static final FileAttribute[] OWNER_ONLY_FILE_PERMISSIONS; + + static { + boolean isPosix = FileSystems.getDefault().supportedFileAttributeViews().contains("posix"); + if (isPosix) { + OWNER_ONLY_FILE_PERMISSIONS = new FileAttribute[]{PosixFilePermissions.asFileAttribute(EnumSet.of( + PosixFilePermission.OWNER_READ, PosixFilePermission.OWNER_WRITE + ))}; + } else { + OWNER_ONLY_FILE_PERMISSIONS = new FileAttribute[0]; + } + } + + private final Path tmpDirectory; + private final Set files = Collections.synchronizedSet(new HashSet<>()); - public static Path create(String prefix, String suffix) throws IOException { - return register(Files.createTempFile(prefix, suffix)); + public TemporaryFiles(Path tmpDirectory) { + this.tmpDirectory = tmpDirectory; } - public static Path register(Path path) { + public Path create(String prefix, String suffix) throws IOException { + Path file; + if (ensureDirectoryIsReady()) { + String name = prefix + Long.toHexString(System.nanoTime()) + suffix; + file = Files.createFile(this.tmpDirectory.resolve(name), OWNER_ONLY_FILE_PERMISSIONS); + } else { + file = Files.createTempFile(prefix, suffix); + } + return register(file); + } + + public Path register(Path path) { path.toFile().deleteOnExit(); - DELETE_SET.add(path); + this.files.add(path); return path; } - public static void deleteTemporaryFiles() { - synchronized (DELETE_SET) { - for (Iterator iterator = DELETE_SET.iterator(); iterator.hasNext(); ) { + public void deleteTemporaryFiles() { + synchronized (this.files) { + for (Iterator iterator = this.files.iterator(); iterator.hasNext(); ) { Path path = iterator.next(); try { Files.deleteIfExists(path); @@ -60,4 +92,35 @@ public final class TemporaryFiles { } } + private boolean ensureDirectoryIsReady() { + if (Boolean.parseBoolean(System.getProperty("spark.useOsTmpDir", "false"))) { + return false; + } + + if (Files.isDirectory(this.tmpDirectory)) { + return true; + } + + try { + Files.createDirectories(this.tmpDirectory); + + Files.write(this.tmpDirectory.resolve("about.txt"), ImmutableList.of( + "# What is this directory?", + "", + "* In order to perform certain functions, spark sometimes needs to write temporary data to the disk. 
", + "* Previously, a temporary directory provided by the operating system was used for this purpose. ", + "* However, this proved to be unreliable in some circumstances, so spark now stores temporary data here instead!", + "", + "spark will automatically cleanup the contents of this directory. " , + "(but if for some reason it doesn't, if the server is stopped, you can freely delete any files ending in .tmp)", + "", + "tl;dr: spark uses this folder to store some temporary data." + ), StandardCharsets.UTF_8); + + return true; + } catch (IOException e) { + return false; + } + } + } -- cgit From dbdd3eb1344b837abb13538b9c55d1d99e697e54 Mon Sep 17 00:00:00 2001 From: Luck Date: Thu, 22 Sep 2022 22:06:10 +0100 Subject: Allow platforms to pass extra misc metadata to the viewer --- .../spark/bukkit/BukkitServerConfigProvider.java | 4 +- .../java/me/lucko/spark/common/SparkPlugin.java | 12 +++- .../spark/common/platform/MetadataProvider.java | 47 ++++++++++++++ .../serverconfig/AbstractServerConfigProvider.java | 73 --------------------- .../serverconfig/ServerConfigProvider.java | 66 ++++++++++++------- .../spark/common/sampler/AbstractSampler.java | 10 ++- .../src/main/proto/spark/spark_sampler.proto | 1 + .../spark/fabric/FabricExtraMetadataProvider.java | 75 ++++++++++++++++++++++ .../spark/fabric/FabricServerConfigProvider.java | 4 +- .../fabric/plugin/FabricClientSparkPlugin.java | 7 ++ .../fabric/plugin/FabricServerSparkPlugin.java | 7 ++ .../spark/forge/ForgeExtraMetadataProvider.java | 75 ++++++++++++++++++++++ .../spark/forge/ForgeServerConfigProvider.java | 4 +- .../spark/forge/plugin/ForgeClientSparkPlugin.java | 7 ++ .../spark/forge/plugin/ForgeServerSparkPlugin.java | 7 ++ 15 files changed, 295 insertions(+), 104 deletions(-) create mode 100644 spark-common/src/main/java/me/lucko/spark/common/platform/MetadataProvider.java delete mode 100644 spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java create mode 100644 spark-fabric/src/main/java/me/lucko/spark/fabric/FabricExtraMetadataProvider.java create mode 100644 spark-forge/src/main/java/me/lucko/spark/forge/ForgeExtraMetadataProvider.java (limited to 'spark-common/src') diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java index d095bed..5db1b38 100644 --- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java +++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java @@ -28,10 +28,10 @@ import com.google.gson.JsonElement; import com.google.gson.JsonObject; import com.google.gson.JsonSerializer; -import me.lucko.spark.common.platform.serverconfig.AbstractServerConfigProvider; import me.lucko.spark.common.platform.serverconfig.ConfigParser; import me.lucko.spark.common.platform.serverconfig.ExcludedConfigFilter; import me.lucko.spark.common.platform.serverconfig.PropertiesConfigParser; +import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider; import org.bukkit.Bukkit; import org.bukkit.World; @@ -51,7 +51,7 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; -public class BukkitServerConfigProvider extends AbstractServerConfigProvider { +public class BukkitServerConfigProvider extends ServerConfigProvider { /** A map of provided files and their type */ private static final Map FILES; diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java 
b/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java index e2a2dbd..b7aef2a 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java +++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java @@ -23,6 +23,7 @@ package me.lucko.spark.common; import me.lucko.spark.api.Spark; import me.lucko.spark.common.command.sender.CommandSender; import me.lucko.spark.common.monitor.ping.PlayerPingProvider; +import me.lucko.spark.common.platform.MetadataProvider; import me.lucko.spark.common.platform.PlatformInfo; import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider; import me.lucko.spark.common.platform.world.WorldInfoProvider; @@ -161,7 +162,16 @@ public interface SparkPlugin { * @return the server config provider function */ default ServerConfigProvider createServerConfigProvider() { - return ServerConfigProvider.NO_OP; + return null; + } + + /** + * Creates a metadata provider for the platform. + * + * @return the platform extra metadata provider + */ + default MetadataProvider createExtraMetadataProvider() { + return null; } /** diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/MetadataProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/MetadataProvider.java new file mode 100644 index 0000000..39022b4 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/MetadataProvider.java @@ -0,0 +1,47 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.platform; + +import com.google.gson.JsonElement; + +import java.util.LinkedHashMap; +import java.util.Map; + +/** + * Function to export dynamic metadata to be displayed within the spark viewer. + */ +@FunctionalInterface +public interface MetadataProvider { + + /** + * Produces a map of the metadata. + * + * @return the metadata + */ + Map get(); + + default Map export() { + Map map = new LinkedHashMap<>(); + get().forEach((key, value) -> map.put(key, value.toString())); + return map; + } + +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java deleted file mode 100644 index 559ae95..0000000 --- a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * This file is part of spark. - * - * Copyright (c) lucko (Luck) - * Copyright (c) contributors - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. 
- * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - */ - -package me.lucko.spark.common.platform.serverconfig; - -import com.google.common.collect.ImmutableMap; -import com.google.gson.JsonElement; - -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.List; -import java.util.Map; - -/** - * Abstract implementation of {@link ServerConfigProvider}. - * - *
<p>This implementation is able to delete hidden paths from - * the configurations before they are sent to the viewer.</p>
- */ -public abstract class AbstractServerConfigProvider implements ServerConfigProvider { - private final Map files; - private final ExcludedConfigFilter hiddenPathFilters; - - protected AbstractServerConfigProvider(Map files, Collection hiddenPaths) { - this.files = files; - this.hiddenPathFilters = new ExcludedConfigFilter(hiddenPaths); - } - - @Override - public final Map loadServerConfigurations() { - ImmutableMap.Builder builder = ImmutableMap.builder(); - - this.files.forEach((path, parser) -> { - try { - JsonElement json = parser.load(path, this.hiddenPathFilters); - if (json == null) { - return; - } - builder.put(path, json); - } catch (Exception e) { - e.printStackTrace(); - } - }); - - return builder.build(); - } - - protected static List getSystemPropertyList(String property) { - String value = System.getProperty(property); - return value == null - ? Collections.emptyList() - : Arrays.asList(value.split(",")); - } - -} diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java index c66305f..485f215 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java @@ -20,37 +20,57 @@ package me.lucko.spark.common.platform.serverconfig; +import com.google.common.collect.ImmutableMap; import com.google.gson.JsonElement; +import me.lucko.spark.common.platform.MetadataProvider; + +import java.util.Arrays; +import java.util.Collection; import java.util.Collections; -import java.util.LinkedHashMap; +import java.util.List; import java.util.Map; /** - * Function to export server configuration files for access within the spark viewer. + * Abstract implementation of {@link MetadataProvider} which + * provides server configuration data. + * + *
<p>This implementation is able to delete hidden paths from + * the configurations before they are sent to the viewer.</p>
*/ -@FunctionalInterface -public interface ServerConfigProvider { - - /** - * Loads a map of the server configuration files. - * - *
<p>The key is the name of the file and the value is a - * {@link JsonElement} of the contents.</p>
- * - * @return the exported server configurations - */ - Map loadServerConfigurations(); - - default Map exportServerConfigurations() { - Map map = new LinkedHashMap<>(); - loadServerConfigurations().forEach((key, value) -> map.put(key, value.toString())); - return map; +public abstract class ServerConfigProvider implements MetadataProvider { + private final Map files; + private final ExcludedConfigFilter hiddenPathFilters; + + protected ServerConfigProvider(Map files, Collection hiddenPaths) { + this.files = files; + this.hiddenPathFilters = new ExcludedConfigFilter(hiddenPaths); + } + + @Override + public final Map get() { + ImmutableMap.Builder builder = ImmutableMap.builder(); + + this.files.forEach((path, parser) -> { + try { + JsonElement json = parser.load(path, this.hiddenPathFilters); + if (json == null) { + return; + } + builder.put(path, json); + } catch (Exception e) { + e.printStackTrace(); + } + }); + + return builder.build(); } - /** - * A no-op implementation - */ - ServerConfigProvider NO_OP = Collections::emptyMap; + protected static List getSystemPropertyList(String property) { + String value = System.getProperty(property); + return value == null + ? Collections.emptyList() + : Arrays.asList(value.split(",")); + } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java index 7b57504..e20a2a8 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java @@ -23,6 +23,7 @@ package me.lucko.spark.common.sampler; import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.command.sender.CommandSender; import me.lucko.spark.common.monitor.memory.GarbageCollectorStatistics; +import me.lucko.spark.common.platform.MetadataProvider; import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider; import me.lucko.spark.common.sampler.aggregator.DataAggregator; import me.lucko.spark.common.sampler.node.MergeMode; @@ -148,7 +149,14 @@ public abstract class AbstractSampler implements Sampler { try { ServerConfigProvider serverConfigProvider = platform.getPlugin().createServerConfigProvider(); - metadata.putAllServerConfigurations(serverConfigProvider.exportServerConfigurations()); + metadata.putAllServerConfigurations(serverConfigProvider.export()); + } catch (Exception e) { + e.printStackTrace(); + } + + try { + MetadataProvider extraMetadataProvider = platform.getPlugin().createExtraMetadataProvider(); + metadata.putAllExtraPlatformMetadata(extraMetadataProvider.export()); } catch (Exception e) { e.printStackTrace(); } diff --git a/spark-common/src/main/proto/spark/spark_sampler.proto b/spark-common/src/main/proto/spark/spark_sampler.proto index e4c2481..3f30fb2 100644 --- a/spark-common/src/main/proto/spark/spark_sampler.proto +++ b/spark-common/src/main/proto/spark/spark_sampler.proto @@ -29,6 +29,7 @@ message SamplerMetadata { int64 end_time = 11; int32 number_of_ticks = 12; map sources = 13; + map extra_platform_metadata = 14; message ThreadDumper { Type type = 1; diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricExtraMetadataProvider.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricExtraMetadataProvider.java new file mode 100644 index 0000000..9eb2694 --- /dev/null +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricExtraMetadataProvider.java @@ -0,0 +1,75 @@ +/* + * This file is part of 
spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.fabric; + +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; + +import me.lucko.spark.common.platform.MetadataProvider; + +import net.minecraft.resource.ResourcePackManager; +import net.minecraft.resource.ResourcePackProfile; +import net.minecraft.resource.ResourcePackSource; + +import java.util.LinkedHashMap; +import java.util.Map; + +public class FabricExtraMetadataProvider implements MetadataProvider { + + private final ResourcePackManager resourcePackManager; + + public FabricExtraMetadataProvider(ResourcePackManager resourcePackManager) { + this.resourcePackManager = resourcePackManager; + } + + @Override + public Map get() { + Map metadata = new LinkedHashMap<>(); + metadata.put("datapacks", datapackMetadata()); + return metadata; + } + + private JsonElement datapackMetadata() { + JsonObject datapacks = new JsonObject(); + for (ResourcePackProfile profile : this.resourcePackManager.getEnabledProfiles()) { + JsonObject obj = new JsonObject(); + obj.addProperty("name", profile.getDisplayName().getString()); + obj.addProperty("description", profile.getDescription().getString()); + obj.addProperty("source", resourcePackSource(profile.getSource())); + datapacks.add(profile.getName(), obj); + } + return datapacks; + } + + private static String resourcePackSource(ResourcePackSource source) { + if (source == ResourcePackSource.PACK_SOURCE_NONE) { + return "none"; + } else if (source == ResourcePackSource.PACK_SOURCE_BUILTIN) { + return "builtin"; + } else if (source == ResourcePackSource.PACK_SOURCE_WORLD) { + return "world"; + } else if (source == ResourcePackSource.PACK_SOURCE_SERVER) { + return "server"; + } else { + return "unknown"; + } + } +} diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricServerConfigProvider.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricServerConfigProvider.java index 18079d3..325a324 100644 --- a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricServerConfigProvider.java +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricServerConfigProvider.java @@ -23,14 +23,14 @@ package me.lucko.spark.fabric; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; -import me.lucko.spark.common.platform.serverconfig.AbstractServerConfigProvider; import me.lucko.spark.common.platform.serverconfig.ConfigParser; import me.lucko.spark.common.platform.serverconfig.PropertiesConfigParser; +import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider; import java.util.Collection; import java.util.Map; -public class FabricServerConfigProvider extends AbstractServerConfigProvider { +public class FabricServerConfigProvider extends ServerConfigProvider { /** A map of provided files and their type */ private static final Map FILES; diff --git 
a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricClientSparkPlugin.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricClientSparkPlugin.java index 0ef6620..faf4eef 100644 --- a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricClientSparkPlugin.java +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricClientSparkPlugin.java @@ -28,12 +28,14 @@ import com.mojang.brigadier.suggestion.SuggestionProvider; import com.mojang.brigadier.suggestion.Suggestions; import com.mojang.brigadier.suggestion.SuggestionsBuilder; +import me.lucko.spark.common.platform.MetadataProvider; import me.lucko.spark.common.platform.PlatformInfo; import me.lucko.spark.common.platform.world.WorldInfoProvider; import me.lucko.spark.common.sampler.ThreadDumper; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.tick.TickReporter; import me.lucko.spark.fabric.FabricCommandSender; +import me.lucko.spark.fabric.FabricExtraMetadataProvider; import me.lucko.spark.fabric.FabricPlatformInfo; import me.lucko.spark.fabric.FabricSparkMod; import me.lucko.spark.fabric.FabricTickHook; @@ -137,6 +139,11 @@ public class FabricClientSparkPlugin extends FabricSparkPlugin implements Comman return new FabricTickReporter.Client(); } + @Override + public MetadataProvider createExtraMetadataProvider() { + return new FabricExtraMetadataProvider(this.minecraft.getResourcePackManager()); + } + @Override public WorldInfoProvider createWorldInfoProvider() { return new FabricWorldInfoProvider.Client(this.minecraft); diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java index f840f5e..c528e5b 100644 --- a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java @@ -30,6 +30,7 @@ import com.mojang.brigadier.suggestion.SuggestionsBuilder; import me.lucko.fabric.api.permissions.v0.Permissions; import me.lucko.spark.common.monitor.ping.PlayerPingProvider; +import me.lucko.spark.common.platform.MetadataProvider; import me.lucko.spark.common.platform.PlatformInfo; import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider; import me.lucko.spark.common.platform.world.WorldInfoProvider; @@ -37,6 +38,7 @@ import me.lucko.spark.common.sampler.ThreadDumper; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.tick.TickReporter; import me.lucko.spark.fabric.FabricCommandSender; +import me.lucko.spark.fabric.FabricExtraMetadataProvider; import me.lucko.spark.fabric.FabricPlatformInfo; import me.lucko.spark.fabric.FabricPlayerPingProvider; import me.lucko.spark.fabric.FabricServerConfigProvider; @@ -162,6 +164,11 @@ public class FabricServerSparkPlugin extends FabricSparkPlugin implements Comman return new FabricServerConfigProvider(); } + @Override + public MetadataProvider createExtraMetadataProvider() { + return new FabricExtraMetadataProvider(this.server.getDataPackManager()); + } + @Override public WorldInfoProvider createWorldInfoProvider() { return new FabricWorldInfoProvider.Server(this.server); diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeExtraMetadataProvider.java b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeExtraMetadataProvider.java new file mode 100644 index 0000000..cac2771 --- /dev/null +++ 
b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeExtraMetadataProvider.java @@ -0,0 +1,75 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.forge; + +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; + +import me.lucko.spark.common.platform.MetadataProvider; + +import net.minecraft.server.packs.repository.Pack; +import net.minecraft.server.packs.repository.PackRepository; +import net.minecraft.server.packs.repository.PackSource; + +import java.util.LinkedHashMap; +import java.util.Map; + +public class ForgeExtraMetadataProvider implements MetadataProvider { + + private final PackRepository resourcePackManager; + + public ForgeExtraMetadataProvider(PackRepository resourcePackManager) { + this.resourcePackManager = resourcePackManager; + } + + @Override + public Map get() { + Map metadata = new LinkedHashMap<>(); + metadata.put("datapacks", datapackMetadata()); + return metadata; + } + + private JsonElement datapackMetadata() { + JsonObject datapacks = new JsonObject(); + for (Pack profile : this.resourcePackManager.getSelectedPacks()) { + JsonObject obj = new JsonObject(); + obj.addProperty("name", profile.getTitle().getString()); + obj.addProperty("description", profile.getDescription().getString()); + obj.addProperty("source", resourcePackSource(profile.getPackSource())); + datapacks.add(profile.getId(), obj); + } + return datapacks; + } + + private static String resourcePackSource(PackSource source) { + if (source == PackSource.DEFAULT) { + return "none"; + } else if (source == PackSource.BUILT_IN) { + return "builtin"; + } else if (source == PackSource.WORLD) { + return "world"; + } else if (source == PackSource.SERVER) { + return "server"; + } else { + return "unknown"; + } + } +} diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeServerConfigProvider.java b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeServerConfigProvider.java index baa1358..6feba52 100644 --- a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeServerConfigProvider.java +++ b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeServerConfigProvider.java @@ -23,14 +23,14 @@ package me.lucko.spark.forge; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; -import me.lucko.spark.common.platform.serverconfig.AbstractServerConfigProvider; import me.lucko.spark.common.platform.serverconfig.ConfigParser; import me.lucko.spark.common.platform.serverconfig.PropertiesConfigParser; +import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider; import java.util.Collection; import java.util.Map; -public class ForgeServerConfigProvider extends AbstractServerConfigProvider { +public class ForgeServerConfigProvider extends ServerConfigProvider { /** A map of provided files and their type */ private static final Map FILES; diff 
--git a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeClientSparkPlugin.java b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeClientSparkPlugin.java index a4c6bd1..a8c7c92 100644 --- a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeClientSparkPlugin.java +++ b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeClientSparkPlugin.java @@ -27,12 +27,14 @@ import com.mojang.brigadier.suggestion.SuggestionProvider; import com.mojang.brigadier.suggestion.Suggestions; import com.mojang.brigadier.suggestion.SuggestionsBuilder; +import me.lucko.spark.common.platform.MetadataProvider; import me.lucko.spark.common.platform.PlatformInfo; import me.lucko.spark.common.platform.world.WorldInfoProvider; import me.lucko.spark.common.sampler.ThreadDumper; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.tick.TickReporter; import me.lucko.spark.forge.ForgeCommandSender; +import me.lucko.spark.forge.ForgeExtraMetadataProvider; import me.lucko.spark.forge.ForgePlatformInfo; import me.lucko.spark.forge.ForgeSparkMod; import me.lucko.spark.forge.ForgeTickHook; @@ -136,6 +138,11 @@ public class ForgeClientSparkPlugin extends ForgeSparkPlugin implements Command< return new ForgeWorldInfoProvider.Client(this.minecraft); } + @Override + public MetadataProvider createExtraMetadataProvider() { + return new ForgeExtraMetadataProvider(this.minecraft.getResourcePackRepository()); + } + @Override public PlatformInfo getPlatformInfo() { return new ForgePlatformInfo(PlatformInfo.Type.CLIENT); diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java index 1aeb2b1..56d30b7 100644 --- a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java +++ b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java @@ -30,6 +30,7 @@ import com.mojang.brigadier.suggestion.Suggestions; import com.mojang.brigadier.suggestion.SuggestionsBuilder; import me.lucko.spark.common.monitor.ping.PlayerPingProvider; +import me.lucko.spark.common.platform.MetadataProvider; import me.lucko.spark.common.platform.PlatformInfo; import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider; import me.lucko.spark.common.platform.world.WorldInfoProvider; @@ -37,6 +38,7 @@ import me.lucko.spark.common.sampler.ThreadDumper; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.tick.TickReporter; import me.lucko.spark.forge.ForgeCommandSender; +import me.lucko.spark.forge.ForgeExtraMetadataProvider; import me.lucko.spark.forge.ForgePlatformInfo; import me.lucko.spark.forge.ForgePlayerPingProvider; import me.lucko.spark.forge.ForgeServerConfigProvider; @@ -219,6 +221,11 @@ public class ForgeServerSparkPlugin extends ForgeSparkPlugin implements Command< return new ForgeServerConfigProvider(); } + @Override + public MetadataProvider createExtraMetadataProvider() { + return new ForgeExtraMetadataProvider(this.server.getPackRepository()); + } + @Override public WorldInfoProvider createWorldInfoProvider() { return new ForgeWorldInfoProvider.Server(this.server); -- cgit From c4b1eccd9cd51e348983fab42ced78166f39cb0e Mon Sep 17 00:00:00 2001 From: Luck Date: Sun, 2 Oct 2022 20:16:03 +0100 Subject: Fix NPE caused by extraMetadataProvider being null --- .../src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) (limited to 'spark-common/src') 
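Context for the fix below: the previous commit gave SparkPlugin.createExtraMetadataProvider() a default implementation returning null, so any platform that did not override it (at this point, everything other than Fabric and Forge) hit a NullPointerException during metadata collection. Because the call site sits inside a try/catch, the symptom was a printed stack trace on every profile export rather than a hard failure. A condensed before/after sketch (names as in the diff that follows):

    // before: the provider may be null for platforms using the default implementation
    MetadataProvider extraMetadataProvider = platform.getPlugin().createExtraMetadataProvider();
    metadata.putAllExtraPlatformMetadata(extraMetadataProvider.export()); // NPE here

    // after: platforms with no extra metadata are simply skipped
    if (extraMetadataProvider != null) {
        metadata.putAllExtraPlatformMetadata(extraMetadataProvider.export());
    }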
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java index e20a2a8..6fc5a10 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java @@ -156,7 +156,9 @@ public abstract class AbstractSampler implements Sampler { try { MetadataProvider extraMetadataProvider = platform.getPlugin().createExtraMetadataProvider(); - metadata.putAllExtraPlatformMetadata(extraMetadataProvider.export()); + if (extraMetadataProvider != null) { + metadata.putAllExtraPlatformMetadata(extraMetadataProvider.export()); + } } catch (Exception e) { e.printStackTrace(); } -- cgit From d31f3c7bdf03c874ff9518d47d060adc18322d6b Mon Sep 17 00:00:00 2001 From: lucko Date: Fri, 7 Oct 2022 20:26:24 +0100 Subject: Split profiler output into windows (#253) --- .../common/command/modules/SamplerModule.java | 14 +- .../spark/common/sampler/AbstractSampler.java | 42 ++-- .../me/lucko/spark/common/sampler/Sampler.java | 4 +- .../spark/common/sampler/ThreadNodeOrder.java | 52 ---- .../common/sampler/async/AsyncDataAggregator.java | 4 +- .../common/sampler/async/AsyncProfilerAccess.java | 4 +- .../common/sampler/async/AsyncProfilerJob.java | 264 ++++++++++++++++++++ .../spark/common/sampler/async/AsyncSampler.java | 255 ++++++-------------- .../common/sampler/async/JfrParsingException.java | 27 +++ .../spark/common/sampler/async/ProfileSegment.java | 50 ++++ .../spark/common/sampler/async/jfr/Dictionary.java | 4 + .../common/sampler/java/JavaDataAggregator.java | 7 +- .../spark/common/sampler/java/JavaSampler.java | 56 ++++- .../common/sampler/java/SimpleDataAggregator.java | 4 +- .../common/sampler/java/TickedDataAggregator.java | 41 ++-- .../spark/common/sampler/node/AbstractNode.java | 70 +++--- .../spark/common/sampler/node/StackTraceNode.java | 77 +----- .../spark/common/sampler/node/ThreadNode.java | 42 +++- .../sampler/window/ProfilingWindowUtils.java | 36 +++ .../common/sampler/window/ProtoTimeEncoder.java | 94 ++++++++ .../sampler/window/WindowStatisticsCollector.java | 267 +++++++++++++++++++++ spark-common/src/main/proto/spark/spark.proto | 9 + .../src/main/proto/spark/spark_sampler.proto | 15 +- 23 files changed, 1027 insertions(+), 411 deletions(-) delete mode 100644 spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadNodeOrder.java create mode 100644 spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerJob.java create mode 100644 spark-common/src/main/java/me/lucko/spark/common/sampler/async/JfrParsingException.java create mode 100644 spark-common/src/main/java/me/lucko/spark/common/sampler/window/ProfilingWindowUtils.java create mode 100644 spark-common/src/main/java/me/lucko/spark/common/sampler/window/ProtoTimeEncoder.java create mode 100644 spark-common/src/main/java/me/lucko/spark/common/sampler/window/WindowStatisticsCollector.java (limited to 'spark-common/src') diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java index 2afed64..c1e4981 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java @@ -35,7 +35,6 @@ import me.lucko.spark.common.sampler.Sampler; import 
me.lucko.spark.common.sampler.SamplerBuilder; import me.lucko.spark.common.sampler.ThreadDumper; import me.lucko.spark.common.sampler.ThreadGrouper; -import me.lucko.spark.common.sampler.ThreadNodeOrder; import me.lucko.spark.common.sampler.async.AsyncSampler; import me.lucko.spark.common.sampler.node.MergeMode; import me.lucko.spark.common.sampler.source.ClassSourceLookup; @@ -94,7 +93,6 @@ public class SamplerModule implements CommandModule { .argumentUsage("not-combined", null) .argumentUsage("force-java-sampler", null) .argumentUsage("stop --comment", "comment") - .argumentUsage("stop --order-by-time", null) .argumentUsage("stop --save-to-file", null) .executor(this::profiler) .tabCompleter((platform, sender, arguments) -> { @@ -103,7 +101,7 @@ public class SamplerModule implements CommandModule { } if (arguments.contains("--stop") || arguments.contains("--upload")) { - return TabCompleter.completeForOpts(arguments, "--order-by-time", "--comment", "--save-to-file"); + return TabCompleter.completeForOpts(arguments, "--comment", "--save-to-file"); } List opts = new ArrayList<>(Arrays.asList("--info", "--stop", "--cancel", @@ -249,14 +247,13 @@ public class SamplerModule implements CommandModule { // await the result if (timeoutSeconds != -1) { - ThreadNodeOrder threadOrder = arguments.boolFlag("order-by-time") ? ThreadNodeOrder.BY_TIME : ThreadNodeOrder.BY_NAME; String comment = Iterables.getFirst(arguments.stringFlag("comment"), null); MethodDisambiguator methodDisambiguator = new MethodDisambiguator(); MergeMode mergeMode = arguments.boolFlag("separate-parent-calls") ? MergeMode.separateParentCalls(methodDisambiguator) : MergeMode.sameMethod(methodDisambiguator); boolean saveToFile = arguments.boolFlag("save-to-file"); future.thenAcceptAsync(s -> { resp.broadcastPrefixed(text("The active profiler has completed! Uploading results...")); - handleUpload(platform, resp, s, threadOrder, comment, mergeMode, saveToFile); + handleUpload(platform, resp, s, comment, mergeMode, saveToFile); }); } } @@ -293,18 +290,17 @@ public class SamplerModule implements CommandModule { } else { this.activeSampler.stop(); resp.broadcastPrefixed(text("The active profiler has been stopped! Uploading results...")); - ThreadNodeOrder threadOrder = arguments.boolFlag("order-by-time") ? ThreadNodeOrder.BY_TIME : ThreadNodeOrder.BY_NAME; String comment = Iterables.getFirst(arguments.stringFlag("comment"), null); MethodDisambiguator methodDisambiguator = new MethodDisambiguator(); MergeMode mergeMode = arguments.boolFlag("separate-parent-calls") ? 
MergeMode.separateParentCalls(methodDisambiguator) : MergeMode.sameMethod(methodDisambiguator); boolean saveToFile = arguments.boolFlag("save-to-file"); - handleUpload(platform, resp, this.activeSampler, threadOrder, comment, mergeMode, saveToFile); + handleUpload(platform, resp, this.activeSampler, comment, mergeMode, saveToFile); this.activeSampler = null; } } - private void handleUpload(SparkPlatform platform, CommandResponseHandler resp, Sampler sampler, ThreadNodeOrder threadOrder, String comment, MergeMode mergeMode, boolean saveToFileFlag) { - SparkSamplerProtos.SamplerData output = sampler.toProto(platform, resp.sender(), threadOrder, comment, mergeMode, ClassSourceLookup.create(platform)); + private void handleUpload(SparkPlatform platform, CommandResponseHandler resp, Sampler sampler, String comment, MergeMode mergeMode, boolean saveToFileFlag) { + SparkSamplerProtos.SamplerData output = sampler.toProto(platform, resp.sender(), comment, mergeMode, ClassSourceLookup.create(platform)); boolean saveToFile = false; if (saveToFileFlag) { diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java index 6fc5a10..c650738 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java @@ -30,7 +30,10 @@ import me.lucko.spark.common.sampler.node.MergeMode; import me.lucko.spark.common.sampler.node.ThreadNode; import me.lucko.spark.common.sampler.source.ClassSourceLookup; import me.lucko.spark.common.sampler.source.SourceMetadata; +import me.lucko.spark.common.sampler.window.ProtoTimeEncoder; +import me.lucko.spark.common.sampler.window.WindowStatisticsCollector; import me.lucko.spark.common.tick.TickHook; +import me.lucko.spark.proto.SparkProtos; import me.lucko.spark.proto.SparkSamplerProtos.SamplerData; import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata; @@ -58,12 +61,12 @@ public abstract class AbstractSampler implements Sampler { /** The time when sampling first began */ protected long startTime = -1; - /** The game tick when sampling first began */ - protected int startTick = -1; - /** The unix timestamp (in millis) when this sampler should automatically complete. 
*/ protected final long autoEndTime; // -1 for nothing + /** Collects statistics for each window in the sample */ + protected final WindowStatisticsCollector windowStatisticsCollector; + /** A future to encapsulate the completion of this sampler instance */ protected final CompletableFuture future = new CompletableFuture<>(); @@ -75,6 +78,7 @@ public abstract class AbstractSampler implements Sampler { this.interval = interval; this.threadDumper = threadDumper; this.autoEndTime = autoEndTime; + this.windowStatisticsCollector = new WindowStatisticsCollector(platform); } @Override @@ -106,11 +110,11 @@ public abstract class AbstractSampler implements Sampler { @Override public void start() { this.startTime = System.currentTimeMillis(); + } - TickHook tickHook = this.platform.getTickHook(); - if (tickHook != null) { - this.startTick = tickHook.getCurrentTick(); - } + @Override + public void stop() { + this.windowStatisticsCollector.stop(); } protected void writeMetadataToProto(SamplerData.Builder proto, SparkPlatform platform, CommandSender creator, String comment, DataAggregator dataAggregator) { @@ -127,12 +131,9 @@ public abstract class AbstractSampler implements Sampler { metadata.setComment(comment); } - if (this.startTick != -1) { - TickHook tickHook = this.platform.getTickHook(); - if (tickHook != null) { - int numberOfTicks = tickHook.getCurrentTick() - this.startTick; - metadata.setNumberOfTicks(numberOfTicks); - } + int totalTicks = this.windowStatisticsCollector.getTotalTicks(); + if (totalTicks != -1) { + metadata.setNumberOfTicks(totalTicks); } try { @@ -171,14 +172,23 @@ public abstract class AbstractSampler implements Sampler { proto.setMetadata(metadata); } - protected void writeDataToProto(SamplerData.Builder proto, DataAggregator dataAggregator, Comparator outputOrder, MergeMode mergeMode, ClassSourceLookup classSourceLookup) { + protected void writeDataToProto(SamplerData.Builder proto, DataAggregator dataAggregator, MergeMode mergeMode, ClassSourceLookup classSourceLookup) { List data = dataAggregator.exportData(); - data.sort(outputOrder); + data.sort(Comparator.comparing(ThreadNode::getThreadLabel)); ClassSourceLookup.Visitor classSourceVisitor = ClassSourceLookup.createVisitor(classSourceLookup); + ProtoTimeEncoder timeEncoder = new ProtoTimeEncoder(data); + int[] timeWindows = timeEncoder.getKeys(); + for (int timeWindow : timeWindows) { + proto.addTimeWindows(timeWindow); + } + + this.windowStatisticsCollector.ensureHasStatisticsForAllWindows(timeWindows); + proto.putAllTimeWindowStatistics(this.windowStatisticsCollector.export()); + for (ThreadNode entry : data) { - proto.addThreads(entry.toProto(mergeMode)); + proto.addThreads(entry.toProto(mergeMode, timeEncoder)); classSourceVisitor.visit(entry); } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java index 98281de..e06cba6 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java @@ -23,11 +23,9 @@ package me.lucko.spark.common.sampler; import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.command.sender.CommandSender; import me.lucko.spark.common.sampler.node.MergeMode; -import me.lucko.spark.common.sampler.node.ThreadNode; import me.lucko.spark.common.sampler.source.ClassSourceLookup; import me.lucko.spark.proto.SparkSamplerProtos.SamplerData; -import java.util.Comparator; import 
java.util.concurrent.CompletableFuture; /** @@ -67,6 +65,6 @@ public interface Sampler { CompletableFuture getFuture(); // Methods used to export the sampler data to the web viewer. - SamplerData toProto(SparkPlatform platform, CommandSender creator, Comparator outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup); + SamplerData toProto(SparkPlatform platform, CommandSender creator, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup); } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadNodeOrder.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadNodeOrder.java deleted file mode 100644 index adcedcd..0000000 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadNodeOrder.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * This file is part of spark. - * - * Copyright (c) lucko (Luck) - * Copyright (c) contributors - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - */ - -package me.lucko.spark.common.sampler; - -import me.lucko.spark.common.sampler.node.ThreadNode; - -import java.util.Comparator; - -/** - * Methods of ordering {@link ThreadNode}s in the output data. - */ -public enum ThreadNodeOrder implements Comparator { - - /** - * Order by the name of the thread (alphabetically) - */ - BY_NAME { - @Override - public int compare(ThreadNode o1, ThreadNode o2) { - return o1.getThreadLabel().compareTo(o2.getThreadLabel()); - } - }, - - /** - * Order by the time taken by the thread (most time taken first) - */ - BY_TIME { - @Override - public int compare(ThreadNode o1, ThreadNode o2) { - return -Double.compare(o1.getTotalTime(), o2.getTotalTime()); - } - } - -} diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncDataAggregator.java index 3de3943..402330a 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncDataAggregator.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncDataAggregator.java @@ -47,10 +47,10 @@ public class AsyncDataAggregator extends AbstractDataAggregator { .build(); } - public void insertData(ProfileSegment element) { + public void insertData(ProfileSegment element, int window) { try { ThreadNode node = getNode(this.threadGrouper.getGroup(element.getNativeThreadId(), element.getThreadName())); - node.log(STACK_TRACE_DESCRIBER, element.getStackTrace(), element.getTime()); + node.log(STACK_TRACE_DESCRIBER, element.getStackTrace(), element.getTime(), window); } catch (Exception e) { e.printStackTrace(); } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java index abde21d..1480650 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java +++ 
b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java @@ -87,11 +87,11 @@ public class AsyncProfilerAccess { this.setupException = setupException; } - public AsyncProfiler getProfiler() { + public AsyncProfilerJob startNewProfilerJob() { if (this.profiler == null) { throw new UnsupportedOperationException("async-profiler not supported", this.setupException); } - return this.profiler; + return AsyncProfilerJob.createNew(this, this.profiler); } public ProfilingEvent getProfilingEvent() { diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerJob.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerJob.java new file mode 100644 index 0000000..7b123a7 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerJob.java @@ -0,0 +1,264 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.sampler.async; + +import me.lucko.spark.common.SparkPlatform; +import me.lucko.spark.common.sampler.ThreadDumper; +import me.lucko.spark.common.sampler.async.jfr.JfrReader; + +import one.profiler.AsyncProfiler; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.List; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Predicate; + +/** + * Represents a profiling job within async-profiler. + * + *
<p>Only one job can be running at a time. This is guarded by + * {@link #createNew(AsyncProfilerAccess, AsyncProfiler)}.</p>
+ */ +public class AsyncProfilerJob { + + /** + * The currently active job. + */ + private static final AtomicReference ACTIVE = new AtomicReference<>(); + + /** + * Creates a new {@link AsyncProfilerJob}. + * + *
<p>Will throw an {@link IllegalStateException} if another job is already active.</p>
+ * + * @param access the profiler access object + * @param profiler the profiler + * @return the job + */ + static AsyncProfilerJob createNew(AsyncProfilerAccess access, AsyncProfiler profiler) { + synchronized (ACTIVE) { + AsyncProfilerJob existing = ACTIVE.get(); + if (existing != null) { + throw new IllegalStateException("Another profiler is already active: " + existing); + } + + AsyncProfilerJob job = new AsyncProfilerJob(access, profiler); + ACTIVE.set(job); + return job; + } + } + + /** The async-profiler access object */ + private final AsyncProfilerAccess access; + /** The async-profiler instance */ + private final AsyncProfiler profiler; + + // Set on init + /** The platform */ + private SparkPlatform platform; + /** The sampling interval in microseconds */ + private int interval; + /** The thread dumper */ + private ThreadDumper threadDumper; + /** The profiling window */ + private int window; + + /** The file used by async-profiler to output data */ + private Path outputFile; + + private AsyncProfilerJob(AsyncProfilerAccess access, AsyncProfiler profiler) { + this.access = access; + this.profiler = profiler; + } + + /** + * Executes an async-profiler command. + * + * @param command the command + * @return the output + */ + private String execute(String command) { + try { + return this.profiler.execute(command); + } catch (IOException e) { + throw new RuntimeException("Exception whilst executing profiler command", e); + } + } + + /** + * Checks to ensure that this job is still active. + */ + private void checkActive() { + if (ACTIVE.get() != this) { + throw new IllegalStateException("Profiler job no longer active!"); + } + } + + // Initialise the job + public void init(SparkPlatform platform, int interval, ThreadDumper threadDumper, int window) { + this.platform = platform; + this.interval = interval; + this.threadDumper = threadDumper; + this.window = window; + } + + /** + * Starts the job. + */ + public void start() { + checkActive(); + + try { + // create a new temporary output file + try { + this.outputFile = this.platform.getTemporaryFiles().create("spark-", "-profile-data.jfr.tmp"); + } catch (IOException e) { + throw new RuntimeException("Unable to create temporary output file", e); + } + + // construct a command to send to async-profiler + String command = "start,event=" + this.access.getProfilingEvent() + ",interval=" + this.interval + "us,threads,jfr,file=" + this.outputFile.toString(); + if (this.threadDumper instanceof ThreadDumper.Specific) { + command += ",filter"; + } + + // start the profiler + String resp = execute(command).trim(); + + if (!resp.equalsIgnoreCase("profiling started")) { + throw new RuntimeException("Unexpected response: " + resp); + } + + // append threads to be profiled, if necessary + if (this.threadDumper instanceof ThreadDumper.Specific) { + ThreadDumper.Specific threadDumper = (ThreadDumper.Specific) this.threadDumper; + for (Thread thread : threadDumper.getThreads()) { + this.profiler.addThread(thread); + } + } + + } catch (Exception e) { + try { + this.profiler.stop(); + } catch (Exception e2) { + // ignore + } + close(); + + throw e; + } + } + + /** + * Stops the job. + */ + public void stop() { + checkActive(); + + try { + this.profiler.stop(); + } catch (IllegalStateException e) { + if (!e.getMessage().equals("Profiler is not active")) { // ignore + throw e; + } + } finally { + close(); + } + } + + /** + * Aggregates the collected data. 
+ */ + public void aggregate(AsyncDataAggregator dataAggregator) { + + Predicate threadFilter; + if (this.threadDumper instanceof ThreadDumper.Specific) { + ThreadDumper.Specific specificDumper = (ThreadDumper.Specific) this.threadDumper; + threadFilter = n -> specificDumper.getThreadNames().contains(n.toLowerCase()); + } else { + threadFilter = n -> true; + } + + // read the jfr file produced by async-profiler + try (JfrReader reader = new JfrReader(this.outputFile)) { + readSegments(reader, threadFilter, dataAggregator, this.window); + } catch (Exception e) { + boolean fileExists; + try { + fileExists = Files.exists(this.outputFile) && Files.size(this.outputFile) != 0; + } catch (IOException ex) { + fileExists = false; + } + + if (fileExists) { + throw new JfrParsingException("Error parsing JFR data from profiler output", e); + } else { + throw new JfrParsingException("Error parsing JFR data from profiler output - file " + this.outputFile + " does not exist!", e); + } + } + + // delete the output file after reading + try { + Files.deleteIfExists(this.outputFile); + } catch (IOException e) { + // ignore + } + + } + + private void readSegments(JfrReader reader, Predicate threadFilter, AsyncDataAggregator dataAggregator, int window) throws IOException { + List samples = reader.readAllEvents(JfrReader.ExecutionSample.class); + for (int i = 0; i < samples.size(); i++) { + JfrReader.ExecutionSample sample = samples.get(i); + + long duration; + if (i == 0) { + // we don't really know the duration of the first sample, so just use the sampling + // interval + duration = this.interval; + } else { + // calculate the duration of the sample by calculating the time elapsed since the + // previous sample + duration = TimeUnit.NANOSECONDS.toMicros(sample.time - samples.get(i - 1).time); + } + + String threadName = reader.threads.get(sample.tid); + if (!threadFilter.test(threadName)) { + continue; + } + + // parse the segment and give it to the data aggregator + ProfileSegment segment = ProfileSegment.parseSegment(reader, sample, threadName, duration); + dataAggregator.insertData(segment, window); + } + } + + public int getWindow() { + return this.window; + } + + private void close() { + ACTIVE.compareAndSet(this, null); + } +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java index 7d9cb81..2c9bb5f 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java @@ -27,61 +27,41 @@ import me.lucko.spark.common.command.sender.CommandSender; import me.lucko.spark.common.sampler.AbstractSampler; import me.lucko.spark.common.sampler.ThreadDumper; import me.lucko.spark.common.sampler.ThreadGrouper; -import me.lucko.spark.common.sampler.async.jfr.JfrReader; import me.lucko.spark.common.sampler.node.MergeMode; -import me.lucko.spark.common.sampler.node.ThreadNode; import me.lucko.spark.common.sampler.source.ClassSourceLookup; +import me.lucko.spark.common.sampler.window.ProfilingWindowUtils; +import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.proto.SparkSamplerProtos.SamplerData; -import one.profiler.AsyncProfiler; - -import java.io.IOException; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.Comparator; -import java.util.List; import java.util.concurrent.Executors; import 
java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; -import java.util.function.Predicate; /** * A sampler implementation using async-profiler. */ public class AsyncSampler extends AbstractSampler { - private final AsyncProfiler profiler; + private final AsyncProfilerAccess profilerAccess; /** Responsible for aggregating and then outputting collected sampling data */ private final AsyncDataAggregator dataAggregator; - /** Flag to mark if the output has been completed */ - private boolean outputComplete = false; + /** Mutex for the current profiler job */ + private final Object[] currentJobMutex = new Object[0]; - /** The temporary output file */ - private Path outputFile; + /** Current profiler job */ + private AsyncProfilerJob currentJob; - /** The executor used for timeouts */ - private ScheduledExecutorService timeoutExecutor; + /** The executor used for scheduling and management */ + private ScheduledExecutorService scheduler; public AsyncSampler(SparkPlatform platform, int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime) { super(platform, interval, threadDumper, endTime); - this.profiler = AsyncProfilerAccess.getInstance(platform).getProfiler(); + this.profilerAccess = AsyncProfilerAccess.getInstance(platform); this.dataAggregator = new AsyncDataAggregator(threadGrouper); - } - - /** - * Executes a profiler command. - * - * @param command the command to execute - * @return the response - */ - private String execute(String command) { - try { - return this.profiler.execute(command); - } catch (IOException e) { - throw new RuntimeException("Exception whilst executing profiler command", e); - } + this.scheduler = Executors.newSingleThreadScheduledExecutor( + new ThreadFactoryBuilder().setNameFormat("spark-asyncsampler-worker-thread").build() + ); } /** @@ -91,33 +71,58 @@ public class AsyncSampler extends AbstractSampler { public void start() { super.start(); - try { - this.outputFile = this.platform.getTemporaryFiles().create("spark-", "-profile-data.jfr.tmp"); - } catch (IOException e) { - throw new RuntimeException("Unable to create temporary output file", e); + TickHook tickHook = this.platform.getTickHook(); + if (tickHook != null) { + this.windowStatisticsCollector.startCountingTicks(tickHook); } - String command = "start,event=" + AsyncProfilerAccess.getInstance(this.platform).getProfilingEvent() + ",interval=" + this.interval + "us,threads,jfr,file=" + this.outputFile.toString(); - if (this.threadDumper instanceof ThreadDumper.Specific) { - command += ",filter"; - } + int window = ProfilingWindowUtils.unixMillisToWindow(System.currentTimeMillis()); - String resp = execute(command).trim(); - if (!resp.equalsIgnoreCase("profiling started")) { - throw new RuntimeException("Unexpected response: " + resp); - } + AsyncProfilerJob job = this.profilerAccess.startNewProfilerJob(); + job.init(this.platform, this.interval, this.threadDumper, window); + job.start(); + this.currentJob = job; - if (this.threadDumper instanceof ThreadDumper.Specific) { - ThreadDumper.Specific threadDumper = (ThreadDumper.Specific) this.threadDumper; - for (Thread thread : threadDumper.getThreads()) { - this.profiler.addThread(thread); - } - } + // rotate the sampler job every minute to put data into a new window + this.scheduler.scheduleAtFixedRate(this::rotateProfilerJob, 1, 1, TimeUnit.MINUTES); recordInitialGcStats(); scheduleTimeout(); } + private void rotateProfilerJob() { + try { + synchronized (this.currentJobMutex) { + AsyncProfilerJob 
previousJob = this.currentJob; + if (previousJob == null) { + return; + } + + try { + // stop the previous job + previousJob.stop(); + + // collect statistics for the window + this.windowStatisticsCollector.measureNow(previousJob.getWindow()); + } catch (Exception e) { + e.printStackTrace(); + } + + // start a new job + int window = previousJob.getWindow() + 1; + AsyncProfilerJob newJob = this.profilerAccess.startNewProfilerJob(); + newJob.init(this.platform, this.interval, this.threadDumper, window); + newJob.start(); + this.currentJob = newJob; + + // aggregate the output of the previous job + previousJob.aggregate(this.dataAggregator); + } + } catch (Throwable e) { + e.printStackTrace(); + } + } + private void scheduleTimeout() { if (this.autoEndTime == -1) { return; @@ -128,11 +133,7 @@ public class AsyncSampler extends AbstractSampler { return; } - this.timeoutExecutor = Executors.newSingleThreadScheduledExecutor( - new ThreadFactoryBuilder().setNameFormat("spark-asyncsampler-timeout-thread").build() - ); - - this.timeoutExecutor.schedule(() -> { + this.scheduler.schedule(() -> { stop(); this.future.complete(this); }, delay, TimeUnit.MILLISECONDS); @@ -143,145 +144,27 @@ public class AsyncSampler extends AbstractSampler { */ @Override public void stop() { - try { - this.profiler.stop(); - } catch (IllegalStateException e) { - if (!e.getMessage().equals("Profiler is not active")) { // ignore - throw e; - } - } + super.stop(); + synchronized (this.currentJobMutex) { + this.currentJob.stop(); + this.windowStatisticsCollector.measureNow(this.currentJob.getWindow()); + this.currentJob.aggregate(this.dataAggregator); + this.currentJob = null; + } - if (this.timeoutExecutor != null) { - this.timeoutExecutor.shutdown(); - this.timeoutExecutor = null; + if (this.scheduler != null) { + this.scheduler.shutdown(); + this.scheduler = null; } } @Override - public SamplerData toProto(SparkPlatform platform, CommandSender creator, Comparator outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup) { + public SamplerData toProto(SparkPlatform platform, CommandSender creator, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup) { SamplerData.Builder proto = SamplerData.newBuilder(); writeMetadataToProto(proto, platform, creator, comment, this.dataAggregator); - aggregateOutput(); - writeDataToProto(proto, this.dataAggregator, outputOrder, mergeMode, classSourceLookup); + writeDataToProto(proto, this.dataAggregator, mergeMode, classSourceLookup); return proto.build(); } - private void aggregateOutput() { - if (this.outputComplete) { - return; - } - this.outputComplete = true; - - Predicate threadFilter; - if (this.threadDumper instanceof ThreadDumper.Specific) { - ThreadDumper.Specific threadDumper = (ThreadDumper.Specific) this.threadDumper; - threadFilter = n -> threadDumper.getThreadNames().contains(n.toLowerCase()); - } else { - threadFilter = n -> true; - } - - // read the jfr file produced by async-profiler - try (JfrReader reader = new JfrReader(this.outputFile)) { - readSegments(reader, threadFilter); - } catch (Exception e) { - boolean fileExists; - try { - fileExists = Files.exists(this.outputFile) && Files.size(this.outputFile) != 0; - } catch (IOException ex) { - fileExists = false; - } - - if (fileExists) { - throw new JfrParsingException("Error parsing JFR data from profiler output", e); - } else { - throw new JfrParsingException("Error parsing JFR data from profiler output - file " + this.outputFile + " does not exist!", e); - } - } - - 
// delete the output file after reading - try { - Files.deleteIfExists(this.outputFile); - } catch (IOException e) { - // ignore - } - } - - private void readSegments(JfrReader reader, Predicate threadFilter) throws IOException { - List samples = reader.readAllEvents(JfrReader.ExecutionSample.class); - for (int i = 0; i < samples.size(); i++) { - JfrReader.ExecutionSample sample = samples.get(i); - - long duration; - if (i == 0) { - // we don't really know the duration of the first sample, so just use the sampling - // interval - duration = this.interval; - } else { - // calculate the duration of the sample by calculating the time elapsed since the - // previous sample - duration = TimeUnit.NANOSECONDS.toMicros(sample.time - samples.get(i - 1).time); - } - - String threadName = reader.threads.get(sample.tid); - if (!threadFilter.test(threadName)) { - continue; - } - - // parse the segment and give it to the data aggregator - ProfileSegment segment = parseSegment(reader, sample, threadName, duration); - this.dataAggregator.insertData(segment); - } - } - - private static ProfileSegment parseSegment(JfrReader reader, JfrReader.ExecutionSample sample, String threadName, long duration) { - JfrReader.StackTrace stackTrace = reader.stackTraces.get(sample.stackTraceId); - int len = stackTrace.methods.length; - - AsyncStackTraceElement[] stack = new AsyncStackTraceElement[len]; - for (int i = 0; i < len; i++) { - stack[i] = parseStackFrame(reader, stackTrace.methods[i]); - } - - return new ProfileSegment(sample.tid, threadName, stack, duration); - } - - private static AsyncStackTraceElement parseStackFrame(JfrReader reader, long methodId) { - AsyncStackTraceElement result = reader.stackFrames.get(methodId); - if (result != null) { - return result; - } - - JfrReader.MethodRef methodRef = reader.methods.get(methodId); - JfrReader.ClassRef classRef = reader.classes.get(methodRef.cls); - - byte[] className = reader.symbols.get(classRef.name); - byte[] methodName = reader.symbols.get(methodRef.name); - - if (className == null || className.length == 0) { - // native call - result = new AsyncStackTraceElement( - AsyncStackTraceElement.NATIVE_CALL, - new String(methodName, StandardCharsets.UTF_8), - null - ); - } else { - // java method - byte[] methodDesc = reader.symbols.get(methodRef.sig); - result = new AsyncStackTraceElement( - new String(className, StandardCharsets.UTF_8).replace('/', '.'), - new String(methodName, StandardCharsets.UTF_8), - new String(methodDesc, StandardCharsets.UTF_8) - ); - } - - reader.stackFrames.put(methodId, result); - return result; - } - - private static final class JfrParsingException extends RuntimeException { - public JfrParsingException(String message, Throwable cause) { - super(message, cause); - } - } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/JfrParsingException.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/JfrParsingException.java new file mode 100644 index 0000000..6dab359 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/JfrParsingException.java @@ -0,0 +1,27 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. 
+ * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.sampler.async; + +public class JfrParsingException extends RuntimeException { + public JfrParsingException(String message, Throwable cause) { + super(message, cause); + } +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/ProfileSegment.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/ProfileSegment.java index 154e6fe..26debaf 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/ProfileSegment.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/ProfileSegment.java @@ -20,6 +20,10 @@ package me.lucko.spark.common.sampler.async; +import me.lucko.spark.common.sampler.async.jfr.JfrReader; + +import java.nio.charset.StandardCharsets; + /** * Represents a profile "segment". * @@ -58,4 +62,50 @@ public class ProfileSegment { public long getTime() { return this.time; } + + public static ProfileSegment parseSegment(JfrReader reader, JfrReader.ExecutionSample sample, String threadName, long duration) { + JfrReader.StackTrace stackTrace = reader.stackTraces.get(sample.stackTraceId); + int len = stackTrace.methods.length; + + AsyncStackTraceElement[] stack = new AsyncStackTraceElement[len]; + for (int i = 0; i < len; i++) { + stack[i] = parseStackFrame(reader, stackTrace.methods[i]); + } + + return new ProfileSegment(sample.tid, threadName, stack, duration); + } + + private static AsyncStackTraceElement parseStackFrame(JfrReader reader, long methodId) { + AsyncStackTraceElement result = reader.stackFrames.get(methodId); + if (result != null) { + return result; + } + + JfrReader.MethodRef methodRef = reader.methods.get(methodId); + JfrReader.ClassRef classRef = reader.classes.get(methodRef.cls); + + byte[] className = reader.symbols.get(classRef.name); + byte[] methodName = reader.symbols.get(methodRef.name); + + if (className == null || className.length == 0) { + // native call + result = new AsyncStackTraceElement( + AsyncStackTraceElement.NATIVE_CALL, + new String(methodName, StandardCharsets.UTF_8), + null + ); + } else { + // java method + byte[] methodDesc = reader.symbols.get(methodRef.sig); + result = new AsyncStackTraceElement( + new String(className, StandardCharsets.UTF_8).replace('/', '.'), + new String(methodName, StandardCharsets.UTF_8), + new String(methodDesc, StandardCharsets.UTF_8) + ); + } + + reader.stackFrames.put(methodId, result); + return result; + } + } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/Dictionary.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/Dictionary.java index 23223a2..60f6543 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/Dictionary.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/Dictionary.java @@ -37,6 +37,10 @@ public class Dictionary { size = 0; } + public int size() { + return this.size; + } + public void put(long key, T value) { if (key == 0) { throw new IllegalArgumentException("Zero key not allowed"); diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaDataAggregator.java 
b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaDataAggregator.java index cc530d6..c51ec05 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaDataAggregator.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaDataAggregator.java @@ -66,10 +66,11 @@ public abstract class JavaDataAggregator extends AbstractDataAggregator { * Inserts sampling data into this aggregator * * @param threadInfo the thread info + * @param window the window */ - public abstract void insertData(ThreadInfo threadInfo); + public abstract void insertData(ThreadInfo threadInfo, int window); - protected void writeData(ThreadInfo threadInfo) { + protected void writeData(ThreadInfo threadInfo, int window) { if (this.ignoreSleeping && isSleeping(threadInfo)) { return; } @@ -79,7 +80,7 @@ public abstract class JavaDataAggregator extends AbstractDataAggregator { try { ThreadNode node = getNode(this.threadGrouper.getGroup(threadInfo.getThreadId(), threadInfo.getThreadName())); - node.log(STACK_TRACE_DESCRIBER, threadInfo.getStackTrace(), this.interval); + node.log(STACK_TRACE_DESCRIBER, threadInfo.getStackTrace(), this.interval, window); } catch (Exception e) { e.printStackTrace(); } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java index 0f73a9f..8c96fd3 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java @@ -28,15 +28,17 @@ import me.lucko.spark.common.sampler.AbstractSampler; import me.lucko.spark.common.sampler.ThreadDumper; import me.lucko.spark.common.sampler.ThreadGrouper; import me.lucko.spark.common.sampler.node.MergeMode; -import me.lucko.spark.common.sampler.node.ThreadNode; import me.lucko.spark.common.sampler.source.ClassSourceLookup; +import me.lucko.spark.common.sampler.window.ProfilingWindowUtils; +import me.lucko.spark.common.sampler.window.WindowStatisticsCollector; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.proto.SparkSamplerProtos.SamplerData; +import org.checkerframework.checker.units.qual.A; + import java.lang.management.ManagementFactory; import java.lang.management.ThreadInfo; import java.lang.management.ThreadMXBean; -import java.util.Comparator; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ScheduledFuture; @@ -62,6 +64,9 @@ public class JavaSampler extends AbstractSampler implements Runnable { /** Responsible for aggregating and then outputting collected sampling data */ private final JavaDataAggregator dataAggregator; + + /** The last window that was profiled */ + private final AtomicInteger lastWindow = new AtomicInteger(); public JavaSampler(SparkPlatform platform, int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime, boolean ignoreSleeping, boolean ignoreNative) { super(platform, interval, threadDumper, endTime); @@ -76,12 +81,28 @@ public class JavaSampler extends AbstractSampler implements Runnable { @Override public void start() { super.start(); + + TickHook tickHook = this.platform.getTickHook(); + if (tickHook != null) { + if (this.dataAggregator instanceof TickedDataAggregator) { + WindowStatisticsCollector.ExplicitTickCounter counter = this.windowStatisticsCollector.startCountingTicksExplicit(tickHook); + ((TickedDataAggregator) 
this.dataAggregator).setTickCounter(counter); + } else { + this.windowStatisticsCollector.startCountingTicks(tickHook); + } + } + this.task = this.workerPool.scheduleAtFixedRate(this, 0, this.interval, TimeUnit.MICROSECONDS); } @Override public void stop() { + super.stop(); + this.task.cancel(false); + + // collect statistics for the final window + this.windowStatisticsCollector.measureNow(this.lastWindow.get()); } @Override @@ -89,27 +110,30 @@ public class JavaSampler extends AbstractSampler implements Runnable { // this is effectively synchronized, the worker pool will not allow this task // to concurrently execute. try { - if (this.autoEndTime != -1 && this.autoEndTime <= System.currentTimeMillis()) { - this.future.complete(this); + long time = System.currentTimeMillis(); + + if (this.autoEndTime != -1 && this.autoEndTime <= time) { stop(); + this.future.complete(this); return; } + int window = ProfilingWindowUtils.unixMillisToWindow(time); ThreadInfo[] threadDumps = this.threadDumper.dumpThreads(this.threadBean); - this.workerPool.execute(new InsertDataTask(this.dataAggregator, threadDumps)); + this.workerPool.execute(new InsertDataTask(threadDumps, window)); } catch (Throwable t) { - this.future.completeExceptionally(t); stop(); + this.future.completeExceptionally(t); } } - private static final class InsertDataTask implements Runnable { - private final JavaDataAggregator dataAggregator; + private final class InsertDataTask implements Runnable { private final ThreadInfo[] threadDumps; + private final int window; - InsertDataTask(JavaDataAggregator dataAggregator, ThreadInfo[] threadDumps) { - this.dataAggregator = dataAggregator; + InsertDataTask(ThreadInfo[] threadDumps, int window) { this.threadDumps = threadDumps; + this.window = window; } @Override @@ -118,16 +142,22 @@ public class JavaSampler extends AbstractSampler implements Runnable { if (threadInfo.getThreadName() == null || threadInfo.getStackTrace() == null) { continue; } - this.dataAggregator.insertData(threadInfo); + JavaSampler.this.dataAggregator.insertData(threadInfo, this.window); + } + + // if we have just stepped over into a new window, collect statistics for the previous window + int previousWindow = JavaSampler.this.lastWindow.getAndSet(this.window); + if (previousWindow != 0 && previousWindow != this.window) { + JavaSampler.this.windowStatisticsCollector.measureNow(previousWindow); } } } @Override - public SamplerData toProto(SparkPlatform platform, CommandSender creator, Comparator outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup) { + public SamplerData toProto(SparkPlatform platform, CommandSender creator, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup) { SamplerData.Builder proto = SamplerData.newBuilder(); writeMetadataToProto(proto, platform, creator, comment, this.dataAggregator); - writeDataToProto(proto, this.dataAggregator, outputOrder, mergeMode, classSourceLookup); + writeDataToProto(proto, this.dataAggregator, mergeMode, classSourceLookup); return proto.build(); } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/SimpleDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/SimpleDataAggregator.java index 39e21aa..54173fe 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/SimpleDataAggregator.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/SimpleDataAggregator.java @@ -44,8 +44,8 @@ public class SimpleDataAggregator extends 
JavaDataAggregator { } @Override - public void insertData(ThreadInfo threadInfo) { - writeData(threadInfo); + public void insertData(ThreadInfo threadInfo, int window) { + writeData(threadInfo, window); } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java index e062f31..d537b96 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java @@ -23,6 +23,7 @@ package me.lucko.spark.common.sampler.java; import me.lucko.spark.common.sampler.ThreadGrouper; import me.lucko.spark.common.sampler.aggregator.DataAggregator; import me.lucko.spark.common.sampler.node.ThreadNode; +import me.lucko.spark.common.sampler.window.WindowStatisticsCollector; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata; @@ -31,7 +32,6 @@ import java.util.ArrayList; import java.util.List; import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicInteger; /** * Implementation of {@link DataAggregator} which supports only including sampling data from "ticks" @@ -48,14 +48,15 @@ public class TickedDataAggregator extends JavaDataAggregator { /** The expected number of samples in each tick */ private final int expectedSize; - /** The number of ticks aggregated so far */ - private final AtomicInteger numberOfTicks = new AtomicInteger(); - - private final Object mutex = new Object(); + /** Counts the number of ticks aggregated */ + private WindowStatisticsCollector.ExplicitTickCounter tickCounter; // state private int currentTick = -1; - private TickList currentData = new TickList(0); + private TickList currentData = null; + + // guards currentData + private final Object mutex = new Object(); public TickedDataAggregator(ExecutorService workerPool, ThreadGrouper threadGrouper, int interval, boolean ignoreSleeping, boolean ignoreNative, TickHook tickHook, int tickLengthThreshold) { super(workerPool, threadGrouper, interval, ignoreSleeping, ignoreNative); @@ -66,29 +67,34 @@ public class TickedDataAggregator extends JavaDataAggregator { this.expectedSize = (int) ((50 / intervalMilliseconds) + 10); } + public void setTickCounter(WindowStatisticsCollector.ExplicitTickCounter tickCounter) { + this.tickCounter = tickCounter; + } + @Override public SamplerMetadata.DataAggregator getMetadata() { // push the current tick (so numberOfTicks is accurate) synchronized (this.mutex) { pushCurrentTick(); + this.currentData = null; } return SamplerMetadata.DataAggregator.newBuilder() .setType(SamplerMetadata.DataAggregator.Type.TICKED) .setThreadGrouper(this.threadGrouper.asProto()) .setTickLengthThreshold(this.tickLengthThreshold) - .setNumberOfIncludedTicks(this.numberOfTicks.get()) + .setNumberOfIncludedTicks(this.tickCounter.getTotalCountedTicks()) .build(); } @Override - public void insertData(ThreadInfo threadInfo) { + public void insertData(ThreadInfo threadInfo, int window) { synchronized (this.mutex) { int tick = this.tickHook.getCurrentTick(); - if (this.currentTick != tick) { + if (this.currentTick != tick || this.currentData == null) { pushCurrentTick(); this.currentTick = tick; - this.currentData = new TickList(this.expectedSize); + this.currentData = new TickList(this.expectedSize, window); } this.currentData.addData(threadInfo); @@ -98,6 
+104,9 @@ public class TickedDataAggregator extends JavaDataAggregator { // guarded by 'mutex' private void pushCurrentTick() { TickList currentData = this.currentData; + if (currentData == null) { + return; + } // approximate how long the tick lasted int tickLengthMicros = currentData.getList().size() * this.interval; @@ -107,8 +116,8 @@ public class TickedDataAggregator extends JavaDataAggregator { return; } - this.numberOfTicks.incrementAndGet(); this.workerPool.submit(currentData); + this.tickCounter.increment(); } @Override @@ -121,21 +130,19 @@ public class TickedDataAggregator extends JavaDataAggregator { return super.exportData(); } - public int getNumberOfTicks() { - return this.numberOfTicks.get(); - } - private final class TickList implements Runnable { private final List list; + private final int window; - TickList(int expectedSize) { + TickList(int expectedSize, int window) { this.list = new ArrayList<>(expectedSize); + this.window = window; } @Override public void run() { for (ThreadInfo data : this.list) { - writeData(data); + writeData(data, this.window); } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java index fd2be8d..fe1afcd 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java @@ -20,6 +20,9 @@ package me.lucko.spark.common.sampler.node; +import me.lucko.spark.common.sampler.async.jfr.Dictionary; +import me.lucko.spark.common.sampler.window.ProtoTimeEncoder; + import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -27,62 +30,63 @@ import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.LongAdder; +import java.util.stream.IntStream; /** * Encapsulates a timed node in the sampling stack. */ public abstract class AbstractNode { - private static final int MAX_STACK_DEPTH = 300; + protected static final int MAX_STACK_DEPTH = 300; /** A map of the nodes children */ private final Map children = new ConcurrentHashMap<>(); /** The accumulated sample time for this node, measured in microseconds */ - private final LongAdder totalTime = new LongAdder(); + // long key = the window (effectively System.currentTimeMillis() / 60_000) + // LongAdder value = accumulated time in microseconds + private final Dictionary times = new Dictionary<>(); /** - * Gets the total sample time logged for this node in milliseconds. + * Gets the time accumulator for a given window * - * @return the total time + * @param window the window + * @return the accumulator */ - public double getTotalTime() { - return this.totalTime.longValue() / 1000d; + protected LongAdder getTimeAccumulator(int window) { + LongAdder adder = this.times.get(window); + if (adder == null) { + adder = new LongAdder(); + this.times.put(window, adder); + } + return adder; } - public Collection getChildren() { - return this.children.values(); + /** + * Gets the time windows that have been logged for this node. + * + * @return the time windows + */ + public IntStream getTimeWindows() { + IntStream.Builder keys = IntStream.builder(); + this.times.forEach((key, value) -> keys.add((int) key)); + return keys.build(); } /** - * Logs the given stack trace against this node and its children. + * Gets the encoded total sample times logged for this node in milliseconds. 
* - * @param describer the function that describes the elements of the stack - * @param stack the stack - * @param time the total time to log - * @param the stack trace element type + * @return the total times */ - public void log(StackTraceNode.Describer describer, T[] stack, long time) { - if (stack.length == 0) { - return; - } - - this.totalTime.add(time); - - AbstractNode node = this; - T previousElement = null; - - for (int offset = 0; offset < Math.min(MAX_STACK_DEPTH, stack.length); offset++) { - T element = stack[(stack.length - 1) - offset]; - - node = node.resolveChild(describer.describe(element, previousElement)); - node.totalTime.add(time); + protected double[] encodeTimesForProto(ProtoTimeEncoder encoder) { + return encoder.encode(this.times); + } - previousElement = element; - } + public Collection getChildren() { + return this.children.values(); } - private StackTraceNode resolveChild(StackTraceNode.Description description) { + protected StackTraceNode resolveChild(StackTraceNode.Description description) { StackTraceNode result = this.children.get(description); // fast path if (result != null) { return result; @@ -96,7 +100,7 @@ public abstract class AbstractNode { * @param other the other node */ protected void merge(AbstractNode other) { - this.totalTime.add(other.totalTime.longValue()); + other.times.forEach((key, value) -> getTimeAccumulator((int) key).add(value.longValue())); for (Map.Entry child : other.children.entrySet()) { resolveChild(child.getKey()).merge(child.getValue()); } @@ -123,7 +127,7 @@ public abstract class AbstractNode { list.add(child); } - list.sort(null); + //list.sort(null); return list; } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java index b0d9237..ed938d5 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java @@ -20,6 +20,7 @@ package me.lucko.spark.common.sampler.node; +import me.lucko.spark.common.sampler.window.ProtoTimeEncoder; import me.lucko.spark.common.util.MethodDisambiguator; import me.lucko.spark.proto.SparkSamplerProtos; @@ -30,7 +31,7 @@ import java.util.Objects; /** * Represents a stack trace element within the {@link AbstractNode node} structure. */ -public final class StackTraceNode extends AbstractNode implements Comparable { +public final class StackTraceNode extends AbstractNode { /** * Magic number to denote "no present" line number for a node. 
@@ -64,12 +65,16 @@ public final class StackTraceNode extends AbstractNode implements Comparable= 0) { proto.setLineNumber(this.description.lineNumber); } @@ -87,26 +92,12 @@ public final class StackTraceNode extends AbstractNode implements Comparable { + public static final class Description { private final String className; private final String methodName; @@ -162,54 +153,6 @@ public final class StackTraceNode extends AbstractNode implements Comparable> int nullCompare(T a, T b) { - if (a == null && b == null) { - return 0; - } else if (a == null) { - return -1; - } else if (b == null) { - return 1; - } else { - return a.compareTo(b); - } - } - - @Override - public int compareTo(Description that) { - if (this == that) { - return 0; - } - - int i = this.className.compareTo(that.className); - if (i != 0) { - return i; - } - - i = this.methodName.compareTo(that.methodName); - if (i != 0) { - return i; - } - - i = nullCompare(this.methodDescription, that.methodDescription); - if (i != 0) { - return i; - } - - if (this.methodDescription != null && that.methodDescription != null) { - i = this.methodDescription.compareTo(that.methodDescription); - if (i != 0) { - return i; - } - } - - i = Integer.compare(this.lineNumber, that.lineNumber); - if (i != 0) { - return i; - } - - return Integer.compare(this.parentLineNumber, that.parentLineNumber); - } - @Override public boolean equals(Object o) { if (this == o) return true; diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java index ed97443..1dce523 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java @@ -20,6 +20,7 @@ package me.lucko.spark.common.sampler.node; +import me.lucko.spark.common.sampler.window.ProtoTimeEncoder; import me.lucko.spark.proto.SparkSamplerProtos; /** @@ -53,13 +54,46 @@ public final class ThreadNode extends AbstractNode { this.label = label; } - public SparkSamplerProtos.ThreadNode toProto(MergeMode mergeMode) { + /** + * Logs the given stack trace against this node and its children. 
+ * + * @param describer the function that describes the elements of the stack + * @param stack the stack + * @param time the total time to log + * @param window the window + * @param the stack trace element type + */ + public void log(StackTraceNode.Describer describer, T[] stack, long time, int window) { + if (stack.length == 0) { + return; + } + + getTimeAccumulator(window).add(time); + + AbstractNode node = this; + T previousElement = null; + + for (int offset = 0; offset < Math.min(MAX_STACK_DEPTH, stack.length); offset++) { + T element = stack[(stack.length - 1) - offset]; + + node = node.resolveChild(describer.describe(element, previousElement)); + node.getTimeAccumulator(window).add(time); + + previousElement = element; + } + } + + public SparkSamplerProtos.ThreadNode toProto(MergeMode mergeMode, ProtoTimeEncoder timeEncoder) { SparkSamplerProtos.ThreadNode.Builder proto = SparkSamplerProtos.ThreadNode.newBuilder() - .setName(getThreadLabel()) - .setTime(getTotalTime()); + .setName(getThreadLabel()); + + double[] times = encodeTimesForProto(timeEncoder); + for (double time : times) { + proto.addTimes(time); + } for (StackTraceNode child : exportChildren(mergeMode)) { - proto.addChildren(child.toProto(mergeMode)); + proto.addChildren(child.toProto(mergeMode, timeEncoder)); } return proto.build(); diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/window/ProfilingWindowUtils.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/window/ProfilingWindowUtils.java new file mode 100644 index 0000000..109adb3 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/window/ProfilingWindowUtils.java @@ -0,0 +1,36 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.sampler.window; + +public enum ProfilingWindowUtils { + ; + + /** + * Gets the profiling window for the given time in unix-millis. + * + * @param time the time in milliseconds + * @return the window + */ + public static int unixMillisToWindow(long time) { + // one window per minute + return (int) (time / 60_000); + } +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/window/ProtoTimeEncoder.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/window/ProtoTimeEncoder.java new file mode 100644 index 0000000..edb2309 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/window/ProtoTimeEncoder.java @@ -0,0 +1,94 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. 
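The unixMillisToWindow helper introduced above is plain integer division, so every sample taken within the same wall-clock minute is attributed to the same window. A quick standalone check of the arithmetic (the timestamp is an arbitrary example):

public class WindowArithmetic {
    public static void main(String[] args) {
        long time = 1_668_384_000_000L; // 2022-11-14 00:00:00 UTC
        int window = (int) (time / 60_000);
        System.out.println(window);           // 27806400
        System.out.println(window * 60_000L); // 1668384000000 - the start of that window
    }
}

Because the division truncates, window boundaries are aligned to whole minutes of unix time rather than to the moment profiling started.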
+ * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.sampler.window; + +import me.lucko.spark.common.sampler.async.jfr.Dictionary; +import me.lucko.spark.common.sampler.node.AbstractNode; +import me.lucko.spark.common.sampler.node.ThreadNode; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.atomic.LongAdder; +import java.util.stream.IntStream; + +/** + * Encodes a map of int->double into a double array. + */ +public class ProtoTimeEncoder { + /** A sorted array of all possible keys to encode */ + private final int[] keys; + /** A map of key value -> index in the keys array */ + private final Map keysToIndex; + + public ProtoTimeEncoder(List sourceData) { + // get an array of all keys that show up in the source data + this.keys = sourceData.stream() + .map(AbstractNode::getTimeWindows) + .reduce(IntStream.empty(), IntStream::concat) + .distinct() + .sorted() + .toArray(); + + // construct a reverse index lookup + this.keysToIndex = new HashMap<>(this.keys.length); + for (int i = 0; i < this.keys.length; i++) { + this.keysToIndex.put(this.keys[i], i); + } + } + + /** + * Gets an array of the keys that could be encoded by this encoder. + * + * @return an array of keys + */ + public int[] getKeys() { + return this.keys; + } + + /** + * Encode a {@link Dictionary} (map) of times/durations into a double array. + * + * @param times a dictionary of times (unix-time millis -> duration in microseconds) + * @return the times encoded as a double array + */ + public double[] encode(Dictionary times) { + // construct an array of values - length needs to exactly match the + // number of keys, even if some values are zero. + double[] array = new double[this.keys.length]; + + times.forEach((key, value) -> { + // get the index for the given key + Integer idx = this.keysToIndex.get((int) key); + if (idx == null) { + throw new RuntimeException("No index for key " + key + " in " + this.keysToIndex.keySet()); + } + + // convert the duration from microseconds -> milliseconds + double durationInMilliseconds = value.longValue() / 1000d; + + // store in the array + array[idx] = durationInMilliseconds; + }); + + return array; + } +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/window/WindowStatisticsCollector.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/window/WindowStatisticsCollector.java new file mode 100644 index 0000000..47f739d --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/window/WindowStatisticsCollector.java @@ -0,0 +1,267 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. 
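ProtoTimeEncoder, shown in full above, projects every node's sparse window -> duration map onto one shared, sorted key array, so that slot i of every encoded array refers to the same window across the whole profile, with zero-filled slots for windows a node never saw. The same idea in standalone form, using plain collections rather than spark's classes:

import java.util.HashMap;
import java.util.Map;

public class DenseEncodeDemo {
    public static void main(String[] args) {
        int[] keys = {100, 101, 103}; // every window seen anywhere in the profile, sorted
        Map<Integer, Integer> index = new HashMap<>();
        for (int i = 0; i < keys.length; i++) {
            index.put(keys[i], i);
        }

        Map<Integer, Long> nodeTimes = new HashMap<>(); // one node's times, in microseconds
        nodeTimes.put(100, 1_500L);
        nodeTimes.put(103, 250L);

        double[] encoded = new double[keys.length]; // zero-filled for absent windows
        nodeTimes.forEach((window, micros) -> encoded[index.get(window)] = micros / 1000d);

        System.out.println(java.util.Arrays.toString(encoded)); // [1.5, 0.0, 0.25]
    }
}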
+ * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.sampler.window; + +import me.lucko.spark.common.SparkPlatform; +import me.lucko.spark.common.monitor.cpu.CpuMonitor; +import me.lucko.spark.common.monitor.tick.TickStatistics; +import me.lucko.spark.common.tick.TickHook; +import me.lucko.spark.common.util.RollingAverage; +import me.lucko.spark.proto.SparkProtos; + +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.atomic.AtomicInteger; + +/** + * Collects statistics for each profiling window. + */ +public class WindowStatisticsCollector { + private static final SparkProtos.WindowStatistics ZERO = SparkProtos.WindowStatistics.newBuilder().build(); + + /** The platform */ + private final SparkPlatform platform; + + /** Map of profiling window -> statistics */ + private final Map stats; + + private TickCounter tickCounter; + + public WindowStatisticsCollector(SparkPlatform platform) { + this.platform = platform; + this.stats = new ConcurrentHashMap<>(); + } + + /** + * Indicates to the statistics collector that it should count the number + * of ticks in each window using the provided {@link TickHook}. + * + * @param hook the tick hook + */ + public void startCountingTicks(TickHook hook) { + this.tickCounter = new NormalTickCounter(this.platform, hook); + } + + /** + * Indicates to the statistics collector that it should count the number + * of ticks in each window, according to how many times the + * {@link ExplicitTickCounter#increment()} method is called. + * + * @param hook the tick hook + * @return the counter + */ + public ExplicitTickCounter startCountingTicksExplicit(TickHook hook) { + ExplicitTickCounter counter = new ExplicitTickCounter(this.platform, hook); + this.tickCounter = counter; + return counter; + } + + public void stop() { + if (this.tickCounter != null) { + this.tickCounter.stop(); + } + } + + /** + * Gets the total number of ticks that have passed between the time + * when the profiler started and stopped. + * + *
<p>Importantly, note that this metric is different to the total number of ticks in a window + * (which is recorded by {@link SparkProtos.WindowStatistics#getTicks()}) or the total number + * of observed ticks if the 'only-ticks-over' aggregator is being used + * (which is recorded by {@link SparkProtos.WindowStatistics#getTicks()} + * and {@link ExplicitTickCounter#getTotalCountedTicks()}).</p>
+ * + * @return the total number of ticks in the profile + */ + public int getTotalTicks() { + return this.tickCounter == null ? -1 : this.tickCounter.getTotalTicks(); + } + + /** + * Measures statistics for the given window if none have been recorded yet. + * + * @param window the window + */ + public void measureNow(int window) { + this.stats.computeIfAbsent(window, w -> measure()); + } + + /** + * Ensures that the exported map has statistics (even if they are zeroed) for all windows. + * + * @param windows the expected windows + */ + public void ensureHasStatisticsForAllWindows(int[] windows) { + for (int window : windows) { + this.stats.computeIfAbsent(window, w -> ZERO); + } + } + + public Map export() { + return this.stats; + } + + /** + * Measures current statistics, where possible averaging over the last minute. (1 min = 1 window) + * + * @return the current statistics + */ + private SparkProtos.WindowStatistics measure() { + SparkProtos.WindowStatistics.Builder builder = SparkProtos.WindowStatistics.newBuilder(); + + TickStatistics tickStatistics = this.platform.getTickStatistics(); + if (tickStatistics != null) { + builder.setTps(tickStatistics.tps1Min()); + + RollingAverage mspt = tickStatistics.duration1Min(); + if (mspt != null) { + builder.setMsptMedian(mspt.median()); + builder.setMsptMax(mspt.max()); + } + } + + if (this.tickCounter != null) { + int ticks = this.tickCounter.getCountedTicksThisWindowAndReset(); + builder.setTicks(ticks); + } + + builder.setCpuProcess(CpuMonitor.processLoad1MinAvg()); + builder.setCpuSystem(CpuMonitor.systemLoad1MinAvg()); + + return builder.build(); + } + + /** + * Responsible for counting the number of ticks in a profile/window. + */ + public interface TickCounter { + + /** + * Stop the counter. + */ + void stop(); + + /** + * Get the total number of ticks. + * + *
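measureNow above relies on ConcurrentHashMap#computeIfAbsent for idempotency: the measurement supplier runs at most once per window, no matter how many samples land in it. A sketch of that behaviour (a String payload stands in here for the WindowStatistics proto):

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class MeasureOnce {
    private final Map<Integer, String> stats = new ConcurrentHashMap<>();
    private int measurements = 0;

    public void measureNow(int window) {
        this.stats.computeIfAbsent(window, w -> "measurement #" + (++this.measurements));
    }

    public static void main(String[] args) {
        MeasureOnce collector = new MeasureOnce();
        collector.measureNow(7);
        collector.measureNow(7); // no-op: window 7 already has statistics
        collector.measureNow(8);
        System.out.println(collector.stats); // {7=measurement #1, 8=measurement #2}
    }
}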
<p>See {@link WindowStatisticsCollector#getTotalTicks()} for a longer explanation + * of what this means exactly.</p>
+ * + * @return the total ticks + */ + int getTotalTicks(); + + /** + * Gets the total number of ticks counted in the last window, + * and resets the counter to zero. + * + * @return the number of ticks counted since the last time this method was called + */ + int getCountedTicksThisWindowAndReset(); + } + + private static abstract class BaseTickCounter implements TickCounter { + protected final SparkPlatform platform; + protected final TickHook tickHook; + + /** The game tick when sampling first began */ + private final int startTick; + + /** The game tick when sampling stopped */ + private int stopTick = -1; + + BaseTickCounter(SparkPlatform platform, TickHook tickHook) { + this.platform = platform; + this.tickHook = tickHook; + this.startTick = this.tickHook.getCurrentTick(); + } + + @Override + public void stop() { + this.stopTick = this.tickHook.getCurrentTick(); + } + + @Override + public int getTotalTicks() { + if (this.startTick == -1) { + throw new IllegalStateException("start tick not recorded"); + } + if (this.stopTick == -1) { + throw new IllegalStateException("stop tick not recorded"); + } + + return this.stopTick - this.startTick; + } + } + + /** + * Counts the number of ticks in a window using a {@link TickHook}. + */ + public static final class NormalTickCounter extends BaseTickCounter { + private int last; + + NormalTickCounter(SparkPlatform platform, TickHook tickHook) { + super(platform, tickHook); + this.last = this.tickHook.getCurrentTick(); + } + + @Override + public int getCountedTicksThisWindowAndReset() { + synchronized (this) { + int now = this.tickHook.getCurrentTick(); + int ticks = now - this.last; + this.last = now; + return ticks; + } + } + } + + /** + * Counts the number of ticks in a window according to the number of times + * {@link #increment()} is called. + * + * Used by the {@link me.lucko.spark.common.sampler.java.TickedDataAggregator}. 
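Two counting strategies sit here: NormalTickCounter above measures ticks per window as a delta against the hook's monotonically increasing tick counter, resetting the baseline on every read, while ExplicitTickCounter (immediately below) counts explicit increment() calls instead. The delta-and-reset pattern reduced to its essentials (FakeTickHook is a stand-in for spark's TickHook):

public class TickDeltaDemo {
    static class FakeTickHook {
        private int tick = 0;
        int getCurrentTick() { return this.tick; }
        void advance(int n) { this.tick += n; }
    }

    private final FakeTickHook hook;
    private int last; // the tick count at the end of the previous window

    TickDeltaDemo(FakeTickHook hook) {
        this.hook = hook;
        this.last = hook.getCurrentTick();
    }

    synchronized int getCountedTicksThisWindowAndReset() {
        int now = this.hook.getCurrentTick();
        int ticks = now - this.last;
        this.last = now;
        return ticks;
    }

    public static void main(String[] args) {
        FakeTickHook hook = new FakeTickHook();
        TickDeltaDemo counter = new TickDeltaDemo(hook);
        hook.advance(1200); // roughly one minute of a 20 tps server
        System.out.println(counter.getCountedTicksThisWindowAndReset()); // 1200
        System.out.println(counter.getCountedTicksThisWindowAndReset()); // 0 until more ticks pass
    }
}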
+ */ + public static final class ExplicitTickCounter extends BaseTickCounter { + private final AtomicInteger counted = new AtomicInteger(); + private final AtomicInteger total = new AtomicInteger(); + + ExplicitTickCounter(SparkPlatform platform, TickHook tickHook) { + super(platform, tickHook); + } + + public void increment() { + this.counted.incrementAndGet(); + this.total.incrementAndGet(); + } + + public int getTotalCountedTicks() { + return this.total.get(); + } + + @Override + public int getCountedTicksThisWindowAndReset() { + return this.counted.getAndSet(0); + } + } + +} diff --git a/spark-common/src/main/proto/spark/spark.proto b/spark-common/src/main/proto/spark/spark.proto index 2ea341f..be76bd7 100644 --- a/spark-common/src/main/proto/spark/spark.proto +++ b/spark-common/src/main/proto/spark/spark.proto @@ -152,6 +152,15 @@ message WorldStatistics { } } +message WindowStatistics { + int32 ticks = 1; + double cpu_process = 2; + double cpu_system = 3; + double tps = 4; + double mspt_median = 5; + double mspt_max = 6; +} + message RollingAverageValues { double mean = 1; double max = 2; diff --git a/spark-common/src/main/proto/spark/spark_sampler.proto b/spark-common/src/main/proto/spark/spark_sampler.proto index 3f30fb2..2cb08f1 100644 --- a/spark-common/src/main/proto/spark/spark_sampler.proto +++ b/spark-common/src/main/proto/spark/spark_sampler.proto @@ -13,6 +13,8 @@ message SamplerData { map class_sources = 3; // optional map method_sources = 4; // optional map line_sources = 5; // optional + repeated int32 time_windows = 6; + map time_window_statistics = 7; } message SamplerMetadata { @@ -69,16 +71,25 @@ message SamplerMetadata { message ThreadNode { string name = 1; - double time = 2; + + // replaced + reserved 2; + reserved "time"; + repeated StackTraceNode children = 3; + repeated double times = 4; } message StackTraceNode { - double time = 1; + // replaced + reserved 1; + reserved "time"; + repeated StackTraceNode children = 2; string class_name = 3; string method_name = 4; int32 parent_line_number = 5; // optional int32 line_number = 6; // optional string method_desc = 7; // optional + repeated double times = 8; } -- cgit From fafc14712fa78001b431241bd961ca429d6f74bc Mon Sep 17 00:00:00 2001 From: Luck Date: Thu, 27 Oct 2022 23:35:27 +0100 Subject: Tidy up command feedback messages --- .../java/me/lucko/spark/common/SparkPlatform.java | 23 ++++-- .../common/command/modules/SamplerModule.java | 91 +++++++++++++--------- 2 files changed, 74 insertions(+), 40 deletions(-) (limited to 'spark-common/src') diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java index 2790a3c..4c3875c 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java +++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java @@ -52,6 +52,7 @@ import me.lucko.spark.common.util.BytebinClient; import me.lucko.spark.common.util.Configuration; import me.lucko.spark.common.util.TemporaryFiles; +import net.kyori.adventure.text.Component; import net.kyori.adventure.text.event.ClickEvent; import java.io.IOException; @@ -78,7 +79,6 @@ import static net.kyori.adventure.text.format.NamedTextColor.GRAY; import static net.kyori.adventure.text.format.NamedTextColor.RED; import static net.kyori.adventure.text.format.NamedTextColor.WHITE; import static net.kyori.adventure.text.format.TextDecoration.BOLD; -import static net.kyori.adventure.text.format.TextDecoration.UNDERLINED; /** * 
Abstract spark implementation used by all platforms. @@ -362,14 +362,15 @@ public class SparkPlatform { .append(text("v" + getPlugin().getVersion(), GRAY)) .build() ); + + String helpCmd = "/" + getPlugin().getCommandName() + " help"; resp.replyPrefixed(text() .color(GRAY) - .append(text("Use ")) + .append(text("Run ")) .append(text() - .content("/" + getPlugin().getCommandName() + " help") + .content(helpCmd) .color(WHITE) - .decoration(UNDERLINED, true) - .clickEvent(ClickEvent.runCommand("/" + getPlugin().getCommandName() + " help")) + .clickEvent(ClickEvent.runCommand(helpCmd)) .build() ) .append(text(" to view usage information.")) @@ -462,6 +463,18 @@ public class SparkPlatform { } } } + + sender.reply(Component.empty()); + sender.replyPrefixed(text() + .append(text("For full usage information, please go to: ")) + .append(text() + .content("https://spark.lucko.me/docs/Command-Usage") + .color(WHITE) + .clickEvent(ClickEvent.openUrl("https://spark.lucko.me/docs/Command-Usage")) + .build() + ) + .build() + ); } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java index c1e4981..6dbf913 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java @@ -42,6 +42,7 @@ import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.util.MethodDisambiguator; import me.lucko.spark.proto.SparkSamplerProtos; +import net.kyori.adventure.text.Component; import net.kyori.adventure.text.event.ClickEvent; import java.io.IOException; @@ -62,6 +63,7 @@ import static net.kyori.adventure.text.format.NamedTextColor.DARK_GRAY; import static net.kyori.adventure.text.format.NamedTextColor.GOLD; import static net.kyori.adventure.text.format.NamedTextColor.GRAY; import static net.kyori.adventure.text.format.NamedTextColor.RED; +import static net.kyori.adventure.text.format.NamedTextColor.WHITE; public class SamplerModule implements CommandModule { private static final String SPARK_SAMPLER_MEDIA_TYPE = "application/x-spark-sampler"; @@ -83,17 +85,11 @@ public class SamplerModule implements CommandModule { .aliases("profiler", "sampler") .argumentUsage("info", null) .argumentUsage("stop", null) - .argumentUsage("cancel", null) - .argumentUsage("interval", "interval millis") + .argumentUsage("timeout", "timeout seconds") + .argumentUsage("thread *", null) .argumentUsage("thread", "thread name") .argumentUsage("only-ticks-over", "tick length millis") - .argumentUsage("timeout", "timeout seconds") - .argumentUsage("regex --thread", "thread regex") - .argumentUsage("combine-all", null) - .argumentUsage("not-combined", null) - .argumentUsage("force-java-sampler", null) - .argumentUsage("stop --comment", "comment") - .argumentUsage("stop --save-to-file", null) + .argumentUsage("interval", "interval millis") .executor(this::profiler) .tabCompleter((platform, sender, arguments) -> { if (arguments.contains("--info") || arguments.contains("--cancel")) { @@ -120,7 +116,7 @@ public class SamplerModule implements CommandModule { private void profiler(SparkPlatform platform, CommandSender sender, CommandResponseHandler resp, Arguments arguments) { if (arguments.boolFlag("info")) { - profilerInfo(resp); + profilerInfo(platform, resp); return; } @@ -138,6 +134,11 @@ public class SamplerModule implements CommandModule { } private void profilerStart(SparkPlatform 
platform, CommandSender sender, CommandResponseHandler resp, Arguments arguments) { + if (this.activeSampler != null) { + profilerInfo(platform, resp); + return; + } + int timeoutSeconds = arguments.intFlag("timeout"); if (timeoutSeconds != -1 && timeoutSeconds <= 10) { resp.replyPrefixed(text("The specified timeout is not long enough for accurate results to be formed. " + @@ -194,12 +195,7 @@ public class SamplerModule implements CommandModule { } } - if (this.activeSampler != null) { - resp.replyPrefixed(text("An active profiler is already running.")); - return; - } - - resp.broadcastPrefixed(text("Initializing a new profiler, please wait...")); + resp.broadcastPrefixed(text("Starting a new profiler, please wait...")); SamplerBuilder builder = new SamplerBuilder(); builder.threadDumper(threadDumper); @@ -217,13 +213,16 @@ public class SamplerModule implements CommandModule { Sampler sampler = this.activeSampler = builder.start(platform); resp.broadcastPrefixed(text() - .append(text("Profiler now active!", GOLD)) + .append(text("Profiler is now running!", GOLD)) .append(space()) .append(text("(" + (sampler instanceof AsyncSampler ? "async" : "built-in java") + ")", DARK_GRAY)) .build() ); + if (timeoutSeconds == -1) { - resp.broadcastPrefixed(text("Use '/" + platform.getPlugin().getCommandName() + " profiler --stop' to stop profiling and upload the results.")); + resp.broadcastPrefixed(text("It will run in the background until it is stopped by an admin.")); + resp.broadcastPrefixed(text("To stop the profiler and upload the results, run:")); + resp.broadcastPrefixed(cmdPrompt("/" + platform.getPlugin().getCommandName() + " profiler --stop")); } else { resp.broadcastPrefixed(text("The results will be automatically returned after the profiler has been running for " + timeoutSeconds + " seconds.")); } @@ -258,20 +257,28 @@ public class SamplerModule implements CommandModule { } } - private void profilerInfo(CommandResponseHandler resp) { + private void profilerInfo(SparkPlatform platform, CommandResponseHandler resp) { if (this.activeSampler == null) { - resp.replyPrefixed(text("There isn't an active profiler running.")); + resp.replyPrefixed(text("The profiler isn't running!")); + resp.replyPrefixed(text("To start a new one, run:")); + resp.replyPrefixed(cmdPrompt("/" + platform.getPlugin().getCommandName() + " profiler")); } else { + resp.replyPrefixed(text("Profiler is already running!", GOLD)); + + long runningTime = (System.currentTimeMillis() - this.activeSampler.getStartTime()) / 1000L; + resp.replyPrefixed(text("So far, it has profiled for " + runningTime + " seconds.")); + long timeout = this.activeSampler.getAutoEndTime(); if (timeout == -1) { - resp.replyPrefixed(text("There is an active profiler currently running, with no defined timeout.")); + resp.replyPrefixed(text("To stop the profiler and upload the results, run:")); + resp.replyPrefixed(cmdPrompt("/" + platform.getPlugin().getCommandName() + " profiler --stop")); } else { long timeoutDiff = (timeout - System.currentTimeMillis()) / 1000L; - resp.replyPrefixed(text("There is an active profiler currently running, due to timeout in " + timeoutDiff + " seconds.")); + resp.replyPrefixed(text("It is due to complete automatically and upload results in " + timeoutDiff + " seconds.")); } - long runningTime = (System.currentTimeMillis() - this.activeSampler.getStartTime()) / 1000L; - resp.replyPrefixed(text("It has been profiling for " + runningTime + " seconds so far.")); + resp.replyPrefixed(text("To cancel the profiler without 
uploading the results, run:")); + resp.replyPrefixed(cmdPrompt("/" + platform.getPlugin().getCommandName() + " profiler --cancel")); } } @@ -280,7 +287,7 @@ public class SamplerModule implements CommandModule { resp.replyPrefixed(text("There isn't an active profiler running.")); } else { close(); - resp.broadcastPrefixed(text("The active profiler has been cancelled.", GOLD)); + resp.broadcastPrefixed(text("Profiler has been cancelled.", GOLD)); } } @@ -289,11 +296,17 @@ public class SamplerModule implements CommandModule { resp.replyPrefixed(text("There isn't an active profiler running.")); } else { this.activeSampler.stop(); - resp.broadcastPrefixed(text("The active profiler has been stopped! Uploading results...")); + + boolean saveToFile = arguments.boolFlag("save-to-file"); + if (saveToFile) { + resp.broadcastPrefixed(text("Stopping the profiler & saving results, please wait...")); + } else { + resp.broadcastPrefixed(text("Stopping the profiler & uploading results, please wait...")); + } + String comment = Iterables.getFirst(arguments.stringFlag("comment"), null); MethodDisambiguator methodDisambiguator = new MethodDisambiguator(); MergeMode mergeMode = arguments.boolFlag("separate-parent-calls") ? MergeMode.separateParentCalls(methodDisambiguator) : MergeMode.sameMethod(methodDisambiguator); - boolean saveToFile = arguments.boolFlag("save-to-file"); handleUpload(platform, resp, this.activeSampler, comment, mergeMode, saveToFile); this.activeSampler = null; } @@ -310,7 +323,7 @@ public class SamplerModule implements CommandModule { String key = platform.getBytebinClient().postContent(output, SPARK_SAMPLER_MEDIA_TYPE).key(); String url = platform.getViewerUrl() + key; - resp.broadcastPrefixed(text("Profiler results:", GOLD)); + resp.broadcastPrefixed(text("Profiler stopped & upload complete!", GOLD)); resp.broadcast(text() .content(url) .color(GRAY) @@ -331,13 +344,9 @@ public class SamplerModule implements CommandModule { try { Files.write(file, output.toByteArray()); - resp.broadcastPrefixed(text() - .content("Profile written to: ") - .color(GOLD) - .append(text(file.toString(), GRAY)) - .build() - ); - resp.broadcastPrefixed(text("You can read the profile file using the viewer web-app - " + platform.getViewerUrl(), GRAY)); + resp.broadcastPrefixed(text("Profiler stopped & save complete!", GOLD)); + resp.broadcastPrefixed(text("Data has been written to: " + file)); + resp.broadcastPrefixed(text("You can view the profile file using the web app @ " + platform.getViewerUrl(), GRAY)); platform.getActivityLog().addToLog(Activity.fileActivity(resp.sender(), System.currentTimeMillis(), "Profiler", file.toString())); } catch (IOException e) { @@ -346,4 +355,16 @@ public class SamplerModule implements CommandModule { } } } + + private static Component cmdPrompt(String cmd) { + return text() + .append(text(" ")) + .append(text() + .content(cmd) + .color(WHITE) + .clickEvent(ClickEvent.runCommand(cmd)) + .build() + ) + .build(); + } } -- cgit From 1d8e0f3a0a115a70146c1462a68990508a71af6e Mon Sep 17 00:00:00 2001 From: Luck Date: Sun, 13 Nov 2022 09:39:33 +0000 Subject: Configurable max stack depth --- .../src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'spark-common/src') diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java index fe1afcd..e6f6cf5 100644 --- 
a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java @@ -37,7 +37,7 @@ import java.util.stream.IntStream; */ public abstract class AbstractNode { - protected static final int MAX_STACK_DEPTH = 300; + protected static final int MAX_STACK_DEPTH = Integer.getInteger("spark.maxStackDepth", 300); /** A map of the nodes children */ private final Map children = new ConcurrentHashMap<>(); -- cgit From 5af2e6fb4cbd21f836c7ad56100b3c4535a831de Mon Sep 17 00:00:00 2001 From: Luck Date: Sun, 13 Nov 2022 12:30:59 +0000 Subject: Remove recursion in protobuf data --- .../spark/common/sampler/node/StackTraceNode.java | 6 +- .../spark/common/sampler/node/ThreadNode.java | 81 +++++++++++++++++++++- .../spark/common/util/IndexedListBuilder.java | 43 ++++++++++++ .../src/main/proto/spark/spark_sampler.proto | 7 +- 4 files changed, 129 insertions(+), 8 deletions(-) create mode 100644 spark-common/src/main/java/me/lucko/spark/common/util/IndexedListBuilder.java (limited to 'spark-common/src') diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java index ed938d5..c0dcc5b 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java @@ -65,7 +65,7 @@ public final class StackTraceNode extends AbstractNode { return this.description.parentLineNumber; } - public SparkSamplerProtos.StackTraceNode toProto(MergeMode mergeMode, ProtoTimeEncoder timeEncoder) { + public SparkSamplerProtos.StackTraceNode toProto(MergeMode mergeMode, ProtoTimeEncoder timeEncoder, Iterable childrenRefs) { SparkSamplerProtos.StackTraceNode.Builder proto = SparkSamplerProtos.StackTraceNode.newBuilder() .setClassName(this.description.className) .setMethodName(this.description.methodName); @@ -91,9 +91,7 @@ public final class StackTraceNode extends AbstractNode { .ifPresent(proto::setMethodDesc); } - for (StackTraceNode child : exportChildren(mergeMode)) { - proto.addChildren(child.toProto(mergeMode, timeEncoder)); - } + proto.addAllChildrenRefs(childrenRefs); return proto.build(); } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java index 1dce523..9faece6 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java @@ -21,8 +21,14 @@ package me.lucko.spark.common.sampler.node; import me.lucko.spark.common.sampler.window.ProtoTimeEncoder; +import me.lucko.spark.common.util.IndexedListBuilder; import me.lucko.spark.proto.SparkSamplerProtos; +import java.util.ArrayDeque; +import java.util.Deque; +import java.util.LinkedList; +import java.util.List; + /** * The root of a sampling stack for a given thread / thread group. */ @@ -92,10 +98,83 @@ public final class ThreadNode extends AbstractNode { proto.addTimes(time); } + // When converting to a proto, we change the data structure from a recursive tree to an array. 
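Stepping back to the short commit above ('Configurable max stack depth'): Integer.getInteger reads a JVM system property and falls back to the supplied default when the property is unset or not parseable, so the 300-frame limit could be raised at startup with a flag such as -Dspark.maxStackDepth=500 (the value 500 is just an example). Standard JDK behaviour, demonstrated in isolation:

public class MaxDepthDemo {
    public static void main(String[] args) {
        System.setProperty("spark.maxStackDepth", "500");
        int depth = Integer.getInteger("spark.maxStackDepth", 300);
        System.out.println(depth); // 500; would print 300 if the property were absent
    }
}

Since MAX_STACK_DEPTH is a static final field, the property is read once when AbstractNode is first loaded, so it must be set on the JVM command line rather than changed at runtime.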
+ // Effectively, instead of: + // + // { + // data: 'one', + // children: [ + // { + // data: 'two', + // children: [{ data: 'four' }] + // }, + // { data: 'three' } + // ] + // } + // + // we transmit: + // + // [ + // { data: 'one', children: [1, 2] }, + // { data: 'two', children: [3] } + // { data: 'three', children: [] } + // { data: 'four', children: [] } + // ] + // + + // the flattened array of nodes + IndexedListBuilder nodesArray = new IndexedListBuilder<>(); + + // Perform a depth-first post order traversal of the tree + Deque stack = new ArrayDeque<>(); + + // push the thread node's children to the stack + List childrenRefs = new LinkedList<>(); for (StackTraceNode child : exportChildren(mergeMode)) { - proto.addChildren(child.toProto(mergeMode, timeEncoder)); + stack.push(new Node(child, childrenRefs)); + } + + Node node; + while (!stack.isEmpty()) { + node = stack.peek(); + + // on the first visit, just push this node's children and leave it on the stack + if (node.firstVisit) { + for (StackTraceNode child : node.stackTraceNode.exportChildren(mergeMode)) { + stack.push(new Node(child, node.childrenRefs)); + } + node.firstVisit = false; + continue; + } + + // convert StackTraceNode to a proto + // - at this stage, we have already visited this node's children + // - the refs for each child are stored in node.childrenRefs + SparkSamplerProtos.StackTraceNode childProto = node.stackTraceNode.toProto(mergeMode, timeEncoder, node.childrenRefs); + + // add the child proto to the nodes array, and record the ref in the parent + int childIndex = nodesArray.add(childProto); + node.parentChildrenRefs.add(childIndex); + + // pop from the stack + stack.pop(); } + proto.addAllChildrenRefs(childrenRefs); + proto.addAllChildren(nodesArray.build()); + return proto.build(); } + + private static final class Node { + private final StackTraceNode stackTraceNode; + private boolean firstVisit = true; + private final List childrenRefs = new LinkedList<>(); + private final List parentChildrenRefs; + + private Node(StackTraceNode node, List parentChildrenRefs) { + this.stackTraceNode = node; + this.parentChildrenRefs = parentChildrenRefs; + } + } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/IndexedListBuilder.java b/spark-common/src/main/java/me/lucko/spark/common/util/IndexedListBuilder.java new file mode 100644 index 0000000..b2315f9 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/util/IndexedListBuilder.java @@ -0,0 +1,43 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.util; + +import java.util.ArrayList; +import java.util.List; + +/** + * List builder that returns the index of the inserted element. 
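The iterative traversal above serializes the call tree without recursion: every node is stored exactly once in a flat list, and parents reference their children by index. A standalone sketch of writing and reading that shape (indices here are assigned by hand for clarity; spark's post-order traversal assigns them as nodes are popped off the stack):

import java.util.ArrayList;
import java.util.List;

public class FlatTreeDemo {
    static class Flat {
        final String data;
        final List<Integer> childrenRefs = new ArrayList<>();
        Flat(String data) { this.data = data; }
    }

    public static void main(String[] args) {
        List<Flat> nodes = new ArrayList<>();

        // the example tree from the comment: one -> (two -> four, three)
        Flat four = new Flat("four");
        nodes.add(four);                 // index 0
        Flat two = new Flat("two");
        two.childrenRefs.add(0);         // 'two' -> 'four'
        nodes.add(two);                  // index 1
        Flat three = new Flat("three");
        nodes.add(three);                // index 2
        Flat one = new Flat("one");
        one.childrenRefs.add(1);         // 'one' -> 'two'
        one.childrenRefs.add(2);         // 'one' -> 'three'
        nodes.add(one);                  // index 3

        print(nodes, 3, 0); // resolve the refs back into a tree view
    }

    static void print(List<Flat> nodes, int ref, int indent) {
        Flat node = nodes.get(ref);
        System.out.println(" ".repeat(indent) + node.data);
        for (int child : node.childrenRefs) {
            print(nodes, child, indent + 2);
        }
    }
}

Note that children land in the list before their parents, matching the post-order traversal; only the root thread node keeps a top-level childrenRefs list of its own.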
+ * + * @param generic type + */ +public class IndexedListBuilder { + private int i = 0; + private final List nodes = new ArrayList<>(); + + public int add(T node) { + this.nodes.add(node); + return this.i++; + } + + public List build() { + return this.nodes; + } +} diff --git a/spark-common/src/main/proto/spark/spark_sampler.proto b/spark-common/src/main/proto/spark/spark_sampler.proto index 2cb08f1..245da37 100644 --- a/spark-common/src/main/proto/spark/spark_sampler.proto +++ b/spark-common/src/main/proto/spark/spark_sampler.proto @@ -78,18 +78,19 @@ message ThreadNode { repeated StackTraceNode children = 3; repeated double times = 4; + repeated int32 children_refs = 5; } message StackTraceNode { // replaced - reserved 1; - reserved "time"; + reserved 1, 2; + reserved "time", "children"; - repeated StackTraceNode children = 2; string class_name = 3; string method_name = 4; int32 parent_line_number = 5; // optional int32 line_number = 6; // optional string method_desc = 7; // optional repeated double times = 8; + repeated int32 children_refs = 9; } -- cgit From 76f43ab59d3839600bd9e040ff2d09199ebe778a Mon Sep 17 00:00:00 2001 From: Luck Date: Sun, 13 Nov 2022 19:15:54 +0000 Subject: Limit profile length to 1 hour --- .../java/me/lucko/spark/common/SparkPlatform.java | 8 +++ .../common/command/modules/SamplerModule.java | 50 ++++++-------- .../lucko/spark/common/sampler/SamplerBuilder.java | 23 +++++-- .../spark/common/sampler/SamplerContainer.java | 76 ++++++++++++++++++++++ .../sampler/aggregator/AbstractDataAggregator.java | 6 ++ .../common/sampler/aggregator/DataAggregator.java | 8 +++ .../spark/common/sampler/async/AsyncSampler.java | 14 +++- .../spark/common/sampler/java/JavaSampler.java | 9 ++- .../spark/common/sampler/node/AbstractNode.java | 28 +++++--- .../spark/common/sampler/node/ThreadNode.java | 44 +++++++++++++ .../sampler/window/ProfilingWindowUtils.java | 38 ++++++++++- .../common/sampler/window/ProtoTimeEncoder.java | 7 +- 12 files changed, 254 insertions(+), 57 deletions(-) create mode 100644 spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerContainer.java (limited to 'spark-common/src') diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java index 4c3875c..a015e42 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java +++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java @@ -45,6 +45,7 @@ import me.lucko.spark.common.monitor.ping.PingStatistics; import me.lucko.spark.common.monitor.ping.PlayerPingProvider; import me.lucko.spark.common.monitor.tick.TickStatistics; import me.lucko.spark.common.platform.PlatformStatisticsProvider; +import me.lucko.spark.common.sampler.SamplerContainer; import me.lucko.spark.common.sampler.source.ClassSourceLookup; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.tick.TickReporter; @@ -98,6 +99,7 @@ public class SparkPlatform { private final List commands; private final ReentrantLock commandExecuteLock = new ReentrantLock(true); private final ActivityLog activityLog; + private final SamplerContainer samplerContainer; private final TickHook tickHook; private final TickReporter tickReporter; private final TickStatistics tickStatistics; @@ -137,6 +139,8 @@ public class SparkPlatform { this.activityLog = new ActivityLog(plugin.getPluginDirectory().resolve("activity.json")); this.activityLog.load(); + this.samplerContainer = new SamplerContainer(); + this.tickHook = 
plugin.createTickHook(); this.tickReporter = plugin.createTickReporter(); this.tickStatistics = this.tickHook != null || this.tickReporter != null ? new TickStatistics() : null; @@ -229,6 +233,10 @@ public class SparkPlatform { return this.activityLog; } + public SamplerContainer getSamplerContainer() { + return this.samplerContainer; + } + public TickHook getTickHook() { return this.tickHook; } diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java index 6dbf913..00bf1a9 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java @@ -68,17 +68,6 @@ import static net.kyori.adventure.text.format.NamedTextColor.WHITE; public class SamplerModule implements CommandModule { private static final String SPARK_SAMPLER_MEDIA_TYPE = "application/x-spark-sampler"; - /** The sampler instance currently running, if any */ - private Sampler activeSampler = null; - - @Override - public void close() { - if (this.activeSampler != null) { - this.activeSampler.stop(); - this.activeSampler = null; - } - } - @Override public void registerCommands(Consumer consumer) { consumer.accept(Command.builder() @@ -121,7 +110,7 @@ public class SamplerModule implements CommandModule { } if (arguments.boolFlag("cancel")) { - profilerCancel(resp); + profilerCancel(platform, resp); return; } @@ -134,7 +123,7 @@ public class SamplerModule implements CommandModule { } private void profilerStart(SparkPlatform platform, CommandSender sender, CommandResponseHandler resp, Arguments arguments) { - if (this.activeSampler != null) { + if (platform.getSamplerContainer().getActiveSampler() != null) { profilerInfo(platform, resp); return; } @@ -210,7 +199,8 @@ public class SamplerModule implements CommandModule { if (ticksOver != -1) { builder.ticksOver(ticksOver, tickHook); } - Sampler sampler = this.activeSampler = builder.start(platform); + Sampler sampler = builder.start(platform); + platform.getSamplerContainer().setActiveSampler(sampler); resp.broadcastPrefixed(text() .append(text("Profiler is now running!", GOLD)) @@ -227,7 +217,7 @@ public class SamplerModule implements CommandModule { resp.broadcastPrefixed(text("The results will be automatically returned after the profiler has been running for " + timeoutSeconds + " seconds.")); } - CompletableFuture future = this.activeSampler.getFuture(); + CompletableFuture future = sampler.getFuture(); // send message if profiling fails future.whenCompleteAsync((s, throwable) -> { @@ -238,11 +228,7 @@ public class SamplerModule implements CommandModule { }); // set activeSampler to null when complete. 
- future.whenCompleteAsync((s, throwable) -> { - if (sampler == this.activeSampler) { - this.activeSampler = null; - } - }); + sampler.getFuture().whenCompleteAsync((s, throwable) -> platform.getSamplerContainer().unsetActiveSampler(s)); // await the result if (timeoutSeconds != -1) { @@ -258,17 +244,18 @@ public class SamplerModule implements CommandModule { } private void profilerInfo(SparkPlatform platform, CommandResponseHandler resp) { - if (this.activeSampler == null) { + Sampler sampler = platform.getSamplerContainer().getActiveSampler(); + if (sampler == null) { resp.replyPrefixed(text("The profiler isn't running!")); resp.replyPrefixed(text("To start a new one, run:")); resp.replyPrefixed(cmdPrompt("/" + platform.getPlugin().getCommandName() + " profiler")); } else { resp.replyPrefixed(text("Profiler is already running!", GOLD)); - long runningTime = (System.currentTimeMillis() - this.activeSampler.getStartTime()) / 1000L; + long runningTime = (System.currentTimeMillis() - sampler.getStartTime()) / 1000L; resp.replyPrefixed(text("So far, it has profiled for " + runningTime + " seconds.")); - long timeout = this.activeSampler.getAutoEndTime(); + long timeout = sampler.getAutoEndTime(); if (timeout == -1) { resp.replyPrefixed(text("To stop the profiler and upload the results, run:")); resp.replyPrefixed(cmdPrompt("/" + platform.getPlugin().getCommandName() + " profiler --stop")); @@ -282,20 +269,24 @@ public class SamplerModule implements CommandModule { } } - private void profilerCancel(CommandResponseHandler resp) { - if (this.activeSampler == null) { + private void profilerCancel(SparkPlatform platform, CommandResponseHandler resp) { + Sampler sampler = platform.getSamplerContainer().getActiveSampler(); + if (sampler == null) { resp.replyPrefixed(text("There isn't an active profiler running.")); } else { - close(); + platform.getSamplerContainer().stopActiveSampler(); resp.broadcastPrefixed(text("Profiler has been cancelled.", GOLD)); } } private void profilerStop(SparkPlatform platform, CommandSender sender, CommandResponseHandler resp, Arguments arguments) { - if (this.activeSampler == null) { + Sampler sampler = platform.getSamplerContainer().getActiveSampler(); + + if (sampler == null) { resp.replyPrefixed(text("There isn't an active profiler running.")); } else { - this.activeSampler.stop(); + platform.getSamplerContainer().unsetActiveSampler(sampler); + sampler.stop(); boolean saveToFile = arguments.boolFlag("save-to-file"); if (saveToFile) { @@ -307,8 +298,7 @@ public class SamplerModule implements CommandModule { String comment = Iterables.getFirst(arguments.stringFlag("comment"), null); MethodDisambiguator methodDisambiguator = new MethodDisambiguator(); MergeMode mergeMode = arguments.boolFlag("separate-parent-calls") ? 
MergeMode.separateParentCalls(methodDisambiguator) : MergeMode.sameMethod(methodDisambiguator); - handleUpload(platform, resp, this.activeSampler, comment, mergeMode, saveToFile); - this.activeSampler = null; + handleUpload(platform, resp, sampler, comment, mergeMode, saveToFile); } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java index 52a7387..382950a 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java @@ -93,15 +93,28 @@ public class SamplerBuilder { } public Sampler start(SparkPlatform platform) { + boolean onlyTicksOverMode = this.ticksOver != -1 && this.tickHook != null; + boolean canUseAsyncProfiler = this.useAsyncProfiler && + !(this.ignoreSleeping || this.ignoreNative) && + !(this.threadDumper instanceof ThreadDumper.Regex) && + AsyncProfilerAccess.getInstance(platform).checkSupported(platform); + + int intervalMicros = (int) (this.samplingInterval * 1000d); Sampler sampler; - if (this.ticksOver != -1 && this.tickHook != null) { - sampler = new JavaSampler(platform, intervalMicros, this.threadDumper, this.threadGrouper, this.timeout, this.ignoreSleeping, this.ignoreNative, this.tickHook, this.ticksOver); - } else if (this.useAsyncProfiler && !(this.threadDumper instanceof ThreadDumper.Regex) && AsyncProfilerAccess.getInstance(platform).checkSupported(platform)) { - sampler = new AsyncSampler(platform, intervalMicros, this.threadDumper, this.threadGrouper, this.timeout); + if (onlyTicksOverMode) { + sampler = new JavaSampler(platform, intervalMicros, this.threadDumper, + this.threadGrouper, this.timeout, this.ignoreSleeping, this.ignoreNative, + this.tickHook, this.ticksOver); + + } else if (canUseAsyncProfiler) { + sampler = new AsyncSampler(platform, intervalMicros, this.threadDumper, + this.threadGrouper, this.timeout); + } else { - sampler = new JavaSampler(platform, intervalMicros, this.threadDumper, this.threadGrouper, this.timeout, this.ignoreSleeping, this.ignoreNative); + sampler = new JavaSampler(platform, intervalMicros, this.threadDumper, + this.threadGrouper, this.timeout, this.ignoreSleeping, this.ignoreNative); } sampler.start(); diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerContainer.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerContainer.java new file mode 100644 index 0000000..55913d8 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerContainer.java @@ -0,0 +1,76 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.sampler; + +import java.util.concurrent.atomic.AtomicReference; + +/** + * Container for the active sampler. 
+ */ +public class SamplerContainer implements AutoCloseable { + + private final AtomicReference activeSampler = new AtomicReference<>(); + + /** + * Gets the active sampler, or null if a sampler is not active. + * + * @return the active sampler + */ + public Sampler getActiveSampler() { + return this.activeSampler.get(); + } + + /** + * Sets the active sampler, throwing an exception if another sampler is already active. + * + * @param sampler the sampler + */ + public void setActiveSampler(Sampler sampler) { + if (!this.activeSampler.compareAndSet(null, sampler)) { + throw new IllegalStateException("Attempted to set active sampler when another was already active!"); + } + } + + /** + * Unsets the active sampler, if the provided sampler is active. + * + * @param sampler the sampler + */ + public void unsetActiveSampler(Sampler sampler) { + this.activeSampler.compareAndSet(sampler, null); + } + + /** + * Stops the active sampler, if there is one. + */ + public void stopActiveSampler() { + Sampler sampler = this.activeSampler.getAndSet(null); + if (sampler != null) { + sampler.stop(); + } + } + + @Override + public void close() { + stopActiveSampler(); + } + +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/AbstractDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/AbstractDataAggregator.java index ad9dee4..2c003e5 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/AbstractDataAggregator.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/AbstractDataAggregator.java @@ -27,6 +27,7 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; +import java.util.function.IntPredicate; /** * Abstract implementation of {@link DataAggregator}. @@ -51,6 +52,11 @@ public abstract class AbstractDataAggregator implements DataAggregator { return this.threadData.computeIfAbsent(group, ThreadNode::new); } + @Override + public void pruneData(IntPredicate timeWindowPredicate) { + this.threadData.values().removeIf(node -> node.removeTimeWindowsRecursively(timeWindowPredicate)); + } + @Override public List exportData() { List data = new ArrayList<>(this.threadData.values()); diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/DataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/DataAggregator.java index 5590a96..ed33204 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/DataAggregator.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/DataAggregator.java @@ -24,6 +24,7 @@ import me.lucko.spark.common.sampler.node.ThreadNode; import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata; import java.util.List; +import java.util.function.IntPredicate; /** * Aggregates sampling data. @@ -37,6 +38,13 @@ public interface DataAggregator { */ List exportData(); + /** + * Prunes windows of data from this aggregator if the given {@code timeWindowPredicate} returns true. + * + * @param timeWindowPredicate the predicate + */ + void pruneData(IntPredicate timeWindowPredicate); + /** * Gets metadata about the data aggregator instance. 
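SamplerContainer above uses AtomicReference#compareAndSet so that at most one sampler can own the active slot at a time, and only the owning sampler can vacate it, without any locking. The semantics in isolation (Strings stand in for Sampler instances; compareAndSet compares references, and string literals are interned, so the demo behaves as expected):

import java.util.concurrent.atomic.AtomicReference;

public class SingleOwnerDemo {
    public static void main(String[] args) {
        AtomicReference<String> active = new AtomicReference<>();

        System.out.println(active.compareAndSet(null, "sampler-1")); // true - slot was empty
        System.out.println(active.compareAndSet(null, "sampler-2")); // false - already occupied

        // unsetting only succeeds for the sampler that actually owns the slot
        System.out.println(active.compareAndSet("sampler-2", null)); // false
        System.out.println(active.compareAndSet("sampler-1", null)); // true
    }
}

This is why setActiveSampler can throw on a double start, while unsetActiveSampler is safely a no-op for a sampler that has already been replaced.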
*/ diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java index 2c9bb5f..cbc81c7 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java @@ -76,15 +76,20 @@ public class AsyncSampler extends AbstractSampler { this.windowStatisticsCollector.startCountingTicks(tickHook); } - int window = ProfilingWindowUtils.unixMillisToWindow(System.currentTimeMillis()); + int window = ProfilingWindowUtils.windowNow(); AsyncProfilerJob job = this.profilerAccess.startNewProfilerJob(); job.init(this.platform, this.interval, this.threadDumper, window); job.start(); this.currentJob = job; - // rotate the sampler job every minute to put data into a new window - this.scheduler.scheduleAtFixedRate(this::rotateProfilerJob, 1, 1, TimeUnit.MINUTES); + // rotate the sampler job to put data into a new window + this.scheduler.scheduleAtFixedRate( + this::rotateProfilerJob, + ProfilingWindowUtils.WINDOW_SIZE_SECONDS, + ProfilingWindowUtils.WINDOW_SIZE_SECONDS, + TimeUnit.SECONDS + ); recordInitialGcStats(); scheduleTimeout(); @@ -117,6 +122,9 @@ public class AsyncSampler extends AbstractSampler { // aggregate the output of the previous job previousJob.aggregate(this.dataAggregator); + + // prune data older than the history size + this.dataAggregator.pruneData(ProfilingWindowUtils.keepHistoryBefore(window)); } } catch (Throwable e) { e.printStackTrace(); diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java index 8c96fd3..6aad5e3 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java @@ -34,8 +34,6 @@ import me.lucko.spark.common.sampler.window.WindowStatisticsCollector; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.proto.SparkSamplerProtos.SamplerData; -import org.checkerframework.checker.units.qual.A; - import java.lang.management.ManagementFactory; import java.lang.management.ThreadInfo; import java.lang.management.ThreadMXBean; @@ -145,10 +143,15 @@ public class JavaSampler extends AbstractSampler implements Runnable { JavaSampler.this.dataAggregator.insertData(threadInfo, this.window); } - // if we have just stepped over into a new window, collect statistics for the previous window + // if we have just stepped over into a new window... 
int previousWindow = JavaSampler.this.lastWindow.getAndSet(this.window); if (previousWindow != 0 && previousWindow != this.window) { + + // collect statistics for the previous window JavaSampler.this.windowStatisticsCollector.measureNow(previousWindow); + + // prune data older than the history size + JavaSampler.this.dataAggregator.pruneData(ProfilingWindowUtils.keepHistoryBefore(this.window)); } } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java index e6f6cf5..2e4b055 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java @@ -20,17 +20,18 @@ package me.lucko.spark.common.sampler.node; -import me.lucko.spark.common.sampler.async.jfr.Dictionary; import me.lucko.spark.common.sampler.window.ProtoTimeEncoder; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.LongAdder; -import java.util.stream.IntStream; +import java.util.function.IntPredicate; /** * Encapsulates a timed node in the sampling stack. @@ -43,9 +44,9 @@ public abstract class AbstractNode { private final Map children = new ConcurrentHashMap<>(); /** The accumulated sample time for this node, measured in microseconds */ - // long key = the window (effectively System.currentTimeMillis() / 60_000) + // Integer key = the window (effectively System.currentTimeMillis() / 60_000) // LongAdder value = accumulated time in microseconds - private final Dictionary times = new Dictionary<>(); + private final Map times = new HashMap<>(); /** * Gets the time accumulator for a given window @@ -67,10 +68,18 @@ public abstract class AbstractNode { * * @return the time windows */ - public IntStream getTimeWindows() { - IntStream.Builder keys = IntStream.builder(); - this.times.forEach((key, value) -> keys.add((int) key)); - return keys.build(); + public Set getTimeWindows() { + return this.times.keySet(); + } + + /** + * Removes time windows from this node if they pass the given {@code predicate} test. 
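The new removeTimeWindows above is a thin wrapper over Set#removeIf driven by the caller's IntPredicate. In isolation:

import java.util.HashMap;
import java.util.Map;
import java.util.function.IntPredicate;

public class RemoveWindowsDemo {
    public static void main(String[] args) {
        Map<Integer, Long> times = new HashMap<>();
        times.put(1, 100L);
        times.put(2, 200L);
        times.put(3, 300L);

        IntPredicate predicate = w -> w < 3; // "prune everything before window 3"
        boolean removed = times.keySet().removeIf(predicate::test);

        System.out.println(removed); // true - at least one window was dropped
        System.out.println(times);   // {3=300}
    }
}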
+ * + * @param predicate the predicate + * @return true if any time windows were removed + */ + public boolean removeTimeWindows(IntPredicate predicate) { + return this.times.keySet().removeIf(predicate::test); } /** @@ -100,7 +109,7 @@ public abstract class AbstractNode { * @param other the other node */ protected void merge(AbstractNode other) { - other.times.forEach((key, value) -> getTimeAccumulator((int) key).add(value.longValue())); + other.times.forEach((key, value) -> getTimeAccumulator(key).add(value.longValue())); for (Map.Entry child : other.children.entrySet()) { resolveChild(child.getKey()).merge(child.getValue()); } @@ -127,7 +136,6 @@ public abstract class AbstractNode { list.add(child); } - //list.sort(null); return list; } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java index 9faece6..5035046 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java @@ -25,9 +25,13 @@ import me.lucko.spark.common.util.IndexedListBuilder; import me.lucko.spark.proto.SparkSamplerProtos; import java.util.ArrayDeque; +import java.util.Collection; import java.util.Deque; +import java.util.Iterator; import java.util.LinkedList; import java.util.List; +import java.util.Queue; +import java.util.function.IntPredicate; /** * The root of a sampling stack for a given thread / thread group. @@ -89,6 +93,46 @@ public final class ThreadNode extends AbstractNode { } } + /** + * Removes time windows that match the given {@code predicate}. + * + * @param predicate the predicate to use to test the time windows + * @return true if this node is now empty + */ + public boolean removeTimeWindowsRecursively(IntPredicate predicate) { + Queue queue = new ArrayDeque<>(); + queue.add(this); + + while (!queue.isEmpty()) { + AbstractNode node = queue.remove(); + Collection children = node.getChildren(); + + boolean needToProcessChildren = false; + + for (Iterator it = children.iterator(); it.hasNext(); ) { + StackTraceNode child = it.next(); + + boolean windowsWereRemoved = child.removeTimeWindows(predicate); + boolean childIsNowEmpty = child.getTimeWindows().isEmpty(); + + if (childIsNowEmpty) { + it.remove(); + continue; + } + + if (windowsWereRemoved) { + needToProcessChildren = true; + } + } + + if (needToProcessChildren) { + queue.addAll(children); + } + } + + return getTimeWindows().isEmpty(); + } + public SparkSamplerProtos.ThreadNode toProto(MergeMode mergeMode, ProtoTimeEncoder timeEncoder) { SparkSamplerProtos.ThreadNode.Builder proto = SparkSamplerProtos.ThreadNode.newBuilder() .setName(getThreadLabel()); diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/window/ProfilingWindowUtils.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/window/ProfilingWindowUtils.java index 109adb3..be6f08a 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/window/ProfilingWindowUtils.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/window/ProfilingWindowUtils.java @@ -20,9 +20,25 @@ package me.lucko.spark.common.sampler.window; +import me.lucko.spark.common.sampler.aggregator.DataAggregator; + +import java.util.function.IntPredicate; + public enum ProfilingWindowUtils { ; + /** + * The size/duration of a profiling window in seconds. 
+ * (1 window = 1 minute) + */ + public static final int WINDOW_SIZE_SECONDS = 60; + + /** + * The number of windows to record in continuous profiling before data is dropped. + * (60 windows * 1 minute = 1 hour of profiling data) + */ + public static final int HISTORY_SIZE = Integer.getInteger("spark.continuousProfilingHistorySize", 60); + /** * Gets the profiling window for the given time in unix-millis. * @@ -30,7 +46,25 @@ public enum ProfilingWindowUtils { * @return the window */ public static int unixMillisToWindow(long time) { - // one window per minute - return (int) (time / 60_000); + return (int) (time / (WINDOW_SIZE_SECONDS * 1000L)); + } + + /** + * Gets the window at the current time. + * + * @return the window + */ + public static int windowNow() { + return unixMillisToWindow(System.currentTimeMillis()); + } + + /** + * Gets a prune predicate that can be passed to {@link DataAggregator#pruneData(IntPredicate)}. + * + * @return the prune predicate + */ + public static IntPredicate keepHistoryBefore(int currentWindow) { + // windows that were earlier than (currentWindow minus history size) should be pruned + return window -> window < (currentWindow - HISTORY_SIZE); } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/window/ProtoTimeEncoder.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/window/ProtoTimeEncoder.java index edb2309..03da075 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/window/ProtoTimeEncoder.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/window/ProtoTimeEncoder.java @@ -21,7 +21,6 @@ package me.lucko.spark.common.sampler.window; import me.lucko.spark.common.sampler.async.jfr.Dictionary; -import me.lucko.spark.common.sampler.node.AbstractNode; import me.lucko.spark.common.sampler.node.ThreadNode; import java.util.HashMap; @@ -42,7 +41,7 @@ public class ProtoTimeEncoder { public ProtoTimeEncoder(List sourceData) { // get an array of all keys that show up in the source data this.keys = sourceData.stream() - .map(AbstractNode::getTimeWindows) + .map(n -> n.getTimeWindows().stream().mapToInt(i -> i)) .reduce(IntStream.empty(), IntStream::concat) .distinct() .sorted() @@ -70,14 +69,14 @@ public class ProtoTimeEncoder { * @param times a dictionary of times (unix-time millis -> duration in microseconds) * @return the times encoded as a double array */ - public double[] encode(Dictionary times) { + public double[] encode(Map times) { // construct an array of values - length needs to exactly match the // number of keys, even if some values are zero. 
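// windows with no entry in the times map are simply left at zero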
double[] array = new double[this.keys.length]; times.forEach((key, value) -> { // get the index for the given key - Integer idx = this.keysToIndex.get((int) key); + Integer idx = this.keysToIndex.get(key); if (idx == null) { throw new RuntimeException("No index for key " + key + " in " + this.keysToIndex.keySet()); } -- cgit From f2d77d875f32f107987c93da1f90529fc6812444 Mon Sep 17 00:00:00 2001 From: Luck Date: Sun, 13 Nov 2022 21:24:57 +0000 Subject: Background profiler --- .../java/me/lucko/spark/common/SparkPlatform.java | 82 ++++++++++----- .../me/lucko/spark/common/command/Arguments.java | 11 +- .../me/lucko/spark/common/command/Command.java | 58 ++++++++++- .../common/command/modules/GcMonitoringModule.java | 22 +--- .../common/command/modules/SamplerModule.java | 115 +++++++++++++++------ .../spark/common/sampler/AbstractSampler.java | 19 ++-- .../me/lucko/spark/common/sampler/Sampler.java | 7 ++ .../lucko/spark/common/sampler/SamplerBuilder.java | 28 ++--- .../spark/common/sampler/SamplerContainer.java | 9 ++ .../spark/common/sampler/SamplerSettings.java | 61 +++++++++++ .../spark/common/sampler/async/AsyncSampler.java | 14 +-- .../spark/common/sampler/java/JavaSampler.java | 20 ++-- .../spark/common/sampler/node/ThreadNode.java | 1 + .../sampler/window/WindowStatisticsCollector.java | 5 + .../me/lucko/spark/common/util/Configuration.java | 10 ++ .../me/lucko/spark/common/util/FormatUtil.java | 20 ++++ 16 files changed, 362 insertions(+), 120 deletions(-) create mode 100644 spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerSettings.java (limited to 'spark-common/src') diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java index a015e42..5461ed4 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java +++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java @@ -45,7 +45,10 @@ import me.lucko.spark.common.monitor.ping.PingStatistics; import me.lucko.spark.common.monitor.ping.PlayerPingProvider; import me.lucko.spark.common.monitor.tick.TickStatistics; import me.lucko.spark.common.platform.PlatformStatisticsProvider; +import me.lucko.spark.common.sampler.Sampler; +import me.lucko.spark.common.sampler.SamplerBuilder; import me.lucko.spark.common.sampler.SamplerContainer; +import me.lucko.spark.common.sampler.ThreadGrouper; import me.lucko.spark.common.sampler.source.ClassSourceLookup; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.tick.TickReporter; @@ -64,6 +67,7 @@ import java.time.format.DateTimeFormatter; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicBoolean; @@ -74,7 +78,6 @@ import java.util.stream.Collectors; import static net.kyori.adventure.text.Component.space; import static net.kyori.adventure.text.Component.text; -import static net.kyori.adventure.text.format.NamedTextColor.DARK_GRAY; import static net.kyori.adventure.text.format.NamedTextColor.GOLD; import static net.kyori.adventure.text.format.NamedTextColor.GRAY; import static net.kyori.adventure.text.format.NamedTextColor.RED; @@ -139,7 +142,7 @@ public class SparkPlatform { this.activityLog = new ActivityLog(plugin.getPluginDirectory().resolve("activity.json")); this.activityLog.load(); - this.samplerContainer = new SamplerContainer(); + this.samplerContainer = new 
SamplerContainer(this.configuration.getBoolean("backgroundProfiler", true)); this.tickHook = plugin.createTickHook(); this.tickReporter = plugin.createTickReporter(); @@ -179,6 +182,16 @@ public class SparkPlatform { SparkApi api = new SparkApi(this); this.plugin.registerApi(api); SparkApi.register(api); + + if (this.samplerContainer.isBackgroundProfilerEnabled()) { + this.plugin.log(Level.INFO, "Starting background profiler..."); + try { + startBackgroundProfiler(); + this.plugin.log(Level.INFO, "... done!"); + } catch (Exception e) { + e.printStackTrace(); + } + } } public void disable() { @@ -196,6 +209,8 @@ public class SparkPlatform { module.close(); } + this.samplerContainer.close(); + SparkApi.unregister(); this.temporaryFiles.deleteTemporaryFiles(); @@ -269,6 +284,17 @@ public class SparkPlatform { return this.serverNormalOperationStartTime; } + public void startBackgroundProfiler() { + Sampler sampler = new SamplerBuilder() + .background(true) + .threadDumper(this.plugin.getDefaultThreadDumper()) + .threadGrouper(ThreadGrouper.BY_POOL) + .samplingInterval(this.configuration.getInteger("backgroundProfilerInterval", 10)) + .start(this); + + this.samplerContainer.setActiveSampler(sampler); + } + public Path resolveSaveFile(String prefix, String extension) { Path pluginFolder = this.plugin.getPluginDirectory(); try { @@ -394,7 +420,7 @@ public class SparkPlatform { if (command.aliases().contains(alias)) { resp.setCommandPrimaryAlias(command.primaryAlias()); try { - command.executor().execute(this, sender, resp, new Arguments(rawArgs)); + command.executor().execute(this, sender, resp, new Arguments(rawArgs, command.allowSubCommand())); } catch (Arguments.ParseException e) { resp.replyPrefixed(text(e.getMessage(), RED)); } @@ -442,32 +468,38 @@ public class SparkPlatform { ); for (Command command : commands) { String usage = "/" + getPlugin().getCommandName() + " " + command.primaryAlias(); - ClickEvent clickEvent = ClickEvent.suggestCommand(usage); - sender.reply(text() - .append(text(">", GOLD, BOLD)) - .append(space()) - .append(text().content(usage).color(GRAY).clickEvent(clickEvent).build()) - .build() - ); - for (Command.ArgumentInfo arg : command.arguments()) { - if (arg.requiresParameter()) { + + if (command.allowSubCommand()) { + Map> argumentsBySubCommand = command.arguments().stream() + .collect(Collectors.groupingBy(Command.ArgumentInfo::subCommandName, LinkedHashMap::new, Collectors.toList())); + + argumentsBySubCommand.forEach((subCommand, arguments) -> { + String subCommandUsage = usage + " " + subCommand; + sender.reply(text() - .content(" ") - .append(text("[", DARK_GRAY)) - .append(text("--" + arg.argumentName(), GRAY)) + .append(text(">", GOLD, BOLD)) .append(space()) - .append(text("<" + arg.parameterDescription() + ">", DARK_GRAY)) - .append(text("]", DARK_GRAY)) - .build() - ); - } else { - sender.reply(text() - .content(" ") - .append(text("[", DARK_GRAY)) - .append(text("--" + arg.argumentName(), GRAY)) - .append(text("]", DARK_GRAY)) + .append(text().content(subCommandUsage).color(GRAY).clickEvent(ClickEvent.suggestCommand(subCommandUsage)).build()) .build() ); + + for (Command.ArgumentInfo arg : arguments) { + if (arg.argumentName().isEmpty()) { + continue; + } + sender.reply(arg.toComponent(" ")); + } + }); + } else { + sender.reply(text() + .append(text(">", GOLD, BOLD)) + .append(space()) + .append(text().content(usage).color(GRAY).clickEvent(ClickEvent.suggestCommand(usage)).build()) + .build() + ); + + for (Command.ArgumentInfo arg : command.arguments()) { 
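+ // renders as e.g. "[--thread <thread name>]", or just "[--regex]" when the flag takes no value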
+ sender.reply(arg.toComponent(" ")); } } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/Arguments.java b/spark-common/src/main/java/me/lucko/spark/common/command/Arguments.java index 17c49e2..ad8c777 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/Arguments.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/Arguments.java @@ -38,8 +38,9 @@ public class Arguments { private final List rawArgs; private final SetMultimap parsedArgs; + private String parsedSubCommand = null; - public Arguments(List rawArgs) { + public Arguments(List rawArgs, boolean allowSubCommand) { this.rawArgs = rawArgs; this.parsedArgs = HashMultimap.create(); @@ -52,7 +53,9 @@ public class Arguments { Matcher matcher = FLAG_REGEX.matcher(arg); boolean matches = matcher.matches(); - if (flag == null || matches) { + if (i == 0 && allowSubCommand && !matches) { + this.parsedSubCommand = arg; + } else if (flag == null || matches) { if (!matches) { throw new ParseException("Expected flag at position " + i + " but got '" + arg + "' instead!"); } @@ -80,6 +83,10 @@ public class Arguments { return this.rawArgs; } + public String subCommand() { + return this.parsedSubCommand; + } + public int intFlag(String key) { Iterator it = this.parsedArgs.get(key).iterator(); if (it.hasNext()) { diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/Command.java b/spark-common/src/main/java/me/lucko/spark/common/command/Command.java index dad15e6..c6871a9 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/Command.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/Command.java @@ -25,10 +25,17 @@ import com.google.common.collect.ImmutableList; import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.command.sender.CommandSender; +import net.kyori.adventure.text.Component; + import java.util.Collections; import java.util.List; import java.util.Objects; +import static net.kyori.adventure.text.Component.space; +import static net.kyori.adventure.text.Component.text; +import static net.kyori.adventure.text.format.NamedTextColor.DARK_GRAY; +import static net.kyori.adventure.text.format.NamedTextColor.GRAY; + public class Command { public static Builder builder() { @@ -39,12 +46,14 @@ public class Command { private final List arguments; private final Executor executor; private final TabCompleter tabCompleter; + private final boolean allowSubCommand; - private Command(List aliases, List arguments, Executor executor, TabCompleter tabCompleter) { + private Command(List aliases, List arguments, Executor executor, TabCompleter tabCompleter, boolean allowSubCommand) { this.aliases = aliases; this.arguments = arguments; this.executor = executor; this.tabCompleter = tabCompleter; + this.allowSubCommand = allowSubCommand; } public List aliases() { @@ -67,11 +76,16 @@ public class Command { return this.aliases.get(0); } + public boolean allowSubCommand() { + return this.allowSubCommand; + } + public static final class Builder { private final ImmutableList.Builder aliases = ImmutableList.builder(); private final ImmutableList.Builder arguments = ImmutableList.builder(); private Executor executor = null; private TabCompleter tabCompleter = null; + private boolean allowSubCommand = false; Builder() { @@ -82,8 +96,13 @@ public class Command { return this; } + public Builder argumentUsage(String subCommandName, String argumentName, String parameterDescription) { + this.arguments.add(new ArgumentInfo(subCommandName, argumentName, 
parameterDescription)); + return this; + } + public Builder argumentUsage(String argumentName, String parameterDescription) { - this.arguments.add(new ArgumentInfo(argumentName, parameterDescription)); + this.arguments.add(new ArgumentInfo("", argumentName, parameterDescription)); return this; } @@ -97,6 +116,11 @@ public class Command { return this; } + public Builder allowSubCommand(boolean allowSubCommand) { + this.allowSubCommand = allowSubCommand; + return this; + } + public Command build() { List aliases = this.aliases.build(); if (aliases.isEmpty()) { @@ -108,7 +132,7 @@ public class Command { if (this.tabCompleter == null) { this.tabCompleter = TabCompleter.empty(); } - return new Command(aliases, this.arguments.build(), this.executor, this.tabCompleter); + return new Command(aliases, this.arguments.build(), this.executor, this.tabCompleter, this.allowSubCommand); } } @@ -127,14 +151,20 @@ public class Command { } public static final class ArgumentInfo { + private final String subCommandName; private final String argumentName; private final String parameterDescription; - public ArgumentInfo(String argumentName, String parameterDescription) { + public ArgumentInfo(String subCommandName, String argumentName, String parameterDescription) { + this.subCommandName = subCommandName; this.argumentName = argumentName; this.parameterDescription = parameterDescription; } + public String subCommandName() { + return this.subCommandName; + } + public String argumentName() { return this.argumentName; } @@ -146,6 +176,26 @@ public class Command { public boolean requiresParameter() { return this.parameterDescription != null; } + + public Component toComponent(String padding) { + if (requiresParameter()) { + return text() + .content(padding) + .append(text("[", DARK_GRAY)) + .append(text("--" + argumentName(), GRAY)) + .append(space()) + .append(text("<" + parameterDescription() + ">", DARK_GRAY)) + .append(text("]", DARK_GRAY)) + .build(); + } else { + return text() + .content(padding) + .append(text("[", DARK_GRAY)) + .append(text("--" + argumentName(), GRAY)) + .append(text("]", DARK_GRAY)) + .build(); + } + } } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/GcMonitoringModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/GcMonitoringModule.java index 2ce83fd..a2da0a0 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/GcMonitoringModule.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/GcMonitoringModule.java @@ -123,7 +123,7 @@ public class GcMonitoringModule implements CommandModule { ); report.add(text() .content(" ") - .append(text(formatTime((long) averageFrequency), WHITE)) + .append(text(FormatUtil.formatSeconds((long) averageFrequency / 1000), WHITE)) .append(text(" avg frequency", GRAY)) .build() ); @@ -153,26 +153,6 @@ public class GcMonitoringModule implements CommandModule { ); } - private static String formatTime(long millis) { - if (millis <= 0) { - return "0s"; - } - - long second = millis / 1000; - long minute = second / 60; - second = second % 60; - - StringBuilder sb = new StringBuilder(); - if (minute != 0) { - sb.append(minute).append("m "); - } - if (second != 0) { - sb.append(second).append("s "); - } - - return sb.toString().trim(); - } - private static class ReportingGcMonitor extends GarbageCollectionMonitor implements GarbageCollectionMonitor.Listener { private final SparkPlatform platform; private final CommandResponseHandler resp; diff --git 
a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java index 00bf1a9..6a76748 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java @@ -39,6 +39,7 @@ import me.lucko.spark.common.sampler.async.AsyncSampler; import me.lucko.spark.common.sampler.node.MergeMode; import me.lucko.spark.common.sampler.source.ClassSourceLookup; import me.lucko.spark.common.tick.TickHook; +import me.lucko.spark.common.util.FormatUtil; import me.lucko.spark.common.util.MethodDisambiguator; import me.lucko.spark.proto.SparkSamplerProtos; @@ -72,31 +73,36 @@ public class SamplerModule implements CommandModule { public void registerCommands(Consumer consumer) { consumer.accept(Command.builder() .aliases("profiler", "sampler") - .argumentUsage("info", null) - .argumentUsage("stop", null) - .argumentUsage("timeout", "timeout seconds") - .argumentUsage("thread *", null) - .argumentUsage("thread", "thread name") - .argumentUsage("only-ticks-over", "tick length millis") - .argumentUsage("interval", "interval millis") + .allowSubCommand(true) + .argumentUsage("info", "", null) + .argumentUsage("start", "timeout", "timeout seconds") + .argumentUsage("start", "thread *", null) + .argumentUsage("start", "thread", "thread name") + .argumentUsage("start", "only-ticks-over", "tick length millis") + .argumentUsage("start", "interval", "interval millis") + .argumentUsage("stop", "", null) + .argumentUsage("cancel", "", null) .executor(this::profiler) .tabCompleter((platform, sender, arguments) -> { - if (arguments.contains("--info") || arguments.contains("--cancel")) { - return Collections.emptyList(); + List opts = Collections.emptyList(); + + if (arguments.size() > 0) { + String subCommand = arguments.get(0); + if (subCommand.equals("stop") || subCommand.equals("upload")) { + opts = new ArrayList<>(Arrays.asList("--comment", "--save-to-file")); + opts.removeAll(arguments); + } + if (subCommand.equals("start")) { + opts = new ArrayList<>(Arrays.asList("--timeout", "--regex", "--combine-all", + "--not-combined", "--interval", "--only-ticks-over", "--force-java-sampler")); + opts.removeAll(arguments); + opts.add("--thread"); // allowed multiple times + } } - if (arguments.contains("--stop") || arguments.contains("--upload")) { - return TabCompleter.completeForOpts(arguments, "--comment", "--save-to-file"); - } - - List opts = new ArrayList<>(Arrays.asList("--info", "--stop", "--cancel", - "--timeout", "--regex", "--combine-all", "--not-combined", "--interval", - "--only-ticks-over", "--force-java-sampler")); - opts.removeAll(arguments); - opts.add("--thread"); // allowed multiple times - return TabCompleter.create() - .from(0, CompletionSupplier.startsWith(opts)) + .at(0, CompletionSupplier.startsWith(Arrays.asList("info", "start", "stop", "cancel"))) + .from(1, CompletionSupplier.startsWith(opts)) .complete(arguments); }) .build() @@ -104,28 +110,48 @@ public class SamplerModule implements CommandModule { } private void profiler(SparkPlatform platform, CommandSender sender, CommandResponseHandler resp, Arguments arguments) { - if (arguments.boolFlag("info")) { + String subCommand = arguments.subCommand() == null ? 
"" : arguments.subCommand(); + + if (subCommand.equals("info") || arguments.boolFlag("info")) { profilerInfo(platform, resp); return; } - if (arguments.boolFlag("cancel")) { + if (subCommand.equals("cancel") || arguments.boolFlag("cancel")) { profilerCancel(platform, resp); return; } - if (arguments.boolFlag("stop") || arguments.boolFlag("upload")) { + if (subCommand.equals("stop") || arguments.boolFlag("stop") || arguments.boolFlag("upload")) { profilerStop(platform, sender, resp, arguments); return; } - profilerStart(platform, sender, resp, arguments); + if (subCommand.equals("start") || arguments.boolFlag("start")) { + profilerStart(platform, sender, resp, arguments); + return; + } + + if (arguments.raw().isEmpty()) { + profilerInfo(platform, resp); + } else { + profilerStart(platform, sender, resp, arguments); + } } private void profilerStart(SparkPlatform platform, CommandSender sender, CommandResponseHandler resp, Arguments arguments) { - if (platform.getSamplerContainer().getActiveSampler() != null) { - profilerInfo(platform, resp); - return; + Sampler previousSampler = platform.getSamplerContainer().getActiveSampler(); + if (previousSampler != null) { + if (previousSampler.isRunningInBackground()) { + // there is a background profiler running - stop that first + resp.replyPrefixed(text("Stopping the background profiler before starting... please wait")); + previousSampler.stop(); + platform.getSamplerContainer().unsetActiveSampler(previousSampler); + } else { + // there is a non-background profiler running - tell the user + profilerInfo(platform, resp); + return; + } } int timeoutSeconds = arguments.intFlag("timeout"); @@ -212,9 +238,9 @@ public class SamplerModule implements CommandModule { if (timeoutSeconds == -1) { resp.broadcastPrefixed(text("It will run in the background until it is stopped by an admin.")); resp.broadcastPrefixed(text("To stop the profiler and upload the results, run:")); - resp.broadcastPrefixed(cmdPrompt("/" + platform.getPlugin().getCommandName() + " profiler --stop")); + resp.broadcastPrefixed(cmdPrompt("/" + platform.getPlugin().getCommandName() + " profiler stop")); } else { - resp.broadcastPrefixed(text("The results will be automatically returned after the profiler has been running for " + timeoutSeconds + " seconds.")); + resp.broadcastPrefixed(text("The results will be automatically returned after the profiler has been running for " + FormatUtil.formatSeconds(timeoutSeconds) + ".")); } CompletableFuture future = sampler.getFuture(); @@ -248,24 +274,34 @@ public class SamplerModule implements CommandModule { if (sampler == null) { resp.replyPrefixed(text("The profiler isn't running!")); resp.replyPrefixed(text("To start a new one, run:")); - resp.replyPrefixed(cmdPrompt("/" + platform.getPlugin().getCommandName() + " profiler")); + resp.replyPrefixed(cmdPrompt("/" + platform.getPlugin().getCommandName() + " profiler start")); } else { resp.replyPrefixed(text("Profiler is already running!", GOLD)); long runningTime = (System.currentTimeMillis() - sampler.getStartTime()) / 1000L; - resp.replyPrefixed(text("So far, it has profiled for " + runningTime + " seconds.")); + + if (sampler.isRunningInBackground()) { + resp.replyPrefixed(text() + .append(text("It was started ")) + .append(text("automatically", WHITE)) + .append(text(" when spark enabled and has been running in the background for " + FormatUtil.formatSeconds(runningTime) + ".")) + .build() + ); + } else { + resp.replyPrefixed(text("So far, it has profiled for " + 
FormatUtil.formatSeconds(runningTime) + ".")); + } long timeout = sampler.getAutoEndTime(); if (timeout == -1) { resp.replyPrefixed(text("To stop the profiler and upload the results, run:")); - resp.replyPrefixed(cmdPrompt("/" + platform.getPlugin().getCommandName() + " profiler --stop")); + resp.replyPrefixed(cmdPrompt("/" + platform.getPlugin().getCommandName() + " profiler stop")); } else { long timeoutDiff = (timeout - System.currentTimeMillis()) / 1000L; - resp.replyPrefixed(text("It is due to complete automatically and upload results in " + timeoutDiff + " seconds.")); + resp.replyPrefixed(text("It is due to complete automatically and upload results in " + FormatUtil.formatSeconds(timeoutDiff) + ".")); } resp.replyPrefixed(text("To cancel the profiler without uploading the results, run:")); - resp.replyPrefixed(cmdPrompt("/" + platform.getPlugin().getCommandName() + " profiler --cancel")); + resp.replyPrefixed(cmdPrompt("/" + platform.getPlugin().getCommandName() + " profiler cancel")); } } @@ -299,6 +335,17 @@ public class SamplerModule implements CommandModule { MethodDisambiguator methodDisambiguator = new MethodDisambiguator(); MergeMode mergeMode = arguments.boolFlag("separate-parent-calls") ? MergeMode.separateParentCalls(methodDisambiguator) : MergeMode.sameMethod(methodDisambiguator); handleUpload(platform, resp, sampler, comment, mergeMode, saveToFile); + + // if the previous sampler was running in the background, create a new one + if (platform.getSamplerContainer().isBackgroundProfilerEnabled()) { + platform.startBackgroundProfiler(); + + resp.broadcastPrefixed(text() + .append(text("Restarted the background profiler. ")) + .append(text("(If you don't want this to happen, run: /" + platform.getPlugin().getCommandName() + " profiler cancel)", DARK_GRAY)) + .build() + ); + } } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java index c650738..feefd66 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java @@ -32,8 +32,6 @@ import me.lucko.spark.common.sampler.source.ClassSourceLookup; import me.lucko.spark.common.sampler.source.SourceMetadata; import me.lucko.spark.common.sampler.window.ProtoTimeEncoder; import me.lucko.spark.common.sampler.window.WindowStatisticsCollector; -import me.lucko.spark.common.tick.TickHook; -import me.lucko.spark.proto.SparkProtos; import me.lucko.spark.proto.SparkSamplerProtos.SamplerData; import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata; @@ -64,6 +62,9 @@ public abstract class AbstractSampler implements Sampler { /** The unix timestamp (in millis) when this sampler should automatically complete. 
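* Once this time passes, the profiler completes and uploads its results automatically.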
*/ protected final long autoEndTime; // -1 for nothing + /** If the sampler is running in the background */ + protected boolean background; + /** Collects statistics for each window in the sample */ protected final WindowStatisticsCollector windowStatisticsCollector; @@ -73,11 +74,12 @@ public abstract class AbstractSampler implements Sampler { /** The garbage collector statistics when profiling started */ protected Map initialGcStats; - protected AbstractSampler(SparkPlatform platform, int interval, ThreadDumper threadDumper, long autoEndTime) { + protected AbstractSampler(SparkPlatform platform, SamplerSettings settings) { this.platform = platform; - this.interval = interval; - this.threadDumper = threadDumper; - this.autoEndTime = autoEndTime; + this.interval = settings.interval(); + this.threadDumper = settings.threadDumper(); + this.autoEndTime = settings.autoEndTime(); + this.background = settings.runningInBackground(); this.windowStatisticsCollector = new WindowStatisticsCollector(platform); } @@ -94,6 +96,11 @@ public abstract class AbstractSampler implements Sampler { return this.autoEndTime; } + @Override + public boolean isRunningInBackground() { + return this.background; + } + @Override public CompletableFuture getFuture() { return this.future; diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java index e06cba6..5d2026d 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java @@ -57,6 +57,13 @@ public interface Sampler { */ long getAutoEndTime(); + /** + * If this sampler is running in the background. (wasn't started by a specific user) + * + * @return true if the sampler is running in the background + */ + boolean isRunningInBackground(); + /** * Gets a future to encapsulate the completion of the sampler * diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java index 382950a..ec635ef 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java @@ -38,7 +38,8 @@ public class SamplerBuilder { private boolean ignoreSleeping = false; private boolean ignoreNative = false; private boolean useAsyncProfiler = true; - private long timeout = -1; + private long autoEndTime = -1; + private boolean background = false; private ThreadDumper threadDumper = ThreadDumper.ALL; private ThreadGrouper threadGrouper = ThreadGrouper.BY_NAME; @@ -57,7 +58,12 @@ public class SamplerBuilder { if (timeout <= 0) { throw new IllegalArgumentException("timeout > 0"); } - this.timeout = System.currentTimeMillis() + unit.toMillis(timeout); + this.autoEndTime = System.currentTimeMillis() + unit.toMillis(timeout); + return this; + } + + public SamplerBuilder background(boolean background) { + this.background = background; return this; } @@ -95,26 +101,22 @@ public class SamplerBuilder { public Sampler start(SparkPlatform platform) { boolean onlyTicksOverMode = this.ticksOver != -1 && this.tickHook != null; boolean canUseAsyncProfiler = this.useAsyncProfiler && + !onlyTicksOverMode && !(this.ignoreSleeping || this.ignoreNative) && !(this.threadDumper instanceof ThreadDumper.Regex) && AsyncProfilerAccess.getInstance(platform).checkSupported(platform); int intervalMicros = (int) 
(this.samplingInterval * 1000d); + SamplerSettings settings = new SamplerSettings(intervalMicros, this.threadDumper, this.threadGrouper, this.autoEndTime, this.background); Sampler sampler; - if (onlyTicksOverMode) { - sampler = new JavaSampler(platform, intervalMicros, this.threadDumper, - this.threadGrouper, this.timeout, this.ignoreSleeping, this.ignoreNative, - this.tickHook, this.ticksOver); - - } else if (canUseAsyncProfiler) { - sampler = new AsyncSampler(platform, intervalMicros, this.threadDumper, - this.threadGrouper, this.timeout); - + if (canUseAsyncProfiler) { + sampler = new AsyncSampler(platform, settings); + } else if (onlyTicksOverMode) { + sampler = new JavaSampler(platform, settings, this.ignoreSleeping, this.ignoreNative, this.tickHook, this.ticksOver); } else { - sampler = new JavaSampler(platform, intervalMicros, this.threadDumper, - this.threadGrouper, this.timeout, this.ignoreSleeping, this.ignoreNative); + sampler = new JavaSampler(platform, settings, this.ignoreSleeping, this.ignoreNative); } sampler.start(); diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerContainer.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerContainer.java index 55913d8..f56dee5 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerContainer.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerContainer.java @@ -28,6 +28,11 @@ import java.util.concurrent.atomic.AtomicReference; public class SamplerContainer implements AutoCloseable { private final AtomicReference activeSampler = new AtomicReference<>(); + private final boolean backgroundProfilerEnabled; + + public SamplerContainer(boolean backgroundProfilerEnabled) { + this.backgroundProfilerEnabled = backgroundProfilerEnabled; + } /** * Gets the active sampler, or null if a sampler is not active. @@ -68,6 +73,10 @@ public class SamplerContainer implements AutoCloseable { } } + public boolean isBackgroundProfilerEnabled() { + return this.backgroundProfilerEnabled; + } + @Override public void close() { stopActiveSampler(); diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerSettings.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerSettings.java new file mode 100644 index 0000000..6e55a43 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerSettings.java @@ -0,0 +1,61 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +package me.lucko.spark.common.sampler; + +/** + * Base settings for all samplers + */ +public class SamplerSettings { + + private final int interval; + private final ThreadDumper threadDumper; + private final ThreadGrouper threadGrouper; + private final long autoEndTime; + private final boolean runningInBackground; + + public SamplerSettings(int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long autoEndTime, boolean runningInBackground) { + this.interval = interval; + this.threadDumper = threadDumper; + this.threadGrouper = threadGrouper; + this.autoEndTime = autoEndTime; + this.runningInBackground = runningInBackground; + } + + public int interval() { + return this.interval; + } + + public ThreadDumper threadDumper() { + return this.threadDumper; + } + + public ThreadGrouper threadGrouper() { + return this.threadGrouper; + } + + public long autoEndTime() { + return this.autoEndTime; + } + + public boolean runningInBackground() { + return this.runningInBackground; + } +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java index cbc81c7..d6cfd4f 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java @@ -25,8 +25,7 @@ import com.google.common.util.concurrent.ThreadFactoryBuilder; import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.command.sender.CommandSender; import me.lucko.spark.common.sampler.AbstractSampler; -import me.lucko.spark.common.sampler.ThreadDumper; -import me.lucko.spark.common.sampler.ThreadGrouper; +import me.lucko.spark.common.sampler.SamplerSettings; import me.lucko.spark.common.sampler.node.MergeMode; import me.lucko.spark.common.sampler.source.ClassSourceLookup; import me.lucko.spark.common.sampler.window.ProfilingWindowUtils; @@ -36,6 +35,7 @@ import me.lucko.spark.proto.SparkSamplerProtos.SamplerData; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; +import java.util.function.IntPredicate; /** * A sampler implementation using async-profiler. 
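The hunks below swap the samplers' long positional constructors for the SamplerSettings object introduced above. A rough sketch of the difference at a construction site, distilled from the SamplerBuilder change earlier in this commit (variable names are illustrative):

    // before: five positional arguments, easy to transpose
    Sampler sampler = new AsyncSampler(platform, intervalMicros, threadDumper, threadGrouper, timeout);

    // after: a single named settings object, shared by both sampler types
    SamplerSettings settings = new SamplerSettings(intervalMicros, threadDumper, threadGrouper, autoEndTime, background);
    Sampler sampler = new AsyncSampler(platform, settings);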
@@ -55,10 +55,10 @@ public class AsyncSampler extends AbstractSampler { /** The executor used for scheduling and management */ private ScheduledExecutorService scheduler; - public AsyncSampler(SparkPlatform platform, int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime) { - super(platform, interval, threadDumper, endTime); + public AsyncSampler(SparkPlatform platform, SamplerSettings settings) { + super(platform, settings); this.profilerAccess = AsyncProfilerAccess.getInstance(platform); - this.dataAggregator = new AsyncDataAggregator(threadGrouper); + this.dataAggregator = new AsyncDataAggregator(settings.threadGrouper()); this.scheduler = Executors.newSingleThreadScheduledExecutor( new ThreadFactoryBuilder().setNameFormat("spark-asyncsampler-worker-thread").build() ); @@ -124,7 +124,9 @@ public class AsyncSampler extends AbstractSampler { previousJob.aggregate(this.dataAggregator); // prune data older than the history size - this.dataAggregator.pruneData(ProfilingWindowUtils.keepHistoryBefore(window)); + IntPredicate predicate = ProfilingWindowUtils.keepHistoryBefore(window); + this.dataAggregator.pruneData(predicate); + this.windowStatisticsCollector.pruneStatistics(predicate); } } catch (Throwable e) { e.printStackTrace(); diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java index 6aad5e3..95c3508 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java @@ -25,8 +25,7 @@ import com.google.common.util.concurrent.ThreadFactoryBuilder; import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.command.sender.CommandSender; import me.lucko.spark.common.sampler.AbstractSampler; -import me.lucko.spark.common.sampler.ThreadDumper; -import me.lucko.spark.common.sampler.ThreadGrouper; +import me.lucko.spark.common.sampler.SamplerSettings; import me.lucko.spark.common.sampler.node.MergeMode; import me.lucko.spark.common.sampler.source.ClassSourceLookup; import me.lucko.spark.common.sampler.window.ProfilingWindowUtils; @@ -42,6 +41,7 @@ import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.IntPredicate; /** * A sampler implementation using Java (WarmRoast). 
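Both sampler implementations are constructed through SamplerBuilder, so the background profiler added by this commit reduces to a builder invocation. A condensed sketch of what SparkPlatform#startBackgroundProfiler does (10 is the in-code default for the backgroundProfilerInterval config key):

    Sampler sampler = new SamplerBuilder()
            .background(true)                         // no auto-end time; runs until stopped
            .threadDumper(plugin.getDefaultThreadDumper())
            .threadGrouper(ThreadGrouper.BY_POOL)
            .samplingInterval(10)                     // milliseconds between samples
            .start(platform);
    samplerContainer.setActiveSampler(sampler);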
@@ -66,14 +66,14 @@ public class JavaSampler extends AbstractSampler implements Runnable { /** The last window that was profiled */ private final AtomicInteger lastWindow = new AtomicInteger(); - public JavaSampler(SparkPlatform platform, int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime, boolean ignoreSleeping, boolean ignoreNative) { - super(platform, interval, threadDumper, endTime); - this.dataAggregator = new SimpleDataAggregator(this.workerPool, threadGrouper, interval, ignoreSleeping, ignoreNative); + public JavaSampler(SparkPlatform platform, SamplerSettings settings, boolean ignoreSleeping, boolean ignoreNative) { + super(platform, settings); + this.dataAggregator = new SimpleDataAggregator(this.workerPool, settings.threadGrouper(), settings.interval(), ignoreSleeping, ignoreNative); } - public JavaSampler(SparkPlatform platform, int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime, boolean ignoreSleeping, boolean ignoreNative, TickHook tickHook, int tickLengthThreshold) { - super(platform, interval, threadDumper, endTime); - this.dataAggregator = new TickedDataAggregator(this.workerPool, threadGrouper, interval, ignoreSleeping, ignoreNative, tickHook, tickLengthThreshold); + public JavaSampler(SparkPlatform platform, SamplerSettings settings, boolean ignoreSleeping, boolean ignoreNative, TickHook tickHook, int tickLengthThreshold) { + super(platform, settings); + this.dataAggregator = new TickedDataAggregator(this.workerPool, settings.threadGrouper(), settings.interval(), ignoreSleeping, ignoreNative, tickHook, tickLengthThreshold); } @Override @@ -151,7 +151,9 @@ public class JavaSampler extends AbstractSampler implements Runnable { JavaSampler.this.windowStatisticsCollector.measureNow(previousWindow); // prune data older than the history size - JavaSampler.this.dataAggregator.pruneData(ProfilingWindowUtils.keepHistoryBefore(this.window)); + IntPredicate predicate = ProfilingWindowUtils.keepHistoryBefore(this.window); + JavaSampler.this.dataAggregator.pruneData(predicate); + JavaSampler.this.windowStatisticsCollector.pruneStatistics(predicate); } } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java index 5035046..37ff359 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java @@ -130,6 +130,7 @@ public final class ThreadNode extends AbstractNode { } } + removeTimeWindows(predicate); return getTimeWindows().isEmpty(); } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/window/WindowStatisticsCollector.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/window/WindowStatisticsCollector.java index 47f739d..7da62fa 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/window/WindowStatisticsCollector.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/window/WindowStatisticsCollector.java @@ -30,6 +30,7 @@ import me.lucko.spark.proto.SparkProtos; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.IntPredicate; /** * Collects statistics for each profiling window. 
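The pruneStatistics method added below applies the same expiry predicate already used to trim the aggregated sample data, so both window-keyed maps stay in step. A small illustration of the cut-off arithmetic, assuming the default HISTORY_SIZE of 60 (the window numbers are arbitrary):

    IntPredicate expired = ProfilingWindowUtils.keepHistoryBefore(161); // matches any window < 101
    Map<Integer, LongAdder> byWindow = new ConcurrentHashMap<>();
    byWindow.put(100, new LongAdder()); // expired: 100 < 161 - 60
    byWindow.put(160, new LongAdder()); // retained
    byWindow.keySet().removeIf(expired::test); // only window 160 remains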
@@ -116,6 +117,10 @@ public class WindowStatisticsCollector { } } + public void pruneStatistics(IntPredicate predicate) { + this.stats.keySet().removeIf(predicate::test); + } + public Map<Integer, SparkProtos.WindowStatistics> export() { return this.stats; } diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/Configuration.java b/spark-common/src/main/java/me/lucko/spark/common/util/Configuration.java index 7588645..ce63878 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/util/Configuration.java +++ b/spark-common/src/main/java/me/lucko/spark/common/util/Configuration.java @@ -67,4 +67,14 @@ public final class Configuration { return val.isBoolean() ? val.getAsBoolean() : def; } + public int getInteger(String path, int def) { + JsonElement el = this.root.get(path); + if (el == null || !el.isJsonPrimitive()) { + return def; + } + + JsonPrimitive val = el.getAsJsonPrimitive(); + return val.isNumber() ? val.getAsInt() : def; + } + } diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/FormatUtil.java b/spark-common/src/main/java/me/lucko/spark/common/util/FormatUtil.java index c4a3d66..1ee3b0f 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/util/FormatUtil.java +++ b/spark-common/src/main/java/me/lucko/spark/common/util/FormatUtil.java @@ -62,4 +62,24 @@ public enum FormatUtil { .append(Component.text(unit)) .build(); } + + public static String formatSeconds(long seconds) { + if (seconds <= 0) { + return "0s"; + } + + long second = seconds; + long minute = second / 60; + second = second % 60; + + StringBuilder sb = new StringBuilder(); + if (minute != 0) { + sb.append(minute).append("m "); + } + if (second != 0) { + sb.append(second).append("s "); + } + + return sb.toString().trim(); + } } -- cgit From b9f0e49ed17a7c32f36f31141c02529359944d03 Mon Sep 17 00:00:00 2001 From: Luck Date: Sun, 13 Nov 2022 22:42:49 +0000 Subject: Add upload subcommand as per documentation oops --- .../main/java/me/lucko/spark/common/command/modules/SamplerModule.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'spark-common/src') diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java index 6a76748..00cd4fa 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java @@ -122,7 +122,7 @@ public class SamplerModule implements CommandModule { return; } - if (subCommand.equals("stop") || arguments.boolFlag("stop") || arguments.boolFlag("upload")) { + if (subCommand.equals("stop") || subCommand.equals("upload") || arguments.boolFlag("stop") || arguments.boolFlag("upload")) { profilerStop(platform, sender, resp, arguments); return; } -- cgit From 3b0564cc4b259f49a15aa6cdb2c4b8e35f5e26e0 Mon Sep 17 00:00:00 2001 From: Luck Date: Mon, 14 Nov 2022 19:54:58 +0000 Subject: Suppress profiler logs when running in background --- .../src/main/java/me/lucko/spark/common/SparkPlatform.java | 1 - .../me/lucko/spark/common/sampler/async/AsyncProfilerJob.java | 8 +++++++- .../java/me/lucko/spark/common/sampler/async/AsyncSampler.java | 4 ++-- 3 files changed, 9 insertions(+), 4 deletions(-) (limited to 'spark-common/src') diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java index 5461ed4..105b167 100644 ---
a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java +++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java @@ -187,7 +187,6 @@ public class SparkPlatform { this.plugin.log(Level.INFO, "Starting background profiler..."); try { startBackgroundProfiler(); - this.plugin.log(Level.INFO, "... done!"); } catch (Exception e) { e.printStackTrace(); } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerJob.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerJob.java index 7b123a7..8991e94 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerJob.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerJob.java @@ -83,6 +83,8 @@ public class AsyncProfilerJob { private ThreadDumper threadDumper; /** The profiling window */ private int window; + /** If the profiler should run in quiet mode */ + private boolean quiet; /** The file used by async-profiler to output data */ private Path outputFile; @@ -116,11 +118,12 @@ public class AsyncProfilerJob { } // Initialise the job - public void init(SparkPlatform platform, int interval, ThreadDumper threadDumper, int window) { + public void init(SparkPlatform platform, int interval, ThreadDumper threadDumper, int window, boolean quiet) { this.platform = platform; this.interval = interval; this.threadDumper = threadDumper; this.window = window; + this.quiet = quiet; } /** @@ -139,6 +142,9 @@ public class AsyncProfilerJob { // construct a command to send to async-profiler String command = "start,event=" + this.access.getProfilingEvent() + ",interval=" + this.interval + "us,threads,jfr,file=" + this.outputFile.toString(); + if (this.quiet) { + command += ",loglevel=NONE"; + } if (this.threadDumper instanceof ThreadDumper.Specific) { command += ",filter"; } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java index d6cfd4f..f2e7191 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java @@ -79,7 +79,7 @@ public class AsyncSampler extends AbstractSampler { int window = ProfilingWindowUtils.windowNow(); AsyncProfilerJob job = this.profilerAccess.startNewProfilerJob(); - job.init(this.platform, this.interval, this.threadDumper, window); + job.init(this.platform, this.interval, this.threadDumper, window, this.background); job.start(); this.currentJob = job; @@ -116,7 +116,7 @@ public class AsyncSampler extends AbstractSampler { // start a new job int window = previousJob.getWindow() + 1; AsyncProfilerJob newJob = this.profilerAccess.startNewProfilerJob(); - newJob.init(this.platform, this.interval, this.threadDumper, window); + newJob.init(this.platform, this.interval, this.threadDumper, window, this.background); newJob.start(); this.currentJob = newJob; -- cgit From ea4d78c0f2600e7593175ba7f3d35493e6c84869 Mon Sep 17 00:00:00 2001 From: Luck Date: Wed, 16 Nov 2022 21:25:17 +0000 Subject: Remove recursive calls in class source visitor --- .../spark/common/sampler/source/ClassSourceLookup.java | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) (limited to 'spark-common/src') diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/source/ClassSourceLookup.java 
b/spark-common/src/main/java/me/lucko/spark/common/sampler/source/ClassSourceLookup.java index 66b41d2..ab63c00 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/source/ClassSourceLookup.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/source/ClassSourceLookup.java @@ -36,10 +36,12 @@ import java.nio.file.Path; import java.nio.file.Paths; import java.security.CodeSource; import java.security.ProtectionDomain; +import java.util.ArrayDeque; import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.Objects; +import java.util.Queue; import java.util.function.Function; import java.util.stream.Collectors; @@ -265,8 +267,10 @@ public interface ClassSourceLookup { @Override public void visit(ThreadNode node) { - for (StackTraceNode child : node.getChildren()) { - visitStackNode(child); + Queue queue = new ArrayDeque<>(node.getChildren()); + for (StackTraceNode n = queue.poll(); n != null; n = queue.poll()) { + visitStackNode(n); + queue.addAll(n.getChildren()); } } @@ -288,11 +292,6 @@ public interface ClassSourceLookup { MethodCallByLine methodCall = new MethodCallByLine(node.getClassName(), node.getMethodName(), node.getLineNumber()); this.lineSources.computeIfAbsent(methodCall, this.lookup::identify); } - - // recursively - for (StackTraceNode child : node.getChildren()) { - visitStackNode(child); - } } @Override -- cgit From 59e6a9814a357162409b096300dd69f7db58f8bf Mon Sep 17 00:00:00 2001 From: Luck Date: Wed, 16 Nov 2022 21:26:05 +0000 Subject: Fix NPE from server config provider call (#268) --- .../src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) (limited to 'spark-common/src') diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java index feefd66..59e873c 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java @@ -157,7 +157,9 @@ public abstract class AbstractSampler implements Sampler { try { ServerConfigProvider serverConfigProvider = platform.getPlugin().createServerConfigProvider(); - metadata.putAllServerConfigurations(serverConfigProvider.export()); + if (serverConfigProvider != null) { + metadata.putAllServerConfigurations(serverConfigProvider.export()); + } } catch (Exception e) { e.printStackTrace(); } -- cgit From d12f13f7e3024c632f181473711290c88de8cfde Mon Sep 17 00:00:00 2001 From: Luck Date: Wed, 16 Nov 2022 21:35:14 +0000 Subject: Fix NPE from null thread name (#263) --- .../java/me/lucko/spark/common/sampler/async/AsyncProfilerJob.java | 4 ++++ 1 file changed, 4 insertions(+) (limited to 'spark-common/src') diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerJob.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerJob.java index 8991e94..1310e97 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerJob.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerJob.java @@ -250,6 +250,10 @@ public class AsyncProfilerJob { } String threadName = reader.threads.get(sample.tid); + if (threadName == null) { + continue; + } + if (!threadFilter.test(threadName)) { continue; } -- cgit From e52ea7dbac9df3d610aef2ab3924fa9410d167e3 Mon Sep 17 00:00:00 2001 From: Luck Date: Wed, 16 
Nov 2022 21:41:04 +0000 Subject: Fix exception from zero thread id (#262) --- .../java/me/lucko/spark/common/sampler/async/AsyncProfilerJob.java | 2 +- .../main/java/me/lucko/spark/common/sampler/async/jfr/JfrReader.java | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) (limited to 'spark-common/src') diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerJob.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerJob.java index 1310e97..db1808c 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerJob.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerJob.java @@ -249,7 +249,7 @@ public class AsyncProfilerJob { duration = TimeUnit.NANOSECONDS.toMicros(sample.time - samples.get(i - 1).time); } - String threadName = reader.threads.get(sample.tid); + String threadName = reader.threads.get((long) sample.tid); if (threadName == null) { continue; } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/JfrReader.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/JfrReader.java index e0cc4e9..ea4985e 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/JfrReader.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/JfrReader.java @@ -51,7 +51,7 @@ public class JfrReader implements Closeable { public final Dictionary types = new Dictionary<>(); public final Map typesByName = new HashMap<>(); - public final Dictionary threads = new Dictionary<>(); + public final Map threads = new HashMap<>(); // spark public final Dictionary classes = new Dictionary<>(); public final Dictionary symbols = new Dictionary<>(); public final Dictionary methods = new Dictionary<>(); @@ -324,7 +324,7 @@ public class JfrReader implements Closeable { } private void readThreads(boolean hasGroup) { - int count = threads.preallocate(getVarint()); + int count = getVarint(); //threads.preallocate(getVarint()); for (int i = 0; i < count; i++) { long id = getVarlong(); String osName = getString(); -- cgit From f5ed027fcda244f1a1fea7f7bc7f9f4324f95db9 Mon Sep 17 00:00:00 2001 From: Luck Date: Wed, 16 Nov 2022 22:20:50 +0000 Subject: Fix concurrency errors in java sampler (#267) --- .../src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java | 2 +- .../src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java | 3 +-- 2 files changed, 2 insertions(+), 3 deletions(-) (limited to 'spark-common/src') diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java index 95c3508..42a457d 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java @@ -144,7 +144,7 @@ public class JavaSampler extends AbstractSampler implements Runnable { } // if we have just stepped over into a new window... 
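// (sampling tasks run on a worker pool and can complete out of order, so the last-window marker below must only ever move forwards)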
- int previousWindow = JavaSampler.this.lastWindow.getAndSet(this.window); + int previousWindow = JavaSampler.this.lastWindow.getAndUpdate(previous -> Math.max(this.window, previous)); if (previousWindow != 0 && previousWindow != this.window) { // collect statistics for the previous window diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java index 2e4b055..163365c 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java @@ -25,7 +25,6 @@ import me.lucko.spark.common.sampler.window.ProtoTimeEncoder; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; -import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; @@ -46,7 +45,7 @@ public abstract class AbstractNode { /** The accumulated sample time for this node, measured in microseconds */ // Integer key = the window (effectively System.currentTimeMillis() / 60_000) // LongAdder value = accumulated time in microseconds - private final Map times = new HashMap<>(); + private final Map times = new ConcurrentHashMap<>(); /** * Gets the time accumulator for a given window -- cgit From 149b0558afb77b1dffcd413f9259cdacce63711c Mon Sep 17 00:00:00 2001 From: Luck Date: Sat, 19 Nov 2022 13:33:04 +0000 Subject: Background profiler default to disabled for clients/proxies --- .../src/main/java/me/lucko/spark/common/SparkPlatform.java | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) (limited to 'spark-common/src') diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java index 105b167..2574443 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java +++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java @@ -44,6 +44,7 @@ import me.lucko.spark.common.monitor.net.NetworkMonitor; import me.lucko.spark.common.monitor.ping.PingStatistics; import me.lucko.spark.common.monitor.ping.PlayerPingProvider; import me.lucko.spark.common.monitor.tick.TickStatistics; +import me.lucko.spark.common.platform.PlatformInfo; import me.lucko.spark.common.platform.PlatformStatisticsProvider; import me.lucko.spark.common.sampler.Sampler; import me.lucko.spark.common.sampler.SamplerBuilder; @@ -142,7 +143,10 @@ public class SparkPlatform { this.activityLog = new ActivityLog(plugin.getPluginDirectory().resolve("activity.json")); this.activityLog.load(); - this.samplerContainer = new SamplerContainer(this.configuration.getBoolean("backgroundProfiler", true)); + this.samplerContainer = new SamplerContainer(this.configuration.getBoolean( + "backgroundProfiler", + plugin.getPlatformInfo().getType() == PlatformInfo.Type.SERVER + )); this.tickHook = plugin.createTickHook(); this.tickReporter = plugin.createTickReporter(); @@ -187,7 +191,7 @@ public class SparkPlatform { this.plugin.log(Level.INFO, "Starting background profiler..."); try { startBackgroundProfiler(); - } catch (Exception e) { + } catch (Throwable e) { e.printStackTrace(); } } -- cgit From 65f9460a1a27e930b3749525766fd44d57b65300 Mon Sep 17 00:00:00 2001 From: Luck Date: Sat, 26 Nov 2022 23:00:53 +0000 Subject: Include player/entity/chunk counts in window statistics --- .../spark/bukkit/BukkitWorldInfoProvider.java | 44 ++++++++++- 
.../common/command/modules/SamplerModule.java | 6 +- .../platform/PlatformStatisticsProvider.java | 7 +- .../platform/world/AsyncWorldInfoProvider.java | 90 ++++++++++++++++++++++ .../common/platform/world/WorldInfoProvider.java | 57 ++++++++++++-- .../platform/world/WorldStatisticsProvider.java | 37 +-------- .../spark/common/sampler/AbstractSampler.java | 2 +- .../me/lucko/spark/common/sampler/Sampler.java | 2 +- .../spark/common/sampler/SamplerContainer.java | 6 +- .../common/sampler/async/AsyncProfilerJob.java | 6 +- .../spark/common/sampler/async/AsyncSampler.java | 14 ++-- .../spark/common/sampler/java/JavaSampler.java | 14 ++-- .../sampler/window/WindowStatisticsCollector.java | 15 ++++ spark-common/src/main/proto/spark/spark.proto | 6 ++ .../spark/fabric/FabricWorldInfoProvider.java | 42 +++++++++- .../fabric/mixin/ClientEntityManagerAccessor.java | 4 + .../fabric/mixin/ServerEntityManagerAccessor.java | 4 + .../lucko/spark/forge/ForgeWorldInfoProvider.java | 42 +++++++++- .../main/resources/META-INF/accesstransformer.cfg | 4 +- .../spark/sponge/Sponge7WorldInfoProvider.java | 21 ++++- .../spark/sponge/Sponge8WorldInfoProvider.java | 21 ++++- 21 files changed, 366 insertions(+), 78 deletions(-) create mode 100644 spark-common/src/main/java/me/lucko/spark/common/platform/world/AsyncWorldInfoProvider.java (limited to 'spark-common/src') diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitWorldInfoProvider.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitWorldInfoProvider.java index 79c2715..8f876cf 100644 --- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitWorldInfoProvider.java +++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitWorldInfoProvider.java @@ -34,6 +34,21 @@ import java.util.ArrayList; import java.util.List; public class BukkitWorldInfoProvider implements WorldInfoProvider { + private static final boolean SUPPORTS_PAPER_COUNT_METHODS; + + static { + boolean supportsPaperCountMethods = false; + try { + World.class.getMethod("getEntityCount"); + World.class.getMethod("getTileEntityCount"); + World.class.getMethod("getChunkCount"); + supportsPaperCountMethods = true; + } catch (Exception e) { + // ignored + } + SUPPORTS_PAPER_COUNT_METHODS = supportsPaperCountMethods; + } + private final Server server; public BukkitWorldInfoProvider(Server server) { @@ -41,8 +56,33 @@ public class BukkitWorldInfoProvider implements WorldInfoProvider { } @Override - public Result poll() { - Result data = new Result<>(); + public CountsResult pollCounts() { + int players = this.server.getOnlinePlayers().size(); + int entities = 0; + int tileEntities = 0; + int chunks = 0; + + for (World world : this.server.getWorlds()) { + if (SUPPORTS_PAPER_COUNT_METHODS) { + entities += world.getEntityCount(); + tileEntities += world.getTileEntityCount(); + chunks += world.getChunkCount(); + } else { + entities += world.getEntities().size(); + Chunk[] chunksArray = world.getLoadedChunks(); + for (Chunk chunk : chunksArray) { + tileEntities += chunk.getTileEntities().length; + } + chunks += chunksArray.length; + } + } + + return new CountsResult(players, entities, tileEntities, chunks); + } + + @Override + public ChunksResult pollChunks() { + ChunksResult data = new ChunksResult<>(); for (World world : this.server.getWorlds()) { Chunk[] chunks = world.getLoadedChunks(); diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java index 
00cd4fa..f576eac 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java @@ -145,7 +145,7 @@ public class SamplerModule implements CommandModule { if (previousSampler.isRunningInBackground()) { // there is a background profiler running - stop that first resp.replyPrefixed(text("Stopping the background profiler before starting... please wait")); - previousSampler.stop(); + previousSampler.stop(true); platform.getSamplerContainer().unsetActiveSampler(previousSampler); } else { // there is a non-background profiler running - tell the user @@ -310,7 +310,7 @@ public class SamplerModule implements CommandModule { if (sampler == null) { resp.replyPrefixed(text("There isn't an active profiler running.")); } else { - platform.getSamplerContainer().stopActiveSampler(); + platform.getSamplerContainer().stopActiveSampler(true); resp.broadcastPrefixed(text("Profiler has been cancelled.", GOLD)); } } @@ -322,7 +322,7 @@ public class SamplerModule implements CommandModule { resp.replyPrefixed(text("There isn't an active profiler running.")); } else { platform.getSamplerContainer().unsetActiveSampler(sampler); - sampler.stop(); + sampler.stop(false); boolean saveToFile = arguments.boolFlag("save-to-file"); if (saveToFile) { diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java index 1eb9753..fc7e78a 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java @@ -31,7 +31,7 @@ import me.lucko.spark.common.monitor.net.NetworkMonitor; import me.lucko.spark.common.monitor.os.OperatingSystemInfo; import me.lucko.spark.common.monitor.ping.PingStatistics; import me.lucko.spark.common.monitor.tick.TickStatistics; -import me.lucko.spark.common.platform.world.WorldInfoProvider; +import me.lucko.spark.common.platform.world.AsyncWorldInfoProvider; import me.lucko.spark.common.platform.world.WorldStatisticsProvider; import me.lucko.spark.proto.SparkProtos.PlatformStatistics; import me.lucko.spark.proto.SparkProtos.SystemStatistics; @@ -188,8 +188,9 @@ public class PlatformStatisticsProvider { } try { - WorldInfoProvider worldInfo = this.platform.getPlugin().createWorldInfoProvider(); - WorldStatisticsProvider worldStatisticsProvider = new WorldStatisticsProvider(this.platform, worldInfo); + WorldStatisticsProvider worldStatisticsProvider = new WorldStatisticsProvider( + new AsyncWorldInfoProvider(this.platform, this.platform.getPlugin().createWorldInfoProvider()) + ); WorldStatistics worldStatistics = worldStatisticsProvider.getWorldStatistics(); if (worldStatistics != null) { builder.setWorld(worldStatistics); diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/AsyncWorldInfoProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/AsyncWorldInfoProvider.java new file mode 100644 index 0000000..82cddef --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/AsyncWorldInfoProvider.java @@ -0,0 +1,90 @@ +/* + * This file is part of spark. 
+ * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.platform.world; + +import me.lucko.spark.common.SparkPlatform; +import me.lucko.spark.common.SparkPlugin; + +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; +import java.util.function.Function; +import java.util.logging.Level; + +/** + * Async-friendly wrapper around {@link WorldInfoProvider}. + */ +public class AsyncWorldInfoProvider { + private static final int TIMEOUT_SECONDS = 5; + + private final SparkPlatform platform; + private final WorldInfoProvider provider; + + public AsyncWorldInfoProvider(SparkPlatform platform, WorldInfoProvider provider) { + this.platform = platform; + this.provider = provider == WorldInfoProvider.NO_OP ? null : provider; + } + + private CompletableFuture async(Function function) { + if (this.provider == null) { + return null; + } + + if (this.provider.mustCallSync()) { + SparkPlugin plugin = this.platform.getPlugin(); + return CompletableFuture.supplyAsync(() -> function.apply(this.provider), plugin::executeSync); + } else { + return CompletableFuture.completedFuture(function.apply(this.provider)); + } + } + + private T get(CompletableFuture future) { + if (future == null) { + return null; + } + + try { + return future.get(TIMEOUT_SECONDS, TimeUnit.SECONDS); + } catch (InterruptedException | ExecutionException e) { + throw new RuntimeException(e); + } catch (TimeoutException e) { + this.platform.getPlugin().log(Level.WARNING, "Timed out waiting for world statistics"); + return null; + } + } + + public CompletableFuture pollCounts() { + return async(WorldInfoProvider::pollCounts); + } + + public CompletableFuture>> pollChunks() { + return async(WorldInfoProvider::pollChunks); + } + + public WorldInfoProvider.CountsResult getCounts() { + return get(pollCounts()); + } + + public WorldInfoProvider.ChunksResult> getChunks() { + return get(pollChunks()); + } +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldInfoProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldInfoProvider.java index 9494816..7fb581d 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldInfoProvider.java +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldInfoProvider.java @@ -29,20 +29,37 @@ import java.util.Map; */ public interface WorldInfoProvider { - WorldInfoProvider NO_OP = () -> null; + WorldInfoProvider NO_OP = new WorldInfoProvider() { + @Override + public CountsResult pollCounts() { + return null; + } + + @Override + public ChunksResult> pollChunks() { + return null; + } + }; + + /** + * Polls for counts. + * + * @return the counts + */ + CountsResult pollCounts(); /** - * Polls for information. 
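Two details of the new AsyncWorldInfoProvider above are worth calling out. First, plugin::executeSync is handed to CompletableFuture.supplyAsync as its Executor argument; Executor is a functional interface, so any method that schedules a Runnable on the main thread qualifies. Second, the blocking get(...) is bounded by a five-second timeout, so a busy or blocked main thread degrades to "no world statistics" rather than hanging the profiler. A stripped-down sketch of the same pattern with stand-in names (SyncPoll, task and mainThread are hypothetical, not spark API):

    import java.util.concurrent.CompletableFuture;
    import java.util.concurrent.ExecutionException;
    import java.util.concurrent.Executor;
    import java.util.concurrent.TimeUnit;
    import java.util.concurrent.TimeoutException;
    import java.util.function.Supplier;

    class SyncPoll {
        static <T> T poll(Supplier<T> task, boolean mustCallSync, Executor mainThread) {
            CompletableFuture<T> future = mustCallSync
                    ? CompletableFuture.supplyAsync(task, mainThread)  // hop onto the main thread
                    : CompletableFuture.completedFuture(task.get());   // already safe to run here
            try {
                return future.get(5, TimeUnit.SECONDS);
            } catch (InterruptedException | ExecutionException e) {
                throw new RuntimeException(e);
            } catch (TimeoutException e) {
                return null; // main thread too busy; degrade instead of deadlocking
            }
        }
    }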
+ * Polls for chunk information. * - * @return the information + * @return the chunk information */ - Result> poll(); + ChunksResult> pollChunks(); default boolean mustCallSync() { return true; } - final class Result { + final class ChunksResult> { private final Map> worlds = new HashMap<>(); public void put(String worldName, List chunks) { @@ -54,4 +71,34 @@ public interface WorldInfoProvider { } } + final class CountsResult { + private final int players; + private final int entities; + private final int tileEntities; + private final int chunks; + + public CountsResult(int players, int entities, int tileEntities, int chunks) { + this.players = players; + this.entities = entities; + this.tileEntities = tileEntities; + this.chunks = chunks; + } + + public int players() { + return this.players; + } + + public int entities() { + return this.entities; + } + + public int tileEntities() { + return this.tileEntities; + } + + public int chunks() { + return this.chunks; + } + } + } diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java index 80c35a6..7e63222 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java @@ -20,8 +20,6 @@ package me.lucko.spark.common.platform.world; -import me.lucko.spark.common.SparkPlatform; -import me.lucko.spark.common.SparkPlugin; import me.lucko.spark.proto.SparkProtos.WorldStatistics; import java.util.ArrayList; @@ -30,46 +28,17 @@ import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicInteger; -import java.util.logging.Level; public class WorldStatisticsProvider { - private final SparkPlatform platform; - private final WorldInfoProvider provider; + private final AsyncWorldInfoProvider provider; - public WorldStatisticsProvider(SparkPlatform platform, WorldInfoProvider provider) { - this.platform = platform; + public WorldStatisticsProvider(AsyncWorldInfoProvider provider) { this.provider = provider; } public WorldStatistics getWorldStatistics() { - if (this.provider == WorldInfoProvider.NO_OP) { - return null; - } - - CompletableFuture>> future; - - if (this.provider.mustCallSync()) { - SparkPlugin plugin = this.platform.getPlugin(); - future = CompletableFuture.supplyAsync(this.provider::poll, plugin::executeSync); - } else { - future = CompletableFuture.completedFuture(this.provider.poll()); - } - - WorldInfoProvider.Result> result; - try { - result = future.get(5, TimeUnit.SECONDS); - } catch (InterruptedException | ExecutionException e) { - throw new RuntimeException(e); - } catch (TimeoutException e) { - this.platform.getPlugin().log(Level.WARNING, "Timed out waiting for world statistics"); - return null; - } - + WorldInfoProvider.ChunksResult> result = provider.getChunks(); if (result == null) { return null; } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java index 59e873c..e324fd3 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java +++ 
b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java @@ -120,7 +120,7 @@ public abstract class AbstractSampler implements Sampler { } @Override - public void stop() { + public void stop(boolean cancelled) { this.windowStatisticsCollector.stop(); } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java index 5d2026d..36a63f1 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java @@ -41,7 +41,7 @@ public interface Sampler { /** * Stops the sampler. */ - void stop(); + void stop(boolean cancelled); /** * Gets the time when the sampler started (unix timestamp in millis) diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerContainer.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerContainer.java index f56dee5..d55909c 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerContainer.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerContainer.java @@ -66,10 +66,10 @@ public class SamplerContainer implements AutoCloseable { /** * Stops the active sampler, if there is one. */ - public void stopActiveSampler() { + public void stopActiveSampler(boolean cancelled) { Sampler sampler = this.activeSampler.getAndSet(null); if (sampler != null) { - sampler.stop(); + sampler.stop(cancelled); } } @@ -79,7 +79,7 @@ public class SamplerContainer implements AutoCloseable { @Override public void close() { - stopActiveSampler(); + stopActiveSampler(true); } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerJob.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerJob.java index db1808c..d74b75f 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerJob.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerJob.java @@ -224,13 +224,15 @@ public class AsyncProfilerJob { } } - // delete the output file after reading + deleteOutputFile(); + } + + public void deleteOutputFile() { try { Files.deleteIfExists(this.outputFile); } catch (IOException e) { // ignore } - } private void readSegments(JfrReader reader, Predicate threadFilter, AsyncDataAggregator dataAggregator, int window) throws IOException { diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java index f2e7191..178f055 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java @@ -144,7 +144,7 @@ public class AsyncSampler extends AbstractSampler { } this.scheduler.schedule(() -> { - stop(); + stop(false); this.future.complete(this); }, delay, TimeUnit.MILLISECONDS); } @@ -153,13 +153,17 @@ public class AsyncSampler extends AbstractSampler { * Stops the profiler. 
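Taken together, these hunks thread a single "cancelled" flag from the command layer (SamplerModule) down through SamplerContainer and into each Sampler implementation, so that a cancelled profile skips window aggregation and deletes the temporary async-profiler output file instead of processing it. The calling contract, sketched with the names used in the diff (the sampler and platform variables are stand-ins):

    // normal completion: keep the recorded data and aggregate it
    sampler.stop(false);

    // user cancelled: discard the data and clean up temporary files
    platform.getSamplerContainer().stopActiveSampler(true);

The AsyncSampler override that follows acts on the same flag: only a non-cancelled stop measures the final window and feeds the finished job into the data aggregator; a cancelled stop just deletes the output file.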
*/ @Override - public void stop() { - super.stop(); + public void stop(boolean cancelled) { + super.stop(cancelled); synchronized (this.currentJobMutex) { this.currentJob.stop(); - this.windowStatisticsCollector.measureNow(this.currentJob.getWindow()); - this.currentJob.aggregate(this.dataAggregator); + if (!cancelled) { + this.windowStatisticsCollector.measureNow(this.currentJob.getWindow()); + this.currentJob.aggregate(this.dataAggregator); + } else { + this.currentJob.deleteOutputFile(); + } this.currentJob = null; } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java index 42a457d..72a37e8 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java @@ -94,13 +94,15 @@ public class JavaSampler extends AbstractSampler implements Runnable { } @Override - public void stop() { - super.stop(); + public void stop(boolean cancelled) { + super.stop(cancelled); this.task.cancel(false); - // collect statistics for the final window - this.windowStatisticsCollector.measureNow(this.lastWindow.get()); + if (!cancelled) { + // collect statistics for the final window + this.windowStatisticsCollector.measureNow(this.lastWindow.get()); + } } @Override @@ -111,7 +113,7 @@ public class JavaSampler extends AbstractSampler implements Runnable { long time = System.currentTimeMillis(); if (this.autoEndTime != -1 && this.autoEndTime <= time) { - stop(); + stop(false); this.future.complete(this); return; } @@ -120,7 +122,7 @@ public class JavaSampler extends AbstractSampler implements Runnable { ThreadInfo[] threadDumps = this.threadDumper.dumpThreads(this.threadBean); this.workerPool.execute(new InsertDataTask(threadDumps, window)); } catch (Throwable t) { - stop(); + stop(false); this.future.completeExceptionally(t); } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/window/WindowStatisticsCollector.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/window/WindowStatisticsCollector.java index 7da62fa..ce65013 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/window/WindowStatisticsCollector.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/window/WindowStatisticsCollector.java @@ -23,6 +23,8 @@ package me.lucko.spark.common.sampler.window; import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.monitor.cpu.CpuMonitor; import me.lucko.spark.common.monitor.tick.TickStatistics; +import me.lucko.spark.common.platform.world.AsyncWorldInfoProvider; +import me.lucko.spark.common.platform.world.WorldInfoProvider; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.util.RollingAverage; import me.lucko.spark.proto.SparkProtos; @@ -152,6 +154,19 @@ public class WindowStatisticsCollector { builder.setCpuProcess(CpuMonitor.processLoad1MinAvg()); builder.setCpuSystem(CpuMonitor.systemLoad1MinAvg()); + try { + AsyncWorldInfoProvider worldInfoProvider = new AsyncWorldInfoProvider(this.platform, this.platform.getPlugin().createWorldInfoProvider()); + WorldInfoProvider.CountsResult counts = worldInfoProvider.getCounts(); + if (counts != null) { + builder.setPlayers(counts.players()); + builder.setEntities(counts.entities()); + builder.setTileEntities(counts.tileEntities()); + builder.setChunks(counts.chunks()); + } + } catch (Exception e) { + e.printStackTrace(); + } + return 
builder.build(); } diff --git a/spark-common/src/main/proto/spark/spark.proto b/spark-common/src/main/proto/spark/spark.proto index be76bd7..f61e585 100644 --- a/spark-common/src/main/proto/spark/spark.proto +++ b/spark-common/src/main/proto/spark/spark.proto @@ -159,6 +159,12 @@ message WindowStatistics { double tps = 4; double mspt_median = 5; double mspt_max = 6; + + // world + int32 players = 7; + int32 entities = 8; + int32 tile_entities = 9; + int32 chunks = 10; } message RollingAverageValues { diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricWorldInfoProvider.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricWorldInfoProvider.java index f2f7b96..156db89 100644 --- a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricWorldInfoProvider.java +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricWorldInfoProvider.java @@ -40,6 +40,7 @@ import net.minecraft.server.MinecraftServer; import net.minecraft.server.world.ServerEntityManager; import net.minecraft.server.world.ServerWorld; import net.minecraft.util.math.ChunkPos; +import net.minecraft.world.entity.EntityIndex; import net.minecraft.world.entity.EntityTrackingSection; import net.minecraft.world.entity.SectionedEntityCache; @@ -72,8 +73,25 @@ public abstract class FabricWorldInfoProvider implements WorldInfoProvider { } @Override - public Result poll() { - Result data = new Result<>(); + public CountsResult pollCounts() { + int players = this.server.getCurrentPlayerCount(); + int entities = 0; + int chunks = 0; + + for (ServerWorld world : this.server.getWorlds()) { + ServerEntityManager entityManager = ((ServerWorldAccessor) world).getEntityManager(); + EntityIndex entityIndex = ((ServerEntityManagerAccessor) entityManager).getIndex(); + + entities += entityIndex.size(); + chunks += world.getChunkManager().getLoadedChunkCount(); + } + + return new CountsResult(players, entities, -1, chunks); + } + + @Override + public ChunksResult pollChunks() { + ChunksResult data = new ChunksResult<>(); for (ServerWorld world : this.server.getWorlds()) { ServerEntityManager entityManager = ((ServerWorldAccessor) world).getEntityManager(); @@ -95,8 +113,24 @@ public abstract class FabricWorldInfoProvider implements WorldInfoProvider { } @Override - public Result poll() { - Result data = new Result<>(); + public CountsResult pollCounts() { + ClientWorld world = this.client.world; + if (world == null) { + return null; + } + + ClientEntityManager entityManager = ((ClientWorldAccessor) world).getEntityManager(); + EntityIndex entityIndex = ((ClientEntityManagerAccessor) entityManager).getIndex(); + + int entities = entityIndex.size(); + int chunks = world.getChunkManager().getLoadedChunkCount(); + + return new CountsResult(-1, entities, -1, chunks); + } + + @Override + public ChunksResult pollChunks() { + ChunksResult data = new ChunksResult<>(); ClientWorld world = this.client.world; if (world == null) { diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientEntityManagerAccessor.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientEntityManagerAccessor.java index 88c9521..994c9a3 100644 --- a/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientEntityManagerAccessor.java +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientEntityManagerAccessor.java @@ -22,6 +22,7 @@ package me.lucko.spark.fabric.mixin; import net.minecraft.client.world.ClientEntityManager; import net.minecraft.entity.Entity; +import net.minecraft.world.entity.EntityIndex; import 
net.minecraft.world.entity.SectionedEntityCache; import org.spongepowered.asm.mixin.Mixin; @@ -33,4 +34,7 @@ public interface ClientEntityManagerAccessor { @Accessor SectionedEntityCache getCache(); + @Accessor + EntityIndex getIndex(); + } diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerEntityManagerAccessor.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerEntityManagerAccessor.java index 160a12b..2c67502 100644 --- a/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerEntityManagerAccessor.java +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerEntityManagerAccessor.java @@ -22,6 +22,7 @@ package me.lucko.spark.fabric.mixin; import net.minecraft.entity.Entity; import net.minecraft.server.world.ServerEntityManager; +import net.minecraft.world.entity.EntityIndex; import net.minecraft.world.entity.SectionedEntityCache; import org.spongepowered.asm.mixin.Mixin; @@ -33,4 +34,7 @@ public interface ServerEntityManagerAccessor { @Accessor SectionedEntityCache getCache(); + @Accessor + EntityIndex getIndex(); + } diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeWorldInfoProvider.java b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeWorldInfoProvider.java index 1d65d6a..4750c08 100644 --- a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeWorldInfoProvider.java +++ b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeWorldInfoProvider.java @@ -34,6 +34,7 @@ import net.minecraft.server.level.ServerLevel; import net.minecraft.world.entity.Entity; import net.minecraft.world.entity.EntityType; import net.minecraft.world.level.ChunkPos; +import net.minecraft.world.level.entity.EntityLookup; import net.minecraft.world.level.entity.EntitySection; import net.minecraft.world.level.entity.EntitySectionStorage; import net.minecraft.world.level.entity.PersistentEntitySectionManager; @@ -68,8 +69,25 @@ public abstract class ForgeWorldInfoProvider implements WorldInfoProvider { } @Override - public Result poll() { - Result data = new Result<>(); + public CountsResult pollCounts() { + int players = this.server.getPlayerCount(); + int entities = 0; + int chunks = 0; + + for (ServerLevel level : this.server.getAllLevels()) { + PersistentEntitySectionManager entityManager = level.entityManager; + EntityLookup entityIndex = entityManager.visibleEntityStorage; + + entities += entityIndex.count(); + chunks += level.getChunkSource().getLoadedChunksCount(); + } + + return new CountsResult(players, entities, -1, chunks); + } + + @Override + public ChunksResult pollChunks() { + ChunksResult data = new ChunksResult<>(); for (ServerLevel level : this.server.getAllLevels()) { PersistentEntitySectionManager entityManager = level.entityManager; @@ -91,8 +109,24 @@ public abstract class ForgeWorldInfoProvider implements WorldInfoProvider { } @Override - public Result poll() { - Result data = new Result<>(); + public CountsResult pollCounts() { + ClientLevel level = this.client.level; + if (level == null) { + return null; + } + + TransientEntitySectionManager entityManager = level.entityStorage; + EntityLookup entityIndex = entityManager.entityStorage; + + int entities = entityIndex.count(); + int chunks = level.getChunkSource().getLoadedChunksCount(); + + return new CountsResult(-1, entities, -1, chunks); + } + + @Override + public ChunksResult pollChunks() { + ChunksResult data = new ChunksResult<>(); ClientLevel level = this.client.level; if (level == null) { diff --git a/spark-forge/src/main/resources/META-INF/accesstransformer.cfg 
b/spark-forge/src/main/resources/META-INF/accesstransformer.cfg index 39e9c1a..2699a0e 100644 --- a/spark-forge/src/main/resources/META-INF/accesstransformer.cfg +++ b/spark-forge/src/main/resources/META-INF/accesstransformer.cfg @@ -1,5 +1,7 @@ public net.minecraft.server.level.ServerLevel f_143244_ # entityManager public net.minecraft.world.level.entity.PersistentEntitySectionManager f_157495_ # sectionStorage +public net.minecraft.world.level.entity.PersistentEntitySectionManager f_157494_ # visibleEntityStorage public net.minecraft.client.multiplayer.ClientLevel f_171631_ # entityStorage public net.minecraft.world.level.entity.TransientEntitySectionManager f_157638_ # sectionStorage -public net.minecraft.client.Minecraft f_91018_ # gameThread \ No newline at end of file +public net.minecraft.world.level.entity.TransientEntitySectionManager f_157637_ # entityStorage +public net.minecraft.client.Minecraft f_91018_ # gameThread diff --git a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7WorldInfoProvider.java b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7WorldInfoProvider.java index fa6fa6b..df58028 100644 --- a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7WorldInfoProvider.java +++ b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7WorldInfoProvider.java @@ -20,6 +20,7 @@ package me.lucko.spark.sponge; +import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import me.lucko.spark.common.platform.world.AbstractChunkInfo; @@ -44,8 +45,24 @@ public class Sponge7WorldInfoProvider implements WorldInfoProvider { } @Override - public Result poll() { - Result data = new Result<>(); + public CountsResult pollCounts() { + int players = this.server.getOnlinePlayers().size(); + int entities = 0; + int tileEntities = 0; + int chunks = 0; + + for (World world : this.server.getWorlds()) { + entities += world.getEntities().size(); + tileEntities += world.getTileEntities().size(); + chunks += Iterables.size(world.getLoadedChunks()); + } + + return new CountsResult(players, entities, tileEntities, chunks); + } + + @Override + public ChunksResult pollChunks() { + ChunksResult data = new ChunksResult<>(); for (World world : this.server.getWorlds()) { List chunks = Lists.newArrayList(world.getLoadedChunks()); diff --git a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8WorldInfoProvider.java b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8WorldInfoProvider.java index bff4d6e..69b4515 100644 --- a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8WorldInfoProvider.java +++ b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8WorldInfoProvider.java @@ -20,6 +20,7 @@ package me.lucko.spark.sponge; +import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import me.lucko.spark.common.platform.world.AbstractChunkInfo; @@ -45,8 +46,24 @@ public class Sponge8WorldInfoProvider implements WorldInfoProvider { } @Override - public Result poll() { - Result data = new Result<>(); + public CountsResult pollCounts() { + int players = this.server.onlinePlayers().size(); + int entities = 0; + int tileEntities = 0; + int chunks = 0; + + for (ServerWorld world : this.server.worldManager().worlds()) { + entities += world.entities().size(); + tileEntities += world.blockEntities().size(); + chunks += Iterables.size(world.loadedChunks()); + } + + return new CountsResult(players, entities, tileEntities, chunks); + } + + @Override + public ChunksResult pollChunks() { + ChunksResult data = new 
ChunksResult<>(); for (ServerWorld world : this.server.worldManager().worlds()) { List chunks = Lists.newArrayList(world.loadedChunks()); -- cgit From 115ff5d8d58f6793fd8ea980a95718e7ffca1454 Mon Sep 17 00:00:00 2001 From: Luck Date: Sun, 27 Nov 2022 22:42:51 +0000 Subject: Upgrade async-profiler to 2.9 --- .../spark/linux/aarch64/libasyncProfiler.so | Bin 334176 -> 343408 bytes .../spark/linux/amd64-musl/libasyncProfiler.so | Bin 308800 -> 317560 bytes .../spark/linux/amd64/libasyncProfiler.so | Bin 352112 -> 361312 bytes .../main/resources/spark/macos/libasyncProfiler.so | Bin 690208 -> 724576 bytes 4 files changed, 0 insertions(+), 0 deletions(-) (limited to 'spark-common/src') diff --git a/spark-common/src/main/resources/spark/linux/aarch64/libasyncProfiler.so b/spark-common/src/main/resources/spark/linux/aarch64/libasyncProfiler.so index cf6c48b..800cf91 100755 Binary files a/spark-common/src/main/resources/spark/linux/aarch64/libasyncProfiler.so and b/spark-common/src/main/resources/spark/linux/aarch64/libasyncProfiler.so differ diff --git a/spark-common/src/main/resources/spark/linux/amd64-musl/libasyncProfiler.so b/spark-common/src/main/resources/spark/linux/amd64-musl/libasyncProfiler.so index 0a08f7c..3c81d1c 100755 Binary files a/spark-common/src/main/resources/spark/linux/amd64-musl/libasyncProfiler.so and b/spark-common/src/main/resources/spark/linux/amd64-musl/libasyncProfiler.so differ diff --git a/spark-common/src/main/resources/spark/linux/amd64/libasyncProfiler.so b/spark-common/src/main/resources/spark/linux/amd64/libasyncProfiler.so index 0deb9e0..5af5071 100755 Binary files a/spark-common/src/main/resources/spark/linux/amd64/libasyncProfiler.so and b/spark-common/src/main/resources/spark/linux/amd64/libasyncProfiler.so differ diff --git a/spark-common/src/main/resources/spark/macos/libasyncProfiler.so b/spark-common/src/main/resources/spark/macos/libasyncProfiler.so index 65b4aed..4930c67 100755 Binary files a/spark-common/src/main/resources/spark/macos/libasyncProfiler.so and b/spark-common/src/main/resources/spark/macos/libasyncProfiler.so differ -- cgit From fc1e371d67551e9548491e9bf50534d91ce5d170 Mon Sep 17 00:00:00 2001 From: Luck Date: Sun, 27 Nov 2022 23:38:21 +0000 Subject: Temporary solution to async-profiler JVM crashing issues (#271, #273, #274) --- .../java/me/lucko/spark/common/SparkPlatform.java | 36 ++----- .../common/command/modules/SamplerModule.java | 4 +- .../common/sampler/BackgroundSamplerManager.java | 116 +++++++++++++++++++++ .../spark/common/sampler/SamplerContainer.java | 9 -- .../me/lucko/spark/common/util/Configuration.java | 60 +++++++++-- 5 files changed, 179 insertions(+), 46 deletions(-) create mode 100644 spark-common/src/main/java/me/lucko/spark/common/sampler/BackgroundSamplerManager.java (limited to 'spark-common/src') diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java index 2574443..dae04ff 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java +++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java @@ -44,12 +44,9 @@ import me.lucko.spark.common.monitor.net.NetworkMonitor; import me.lucko.spark.common.monitor.ping.PingStatistics; import me.lucko.spark.common.monitor.ping.PlayerPingProvider; import me.lucko.spark.common.monitor.tick.TickStatistics; -import me.lucko.spark.common.platform.PlatformInfo; import me.lucko.spark.common.platform.PlatformStatisticsProvider; -import 
me.lucko.spark.common.sampler.Sampler; -import me.lucko.spark.common.sampler.SamplerBuilder; +import me.lucko.spark.common.sampler.BackgroundSamplerManager; import me.lucko.spark.common.sampler.SamplerContainer; -import me.lucko.spark.common.sampler.ThreadGrouper; import me.lucko.spark.common.sampler.source.ClassSourceLookup; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.tick.TickReporter; @@ -104,6 +101,7 @@ public class SparkPlatform { private final ReentrantLock commandExecuteLock = new ReentrantLock(true); private final ActivityLog activityLog; private final SamplerContainer samplerContainer; + private final BackgroundSamplerManager backgroundSamplerManager; private final TickHook tickHook; private final TickReporter tickReporter; private final TickStatistics tickStatistics; @@ -143,10 +141,8 @@ public class SparkPlatform { this.activityLog = new ActivityLog(plugin.getPluginDirectory().resolve("activity.json")); this.activityLog.load(); - this.samplerContainer = new SamplerContainer(this.configuration.getBoolean( - "backgroundProfiler", - plugin.getPlatformInfo().getType() == PlatformInfo.Type.SERVER - )); + this.samplerContainer = new SamplerContainer(); + this.backgroundSamplerManager = new BackgroundSamplerManager(this, this.configuration); this.tickHook = plugin.createTickHook(); this.tickReporter = plugin.createTickReporter(); @@ -187,14 +183,7 @@ public class SparkPlatform { this.plugin.registerApi(api); SparkApi.register(api); - if (this.samplerContainer.isBackgroundProfilerEnabled()) { - this.plugin.log(Level.INFO, "Starting background profiler..."); - try { - startBackgroundProfiler(); - } catch (Throwable e) { - e.printStackTrace(); - } - } + this.backgroundSamplerManager.initialise(); } public void disable() { @@ -255,6 +244,10 @@ public class SparkPlatform { return this.samplerContainer; } + public BackgroundSamplerManager getBackgroundSamplerManager() { + return this.backgroundSamplerManager; + } + public TickHook getTickHook() { return this.tickHook; } @@ -287,17 +280,6 @@ public class SparkPlatform { return this.serverNormalOperationStartTime; } - public void startBackgroundProfiler() { - Sampler sampler = new SamplerBuilder() - .background(true) - .threadDumper(this.plugin.getDefaultThreadDumper()) - .threadGrouper(ThreadGrouper.BY_POOL) - .samplingInterval(this.configuration.getInteger("backgroundProfilerInterval", 10)) - .start(this); - - this.samplerContainer.setActiveSampler(sampler); - } - public Path resolveSaveFile(String prefix, String extension) { Path pluginFolder = this.plugin.getPluginDirectory(); try { diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java index f576eac..cd00f0d 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java @@ -337,9 +337,7 @@ public class SamplerModule implements CommandModule { handleUpload(platform, resp, sampler, comment, mergeMode, saveToFile); // if the previous sampler was running in the background, create a new one - if (platform.getSamplerContainer().isBackgroundProfilerEnabled()) { - platform.startBackgroundProfiler(); - + if (platform.getBackgroundSamplerManager().restartBackgroundSampler()) { resp.broadcastPrefixed(text() .append(text("Restarted the background profiler. 
")) .append(text("(If you don't want this to happen, run: /" + platform.getPlugin().getCommandName() + " profiler cancel)", DARK_GRAY)) diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/BackgroundSamplerManager.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/BackgroundSamplerManager.java new file mode 100644 index 0000000..d655739 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/BackgroundSamplerManager.java @@ -0,0 +1,116 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.sampler; + +import me.lucko.spark.common.SparkPlatform; +import me.lucko.spark.common.platform.PlatformInfo; +import me.lucko.spark.common.util.Configuration; + +import java.util.logging.Level; + +public class BackgroundSamplerManager { + + private static final String OPTION_ENABLED = "backgroundProfiler"; + private static final String OPTION_ENGINE = "backgroundProfilerEngine"; + private static final String OPTION_INTERVAL = "backgroundProfilerInterval"; + + private static final String MARKER_FAILED = "_marker_background_profiler_failed"; + + private final SparkPlatform platform; + private final Configuration configuration; + private final boolean enabled; + + public BackgroundSamplerManager(SparkPlatform platform, Configuration configuration) { + this.platform = platform; + this.configuration = configuration; + this.enabled = this.configuration.getBoolean( + OPTION_ENABLED, + this.platform.getPlugin().getPlatformInfo().getType() == PlatformInfo.Type.SERVER + ); + } + + public void initialise() { + if (!this.enabled) { + return; + } + + // are we enabling the background profiler by default for the first time? + boolean didEnableByDefault = false; + if (!this.configuration.contains(OPTION_ENABLED)) { + this.configuration.setBoolean(OPTION_ENABLED, true); + didEnableByDefault = true; + } + + // did the background profiler fail to start on the previous attempt? + if (this.configuration.getBoolean(MARKER_FAILED, false)) { + this.platform.getPlugin().log(Level.WARNING, "It seems the background profiler failed to start when spark was last enabled. Sorry about that!"); + this.platform.getPlugin().log(Level.WARNING, "In the future, spark will try to use the built-in Java profiling engine instead."); + + this.configuration.remove(MARKER_FAILED); + this.configuration.setString(OPTION_ENGINE, "java"); + this.configuration.save(); + } + + this.platform.getPlugin().log(Level.INFO, "Starting background profiler..."); + + if (didEnableByDefault) { + // set the failed marker and save before we try to start the profiler, + // then remove the marker afterwards if everything goes ok! 
+ this.configuration.setBoolean(MARKER_FAILED, true); + this.configuration.save(); + } + + try { + startSampler(); + + if (didEnableByDefault) { + this.configuration.remove(MARKER_FAILED); + this.configuration.save(); + } + + } catch (Throwable e) { + e.printStackTrace(); + } + } + + public boolean restartBackgroundSampler() { + if (this.enabled) { + startSampler(); + return true; + } + return false; + } + + private void startSampler() { + boolean forceJavaEngine = this.configuration.getString(OPTION_ENGINE, "async").equals("java"); + + Sampler sampler = new SamplerBuilder() + .background(true) + .threadDumper(this.platform.getPlugin().getDefaultThreadDumper()) + .threadGrouper(ThreadGrouper.BY_POOL) + .samplingInterval(this.configuration.getInteger(OPTION_INTERVAL, 10)) + .forceJavaSampler(forceJavaEngine) + .start(this.platform); + + this.platform.getSamplerContainer().setActiveSampler(sampler); + } + +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerContainer.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerContainer.java index d55909c..15b1029 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerContainer.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerContainer.java @@ -28,11 +28,6 @@ import java.util.concurrent.atomic.AtomicReference; public class SamplerContainer implements AutoCloseable { private final AtomicReference activeSampler = new AtomicReference<>(); - private final boolean backgroundProfilerEnabled; - - public SamplerContainer(boolean backgroundProfilerEnabled) { - this.backgroundProfilerEnabled = backgroundProfilerEnabled; - } /** * Gets the active sampler, or null if a sampler is not active. @@ -73,10 +68,6 @@ public class SamplerContainer implements AutoCloseable { } } - public boolean isBackgroundProfilerEnabled() { - return this.backgroundProfilerEnabled; - } - @Override public void close() { stopActiveSampler(true); diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/Configuration.java b/spark-common/src/main/java/me/lucko/spark/common/util/Configuration.java index ce63878..32f3bc6 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/util/Configuration.java +++ b/spark-common/src/main/java/me/lucko/spark/common/util/Configuration.java @@ -20,32 +20,58 @@ package me.lucko.spark.common.util; +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; import com.google.gson.JsonElement; import com.google.gson.JsonObject; -import com.google.gson.JsonParser; import com.google.gson.JsonPrimitive; import java.io.BufferedReader; +import java.io.BufferedWriter; import java.io.IOException; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; public final class Configuration { - private static final JsonParser PARSER = new JsonParser(); + private static final Gson GSON = new GsonBuilder().setPrettyPrinting().create(); - private final JsonObject root; + private final Path file; + private JsonObject root; public Configuration(Path file) { + this.file = file; + load(); + } + + public void load() { JsonObject root = null; - if (Files.exists(file)) { - try (BufferedReader reader = Files.newBufferedReader(file, StandardCharsets.UTF_8)) { - root = PARSER.parse(reader).getAsJsonObject(); + if (Files.exists(this.file)) { + try (BufferedReader reader = Files.newBufferedReader(this.file, StandardCharsets.UTF_8)) { + root = GSON.fromJson(reader, JsonObject.class); } catch (IOException e) { e.printStackTrace(); } } 
-        this.root = root != null ? root : new JsonObject();
+        if (root == null) {
+            root = new JsonObject();
+            root.addProperty("_header", "spark configuration file - https://spark.lucko.me/docs/Configuration");
+        }
+        this.root = root;
+    }
+
+    public void save() {
+        try {
+            Files.createDirectories(this.file.getParent());
+        } catch (IOException e) {
+            // ignore
+        }
+
+        try (BufferedWriter writer = Files.newBufferedWriter(this.file, StandardCharsets.UTF_8)) {
+            GSON.toJson(this.root, writer);
+        } catch (IOException e) {
+            e.printStackTrace();
+        }
     }
 
     public String getString(String path, String def) {
@@ -77,4 +103,24 @@
         return val.isBoolean() ? val.getAsInt() : def;
     }
 
+    public void setString(String path, String value) {
+        this.root.add(path, new JsonPrimitive(value));
+    }
+
+    public void setBoolean(String path, boolean value) {
+        this.root.add(path, new JsonPrimitive(value));
+    }
+
+    public void setInteger(String path, int value) {
+        this.root.add(path, new JsonPrimitive(value));
+    }
+
+    public boolean contains(String path) {
+        return this.root.has(path);
+    }
+
+    public void remove(String path) {
+        this.root.remove(path);
+    }
+
 }
-- cgit

From 1536537473aa5dead4c7353444e217df30d2ad96 Mon Sep 17 00:00:00 2001
From: Luck
Date: Sat, 10 Dec 2022 09:54:20 +0000
Subject: Never run the background profiler on MC clients

---
 .../me/lucko/spark/common/sampler/BackgroundSamplerManager.java | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

(limited to 'spark-common/src')

diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/BackgroundSamplerManager.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/BackgroundSamplerManager.java
index d655739..7e3b6b4 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/BackgroundSamplerManager.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/BackgroundSamplerManager.java
@@ -41,10 +41,9 @@ public class BackgroundSamplerManager {
     public BackgroundSamplerManager(SparkPlatform platform, Configuration configuration) {
         this.platform = platform;
         this.configuration = configuration;
-        this.enabled = this.configuration.getBoolean(
-                OPTION_ENABLED,
-                this.platform.getPlugin().getPlatformInfo().getType() == PlatformInfo.Type.SERVER
-        );
+
+        PlatformInfo.Type type = this.platform.getPlugin().getPlatformInfo().getType();
+        this.enabled = type != PlatformInfo.Type.CLIENT && this.configuration.getBoolean(OPTION_ENABLED, type == PlatformInfo.Type.SERVER);
     }
 
     public void initialise() {
-- cgit
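A few closing notes on the last two commits. The initialise() sequence in BackgroundSamplerManager above is a small write-ahead crash marker: the failure flag is flushed to disk before the risky native profiler call, so even if async-profiler hard-crashes the JVM, the flag survives and the next start-up falls back to the built-in Java engine. The pattern in isolation (a sketch; riskyNativeCall is a hypothetical stand-in, config is the Configuration helper from the same commit):

    config.setBoolean("_marker_background_profiler_failed", true);
    config.save();                                        // persisted BEFORE the dangerous call
    riskyNativeCall();                                    // may take the whole JVM down
    config.remove("_marker_background_profiler_failed");
    config.save();                                        // only reached on success

The reworked Configuration class itself is now a small read-write JSON store. Typical usage, built only from the methods shown in the diff (the file name is illustrative):

    Configuration config = new Configuration(pluginDirectory.resolve("config.json"));
    boolean bg = config.getBoolean("backgroundProfiler", true);
    config.setString("backgroundProfilerEngine", "java");
    config.save(); // written back as pretty-printed JSON via Gson

Finally, after the last commit the background profiler's enablement reduces to a single expression; restated as a decision table (the same logic as the constructor above):

    // CLIENT -> never enabled, regardless of config
    // SERVER -> enabled unless "backgroundProfiler" is explicitly false (default on)
    // PROXY  -> enabled only when "backgroundProfiler" is explicitly true (default off)
    boolean enabled = type != PlatformInfo.Type.CLIENT
            && configuration.getBoolean("backgroundProfiler", type == PlatformInfo.Type.SERVER);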