aboutsummaryrefslogtreecommitdiff
path: root/spark-common/src/main/java/me
diff options
context:
space:
mode:
authorlucko <git@lucko.me>2022-12-27 09:17:54 +0000
committerGitHub <noreply@github.com>2022-12-27 09:17:54 +0000
commite5b278047ccb7bc6b301d787474c51d162911867 (patch)
tree11bba64e8f28ce8b83adc05252b75f17e2ccbf6a /spark-common/src/main/java/me
parent4a16a1a2f4eb09f706b4a541e3d31618de29420b (diff)
parent1075665def4a41cf0064255a6da1d1a652f5d473 (diff)
downloadspark-e5b278047ccb7bc6b301d787474c51d162911867.tar.gz
spark-e5b278047ccb7bc6b301d787474c51d162911867.tar.bz2
spark-e5b278047ccb7bc6b301d787474c51d162911867.zip
Merge pull request #284 from embeddedt/forge-1.7.10
Align 1.7.10 with master
Diffstat (limited to 'spark-common/src/main/java/me')
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java118
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java45
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/command/Arguments.java11
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/command/Command.java58
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/command/modules/ActivityLogModule.java1
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/command/modules/GcMonitoringModule.java22
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java4
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java241
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/monitor/LinuxProc.java (renamed from spark-common/src/main/java/me/lucko/spark/common/util/LinuxProc.java)9
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/monitor/WindowsWmic.java74
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuInfo.java13
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/monitor/memory/MemoryInfo.java2
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkInterfaceInfo.java2
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/monitor/os/OperatingSystemInfo.java86
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/platform/MetadataProvider.java (renamed from spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadNodeOrder.java)35
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java24
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ConfigParser.java48
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ExcludedConfigFilter.java (renamed from spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java)57
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesConfigParser.java (renamed from spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesFileReader.java)29
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java69
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/platform/world/AbstractChunkInfo.java55
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/platform/world/AsyncWorldInfoProvider.java90
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/platform/world/ChunkInfo.java44
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/platform/world/CountMap.java110
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldInfoProvider.java104
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java189
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java99
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/sampler/BackgroundSamplerManager.java115
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java17
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java29
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerContainer.java76
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerSettings.java61
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java24
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/AbstractDataAggregator.java6
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/DataAggregator.java8
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncDataAggregator.java4
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java58
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerJob.java276
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java273
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/sampler/async/JfrParsingException.java27
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/sampler/async/ProfileSegment.java50
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/Dictionary.java4
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/JfrReader.java4
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaDataAggregator.java7
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java85
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/sampler/java/SimpleDataAggregator.java4
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java36
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java75
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java79
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java166
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/sampler/source/ClassSourceLookup.java462
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/sampler/source/SourceMetadata.java81
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/sampler/window/ProfilingWindowUtils.java70
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/sampler/window/ProtoTimeEncoder.java93
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/sampler/window/WindowStatisticsCollector.java287
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/util/BytebinClient.java75
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/util/ClassSourceLookup.java241
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/util/Compression.java60
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/util/Configuration.java70
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/util/FormatUtil.java20
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/util/IndexedListBuilder.java (renamed from spark-common/src/main/java/me/lucko/spark/common/util/AbstractHttpClient.java)33
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/util/SparkPlaceholder.java191
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/util/TemporaryFiles.java81
63 files changed, 3890 insertions, 997 deletions
diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
index 0ef4556..dae04ff 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
@@ -45,17 +45,18 @@ import me.lucko.spark.common.monitor.ping.PingStatistics;
import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
import me.lucko.spark.common.monitor.tick.TickStatistics;
import me.lucko.spark.common.platform.PlatformStatisticsProvider;
+import me.lucko.spark.common.sampler.BackgroundSamplerManager;
+import me.lucko.spark.common.sampler.SamplerContainer;
+import me.lucko.spark.common.sampler.source.ClassSourceLookup;
import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.common.tick.TickReporter;
import me.lucko.spark.common.util.BytebinClient;
-import me.lucko.spark.common.util.ClassSourceLookup;
import me.lucko.spark.common.util.Configuration;
import me.lucko.spark.common.util.TemporaryFiles;
+import net.kyori.adventure.text.Component;
import net.kyori.adventure.text.event.ClickEvent;
-import okhttp3.OkHttpClient;
-
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
@@ -64,6 +65,7 @@ import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
+import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
@@ -74,13 +76,11 @@ import java.util.stream.Collectors;
import static net.kyori.adventure.text.Component.space;
import static net.kyori.adventure.text.Component.text;
-import static net.kyori.adventure.text.format.NamedTextColor.DARK_GRAY;
import static net.kyori.adventure.text.format.NamedTextColor.GOLD;
import static net.kyori.adventure.text.format.NamedTextColor.GRAY;
import static net.kyori.adventure.text.format.NamedTextColor.RED;
import static net.kyori.adventure.text.format.NamedTextColor.WHITE;
import static net.kyori.adventure.text.format.TextDecoration.BOLD;
-import static net.kyori.adventure.text.format.TextDecoration.UNDERLINED;
/**
* Abstract spark implementation used by all platforms.
@@ -91,15 +91,17 @@ public class SparkPlatform {
private static final DateTimeFormatter DATE_TIME_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd_HH.mm.ss");
private final SparkPlugin plugin;
+ private final TemporaryFiles temporaryFiles;
private final Configuration configuration;
private final String viewerUrl;
- private final OkHttpClient httpClient;
private final BytebinClient bytebinClient;
private final boolean disableResponseBroadcast;
private final List<CommandModule> commandModules;
private final List<Command> commands;
private final ReentrantLock commandExecuteLock = new ReentrantLock(true);
private final ActivityLog activityLog;
+ private final SamplerContainer samplerContainer;
+ private final BackgroundSamplerManager backgroundSamplerManager;
private final TickHook tickHook;
private final TickReporter tickReporter;
private final TickStatistics tickStatistics;
@@ -112,13 +114,12 @@ public class SparkPlatform {
public SparkPlatform(SparkPlugin plugin) {
this.plugin = plugin;
+ this.temporaryFiles = new TemporaryFiles(this.plugin.getPluginDirectory().resolve("tmp"));
this.configuration = new Configuration(this.plugin.getPluginDirectory().resolve("config.json"));
this.viewerUrl = this.configuration.getString("viewerUrl", "https://spark.lucko.me/");
String bytebinUrl = this.configuration.getString("bytebinUrl", "https://bytebin.lucko.me/");
-
- this.httpClient = new OkHttpClient();
- this.bytebinClient = new BytebinClient(this.httpClient, bytebinUrl, "spark-plugin");
+ this.bytebinClient = new BytebinClient(bytebinUrl, "spark-plugin");
this.disableResponseBroadcast = this.configuration.getBoolean("disableResponseBroadcast", false);
@@ -140,6 +141,9 @@ public class SparkPlatform {
this.activityLog = new ActivityLog(plugin.getPluginDirectory().resolve("activity.json"));
this.activityLog.load();
+ this.samplerContainer = new SamplerContainer();
+ this.backgroundSamplerManager = new BackgroundSamplerManager(this, this.configuration);
+
this.tickHook = plugin.createTickHook();
this.tickReporter = plugin.createTickReporter();
this.tickStatistics = this.tickHook != null || this.tickReporter != null ? new TickStatistics() : null;
@@ -178,6 +182,8 @@ public class SparkPlatform {
SparkApi api = new SparkApi(this);
this.plugin.registerApi(api);
SparkApi.register(api);
+
+ this.backgroundSamplerManager.initialise();
}
public void disable() {
@@ -195,20 +201,21 @@ public class SparkPlatform {
module.close();
}
- SparkApi.unregister();
+ this.samplerContainer.close();
- TemporaryFiles.deleteTemporaryFiles();
+ SparkApi.unregister();
- // shutdown okhttp
- // see: https://github.com/square/okhttp/issues/4029
- this.httpClient.dispatcher().executorService().shutdown();
- this.httpClient.connectionPool().evictAll();
+ this.temporaryFiles.deleteTemporaryFiles();
}
public SparkPlugin getPlugin() {
return this.plugin;
}
+ public TemporaryFiles getTemporaryFiles() {
+ return this.temporaryFiles;
+ }
+
public Configuration getConfiguration() {
return this.configuration;
}
@@ -233,6 +240,14 @@ public class SparkPlatform {
return this.activityLog;
}
+ public SamplerContainer getSamplerContainer() {
+ return this.samplerContainer;
+ }
+
+ public BackgroundSamplerManager getBackgroundSamplerManager() {
+ return this.backgroundSamplerManager;
+ }
+
public TickHook getTickHook() {
return this.tickHook;
}
@@ -366,14 +381,15 @@ public class SparkPlatform {
.append(text("v" + getPlugin().getVersion(), GRAY))
.build()
);
+
+ String helpCmd = "/" + getPlugin().getCommandName() + " help";
resp.replyPrefixed(text()
.color(GRAY)
- .append(text("Use "))
+ .append(text("Run "))
.append(text()
- .content("/" + getPlugin().getCommandName() + " help")
+ .content(helpCmd)
.color(WHITE)
- .decoration(UNDERLINED, true)
- .clickEvent(ClickEvent.runCommand("/" + getPlugin().getCommandName() + " help"))
+ .clickEvent(ClickEvent.runCommand(helpCmd))
.build()
)
.append(text(" to view usage information."))
@@ -389,7 +405,7 @@ public class SparkPlatform {
if (command.aliases().contains(alias)) {
resp.setCommandPrimaryAlias(command.primaryAlias());
try {
- command.executor().execute(this, sender, resp, new Arguments(rawArgs));
+ command.executor().execute(this, sender, resp, new Arguments(rawArgs, command.allowSubCommand()));
} catch (Arguments.ParseException e) {
resp.replyPrefixed(text(e.getMessage(), RED));
}
@@ -437,35 +453,53 @@ public class SparkPlatform {
);
for (Command command : commands) {
String usage = "/" + getPlugin().getCommandName() + " " + command.primaryAlias();
- ClickEvent clickEvent = ClickEvent.suggestCommand(usage);
- sender.reply(text()
- .append(text(">", GOLD, BOLD))
- .append(space())
- .append(text().content(usage).color(GRAY).clickEvent(clickEvent).build())
- .build()
- );
- for (Command.ArgumentInfo arg : command.arguments()) {
- if (arg.requiresParameter()) {
+
+ if (command.allowSubCommand()) {
+ Map<String, List<Command.ArgumentInfo>> argumentsBySubCommand = command.arguments().stream()
+ .collect(Collectors.groupingBy(Command.ArgumentInfo::subCommandName, LinkedHashMap::new, Collectors.toList()));
+
+ argumentsBySubCommand.forEach((subCommand, arguments) -> {
+ String subCommandUsage = usage + " " + subCommand;
+
sender.reply(text()
- .content(" ")
- .append(text("[", DARK_GRAY))
- .append(text("--" + arg.argumentName(), GRAY))
+ .append(text(">", GOLD, BOLD))
.append(space())
- .append(text("<" + arg.parameterDescription() + ">", DARK_GRAY))
- .append(text("]", DARK_GRAY))
- .build()
- );
- } else {
- sender.reply(text()
- .content(" ")
- .append(text("[", DARK_GRAY))
- .append(text("--" + arg.argumentName(), GRAY))
- .append(text("]", DARK_GRAY))
+ .append(text().content(subCommandUsage).color(GRAY).clickEvent(ClickEvent.suggestCommand(subCommandUsage)).build())
.build()
);
+
+ for (Command.ArgumentInfo arg : arguments) {
+ if (arg.argumentName().isEmpty()) {
+ continue;
+ }
+ sender.reply(arg.toComponent(" "));
+ }
+ });
+ } else {
+ sender.reply(text()
+ .append(text(">", GOLD, BOLD))
+ .append(space())
+ .append(text().content(usage).color(GRAY).clickEvent(ClickEvent.suggestCommand(usage)).build())
+ .build()
+ );
+
+ for (Command.ArgumentInfo arg : command.arguments()) {
+ sender.reply(arg.toComponent(" "));
}
}
}
+
+ sender.reply(Component.empty());
+ sender.replyPrefixed(text()
+ .append(text("For full usage information, please go to: "))
+ .append(text()
+ .content("https://spark.lucko.me/docs/Command-Usage")
+ .color(WHITE)
+ .clickEvent(ClickEvent.openUrl("https://spark.lucko.me/docs/Command-Usage"))
+ .build()
+ )
+ .build()
+ );
}
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java
index b817df1..b7aef2a 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java
@@ -23,14 +23,19 @@ package me.lucko.spark.common;
import me.lucko.spark.api.Spark;
import me.lucko.spark.common.command.sender.CommandSender;
import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
+import me.lucko.spark.common.platform.MetadataProvider;
import me.lucko.spark.common.platform.PlatformInfo;
import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
import me.lucko.spark.common.sampler.ThreadDumper;
+import me.lucko.spark.common.sampler.source.ClassSourceLookup;
+import me.lucko.spark.common.sampler.source.SourceMetadata;
import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.common.tick.TickReporter;
-import me.lucko.spark.common.util.ClassSourceLookup;
import java.nio.file.Path;
+import java.util.Collection;
+import java.util.Collections;
import java.util.logging.Level;
import java.util.stream.Stream;
@@ -75,6 +80,15 @@ public interface SparkPlugin {
void executeAsync(Runnable task);
/**
+ * Executes the given {@link Runnable} on the server/client main thread.
+ *
+ * @param task the task
+ */
+ default void executeSync(Runnable task) {
+ throw new UnsupportedOperationException();
+ }
+
+ /**
* Print to the plugin logger.
*
* @param level the log level
@@ -123,6 +137,15 @@ public interface SparkPlugin {
}
/**
+ * Gets a list of known sources (plugins/mods) on the platform.
+ *
+ * @return a list of sources
+ */
+ default Collection<SourceMetadata> getKnownSources() {
+ return Collections.emptyList();
+ }
+
+ /**
* Creates a player ping provider function.
*
* <p>Returns {@code null} if the platform does not support querying player pings</p>
@@ -139,7 +162,25 @@ public interface SparkPlugin {
* @return the server config provider function
*/
default ServerConfigProvider createServerConfigProvider() {
- return ServerConfigProvider.NO_OP;
+ return null;
+ }
+
+ /**
+ * Creates a metadata provider for the platform.
+ *
+ * @return the platform extra metadata provider
+ */
+ default MetadataProvider createExtraMetadataProvider() {
+ return null;
+ }
+
+ /**
+ * Creates a world info provider.
+ *
+ * @return the world info provider function
+ */
+ default WorldInfoProvider createWorldInfoProvider() {
+ return WorldInfoProvider.NO_OP;
}
/**
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/Arguments.java b/spark-common/src/main/java/me/lucko/spark/common/command/Arguments.java
index 17c49e2..ad8c777 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/Arguments.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/Arguments.java
@@ -38,8 +38,9 @@ public class Arguments {
private final List<String> rawArgs;
private final SetMultimap<String, String> parsedArgs;
+ private String parsedSubCommand = null;
- public Arguments(List<String> rawArgs) {
+ public Arguments(List<String> rawArgs, boolean allowSubCommand) {
this.rawArgs = rawArgs;
this.parsedArgs = HashMultimap.create();
@@ -52,7 +53,9 @@ public class Arguments {
Matcher matcher = FLAG_REGEX.matcher(arg);
boolean matches = matcher.matches();
- if (flag == null || matches) {
+ if (i == 0 && allowSubCommand && !matches) {
+ this.parsedSubCommand = arg;
+ } else if (flag == null || matches) {
if (!matches) {
throw new ParseException("Expected flag at position " + i + " but got '" + arg + "' instead!");
}
@@ -80,6 +83,10 @@ public class Arguments {
return this.rawArgs;
}
+ public String subCommand() {
+ return this.parsedSubCommand;
+ }
+
public int intFlag(String key) {
Iterator<String> it = this.parsedArgs.get(key).iterator();
if (it.hasNext()) {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/Command.java b/spark-common/src/main/java/me/lucko/spark/common/command/Command.java
index dad15e6..c6871a9 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/Command.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/Command.java
@@ -25,10 +25,17 @@ import com.google.common.collect.ImmutableList;
import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.command.sender.CommandSender;
+import net.kyori.adventure.text.Component;
+
import java.util.Collections;
import java.util.List;
import java.util.Objects;
+import static net.kyori.adventure.text.Component.space;
+import static net.kyori.adventure.text.Component.text;
+import static net.kyori.adventure.text.format.NamedTextColor.DARK_GRAY;
+import static net.kyori.adventure.text.format.NamedTextColor.GRAY;
+
public class Command {
public static Builder builder() {
@@ -39,12 +46,14 @@ public class Command {
private final List<ArgumentInfo> arguments;
private final Executor executor;
private final TabCompleter tabCompleter;
+ private final boolean allowSubCommand;
- private Command(List<String> aliases, List<ArgumentInfo> arguments, Executor executor, TabCompleter tabCompleter) {
+ private Command(List<String> aliases, List<ArgumentInfo> arguments, Executor executor, TabCompleter tabCompleter, boolean allowSubCommand) {
this.aliases = aliases;
this.arguments = arguments;
this.executor = executor;
this.tabCompleter = tabCompleter;
+ this.allowSubCommand = allowSubCommand;
}
public List<String> aliases() {
@@ -67,11 +76,16 @@ public class Command {
return this.aliases.get(0);
}
+ public boolean allowSubCommand() {
+ return this.allowSubCommand;
+ }
+
public static final class Builder {
private final ImmutableList.Builder<String> aliases = ImmutableList.builder();
private final ImmutableList.Builder<ArgumentInfo> arguments = ImmutableList.builder();
private Executor executor = null;
private TabCompleter tabCompleter = null;
+ private boolean allowSubCommand = false;
Builder() {
@@ -82,8 +96,13 @@ public class Command {
return this;
}
+ public Builder argumentUsage(String subCommandName, String argumentName, String parameterDescription) {
+ this.arguments.add(new ArgumentInfo(subCommandName, argumentName, parameterDescription));
+ return this;
+ }
+
public Builder argumentUsage(String argumentName, String parameterDescription) {
- this.arguments.add(new ArgumentInfo(argumentName, parameterDescription));
+ this.arguments.add(new ArgumentInfo("", argumentName, parameterDescription));
return this;
}
@@ -97,6 +116,11 @@ public class Command {
return this;
}
+ public Builder allowSubCommand(boolean allowSubCommand) {
+ this.allowSubCommand = allowSubCommand;
+ return this;
+ }
+
public Command build() {
List<String> aliases = this.aliases.build();
if (aliases.isEmpty()) {
@@ -108,7 +132,7 @@ public class Command {
if (this.tabCompleter == null) {
this.tabCompleter = TabCompleter.empty();
}
- return new Command(aliases, this.arguments.build(), this.executor, this.tabCompleter);
+ return new Command(aliases, this.arguments.build(), this.executor, this.tabCompleter, this.allowSubCommand);
}
}
@@ -127,14 +151,20 @@ public class Command {
}
public static final class ArgumentInfo {
+ private final String subCommandName;
private final String argumentName;
private final String parameterDescription;
- public ArgumentInfo(String argumentName, String parameterDescription) {
+ public ArgumentInfo(String subCommandName, String argumentName, String parameterDescription) {
+ this.subCommandName = subCommandName;
this.argumentName = argumentName;
this.parameterDescription = parameterDescription;
}
+ public String subCommandName() {
+ return this.subCommandName;
+ }
+
public String argumentName() {
return this.argumentName;
}
@@ -146,6 +176,26 @@ public class Command {
public boolean requiresParameter() {
return this.parameterDescription != null;
}
+
+ public Component toComponent(String padding) {
+ if (requiresParameter()) {
+ return text()
+ .content(padding)
+ .append(text("[", DARK_GRAY))
+ .append(text("--" + argumentName(), GRAY))
+ .append(space())
+ .append(text("<" + parameterDescription() + ">", DARK_GRAY))
+ .append(text("]", DARK_GRAY))
+ .build();
+ } else {
+ return text()
+ .content(padding)
+ .append(text("[", DARK_GRAY))
+ .append(text("--" + argumentName(), GRAY))
+ .append(text("]", DARK_GRAY))
+ .build();
+ }
+ }
}
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/ActivityLogModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/ActivityLogModule.java
index b777f3e..6252ac7 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/ActivityLogModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/ActivityLogModule.java
@@ -50,6 +50,7 @@ import static net.kyori.adventure.text.format.TextDecoration.BOLD;
public class ActivityLogModule implements CommandModule, RowRenderer<Activity> {
private final Pagination.Builder pagination = Pagination.builder()
+ .width(45)
.renderer(new Renderer() {
@Override
public Component renderEmpty() {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/GcMonitoringModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/GcMonitoringModule.java
index 2ce83fd..a2da0a0 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/GcMonitoringModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/GcMonitoringModule.java
@@ -123,7 +123,7 @@ public class GcMonitoringModule implements CommandModule {
);
report.add(text()
.content(" ")
- .append(text(formatTime((long) averageFrequency), WHITE))
+ .append(text(FormatUtil.formatSeconds((long) averageFrequency / 1000), WHITE))
.append(text(" avg frequency", GRAY))
.build()
);
@@ -153,26 +153,6 @@ public class GcMonitoringModule implements CommandModule {
);
}
- private static String formatTime(long millis) {
- if (millis <= 0) {
- return "0s";
- }
-
- long second = millis / 1000;
- long minute = second / 60;
- second = second % 60;
-
- StringBuilder sb = new StringBuilder();
- if (minute != 0) {
- sb.append(minute).append("m ");
- }
- if (second != 0) {
- sb.append(second).append("s ");
- }
-
- return sb.toString().trim();
- }
-
private static class ReportingGcMonitor extends GarbageCollectionMonitor implements GarbageCollectionMonitor.Listener {
private final SparkPlatform platform;
private final CommandResponseHandler resp;
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java
index 1030f35..5bd62a8 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java
@@ -36,8 +36,6 @@ import me.lucko.spark.proto.SparkHeapProtos;
import net.kyori.adventure.text.event.ClickEvent;
-import okhttp3.MediaType;
-
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
@@ -54,7 +52,7 @@ import static net.kyori.adventure.text.format.NamedTextColor.GREEN;
import static net.kyori.adventure.text.format.NamedTextColor.RED;
public class HeapAnalysisModule implements CommandModule {
- private static final MediaType SPARK_HEAP_MEDIA_TYPE = MediaType.parse("application/x-spark-heap");
+ private static final String SPARK_HEAP_MEDIA_TYPE = "application/x-spark-heap";
@Override
public void registerCommands(Consumer<Command> consumer) {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
index 970d062..cd00f0d 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
@@ -35,17 +35,17 @@ import me.lucko.spark.common.sampler.Sampler;
import me.lucko.spark.common.sampler.SamplerBuilder;
import me.lucko.spark.common.sampler.ThreadDumper;
import me.lucko.spark.common.sampler.ThreadGrouper;
-import me.lucko.spark.common.sampler.ThreadNodeOrder;
import me.lucko.spark.common.sampler.async.AsyncSampler;
import me.lucko.spark.common.sampler.node.MergeMode;
+import me.lucko.spark.common.sampler.source.ClassSourceLookup;
import me.lucko.spark.common.tick.TickHook;
+import me.lucko.spark.common.util.FormatUtil;
import me.lucko.spark.common.util.MethodDisambiguator;
import me.lucko.spark.proto.SparkSamplerProtos;
+import net.kyori.adventure.text.Component;
import net.kyori.adventure.text.event.ClickEvent;
-import okhttp3.MediaType;
-
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
@@ -64,57 +64,45 @@ import static net.kyori.adventure.text.format.NamedTextColor.DARK_GRAY;
import static net.kyori.adventure.text.format.NamedTextColor.GOLD;
import static net.kyori.adventure.text.format.NamedTextColor.GRAY;
import static net.kyori.adventure.text.format.NamedTextColor.RED;
+import static net.kyori.adventure.text.format.NamedTextColor.WHITE;
public class SamplerModule implements CommandModule {
- private static final MediaType SPARK_SAMPLER_MEDIA_TYPE = MediaType.parse("application/x-spark-sampler");
-
- /** The sampler instance currently running, if any */
- private Sampler activeSampler = null;
-
- @Override
- public void close() {
- if (this.activeSampler != null) {
- this.activeSampler.stop();
- this.activeSampler = null;
- }
- }
+ private static final String SPARK_SAMPLER_MEDIA_TYPE = "application/x-spark-sampler";
@Override
public void registerCommands(Consumer<Command> consumer) {
consumer.accept(Command.builder()
.aliases("profiler", "sampler")
- .argumentUsage("info", null)
- .argumentUsage("stop", null)
- .argumentUsage("cancel", null)
- .argumentUsage("interval", "interval millis")
- .argumentUsage("thread", "thread name")
- .argumentUsage("only-ticks-over", "tick length millis")
- .argumentUsage("timeout", "timeout seconds")
- .argumentUsage("regex --thread", "thread regex")
- .argumentUsage("combine-all", null)
- .argumentUsage("not-combined", null)
- .argumentUsage("force-java-sampler", null)
- .argumentUsage("stop --comment", "comment")
- .argumentUsage("stop --order-by-time", null)
- .argumentUsage("stop --save-to-file", null)
+ .allowSubCommand(true)
+ .argumentUsage("info", "", null)
+ .argumentUsage("start", "timeout", "timeout seconds")
+ .argumentUsage("start", "thread *", null)
+ .argumentUsage("start", "thread", "thread name")
+ .argumentUsage("start", "only-ticks-over", "tick length millis")
+ .argumentUsage("start", "interval", "interval millis")
+ .argumentUsage("stop", "", null)
+ .argumentUsage("cancel", "", null)
.executor(this::profiler)
.tabCompleter((platform, sender, arguments) -> {
- if (arguments.contains("--info") || arguments.contains("--cancel")) {
- return Collections.emptyList();
- }
-
- if (arguments.contains("--stop") || arguments.contains("--upload")) {
- return TabCompleter.completeForOpts(arguments, "--order-by-time", "--comment", "--save-to-file");
+ List<String> opts = Collections.emptyList();
+
+ if (arguments.size() > 0) {
+ String subCommand = arguments.get(0);
+ if (subCommand.equals("stop") || subCommand.equals("upload")) {
+ opts = new ArrayList<>(Arrays.asList("--comment", "--save-to-file"));
+ opts.removeAll(arguments);
+ }
+ if (subCommand.equals("start")) {
+ opts = new ArrayList<>(Arrays.asList("--timeout", "--regex", "--combine-all",
+ "--not-combined", "--interval", "--only-ticks-over", "--force-java-sampler"));
+ opts.removeAll(arguments);
+ opts.add("--thread"); // allowed multiple times
+ }
}
- List<String> opts = new ArrayList<>(Arrays.asList("--info", "--stop", "--cancel",
- "--timeout", "--regex", "--combine-all", "--not-combined", "--interval",
- "--only-ticks-over", "--force-java-sampler"));
- opts.removeAll(arguments);
- opts.add("--thread"); // allowed multiple times
-
return TabCompleter.create()
- .from(0, CompletionSupplier.startsWith(opts))
+ .at(0, CompletionSupplier.startsWith(Arrays.asList("info", "start", "stop", "cancel")))
+ .from(1, CompletionSupplier.startsWith(opts))
.complete(arguments);
})
.build()
@@ -122,25 +110,50 @@ public class SamplerModule implements CommandModule {
}
private void profiler(SparkPlatform platform, CommandSender sender, CommandResponseHandler resp, Arguments arguments) {
- if (arguments.boolFlag("info")) {
- profilerInfo(resp);
+ String subCommand = arguments.subCommand() == null ? "" : arguments.subCommand();
+
+ if (subCommand.equals("info") || arguments.boolFlag("info")) {
+ profilerInfo(platform, resp);
return;
}
- if (arguments.boolFlag("cancel")) {
- profilerCancel(resp);
+ if (subCommand.equals("cancel") || arguments.boolFlag("cancel")) {
+ profilerCancel(platform, resp);
return;
}
- if (arguments.boolFlag("stop") || arguments.boolFlag("upload")) {
+ if (subCommand.equals("stop") || subCommand.equals("upload") || arguments.boolFlag("stop") || arguments.boolFlag("upload")) {
profilerStop(platform, sender, resp, arguments);
return;
}
- profilerStart(platform, sender, resp, arguments);
+ if (subCommand.equals("start") || arguments.boolFlag("start")) {
+ profilerStart(platform, sender, resp, arguments);
+ return;
+ }
+
+ if (arguments.raw().isEmpty()) {
+ profilerInfo(platform, resp);
+ } else {
+ profilerStart(platform, sender, resp, arguments);
+ }
}
private void profilerStart(SparkPlatform platform, CommandSender sender, CommandResponseHandler resp, Arguments arguments) {
+ Sampler previousSampler = platform.getSamplerContainer().getActiveSampler();
+ if (previousSampler != null) {
+ if (previousSampler.isRunningInBackground()) {
+ // there is a background profiler running - stop that first
+ resp.replyPrefixed(text("Stopping the background profiler before starting... please wait"));
+ previousSampler.stop(true);
+ platform.getSamplerContainer().unsetActiveSampler(previousSampler);
+ } else {
+ // there is a non-background profiler running - tell the user
+ profilerInfo(platform, resp);
+ return;
+ }
+ }
+
int timeoutSeconds = arguments.intFlag("timeout");
if (timeoutSeconds != -1 && timeoutSeconds <= 10) {
resp.replyPrefixed(text("The specified timeout is not long enough for accurate results to be formed. " +
@@ -197,12 +210,7 @@ public class SamplerModule implements CommandModule {
}
}
- if (this.activeSampler != null) {
- resp.replyPrefixed(text("An active profiler is already running."));
- return;
- }
-
- resp.broadcastPrefixed(text("Initializing a new profiler, please wait..."));
+ resp.broadcastPrefixed(text("Starting a new profiler, please wait..."));
SamplerBuilder builder = new SamplerBuilder();
builder.threadDumper(threadDumper);
@@ -217,21 +225,25 @@ public class SamplerModule implements CommandModule {
if (ticksOver != -1) {
builder.ticksOver(ticksOver, tickHook);
}
- Sampler sampler = this.activeSampler = builder.start(platform);
+ Sampler sampler = builder.start(platform);
+ platform.getSamplerContainer().setActiveSampler(sampler);
resp.broadcastPrefixed(text()
- .append(text("Profiler now active!", GOLD))
+ .append(text("Profiler is now running!", GOLD))
.append(space())
.append(text("(" + (sampler instanceof AsyncSampler ? "async" : "built-in java") + ")", DARK_GRAY))
.build()
);
+
if (timeoutSeconds == -1) {
- resp.broadcastPrefixed(text("Use '/" + platform.getPlugin().getCommandName() + " profiler --stop' to stop profiling and upload the results."));
+ resp.broadcastPrefixed(text("It will run in the background until it is stopped by an admin."));
+ resp.broadcastPrefixed(text("To stop the profiler and upload the results, run:"));
+ resp.broadcastPrefixed(cmdPrompt("/" + platform.getPlugin().getCommandName() + " profiler stop"));
} else {
- resp.broadcastPrefixed(text("The results will be automatically returned after the profiler has been running for " + timeoutSeconds + " seconds."));
+ resp.broadcastPrefixed(text("The results will be automatically returned after the profiler has been running for " + FormatUtil.formatSeconds(timeoutSeconds) + "."));
}
- CompletableFuture<Sampler> future = this.activeSampler.getFuture();
+ CompletableFuture<Sampler> future = sampler.getFuture();
// send message if profiling fails
future.whenCompleteAsync((s, throwable) -> {
@@ -242,70 +254,101 @@ public class SamplerModule implements CommandModule {
});
// set activeSampler to null when complete.
- future.whenCompleteAsync((s, throwable) -> {
- if (sampler == this.activeSampler) {
- this.activeSampler = null;
- }
- });
+ sampler.getFuture().whenCompleteAsync((s, throwable) -> platform.getSamplerContainer().unsetActiveSampler(s));
// await the result
if (timeoutSeconds != -1) {
- ThreadNodeOrder threadOrder = arguments.boolFlag("order-by-time") ? ThreadNodeOrder.BY_TIME : ThreadNodeOrder.BY_NAME;
String comment = Iterables.getFirst(arguments.stringFlag("comment"), null);
MethodDisambiguator methodDisambiguator = new MethodDisambiguator();
MergeMode mergeMode = arguments.boolFlag("separate-parent-calls") ? MergeMode.separateParentCalls(methodDisambiguator) : MergeMode.sameMethod(methodDisambiguator);
boolean saveToFile = arguments.boolFlag("save-to-file");
future.thenAcceptAsync(s -> {
resp.broadcastPrefixed(text("The active profiler has completed! Uploading results..."));
- handleUpload(platform, resp, s, threadOrder, comment, mergeMode, saveToFile);
+ handleUpload(platform, resp, s, comment, mergeMode, saveToFile);
});
}
}
- private void profilerInfo(CommandResponseHandler resp) {
- if (this.activeSampler == null) {
- resp.replyPrefixed(text("There isn't an active profiler running."));
+ private void profilerInfo(SparkPlatform platform, CommandResponseHandler resp) {
+ Sampler sampler = platform.getSamplerContainer().getActiveSampler();
+ if (sampler == null) {
+ resp.replyPrefixed(text("The profiler isn't running!"));
+ resp.replyPrefixed(text("To start a new one, run:"));
+ resp.replyPrefixed(cmdPrompt("/" + platform.getPlugin().getCommandName() + " profiler start"));
} else {
- long timeout = this.activeSampler.getEndTime();
+ resp.replyPrefixed(text("Profiler is already running!", GOLD));
+
+ long runningTime = (System.currentTimeMillis() - sampler.getStartTime()) / 1000L;
+
+ if (sampler.isRunningInBackground()) {
+ resp.replyPrefixed(text()
+ .append(text("It was started "))
+ .append(text("automatically", WHITE))
+ .append(text(" when spark enabled and has been running in the background for " + FormatUtil.formatSeconds(runningTime) + "."))
+ .build()
+ );
+ } else {
+ resp.replyPrefixed(text("So far, it has profiled for " + FormatUtil.formatSeconds(runningTime) + "."));
+ }
+
+ long timeout = sampler.getAutoEndTime();
if (timeout == -1) {
- resp.replyPrefixed(text("There is an active profiler currently running, with no defined timeout."));
+ resp.replyPrefixed(text("To stop the profiler and upload the results, run:"));
+ resp.replyPrefixed(cmdPrompt("/" + platform.getPlugin().getCommandName() + " profiler stop"));
} else {
long timeoutDiff = (timeout - System.currentTimeMillis()) / 1000L;
- resp.replyPrefixed(text("There is an active profiler currently running, due to timeout in " + timeoutDiff + " seconds."));
+ resp.replyPrefixed(text("It is due to complete automatically and upload results in " + FormatUtil.formatSeconds(timeoutDiff) + "."));
}
- long runningTime = (System.currentTimeMillis() - this.activeSampler.getStartTime()) / 1000L;
- resp.replyPrefixed(text("It has been profiling for " + runningTime + " seconds so far."));
+ resp.replyPrefixed(text("To cancel the profiler without uploading the results, run:"));
+ resp.replyPrefixed(cmdPrompt("/" + platform.getPlugin().getCommandName() + " profiler cancel"));
}
}
- private void profilerCancel(CommandResponseHandler resp) {
- if (this.activeSampler == null) {
+ private void profilerCancel(SparkPlatform platform, CommandResponseHandler resp) {
+ Sampler sampler = platform.getSamplerContainer().getActiveSampler();
+ if (sampler == null) {
resp.replyPrefixed(text("There isn't an active profiler running."));
} else {
- close();
- resp.broadcastPrefixed(text("The active profiler has been cancelled.", GOLD));
+ platform.getSamplerContainer().stopActiveSampler(true);
+ resp.broadcastPrefixed(text("Profiler has been cancelled.", GOLD));
}
}
private void profilerStop(SparkPlatform platform, CommandSender sender, CommandResponseHandler resp, Arguments arguments) {
- if (this.activeSampler == null) {
+ Sampler sampler = platform.getSamplerContainer().getActiveSampler();
+
+ if (sampler == null) {
resp.replyPrefixed(text("There isn't an active profiler running."));
} else {
- this.activeSampler.stop();
- resp.broadcastPrefixed(text("The active profiler has been stopped! Uploading results..."));
- ThreadNodeOrder threadOrder = arguments.boolFlag("order-by-time") ? ThreadNodeOrder.BY_TIME : ThreadNodeOrder.BY_NAME;
+ platform.getSamplerContainer().unsetActiveSampler(sampler);
+ sampler.stop(false);
+
+ boolean saveToFile = arguments.boolFlag("save-to-file");
+ if (saveToFile) {
+ resp.broadcastPrefixed(text("Stopping the profiler & saving results, please wait..."));
+ } else {
+ resp.broadcastPrefixed(text("Stopping the profiler & uploading results, please wait..."));
+ }
+
String comment = Iterables.getFirst(arguments.stringFlag("comment"), null);
MethodDisambiguator methodDisambiguator = new MethodDisambiguator();
MergeMode mergeMode = arguments.boolFlag("separate-parent-calls") ? MergeMode.separateParentCalls(methodDisambiguator) : MergeMode.sameMethod(methodDisambiguator);
- boolean saveToFile = arguments.boolFlag("save-to-file");
- handleUpload(platform, resp, this.activeSampler, threadOrder, comment, mergeMode, saveToFile);
- this.activeSampler = null;
+ handleUpload(platform, resp, sampler, comment, mergeMode, saveToFile);
+
+ // if the previous sampler was running in the background, create a new one
+ if (platform.getBackgroundSamplerManager().restartBackgroundSampler()) {
+ resp.broadcastPrefixed(text()
+ .append(text("Restarted the background profiler. "))
+ .append(text("(If you don't want this to happen, run: /" + platform.getPlugin().getCommandName() + " profiler cancel)", DARK_GRAY))
+ .build()
+ );
+ }
}
}
- private void handleUpload(SparkPlatform platform, CommandResponseHandler resp, Sampler sampler, ThreadNodeOrder threadOrder, String comment, MergeMode mergeMode, boolean saveToFileFlag) {
- SparkSamplerProtos.SamplerData output = sampler.toProto(platform, resp.sender(), threadOrder, comment, mergeMode, platform.createClassSourceLookup());
+ private void handleUpload(SparkPlatform platform, CommandResponseHandler resp, Sampler sampler, String comment, MergeMode mergeMode, boolean saveToFileFlag) {
+ SparkSamplerProtos.SamplerData output = sampler.toProto(platform, resp.sender(), comment, mergeMode, ClassSourceLookup.create(platform));
boolean saveToFile = false;
if (saveToFileFlag) {
@@ -315,7 +358,7 @@ public class SamplerModule implements CommandModule {
String key = platform.getBytebinClient().postContent(output, SPARK_SAMPLER_MEDIA_TYPE).key();
String url = platform.getViewerUrl() + key;
- resp.broadcastPrefixed(text("Profiler results:", GOLD));
+ resp.broadcastPrefixed(text("Profiler stopped & upload complete!", GOLD));
resp.broadcast(text()
.content(url)
.color(GRAY)
@@ -336,13 +379,9 @@ public class SamplerModule implements CommandModule {
try {
Files.write(file, output.toByteArray());
- resp.broadcastPrefixed(text()
- .content("Profile written to: ")
- .color(GOLD)
- .append(text(file.toString(), GRAY))
- .build()
- );
- resp.broadcastPrefixed(text("You can read the profile file using the viewer web-app - " + platform.getViewerUrl(), GRAY));
+ resp.broadcastPrefixed(text("Profiler stopped & save complete!", GOLD));
+ resp.broadcastPrefixed(text("Data has been written to: " + file));
+ resp.broadcastPrefixed(text("You can view the profile file using the web app @ " + platform.getViewerUrl(), GRAY));
platform.getActivityLog().addToLog(Activity.fileActivity(resp.sender(), System.currentTimeMillis(), "Profiler", file.toString()));
} catch (IOException e) {
@@ -351,4 +390,16 @@ public class SamplerModule implements CommandModule {
}
}
}
+
+ private static Component cmdPrompt(String cmd) {
+ return text()
+ .append(text(" "))
+ .append(text()
+ .content(cmd)
+ .color(WHITE)
+ .clickEvent(ClickEvent.runCommand(cmd))
+ .build()
+ )
+ .build();
+ }
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/LinuxProc.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/LinuxProc.java
index 7d688d7..563e247 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/util/LinuxProc.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/LinuxProc.java
@@ -18,7 +18,7 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-package me.lucko.spark.common.util;
+package me.lucko.spark.common.monitor;
import org.checkerframework.checker.nullness.qual.NonNull;
import org.checkerframework.checker.nullness.qual.Nullable;
@@ -49,7 +49,12 @@ public enum LinuxProc {
/**
* Information about the system network usage.
*/
- NET_DEV("/proc/net/dev");
+ NET_DEV("/proc/net/dev"),
+
+ /**
+ * Information about the operating system distro.
+ */
+ OSINFO("/etc/os-release");
private final Path path;
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/WindowsWmic.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/WindowsWmic.java
new file mode 100644
index 0000000..6b602d9
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/WindowsWmic.java
@@ -0,0 +1,74 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.monitor;
+
+import org.checkerframework.checker.nullness.qual.NonNull;
+
+import java.io.BufferedReader;
+import java.io.InputStreamReader;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+/**
+ * Utility for reading from wmic (Windows Management Instrumentation Command-line) on Windows systems.
+ */
+public enum WindowsWmic {
+
+ /**
+ * Gets the CPU name
+ */
+ CPU_GET_NAME("wmic", "cpu", "get", "name", "/FORMAT:list"),
+
+ /**
+ * Gets the operating system name (caption) and version.
+ */
+ OS_GET_CAPTION_AND_VERSION("wmic", "os", "get", "caption,version", "/FORMAT:list");
+
+ private static final boolean SUPPORTED = System.getProperty("os.name").startsWith("Windows");
+
+ private final String[] cmdArgs;
+
+ WindowsWmic(String... cmdArgs) {
+ this.cmdArgs = cmdArgs;
+ }
+
+ public @NonNull List<String> read() {
+ if (SUPPORTED) {
+ ProcessBuilder process = new ProcessBuilder(this.cmdArgs).redirectErrorStream(true);
+ try (BufferedReader buf = new BufferedReader(new InputStreamReader(process.start().getInputStream()))) {
+ List<String> lines = new ArrayList<>();
+
+ String line;
+ while ((line = buf.readLine()) != null) {
+ lines.add(line);
+ }
+
+ return lines;
+ } catch (Exception e) {
+ // ignore
+ }
+ }
+
+ return Collections.emptyList();
+ }
+}
+
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuInfo.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuInfo.java
index 9bbe0f8..9954bd5 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuInfo.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuInfo.java
@@ -20,12 +20,13 @@
package me.lucko.spark.common.monitor.cpu;
-import me.lucko.spark.common.util.LinuxProc;
+import me.lucko.spark.common.monitor.LinuxProc;
+import me.lucko.spark.common.monitor.WindowsWmic;
import java.util.regex.Pattern;
/**
- * Small utility to query the CPU model on Linux systems.
+ * Small utility to query the CPU model on Linux and Windows systems.
*/
public enum CpuInfo {
;
@@ -40,11 +41,17 @@ public enum CpuInfo {
public static String queryCpuModel() {
for (String line : LinuxProc.CPUINFO.read()) {
String[] splitLine = SPACE_COLON_SPACE_PATTERN.split(line);
-
if (splitLine[0].equals("model name") || splitLine[0].equals("Processor")) {
return splitLine[1];
}
}
+
+ for (String line : WindowsWmic.CPU_GET_NAME.read()) {
+ if (line.startsWith("Name")) {
+ return line.substring(5).trim();
+ }
+ }
+
return "";
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/MemoryInfo.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/MemoryInfo.java
index 226f75b..8f63f71 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/MemoryInfo.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/memory/MemoryInfo.java
@@ -20,7 +20,7 @@
package me.lucko.spark.common.monitor.memory;
-import me.lucko.spark.common.util.LinuxProc;
+import me.lucko.spark.common.monitor.LinuxProc;
import java.lang.management.ManagementFactory;
import java.util.regex.Matcher;
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkInterfaceInfo.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkInterfaceInfo.java
index bd9e187..332077a 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkInterfaceInfo.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkInterfaceInfo.java
@@ -22,7 +22,7 @@ package me.lucko.spark.common.monitor.net;
import com.google.common.collect.ImmutableMap;
-import me.lucko.spark.common.util.LinuxProc;
+import me.lucko.spark.common.monitor.LinuxProc;
import org.checkerframework.checker.nullness.qual.NonNull;
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/os/OperatingSystemInfo.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/os/OperatingSystemInfo.java
new file mode 100644
index 0000000..1c2732c
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/os/OperatingSystemInfo.java
@@ -0,0 +1,86 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.monitor.os;
+
+import me.lucko.spark.common.monitor.LinuxProc;
+import me.lucko.spark.common.monitor.WindowsWmic;
+
+/**
+ * Small utility to query the operating system name & version.
+ */
+public final class OperatingSystemInfo {
+ private final String name;
+ private final String version;
+ private final String arch;
+
+ public OperatingSystemInfo(String name, String version, String arch) {
+ this.name = name;
+ this.version = version;
+ this.arch = arch;
+ }
+
+ public String name() {
+ return this.name;
+ }
+
+ public String version() {
+ return this.version;
+ }
+
+ public String arch() {
+ return this.arch;
+ }
+
+ public static OperatingSystemInfo poll() {
+ String name = null;
+ String version = null;
+
+ for (String line : LinuxProc.OSINFO.read()) {
+ if (line.startsWith("PRETTY_NAME") && line.length() > 13) {
+ name = line.substring(13).replace('"', ' ').trim();
+ }
+ }
+
+ for (String line : WindowsWmic.OS_GET_CAPTION_AND_VERSION.read()) {
+ if (line.startsWith("Caption") && line.length() > 18) {
+ // Caption=Microsoft Windows something
+ // \----------------/ = 18 chars
+ name = line.substring(18).trim();
+ } else if (line.startsWith("Version")) {
+ // Version=10.0.something
+ // \------/ = 8 chars
+ version = line.substring(8).trim();
+ }
+ }
+
+ if (name == null) {
+ name = System.getProperty("os.name");
+ }
+
+ if (version == null) {
+ version = System.getProperty("os.version");
+ }
+
+ String arch = System.getProperty("os.arch");
+
+ return new OperatingSystemInfo(name, version, arch);
+ }
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadNodeOrder.java b/spark-common/src/main/java/me/lucko/spark/common/platform/MetadataProvider.java
index adcedcd..39022b4 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadNodeOrder.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/MetadataProvider.java
@@ -18,35 +18,30 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-package me.lucko.spark.common.sampler;
+package me.lucko.spark.common.platform;
-import me.lucko.spark.common.sampler.node.ThreadNode;
+import com.google.gson.JsonElement;
-import java.util.Comparator;
+import java.util.LinkedHashMap;
+import java.util.Map;
/**
- * Methods of ordering {@link ThreadNode}s in the output data.
+ * Function to export dynamic metadata to be displayed within the spark viewer.
*/
-public enum ThreadNodeOrder implements Comparator<ThreadNode> {
+@FunctionalInterface
+public interface MetadataProvider {
/**
- * Order by the name of the thread (alphabetically)
+ * Produces a map of the metadata.
+ *
+ * @return the metadata
*/
- BY_NAME {
- @Override
- public int compare(ThreadNode o1, ThreadNode o2) {
- return o1.getThreadLabel().compareTo(o2.getThreadLabel());
- }
- },
+ Map<String, JsonElement> get();
- /**
- * Order by the time taken by the thread (most time taken first)
- */
- BY_TIME {
- @Override
- public int compare(ThreadNode o1, ThreadNode o2) {
- return -Double.compare(o1.getTotalTime(), o2.getTotalTime());
- }
+ default Map<String, String> export() {
+ Map<String, String> map = new LinkedHashMap<>();
+ get().forEach((key, value) -> map.put(key, value.toString()));
+ return map;
}
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java
index f35bbbe..fc7e78a 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java
@@ -28,10 +28,14 @@ import me.lucko.spark.common.monitor.memory.GarbageCollectorStatistics;
import me.lucko.spark.common.monitor.memory.MemoryInfo;
import me.lucko.spark.common.monitor.net.NetworkInterfaceAverages;
import me.lucko.spark.common.monitor.net.NetworkMonitor;
+import me.lucko.spark.common.monitor.os.OperatingSystemInfo;
import me.lucko.spark.common.monitor.ping.PingStatistics;
import me.lucko.spark.common.monitor.tick.TickStatistics;
+import me.lucko.spark.common.platform.world.AsyncWorldInfoProvider;
+import me.lucko.spark.common.platform.world.WorldStatisticsProvider;
import me.lucko.spark.proto.SparkProtos.PlatformStatistics;
import me.lucko.spark.proto.SparkProtos.SystemStatistics;
+import me.lucko.spark.proto.SparkProtos.WorldStatistics;
import java.lang.management.ManagementFactory;
import java.lang.management.MemoryUsage;
@@ -47,6 +51,7 @@ public class PlatformStatisticsProvider {
public SystemStatistics getSystemStatistics() {
RuntimeMXBean runtimeBean = ManagementFactory.getRuntimeMXBean();
+ OperatingSystemInfo osInfo = OperatingSystemInfo.poll();
SystemStatistics.Builder builder = SystemStatistics.newBuilder()
.setCpu(SystemStatistics.Cpu.newBuilder()
@@ -83,9 +88,9 @@ public class PlatformStatisticsProvider {
.build()
)
.setOs(SystemStatistics.Os.newBuilder()
- .setArch(System.getProperty("os.arch"))
- .setName(System.getProperty("os.name"))
- .setVersion(System.getProperty("os.version"))
+ .setArch(osInfo.arch())
+ .setName(osInfo.name())
+ .setVersion(osInfo.version())
.build()
)
.setJava(SystemStatistics.Java.newBuilder()
@@ -182,6 +187,19 @@ public class PlatformStatisticsProvider {
builder.setPlayerCount(playerCount);
}
+ try {
+ WorldStatisticsProvider worldStatisticsProvider = new WorldStatisticsProvider(
+ new AsyncWorldInfoProvider(this.platform, this.platform.getPlugin().createWorldInfoProvider())
+ );
+ WorldStatistics worldStatistics = worldStatisticsProvider.getWorldStatistics();
+ if (worldStatistics != null) {
+ builder.setWorld(worldStatistics);
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+
+
return builder.build();
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ConfigParser.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ConfigParser.java
new file mode 100644
index 0000000..675a32e
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ConfigParser.java
@@ -0,0 +1,48 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform.serverconfig;
+
+import com.google.gson.JsonElement;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.Map;
+
+public interface ConfigParser {
+
+ JsonElement load(String file, ExcludedConfigFilter filter) throws IOException;
+
+ default Map<String, Object> parse(Path file) throws IOException {
+ if (!Files.exists(file)) {
+ return null;
+ }
+
+ try (BufferedReader reader = Files.newBufferedReader(file, StandardCharsets.UTF_8)) {
+ return this.parse(reader);
+ }
+ }
+
+ Map<String, Object> parse(BufferedReader reader) throws IOException;
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ExcludedConfigFilter.java
index ead2131..c11c7f8 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ExcludedConfigFilter.java
@@ -20,11 +20,9 @@
package me.lucko.spark.common.platform.serverconfig;
-import com.google.common.collect.ImmutableMap;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
-import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
@@ -34,62 +32,26 @@ import java.util.Map;
import java.util.stream.Collectors;
/**
- * Abstract implementation of {@link ServerConfigProvider}.
- *
- * <p>This implementation is able to delete hidden paths from
- * the configurations before they are sent to the viewer.</p>
- *
- * @param <T> the file type
+ * Filters excluded paths from {@link JsonElement}s (parsed configuration files).
*/
-public abstract class AbstractServerConfigProvider<T extends Enum<T>> implements ServerConfigProvider {
- private final Map<String, T> files;
- private final Collection<String> hiddenPaths;
-
- protected AbstractServerConfigProvider(Map<String, T> files, Collection<String> hiddenPaths) {
- this.files = files;
- this.hiddenPaths = hiddenPaths;
- }
-
- @Override
- public final Map<String, JsonElement> loadServerConfigurations() {
- ImmutableMap.Builder<String, JsonElement> builder = ImmutableMap.builder();
-
- this.files.forEach((path, type) -> {
- try {
- JsonElement json = load(path, type);
- if (json != null) {
- delete(json, this.hiddenPaths);
- builder.put(path, json);
- }
- } catch (Exception e) {
- e.printStackTrace();
- }
- });
+public class ExcludedConfigFilter {
+ private final Collection<String> pathsToExclude;
- return builder.build();
+ public ExcludedConfigFilter(Collection<String> pathsToExclude) {
+ this.pathsToExclude = pathsToExclude;
}
/**
- * Loads a file from the system.
- *
- * @param path the name of the file to load
- * @param type the type of the file
- * @return the loaded file
- * @throws IOException if an error occurs performing i/o
- */
- protected abstract JsonElement load(String path, T type) throws IOException;
-
- /**
- * Deletes the given paths from the json element.
+ * Deletes the excluded paths from the json element.
*
* @param json the json element
- * @param paths the paths to delete
*/
- private static void delete(JsonElement json, Collection<String> paths) {
- for (String path : paths) {
+ public JsonElement apply(JsonElement json) {
+ for (String path : this.pathsToExclude) {
Deque<String> pathDeque = new LinkedList<>(Arrays.asList(path.split("\\.")));
delete(json, pathDeque);
}
+ return json;
}
private static void delete(JsonElement json, Deque<String> path) {
@@ -132,5 +94,4 @@ public abstract class AbstractServerConfigProvider<T extends Enum<T>> implements
}
}
}
-
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesFileReader.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesConfigParser.java
index 8fc89d7..344ba1c 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesFileReader.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesConfigParser.java
@@ -20,25 +20,38 @@
package me.lucko.spark.common.platform.serverconfig;
-import java.io.FilterReader;
+import com.google.gson.Gson;
+import com.google.gson.JsonElement;
+
+import java.io.BufferedReader;
import java.io.IOException;
-import java.io.Reader;
+import java.nio.file.Paths;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
/**
- * A {@link Reader} that can parse a .properties file.
+ * A {@link ConfigParser} that can parse a .properties file.
*/
-public class PropertiesFileReader extends FilterReader {
+public enum PropertiesConfigParser implements ConfigParser {
+ INSTANCE;
+
+ private static final Gson GSON = new Gson();
+
+ @Override
+ public JsonElement load(String file, ExcludedConfigFilter filter) throws IOException {
+ Map<String, Object> values = this.parse(Paths.get(file));
+ if (values == null) {
+ return null;
+ }
- public PropertiesFileReader(Reader in) {
- super(in);
+ return filter.apply(GSON.toJsonTree(values));
}
- public Map<String, Object> readProperties() throws IOException {
+ @Override
+ public Map<String, Object> parse(BufferedReader reader) throws IOException {
Properties properties = new Properties();
- properties.load(this);
+ properties.load(reader);
Map<String, Object> values = new HashMap<>();
properties.forEach((k, v) -> {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java
index 1fc2391..485f215 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java
@@ -20,40 +20,57 @@
package me.lucko.spark.common.platform.serverconfig;
+import com.google.common.collect.ImmutableMap;
import com.google.gson.JsonElement;
+import me.lucko.spark.common.platform.MetadataProvider;
+
+import java.util.Arrays;
+import java.util.Collection;
import java.util.Collections;
+import java.util.List;
import java.util.Map;
-import java.util.stream.Collectors;
/**
- * Function to export server configuration files for access within the spark viewer.
+ * Abstract implementation of {@link MetadataProvider} which
+ * provides server configuration data.
+ *
+ * <p>This implementation is able to delete hidden paths from
+ * the configurations before they are sent to the viewer.</p>
*/
-@FunctionalInterface
-public interface ServerConfigProvider {
-
- /**
- * Loads a map of the server configuration files.
- *
- * <p>The key is the name of the file and the value is a
- * {@link JsonElement} of the contents.</p>
- *
- * @return the exported server configurations
- */
- Map<String, JsonElement> loadServerConfigurations();
-
- default Map<String, String> exportServerConfigurations() {
- return loadServerConfigurations().entrySet()
- .stream()
- .collect(Collectors.toMap(
- Map.Entry::getKey,
- e -> e.getValue().toString()
- ));
+public abstract class ServerConfigProvider implements MetadataProvider {
+ private final Map<String, ConfigParser> files;
+ private final ExcludedConfigFilter hiddenPathFilters;
+
+ protected ServerConfigProvider(Map<String, ConfigParser> files, Collection<String> hiddenPaths) {
+ this.files = files;
+ this.hiddenPathFilters = new ExcludedConfigFilter(hiddenPaths);
+ }
+
+ @Override
+ public final Map<String, JsonElement> get() {
+ ImmutableMap.Builder<String, JsonElement> builder = ImmutableMap.builder();
+
+ this.files.forEach((path, parser) -> {
+ try {
+ JsonElement json = parser.load(path, this.hiddenPathFilters);
+ if (json == null) {
+ return;
+ }
+ builder.put(path, json);
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ });
+
+ return builder.build();
}
- /**
- * A no-op implementation
- */
- ServerConfigProvider NO_OP = Collections::emptyMap;
+ protected static List<String> getSystemPropertyList(String property) {
+ String value = System.getProperty(property);
+ return value == null
+ ? Collections.emptyList()
+ : Arrays.asList(value.split(","));
+ }
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/AbstractChunkInfo.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/AbstractChunkInfo.java
new file mode 100644
index 0000000..80026cd
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/AbstractChunkInfo.java
@@ -0,0 +1,55 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform.world;
+
+public abstract class AbstractChunkInfo<E> implements ChunkInfo<E> {
+ private final int x;
+ private final int z;
+
+ protected AbstractChunkInfo(int x, int z) {
+ this.x = x;
+ this.z = z;
+ }
+
+ @Override
+ public int getX() {
+ return this.x;
+ }
+
+ @Override
+ public int getZ() {
+ return this.z;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (obj == this) return true;
+ if (!(obj instanceof AbstractChunkInfo)) return false;
+ AbstractChunkInfo<?> that = (AbstractChunkInfo<?>) obj;
+ return this.x == that.x && this.z == that.z;
+ }
+
+ @Override
+ public int hashCode() {
+ return this.x ^ this.z;
+ }
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/AsyncWorldInfoProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/AsyncWorldInfoProvider.java
new file mode 100644
index 0000000..82cddef
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/AsyncWorldInfoProvider.java
@@ -0,0 +1,90 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform.world;
+
+import me.lucko.spark.common.SparkPlatform;
+import me.lucko.spark.common.SparkPlugin;
+
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+import java.util.function.Function;
+import java.util.logging.Level;
+
+/**
+ * Async-friendly wrapper around {@link WorldInfoProvider}.
+ */
+public class AsyncWorldInfoProvider {
+ private static final int TIMEOUT_SECONDS = 5;
+
+ private final SparkPlatform platform;
+ private final WorldInfoProvider provider;
+
+ public AsyncWorldInfoProvider(SparkPlatform platform, WorldInfoProvider provider) {
+ this.platform = platform;
+ this.provider = provider == WorldInfoProvider.NO_OP ? null : provider;
+ }
+
+ private <T> CompletableFuture<T> async(Function<WorldInfoProvider, T> function) {
+ if (this.provider == null) {
+ return null;
+ }
+
+ if (this.provider.mustCallSync()) {
+ SparkPlugin plugin = this.platform.getPlugin();
+ return CompletableFuture.supplyAsync(() -> function.apply(this.provider), plugin::executeSync);
+ } else {
+ return CompletableFuture.completedFuture(function.apply(this.provider));
+ }
+ }
+
+ private <T> T get(CompletableFuture<T> future) {
+ if (future == null) {
+ return null;
+ }
+
+ try {
+ return future.get(TIMEOUT_SECONDS, TimeUnit.SECONDS);
+ } catch (InterruptedException | ExecutionException e) {
+ throw new RuntimeException(e);
+ } catch (TimeoutException e) {
+ this.platform.getPlugin().log(Level.WARNING, "Timed out waiting for world statistics");
+ return null;
+ }
+ }
+
+ public CompletableFuture<WorldInfoProvider.CountsResult> pollCounts() {
+ return async(WorldInfoProvider::pollCounts);
+ }
+
+ public CompletableFuture<WorldInfoProvider.ChunksResult<? extends ChunkInfo<?>>> pollChunks() {
+ return async(WorldInfoProvider::pollChunks);
+ }
+
+ public WorldInfoProvider.CountsResult getCounts() {
+ return get(pollCounts());
+ }
+
+ public WorldInfoProvider.ChunksResult<? extends ChunkInfo<?>> getChunks() {
+ return get(pollChunks());
+ }
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/ChunkInfo.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/ChunkInfo.java
new file mode 100644
index 0000000..2193a50
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/ChunkInfo.java
@@ -0,0 +1,44 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform.world;
+
+/**
+ * Information about a given chunk.
+ *
+ * @param <E> the type used to describe entities
+ */
+public interface ChunkInfo<E> {
+
+ int getX();
+
+ int getZ();
+
+ CountMap<E> getEntityCounts();
+
+ /**
+ * Converts entity type {@link E} to a string.
+ *
+ * @param type the entity type
+ * @return a string
+ */
+ String entityTypeName(E type);
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/CountMap.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/CountMap.java
new file mode 100644
index 0000000..3083266
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/CountMap.java
@@ -0,0 +1,110 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform.world;
+
+import java.util.EnumMap;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicInteger;
+
+/**
+ * A map of (key) -> count.
+ *
+ * @param <T> the key type
+ */
+public interface CountMap<T> {
+
+ /**
+ * Increment the counter for the given key
+ *
+ * @param key the key
+ */
+ void increment(T key);
+
+ /**
+ * Add to the counter for the given key
+ *
+ * @param key the key
+ */
+ void add(T key, int delta);
+
+ AtomicInteger total();
+
+ Map<T, AtomicInteger> asMap();
+
+ /**
+ * A simple {@link CountMap} backed by the provided {@link Map}
+ *
+ * @param <T> the key type
+ */
+ class Simple<T> implements CountMap<T> {
+ private final Map<T, AtomicInteger> counts;
+ private final AtomicInteger total;
+
+ public Simple(Map<T, AtomicInteger> counts) {
+ this.counts = counts;
+ this.total = new AtomicInteger();
+ }
+
+ @Override
+ public void increment(T key) {
+ AtomicInteger counter = this.counts.get(key);
+ if (counter == null) {
+ counter = new AtomicInteger();
+ this.counts.put(key, counter);
+ }
+ counter.incrementAndGet();
+ this.total.incrementAndGet();
+ }
+
+ @Override
+ public void add(T key, int delta) {
+ AtomicInteger counter = this.counts.get(key);
+ if (counter == null) {
+ counter = new AtomicInteger();
+ this.counts.put(key, counter);
+ }
+ counter.addAndGet(delta);
+ this.total.addAndGet(delta);
+ }
+
+ @Override
+ public AtomicInteger total() {
+ return this.total;
+ }
+
+ @Override
+ public Map<T, AtomicInteger> asMap() {
+ return this.counts;
+ }
+ }
+
+ /**
+ * A {@link CountMap} backed by an {@link EnumMap}.
+ *
+ * @param <T> the key type - must be an enum
+ */
+ class EnumKeyed<T extends Enum<T>> extends Simple<T> {
+ public EnumKeyed(Class<T> keyClass) {
+ super(new EnumMap<>(keyClass));
+ }
+ }
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldInfoProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldInfoProvider.java
new file mode 100644
index 0000000..7fb581d
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldInfoProvider.java
@@ -0,0 +1,104 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform.world;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Provides information about worlds.
+ */
+public interface WorldInfoProvider {
+
+ WorldInfoProvider NO_OP = new WorldInfoProvider() {
+ @Override
+ public CountsResult pollCounts() {
+ return null;
+ }
+
+ @Override
+ public ChunksResult<? extends ChunkInfo<?>> pollChunks() {
+ return null;
+ }
+ };
+
+ /**
+ * Polls for counts.
+ *
+ * @return the counts
+ */
+ CountsResult pollCounts();
+
+ /**
+ * Polls for chunk information.
+ *
+ * @return the chunk information
+ */
+ ChunksResult<? extends ChunkInfo<?>> pollChunks();
+
+ default boolean mustCallSync() {
+ return true;
+ }
+
+ final class ChunksResult<T extends ChunkInfo<?>> {
+ private final Map<String, List<T>> worlds = new HashMap<>();
+
+ public void put(String worldName, List<T> chunks) {
+ this.worlds.put(worldName, chunks);
+ }
+
+ public Map<String, List<T>> getWorlds() {
+ return this.worlds;
+ }
+ }
+
+ final class CountsResult {
+ private final int players;
+ private final int entities;
+ private final int tileEntities;
+ private final int chunks;
+
+ public CountsResult(int players, int entities, int tileEntities, int chunks) {
+ this.players = players;
+ this.entities = entities;
+ this.tileEntities = tileEntities;
+ this.chunks = chunks;
+ }
+
+ public int players() {
+ return this.players;
+ }
+
+ public int entities() {
+ return this.entities;
+ }
+
+ public int tileEntities() {
+ return this.tileEntities;
+ }
+
+ public int chunks() {
+ return this.chunks;
+ }
+ }
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java
new file mode 100644
index 0000000..7e63222
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java
@@ -0,0 +1,189 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform.world;
+
+import me.lucko.spark.proto.SparkProtos.WorldStatistics;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Set;
+import java.util.concurrent.atomic.AtomicInteger;
+
+public class WorldStatisticsProvider {
+ private final AsyncWorldInfoProvider provider;
+
+ public WorldStatisticsProvider(AsyncWorldInfoProvider provider) {
+ this.provider = provider;
+ }
+
+ public WorldStatistics getWorldStatistics() {
+ WorldInfoProvider.ChunksResult<? extends ChunkInfo<?>> result = provider.getChunks();
+ if (result == null) {
+ return null;
+ }
+
+ WorldStatistics.Builder stats = WorldStatistics.newBuilder();
+
+ AtomicInteger combinedTotal = new AtomicInteger();
+ CountMap<String> combined = new CountMap.Simple<>(new HashMap<>());
+
+ result.getWorlds().forEach((worldName, chunks) -> {
+ WorldStatistics.World.Builder builder = WorldStatistics.World.newBuilder();
+ builder.setName(worldName);
+
+ List<Region> regions = groupIntoRegions(chunks);
+
+ int total = 0;
+
+ for (Region region : regions) {
+ builder.addRegions(regionToProto(region, combined));
+ total += region.getTotalEntities().get();
+ }
+
+ builder.setTotalEntities(total);
+ combinedTotal.addAndGet(total);
+
+ stats.addWorlds(builder.build());
+ });
+
+ stats.setTotalEntities(combinedTotal.get());
+ combined.asMap().forEach((key, value) -> stats.putEntityCounts(key, value.get()));
+
+ return stats.build();
+ }
+
+ private static WorldStatistics.Region regionToProto(Region region, CountMap<String> combined) {
+ WorldStatistics.Region.Builder builder = WorldStatistics.Region.newBuilder();
+ builder.setTotalEntities(region.getTotalEntities().get());
+ for (ChunkInfo<?> chunk : region.getChunks()) {
+ builder.addChunks(chunkToProto(chunk, combined));
+ }
+ return builder.build();
+ }
+
+ private static <E> WorldStatistics.Chunk chunkToProto(ChunkInfo<E> chunk, CountMap<String> combined) {
+ WorldStatistics.Chunk.Builder builder = WorldStatistics.Chunk.newBuilder();
+ builder.setX(chunk.getX());
+ builder.setZ(chunk.getZ());
+ builder.setTotalEntities(chunk.getEntityCounts().total().get());
+ chunk.getEntityCounts().asMap().forEach((key, value) -> {
+ String name = chunk.entityTypeName(key);
+ int count = value.get();
+
+ if (name == null) {
+ name = "unknown[" + key.toString() + "]";
+ }
+
+ builder.putEntityCounts(name, count);
+ combined.add(name, count);
+ });
+ return builder.build();
+ }
+
+ private static List<Region> groupIntoRegions(List<? extends ChunkInfo<?>> chunks) {
+ List<Region> regions = new ArrayList<>();
+
+ for (ChunkInfo<?> chunk : chunks) {
+ CountMap<?> counts = chunk.getEntityCounts();
+ if (counts.total().get() == 0) {
+ continue;
+ }
+
+ boolean found = false;
+
+ for (Region region : regions) {
+ if (region.isAdjacent(chunk)) {
+ found = true;
+ region.add(chunk);
+
+ // if the chunk is adjacent to more than one region, merge the regions together
+ for (Iterator<Region> iterator = regions.iterator(); iterator.hasNext(); ) {
+ Region otherRegion = iterator.next();
+ if (region != otherRegion && otherRegion.isAdjacent(chunk)) {
+ iterator.remove();
+ region.merge(otherRegion);
+ }
+ }
+
+ break;
+ }
+ }
+
+ if (!found) {
+ regions.add(new Region(chunk));
+ }
+ }
+
+ return regions;
+ }
+
+ /**
+ * A map of nearby chunks grouped together by Euclidean distance.
+ */
+ private static final class Region {
+ private static final int DISTANCE_THRESHOLD = 2;
+ private final Set<ChunkInfo<?>> chunks;
+ private final AtomicInteger totalEntities;
+
+ private Region(ChunkInfo<?> initial) {
+ this.chunks = new HashSet<>();
+ this.chunks.add(initial);
+ this.totalEntities = new AtomicInteger(initial.getEntityCounts().total().get());
+ }
+
+ public Set<ChunkInfo<?>> getChunks() {
+ return this.chunks;
+ }
+
+ public AtomicInteger getTotalEntities() {
+ return this.totalEntities;
+ }
+
+ public boolean isAdjacent(ChunkInfo<?> chunk) {
+ for (ChunkInfo<?> el : this.chunks) {
+ if (squaredEuclideanDistance(el, chunk) <= DISTANCE_THRESHOLD) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ public void add(ChunkInfo<?> chunk) {
+ this.chunks.add(chunk);
+ this.totalEntities.addAndGet(chunk.getEntityCounts().total().get());
+ }
+
+ public void merge(Region group) {
+ this.chunks.addAll(group.getChunks());
+ this.totalEntities.addAndGet(group.getTotalEntities().get());
+ }
+
+ private static long squaredEuclideanDistance(ChunkInfo<?> a, ChunkInfo<?> b) {
+ long dx = a.getX() - b.getX();
+ long dz = a.getZ() - b.getZ();
+ return (dx * dx) + (dz * dz);
+ }
+ }
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java
index ce466a0..e324fd3 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java
@@ -23,16 +23,22 @@ package me.lucko.spark.common.sampler;
import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.command.sender.CommandSender;
import me.lucko.spark.common.monitor.memory.GarbageCollectorStatistics;
+import me.lucko.spark.common.platform.MetadataProvider;
import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider;
import me.lucko.spark.common.sampler.aggregator.DataAggregator;
import me.lucko.spark.common.sampler.node.MergeMode;
import me.lucko.spark.common.sampler.node.ThreadNode;
-import me.lucko.spark.common.util.ClassSourceLookup;
+import me.lucko.spark.common.sampler.source.ClassSourceLookup;
+import me.lucko.spark.common.sampler.source.SourceMetadata;
+import me.lucko.spark.common.sampler.window.ProtoTimeEncoder;
+import me.lucko.spark.common.sampler.window.WindowStatisticsCollector;
import me.lucko.spark.proto.SparkSamplerProtos.SamplerData;
import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata;
+import java.util.Collection;
import java.util.Comparator;
import java.util.List;
+import java.util.Locale;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
@@ -41,6 +47,9 @@ import java.util.concurrent.CompletableFuture;
*/
public abstract class AbstractSampler implements Sampler {
+ /** The spark platform instance */
+ protected final SparkPlatform platform;
+
/** The interval to wait between sampling, in microseconds */
protected final int interval;
@@ -51,7 +60,13 @@ public abstract class AbstractSampler implements Sampler {
protected long startTime = -1;
/** The unix timestamp (in millis) when this sampler should automatically complete. */
- protected final long endTime; // -1 for nothing
+ protected final long autoEndTime; // -1 for nothing
+
+ /** If the sampler is running in the background */
+ protected boolean background;
+
+ /** Collects statistics for each window in the sample */
+ protected final WindowStatisticsCollector windowStatisticsCollector;
/** A future to encapsulate the completion of this sampler instance */
protected final CompletableFuture<Sampler> future = new CompletableFuture<>();
@@ -59,10 +74,13 @@ public abstract class AbstractSampler implements Sampler {
/** The garbage collector statistics when profiling started */
protected Map<String, GarbageCollectorStatistics> initialGcStats;
- protected AbstractSampler(int interval, ThreadDumper threadDumper, long endTime) {
- this.interval = interval;
- this.threadDumper = threadDumper;
- this.endTime = endTime;
+ protected AbstractSampler(SparkPlatform platform, SamplerSettings settings) {
+ this.platform = platform;
+ this.interval = settings.interval();
+ this.threadDumper = settings.threadDumper();
+ this.autoEndTime = settings.autoEndTime();
+ this.background = settings.runningInBackground();
+ this.windowStatisticsCollector = new WindowStatisticsCollector(platform);
}
@Override
@@ -74,8 +92,13 @@ public abstract class AbstractSampler implements Sampler {
}
@Override
- public long getEndTime() {
- return this.endTime;
+ public long getAutoEndTime() {
+ return this.autoEndTime;
+ }
+
+ @Override
+ public boolean isRunningInBackground() {
+ return this.background;
}
@Override
@@ -91,6 +114,16 @@ public abstract class AbstractSampler implements Sampler {
return this.initialGcStats;
}
+ @Override
+ public void start() {
+ this.startTime = System.currentTimeMillis();
+ }
+
+ @Override
+ public void stop(boolean cancelled) {
+ this.windowStatisticsCollector.stop();
+ }
+
protected void writeMetadataToProto(SamplerData.Builder proto, SparkPlatform platform, CommandSender creator, String comment, DataAggregator dataAggregator) {
SamplerMetadata.Builder metadata = SamplerMetadata.newBuilder()
.setPlatformMetadata(platform.getPlugin().getPlatformInfo().toData().toProto())
@@ -105,6 +138,11 @@ public abstract class AbstractSampler implements Sampler {
metadata.setComment(comment);
}
+ int totalTicks = this.windowStatisticsCollector.getTotalTicks();
+ if (totalTicks != -1) {
+ metadata.setNumberOfTicks(totalTicks);
+ }
+
try {
metadata.setPlatformStatistics(platform.getStatisticsProvider().getPlatformStatistics(getInitialGcStats()));
} catch (Exception e) {
@@ -119,27 +157,60 @@ public abstract class AbstractSampler implements Sampler {
try {
ServerConfigProvider serverConfigProvider = platform.getPlugin().createServerConfigProvider();
- metadata.putAllServerConfigurations(serverConfigProvider.exportServerConfigurations());
+ if (serverConfigProvider != null) {
+ metadata.putAllServerConfigurations(serverConfigProvider.export());
+ }
} catch (Exception e) {
e.printStackTrace();
}
+ try {
+ MetadataProvider extraMetadataProvider = platform.getPlugin().createExtraMetadataProvider();
+ if (extraMetadataProvider != null) {
+ metadata.putAllExtraPlatformMetadata(extraMetadataProvider.export());
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ Collection<SourceMetadata> knownSources = platform.getPlugin().getKnownSources();
+ for (SourceMetadata source : knownSources) {
+ metadata.putSources(source.getName().toLowerCase(Locale.ROOT), source.toProto());
+ }
+
proto.setMetadata(metadata);
}
- protected void writeDataToProto(SamplerData.Builder proto, DataAggregator dataAggregator, Comparator<ThreadNode> outputOrder, MergeMode mergeMode, ClassSourceLookup classSourceLookup) {
+ protected void writeDataToProto(SamplerData.Builder proto, DataAggregator dataAggregator, MergeMode mergeMode, ClassSourceLookup classSourceLookup) {
List<ThreadNode> data = dataAggregator.exportData();
- data.sort(outputOrder);
+ data.sort(Comparator.comparing(ThreadNode::getThreadLabel));
ClassSourceLookup.Visitor classSourceVisitor = ClassSourceLookup.createVisitor(classSourceLookup);
+ ProtoTimeEncoder timeEncoder = new ProtoTimeEncoder(data);
+ int[] timeWindows = timeEncoder.getKeys();
+ for (int timeWindow : timeWindows) {
+ proto.addTimeWindows(timeWindow);
+ }
+
+ this.windowStatisticsCollector.ensureHasStatisticsForAllWindows(timeWindows);
+ proto.putAllTimeWindowStatistics(this.windowStatisticsCollector.export());
+
for (ThreadNode entry : data) {
- proto.addThreads(entry.toProto(mergeMode));
+ proto.addThreads(entry.toProto(mergeMode, timeEncoder));
classSourceVisitor.visit(entry);
}
- if (classSourceVisitor.hasMappings()) {
- proto.putAllClassSources(classSourceVisitor.getMapping());
+ if (classSourceVisitor.hasClassSourceMappings()) {
+ proto.putAllClassSources(classSourceVisitor.getClassSourceMapping());
+ }
+
+ if (classSourceVisitor.hasMethodSourceMappings()) {
+ proto.putAllMethodSources(classSourceVisitor.getMethodSourceMapping());
+ }
+
+ if (classSourceVisitor.hasLineSourceMappings()) {
+ proto.putAllLineSources(classSourceVisitor.getLineSourceMapping());
}
}
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/BackgroundSamplerManager.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/BackgroundSamplerManager.java
new file mode 100644
index 0000000..7e3b6b4
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/BackgroundSamplerManager.java
@@ -0,0 +1,115 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.sampler;
+
+import me.lucko.spark.common.SparkPlatform;
+import me.lucko.spark.common.platform.PlatformInfo;
+import me.lucko.spark.common.util.Configuration;
+
+import java.util.logging.Level;
+
+public class BackgroundSamplerManager {
+
+ private static final String OPTION_ENABLED = "backgroundProfiler";
+ private static final String OPTION_ENGINE = "backgroundProfilerEngine";
+ private static final String OPTION_INTERVAL = "backgroundProfilerInterval";
+
+ private static final String MARKER_FAILED = "_marker_background_profiler_failed";
+
+ private final SparkPlatform platform;
+ private final Configuration configuration;
+ private final boolean enabled;
+
+ public BackgroundSamplerManager(SparkPlatform platform, Configuration configuration) {
+ this.platform = platform;
+ this.configuration = configuration;
+
+ PlatformInfo.Type type = this.platform.getPlugin().getPlatformInfo().getType();
+ this.enabled = type != PlatformInfo.Type.CLIENT && this.configuration.getBoolean(OPTION_ENABLED, type == PlatformInfo.Type.SERVER);
+ }
+
+ public void initialise() {
+ if (!this.enabled) {
+ return;
+ }
+
+ // are we enabling the background profiler by default for the first time?
+ boolean didEnableByDefault = false;
+ if (!this.configuration.contains(OPTION_ENABLED)) {
+ this.configuration.setBoolean(OPTION_ENABLED, true);
+ didEnableByDefault = true;
+ }
+
+ // did the background profiler fail to start on the previous attempt?
+ if (this.configuration.getBoolean(MARKER_FAILED, false)) {
+ this.platform.getPlugin().log(Level.WARNING, "It seems the background profiler failed to start when spark was last enabled. Sorry about that!");
+ this.platform.getPlugin().log(Level.WARNING, "In the future, spark will try to use the built-in Java profiling engine instead.");
+
+ this.configuration.remove(MARKER_FAILED);
+ this.configuration.setString(OPTION_ENGINE, "java");
+ this.configuration.save();
+ }
+
+ this.platform.getPlugin().log(Level.INFO, "Starting background profiler...");
+
+ if (didEnableByDefault) {
+ // set the failed marker and save before we try to start the profiler,
+ // then remove the marker afterwards if everything goes ok!
+ this.configuration.setBoolean(MARKER_FAILED, true);
+ this.configuration.save();
+ }
+
+ try {
+ startSampler();
+
+ if (didEnableByDefault) {
+ this.configuration.remove(MARKER_FAILED);
+ this.configuration.save();
+ }
+
+ } catch (Throwable e) {
+ e.printStackTrace();
+ }
+ }
+
+ public boolean restartBackgroundSampler() {
+ if (this.enabled) {
+ startSampler();
+ return true;
+ }
+ return false;
+ }
+
+ private void startSampler() {
+ boolean forceJavaEngine = this.configuration.getString(OPTION_ENGINE, "async").equals("java");
+
+ Sampler sampler = new SamplerBuilder()
+ .background(true)
+ .threadDumper(this.platform.getPlugin().getDefaultThreadDumper())
+ .threadGrouper(ThreadGrouper.BY_POOL)
+ .samplingInterval(this.configuration.getInteger(OPTION_INTERVAL, 10))
+ .forceJavaSampler(forceJavaEngine)
+ .start(this.platform);
+
+ this.platform.getSamplerContainer().setActiveSampler(sampler);
+ }
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
index 845043f..36a63f1 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
@@ -23,11 +23,9 @@ package me.lucko.spark.common.sampler;
import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.command.sender.CommandSender;
import me.lucko.spark.common.sampler.node.MergeMode;
-import me.lucko.spark.common.sampler.node.ThreadNode;
-import me.lucko.spark.common.util.ClassSourceLookup;
+import me.lucko.spark.common.sampler.source.ClassSourceLookup;
import me.lucko.spark.proto.SparkSamplerProtos.SamplerData;
-import java.util.Comparator;
import java.util.concurrent.CompletableFuture;
/**
@@ -43,7 +41,7 @@ public interface Sampler {
/**
* Stops the sampler.
*/
- void stop();
+ void stop(boolean cancelled);
/**
* Gets the time when the sampler started (unix timestamp in millis)
@@ -57,7 +55,14 @@ public interface Sampler {
*
* @return the end time, or -1 if undefined
*/
- long getEndTime();
+ long getAutoEndTime();
+
+ /**
+ * If this sampler is running in the background. (wasn't started by a specific user)
+ *
+ * @return true if the sampler is running in the background
+ */
+ boolean isRunningInBackground();
/**
* Gets a future to encapsulate the completion of the sampler
@@ -67,6 +72,6 @@ public interface Sampler {
CompletableFuture<Sampler> getFuture();
// Methods used to export the sampler data to the web viewer.
- SamplerData toProto(SparkPlatform platform, CommandSender creator, Comparator<ThreadNode> outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup);
+ SamplerData toProto(SparkPlatform platform, CommandSender creator, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup);
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java
index 88cf018..ec635ef 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java
@@ -38,7 +38,8 @@ public class SamplerBuilder {
private boolean ignoreSleeping = false;
private boolean ignoreNative = false;
private boolean useAsyncProfiler = true;
- private long timeout = -1;
+ private long autoEndTime = -1;
+ private boolean background = false;
private ThreadDumper threadDumper = ThreadDumper.ALL;
private ThreadGrouper threadGrouper = ThreadGrouper.BY_NAME;
@@ -57,7 +58,12 @@ public class SamplerBuilder {
if (timeout <= 0) {
throw new IllegalArgumentException("timeout > 0");
}
- this.timeout = System.currentTimeMillis() + unit.toMillis(timeout);
+ this.autoEndTime = System.currentTimeMillis() + unit.toMillis(timeout);
+ return this;
+ }
+
+ public SamplerBuilder background(boolean background) {
+ this.background = background;
return this;
}
@@ -93,15 +99,24 @@ public class SamplerBuilder {
}
public Sampler start(SparkPlatform platform) {
+ boolean onlyTicksOverMode = this.ticksOver != -1 && this.tickHook != null;
+ boolean canUseAsyncProfiler = this.useAsyncProfiler &&
+ !onlyTicksOverMode &&
+ !(this.ignoreSleeping || this.ignoreNative) &&
+ !(this.threadDumper instanceof ThreadDumper.Regex) &&
+ AsyncProfilerAccess.getInstance(platform).checkSupported(platform);
+
+
int intervalMicros = (int) (this.samplingInterval * 1000d);
+ SamplerSettings settings = new SamplerSettings(intervalMicros, this.threadDumper, this.threadGrouper, this.autoEndTime, this.background);
Sampler sampler;
- if (this.ticksOver != -1 && this.tickHook != null) {
- sampler = new JavaSampler(intervalMicros, this.threadDumper, this.threadGrouper, this.timeout, this.ignoreSleeping, this.ignoreNative, this.tickHook, this.ticksOver);
- } else if (this.useAsyncProfiler && !(this.threadDumper instanceof ThreadDumper.Regex) && AsyncProfilerAccess.INSTANCE.checkSupported(platform)) {
- sampler = new AsyncSampler(intervalMicros, this.threadDumper, this.threadGrouper, this.timeout);
+ if (canUseAsyncProfiler) {
+ sampler = new AsyncSampler(platform, settings);
+ } else if (onlyTicksOverMode) {
+ sampler = new JavaSampler(platform, settings, this.ignoreSleeping, this.ignoreNative, this.tickHook, this.ticksOver);
} else {
- sampler = new JavaSampler(intervalMicros, this.threadDumper, this.threadGrouper, this.timeout, this.ignoreSleeping, this.ignoreNative);
+ sampler = new JavaSampler(platform, settings, this.ignoreSleeping, this.ignoreNative);
}
sampler.start();
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerContainer.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerContainer.java
new file mode 100644
index 0000000..15b1029
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerContainer.java
@@ -0,0 +1,76 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.sampler;
+
+import java.util.concurrent.atomic.AtomicReference;
+
+/**
+ * Container for the active sampler.
+ */
+public class SamplerContainer implements AutoCloseable {
+
+ private final AtomicReference<Sampler> activeSampler = new AtomicReference<>();
+
+ /**
+ * Gets the active sampler, or null if a sampler is not active.
+ *
+ * @return the active sampler
+ */
+ public Sampler getActiveSampler() {
+ return this.activeSampler.get();
+ }
+
+ /**
+ * Sets the active sampler, throwing an exception if another sampler is already active.
+ *
+ * @param sampler the sampler
+ */
+ public void setActiveSampler(Sampler sampler) {
+ if (!this.activeSampler.compareAndSet(null, sampler)) {
+ throw new IllegalStateException("Attempted to set active sampler when another was already active!");
+ }
+ }
+
+ /**
+ * Unsets the active sampler, if the provided sampler is active.
+ *
+ * @param sampler the sampler
+ */
+ public void unsetActiveSampler(Sampler sampler) {
+ this.activeSampler.compareAndSet(sampler, null);
+ }
+
+ /**
+ * Stops the active sampler, if there is one.
+ */
+ public void stopActiveSampler(boolean cancelled) {
+ Sampler sampler = this.activeSampler.getAndSet(null);
+ if (sampler != null) {
+ sampler.stop(cancelled);
+ }
+ }
+
+ @Override
+ public void close() {
+ stopActiveSampler(true);
+ }
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerSettings.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerSettings.java
new file mode 100644
index 0000000..6e55a43
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerSettings.java
@@ -0,0 +1,61 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.sampler;
+
+/**
+ * Base settings for all samplers
+ */
+public class SamplerSettings {
+
+ private final int interval;
+ private final ThreadDumper threadDumper;
+ private final ThreadGrouper threadGrouper;
+ private final long autoEndTime;
+ private final boolean runningInBackground;
+
+ public SamplerSettings(int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long autoEndTime, boolean runningInBackground) {
+ this.interval = interval;
+ this.threadDumper = threadDumper;
+ this.threadGrouper = threadGrouper;
+ this.autoEndTime = autoEndTime;
+ this.runningInBackground = runningInBackground;
+ }
+
+ public int interval() {
+ return this.interval;
+ }
+
+ public ThreadDumper threadDumper() {
+ return this.threadDumper;
+ }
+
+ public ThreadGrouper threadGrouper() {
+ return this.threadGrouper;
+ }
+
+ public long autoEndTime() {
+ return this.autoEndTime;
+ }
+
+ public boolean runningInBackground() {
+ return this.runningInBackground;
+ }
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java
index 9d54f50..fd0c413 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java
@@ -76,17 +76,29 @@ public interface ThreadDumper {
* the game (server/client) thread.
*/
final class GameThread implements Supplier<ThreadDumper> {
+ private Supplier<Thread> threadSupplier;
private Specific dumper = null;
+ public GameThread() {
+
+ }
+
+ public GameThread(Supplier<Thread> threadSupplier) {
+ this.threadSupplier = threadSupplier;
+ }
+
@Override
public ThreadDumper get() {
+ if (this.dumper == null) {
+ setThread(this.threadSupplier.get());
+ this.threadSupplier = null;
+ }
+
return Objects.requireNonNull(this.dumper, "dumper");
}
- public void ensureSetup() {
- if (this.dumper == null) {
- this.dumper = new Specific(new long[]{Thread.currentThread().getId()});
- }
+ public void setThread(Thread thread) {
+ this.dumper = new Specific(new long[]{thread.getId()});
}
}
@@ -98,6 +110,10 @@ public interface ThreadDumper {
private Set<Thread> threads;
private Set<String> threadNamesLowerCase;
+ public Specific(Thread thread) {
+ this.ids = new long[]{thread.getId()};
+ }
+
public Specific(long[] ids) {
this.ids = ids;
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/AbstractDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/AbstractDataAggregator.java
index ad9dee4..2c003e5 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/AbstractDataAggregator.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/AbstractDataAggregator.java
@@ -27,6 +27,7 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
+import java.util.function.IntPredicate;
/**
* Abstract implementation of {@link DataAggregator}.
@@ -52,6 +53,11 @@ public abstract class AbstractDataAggregator implements DataAggregator {
}
@Override
+ public void pruneData(IntPredicate timeWindowPredicate) {
+ this.threadData.values().removeIf(node -> node.removeTimeWindowsRecursively(timeWindowPredicate));
+ }
+
+ @Override
public List<ThreadNode> exportData() {
List<ThreadNode> data = new ArrayList<>(this.threadData.values());
for (ThreadNode node : data) {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/DataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/DataAggregator.java
index 5590a96..ed33204 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/DataAggregator.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/DataAggregator.java
@@ -24,6 +24,7 @@ import me.lucko.spark.common.sampler.node.ThreadNode;
import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata;
import java.util.List;
+import java.util.function.IntPredicate;
/**
* Aggregates sampling data.
@@ -38,6 +39,13 @@ public interface DataAggregator {
List<ThreadNode> exportData();
/**
+ * Prunes windows of data from this aggregator if the given {@code timeWindowPredicate} returns true.
+ *
+ * @param timeWindowPredicate the predicate
+ */
+ void pruneData(IntPredicate timeWindowPredicate);
+
+ /**
* Gets metadata about the data aggregator instance.
*/
SamplerMetadata.DataAggregator getMetadata();
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncDataAggregator.java
index 3de3943..402330a 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncDataAggregator.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncDataAggregator.java
@@ -47,10 +47,10 @@ public class AsyncDataAggregator extends AbstractDataAggregator {
.build();
}
- public void insertData(ProfileSegment element) {
+ public void insertData(ProfileSegment element, int window) {
try {
ThreadNode node = getNode(this.threadGrouper.getGroup(element.getNativeThreadId(), element.getThreadName()));
- node.log(STACK_TRACE_DESCRIBER, element.getStackTrace(), element.getTime());
+ node.log(STACK_TRACE_DESCRIBER, element.getStackTrace(), element.getTime(), window);
} catch (Exception e) {
e.printStackTrace();
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java
index d642a53..1480650 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java
@@ -22,26 +22,39 @@ package me.lucko.spark.common.sampler.async;
import com.google.common.collect.ImmutableTable;
import com.google.common.collect.Table;
+import com.google.common.io.ByteStreams;
import me.lucko.spark.common.SparkPlatform;
-import me.lucko.spark.common.util.TemporaryFiles;
import one.profiler.AsyncProfiler;
import one.profiler.Events;
+import java.io.BufferedReader;
import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.OutputStream;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
-import java.nio.file.StandardCopyOption;
import java.util.Locale;
+import java.util.Objects;
import java.util.logging.Level;
+import java.util.stream.Collectors;
/**
* Provides a bridge between spark and async-profiler.
*/
-public enum AsyncProfilerAccess {
- INSTANCE;
+public class AsyncProfilerAccess {
+ private static AsyncProfilerAccess instance;
+
+ // singleton, needs a SparkPlatform for first init
+ public static synchronized AsyncProfilerAccess getInstance(SparkPlatform platform) {
+ if (instance == null) {
+ Objects.requireNonNull(platform, "platform");
+ instance = new AsyncProfilerAccess(platform);
+ }
+ return instance;
+ }
/** An instance of the async-profiler Java API. */
private final AsyncProfiler profiler;
@@ -52,13 +65,13 @@ public enum AsyncProfilerAccess {
/** If profiler is null, contains the reason why setup failed */
private final Exception setupException;
- AsyncProfilerAccess() {
+ AsyncProfilerAccess(SparkPlatform platform) {
AsyncProfiler profiler;
ProfilingEvent profilingEvent = null;
Exception setupException = null;
try {
- profiler = load();
+ profiler = load(platform);
if (isEventSupported(profiler, ProfilingEvent.CPU, false)) {
profilingEvent = ProfilingEvent.CPU;
} else if (isEventSupported(profiler, ProfilingEvent.WALL, true)) {
@@ -74,11 +87,11 @@ public enum AsyncProfilerAccess {
this.setupException = setupException;
}
- public AsyncProfiler getProfiler() {
+ public AsyncProfilerJob startNewProfilerJob() {
if (this.profiler == null) {
throw new UnsupportedOperationException("async-profiler not supported", this.setupException);
}
- return this.profiler;
+ return AsyncProfilerJob.createNew(this, this.profiler);
}
public ProfilingEvent getProfilingEvent() {
@@ -103,13 +116,18 @@ public enum AsyncProfilerAccess {
return this.profiler != null;
}
- private static AsyncProfiler load() throws Exception {
+ private static AsyncProfiler load(SparkPlatform platform) throws Exception {
// check compatibility
String os = System.getProperty("os.name").toLowerCase(Locale.ROOT).replace(" ", "");
String arch = System.getProperty("os.arch").toLowerCase(Locale.ROOT);
+ if (os.equals("linux") && arch.equals("amd64") && isLinuxMusl()) {
+ arch = "amd64-musl";
+ }
+
Table<String, String, String> supported = ImmutableTable.<String, String, String>builder()
.put("linux", "amd64", "linux/amd64")
+ .put("linux", "amd64-musl", "linux/amd64-musl")
.put("linux", "aarch64", "linux/aarch64")
.put("macosx", "amd64", "macos")
.put("macosx", "aarch64", "macos")
@@ -127,10 +145,10 @@ public enum AsyncProfilerAccess {
throw new IllegalStateException("Could not find " + resource + " in spark jar file");
}
- Path extractPath = TemporaryFiles.create("spark-", "-libasyncProfiler.so.tmp");
+ Path extractPath = platform.getTemporaryFiles().create("spark-", "-libasyncProfiler.so.tmp");
- try (InputStream in = profilerResource.openStream()) {
- Files.copy(in, extractPath, StandardCopyOption.REPLACE_EXISTING);
+ try (InputStream in = profilerResource.openStream(); OutputStream out = Files.newOutputStream(extractPath)) {
+ ByteStreams.copy(in, out);
}
// get an instance of async-profiler
@@ -190,4 +208,20 @@ public enum AsyncProfilerAccess {
super("A runtime error occurred whilst loading the native library", cause);
}
}
+
+ // Checks if the system is using musl instead of glibc
+ private static boolean isLinuxMusl() {
+ try {
+ InputStream stream = new ProcessBuilder("sh", "-c", "ldd `which ls`")
+ .start()
+ .getInputStream();
+
+ BufferedReader reader = new BufferedReader(new InputStreamReader(stream));
+ String output = reader.lines().collect(Collectors.joining());
+ return output.contains("musl"); // shrug
+ } catch (Throwable e) {
+ // ignore
+ return false;
+ }
+ }
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerJob.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerJob.java
new file mode 100644
index 0000000..d74b75f
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerJob.java
@@ -0,0 +1,276 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.sampler.async;
+
+import me.lucko.spark.common.SparkPlatform;
+import me.lucko.spark.common.sampler.ThreadDumper;
+import me.lucko.spark.common.sampler.async.jfr.JfrReader;
+
+import one.profiler.AsyncProfiler;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicReference;
+import java.util.function.Predicate;
+
+/**
+ * Represents a profiling job within async-profiler.
+ *
+ * <p>Only one job can be running at a time. This is guarded by
+ * {@link #createNew(AsyncProfilerAccess, AsyncProfiler)}.</p>
+ */
+public class AsyncProfilerJob {
+
+ /**
+ * The currently active job.
+ */
+ private static final AtomicReference<AsyncProfilerJob> ACTIVE = new AtomicReference<>();
+
+ /**
+ * Creates a new {@link AsyncProfilerJob}.
+ *
+ * <p>Will throw an {@link IllegalStateException} if another job is already active.</p>
+ *
+ * @param access the profiler access object
+ * @param profiler the profiler
+ * @return the job
+ */
+ static AsyncProfilerJob createNew(AsyncProfilerAccess access, AsyncProfiler profiler) {
+ synchronized (ACTIVE) {
+ AsyncProfilerJob existing = ACTIVE.get();
+ if (existing != null) {
+ throw new IllegalStateException("Another profiler is already active: " + existing);
+ }
+
+ AsyncProfilerJob job = new AsyncProfilerJob(access, profiler);
+ ACTIVE.set(job);
+ return job;
+ }
+ }
+
+ /** The async-profiler access object */
+ private final AsyncProfilerAccess access;
+ /** The async-profiler instance */
+ private final AsyncProfiler profiler;
+
+ // Set on init
+ /** The platform */
+ private SparkPlatform platform;
+ /** The sampling interval in microseconds */
+ private int interval;
+ /** The thread dumper */
+ private ThreadDumper threadDumper;
+ /** The profiling window */
+ private int window;
+ /** If the profiler should run in quiet mode */
+ private boolean quiet;
+
+ /** The file used by async-profiler to output data */
+ private Path outputFile;
+
+ private AsyncProfilerJob(AsyncProfilerAccess access, AsyncProfiler profiler) {
+ this.access = access;
+ this.profiler = profiler;
+ }
+
+ /**
+ * Executes an async-profiler command.
+ *
+ * @param command the command
+ * @return the output
+ */
+ private String execute(String command) {
+ try {
+ return this.profiler.execute(command);
+ } catch (IOException e) {
+ throw new RuntimeException("Exception whilst executing profiler command", e);
+ }
+ }
+
+ /**
+ * Checks to ensure that this job is still active.
+ */
+ private void checkActive() {
+ if (ACTIVE.get() != this) {
+ throw new IllegalStateException("Profiler job no longer active!");
+ }
+ }
+
+ // Initialise the job
+ public void init(SparkPlatform platform, int interval, ThreadDumper threadDumper, int window, boolean quiet) {
+ this.platform = platform;
+ this.interval = interval;
+ this.threadDumper = threadDumper;
+ this.window = window;
+ this.quiet = quiet;
+ }
+
+ /**
+ * Starts the job.
+ */
+ public void start() {
+ checkActive();
+
+ try {
+ // create a new temporary output file
+ try {
+ this.outputFile = this.platform.getTemporaryFiles().create("spark-", "-profile-data.jfr.tmp");
+ } catch (IOException e) {
+ throw new RuntimeException("Unable to create temporary output file", e);
+ }
+
+ // construct a command to send to async-profiler
+ String command = "start,event=" + this.access.getProfilingEvent() + ",interval=" + this.interval + "us,threads,jfr,file=" + this.outputFile.toString();
+ if (this.quiet) {
+ command += ",loglevel=NONE";
+ }
+ if (this.threadDumper instanceof ThreadDumper.Specific) {
+ command += ",filter";
+ }
+
+ // start the profiler
+ String resp = execute(command).trim();
+
+ if (!resp.equalsIgnoreCase("profiling started")) {
+ throw new RuntimeException("Unexpected response: " + resp);
+ }
+
+ // append threads to be profiled, if necessary
+ if (this.threadDumper instanceof ThreadDumper.Specific) {
+ ThreadDumper.Specific threadDumper = (ThreadDumper.Specific) this.threadDumper;
+ for (Thread thread : threadDumper.getThreads()) {
+ this.profiler.addThread(thread);
+ }
+ }
+
+ } catch (Exception e) {
+ try {
+ this.profiler.stop();
+ } catch (Exception e2) {
+ // ignore
+ }
+ close();
+
+ throw e;
+ }
+ }
+
+ /**
+ * Stops the job.
+ */
+ public void stop() {
+ checkActive();
+
+ try {
+ this.profiler.stop();
+ } catch (IllegalStateException e) {
+ if (!e.getMessage().equals("Profiler is not active")) { // ignore
+ throw e;
+ }
+ } finally {
+ close();
+ }
+ }
+
+ /**
+ * Aggregates the collected data.
+ */
+ public void aggregate(AsyncDataAggregator dataAggregator) {
+
+ Predicate<String> threadFilter;
+ if (this.threadDumper instanceof ThreadDumper.Specific) {
+ ThreadDumper.Specific specificDumper = (ThreadDumper.Specific) this.threadDumper;
+ threadFilter = n -> specificDumper.getThreadNames().contains(n.toLowerCase());
+ } else {
+ threadFilter = n -> true;
+ }
+
+ // read the jfr file produced by async-profiler
+ try (JfrReader reader = new JfrReader(this.outputFile)) {
+ readSegments(reader, threadFilter, dataAggregator, this.window);
+ } catch (Exception e) {
+ boolean fileExists;
+ try {
+ fileExists = Files.exists(this.outputFile) && Files.size(this.outputFile) != 0;
+ } catch (IOException ex) {
+ fileExists = false;
+ }
+
+ if (fileExists) {
+ throw new JfrParsingException("Error parsing JFR data from profiler output", e);
+ } else {
+ throw new JfrParsingException("Error parsing JFR data from profiler output - file " + this.outputFile + " does not exist!", e);
+ }
+ }
+
+ deleteOutputFile();
+ }
+
+ public void deleteOutputFile() {
+ try {
+ Files.deleteIfExists(this.outputFile);
+ } catch (IOException e) {
+ // ignore
+ }
+ }
+
+ private void readSegments(JfrReader reader, Predicate<String> threadFilter, AsyncDataAggregator dataAggregator, int window) throws IOException {
+ List<JfrReader.ExecutionSample> samples = reader.readAllEvents(JfrReader.ExecutionSample.class);
+ for (int i = 0; i < samples.size(); i++) {
+ JfrReader.ExecutionSample sample = samples.get(i);
+
+ long duration;
+ if (i == 0) {
+ // we don't really know the duration of the first sample, so just use the sampling
+ // interval
+ duration = this.interval;
+ } else {
+ // calculate the duration of the sample by calculating the time elapsed since the
+ // previous sample
+ duration = TimeUnit.NANOSECONDS.toMicros(sample.time - samples.get(i - 1).time);
+ }
+
+ String threadName = reader.threads.get((long) sample.tid);
+ if (threadName == null) {
+ continue;
+ }
+
+ if (!threadFilter.test(threadName)) {
+ continue;
+ }
+
+ // parse the segment and give it to the data aggregator
+ ProfileSegment segment = ProfileSegment.parseSegment(reader, sample, threadName, duration);
+ dataAggregator.insertData(segment, window);
+ }
+ }
+
+ public int getWindow() {
+ return this.window;
+ }
+
+ private void close() {
+ ACTIVE.compareAndSet(this, null);
+ }
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java
index 5cb7fdc..178f055 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java
@@ -25,64 +25,43 @@ import com.google.common.util.concurrent.ThreadFactoryBuilder;
import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.command.sender.CommandSender;
import me.lucko.spark.common.sampler.AbstractSampler;
-import me.lucko.spark.common.sampler.ThreadDumper;
-import me.lucko.spark.common.sampler.ThreadGrouper;
-import me.lucko.spark.common.sampler.async.jfr.JfrReader;
+import me.lucko.spark.common.sampler.SamplerSettings;
import me.lucko.spark.common.sampler.node.MergeMode;
-import me.lucko.spark.common.sampler.node.ThreadNode;
-import me.lucko.spark.common.util.ClassSourceLookup;
-import me.lucko.spark.common.util.TemporaryFiles;
+import me.lucko.spark.common.sampler.source.ClassSourceLookup;
+import me.lucko.spark.common.sampler.window.ProfilingWindowUtils;
+import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.proto.SparkSamplerProtos.SamplerData;
-import one.profiler.AsyncProfiler;
-
-import java.io.IOException;
-import java.nio.charset.StandardCharsets;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.util.Comparator;
-import java.util.List;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
-import java.util.function.Predicate;
+import java.util.function.IntPredicate;
/**
* A sampler implementation using async-profiler.
*/
public class AsyncSampler extends AbstractSampler {
- private final AsyncProfiler profiler;
+ private final AsyncProfilerAccess profilerAccess;
/** Responsible for aggregating and then outputting collected sampling data */
private final AsyncDataAggregator dataAggregator;
- /** Flag to mark if the output has been completed */
- private boolean outputComplete = false;
-
- /** The temporary output file */
- private Path outputFile;
+ /** Mutex for the current profiler job */
+ private final Object[] currentJobMutex = new Object[0];
- /** The executor used for timeouts */
- private ScheduledExecutorService timeoutExecutor;
+ /** Current profiler job */
+ private AsyncProfilerJob currentJob;
- public AsyncSampler(int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime) {
- super(interval, threadDumper, endTime);
- this.profiler = AsyncProfilerAccess.INSTANCE.getProfiler();
- this.dataAggregator = new AsyncDataAggregator(threadGrouper);
- }
+ /** The executor used for scheduling and management */
+ private ScheduledExecutorService scheduler;
- /**
- * Executes a profiler command.
- *
- * @param command the command to execute
- * @return the response
- */
- private String execute(String command) {
- try {
- return this.profiler.execute(command);
- } catch (IOException e) {
- throw new RuntimeException("Exception whilst executing profiler command", e);
- }
+ public AsyncSampler(SparkPlatform platform, SamplerSettings settings) {
+ super(platform, settings);
+ this.profilerAccess = AsyncProfilerAccess.getInstance(platform);
+ this.dataAggregator = new AsyncDataAggregator(settings.threadGrouper());
+ this.scheduler = Executors.newSingleThreadScheduledExecutor(
+ new ThreadFactoryBuilder().setNameFormat("spark-asyncsampler-worker-thread").build()
+ );
}
/**
@@ -90,51 +69,82 @@ public class AsyncSampler extends AbstractSampler {
*/
@Override
public void start() {
- this.startTime = System.currentTimeMillis();
+ super.start();
- try {
- this.outputFile = TemporaryFiles.create("spark-profile-", ".jfr.tmp");
- } catch (IOException e) {
- throw new RuntimeException("Unable to create temporary output file", e);
+ TickHook tickHook = this.platform.getTickHook();
+ if (tickHook != null) {
+ this.windowStatisticsCollector.startCountingTicks(tickHook);
}
- String command = "start,event=" + AsyncProfilerAccess.INSTANCE.getProfilingEvent() + ",interval=" + this.interval + "us,threads,jfr,file=" + this.outputFile.toString();
- if (this.threadDumper instanceof ThreadDumper.Specific) {
- command += ",filter";
- }
+ int window = ProfilingWindowUtils.windowNow();
- String resp = execute(command).trim();
- if (!resp.equalsIgnoreCase("profiling started")) {
- throw new RuntimeException("Unexpected response: " + resp);
- }
+ AsyncProfilerJob job = this.profilerAccess.startNewProfilerJob();
+ job.init(this.platform, this.interval, this.threadDumper, window, this.background);
+ job.start();
+ this.currentJob = job;
- if (this.threadDumper instanceof ThreadDumper.Specific) {
- ThreadDumper.Specific threadDumper = (ThreadDumper.Specific) this.threadDumper;
- for (Thread thread : threadDumper.getThreads()) {
- this.profiler.addThread(thread);
- }
- }
+ // rotate the sampler job to put data into a new window
+ this.scheduler.scheduleAtFixedRate(
+ this::rotateProfilerJob,
+ ProfilingWindowUtils.WINDOW_SIZE_SECONDS,
+ ProfilingWindowUtils.WINDOW_SIZE_SECONDS,
+ TimeUnit.SECONDS
+ );
recordInitialGcStats();
scheduleTimeout();
}
+ private void rotateProfilerJob() {
+ try {
+ synchronized (this.currentJobMutex) {
+ AsyncProfilerJob previousJob = this.currentJob;
+ if (previousJob == null) {
+ return;
+ }
+
+ try {
+ // stop the previous job
+ previousJob.stop();
+
+ // collect statistics for the window
+ this.windowStatisticsCollector.measureNow(previousJob.getWindow());
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // start a new job
+ int window = previousJob.getWindow() + 1;
+ AsyncProfilerJob newJob = this.profilerAccess.startNewProfilerJob();
+ newJob.init(this.platform, this.interval, this.threadDumper, window, this.background);
+ newJob.start();
+ this.currentJob = newJob;
+
+ // aggregate the output of the previous job
+ previousJob.aggregate(this.dataAggregator);
+
+ // prune data older than the history size
+ IntPredicate predicate = ProfilingWindowUtils.keepHistoryBefore(window);
+ this.dataAggregator.pruneData(predicate);
+ this.windowStatisticsCollector.pruneStatistics(predicate);
+ }
+ } catch (Throwable e) {
+ e.printStackTrace();
+ }
+ }
+
private void scheduleTimeout() {
- if (this.endTime == -1) {
+ if (this.autoEndTime == -1) {
return;
}
- long delay = this.endTime - System.currentTimeMillis();
+ long delay = this.autoEndTime - System.currentTimeMillis();
if (delay <= 0) {
return;
}
- this.timeoutExecutor = Executors.newSingleThreadScheduledExecutor(
- new ThreadFactoryBuilder().setNameFormat("spark-asyncsampler-timeout-thread").build()
- );
-
- this.timeoutExecutor.schedule(() -> {
- stop();
+ this.scheduler.schedule(() -> {
+ stop(false);
this.future.complete(this);
}, delay, TimeUnit.MILLISECONDS);
}
@@ -143,129 +153,32 @@ public class AsyncSampler extends AbstractSampler {
* Stops the profiler.
*/
@Override
- public void stop() {
- try {
- this.profiler.stop();
- } catch (IllegalStateException e) {
- if (!e.getMessage().equals("Profiler is not active")) { // ignore
- throw e;
+ public void stop(boolean cancelled) {
+ super.stop(cancelled);
+
+ synchronized (this.currentJobMutex) {
+ this.currentJob.stop();
+ if (!cancelled) {
+ this.windowStatisticsCollector.measureNow(this.currentJob.getWindow());
+ this.currentJob.aggregate(this.dataAggregator);
+ } else {
+ this.currentJob.deleteOutputFile();
}
+ this.currentJob = null;
}
-
- if (this.timeoutExecutor != null) {
- this.timeoutExecutor.shutdown();
- this.timeoutExecutor = null;
+ if (this.scheduler != null) {
+ this.scheduler.shutdown();
+ this.scheduler = null;
}
}
@Override
- public SamplerData toProto(SparkPlatform platform, CommandSender creator, Comparator<ThreadNode> outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup) {
+ public SamplerData toProto(SparkPlatform platform, CommandSender creator, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup) {
SamplerData.Builder proto = SamplerData.newBuilder();
writeMetadataToProto(proto, platform, creator, comment, this.dataAggregator);
- aggregateOutput();
- writeDataToProto(proto, this.dataAggregator, outputOrder, mergeMode, classSourceLookup);
+ writeDataToProto(proto, this.dataAggregator, mergeMode, classSourceLookup);
return proto.build();
}
- private void aggregateOutput() {
- if (this.outputComplete) {
- return;
- }
- this.outputComplete = true;
-
- Predicate<String> threadFilter;
- if (this.threadDumper instanceof ThreadDumper.Specific) {
- ThreadDumper.Specific threadDumper = (ThreadDumper.Specific) this.threadDumper;
- threadFilter = n -> threadDumper.getThreadNames().contains(n.toLowerCase());
- } else {
- threadFilter = n -> true;
- }
-
- // read the jfr file produced by async-profiler
- try (JfrReader reader = new JfrReader(this.outputFile)) {
- readSegments(reader, threadFilter);
- } catch (IOException e) {
- throw new RuntimeException("Read error", e);
- }
-
- // delete the output file after reading
- try {
- Files.deleteIfExists(this.outputFile);
- } catch (IOException e) {
- // ignore
- }
- }
-
- private void readSegments(JfrReader reader, Predicate<String> threadFilter) throws IOException {
- List<JfrReader.ExecutionSample> samples = reader.readAllEvents(JfrReader.ExecutionSample.class);
- for (int i = 0; i < samples.size(); i++) {
- JfrReader.ExecutionSample sample = samples.get(i);
-
- long duration;
- if (i == 0) {
- // we don't really know the duration of the first sample, so just use the sampling
- // interval
- duration = this.interval;
- } else {
- // calculate the duration of the sample by calculating the time elapsed since the
- // previous sample
- duration = TimeUnit.NANOSECONDS.toMicros(sample.time - samples.get(i - 1).time);
- }
-
- String threadName = reader.threads.get(sample.tid);
- if (!threadFilter.test(threadName)) {
- continue;
- }
-
- // parse the segment and give it to the data aggregator
- ProfileSegment segment = parseSegment(reader, sample, threadName, duration);
- this.dataAggregator.insertData(segment);
- }
- }
-
- private static ProfileSegment parseSegment(JfrReader reader, JfrReader.ExecutionSample sample, String threadName, long duration) {
- JfrReader.StackTrace stackTrace = reader.stackTraces.get(sample.stackTraceId);
- int len = stackTrace.methods.length;
-
- AsyncStackTraceElement[] stack = new AsyncStackTraceElement[len];
- for (int i = 0; i < len; i++) {
- stack[i] = parseStackFrame(reader, stackTrace.methods[i]);
- }
-
- return new ProfileSegment(sample.tid, threadName, stack, duration);
- }
-
- private static AsyncStackTraceElement parseStackFrame(JfrReader reader, long methodId) {
- AsyncStackTraceElement result = reader.stackFrames.get(methodId);
- if (result != null) {
- return result;
- }
-
- JfrReader.MethodRef methodRef = reader.methods.get(methodId);
- JfrReader.ClassRef classRef = reader.classes.get(methodRef.cls);
-
- byte[] className = reader.symbols.get(classRef.name);
- byte[] methodName = reader.symbols.get(methodRef.name);
-
- if (className == null || className.length == 0) {
- // native call
- result = new AsyncStackTraceElement(
- AsyncStackTraceElement.NATIVE_CALL,
- new String(methodName, StandardCharsets.UTF_8),
- null
- );
- } else {
- // java method
- byte[] methodDesc = reader.symbols.get(methodRef.sig);
- result = new AsyncStackTraceElement(
- new String(className, StandardCharsets.UTF_8).replace('/', '.'),
- new String(methodName, StandardCharsets.UTF_8),
- new String(methodDesc, StandardCharsets.UTF_8)
- );
- }
-
- reader.stackFrames.put(methodId, result);
- return result;
- }
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/JfrParsingException.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/JfrParsingException.java
new file mode 100644
index 0000000..6dab359
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/JfrParsingException.java
@@ -0,0 +1,27 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.sampler.async;
+
+public class JfrParsingException extends RuntimeException {
+ public JfrParsingException(String message, Throwable cause) {
+ super(message, cause);
+ }
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/ProfileSegment.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/ProfileSegment.java
index 154e6fe..26debaf 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/ProfileSegment.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/ProfileSegment.java
@@ -20,6 +20,10 @@
package me.lucko.spark.common.sampler.async;
+import me.lucko.spark.common.sampler.async.jfr.JfrReader;
+
+import java.nio.charset.StandardCharsets;
+
/**
* Represents a profile "segment".
*
@@ -58,4 +62,50 @@ public class ProfileSegment {
public long getTime() {
return this.time;
}
+
+ public static ProfileSegment parseSegment(JfrReader reader, JfrReader.ExecutionSample sample, String threadName, long duration) {
+ JfrReader.StackTrace stackTrace = reader.stackTraces.get(sample.stackTraceId);
+ int len = stackTrace.methods.length;
+
+ AsyncStackTraceElement[] stack = new AsyncStackTraceElement[len];
+ for (int i = 0; i < len; i++) {
+ stack[i] = parseStackFrame(reader, stackTrace.methods[i]);
+ }
+
+ return new ProfileSegment(sample.tid, threadName, stack, duration);
+ }
+
+ private static AsyncStackTraceElement parseStackFrame(JfrReader reader, long methodId) {
+ AsyncStackTraceElement result = reader.stackFrames.get(methodId);
+ if (result != null) {
+ return result;
+ }
+
+ JfrReader.MethodRef methodRef = reader.methods.get(methodId);
+ JfrReader.ClassRef classRef = reader.classes.get(methodRef.cls);
+
+ byte[] className = reader.symbols.get(classRef.name);
+ byte[] methodName = reader.symbols.get(methodRef.name);
+
+ if (className == null || className.length == 0) {
+ // native call
+ result = new AsyncStackTraceElement(
+ AsyncStackTraceElement.NATIVE_CALL,
+ new String(methodName, StandardCharsets.UTF_8),
+ null
+ );
+ } else {
+ // java method
+ byte[] methodDesc = reader.symbols.get(methodRef.sig);
+ result = new AsyncStackTraceElement(
+ new String(className, StandardCharsets.UTF_8).replace('/', '.'),
+ new String(methodName, StandardCharsets.UTF_8),
+ new String(methodDesc, StandardCharsets.UTF_8)
+ );
+ }
+
+ reader.stackFrames.put(methodId, result);
+ return result;
+ }
+
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/Dictionary.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/Dictionary.java
index 23223a2..60f6543 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/Dictionary.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/Dictionary.java
@@ -37,6 +37,10 @@ public class Dictionary<T> {
size = 0;
}
+ public int size() {
+ return this.size;
+ }
+
public void put(long key, T value) {
if (key == 0) {
throw new IllegalArgumentException("Zero key not allowed");
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/JfrReader.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/JfrReader.java
index e0cc4e9..ea4985e 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/JfrReader.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/JfrReader.java
@@ -51,7 +51,7 @@ public class JfrReader implements Closeable {
public final Dictionary<JfrClass> types = new Dictionary<>();
public final Map<String, JfrClass> typesByName = new HashMap<>();
- public final Dictionary<String> threads = new Dictionary<>();
+ public final Map<Long, String> threads = new HashMap<>(); // spark
public final Dictionary<ClassRef> classes = new Dictionary<>();
public final Dictionary<byte[]> symbols = new Dictionary<>();
public final Dictionary<MethodRef> methods = new Dictionary<>();
@@ -324,7 +324,7 @@ public class JfrReader implements Closeable {
}
private void readThreads(boolean hasGroup) {
- int count = threads.preallocate(getVarint());
+ int count = getVarint(); //threads.preallocate(getVarint());
for (int i = 0; i < count; i++) {
long id = getVarlong();
String osName = getString();
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaDataAggregator.java
index cc530d6..c51ec05 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaDataAggregator.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaDataAggregator.java
@@ -66,10 +66,11 @@ public abstract class JavaDataAggregator extends AbstractDataAggregator {
* Inserts sampling data into this aggregator
*
* @param threadInfo the thread info
+ * @param window the window
*/
- public abstract void insertData(ThreadInfo threadInfo);
+ public abstract void insertData(ThreadInfo threadInfo, int window);
- protected void writeData(ThreadInfo threadInfo) {
+ protected void writeData(ThreadInfo threadInfo, int window) {
if (this.ignoreSleeping && isSleeping(threadInfo)) {
return;
}
@@ -79,7 +80,7 @@ public abstract class JavaDataAggregator extends AbstractDataAggregator {
try {
ThreadNode node = getNode(this.threadGrouper.getGroup(threadInfo.getThreadId(), threadInfo.getThreadName()));
- node.log(STACK_TRACE_DESCRIBER, threadInfo.getStackTrace(), this.interval);
+ node.log(STACK_TRACE_DESCRIBER, threadInfo.getStackTrace(), this.interval, window);
} catch (Exception e) {
e.printStackTrace();
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java
index cfa0a0f..72a37e8 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java
@@ -25,23 +25,23 @@ import com.google.common.util.concurrent.ThreadFactoryBuilder;
import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.command.sender.CommandSender;
import me.lucko.spark.common.sampler.AbstractSampler;
-import me.lucko.spark.common.sampler.ThreadDumper;
-import me.lucko.spark.common.sampler.ThreadGrouper;
+import me.lucko.spark.common.sampler.SamplerSettings;
import me.lucko.spark.common.sampler.node.MergeMode;
-import me.lucko.spark.common.sampler.node.ThreadNode;
+import me.lucko.spark.common.sampler.source.ClassSourceLookup;
+import me.lucko.spark.common.sampler.window.ProfilingWindowUtils;
+import me.lucko.spark.common.sampler.window.WindowStatisticsCollector;
import me.lucko.spark.common.tick.TickHook;
-import me.lucko.spark.common.util.ClassSourceLookup;
import me.lucko.spark.proto.SparkSamplerProtos.SamplerData;
import java.lang.management.ManagementFactory;
import java.lang.management.ThreadInfo;
import java.lang.management.ThreadMXBean;
-import java.util.Comparator;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
+import java.util.function.IntPredicate;
/**
* A sampler implementation using Java (WarmRoast).
@@ -62,26 +62,47 @@ public class JavaSampler extends AbstractSampler implements Runnable {
/** Responsible for aggregating and then outputting collected sampling data */
private final JavaDataAggregator dataAggregator;
+
+ /** The last window that was profiled */
+ private final AtomicInteger lastWindow = new AtomicInteger();
- public JavaSampler(int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime, boolean ignoreSleeping, boolean ignoreNative) {
- super(interval, threadDumper, endTime);
- this.dataAggregator = new SimpleDataAggregator(this.workerPool, threadGrouper, interval, ignoreSleeping, ignoreNative);
+ public JavaSampler(SparkPlatform platform, SamplerSettings settings, boolean ignoreSleeping, boolean ignoreNative) {
+ super(platform, settings);
+ this.dataAggregator = new SimpleDataAggregator(this.workerPool, settings.threadGrouper(), settings.interval(), ignoreSleeping, ignoreNative);
}
- public JavaSampler(int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime, boolean ignoreSleeping, boolean ignoreNative, TickHook tickHook, int tickLengthThreshold) {
- super(interval, threadDumper, endTime);
- this.dataAggregator = new TickedDataAggregator(this.workerPool, threadGrouper, interval, ignoreSleeping, ignoreNative, tickHook, tickLengthThreshold);
+ public JavaSampler(SparkPlatform platform, SamplerSettings settings, boolean ignoreSleeping, boolean ignoreNative, TickHook tickHook, int tickLengthThreshold) {
+ super(platform, settings);
+ this.dataAggregator = new TickedDataAggregator(this.workerPool, settings.threadGrouper(), settings.interval(), ignoreSleeping, ignoreNative, tickHook, tickLengthThreshold);
}
@Override
public void start() {
- this.startTime = System.currentTimeMillis();
+ super.start();
+
+ TickHook tickHook = this.platform.getTickHook();
+ if (tickHook != null) {
+ if (this.dataAggregator instanceof TickedDataAggregator) {
+ WindowStatisticsCollector.ExplicitTickCounter counter = this.windowStatisticsCollector.startCountingTicksExplicit(tickHook);
+ ((TickedDataAggregator) this.dataAggregator).setTickCounter(counter);
+ } else {
+ this.windowStatisticsCollector.startCountingTicks(tickHook);
+ }
+ }
+
this.task = this.workerPool.scheduleAtFixedRate(this, 0, this.interval, TimeUnit.MICROSECONDS);
}
@Override
- public void stop() {
+ public void stop(boolean cancelled) {
+ super.stop(cancelled);
+
this.task.cancel(false);
+
+ if (!cancelled) {
+ // collect statistics for the final window
+ this.windowStatisticsCollector.measureNow(this.lastWindow.get());
+ }
}
@Override
@@ -89,27 +110,30 @@ public class JavaSampler extends AbstractSampler implements Runnable {
// this is effectively synchronized, the worker pool will not allow this task
// to concurrently execute.
try {
- if (this.endTime != -1 && this.endTime <= System.currentTimeMillis()) {
+ long time = System.currentTimeMillis();
+
+ if (this.autoEndTime != -1 && this.autoEndTime <= time) {
+ stop(false);
this.future.complete(this);
- stop();
return;
}
+ int window = ProfilingWindowUtils.unixMillisToWindow(time);
ThreadInfo[] threadDumps = this.threadDumper.dumpThreads(this.threadBean);
- this.workerPool.execute(new InsertDataTask(this.dataAggregator, threadDumps));
+ this.workerPool.execute(new InsertDataTask(threadDumps, window));
} catch (Throwable t) {
+ stop(false);
this.future.completeExceptionally(t);
- stop();
}
}
- private static final class InsertDataTask implements Runnable {
- private final JavaDataAggregator dataAggregator;
+ private final class InsertDataTask implements Runnable {
private final ThreadInfo[] threadDumps;
+ private final int window;
- InsertDataTask(JavaDataAggregator dataAggregator, ThreadInfo[] threadDumps) {
- this.dataAggregator = dataAggregator;
+ InsertDataTask(ThreadInfo[] threadDumps, int window) {
this.threadDumps = threadDumps;
+ this.window = window;
}
@Override
@@ -118,16 +142,29 @@ public class JavaSampler extends AbstractSampler implements Runnable {
if (threadInfo.getThreadName() == null || threadInfo.getStackTrace() == null) {
continue;
}
- this.dataAggregator.insertData(threadInfo);
+ JavaSampler.this.dataAggregator.insertData(threadInfo, this.window);
+ }
+
+ // if we have just stepped over into a new window...
+ int previousWindow = JavaSampler.this.lastWindow.getAndUpdate(previous -> Math.max(this.window, previous));
+ if (previousWindow != 0 && previousWindow != this.window) {
+
+ // collect statistics for the previous window
+ JavaSampler.this.windowStatisticsCollector.measureNow(previousWindow);
+
+ // prune data older than the history size
+ IntPredicate predicate = ProfilingWindowUtils.keepHistoryBefore(this.window);
+ JavaSampler.this.dataAggregator.pruneData(predicate);
+ JavaSampler.this.windowStatisticsCollector.pruneStatistics(predicate);
}
}
}
@Override
- public SamplerData toProto(SparkPlatform platform, CommandSender creator, Comparator<ThreadNode> outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup) {
+ public SamplerData toProto(SparkPlatform platform, CommandSender creator, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup) {
SamplerData.Builder proto = SamplerData.newBuilder();
writeMetadataToProto(proto, platform, creator, comment, this.dataAggregator);
- writeDataToProto(proto, this.dataAggregator, outputOrder, mergeMode, classSourceLookup);
+ writeDataToProto(proto, this.dataAggregator, mergeMode, classSourceLookup);
return proto.build();
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/SimpleDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/SimpleDataAggregator.java
index 39e21aa..54173fe 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/SimpleDataAggregator.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/SimpleDataAggregator.java
@@ -44,8 +44,8 @@ public class SimpleDataAggregator extends JavaDataAggregator {
}
@Override
- public void insertData(ThreadInfo threadInfo) {
- writeData(threadInfo);
+ public void insertData(ThreadInfo threadInfo, int window) {
+ writeData(threadInfo, window);
}
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java
index e817828..d537b96 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java
@@ -23,6 +23,7 @@ package me.lucko.spark.common.sampler.java;
import me.lucko.spark.common.sampler.ThreadGrouper;
import me.lucko.spark.common.sampler.aggregator.DataAggregator;
import me.lucko.spark.common.sampler.node.ThreadNode;
+import me.lucko.spark.common.sampler.window.WindowStatisticsCollector;
import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata;
@@ -47,11 +48,15 @@ public class TickedDataAggregator extends JavaDataAggregator {
/** The expected number of samples in each tick */
private final int expectedSize;
- private final Object mutex = new Object();
+ /** Counts the number of ticks aggregated */
+ private WindowStatisticsCollector.ExplicitTickCounter tickCounter;
// state
private int currentTick = -1;
- private TickList currentData = new TickList(0);
+ private TickList currentData = null;
+
+ // guards currentData
+ private final Object mutex = new Object();
public TickedDataAggregator(ExecutorService workerPool, ThreadGrouper threadGrouper, int interval, boolean ignoreSleeping, boolean ignoreNative, TickHook tickHook, int tickLengthThreshold) {
super(workerPool, threadGrouper, interval, ignoreSleeping, ignoreNative);
@@ -62,23 +67,34 @@ public class TickedDataAggregator extends JavaDataAggregator {
this.expectedSize = (int) ((50 / intervalMilliseconds) + 10);
}
+ public void setTickCounter(WindowStatisticsCollector.ExplicitTickCounter tickCounter) {
+ this.tickCounter = tickCounter;
+ }
+
@Override
public SamplerMetadata.DataAggregator getMetadata() {
+ // push the current tick (so numberOfTicks is accurate)
+ synchronized (this.mutex) {
+ pushCurrentTick();
+ this.currentData = null;
+ }
+
return SamplerMetadata.DataAggregator.newBuilder()
.setType(SamplerMetadata.DataAggregator.Type.TICKED)
.setThreadGrouper(this.threadGrouper.asProto())
.setTickLengthThreshold(this.tickLengthThreshold)
+ .setNumberOfIncludedTicks(this.tickCounter.getTotalCountedTicks())
.build();
}
@Override
- public void insertData(ThreadInfo threadInfo) {
+ public void insertData(ThreadInfo threadInfo, int window) {
synchronized (this.mutex) {
int tick = this.tickHook.getCurrentTick();
- if (this.currentTick != tick) {
+ if (this.currentTick != tick || this.currentData == null) {
pushCurrentTick();
this.currentTick = tick;
- this.currentData = new TickList(this.expectedSize);
+ this.currentData = new TickList(this.expectedSize, window);
}
this.currentData.addData(threadInfo);
@@ -88,6 +104,9 @@ public class TickedDataAggregator extends JavaDataAggregator {
// guarded by 'mutex'
private void pushCurrentTick() {
TickList currentData = this.currentData;
+ if (currentData == null) {
+ return;
+ }
// approximate how long the tick lasted
int tickLengthMicros = currentData.getList().size() * this.interval;
@@ -98,6 +117,7 @@ public class TickedDataAggregator extends JavaDataAggregator {
}
this.workerPool.submit(currentData);
+ this.tickCounter.increment();
}
@Override
@@ -112,15 +132,17 @@ public class TickedDataAggregator extends JavaDataAggregator {
private final class TickList implements Runnable {
private final List<ThreadInfo> list;
+ private final int window;
- TickList(int expectedSize) {
+ TickList(int expectedSize, int window) {
this.list = new ArrayList<>(expectedSize);
+ this.window = window;
}
@Override
public void run() {
for (ThreadInfo data : this.list) {
- writeData(data);
+ writeData(data, this.window);
}
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java
index fd2be8d..163365c 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java
@@ -20,69 +20,81 @@
package me.lucko.spark.common.sampler.node;
+import me.lucko.spark.common.sampler.window.ProtoTimeEncoder;
+
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
+import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.LongAdder;
+import java.util.function.IntPredicate;
/**
* Encapsulates a timed node in the sampling stack.
*/
public abstract class AbstractNode {
- private static final int MAX_STACK_DEPTH = 300;
+ protected static final int MAX_STACK_DEPTH = Integer.getInteger("spark.maxStackDepth", 300);
/** A map of the nodes children */
private final Map<StackTraceNode.Description, StackTraceNode> children = new ConcurrentHashMap<>();
/** The accumulated sample time for this node, measured in microseconds */
- private final LongAdder totalTime = new LongAdder();
+ // Integer key = the window (effectively System.currentTimeMillis() / 60_000)
+ // LongAdder value = accumulated time in microseconds
+ private final Map<Integer, LongAdder> times = new ConcurrentHashMap<>();
/**
- * Gets the total sample time logged for this node in milliseconds.
+ * Gets the time accumulator for a given window
*
- * @return the total time
+ * @param window the window
+ * @return the accumulator
*/
- public double getTotalTime() {
- return this.totalTime.longValue() / 1000d;
+ protected LongAdder getTimeAccumulator(int window) {
+ LongAdder adder = this.times.get(window);
+ if (adder == null) {
+ adder = new LongAdder();
+ this.times.put(window, adder);
+ }
+ return adder;
}
- public Collection<StackTraceNode> getChildren() {
- return this.children.values();
+ /**
+ * Gets the time windows that have been logged for this node.
+ *
+ * @return the time windows
+ */
+ public Set<Integer> getTimeWindows() {
+ return this.times.keySet();
}
/**
- * Logs the given stack trace against this node and its children.
+ * Removes time windows from this node if they pass the given {@code predicate} test.
*
- * @param describer the function that describes the elements of the stack
- * @param stack the stack
- * @param time the total time to log
- * @param <T> the stack trace element type
+ * @param predicate the predicate
+ * @return true if any time windows were removed
*/
- public <T> void log(StackTraceNode.Describer<T> describer, T[] stack, long time) {
- if (stack.length == 0) {
- return;
- }
-
- this.totalTime.add(time);
-
- AbstractNode node = this;
- T previousElement = null;
-
- for (int offset = 0; offset < Math.min(MAX_STACK_DEPTH, stack.length); offset++) {
- T element = stack[(stack.length - 1) - offset];
+ public boolean removeTimeWindows(IntPredicate predicate) {
+ return this.times.keySet().removeIf(predicate::test);
+ }
- node = node.resolveChild(describer.describe(element, previousElement));
- node.totalTime.add(time);
+ /**
+ * Gets the encoded total sample times logged for this node in milliseconds.
+ *
+ * @return the total times
+ */
+ protected double[] encodeTimesForProto(ProtoTimeEncoder encoder) {
+ return encoder.encode(this.times);
+ }
- previousElement = element;
- }
+ public Collection<StackTraceNode> getChildren() {
+ return this.children.values();
}
- private StackTraceNode resolveChild(StackTraceNode.Description description) {
+ protected StackTraceNode resolveChild(StackTraceNode.Description description) {
StackTraceNode result = this.children.get(description); // fast path
if (result != null) {
return result;
@@ -96,7 +108,7 @@ public abstract class AbstractNode {
* @param other the other node
*/
protected void merge(AbstractNode other) {
- this.totalTime.add(other.totalTime.longValue());
+ other.times.forEach((key, value) -> getTimeAccumulator(key).add(value.longValue()));
for (Map.Entry<StackTraceNode.Description, StackTraceNode> child : other.children.entrySet()) {
resolveChild(child.getKey()).merge(child.getValue());
}
@@ -123,7 +135,6 @@ public abstract class AbstractNode {
list.add(child);
}
- list.sort(null);
return list;
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java
index b0d9237..c0dcc5b 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java
@@ -20,6 +20,7 @@
package me.lucko.spark.common.sampler.node;
+import me.lucko.spark.common.sampler.window.ProtoTimeEncoder;
import me.lucko.spark.common.util.MethodDisambiguator;
import me.lucko.spark.proto.SparkSamplerProtos;
@@ -30,7 +31,7 @@ import java.util.Objects;
/**
* Represents a stack trace element within the {@link AbstractNode node} structure.
*/
-public final class StackTraceNode extends AbstractNode implements Comparable<StackTraceNode> {
+public final class StackTraceNode extends AbstractNode {
/**
* Magic number to denote "no present" line number for a node.
@@ -64,12 +65,16 @@ public final class StackTraceNode extends AbstractNode implements Comparable<Sta
return this.description.parentLineNumber;
}
- public SparkSamplerProtos.StackTraceNode toProto(MergeMode mergeMode) {
+ public SparkSamplerProtos.StackTraceNode toProto(MergeMode mergeMode, ProtoTimeEncoder timeEncoder, Iterable<Integer> childrenRefs) {
SparkSamplerProtos.StackTraceNode.Builder proto = SparkSamplerProtos.StackTraceNode.newBuilder()
- .setTime(getTotalTime())
.setClassName(this.description.className)
.setMethodName(this.description.methodName);
+ double[] times = encodeTimesForProto(timeEncoder);
+ for (double time : times) {
+ proto.addTimes(time);
+ }
+
if (this.description.lineNumber >= 0) {
proto.setLineNumber(this.description.lineNumber);
}
@@ -86,27 +91,11 @@ public final class StackTraceNode extends AbstractNode implements Comparable<Sta
.ifPresent(proto::setMethodDesc);
}
- for (StackTraceNode child : exportChildren(mergeMode)) {
- proto.addChildren(child.toProto(mergeMode));
- }
+ proto.addAllChildrenRefs(childrenRefs);
return proto.build();
}
- @Override
- public int compareTo(StackTraceNode that) {
- if (this == that) {
- return 0;
- }
-
- int i = -Double.compare(this.getTotalTime(), that.getTotalTime());
- if (i != 0) {
- return i;
- }
-
- return this.description.compareTo(that.description);
- }
-
/**
* Function to construct a {@link StackTraceNode.Description} from a stack trace element
* of type {@code T}.
@@ -129,7 +118,7 @@ public final class StackTraceNode extends AbstractNode implements Comparable<Sta
/**
* Encapsulates the attributes of a {@link StackTraceNode}.
*/
- public static final class Description implements Comparable<Description> {
+ public static final class Description {
private final String className;
private final String methodName;
@@ -162,54 +151,6 @@ public final class StackTraceNode extends AbstractNode implements Comparable<Sta
this.hash = Objects.hash(this.className, this.methodName, this.methodDescription);
}
- private static <T extends Comparable<T>> int nullCompare(T a, T b) {
- if (a == null && b == null) {
- return 0;
- } else if (a == null) {
- return -1;
- } else if (b == null) {
- return 1;
- } else {
- return a.compareTo(b);
- }
- }
-
- @Override
- public int compareTo(Description that) {
- if (this == that) {
- return 0;
- }
-
- int i = this.className.compareTo(that.className);
- if (i != 0) {
- return i;
- }
-
- i = this.methodName.compareTo(that.methodName);
- if (i != 0) {
- return i;
- }
-
- i = nullCompare(this.methodDescription, that.methodDescription);
- if (i != 0) {
- return i;
- }
-
- if (this.methodDescription != null && that.methodDescription != null) {
- i = this.methodDescription.compareTo(that.methodDescription);
- if (i != 0) {
- return i;
- }
- }
-
- i = Integer.compare(this.lineNumber, that.lineNumber);
- if (i != 0) {
- return i;
- }
-
- return Integer.compare(this.parentLineNumber, that.parentLineNumber);
- }
-
@Override
public boolean equals(Object o) {
if (this == o) return true;
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java
index ed97443..37ff359 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java
@@ -20,8 +20,19 @@
package me.lucko.spark.common.sampler.node;
+import me.lucko.spark.common.sampler.window.ProtoTimeEncoder;
+import me.lucko.spark.common.util.IndexedListBuilder;
import me.lucko.spark.proto.SparkSamplerProtos;
+import java.util.ArrayDeque;
+import java.util.Collection;
+import java.util.Deque;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Queue;
+import java.util.function.IntPredicate;
+
/**
* The root of a sampling stack for a given thread / thread group.
*/
@@ -53,15 +64,162 @@ public final class ThreadNode extends AbstractNode {
this.label = label;
}
- public SparkSamplerProtos.ThreadNode toProto(MergeMode mergeMode) {
+ /**
+ * Logs the given stack trace against this node and its children.
+ *
+ * @param describer the function that describes the elements of the stack
+ * @param stack the stack
+ * @param time the total time to log
+ * @param window the window
+ * @param <T> the stack trace element type
+ */
+ public <T> void log(StackTraceNode.Describer<T> describer, T[] stack, long time, int window) {
+ if (stack.length == 0) {
+ return;
+ }
+
+ getTimeAccumulator(window).add(time);
+
+ AbstractNode node = this;
+ T previousElement = null;
+
+ for (int offset = 0; offset < Math.min(MAX_STACK_DEPTH, stack.length); offset++) {
+ T element = stack[(stack.length - 1) - offset];
+
+ node = node.resolveChild(describer.describe(element, previousElement));
+ node.getTimeAccumulator(window).add(time);
+
+ previousElement = element;
+ }
+ }
+
+ /**
+ * Removes time windows that match the given {@code predicate}.
+ *
+ * @param predicate the predicate to use to test the time windows
+ * @return true if this node is now empty
+ */
+ public boolean removeTimeWindowsRecursively(IntPredicate predicate) {
+ Queue<AbstractNode> queue = new ArrayDeque<>();
+ queue.add(this);
+
+ while (!queue.isEmpty()) {
+ AbstractNode node = queue.remove();
+ Collection<StackTraceNode> children = node.getChildren();
+
+ boolean needToProcessChildren = false;
+
+ for (Iterator<StackTraceNode> it = children.iterator(); it.hasNext(); ) {
+ StackTraceNode child = it.next();
+
+ boolean windowsWereRemoved = child.removeTimeWindows(predicate);
+ boolean childIsNowEmpty = child.getTimeWindows().isEmpty();
+
+ if (childIsNowEmpty) {
+ it.remove();
+ continue;
+ }
+
+ if (windowsWereRemoved) {
+ needToProcessChildren = true;
+ }
+ }
+
+ if (needToProcessChildren) {
+ queue.addAll(children);
+ }
+ }
+
+ removeTimeWindows(predicate);
+ return getTimeWindows().isEmpty();
+ }
+
+ public SparkSamplerProtos.ThreadNode toProto(MergeMode mergeMode, ProtoTimeEncoder timeEncoder) {
SparkSamplerProtos.ThreadNode.Builder proto = SparkSamplerProtos.ThreadNode.newBuilder()
- .setName(getThreadLabel())
- .setTime(getTotalTime());
+ .setName(getThreadLabel());
+
+ double[] times = encodeTimesForProto(timeEncoder);
+ for (double time : times) {
+ proto.addTimes(time);
+ }
+
+ // When converting to a proto, we change the data structure from a recursive tree to an array.
+ // Effectively, instead of:
+ //
+ // {
+ // data: 'one',
+ // children: [
+ // {
+ // data: 'two',
+ // children: [{ data: 'four' }]
+ // },
+ // { data: 'three' }
+ // ]
+ // }
+ //
+ // we transmit:
+ //
+ // [
+ // { data: 'one', children: [1, 2] },
+        //   { data: 'two', children: [3] },
+        //   { data: 'three', children: [] },
+ // { data: 'four', children: [] }
+ // ]
+ //
+
+ // the flattened array of nodes
+ IndexedListBuilder<SparkSamplerProtos.StackTraceNode> nodesArray = new IndexedListBuilder<>();
+ // Perform a depth-first post order traversal of the tree
+ Deque<Node> stack = new ArrayDeque<>();
+
+ // push the thread node's children to the stack
+ List<Integer> childrenRefs = new LinkedList<>();
for (StackTraceNode child : exportChildren(mergeMode)) {
- proto.addChildren(child.toProto(mergeMode));
+ stack.push(new Node(child, childrenRefs));
+ }
+
+ Node node;
+ while (!stack.isEmpty()) {
+ node = stack.peek();
+
+ // on the first visit, just push this node's children and leave it on the stack
+ if (node.firstVisit) {
+ for (StackTraceNode child : node.stackTraceNode.exportChildren(mergeMode)) {
+ stack.push(new Node(child, node.childrenRefs));
+ }
+ node.firstVisit = false;
+ continue;
+ }
+
+ // convert StackTraceNode to a proto
+ // - at this stage, we have already visited this node's children
+ // - the refs for each child are stored in node.childrenRefs
+ SparkSamplerProtos.StackTraceNode childProto = node.stackTraceNode.toProto(mergeMode, timeEncoder, node.childrenRefs);
+
+ // add the child proto to the nodes array, and record the ref in the parent
+ int childIndex = nodesArray.add(childProto);
+ node.parentChildrenRefs.add(childIndex);
+
+ // pop from the stack
+ stack.pop();
}
+ proto.addAllChildrenRefs(childrenRefs);
+ proto.addAllChildren(nodesArray.build());
+
return proto.build();
}
+
+ private static final class Node {
+ private final StackTraceNode stackTraceNode;
+ private boolean firstVisit = true;
+ private final List<Integer> childrenRefs = new LinkedList<>();
+ private final List<Integer> parentChildrenRefs;
+
+ private Node(StackTraceNode node, List<Integer> parentChildrenRefs) {
+ this.stackTraceNode = node;
+ this.parentChildrenRefs = parentChildrenRefs;
+ }
+ }
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/source/ClassSourceLookup.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/source/ClassSourceLookup.java
new file mode 100644
index 0000000..ab63c00
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/source/ClassSourceLookup.java
@@ -0,0 +1,462 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.sampler.source;
+
+import me.lucko.spark.common.SparkPlatform;
+import me.lucko.spark.common.sampler.node.StackTraceNode;
+import me.lucko.spark.common.sampler.node.ThreadNode;
+import me.lucko.spark.common.util.ClassFinder;
+
+import org.checkerframework.checker.nullness.qual.Nullable;
+
+import java.io.IOException;
+import java.net.MalformedURLException;
+import java.net.URISyntaxException;
+import java.net.URL;
+import java.net.URLClassLoader;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.security.CodeSource;
+import java.security.ProtectionDomain;
+import java.util.ArrayDeque;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Queue;
+import java.util.function.Function;
+import java.util.stream.Collectors;
+
+/**
+ * A function which defines the source of given {@link Class}es or (Mixin) method calls.
+ */
+public interface ClassSourceLookup {
+
+ /**
+ * Identify the given class.
+ *
+ * @param clazz the class
+ * @return the source of the class
+ */
+ @Nullable String identify(Class<?> clazz) throws Exception;
+
+ /**
+ * Identify the given method call.
+ *
+ * @param methodCall the method call info
+ * @return the source of the method call
+ */
+ default @Nullable String identify(MethodCall methodCall) throws Exception {
+ return null;
+ }
+
+ /**
+ * Identify the given method call.
+ *
+ * @param methodCall the method call info
+ * @return the source of the method call
+ */
+ default @Nullable String identify(MethodCallByLine methodCall) throws Exception {
+ return null;
+ }
+
+ /**
+ * A no-operation {@link ClassSourceLookup}.
+ */
+ ClassSourceLookup NO_OP = new ClassSourceLookup() {
+ @Override
+ public @Nullable String identify(Class<?> clazz) {
+ return null;
+ }
+ };
+
+ static ClassSourceLookup create(SparkPlatform platform) {
+ try {
+ return platform.createClassSourceLookup();
+ } catch (Exception e) {
+ e.printStackTrace();
+ return NO_OP;
+ }
+ }
+
+ /**
+ * A {@link ClassSourceLookup} which identifies classes based on their {@link ClassLoader}.
+ */
+ abstract class ByClassLoader implements ClassSourceLookup {
+
+ public abstract @Nullable String identify(ClassLoader loader) throws Exception;
+
+ @Override
+ public final @Nullable String identify(Class<?> clazz) throws Exception {
+ ClassLoader loader = clazz.getClassLoader();
+ while (loader != null) {
+ String source = identify(loader);
+ if (source != null) {
+ return source;
+ }
+ loader = loader.getParent();
+ }
+ return null;
+ }
+ }
+
+ /**
+ * A {@link ClassSourceLookup} which identifies classes based on URL.
+ */
+ interface ByUrl extends ClassSourceLookup {
+
+ default String identifyUrl(URL url) throws URISyntaxException, MalformedURLException {
+ Path path = null;
+
+ String protocol = url.getProtocol();
+ if (protocol.equals("file")) {
+ path = Paths.get(url.toURI());
+ } else if (protocol.equals("jar")) {
+ URL innerUrl = new URL(url.getPath());
+ path = Paths.get(innerUrl.getPath().split("!")[0]);
+ }
+
+ if (path != null) {
+ return identifyFile(path.toAbsolutePath().normalize());
+ }
+
+ return null;
+ }
+
+ default String identifyFile(Path path) {
+ return identifyFileName(path.getFileName().toString());
+ }
+
+ default String identifyFileName(String fileName) {
+ return fileName.endsWith(".jar") ? fileName.substring(0, fileName.length() - 4) : null;
+ }
+ }
+
+ /**
+ * A {@link ClassSourceLookup} which identifies classes based on the first URL in a {@link URLClassLoader}.
+ */
+ class ByFirstUrlSource extends ClassSourceLookup.ByClassLoader implements ClassSourceLookup.ByUrl {
+ @Override
+ public @Nullable String identify(ClassLoader loader) throws IOException, URISyntaxException {
+ if (loader instanceof URLClassLoader) {
+ URLClassLoader urlClassLoader = (URLClassLoader) loader;
+ URL[] urls = urlClassLoader.getURLs();
+ if (urls.length == 0) {
+ return null;
+ }
+ return identifyUrl(urls[0]);
+ }
+ return null;
+ }
+ }
+
+ /**
+ * A {@link ClassSourceLookup} which identifies classes based on their {@link ProtectionDomain#getCodeSource()}.
+ */
+ class ByCodeSource implements ClassSourceLookup, ClassSourceLookup.ByUrl {
+ @Override
+ public @Nullable String identify(Class<?> clazz) throws URISyntaxException, MalformedURLException {
+ ProtectionDomain protectionDomain = clazz.getProtectionDomain();
+ if (protectionDomain == null) {
+ return null;
+ }
+ CodeSource codeSource = protectionDomain.getCodeSource();
+ if (codeSource == null) {
+ return null;
+ }
+
+ URL url = codeSource.getLocation();
+ return url == null ? null : identifyUrl(url);
+ }
+ }
+
+ interface Visitor {
+ void visit(ThreadNode node);
+
+ boolean hasClassSourceMappings();
+
+ Map<String, String> getClassSourceMapping();
+
+ boolean hasMethodSourceMappings();
+
+ Map<String, String> getMethodSourceMapping();
+
+ boolean hasLineSourceMappings();
+
+ Map<String, String> getLineSourceMapping();
+ }
+
+ static Visitor createVisitor(ClassSourceLookup lookup) {
+ if (lookup == ClassSourceLookup.NO_OP) {
+ return NoOpVisitor.INSTANCE; // don't bother!
+ }
+ return new VisitorImpl(lookup);
+ }
+
+ enum NoOpVisitor implements Visitor {
+ INSTANCE;
+
+ @Override
+ public void visit(ThreadNode node) {
+
+ }
+
+ @Override
+ public boolean hasClassSourceMappings() {
+ return false;
+ }
+
+ @Override
+ public Map<String, String> getClassSourceMapping() {
+ return Collections.emptyMap();
+ }
+
+ @Override
+ public boolean hasMethodSourceMappings() {
+ return false;
+ }
+
+ @Override
+ public Map<String, String> getMethodSourceMapping() {
+ return Collections.emptyMap();
+ }
+
+ @Override
+ public boolean hasLineSourceMappings() {
+ return false;
+ }
+
+ @Override
+ public Map<String, String> getLineSourceMapping() {
+ return Collections.emptyMap();
+ }
+ }
+
+ /**
+ * Visitor which scans {@link StackTraceNode}s and accumulates class/method call identities.
+ */
+ class VisitorImpl implements Visitor {
+ private final ClassSourceLookup lookup;
+ private final ClassFinder classFinder = new ClassFinder();
+
+ private final SourcesMap<String> classSources = new SourcesMap<>(Function.identity());
+ private final SourcesMap<MethodCall> methodSources = new SourcesMap<>(MethodCall::toString);
+ private final SourcesMap<MethodCallByLine> lineSources = new SourcesMap<>(MethodCallByLine::toString);
+
+ VisitorImpl(ClassSourceLookup lookup) {
+ this.lookup = lookup;
+ }
+
+ @Override
+ public void visit(ThreadNode node) {
+ Queue<StackTraceNode> queue = new ArrayDeque<>(node.getChildren());
+ for (StackTraceNode n = queue.poll(); n != null; n = queue.poll()) {
+ visitStackNode(n);
+ queue.addAll(n.getChildren());
+ }
+ }
+
+ private void visitStackNode(StackTraceNode node) {
+ this.classSources.computeIfAbsent(
+ node.getClassName(),
+ className -> {
+ Class<?> clazz = this.classFinder.findClass(className);
+ if (clazz == null) {
+ return null;
+ }
+ return this.lookup.identify(clazz);
+ });
+
+ if (node.getMethodDescription() != null) {
+ MethodCall methodCall = new MethodCall(node.getClassName(), node.getMethodName(), node.getMethodDescription());
+ this.methodSources.computeIfAbsent(methodCall, this.lookup::identify);
+ } else {
+ MethodCallByLine methodCall = new MethodCallByLine(node.getClassName(), node.getMethodName(), node.getLineNumber());
+ this.lineSources.computeIfAbsent(methodCall, this.lookup::identify);
+ }
+ }
+
+ @Override
+ public boolean hasClassSourceMappings() {
+ return this.classSources.hasMappings();
+ }
+
+ @Override
+ public Map<String, String> getClassSourceMapping() {
+ return this.classSources.export();
+ }
+
+ @Override
+ public boolean hasMethodSourceMappings() {
+ return this.methodSources.hasMappings();
+ }
+
+ @Override
+ public Map<String, String> getMethodSourceMapping() {
+ return this.methodSources.export();
+ }
+
+ @Override
+ public boolean hasLineSourceMappings() {
+ return this.lineSources.hasMappings();
+ }
+
+ @Override
+ public Map<String, String> getLineSourceMapping() {
+ return this.lineSources.export();
+ }
+ }
+
+ final class SourcesMap<T> {
+ // <key> --> identifier (plugin name)
+ private final Map<T, String> map = new HashMap<>();
+ private final Function<? super T, String> keyToStringFunction;
+
+ private SourcesMap(Function<? super T, String> keyToStringFunction) {
+ this.keyToStringFunction = keyToStringFunction;
+ }
+
+ public void computeIfAbsent(T key, ComputeSourceFunction<T> function) {
+ if (!this.map.containsKey(key)) {
+ try {
+ this.map.put(key, function.compute(key));
+ } catch (Throwable e) {
+ this.map.put(key, null);
+ }
+ }
+ }
+
+ public boolean hasMappings() {
+ this.map.values().removeIf(Objects::isNull);
+ return !this.map.isEmpty();
+ }
+
+ public Map<String, String> export() {
+ this.map.values().removeIf(Objects::isNull);
+ if (this.keyToStringFunction.equals(Function.identity())) {
+ //noinspection unchecked
+ return (Map<String, String>) this.map;
+ } else {
+ return this.map.entrySet().stream().collect(Collectors.toMap(
+ e -> this.keyToStringFunction.apply(e.getKey()),
+ Map.Entry::getValue
+ ));
+ }
+ }
+
+ private interface ComputeSourceFunction<T> {
+ String compute(T key) throws Exception;
+ }
+ }
+
+ /**
+ * Encapsulates information about a given method call using the name + method description.
+ */
+ final class MethodCall {
+ private final String className;
+ private final String methodName;
+ private final String methodDescriptor;
+
+ public MethodCall(String className, String methodName, String methodDescriptor) {
+ this.className = className;
+ this.methodName = methodName;
+ this.methodDescriptor = methodDescriptor;
+ }
+
+ public String getClassName() {
+ return this.className;
+ }
+
+ public String getMethodName() {
+ return this.methodName;
+ }
+
+ public String getMethodDescriptor() {
+ return this.methodDescriptor;
+ }
+
+ @Override
+ public String toString() {
+ return this.className + ";" + this.methodName + ";" + this.methodDescriptor;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (!(o instanceof MethodCall)) return false;
+ MethodCall that = (MethodCall) o;
+ return this.className.equals(that.className) &&
+ this.methodName.equals(that.methodName) &&
+ this.methodDescriptor.equals(that.methodDescriptor);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(this.className, this.methodName, this.methodDescriptor);
+ }
+ }
+
+ /**
+ * Encapsulates information about a given method call using the name + line number.
+ */
+ final class MethodCallByLine {
+ private final String className;
+ private final String methodName;
+ private final int lineNumber;
+
+ public MethodCallByLine(String className, String methodName, int lineNumber) {
+ this.className = className;
+ this.methodName = methodName;
+ this.lineNumber = lineNumber;
+ }
+
+ public String getClassName() {
+ return this.className;
+ }
+
+ public String getMethodName() {
+ return this.methodName;
+ }
+
+ public int getLineNumber() {
+ return this.lineNumber;
+ }
+
+ @Override
+ public String toString() {
+ return this.className + ";" + this.lineNumber;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (!(o instanceof MethodCallByLine)) return false;
+ MethodCallByLine that = (MethodCallByLine) o;
+            return this.lineNumber == that.lineNumber && this.className.equals(that.className); // NOTE(review): methodName is excluded from equals/hashCode/toString — confirm this is intentional
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(this.className, this.lineNumber);
+ }
+ }
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/source/SourceMetadata.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/source/SourceMetadata.java
new file mode 100644
index 0000000..0808d66
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/source/SourceMetadata.java
@@ -0,0 +1,81 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.sampler.source;
+
+import com.google.common.collect.ImmutableList;
+
+import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata;
+
+import java.util.Collection;
+import java.util.List;
+import java.util.function.Function;
+
+/**
+ * A "source" is a plugin or mod on the platform that may be identified
+ * as a source of a method call in a profile.
+ */
+public class SourceMetadata {
+
+ public static <T> List<SourceMetadata> gather(Collection<T> sources, Function<? super T, String> nameFunction, Function<? super T, String> versionFunction, Function<? super T, String> authorFunction) {
+ ImmutableList.Builder<SourceMetadata> builder = ImmutableList.builder();
+
+ for (T source : sources) {
+ String name = nameFunction.apply(source);
+ String version = versionFunction.apply(source);
+ String author = authorFunction.apply(source);
+
+ SourceMetadata metadata = new SourceMetadata(name, version, author);
+ builder.add(metadata);
+ }
+
+ return builder.build();
+ }
+
+ private final String name;
+ private final String version;
+ private final String author;
+
+ public SourceMetadata(String name, String version, String author) {
+ this.name = name;
+ this.version = version;
+ this.author = author;
+ }
+
+ public String getName() {
+ return this.name;
+ }
+
+ public String getVersion() {
+ return this.version;
+ }
+
+ public String getAuthor() {
+ return this.author;
+ }
+
+ public SamplerMetadata.SourceMetadata toProto() {
+ return SamplerMetadata.SourceMetadata.newBuilder()
+ .setName(this.name)
+                .setVersion(this.version) // NOTE(review): 'author' is gathered but not serialised — confirm the proto has no such field yet
+ .build();
+ }
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/window/ProfilingWindowUtils.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/window/ProfilingWindowUtils.java
new file mode 100644
index 0000000..be6f08a
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/window/ProfilingWindowUtils.java
@@ -0,0 +1,70 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.sampler.window;
+
+import me.lucko.spark.common.sampler.aggregator.DataAggregator;
+
+import java.util.function.IntPredicate;
+
+public enum ProfilingWindowUtils {
+ ;
+
+ /**
+ * The size/duration of a profiling window in seconds.
+ * (1 window = 1 minute)
+ */
+ public static final int WINDOW_SIZE_SECONDS = 60;
+
+ /**
+ * The number of windows to record in continuous profiling before data is dropped.
+ * (60 windows * 1 minute = 1 hour of profiling data)
+ */
+ public static final int HISTORY_SIZE = Integer.getInteger("spark.continuousProfilingHistorySize", 60);
+
+ /**
+ * Gets the profiling window for the given time in unix-millis.
+ *
+ * @param time the time in milliseconds
+ * @return the window
+ */
+ public static int unixMillisToWindow(long time) {
+ return (int) (time / (WINDOW_SIZE_SECONDS * 1000L));
+ }
+
+ /**
+ * Gets the window at the current time.
+ *
+ * @return the window
+ */
+ public static int windowNow() {
+ return unixMillisToWindow(System.currentTimeMillis());
+ }
+
+ /**
+ * Gets a prune predicate that can be passed to {@link DataAggregator#pruneData(IntPredicate)}.
+     * @param currentWindow the current profiling window
+     * @return a predicate matching windows older than (currentWindow - HISTORY_SIZE)
+ */
+ public static IntPredicate keepHistoryBefore(int currentWindow) {
+ // windows that were earlier than (currentWindow minus history size) should be pruned
+ return window -> window < (currentWindow - HISTORY_SIZE);
+ }
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/window/ProtoTimeEncoder.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/window/ProtoTimeEncoder.java
new file mode 100644
index 0000000..03da075
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/window/ProtoTimeEncoder.java
@@ -0,0 +1,93 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.sampler.window;
+
+import me.lucko.spark.common.sampler.async.jfr.Dictionary;
+import me.lucko.spark.common.sampler.node.ThreadNode;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.atomic.LongAdder;
+import java.util.stream.IntStream;
+
+/**
+ * Encodes a map of int->double into a double array.
+ */
+public class ProtoTimeEncoder {
+ /** A sorted array of all possible keys to encode */
+ private final int[] keys;
+ /** A map of key value -> index in the keys array */
+ private final Map<Integer, Integer> keysToIndex;
+
+ public ProtoTimeEncoder(List<ThreadNode> sourceData) {
+ // get an array of all keys that show up in the source data
+ this.keys = sourceData.stream()
+ .map(n -> n.getTimeWindows().stream().mapToInt(i -> i))
+ .reduce(IntStream.empty(), IntStream::concat)
+ .distinct()
+ .sorted()
+ .toArray();
+
+ // construct a reverse index lookup
+ this.keysToIndex = new HashMap<>(this.keys.length);
+ for (int i = 0; i < this.keys.length; i++) {
+ this.keysToIndex.put(this.keys[i], i);
+ }
+ }
+
+ /**
+ * Gets an array of the keys that could be encoded by this encoder.
+ *
+ * @return an array of keys
+ */
+ public int[] getKeys() {
+ return this.keys;
+ }
+
+ /**
+     * Encodes a {@link Map} of times/durations into a double array.
+     *
+     * @param times a map of times (profiling window id -> duration in microseconds)
+ * @return the times encoded as a double array
+ */
+ public double[] encode(Map<Integer, LongAdder> times) {
+ // construct an array of values - length needs to exactly match the
+ // number of keys, even if some values are zero.
+ double[] array = new double[this.keys.length];
+
+ times.forEach((key, value) -> {
+ // get the index for the given key
+ Integer idx = this.keysToIndex.get(key);
+ if (idx == null) {
+ throw new RuntimeException("No index for key " + key + " in " + this.keysToIndex.keySet());
+ }
+
+ // convert the duration from microseconds -> milliseconds
+ double durationInMilliseconds = value.longValue() / 1000d;
+
+ // store in the array
+ array[idx] = durationInMilliseconds;
+ });
+
+ return array;
+ }
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/window/WindowStatisticsCollector.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/window/WindowStatisticsCollector.java
new file mode 100644
index 0000000..ce65013
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/window/WindowStatisticsCollector.java
@@ -0,0 +1,287 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.sampler.window;
+
+import me.lucko.spark.common.SparkPlatform;
+import me.lucko.spark.common.monitor.cpu.CpuMonitor;
+import me.lucko.spark.common.monitor.tick.TickStatistics;
+import me.lucko.spark.common.platform.world.AsyncWorldInfoProvider;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
+import me.lucko.spark.common.tick.TickHook;
+import me.lucko.spark.common.util.RollingAverage;
+import me.lucko.spark.proto.SparkProtos;
+
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.function.IntPredicate;
+
+/**
+ * Collects statistics for each profiling window.
+ */
/**
 * Collects statistics for each profiling window.
 */
public class WindowStatisticsCollector {
    // shared zeroed-out instance used to fill windows with no recorded stats
    private static final SparkProtos.WindowStatistics ZERO = SparkProtos.WindowStatistics.newBuilder().build();

    /** The platform */
    private final SparkPlatform platform;

    /** Map of profiling window -> statistics */
    private final Map<Integer, SparkProtos.WindowStatistics> stats;

    // counts ticks per window; remains null if tick counting was never started
    private TickCounter tickCounter;

    public WindowStatisticsCollector(SparkPlatform platform) {
        this.platform = platform;
        this.stats = new ConcurrentHashMap<>();
    }

    /**
     * Indicates to the statistics collector that it should count the number
     * of ticks in each window using the provided {@link TickHook}.
     *
     * @param hook the tick hook
     */
    public void startCountingTicks(TickHook hook) {
        this.tickCounter = new NormalTickCounter(this.platform, hook);
    }

    /**
     * Indicates to the statistics collector that it should count the number
     * of ticks in each window, according to how many times the
     * {@link ExplicitTickCounter#increment()} method is called.
     *
     * @param hook the tick hook
     * @return the counter
     */
    public ExplicitTickCounter startCountingTicksExplicit(TickHook hook) {
        ExplicitTickCounter counter = new ExplicitTickCounter(this.platform, hook);
        this.tickCounter = counter;
        return counter;
    }

    /**
     * Stops the tick counter, if one was started.
     */
    public void stop() {
        if (this.tickCounter != null) {
            this.tickCounter.stop();
        }
    }

    /**
     * Gets the total number of ticks that have passed between the time
     * when the profiler started and stopped.
     *
     * <p>Importantly, note that this metric is different to the total number of ticks in a window
     * (which is recorded by {@link SparkProtos.WindowStatistics#getTicks()}) or the total number
     * of observed ticks if the 'only-ticks-over' aggregator is being used
     * (which is recorded by {@link SparkProtos.WindowStatistics#getTicks()}
     * and {@link ExplicitTickCounter#getTotalCountedTicks()}.</p>
     *
     * @return the total number of ticks in the profile, or -1 if ticks were not counted
     */
    public int getTotalTicks() {
        return this.tickCounter == null ? -1 : this.tickCounter.getTotalTicks();
    }

    /**
     * Measures statistics for the given window if none have been recorded yet.
     *
     * @param window the window
     */
    public void measureNow(int window) {
        this.stats.computeIfAbsent(window, w -> measure());
    }

    /**
     * Ensures that the exported map has statistics (even if they are zeroed) for all windows.
     *
     * @param windows the expected windows
     */
    public void ensureHasStatisticsForAllWindows(int[] windows) {
        for (int window : windows) {
            this.stats.computeIfAbsent(window, w -> ZERO);
        }
    }

    /**
     * Removes statistics for the windows that match the given predicate.
     *
     * @param predicate returns true for windows whose statistics should be removed
     */
    public void pruneStatistics(IntPredicate predicate) {
        this.stats.keySet().removeIf(predicate::test);
    }

    /**
     * Exports the collected statistics.
     *
     * @return the (live) window -> statistics map
     */
    public Map<Integer, SparkProtos.WindowStatistics> export() {
        return this.stats;
    }

    /**
     * Measures current statistics, where possible averaging over the last minute. (1 min = 1 window)
     *
     * @return the current statistics
     */
    private SparkProtos.WindowStatistics measure() {
        SparkProtos.WindowStatistics.Builder builder = SparkProtos.WindowStatistics.newBuilder();

        // tps / mspt averages over the last minute, if the platform tracks them
        TickStatistics tickStatistics = this.platform.getTickStatistics();
        if (tickStatistics != null) {
            builder.setTps(tickStatistics.tps1Min());

            RollingAverage mspt = tickStatistics.duration1Min();
            if (mspt != null) {
                builder.setMsptMedian(mspt.median());
                builder.setMsptMax(mspt.max());
            }
        }

        // number of ticks counted since the previous window measurement
        if (this.tickCounter != null) {
            int ticks = this.tickCounter.getCountedTicksThisWindowAndReset();
            builder.setTicks(ticks);
        }

        builder.setCpuProcess(CpuMonitor.processLoad1MinAvg());
        builder.setCpuSystem(CpuMonitor.systemLoad1MinAvg());

        // player/entity/chunk counts queried from the platform
        // NOTE(review): a new AsyncWorldInfoProvider is constructed on every
        // measurement — confirm provider construction is cheap
        try {
            AsyncWorldInfoProvider worldInfoProvider = new AsyncWorldInfoProvider(this.platform, this.platform.getPlugin().createWorldInfoProvider());
            WorldInfoProvider.CountsResult counts = worldInfoProvider.getCounts();
            if (counts != null) {
                builder.setPlayers(counts.players());
                builder.setEntities(counts.entities());
                builder.setTileEntities(counts.tileEntities());
                builder.setChunks(counts.chunks());
            }
        } catch (Exception e) {
            e.printStackTrace();
        }

        return builder.build();
    }

    /**
     * Responsible for counting the number of ticks in a profile/window.
     */
    public interface TickCounter {

        /**
         * Stop the counter.
         */
        void stop();

        /**
         * Get the total number of ticks.
         *
         * <p>See {@link WindowStatisticsCollector#getTotalTicks()} for a longer explanation
         * of what this means exactly.</p>
         *
         * @return the total ticks
         */
        int getTotalTicks();

        /**
         * Gets the total number of ticks counted in the last window,
         * and resets the counter to zero.
         *
         * @return the number of ticks counted since the last time this method was called
         */
        int getCountedTicksThisWindowAndReset();
    }

    /**
     * Base implementation which derives the total tick count from the game
     * tick recorded at start and stop time.
     */
    private static abstract class BaseTickCounter implements TickCounter {
        protected final SparkPlatform platform;
        protected final TickHook tickHook;

        /** The game tick when sampling first began */
        private final int startTick;

        /** The game tick when sampling stopped */
        private int stopTick = -1;

        BaseTickCounter(SparkPlatform platform, TickHook tickHook) {
            this.platform = platform;
            this.tickHook = tickHook;
            this.startTick = this.tickHook.getCurrentTick();
        }

        @Override
        public void stop() {
            this.stopTick = this.tickHook.getCurrentTick();
        }

        @Override
        public int getTotalTicks() {
            if (this.startTick == -1) {
                throw new IllegalStateException("start tick not recorded");
            }
            if (this.stopTick == -1) {
                // stop() must be called before the total can be computed
                throw new IllegalStateException("stop tick not recorded");
            }

            return this.stopTick - this.startTick;
        }
    }

    /**
     * Counts the number of ticks in a window using a {@link TickHook}.
     */
    public static final class NormalTickCounter extends BaseTickCounter {
        // the game tick at the time of the previous window measurement
        private int last;

        NormalTickCounter(SparkPlatform platform, TickHook tickHook) {
            super(platform, tickHook);
            this.last = this.tickHook.getCurrentTick();
        }

        @Override
        public int getCountedTicksThisWindowAndReset() {
            synchronized (this) {
                // ticks elapsed since the previous call
                int now = this.tickHook.getCurrentTick();
                int ticks = now - this.last;
                this.last = now;
                return ticks;
            }
        }
    }

    /**
     * Counts the number of ticks in a window according to the number of times
     * {@link #increment()} is called.
     *
     * Used by the {@link me.lucko.spark.common.sampler.java.TickedDataAggregator}.
     */
    public static final class ExplicitTickCounter extends BaseTickCounter {
        // ticks counted in the current window (reset each window)
        private final AtomicInteger counted = new AtomicInteger();
        // ticks counted over the whole profile (never reset)
        private final AtomicInteger total = new AtomicInteger();

        ExplicitTickCounter(SparkPlatform platform, TickHook tickHook) {
            super(platform, tickHook);
        }

        public void increment() {
            this.counted.incrementAndGet();
            this.total.incrementAndGet();
        }

        public int getTotalCountedTicks() {
            return this.total.get();
        }

        @Override
        public int getCountedTicksThisWindowAndReset() {
            return this.counted.getAndSet(0);
        }
    }

}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/BytebinClient.java b/spark-common/src/main/java/me/lucko/spark/common/util/BytebinClient.java
index c2ca1b1..e69b94e 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/util/BytebinClient.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/BytebinClient.java
@@ -22,73 +22,66 @@ package me.lucko.spark.common.util;
import com.google.protobuf.AbstractMessageLite;
-import okhttp3.MediaType;
-import okhttp3.OkHttpClient;
-import okhttp3.Request;
-import okhttp3.RequestBody;
-import okhttp3.Response;
-
-import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
+import java.net.HttpURLConnection;
+import java.net.URL;
+import java.util.concurrent.TimeUnit;
+import java.util.function.Consumer;
import java.util.zip.GZIPOutputStream;
/**
* Utility for posting content to bytebin.
*/
-public class BytebinClient extends AbstractHttpClient {
+public class BytebinClient {
/** The bytebin URL */
private final String url;
/** The client user agent */
private final String userAgent;
- /**
- * Creates a new bytebin instance
- *
- * @param url the bytebin url
- * @param userAgent the client user agent string
- */
- public BytebinClient(OkHttpClient okHttpClient, String url, String userAgent) {
- super(okHttpClient);
+ public BytebinClient(String url, String userAgent) {
this.url = url + (url.endsWith("/") ? "" : "/");
this.userAgent = userAgent;
}
- /**
- * POSTs GZIP compressed content to bytebin.
- *
- * @param buf the compressed content
- * @param contentType the type of the content
- * @return the key of the resultant content
- * @throws IOException if an error occurs
- */
- public Content postContent(byte[] buf, MediaType contentType) throws IOException {
- RequestBody body = RequestBody.create(contentType, buf);
+ private Content postContent(String contentType, Consumer<OutputStream> consumer) throws IOException {
+ URL url = new URL(this.url + "post");
+ HttpURLConnection connection = (HttpURLConnection) url.openConnection();
+ try {
+ connection.setConnectTimeout((int) TimeUnit.SECONDS.toMillis(10));
+ connection.setReadTimeout((int) TimeUnit.SECONDS.toMillis(10));
+
+ connection.setDoOutput(true);
+ connection.setRequestMethod("POST");
+ connection.setRequestProperty("Content-Type", contentType);
+ connection.setRequestProperty("User-Agent", this.userAgent);
+ connection.setRequestProperty("Content-Encoding", "gzip");
- Request.Builder requestBuilder = new Request.Builder()
- .url(this.url + "post")
- .header("User-Agent", this.userAgent)
- .header("Content-Encoding", "gzip");
+ connection.connect();
+ try (OutputStream output = connection.getOutputStream()) {
+ consumer.accept(output);
+ }
- Request request = requestBuilder.post(body).build();
- try (Response response = makeHttpRequest(request)) {
- String key = response.header("Location");
+ String key = connection.getHeaderField("Location");
if (key == null) {
throw new IllegalStateException("Key not returned");
}
return new Content(key);
+ } finally {
+ connection.getInputStream().close();
+ connection.disconnect();
}
}
- public Content postContent(AbstractMessageLite<?, ?> proto, MediaType contentType) throws IOException {
- ByteArrayOutputStream byteOut = new ByteArrayOutputStream();
- try (OutputStream out = new GZIPOutputStream(byteOut)) {
- proto.writeTo(out);
- } catch (IOException e) {
- throw new RuntimeException(e);
- }
- return postContent(byteOut.toByteArray(), contentType);
    /**
     * GZIP-compresses and POSTs the given protobuf message to bytebin.
     *
     * @param proto the message to post
     * @param contentType the content type of the posted data
     * @return the key of the resultant content
     * @throws IOException if an error occurs
     */
    public Content postContent(AbstractMessageLite<?, ?> proto, String contentType) throws IOException {
        return postContent(contentType, outputStream -> {
            try (OutputStream out = new GZIPOutputStream(outputStream)) {
                proto.writeTo(out);
            } catch (IOException e) {
                // the Consumer callback cannot throw checked exceptions,
                // so tunnel IOExceptions out as unchecked
                throw new RuntimeException(e);
            }
        });
    }
public static final class Content {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/ClassSourceLookup.java b/spark-common/src/main/java/me/lucko/spark/common/util/ClassSourceLookup.java
deleted file mode 100644
index bd9ec37..0000000
--- a/spark-common/src/main/java/me/lucko/spark/common/util/ClassSourceLookup.java
+++ /dev/null
@@ -1,241 +0,0 @@
-/*
- * This file is part of spark.
- *
- * Copyright (c) lucko (Luck) <luck@lucko.me>
- * Copyright (c) contributors
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-package me.lucko.spark.common.util;
-
-import me.lucko.spark.common.sampler.node.StackTraceNode;
-import me.lucko.spark.common.sampler.node.ThreadNode;
-
-import org.checkerframework.checker.nullness.qual.Nullable;
-
-import java.io.IOException;
-import java.net.MalformedURLException;
-import java.net.URISyntaxException;
-import java.net.URL;
-import java.net.URLClassLoader;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.security.CodeSource;
-import java.security.ProtectionDomain;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Objects;
-
-/**
- * A function which defines the source of given {@link Class}es.
- */
-public interface ClassSourceLookup {
-
- /**
- * Identify the given class.
- *
- * @param clazz the class
- * @return the source of the class
- */
- @Nullable String identify(Class<?> clazz) throws Exception;
-
- /**
- * A no-operation {@link ClassSourceLookup}.
- */
- ClassSourceLookup NO_OP = new ClassSourceLookup() {
- @Override
- public @Nullable String identify(Class<?> clazz) {
- return null;
- }
- };
-
- /**
- * A {@link ClassSourceLookup} which identifies classes based on their {@link ClassLoader}.
- */
- abstract class ByClassLoader implements ClassSourceLookup {
-
- public abstract @Nullable String identify(ClassLoader loader) throws Exception;
-
- @Override
- public final @Nullable String identify(Class<?> clazz) throws Exception {
- ClassLoader loader = clazz.getClassLoader();
- while (loader != null) {
- String source = identify(loader);
- if (source != null) {
- return source;
- }
- loader = loader.getParent();
- }
- return null;
- }
- }
-
- /**
- * A {@link ClassSourceLookup} which identifies classes based on URL.
- */
- interface ByUrl extends ClassSourceLookup {
-
- default String identifyUrl(URL url) throws URISyntaxException, MalformedURLException {
- Path path = null;
-
- String protocol = url.getProtocol();
- if (protocol.equals("file")) {
- path = Paths.get(url.toURI());
- } else if (protocol.equals("jar")) {
- URL innerUrl = new URL(url.getPath());
- path = Paths.get(innerUrl.getPath().split("!")[0]);
- }
-
- if (path != null) {
- return identifyFile(path.toAbsolutePath().normalize());
- }
-
- return null;
- }
-
- default String identifyFile(Path path) {
- return identifyFileName(path.getFileName().toString());
- }
-
- default String identifyFileName(String fileName) {
- return fileName.endsWith(".jar") ? fileName.substring(0, fileName.length() - 4) : null;
- }
- }
-
- /**
- * A {@link ClassSourceLookup} which identifies classes based on the first URL in a {@link URLClassLoader}.
- */
- class ByFirstUrlSource extends ByClassLoader implements ByUrl {
- @Override
- public @Nullable String identify(ClassLoader loader) throws IOException, URISyntaxException {
- if (loader instanceof URLClassLoader) {
- URLClassLoader urlClassLoader = (URLClassLoader) loader;
- URL[] urls = urlClassLoader.getURLs();
- if (urls.length == 0) {
- return null;
- }
- return identifyUrl(urls[0]);
- }
- return null;
- }
- }
-
- /**
- * A {@link ClassSourceLookup} which identifies classes based on their {@link ProtectionDomain#getCodeSource()}.
- */
- class ByCodeSource implements ClassSourceLookup, ByUrl {
- @Override
- public @Nullable String identify(Class<?> clazz) throws URISyntaxException, MalformedURLException {
- ProtectionDomain protectionDomain = clazz.getProtectionDomain();
- if (protectionDomain == null) {
- return null;
- }
- CodeSource codeSource = protectionDomain.getCodeSource();
- if (codeSource == null) {
- return null;
- }
-
- URL url = codeSource.getLocation();
- return url == null ? null : identifyUrl(url);
- }
- }
-
- interface Visitor {
- void visit(ThreadNode node);
-
- boolean hasMappings();
-
- Map<String, String> getMapping();
- }
-
- static Visitor createVisitor(ClassSourceLookup lookup) {
- if (lookup == ClassSourceLookup.NO_OP) {
- return NoOpVisitor.INSTANCE; // don't bother!
- }
- return new VisitorImpl(lookup);
- }
-
- enum NoOpVisitor implements Visitor {
- INSTANCE;
-
- @Override
- public void visit(ThreadNode node) {
-
- }
-
- @Override
- public boolean hasMappings() {
- return false;
- }
-
- @Override
- public Map<String, String> getMapping() {
- return Collections.emptyMap();
- }
- }
-
- /**
- * Visitor which scans {@link StackTraceNode}s and accumulates class identities.
- */
- class VisitorImpl implements Visitor {
- private final ClassSourceLookup lookup;
- private final ClassFinder classFinder = new ClassFinder();
-
- // class name --> identifier (plugin name)
- private final Map<String, String> map = new HashMap<>();
-
- VisitorImpl(ClassSourceLookup lookup) {
- this.lookup = lookup;
- }
-
- @Override
- public void visit(ThreadNode node) {
- for (StackTraceNode child : node.getChildren()) {
- visitStackNode(child);
- }
- }
-
- @Override
- public boolean hasMappings() {
- return !this.map.isEmpty();
- }
-
- @Override
- public Map<String, String> getMapping() {
- this.map.values().removeIf(Objects::isNull);
- return this.map;
- }
-
- private void visitStackNode(StackTraceNode node) {
- String className = node.getClassName();
- if (!this.map.containsKey(className)) {
- try {
- Class<?> clazz = this.classFinder.findClass(className);
- Objects.requireNonNull(clazz);
- this.map.put(className, this.lookup.identify(clazz));
- } catch (Throwable e) {
- this.map.put(className, null);
- }
- }
-
- // recursively
- for (StackTraceNode child : node.getChildren()) {
- visitStackNode(child);
- }
- }
- }
-
-}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/Compression.java b/spark-common/src/main/java/me/lucko/spark/common/util/Compression.java
index 9295c25..c8100e1 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/util/Compression.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/Compression.java
@@ -20,10 +20,6 @@
package me.lucko.spark.common.util;
-import org.tukaani.xz.LZMA2Options;
-import org.tukaani.xz.LZMAOutputStream;
-import org.tukaani.xz.XZOutputStream;
-
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
@@ -46,35 +42,35 @@ public enum Compression {
}
return compressedFile;
}
- },
- XZ {
- @Override
- public Path compress(Path file, LongConsumer progressHandler) throws IOException {
- Path compressedFile = file.getParent().resolve(file.getFileName().toString() + ".xz");
- try (InputStream in = Files.newInputStream(file)) {
- try (OutputStream out = Files.newOutputStream(compressedFile)) {
- try (XZOutputStream compressionOut = new XZOutputStream(out, new LZMA2Options())) {
- copy(in, compressionOut, progressHandler);
- }
- }
- }
- return compressedFile;
- }
- },
- LZMA {
- @Override
- public Path compress(Path file, LongConsumer progressHandler) throws IOException {
- Path compressedFile = file.getParent().resolve(file.getFileName().toString() + ".lzma");
- try (InputStream in = Files.newInputStream(file)) {
- try (OutputStream out = Files.newOutputStream(compressedFile)) {
- try (LZMAOutputStream compressionOut = new LZMAOutputStream(out, new LZMA2Options(), true)) {
- copy(in, compressionOut, progressHandler);
- }
- }
- }
- return compressedFile;
- }
};
+ // XZ {
+ // @Override
+ // public Path compress(Path file, LongConsumer progressHandler) throws IOException {
+ // Path compressedFile = file.getParent().resolve(file.getFileName().toString() + ".xz");
+ // try (InputStream in = Files.newInputStream(file)) {
+ // try (OutputStream out = Files.newOutputStream(compressedFile)) {
+ // try (XZOutputStream compressionOut = new XZOutputStream(out, new LZMA2Options())) {
+ // copy(in, compressionOut, progressHandler);
+ // }
+ // }
+ // }
+ // return compressedFile;
+ // }
+ // },
+ // LZMA {
+ // @Override
+ // public Path compress(Path file, LongConsumer progressHandler) throws IOException {
+ // Path compressedFile = file.getParent().resolve(file.getFileName().toString() + ".lzma");
+ // try (InputStream in = Files.newInputStream(file)) {
+ // try (OutputStream out = Files.newOutputStream(compressedFile)) {
+ // try (LZMAOutputStream compressionOut = new LZMAOutputStream(out, new LZMA2Options(), true)) {
+ // copy(in, compressionOut, progressHandler);
+ // }
+ // }
+ // }
+ // return compressedFile;
+ // }
+ // };
public abstract Path compress(Path file, LongConsumer progressHandler) throws IOException;
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/Configuration.java b/spark-common/src/main/java/me/lucko/spark/common/util/Configuration.java
index 7588645..32f3bc6 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/util/Configuration.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/Configuration.java
@@ -20,32 +20,58 @@
package me.lucko.spark.common.util;
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
-import com.google.gson.JsonParser;
import com.google.gson.JsonPrimitive;
import java.io.BufferedReader;
+import java.io.BufferedWriter;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
public final class Configuration {
- private static final JsonParser PARSER = new JsonParser();
+ private static final Gson GSON = new GsonBuilder().setPrettyPrinting().create();
- private final JsonObject root;
+ private final Path file;
+ private JsonObject root;
public Configuration(Path file) {
+ this.file = file;
+ load();
+ }
+
+ public void load() {
JsonObject root = null;
- if (Files.exists(file)) {
- try (BufferedReader reader = Files.newBufferedReader(file, StandardCharsets.UTF_8)) {
- root = PARSER.parse(reader).getAsJsonObject();
+ if (Files.exists(this.file)) {
+ try (BufferedReader reader = Files.newBufferedReader(this.file, StandardCharsets.UTF_8)) {
+ root = GSON.fromJson(reader, JsonObject.class);
} catch (IOException e) {
e.printStackTrace();
}
}
- this.root = root != null ? root : new JsonObject();
+ if (root == null) {
+ root = new JsonObject();
+ root.addProperty("_header", "spark configuration file - https://spark.lucko.me/docs/Configuration");
+ }
+ this.root = root;
+ }
+
+ public void save() {
+ try {
+ Files.createDirectories(this.file.getParent());
+ } catch (IOException e) {
+ // ignore
+ }
+
+ try (BufferedWriter writer = Files.newBufferedWriter(this.file, StandardCharsets.UTF_8)) {
+ GSON.toJson(this.root, writer);
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
}
public String getString(String path, String def) {
@@ -67,4 +93,34 @@ public final class Configuration {
return val.isBoolean() ? val.getAsBoolean() : def;
}
+ public int getInteger(String path, int def) {
+ JsonElement el = this.root.get(path);
+ if (el == null || !el.isJsonPrimitive()) {
+ return def;
+ }
+
+ JsonPrimitive val = el.getAsJsonPrimitive();
+ return val.isBoolean() ? val.getAsInt() : def;
+ }
+
    /** Sets a string value (in memory only; call save() to persist). */
    public void setString(String path, String value) {
        this.root.add(path, new JsonPrimitive(value));
    }

    /** Sets a boolean value (in memory only; call save() to persist). */
    public void setBoolean(String path, boolean value) {
        this.root.add(path, new JsonPrimitive(value));
    }

    /** Sets an integer value (in memory only; call save() to persist). */
    public void setInteger(String path, int value) {
        this.root.add(path, new JsonPrimitive(value));
    }

    /** Returns true if the configuration contains a value for the given key. */
    public boolean contains(String path) {
        return this.root.has(path);
    }

    /** Removes the value for the given key, if present (in memory only). */
    public void remove(String path) {
        this.root.remove(path);
    }
+
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/FormatUtil.java b/spark-common/src/main/java/me/lucko/spark/common/util/FormatUtil.java
index c4a3d66..1ee3b0f 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/util/FormatUtil.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/FormatUtil.java
@@ -62,4 +62,24 @@ public enum FormatUtil {
.append(Component.text(unit))
.build();
}
+
+ public static String formatSeconds(long seconds) {
+ if (seconds <= 0) {
+ return "0s";
+ }
+
+ long second = seconds;
+ long minute = second / 60;
+ second = second % 60;
+
+ StringBuilder sb = new StringBuilder();
+ if (minute != 0) {
+ sb.append(minute).append("m ");
+ }
+ if (second != 0) {
+ sb.append(second).append("s ");
+ }
+
+ return sb.toString().trim();
+ }
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/AbstractHttpClient.java b/spark-common/src/main/java/me/lucko/spark/common/util/IndexedListBuilder.java
index 8ece3d4..b2315f9 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/util/AbstractHttpClient.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/IndexedListBuilder.java
@@ -20,27 +20,24 @@
package me.lucko.spark.common.util;
-import okhttp3.OkHttpClient;
-import okhttp3.Request;
-import okhttp3.Response;
+import java.util.ArrayList;
+import java.util.List;
-import java.io.IOException;
-
-public class AbstractHttpClient {
-
- /** The http client */
- protected final OkHttpClient okHttp;
+/**
+ * List builder that returns the index of the inserted element.
+ *
+ * @param <T> generic type
+ */
+public class IndexedListBuilder<T> {
+ private int i = 0;
+ private final List<T> nodes = new ArrayList<>();
- public AbstractHttpClient(OkHttpClient okHttp) {
- this.okHttp = okHttp;
+ public int add(T node) {
+ this.nodes.add(node);
+ return this.i++;
}
- protected Response makeHttpRequest(Request request) throws IOException {
- Response response = this.okHttp.newCall(request).execute();
- if (!response.isSuccessful()) {
- response.close();
- throw new RuntimeException("Request was unsuccessful: " + response.code() + " - " + response.message());
- }
- return response;
+ public List<T> build() {
+ return this.nodes;
}
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/SparkPlaceholder.java b/spark-common/src/main/java/me/lucko/spark/common/util/SparkPlaceholder.java
new file mode 100644
index 0000000..be5bbc2
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/SparkPlaceholder.java
@@ -0,0 +1,191 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.util;
+
+import me.lucko.spark.common.SparkPlatform;
+import me.lucko.spark.common.monitor.cpu.CpuMonitor;
+import me.lucko.spark.common.monitor.tick.TickStatistics;
+
+import net.kyori.adventure.text.Component;
+import net.kyori.adventure.text.TextComponent;
+import net.kyori.adventure.text.serializer.legacy.LegacyComponentSerializer;
+
+import java.util.Locale;
+import java.util.function.BiFunction;
+
/**
 * Standard spark placeholders, resolvable against a {@link SparkPlatform}.
 */
public enum SparkPlaceholder {

    // ticks-per-second; arg selects the averaging window, null = all windows
    TPS((platform, arg) -> {
        TickStatistics tickStatistics = platform.getTickStatistics();
        if (tickStatistics == null) {
            return null;
        }

        if (arg == null) {
            return Component.text()
                    .append(StatisticFormatter.formatTps(tickStatistics.tps5Sec())).append(Component.text(", "))
                    .append(StatisticFormatter.formatTps(tickStatistics.tps10Sec())).append(Component.text(", "))
                    .append(StatisticFormatter.formatTps(tickStatistics.tps1Min())).append(Component.text(", "))
                    .append(StatisticFormatter.formatTps(tickStatistics.tps5Min())).append(Component.text(", "))
                    .append(StatisticFormatter.formatTps(tickStatistics.tps15Min()))
                    .build();
        }

        switch (arg) {
            case "5s":
                return StatisticFormatter.formatTps(tickStatistics.tps5Sec());
            case "10s":
                return StatisticFormatter.formatTps(tickStatistics.tps10Sec());
            case "1m":
                return StatisticFormatter.formatTps(tickStatistics.tps1Min());
            case "5m":
                return StatisticFormatter.formatTps(tickStatistics.tps5Min());
            case "15m":
                return StatisticFormatter.formatTps(tickStatistics.tps15Min());
        }

        // unrecognised argument
        return null;
    }),

    // tick duration (mspt); only available if the platform supports it
    TICKDURATION((platform, arg) -> {
        TickStatistics tickStatistics = platform.getTickStatistics();
        if (tickStatistics == null || !tickStatistics.isDurationSupported()) {
            return null;
        }

        if (arg == null) {
            return Component.text()
                    .append(StatisticFormatter.formatTickDurations(tickStatistics.duration10Sec())).append(Component.text(";  "))
                    .append(StatisticFormatter.formatTickDurations(tickStatistics.duration1Min()))
                    .build();
        }

        switch (arg) {
            case "10s":
                return StatisticFormatter.formatTickDurations(tickStatistics.duration10Sec());
            case "1m":
                return StatisticFormatter.formatTickDurations(tickStatistics.duration1Min());
        }

        // unrecognised argument
        return null;
    }),

    // system-wide cpu usage; arg selects the averaging window
    CPU_SYSTEM((platform, arg) -> {
        if (arg == null) {
            return Component.text()
                    .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad10SecAvg())).append(Component.text(", "))
                    .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad1MinAvg())).append(Component.text(", "))
                    .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad15MinAvg()))
                    .build();
        }

        switch (arg) {
            case "10s":
                return StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad10SecAvg());
            case "1m":
                return StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad1MinAvg());
            case "15m":
                return StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad15MinAvg());
        }

        // unrecognised argument
        return null;
    }),

    // cpu usage of this process; arg selects the averaging window
    CPU_PROCESS((platform, arg) -> {
        if (arg == null) {
            return Component.text()
                    .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad10SecAvg())).append(Component.text(", "))
                    .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad1MinAvg())).append(Component.text(", "))
                    .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad15MinAvg()))
                    .build();
        }

        switch (arg) {
            case "10s":
                return StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad10SecAvg());
            case "1m":
                return StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad1MinAvg());
            case "15m":
                return StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad15MinAvg());
        }

        // unrecognised argument
        return null;
    });

    /** The placeholder name (the enum name, lowercased) */
    private final String name;
    /** Resolves the placeholder: (platform, argument or null) -> component, or null */
    private final BiFunction<SparkPlatform, String, TextComponent> function;

    SparkPlaceholder(BiFunction<SparkPlatform, String, TextComponent> function) {
        this.name = name().toLowerCase(Locale.ROOT);
        this.function = function;
    }

    public String getName() {
        return this.name;
    }

    /**
     * Resolves this placeholder against the given platform.
     *
     * @param platform the platform
     * @param arg the placeholder argument, or null for the default output
     * @return the resolved component, or null if unavailable
     */
    public TextComponent resolve(SparkPlatform platform, String arg) {
        return this.function.apply(platform, arg);
    }

    /**
     * Resolves a full underscore-delimited placeholder string,
     * e.g. "tps", "tps_5s", "tickduration_1m", "cpu_system_10s".
     *
     * @param platform the platform
     * @param placeholder the placeholder string
     * @return the resolved component, or null if the placeholder is unrecognised
     */
    public static TextComponent resolveComponent(SparkPlatform platform, String placeholder) {
        String[] parts = placeholder.split("_");

        if (parts.length == 0) {
            return null;
        }

        String label = parts[0];

        if (label.equals("tps")) {
            String arg = parts.length < 2 ? null : parts[1];
            return TPS.resolve(platform, arg);
        }

        if (label.equals("tickduration")) {
            String arg = parts.length < 2 ? null : parts[1];
            return TICKDURATION.resolve(platform, arg);
        }

        // cpu placeholders carry a mandatory type ("system"/"process")
        // and an optional window argument
        if (label.equals("cpu") && parts.length >= 2) {
            String type = parts[1];
            String arg = parts.length < 3 ? null : parts[2];

            if (type.equals("system")) {
                return CPU_SYSTEM.resolve(platform, arg);
            }
            if (type.equals("process")) {
                return CPU_PROCESS.resolve(platform, arg);
            }
        }

        return null;
    }

    /**
     * Resolves a placeholder to a legacy (section-char) formatted string.
     *
     * @param platform the platform
     * @param placeholder the placeholder string
     * @return the legacy-formatted string, or null if unresolvable
     */
    public static String resolveFormattingCode(SparkPlatform platform, String placeholder) {
        TextComponent result = resolveComponent(platform, placeholder);
        if (result == null) {
            return null;
        }
        return LegacyComponentSerializer.legacySection().serialize(result);
    }

}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/TemporaryFiles.java b/spark-common/src/main/java/me/lucko/spark/common/util/TemporaryFiles.java
index 8a4a621..91a474c 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/util/TemporaryFiles.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/TemporaryFiles.java
@@ -20,10 +20,18 @@
package me.lucko.spark.common.util;
+import com.google.common.collect.ImmutableList;
+
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.Path;
+import java.nio.file.attribute.FileAttribute;
+import java.nio.file.attribute.PosixFilePermission;
+import java.nio.file.attribute.PosixFilePermissions;
import java.util.Collections;
+import java.util.EnumSet;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
@@ -32,23 +40,47 @@ import java.util.Set;
* Utility for handling temporary files.
*/
public final class TemporaryFiles {
- private TemporaryFiles() {}
- private static final Set<Path> DELETE_SET = Collections.synchronizedSet(new HashSet<>());
+ // File attributes used when creating temporary files: owner read/write only
+ // on POSIX file systems, or no explicit attributes (platform defaults) when
+ // the default file system does not support the "posix" attribute view
+ // (e.g. Windows).
+ public static final FileAttribute<?>[] OWNER_ONLY_FILE_PERMISSIONS;
+
+ static {
+ boolean isPosix = FileSystems.getDefault().supportedFileAttributeViews().contains("posix");
+ if (isPosix) {
+ OWNER_ONLY_FILE_PERMISSIONS = new FileAttribute[]{PosixFilePermissions.asFileAttribute(EnumSet.of(
+ PosixFilePermission.OWNER_READ, PosixFilePermission.OWNER_WRITE
+ ))};
+ } else {
+ OWNER_ONLY_FILE_PERMISSIONS = new FileAttribute[0];
+ }
+ }
+
+ private final Path tmpDirectory;
+ private final Set<Path> files = Collections.synchronizedSet(new HashSet<>());
- public static Path create(String prefix, String suffix) throws IOException {
- return register(Files.createTempFile(prefix, suffix));
+ // Creates a manager rooted at the given directory; the directory is created
+ // lazily on first use (see ensureDirectoryIsReady), not here.
+ public TemporaryFiles(Path tmpDirectory) {
+ this.tmpDirectory = tmpDirectory;
 }
- public static Path register(Path path) {
+ // Creates a new temporary file and registers it for cleanup. Prefers spark's
+ // own tmp directory (with owner-only permissions where supported); falls back
+ // to the OS temp dir when that directory cannot be prepared or when the
+ // "spark.useOsTmpDir" system property is set.
+ public Path create(String prefix, String suffix) throws IOException {
+ Path file;
+ if (ensureDirectoryIsReady()) {
+ // name uniqueness relies on System.nanoTime() — assumed sufficient for
+ // this single-process use; Files.createFile throws if the name collides
+ String name = prefix + Long.toHexString(System.nanoTime()) + suffix;
+ file = Files.createFile(this.tmpDirectory.resolve(name), OWNER_ONLY_FILE_PERMISSIONS);
+ } else {
+ file = Files.createTempFile(prefix, suffix);
+ }
+ return register(file);
+ }
+
+ // Registers a path for cleanup: both via the JVM shutdown hook (deleteOnExit)
+ // and via the tracked set consumed by deleteTemporaryFiles().
+ public Path register(Path path) {
 path.toFile().deleteOnExit();
- DELETE_SET.add(path);
+ this.files.add(path);
 return path;
 }
- public static void deleteTemporaryFiles() {
- synchronized (DELETE_SET) {
- for (Iterator<Path> iterator = DELETE_SET.iterator(); iterator.hasNext(); ) {
+ public void deleteTemporaryFiles() {
+ synchronized (this.files) {
+ for (Iterator<Path> iterator = this.files.iterator(); iterator.hasNext(); ) {
Path path = iterator.next();
try {
Files.deleteIfExists(path);
@@ -60,4 +92,35 @@ public final class TemporaryFiles {
}
}
+ // Ensures spark's own tmp directory exists and is usable.
+ // Returns true when temporary files should be created there, or false to
+ // signal the caller (create) to fall back to the OS temp directory.
+ private boolean ensureDirectoryIsReady() {
+ // explicit opt-out: force the OS temp dir
+ if (Boolean.parseBoolean(System.getProperty("spark.useOsTmpDir", "false"))) {
+ return false;
+ }
+
+ if (Files.isDirectory(this.tmpDirectory)) {
+ return true;
+ }
+
+ try {
+ Files.createDirectories(this.tmpDirectory);
+
+ // drop an explanatory note so server admins know what this folder is for
+ Files.write(this.tmpDirectory.resolve("about.txt"), ImmutableList.of(
+ "# What is this directory?",
+ "",
+ "* In order to perform certain functions, spark sometimes needs to write temporary data to the disk. ",
+ "* Previously, a temporary directory provided by the operating system was used for this purpose. ",
+ "* However, this proved to be unreliable in some circumstances, so spark now stores temporary data here instead!",
+ "",
+ "spark will automatically cleanup the contents of this directory. " ,
+ "(but if for some reason it doesn't, if the server is stopped, you can freely delete any files ending in .tmp)",
+ "",
+ "tl;dr: spark uses this folder to store some temporary data."
+ ), StandardCharsets.UTF_8);
+
+ return true;
+ } catch (IOException e) {
+ // best-effort: if the directory can't be prepared, fall back to OS temp
+ return false;
+ }
+ }
+
+
}