From 7d6808cbcfbb0f61f93e536d36968eeda5bd302c Mon Sep 17 00:00:00 2001 From: Luck Date: Wed, 6 Jun 2018 15:39:16 +0100 Subject: Convert to Gradle --- .gitignore | 168 +--------- build.gradle | 30 ++ bukkit/pom.xml | 62 ---- .../me/lucko/spark/bukkit/BukkitTickCounter.java | 55 --- .../me/lucko/spark/bukkit/SparkBukkitPlugin.java | 73 ---- bukkit/src/main/resources/plugin.yml | 10 - bungeecord/pom.xml | 55 --- .../spark/bungeecord/SparkBungeeCordPlugin.java | 82 ----- bungeecord/src/main/resources/bungee.yml | 5 - common/pom.xml | 63 ---- .../java/me/lucko/spark/common/CommandHandler.java | 372 --------------------- .../java/me/lucko/spark/common/TickMonitor.java | 88 ----- .../java/me/lucko/spark/common/http/Bytebin.java | 54 --- .../me/lucko/spark/common/http/HttpClient.java | 113 ------- .../lucko/spark/profiler/AsyncDataAggregator.java | 77 ----- .../me/lucko/spark/profiler/DataAggregator.java | 32 -- .../main/java/me/lucko/spark/profiler/Sampler.java | 170 ---------- .../me/lucko/spark/profiler/SamplerBuilder.java | 63 ---- .../java/me/lucko/spark/profiler/StackNode.java | 141 -------- .../me/lucko/spark/profiler/StackTraceNode.java | 71 ---- .../java/me/lucko/spark/profiler/ThreadDumper.java | 77 ----- .../me/lucko/spark/profiler/ThreadGrouper.java | 52 --- .../java/me/lucko/spark/profiler/TickCounter.java | 39 --- .../lucko/spark/profiler/TickedDataAggregator.java | 147 -------- gradle/wrapper/gradle-wrapper.jar | Bin 0 -> 54413 bytes gradle/wrapper/gradle-wrapper.properties | 5 + gradlew | 172 ++++++++++ gradlew.bat | 84 +++++ pom.xml | 108 ------ settings.gradle | 2 + spark-bukkit/build.gradle | 11 + .../me/lucko/spark/bukkit/BukkitTickCounter.java | 55 +++ .../me/lucko/spark/bukkit/SparkBukkitPlugin.java | 73 ++++ spark-bukkit/src/main/resources/plugin.yml | 10 + spark-bungeecord/build.gradle | 11 + .../spark/bungeecord/SparkBungeeCordPlugin.java | 82 +++++ spark-bungeecord/src/main/resources/bungee.yml | 5 + spark-common/build.gradle | 6 + .../java/me/lucko/spark/common/CommandHandler.java | 372 +++++++++++++++++++++ .../java/me/lucko/spark/common/TickMonitor.java | 88 +++++ .../java/me/lucko/spark/common/http/Bytebin.java | 54 +++ .../me/lucko/spark/common/http/HttpClient.java | 113 +++++++ .../lucko/spark/profiler/AsyncDataAggregator.java | 77 +++++ .../me/lucko/spark/profiler/DataAggregator.java | 32 ++ .../main/java/me/lucko/spark/profiler/Sampler.java | 170 ++++++++++ .../me/lucko/spark/profiler/SamplerBuilder.java | 63 ++++ .../java/me/lucko/spark/profiler/StackNode.java | 141 ++++++++ .../me/lucko/spark/profiler/StackTraceNode.java | 71 ++++ .../java/me/lucko/spark/profiler/ThreadDumper.java | 77 +++++ .../me/lucko/spark/profiler/ThreadGrouper.java | 52 +++ .../java/me/lucko/spark/profiler/TickCounter.java | 39 +++ .../lucko/spark/profiler/TickedDataAggregator.java | 147 ++++++++ spark-sponge/build.gradle | 14 + .../me/lucko/spark/sponge/SparkSpongePlugin.java | 145 ++++++++ .../me/lucko/spark/sponge/SpongeTickCounter.java | 54 +++ spark-universal/build.gradle | 29 ++ sponge/pom.xml | 75 ----- .../me/lucko/spark/sponge/utils/PomData.java | 8 - .../me/lucko/spark/sponge/SparkSpongePlugin.java | 146 -------- .../me/lucko/spark/sponge/SpongeTickCounter.java | 54 --- universal/pom.xml | 102 ------ 61 files changed, 2289 insertions(+), 2557 deletions(-) create mode 100644 build.gradle delete mode 100644 bukkit/pom.xml delete mode 100644 bukkit/src/main/java/me/lucko/spark/bukkit/BukkitTickCounter.java delete mode 100644 
bukkit/src/main/java/me/lucko/spark/bukkit/SparkBukkitPlugin.java delete mode 100644 bukkit/src/main/resources/plugin.yml delete mode 100644 bungeecord/pom.xml delete mode 100644 bungeecord/src/main/java/me/lucko/spark/bungeecord/SparkBungeeCordPlugin.java delete mode 100644 bungeecord/src/main/resources/bungee.yml delete mode 100644 common/pom.xml delete mode 100644 common/src/main/java/me/lucko/spark/common/CommandHandler.java delete mode 100644 common/src/main/java/me/lucko/spark/common/TickMonitor.java delete mode 100644 common/src/main/java/me/lucko/spark/common/http/Bytebin.java delete mode 100644 common/src/main/java/me/lucko/spark/common/http/HttpClient.java delete mode 100644 common/src/main/java/me/lucko/spark/profiler/AsyncDataAggregator.java delete mode 100644 common/src/main/java/me/lucko/spark/profiler/DataAggregator.java delete mode 100644 common/src/main/java/me/lucko/spark/profiler/Sampler.java delete mode 100644 common/src/main/java/me/lucko/spark/profiler/SamplerBuilder.java delete mode 100644 common/src/main/java/me/lucko/spark/profiler/StackNode.java delete mode 100644 common/src/main/java/me/lucko/spark/profiler/StackTraceNode.java delete mode 100644 common/src/main/java/me/lucko/spark/profiler/ThreadDumper.java delete mode 100644 common/src/main/java/me/lucko/spark/profiler/ThreadGrouper.java delete mode 100644 common/src/main/java/me/lucko/spark/profiler/TickCounter.java delete mode 100644 common/src/main/java/me/lucko/spark/profiler/TickedDataAggregator.java create mode 100644 gradle/wrapper/gradle-wrapper.jar create mode 100644 gradle/wrapper/gradle-wrapper.properties create mode 100644 gradlew create mode 100644 gradlew.bat delete mode 100644 pom.xml create mode 100644 settings.gradle create mode 100644 spark-bukkit/build.gradle create mode 100644 spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitTickCounter.java create mode 100644 spark-bukkit/src/main/java/me/lucko/spark/bukkit/SparkBukkitPlugin.java create mode 100644 spark-bukkit/src/main/resources/plugin.yml create mode 100644 spark-bungeecord/build.gradle create mode 100644 spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/SparkBungeeCordPlugin.java create mode 100644 spark-bungeecord/src/main/resources/bungee.yml create mode 100644 spark-common/build.gradle create mode 100644 spark-common/src/main/java/me/lucko/spark/common/CommandHandler.java create mode 100644 spark-common/src/main/java/me/lucko/spark/common/TickMonitor.java create mode 100644 spark-common/src/main/java/me/lucko/spark/common/http/Bytebin.java create mode 100644 spark-common/src/main/java/me/lucko/spark/common/http/HttpClient.java create mode 100644 spark-common/src/main/java/me/lucko/spark/profiler/AsyncDataAggregator.java create mode 100644 spark-common/src/main/java/me/lucko/spark/profiler/DataAggregator.java create mode 100644 spark-common/src/main/java/me/lucko/spark/profiler/Sampler.java create mode 100644 spark-common/src/main/java/me/lucko/spark/profiler/SamplerBuilder.java create mode 100644 spark-common/src/main/java/me/lucko/spark/profiler/StackNode.java create mode 100644 spark-common/src/main/java/me/lucko/spark/profiler/StackTraceNode.java create mode 100644 spark-common/src/main/java/me/lucko/spark/profiler/ThreadDumper.java create mode 100644 spark-common/src/main/java/me/lucko/spark/profiler/ThreadGrouper.java create mode 100644 spark-common/src/main/java/me/lucko/spark/profiler/TickCounter.java create mode 100644 spark-common/src/main/java/me/lucko/spark/profiler/TickedDataAggregator.java create mode 100644 
spark-sponge/build.gradle create mode 100644 spark-sponge/src/main/java/me/lucko/spark/sponge/SparkSpongePlugin.java create mode 100644 spark-sponge/src/main/java/me/lucko/spark/sponge/SpongeTickCounter.java create mode 100644 spark-universal/build.gradle delete mode 100644 sponge/pom.xml delete mode 100644 sponge/src/main/java-templates/me/lucko/spark/sponge/utils/PomData.java delete mode 100644 sponge/src/main/java/me/lucko/spark/sponge/SparkSpongePlugin.java delete mode 100644 sponge/src/main/java/me/lucko/spark/sponge/SpongeTickCounter.java delete mode 100644 universal/pom.xml diff --git a/.gitignore b/.gitignore index ba4c623..70e36dc 100644 --- a/.gitignore +++ b/.gitignore @@ -1,164 +1,6 @@ -# Created by https://www.gitignore.io/ - -### Intellij ### -# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and Webstorm -# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 - -# User-specific stuff: -.idea/ -*.iws +/.gradle/ +/.idea/ +/build/ /out/ -*.iml -.idea_modules/ - -# JIRA plugin -atlassian-ide-plugin.xml - -# Crashlytics plugin (for Android Studio and IntelliJ) -com_crashlytics_export_strings.xml -crashlytics.properties -crashlytics-build.properties -fabric.properties - - -### Maven ### -target/ -pom.xml.tag -pom.xml.releaseBackup -pom.xml.versionsBackup -pom.xml.next -release.properties -dependency-reduced-pom.xml -buildNumber.properties -.mvn/timing.properties - - -### Eclipse ### - -.metadata -bin/ -tmp/ -*.tmp -*.bak -*.swp -*~.nib -local.properties -.settings/ -.loadpath -.recommenders - -# Eclipse Core -.project - -# External tool builders -.externalToolBuilders/ - -# Locally stored "Eclipse launch configurations" -*.launch - -# PyDev specific (Python IDE for Eclipse) -*.pydevproject - -# CDT-specific (C/C++ Development Tooling) -.cproject - -# JDT-specific (Eclipse Java Development Tools) -.classpath - -# Java annotation processor (APT) -.factorypath - -# PDT-specific (PHP Development Tools) -.buildpath - -# sbteclipse plugin -.target - -# Tern plugin -.tern-project - -# TeXlipse plugin -.texlipse - -# STS (Spring Tool Suite) -.springBeans - -# Code Recommenders -.recommenders/ - - -### Linux ### -*~ - -# temporary files which can be created if a process still has a handle open of a deleted file -.fuse_hidden* - -# KDE directory preferences -.directory - -# Linux trash folder which might appear on any partition or disk -.Trash-* - -# .nfs files are created when an open file is removed but is still being accessed -.nfs* - - -### macOS ### -*.DS_Store -.AppleDouble -.LSOverride - -# Icon must end with two \r -Icon -# Thumbnails -._* -# Files that might appear in the root of a volume -.DocumentRevisions-V100 -.fseventsd -.Spotlight-V100 -.TemporaryItems -.Trashes -.VolumeIcon.icns -.com.apple.timemachine.donotpresent -# Directories potentially created on remote AFP share -.AppleDB -.AppleDesktop -Network Trash Folder -Temporary Items -.apdisk - - -### Windows ### -# Windows image file caches -Thumbs.db -ehthumbs.db - -# Folder config file -Desktop.ini - -# Recycle Bin used on file shares -$RECYCLE.BIN/ - -# Windows Installer files -*.cab -*.msi -*.msm -*.msp - -# Windows shortcuts -*.lnk - - -### Java ### -*.class - -# Mobile Tools for Java (J2ME) -.mtj.tmp/ - -# Package Files # -*.jar -*.war -*.ear - -# virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml -hs_err_pid* +/run/ +/*.iml \ No newline at end of file diff --git a/build.gradle b/build.gradle new file mode 100644 
index 0000000..0128f6e --- /dev/null +++ b/build.gradle @@ -0,0 +1,30 @@ +allprojects { + group = 'me.lucko' + version = '1.0-SNAPSHOT' +} + +subprojects { + apply plugin: 'java' + apply plugin: 'maven' + + ext { + pluginVersion = '1.0.4' + } + + sourceCompatibility = 1.8 + targetCompatibility = 1.8 + + tasks.withType(JavaCompile) { + options.encoding = 'UTF-8' + } + + repositories { + mavenLocal() + mavenCentral() + + maven { url "https://repo.lucko.me/" } + maven { url "https://hub.spigotmc.org/nexus/content/repositories/snapshots/" } + maven { url "https://repo.spongepowered.org/maven" } + } + +} diff --git a/bukkit/pom.xml b/bukkit/pom.xml deleted file mode 100644 index 3e24269..0000000 --- a/bukkit/pom.xml +++ /dev/null @@ -1,62 +0,0 @@ -<?xml version="1.0" encoding="UTF-8"?> -<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> - <parent> - <artifactId>spark-parent</artifactId> - <groupId>me.lucko</groupId> - <version>1.0-SNAPSHOT</version> - </parent> - <modelVersion>4.0.0</modelVersion> - - <artifactId>spark-bukkit</artifactId> - <packaging>jar</packaging> - - <build> - <defaultGoal>clean package</defaultGoal> - <finalName>spark-bukkit</finalName> - <resources> - <resource> - <directory>src/main/resources</directory> - <filtering>true</filtering> - </resource> - </resources> - <plugins> - <plugin> - <groupId>org.apache.maven.plugins</groupId> - <artifactId>maven-compiler-plugin</artifactId> - <version>${compiler.version}</version> - <configuration> - <source>1.8</source> - <target>1.8</target> - </configuration> - </plugin> - </plugins> - </build> - - <dependencies> - <dependency> - <groupId>me.lucko</groupId> - <artifactId>spark-common</artifactId> - <version>${project.version}</version> - <scope>provided</scope> - </dependency> - - <dependency> - <groupId>org.spigotmc</groupId> - <artifactId>spigot-api</artifactId> - <version>1.12.2-R0.1-SNAPSHOT</version> - <scope>provided</scope> - </dependency> - </dependencies> - - <repositories> - <repository> - <id>spigot-repo</id> - <url>https://hub.spigotmc.org/nexus/content/repositories/snapshots/</url> - </repository> - </repositories> -</project> diff --git a/bukkit/src/main/java/me/lucko/spark/bukkit/BukkitTickCounter.java b/bukkit/src/main/java/me/lucko/spark/bukkit/BukkitTickCounter.java deleted file mode 100644 index 61a7690..0000000 --- a/bukkit/src/main/java/me/lucko/spark/bukkit/BukkitTickCounter.java +++ /dev/null @@ -1,55 +0,0 @@ -package me.lucko.spark.bukkit; - -import me.lucko.spark.profiler.TickCounter; - -import org.bukkit.plugin.Plugin; -import org.bukkit.scheduler.BukkitTask; - -import java.util.HashSet; -import java.util.Set; -import java.util.concurrent.atomic.LongAdder; - -public class BukkitTickCounter implements TickCounter, Runnable { - private final Plugin plugin; - private BukkitTask task; - - private final Set<Runnable> tasks = new HashSet<>(); - private final LongAdder tick = new LongAdder(); - - public BukkitTickCounter(Plugin plugin) { - this.plugin = plugin; - } - - @Override - public void run() { - this.tick.increment(); - for (Runnable r : this.tasks){ - r.run(); - } - } - - @Override - public void start() { - this.task = this.plugin.getServer().getScheduler().runTaskTimer(this.plugin, this, 1, 1); - } - - @Override - public void close() { - this.task.cancel(); - } - - @Override - public long getCurrentTick() { - return this.tick.longValue(); - } - - @Override - public void addTickTask(Runnable runnable) { - this.tasks.add(runnable); - } - - @Override - public void removeTickTask(Runnable runnable) { - this.tasks.remove(runnable); - } -} diff --git a/bukkit/src/main/java/me/lucko/spark/bukkit/SparkBukkitPlugin.java b/bukkit/src/main/java/me/lucko/spark/bukkit/SparkBukkitPlugin.java deleted file mode 100644 index dc432c5..0000000 --- a/bukkit/src/main/java/me/lucko/spark/bukkit/SparkBukkitPlugin.java +++ /dev/null @@ -1,73 +0,0 @@ -package me.lucko.spark.bukkit; - -import me.lucko.spark.common.CommandHandler; -import me.lucko.spark.profiler.ThreadDumper; -import me.lucko.spark.profiler.TickCounter; - -import org.bukkit.ChatColor; -import org.bukkit.command.Command; -import org.bukkit.command.CommandSender; -import org.bukkit.entity.Player; -import org.bukkit.plugin.java.JavaPlugin; - -public class SparkBukkitPlugin extends JavaPlugin { - - private final CommandHandler<CommandSender> commandHandler = new CommandHandler<CommandSender>() { - - private String colorize(String message) { - return
ChatColor.translateAlternateColorCodes('&', message); - } - - private void broadcast(String msg) { - getServer().getConsoleSender().sendMessage(msg); - for (Player player : getServer().getOnlinePlayers()) { - if (player.hasPermission("spark.profiler")) { - player.sendMessage(msg); - } - } - } - - @Override - protected void sendMessage(CommandSender sender, String message) { - sender.sendMessage(colorize(message)); - } - - @Override - protected void sendMessage(String message) { - String msg = colorize(message); - broadcast(msg); - } - - @Override - protected void sendLink(String url) { - String msg = colorize("&7" + url); - broadcast(msg); - } - - @Override - protected void runAsync(Runnable r) { - getServer().getScheduler().runTaskAsynchronously(SparkBukkitPlugin.this, r); - } - - @Override - protected ThreadDumper getDefaultThreadDumper() { - return new ThreadDumper.Specific(new long[]{Thread.currentThread().getId()}); - } - - @Override - protected TickCounter newTickCounter() { - return new BukkitTickCounter(SparkBukkitPlugin.this); - } - }; - - @Override - public boolean onCommand(CommandSender sender, Command command, String label, String[] args) { - if (!sender.hasPermission("spark.profiler")) { - sender.sendMessage(ChatColor.RED + "You do not have permission to use this command."); - return true; - } - - this.commandHandler.handleCommand(sender, args); - return true; - } -} diff --git a/bukkit/src/main/resources/plugin.yml b/bukkit/src/main/resources/plugin.yml deleted file mode 100644 index bd549b5..0000000 --- a/bukkit/src/main/resources/plugin.yml +++ /dev/null @@ -1,10 +0,0 @@ -name: spark -version: 1.0.4 -description: ${project.description} -authors: [Luck, sk89q] -main: me.lucko.spark.bukkit.SparkBukkitPlugin - -commands: - spark: - description: Main plugin command - aliases: [profiler] \ No newline at end of file diff --git a/bungeecord/pom.xml b/bungeecord/pom.xml deleted file mode 100644 index a8c2db9..0000000 --- a/bungeecord/pom.xml +++ /dev/null @@ -1,55 +0,0 @@ -<?xml version="1.0" encoding="UTF-8"?> -<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> - <parent> - <artifactId>spark-parent</artifactId> - <groupId>me.lucko</groupId> - <version>1.0-SNAPSHOT</version> - </parent> - <modelVersion>4.0.0</modelVersion> - - <artifactId>spark-bungeecord</artifactId> - <packaging>jar</packaging> - - <build> - <defaultGoal>clean package</defaultGoal> - <finalName>spark-bungeecord</finalName> - <resources> - <resource> - <directory>src/main/resources</directory> - <filtering>true</filtering> - </resource> - </resources> - <plugins> - <plugin> - <groupId>org.apache.maven.plugins</groupId> - <artifactId>maven-compiler-plugin</artifactId> - <version>${compiler.version}</version> - <configuration> - <source>1.8</source> - <target>1.8</target> - </configuration> - </plugin> - </plugins> - </build> - - <dependencies> - <dependency> - <groupId>me.lucko</groupId> - <artifactId>spark-common</artifactId> - <version>${project.version}</version> - <scope>provided</scope> - </dependency> - - <dependency> - <groupId>net.md-5</groupId> - <artifactId>bungeecord-api</artifactId> - <version>1.12-SNAPSHOT</version> - <scope>provided</scope> - </dependency> - </dependencies> -</project> diff --git a/bungeecord/src/main/java/me/lucko/spark/bungeecord/SparkBungeeCordPlugin.java b/bungeecord/src/main/java/me/lucko/spark/bungeecord/SparkBungeeCordPlugin.java deleted file mode 100644 index 59bab67..0000000 --- a/bungeecord/src/main/java/me/lucko/spark/bungeecord/SparkBungeeCordPlugin.java +++ /dev/null @@ -1,82 +0,0 @@ -package me.lucko.spark.bungeecord; - -import me.lucko.spark.common.CommandHandler; - -import me.lucko.spark.profiler.ThreadDumper; -import me.lucko.spark.profiler.TickCounter; - -import net.md_5.bungee.api.ChatColor; -import net.md_5.bungee.api.CommandSender; -import net.md_5.bungee.api.chat.BaseComponent; -import net.md_5.bungee.api.chat.ClickEvent; -import net.md_5.bungee.api.chat.TextComponent; -import net.md_5.bungee.api.connection.ProxiedPlayer; -import net.md_5.bungee.api.plugin.Command; -import net.md_5.bungee.api.plugin.Plugin; - -public class SparkBungeeCordPlugin extends Plugin { - - private final CommandHandler<CommandSender> commandHandler = new CommandHandler<CommandSender>() { - private BaseComponent[] colorize(String message) { - return TextComponent.fromLegacyText(ChatColor.translateAlternateColorCodes('&',
message)); - } - - private void broadcast(BaseComponent... msg) { - getProxy().getConsole().sendMessage(msg); - for (ProxiedPlayer player : getProxy().getPlayers()) { - if (player.hasPermission("spark.profiler")) { - player.sendMessage(msg); - } - } - } - - @Override - protected void sendMessage(CommandSender sender, String message) { - sender.sendMessage(colorize(message)); - } - - @Override - protected void sendMessage(String message) { - broadcast(colorize(message)); - } - - @Override - protected void sendLink(String url) { - TextComponent component = new TextComponent(url); - component.setColor(ChatColor.GRAY); - component.setClickEvent(new ClickEvent(ClickEvent.Action.OPEN_URL, url)); - broadcast(component); - } - - @Override - protected void runAsync(Runnable r) { - getProxy().getScheduler().runAsync(SparkBungeeCordPlugin.this, r); - } - - @Override - protected ThreadDumper getDefaultThreadDumper() { - return new ThreadDumper.All(); - } - - @Override - protected TickCounter newTickCounter() { - throw new UnsupportedOperationException(); - } - }; - - @Override - public void onEnable() { - getProxy().getPluginManager().registerCommand(this, new Command("sparkbungee", null, "gprofiler") { - @Override - public void execute(CommandSender sender, String[] args) { - if (!sender.hasPermission("spark.profiler")) { - TextComponent msg = new TextComponent("You do not have permission to use this command."); - msg.setColor(ChatColor.RED); - sender.sendMessage(msg); - return; - } - - SparkBungeeCordPlugin.this.commandHandler.handleCommand(sender, args); - } - }); - } -} diff --git a/bungeecord/src/main/resources/bungee.yml b/bungeecord/src/main/resources/bungee.yml deleted file mode 100644 index 429cf0b..0000000 --- a/bungeecord/src/main/resources/bungee.yml +++ /dev/null @@ -1,5 +0,0 @@ -name: spark -version: 1.0.4 -description: ${project.description} -author: Luck, sk89q -main: me.lucko.spark.bungeecord.SparkBungeeCordPlugin diff --git a/common/pom.xml b/common/pom.xml deleted file mode 100644 index 5f48fdf..0000000 --- a/common/pom.xml +++ /dev/null @@ -1,63 +0,0 @@ -<?xml version="1.0" encoding="UTF-8"?> -<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> - <parent> - <artifactId>spark-parent</artifactId> - <groupId>me.lucko</groupId> - <version>1.0-SNAPSHOT</version> - </parent> - <modelVersion>4.0.0</modelVersion> - - <artifactId>spark-common</artifactId> - <packaging>jar</packaging> - - <build> - <defaultGoal>clean package</defaultGoal> - <finalName>spark-common</finalName> - <plugins> - <plugin> - <groupId>org.apache.maven.plugins</groupId> - <artifactId>maven-compiler-plugin</artifactId> - <version>${compiler.version}</version> - <configuration> - <source>1.8</source> - <target>1.8</target> - </configuration> - </plugin> - </plugins> - </build> - - <dependencies> - <dependency> - <groupId>com.google.code.gson</groupId> - <artifactId>gson</artifactId> - <version>2.7</version> - <scope>provided</scope> - </dependency> - - <dependency> - <groupId>com.google.guava</groupId> - <artifactId>guava</artifactId> - <version>19.0</version> - <scope>provided</scope> - </dependency> - - <dependency> - <groupId>com.squareup.okhttp3</groupId> - <artifactId>okhttp</artifactId> - <version>3.10.0</version> - <scope>compile</scope> - </dependency> - - <dependency> - <groupId>com.squareup.okio</groupId> - <artifactId>okio</artifactId> - <version>1.14.0</version> - <scope>compile</scope> - </dependency> - </dependencies> -</project> diff --git a/common/src/main/java/me/lucko/spark/common/CommandHandler.java b/common/src/main/java/me/lucko/spark/common/CommandHandler.java deleted file mode 100644 index 898bba7..0000000 --- a/common/src/main/java/me/lucko/spark/common/CommandHandler.java +++ /dev/null @@ -1,372 +0,0 @@ -package me.lucko.spark.common; - -import com.google.common.collect.HashMultimap; -import com.google.common.collect.SetMultimap; -import com.google.common.collect.Sets; - -import me.lucko.spark.common.http.Bytebin; -import me.lucko.spark.profiler.Sampler; -import me.lucko.spark.profiler.SamplerBuilder; -import me.lucko.spark.profiler.ThreadDumper; -import me.lucko.spark.profiler.ThreadGrouper; -import me.lucko.spark.profiler.TickCounter; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Iterator; -import java.util.List; -import java.util.Set; -import java.util.Timer; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.TimeUnit; -import
java.util.regex.Matcher; -import java.util.regex.Pattern; -import java.util.stream.Collectors; - -/** - * Abstract command handling class used by all platforms. - * - * @param <T> the sender (e.g. CommandSender) type used by the platform - */ -public abstract class CommandHandler<T> { - - /** The URL of the viewer frontend */ - private static final String VIEWER_URL = "https://sparkprofiler.github.io/?"; - /** The prefix used in all messages */ - private static final String PREFIX = "&8[&fspark&8] &7"; - - /** - * The {@link Timer} being used by the {@link #activeSampler}. - */ - private final Timer samplingThread = new Timer("spark-sampling-thread", true); - - /** Guards {@link #activeSampler} */ - private final Object[] activeSamplerMutex = new Object[0]; - /** The WarmRoast instance currently running, if any */ - private Sampler activeSampler = null; - /** The tick monitor instance currently running, if any */ - private ReportingTickMonitor activeTickMonitor = null; - - - // abstract methods implemented by each platform - - protected abstract void sendMessage(T sender, String message); - protected abstract void sendMessage(String message); - protected abstract void sendLink(String url); - protected abstract void runAsync(Runnable r); - protected abstract ThreadDumper getDefaultThreadDumper(); - protected abstract TickCounter newTickCounter(); - - private void sendPrefixedMessage(T sender, String message) { - sendMessage(sender, PREFIX + message); - } - - private void sendPrefixedMessage(String message) { - sendMessage(PREFIX + message); - } - - public void handleCommand(T sender, String[] args) { - try { - if (args.length == 0) { - sendInfo(sender); - return; - } - - List<String> arguments = new ArrayList<>(Arrays.asList(args)); - switch (arguments.remove(0).toLowerCase()) { - case "start": - handleStart(sender, arguments); - break; - case "info": - handleInfo(sender); - break; - case "cancel": - handleCancel(sender); - break; - case "stop": - case "upload": - case "paste": - handleStop(sender); - break; - case "monitoring": - handleMonitoring(sender, arguments); - break; - default: - sendInfo(sender); - break; - } - } catch (IllegalArgumentException e) { - sendMessage(sender, "&c" + e.getMessage()); - } - } - - private void sendInfo(T sender) { - sendPrefixedMessage(sender, "&fspark profiler &7v1.0"); - sendMessage(sender, "&b&l> &7/profiler start"); - sendMessage(sender, " &8[&7--timeout&8 <timeout seconds>]"); - sendMessage(sender, " &8[&7--thread&8 <thread name>]"); - sendMessage(sender, " &8[&7--not-combined]"); - sendMessage(sender, " &8[&7--interval&8 <interval millis>]"); - sendMessage(sender, " &8[&7--only-ticks-over&8 <tick length millis>]"); - sendMessage(sender, "&b&l> &7/profiler info"); - sendMessage(sender, "&b&l> &7/profiler stop"); - sendMessage(sender, "&b&l> &7/profiler cancel"); - sendMessage(sender, "&b&l> &7/profiler monitoring"); - sendMessage(sender, " &8[&7--threshold&8 <percentage increase>]"); - } - - private void handleStart(T sender, List<String> args) { - SetMultimap<String, String> arguments = parseArguments(args); - - int timeoutSeconds = parseInt(arguments, "timeout", "d"); - if (timeoutSeconds != -1 && timeoutSeconds <= 10) { - sendPrefixedMessage(sender, "&cThe specified timeout is not long enough for accurate results to be formed. Please choose a value greater than 10."); - return; - } - - if (timeoutSeconds != -1 && timeoutSeconds < 30) { - sendPrefixedMessage(sender, "&7The accuracy of the output will significantly improve when sampling is able to run for longer periods. " +
"Consider setting a timeout value over 30 seconds."); - } - - int intervalMillis = parseInt(arguments, "interval", "i"); - if (intervalMillis <= 0) { - intervalMillis = 4; - } - - Set<String> threads = Sets.union(arguments.get("thread"), arguments.get("t")); - ThreadDumper threadDumper; - if (threads.isEmpty()) { - // use the server thread - threadDumper = getDefaultThreadDumper(); - } else if (threads.contains("*")) { - threadDumper = ThreadDumper.ALL; - } else { - threadDumper = new ThreadDumper.Specific(threads); - } - - ThreadGrouper threadGrouper; - if (arguments.containsKey("not-combined")) { - threadGrouper = ThreadGrouper.BY_NAME; - } else { - threadGrouper = ThreadGrouper.BY_POOL; - } - - int ticksOver = parseInt(arguments, "only-ticks-over", "o"); - TickCounter tickCounter = null; - if (ticksOver != -1) { - try { - tickCounter = newTickCounter(); - } catch (UnsupportedOperationException e) { - sendPrefixedMessage(sender, "&cTick counting is not supported on BungeeCord!"); - return; - } - } - - Sampler sampler; - synchronized (this.activeSamplerMutex) { - if (this.activeSampler != null) { - sendPrefixedMessage(sender, "&7An active sampler is already running."); - return; - } - - sendPrefixedMessage("&7Initializing a new profiler, please wait..."); - - SamplerBuilder builder = new SamplerBuilder(); - builder.threadDumper(threadDumper); - builder.threadGrouper(threadGrouper); - if (timeoutSeconds != -1) { - builder.completeAfter(timeoutSeconds, TimeUnit.SECONDS); - } - builder.samplingInterval(intervalMillis); - if (ticksOver != -1) { - builder.ticksOver(ticksOver, tickCounter); - } - sampler = this.activeSampler = builder.start(this.samplingThread); - - sendPrefixedMessage("&bProfiler now active!"); - if (timeoutSeconds == -1) { - sendPrefixedMessage("&7Use '/profiler stop' to stop profiling and upload the results."); - } else { - sendPrefixedMessage("&7The results will be automatically returned after the profiler has been running for " + timeoutSeconds + " seconds."); - } - } - - CompletableFuture<Sampler> future = sampler.getFuture(); - - // send message if profiling fails - future.whenCompleteAsync((s, throwable) -> { - if (throwable != null) { - sendPrefixedMessage("&cSampling operation failed unexpectedly. Error: " + throwable.toString()); - throwable.printStackTrace(); - } - }); - - // set activeSampler to null when complete. - future.whenCompleteAsync((s, throwable) -> { - synchronized (this.activeSamplerMutex) { - if (sampler == this.activeSampler) { - this.activeSampler = null; - } - } - }); - - // await the result - if (timeoutSeconds != -1) { - future.thenAcceptAsync(s -> { - sendPrefixedMessage("&7The active sampling operation has completed! " +
"Uploading results..."); - handleUpload(s); - }); - } - } - - private void handleInfo(T sender) { - synchronized (this.activeSamplerMutex) { - if (this.activeSampler == null) { - sendPrefixedMessage(sender, "&7There isn't an active sampling task running."); - } else { - long timeout = this.activeSampler.getEndTime(); - if (timeout == -1) { - sendPrefixedMessage(sender, "&7There is an active sampler currently running, with no defined timeout."); - } else { - long timeoutDiff = (timeout - System.currentTimeMillis()) / 1000L; - sendPrefixedMessage(sender, "&7There is an active sampler currently running, due to timeout in " + timeoutDiff + " seconds."); - } - - long runningTime = (System.currentTimeMillis() - this.activeSampler.getStartTime()) / 1000L; - sendPrefixedMessage(sender, "&7It has been sampling for " + runningTime + " seconds so far."); - } - } - } - - private void handleStop(T sender) { - synchronized (this.activeSamplerMutex) { - if (this.activeSampler == null) { - sendPrefixedMessage(sender, "&7There isn't an active sampling task running."); - } else { - this.activeSampler.cancel(); - sendPrefixedMessage("&7The active sampling operation has been stopped! Uploading results..."); - handleUpload(this.activeSampler); - this.activeSampler = null; - } - } - } - - private void handleCancel(T sender) { - synchronized (this.activeSamplerMutex) { - if (this.activeSampler == null) { - sendPrefixedMessage(sender, "&7There isn't an active sampling task running."); - } else { - this.activeSampler.cancel(); - this.activeSampler = null; - sendPrefixedMessage("&bThe active sampling task has been cancelled."); - } - } - } - - private void handleUpload(Sampler sampler) { - runAsync(() -> { - byte[] output = sampler.formCompressedDataPayload(); - try { - String pasteId = Bytebin.postCompressedContent(output); - sendPrefixedMessage("&bSampling results:"); - sendLink(VIEWER_URL + pasteId); - } catch (IOException e) { - sendPrefixedMessage("&cAn error occurred whilst uploading the results."); - e.printStackTrace(); - } - }); - } - - private void handleMonitoring(T sender, List<String> args) { - SetMultimap<String, String> arguments = parseArguments(args); - - if (this.activeTickMonitor == null) { - - int threshold = parseInt(arguments, "threshold", "t"); - if (threshold == -1) { - threshold = 100; - } - - try { - TickCounter tickCounter = newTickCounter(); - this.activeTickMonitor = new ReportingTickMonitor(tickCounter, threshold); - } catch (UnsupportedOperationException e) { - sendPrefixedMessage(sender, "&cNot supported on BungeeCord!"); - } - } else { - this.activeTickMonitor.close(); - this.activeTickMonitor = null; - sendPrefixedMessage("&7Tick monitor disabled."); - } - } - - private class ReportingTickMonitor extends TickMonitor { - public ReportingTickMonitor(TickCounter tickCounter, int percentageChangeThreshold) { - super(tickCounter, percentageChangeThreshold); - } - - @Override - protected void sendMessage(String message) { - sendPrefixedMessage(message); - } - } - - private int parseInt(SetMultimap<String, String> arguments, String longArg, String shortArg) { - Iterator<String> it = Sets.union(arguments.get(longArg), arguments.get(shortArg)).iterator(); - if (it.hasNext()) { - try { - return Math.abs(Integer.parseInt(it.next())); - } catch (NumberFormatException e) { - throw new IllegalArgumentException("Invalid input for '" + longArg + "' argument. " +
"Please specify a number!"); - } - } - return -1; // undefined - } - - private static final Pattern FLAG_REGEX = Pattern.compile("--(.+)$|-([a-zA-z])$"); - - private static SetMultimap<String, String> parseArguments(List<String> args) { - SetMultimap<String, String> arguments = HashMultimap.create(); - - String flag = null; - List<String> value = null; - - for (int i = 0; i < args.size(); i++) { - String arg = args.get(i); - - Matcher matcher = FLAG_REGEX.matcher(arg); - boolean matches = matcher.matches(); - - if (flag == null || matches) { - if (!matches) { - throw new IllegalArgumentException("Expected flag at position " + i + " but got '" + arg + "' instead!"); - } - - String match = matcher.group(1); - if (match == null) { - match = matcher.group(2); - } - - // store existing value, if present - if (flag != null) { - arguments.put(flag, value.stream().collect(Collectors.joining(" "))); - } - - flag = match.toLowerCase(); - value = new ArrayList<>(); - } else { - // part of a value - value.add(arg); - } - } - - // store remaining value, if present - if (flag != null) { - arguments.put(flag, value.stream().collect(Collectors.joining(" "))); - } - - return arguments; - } - -} diff --git a/common/src/main/java/me/lucko/spark/common/TickMonitor.java b/common/src/main/java/me/lucko/spark/common/TickMonitor.java deleted file mode 100644 index a30a4db..0000000 --- a/common/src/main/java/me/lucko/spark/common/TickMonitor.java +++ /dev/null @@ -1,88 +0,0 @@ -package me.lucko.spark.common; - -import me.lucko.spark.profiler.TickCounter; - -import java.text.DecimalFormat; -import java.util.DoubleSummaryStatistics; - -public abstract class TickMonitor implements Runnable { - private static final DecimalFormat df = new DecimalFormat("#.##"); - - private final TickCounter tickCounter; - private final int percentageChangeThreshold; - - // data - private double lastTickTime = 0; - private State state = null; - private DoubleSummaryStatistics averageTickTime = new DoubleSummaryStatistics(); - private double avg; - - public TickMonitor(TickCounter tickCounter, int percentageChangeThreshold) { - this.tickCounter = tickCounter; - this.percentageChangeThreshold = percentageChangeThreshold; - - this.tickCounter.start(); - this.tickCounter.addTickTask(this); - } - - protected abstract void sendMessage(String message); - - public void close() { - this.tickCounter.close(); - } - - @Override - public void run() { - double now = ((double) System.nanoTime()) / 1000000d; - - // init - if (this.state == null) { - this.state = State.SETUP; - this.lastTickTime = now; - sendMessage("Tick monitor started.
Before the monitor becomes fully active, the server's " + - "average tick rate will be calculated over a period of 120 ticks (approx 6 seconds)."); - return; - } - - // find the diff - double diff = now - this.lastTickTime; - this.lastTickTime = now; - - // form averages - if (this.state == State.SETUP) { - this.averageTickTime.accept(diff); - - // move onto the next state - if (this.averageTickTime.getCount() >= 120) { - - sendMessage("&bAnalysis is now complete."); - sendMessage("&f> &7Max: " + df.format(this.averageTickTime.getMax()) + "ms"); - sendMessage("&f> &7Min: " + df.format(this.averageTickTime.getMin()) + "ms"); - sendMessage("&f> &7Avg: " + df.format(this.averageTickTime.getAverage()) + "ms"); - sendMessage("Starting now, any ticks with >" + this.percentageChangeThreshold + "% increase in " + - "duration compared to the average will be reported."); - - this.avg = this.averageTickTime.getAverage(); - this.state = State.MONITORING; - } - } - - if (this.state == State.MONITORING) { - double increase = diff - this.avg; - if (increase <= 0) { - return; - } - - double percentageChange = (increase * 100d) / this.avg; - if (percentageChange > this.percentageChangeThreshold) { - sendMessage("&7Tick &8#" + this.tickCounter.getCurrentTick() + " &7lasted &b" + df.format(diff) + "&7 milliseconds. " + - "&7(&b" + df.format(percentageChange) + "% &7increase from average)"); - } - } - } - - private enum State { - SETUP, - MONITORING - } -} diff --git a/common/src/main/java/me/lucko/spark/common/http/Bytebin.java b/common/src/main/java/me/lucko/spark/common/http/Bytebin.java deleted file mode 100644 index 3cd5e4c..0000000 --- a/common/src/main/java/me/lucko/spark/common/http/Bytebin.java +++ /dev/null @@ -1,54 +0,0 @@ -package me.lucko.spark.common.http; - -import com.google.gson.Gson; -import com.google.gson.JsonObject; - -import okhttp3.MediaType; -import okhttp3.Request; -import okhttp3.RequestBody; -import okhttp3.Response; -import okhttp3.ResponseBody; - -import java.io.BufferedReader; -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.nio.charset.StandardCharsets; - -/** - * Utility for uploading JSON data to bytebin. 
- */ -public final class Bytebin { - - /** Media type for JSON data */ - private static final MediaType JSON_TYPE = MediaType.parse("application/json; charset=utf-8"); - /** The URL used to upload sampling data */ - private static final String UPLOAD_ENDPOINT = "https://bytebin.lucko.me/post"; - - public static String postCompressedContent(byte[] buf) throws IOException { - RequestBody body = RequestBody.create(JSON_TYPE, buf); - - Request.Builder requestBuilder = new Request.Builder() - .url(UPLOAD_ENDPOINT) - .header("Content-Encoding", "gzip") - .post(body); - - Request request = requestBuilder.build(); - try (Response response = HttpClient.makeCall(request)) { - try (ResponseBody responseBody = response.body()) { - if (responseBody == null) { - throw new RuntimeException("No response"); - } - - try (InputStream inputStream = responseBody.byteStream()) { - try (BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8))) { - JsonObject object = new Gson().fromJson(reader, JsonObject.class); - return object.get("key").getAsString(); - } - } - } - } - } - - private Bytebin() {} -} diff --git a/common/src/main/java/me/lucko/spark/common/http/HttpClient.java b/common/src/main/java/me/lucko/spark/common/http/HttpClient.java deleted file mode 100644 index 61db597..0000000 --- a/common/src/main/java/me/lucko/spark/common/http/HttpClient.java +++ /dev/null @@ -1,113 +0,0 @@ -/* - * This file is part of LuckPerms, licensed under the MIT License. - * - * Copyright (c) lucko (Luck) <luck@lucko.me> - * Copyright (c) contributors - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in all - * copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - * SOFTWARE. - */ - -package me.lucko.spark.common.http; - -import okhttp3.Interceptor; -import okhttp3.OkHttpClient; -import okhttp3.Request; -import okhttp3.Response; -import okhttp3.ResponseBody; - -import java.io.IOException; -import java.net.Proxy; -import java.net.ProxySelector; -import java.net.SocketAddress; -import java.net.URI; -import java.util.Collections; -import java.util.List; - -/** - * Utility class for making http requests.
- */ -public final class HttpClient { - private static OkHttpClient client = null; - - private static synchronized OkHttpClient getClient() { - if (client == null) { - client = new OkHttpClient.Builder() - .proxySelector(new NullSafeProxySelector()) - .addInterceptor(new UserAgentInterceptor()) - .build(); - } - return client; - } - - public static Response makeCall(Request request) throws IOException { - Response response = getClient().newCall(request).execute(); - if (!response.isSuccessful()) { - throw exceptionForUnsuccessfulResponse(response); - } - return response; - } - - private static RuntimeException exceptionForUnsuccessfulResponse(Response response) { - String msg = ""; - try (ResponseBody responseBody = response.body()) { - if (responseBody != null) { - msg = responseBody.string(); - } - } catch (IOException e) { - // ignore - } - return new RuntimeException("Got response: " + response.code() + " - " + response.message() + " - " + msg); - } - - private static final class UserAgentInterceptor implements Interceptor { - @Override - public Response intercept(Chain chain) throws IOException { - Request orig = chain.request(); - Request modified = orig.newBuilder() - .header("User-Agent", "spark-plugin") - .build(); - - return chain.proceed(modified); - } - } - - // sometimes ProxySelector#getDefault returns null, and okhttp doesn't like that - private static final class NullSafeProxySelector extends ProxySelector { - private static final List<Proxy> DIRECT = Collections.singletonList(Proxy.NO_PROXY); - - @Override - public List<Proxy> select(URI uri) { - ProxySelector def = ProxySelector.getDefault(); - if (def == null) { - return DIRECT; - } - return def.select(uri); - } - - @Override - public void connectFailed(URI uri, SocketAddress sa, IOException ioe) { - ProxySelector def = ProxySelector.getDefault(); - if (def != null) { - def.connectFailed(uri, sa, ioe); - } - } - } - - private HttpClient() {} -} \ No newline at end of file diff --git a/common/src/main/java/me/lucko/spark/profiler/AsyncDataAggregator.java b/common/src/main/java/me/lucko/spark/profiler/AsyncDataAggregator.java deleted file mode 100644 index 9a4090e..0000000 --- a/common/src/main/java/me/lucko/spark/profiler/AsyncDataAggregator.java +++ /dev/null @@ -1,77 +0,0 @@ -package me.lucko.spark.profiler; - -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.TimeUnit; - -/** - * Implementation of {@link DataAggregator} that makes use of a "worker" thread pool for inserting - * data.
- */ -public class AsyncDataAggregator implements DataAggregator { - - /** A map of root stack nodes for each thread with sampling data */ - private final Map<String, StackNode> threadData = new ConcurrentHashMap<>(); - - /** The worker pool for inserting stack nodes */ - private final ExecutorService workerPool; - - /** The instance used to group threads together */ - private final ThreadGrouper threadGrouper; - - /** The interval to wait between sampling, in milliseconds */ - private final int interval; - - public AsyncDataAggregator(ExecutorService workerPool, ThreadGrouper threadGrouper, int interval) { - this.workerPool = workerPool; - this.threadGrouper = threadGrouper; - this.interval = interval; - } - - @Override - public void insertData(String threadName, StackTraceElement[] stack) { - // form the queued data - QueuedThreadInfo queuedData = new QueuedThreadInfo(threadName, stack); - // schedule insertion of the data - this.workerPool.execute(queuedData); - } - - @Override - public Map<String, StackNode> getData() { - // wait for all pending data to be inserted - this.workerPool.shutdown(); - try { - this.workerPool.awaitTermination(15, TimeUnit.SECONDS); - } catch (InterruptedException e) { - e.printStackTrace(); - } - - return this.threadData; - } - - void insertData(QueuedThreadInfo data) { - try { - String group = this.threadGrouper.getGroup(data.threadName); - StackNode node = this.threadData.computeIfAbsent(group, StackNode::new); - node.log(data.stack, this.interval); - } catch (Exception e) { - e.printStackTrace(); - } - } - - private final class QueuedThreadInfo implements Runnable { - private final String threadName; - private final StackTraceElement[] stack; - - QueuedThreadInfo(String threadName, StackTraceElement[] stack) { - this.threadName = threadName; - this.stack = stack; - } - - @Override - public void run() { - insertData(this); - } - } -} diff --git a/common/src/main/java/me/lucko/spark/profiler/DataAggregator.java b/common/src/main/java/me/lucko/spark/profiler/DataAggregator.java deleted file mode 100644 index 1afa52c..0000000 --- a/common/src/main/java/me/lucko/spark/profiler/DataAggregator.java +++ /dev/null @@ -1,32 +0,0 @@ -package me.lucko.spark.profiler; - -import java.util.Map; - -/** - * Aggregates sampling data. - */ -public interface DataAggregator { - - /** - * Called before the sampler begins to insert data - */ - default void start() { - - } - - /** - * Forms the output data - * - * @return the output data - */ - Map<String, StackNode> getData(); - - /** - * Inserts sampling data into this aggregator - * - * @param threadName the name of the thread - * @param stack the call stack - */ - void insertData(String threadName, StackTraceElement[] stack); - -} diff --git a/common/src/main/java/me/lucko/spark/profiler/Sampler.java b/common/src/main/java/me/lucko/spark/profiler/Sampler.java deleted file mode 100644 index 3476f03..0000000 --- a/common/src/main/java/me/lucko/spark/profiler/Sampler.java +++ /dev/null @@ -1,170 +0,0 @@ -/* - * WarmRoast - * Copyright (C) 2013 Albert Pham <http://www.sk89q.com> - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details.
- * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see <http://www.gnu.org/licenses/>. -*/ - -package me.lucko.spark.profiler; - -import com.google.common.util.concurrent.ThreadFactoryBuilder; -import com.google.gson.stream.JsonWriter; - -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.OutputStreamWriter; -import java.io.Writer; -import java.lang.management.ManagementFactory; -import java.lang.management.ThreadInfo; -import java.lang.management.ThreadMXBean; -import java.nio.charset.StandardCharsets; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.Timer; -import java.util.TimerTask; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.zip.GZIPOutputStream; - -/** - * Main sampler class. - */ -public class Sampler extends TimerTask { - private static final AtomicInteger THREAD_ID = new AtomicInteger(0); - - /** The worker pool for inserting stack nodes */ - private final ExecutorService workerPool = Executors.newFixedThreadPool( - 6, new ThreadFactoryBuilder().setNameFormat("spark-worker-" + THREAD_ID.getAndIncrement()).build() - ); - - /** The thread management interface for the current JVM */ - private final ThreadMXBean threadBean = ManagementFactory.getThreadMXBean(); - /** The instance used to generate thread information for use in sampling */ - private final ThreadDumper threadDumper; - /** Responsible for aggregating and then outputting collected sampling data */ - private final DataAggregator dataAggregator; - - /** A future to encapsulate the completion of this sampler instance */ - private final CompletableFuture<Sampler> future = new CompletableFuture<>(); - - /** The interval to wait between sampling, in milliseconds */ - private final int interval; - /** The time when sampling first began */ - private long startTime = -1; - /** The unix timestamp (in millis) when this sampler should automatically complete.*/ - private final long endTime; // -1 for nothing - - public Sampler(int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime) { - this.threadDumper = threadDumper; - this.dataAggregator = new AsyncDataAggregator(this.workerPool, threadGrouper, interval); - this.interval = interval; - this.endTime = endTime; - } - - public Sampler(int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime, TickCounter tickCounter, int tickLengthThreshold) { - this.threadDumper = threadDumper; - this.dataAggregator = new TickedDataAggregator(this.workerPool, tickCounter, threadGrouper, interval, tickLengthThreshold); - this.interval = interval; - this.endTime = endTime; - } - - /** - * Starts the sampler.
- * - * @param samplingThread the timer to schedule the sampling on - */ - public void start(Timer samplingThread) { - this.startTime = System.currentTimeMillis(); - this.dataAggregator.start(); - samplingThread.scheduleAtFixedRate(this, 0, this.interval); - } - - public long getStartTime() { - if (this.startTime == -1) { - throw new IllegalStateException("Not yet started"); - } - return this.startTime; - } - - public long getEndTime() { - return this.endTime; - } - - public CompletableFuture<Sampler> getFuture() { - return this.future; - } - - @Override - public void run() { - try { - if (this.endTime != -1 && this.endTime <= System.currentTimeMillis()) { - this.future.complete(this); - cancel(); - return; - } - - ThreadInfo[] threadDumps = this.threadDumper.dumpThreads(this.threadBean); - for (ThreadInfo threadInfo : threadDumps) { - String threadName = threadInfo.getThreadName(); - StackTraceElement[] stack = threadInfo.getStackTrace(); - - if (threadName == null || stack == null) { - continue; - } - - this.dataAggregator.insertData(threadName, stack); - } - } catch (Throwable t) { - this.future.completeExceptionally(t); - cancel(); - } - } - - private void writeOutput(JsonWriter writer) throws IOException { - writer.beginObject(); - - writer.name("threads").beginArray(); - - List<Map.Entry<String, StackNode>> data = new ArrayList<>(this.dataAggregator.getData().entrySet()); - data.sort(Map.Entry.comparingByKey()); - - for (Map.Entry<String, StackNode> entry : data) { - writer.beginObject(); - writer.name("threadName").value(entry.getKey()); - writer.name("totalTime").value(entry.getValue().getTotalTime()); - writer.name("rootNode"); - entry.getValue().serializeTo(writer); - writer.endObject(); - } - - writer.endArray(); - writer.endObject(); - } - - public byte[] formCompressedDataPayload() { - ByteArrayOutputStream byteOut = new ByteArrayOutputStream(); - try (Writer writer = new OutputStreamWriter(new GZIPOutputStream(byteOut), StandardCharsets.UTF_8)) { - try (JsonWriter jsonWriter = new JsonWriter(writer)) { - writeOutput(jsonWriter); - } - } catch (IOException e) { - throw new RuntimeException(e); - } - return byteOut.toByteArray(); - } - -} diff --git a/common/src/main/java/me/lucko/spark/profiler/SamplerBuilder.java b/common/src/main/java/me/lucko/spark/profiler/SamplerBuilder.java deleted file mode 100644 index 7db0515..0000000 --- a/common/src/main/java/me/lucko/spark/profiler/SamplerBuilder.java +++ /dev/null @@ -1,63 +0,0 @@ -package me.lucko.spark.profiler; - -import java.util.Timer; -import java.util.concurrent.TimeUnit; - -/** - * Builds {@link Sampler} instances.
- */ -public class SamplerBuilder { - - private int samplingInterval = 4; - private long timeout = -1; - private ThreadDumper threadDumper = ThreadDumper.ALL; - private ThreadGrouper threadGrouper = ThreadGrouper.BY_NAME; - - private int ticksOver = -1; - private TickCounter tickCounter = null; - - public SamplerBuilder() { - } - - public SamplerBuilder samplingInterval(int samplingInterval) { - this.samplingInterval = samplingInterval; - return this; - } - - public SamplerBuilder completeAfter(long timeout, TimeUnit unit) { - if (timeout <= 0) { - throw new IllegalArgumentException("timeout > 0"); - } - this.timeout = System.currentTimeMillis() + unit.toMillis(timeout); - return this; - } - - public SamplerBuilder threadDumper(ThreadDumper threadDumper) { - this.threadDumper = threadDumper; - return this; - } - - public SamplerBuilder threadGrouper(ThreadGrouper threadGrouper) { - this.threadGrouper = threadGrouper; - return this; - } - - public SamplerBuilder ticksOver(int ticksOver, TickCounter tickCounter) { - this.ticksOver = ticksOver; - this.tickCounter = tickCounter; - return this; - } - - public Sampler start(Timer samplingThread) { - Sampler sampler; - if (this.ticksOver != -1 && this.tickCounter != null) { - sampler = new Sampler(this.samplingInterval, this.threadDumper, this.threadGrouper, this.timeout, this.tickCounter, this.ticksOver); - } else { - sampler = new Sampler(this.samplingInterval, this.threadDumper, this.threadGrouper, this.timeout); - } - - sampler.start(samplingThread); - return sampler; - } - -} diff --git a/common/src/main/java/me/lucko/spark/profiler/StackNode.java b/common/src/main/java/me/lucko/spark/profiler/StackNode.java deleted file mode 100644 index 575400a..0000000 --- a/common/src/main/java/me/lucko/spark/profiler/StackNode.java +++ /dev/null @@ -1,141 +0,0 @@ -/* - * WarmRoast - * Copyright (C) 2013 Albert Pham <http://www.sk89q.com> - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see <http://www.gnu.org/licenses/>. -*/ - -package me.lucko.spark.profiler; - -import com.google.gson.stream.JsonWriter; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.atomic.LongAdder; - -/** - * Represents a node in the overall sampling stack. - * - *
<p>The base implementation of this class is only used for the root of node structures. The - * {@link StackTraceNode} class is used for representing method calls in the structure.</p>
- */ -public class StackNode implements Comparable<StackNode> { - - private static final int MAX_STACK_DEPTH = 300; - - /** - * The name of this node - */ - private final String name; - - /** - * A map of this node's children - */ - private final Map<String, StackNode> children = new ConcurrentHashMap<>(); - - /** - * The accumulated sample time for this node - */ - private final LongAdder totalTime = new LongAdder(); - - public StackNode(String name) { - this.name = name; - } - - public String getName() { - return this.name; - } - - public Collection<StackNode> getChildren() { - if (this.children.isEmpty()) { - return Collections.emptyList(); - } - - List<StackNode> list = new ArrayList<>(this.children.values()); - list.sort(null); - return list; - } - - private StackNode resolveChild(String name) { - return this.children.computeIfAbsent(name, StackNode::new); - } - - private StackNode resolveChild(String className, String methodName) { - return this.children.computeIfAbsent(StackTraceNode.formName(className, methodName), name -> new StackTraceNode(className, methodName)); - } - - public long getTotalTime() { - return this.totalTime.longValue(); - } - - public void accumulateTime(long time) { - this.totalTime.add(time); - } - - private void log(StackTraceElement[] elements, int skip, long time) { - accumulateTime(time); - - if (skip >= MAX_STACK_DEPTH) { - return; - } - - if (elements.length - skip == 0) { - return; - } - - StackTraceElement bottom = elements[elements.length - (skip + 1)]; - resolveChild(bottom.getClassName(), bottom.getMethodName()).log(elements, skip + 1, time); - } - - public void log(StackTraceElement[] elements, long time) { - log(elements, 0, time); - } - - @Override - public int compareTo(StackNode o) { - return getName().compareTo(o.getName()); - } - - public void serializeTo(JsonWriter writer) throws IOException { - writer.beginObject(); - - // append metadata about this node - appendMetadata(writer); - - // include the total time recorded for this node - writer.name("totalTime").value(getTotalTime()); - - // append child nodes, if any are present - Collection<StackNode> childNodes = getChildren(); - if (!childNodes.isEmpty()) { - writer.name("children").beginArray(); - for (StackNode child : childNodes) { - child.serializeTo(writer); - } - writer.endArray(); - } - - writer.endObject(); - } - - protected void appendMetadata(JsonWriter writer) throws IOException { - writer.name("name").value(getName()); - } - -} diff --git a/common/src/main/java/me/lucko/spark/profiler/StackTraceNode.java b/common/src/main/java/me/lucko/spark/profiler/StackTraceNode.java deleted file mode 100644 index d46a547..0000000 --- a/common/src/main/java/me/lucko/spark/profiler/StackTraceNode.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * WarmRoast - * Copyright (C) 2013 Albert Pham <http://www.sk89q.com> - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see <http://www.gnu.org/licenses/>.
-*/ - -package me.lucko.spark.profiler; - -import com.google.gson.stream.JsonWriter; - -import java.io.IOException; - -/** - * Represents a {@link StackNode node} for a method call. - */ -public class StackTraceNode extends StackNode { - - /** - * Forms the {@link StackNode#getName()} for a {@link StackTraceNode}. - * - * @param className the name of the class - * @param methodName the name of the method - * @return the name - */ - static String formName(String className, String methodName) { - return className + "." + methodName + "()"; - } - - /** The name of the class */ - private final String className; - /** The name of the method */ - private final String methodName; - - public StackTraceNode(String className, String methodName) { - super(formName(className, methodName)); - this.className = className; - this.methodName = methodName; - } - - public String getClassName() { - return this.className; - } - - public String getMethodName() { - return this.methodName; - } - - @Override - protected void appendMetadata(JsonWriter writer) throws IOException { - writer.name("className").value(this.className); - writer.name("methodName").value(this.methodName); - } - - @Override - public int compareTo(StackNode that) { - return Long.compare(that.getTotalTime(), this.getTotalTime()); - } - -} diff --git a/common/src/main/java/me/lucko/spark/profiler/ThreadDumper.java b/common/src/main/java/me/lucko/spark/profiler/ThreadDumper.java deleted file mode 100644 index 68d7dc9..0000000 --- a/common/src/main/java/me/lucko/spark/profiler/ThreadDumper.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * WarmRoast - * Copyright (C) 2013 Albert Pham <http://www.sk89q.com> - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see <http://www.gnu.org/licenses/>. - */ - -package me.lucko.spark.profiler; - -import java.lang.management.ThreadInfo; -import java.lang.management.ThreadMXBean; -import java.util.Set; -import java.util.stream.Collectors; - -/** - * Uses the {@link ThreadMXBean} to generate {@link ThreadInfo} instances for the threads being - * sampled. - */ -@FunctionalInterface -public interface ThreadDumper { - - /** - * Generates {@link ThreadInfo} data for the sampled threads. - * - * @param threadBean the thread bean instance to obtain the data from - * @return an array of generated thread info instances - */ - ThreadInfo[] dumpThreads(ThreadMXBean threadBean); - - /** - * Implementation of {@link ThreadDumper} that generates data for all threads. - */ - ThreadDumper ALL = new All(); - - final class All implements ThreadDumper { - @Override - public ThreadInfo[] dumpThreads(ThreadMXBean threadBean) { - return threadBean.dumpAllThreads(false, false); - } - } - - /** - * Implementation of {@link ThreadDumper} that generates data for a specific set of threads.
- */ - final class Specific implements ThreadDumper { - private final long[] ids; - - public Specific(long[] ids) { - this.ids = ids; - } - - public Specific(Set names) { - Set threadNamesLower = names.stream().map(String::toLowerCase).collect(Collectors.toSet()); - this.ids = Thread.getAllStackTraces().keySet().stream() - .filter(t -> threadNamesLower.contains(t.getName().toLowerCase())) - .mapToLong(Thread::getId) - .toArray(); - } - - @Override - public ThreadInfo[] dumpThreads(ThreadMXBean threadBean) { - return threadBean.getThreadInfo(this.ids, Integer.MAX_VALUE); - } - } - -} diff --git a/common/src/main/java/me/lucko/spark/profiler/ThreadGrouper.java b/common/src/main/java/me/lucko/spark/profiler/ThreadGrouper.java deleted file mode 100644 index 56a6cc4..0000000 --- a/common/src/main/java/me/lucko/spark/profiler/ThreadGrouper.java +++ /dev/null @@ -1,52 +0,0 @@ -package me.lucko.spark.profiler; - -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -/** - * Function for grouping threads together - */ -@FunctionalInterface -public interface ThreadGrouper { - - /** - * Gets the group for the given thread. - * - * @param threadName the name of the thread - * @return the group - */ - String getGroup(String threadName); - - /** - * Implementation of {@link ThreadGrouper} that just groups by thread name. - */ - ThreadGrouper BY_NAME = new ByName(); - - final class ByName implements ThreadGrouper { - @Override - public String getGroup(String threadName) { - return threadName; - } - } - - /** - * Implementation of {@link ThreadGrouper} that attempts to group by the name of the pool - * the thread originated from. - */ - ThreadGrouper BY_POOL = new ByPool(); - - final class ByPool implements ThreadGrouper { - private static final Pattern THREAD_POOL_PATTERN = Pattern.compile("^(.*)[-#] ?\\d+$"); - - @Override - public String getGroup(String threadName) { - Matcher matcher = THREAD_POOL_PATTERN.matcher(threadName); - if (!matcher.matches()) { - return threadName; - } - - return matcher.group(1).trim() + " (Combined)"; - } - } - -} diff --git a/common/src/main/java/me/lucko/spark/profiler/TickCounter.java b/common/src/main/java/me/lucko/spark/profiler/TickCounter.java deleted file mode 100644 index 53a9c27..0000000 --- a/common/src/main/java/me/lucko/spark/profiler/TickCounter.java +++ /dev/null @@ -1,39 +0,0 @@ -package me.lucko.spark.profiler; - -/** - * A hook with the game's "tick loop". 
- */ -public interface TickCounter { - - /** - * Starts the counter - */ - void start(); - - /** - * Stops the counter - */ - void close(); - - /** - * Gets the current tick number - * - * @return the current tick - */ - long getCurrentTick(); - - /** - * Adds a task to be called each time the tick increments - * - * @param runnable the task - */ - void addTickTask(Runnable runnable); - - /** - * Removes a tick task - * - * @param runnable the task - */ - void removeTickTask(Runnable runnable); - -} diff --git a/common/src/main/java/me/lucko/spark/profiler/TickedDataAggregator.java b/common/src/main/java/me/lucko/spark/profiler/TickedDataAggregator.java deleted file mode 100644 index abca4b3..0000000 --- a/common/src/main/java/me/lucko/spark/profiler/TickedDataAggregator.java +++ /dev/null @@ -1,147 +0,0 @@ -package me.lucko.spark.profiler; - -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.TimeUnit; - -/** - * Implementation of {@link DataAggregator} which supports only including sampling data from "ticks" - * which exceed a certain threshold in duration. - */ -public class TickedDataAggregator implements DataAggregator { - - /** A map of root stack nodes for each thread with sampling data */ - private final Map threadData = new ConcurrentHashMap<>(); - - /** The worker pool for inserting stack nodes */ - private final ExecutorService workerPool; - - /** Used to monitor the current "tick" of the server */ - private final TickCounter tickCounter; - - /** The instance used to group threads together */ - private final ThreadGrouper threadGrouper; - - /** The interval to wait between sampling, in milliseconds */ - private final int interval; - - /** Tick durations under this threshold will not be inserted */ - private final int tickLengthThreshold; - - /** The expected number of samples in each tick */ - private final int expectedSize; - - // state - private long currentTick = -1; - private TickList currentData = new TickList(0); - - public TickedDataAggregator(ExecutorService workerPool, TickCounter tickCounter, ThreadGrouper threadGrouper, int interval, int tickLengthThreshold) { - this.workerPool = workerPool; - this.tickCounter = tickCounter; - this.threadGrouper = threadGrouper; - this.interval = interval; - this.tickLengthThreshold = tickLengthThreshold; - // 50 millis in a tick, plus 10 so we have a bit of room to go over - this.expectedSize = (50 / interval) + 10; - } - - // this is effectively synchronized by the Timer instance in Sampler - @Override - public void insertData(String threadName, StackTraceElement[] stack) { - long tick = this.tickCounter.getCurrentTick(); - if (this.currentTick != tick) { - pushCurrentTick(); - this.currentTick = tick; - this.currentData = new TickList(this.expectedSize); - } - - // form the queued data - QueuedThreadInfo queuedData = new QueuedThreadInfo(threadName, stack); - // insert it - this.currentData.addData(queuedData); - } - - private void pushCurrentTick() { - TickList currentData = this.currentData; - - // approximate how long the tick lasted - int tickLengthMillis = currentData.getList().size() * this.interval; - - // don't push data below the threshold - if (tickLengthMillis < this.tickLengthThreshold) { - return; - } - - this.workerPool.submit(currentData); - } - - @Override - public void start() { - this.tickCounter.start(); - } - - @Override - public Map getData() { - // push the current 
tick - pushCurrentTick(); - - // close the tick counter - this.tickCounter.close(); - - // wait for all pending data to be inserted - this.workerPool.shutdown(); - try { - this.workerPool.awaitTermination(15, TimeUnit.SECONDS); - } catch (InterruptedException e) { - e.printStackTrace(); - } - - return this.threadData; - } - - void insertData(List dataList) { - for (QueuedThreadInfo data : dataList) { - try { - String group = this.threadGrouper.getGroup(data.threadName); - StackNode node = this.threadData.computeIfAbsent(group, StackNode::new); - node.log(data.stack, this.interval); - } catch (Exception e) { - e.printStackTrace(); - } - } - } - - private final class TickList implements Runnable { - private final List list; - - TickList(int expectedSize) { - this.list = new ArrayList<>(expectedSize); - } - - @Override - public void run() { - insertData(this.list); - } - - public List getList() { - return this.list; - } - - public void addData(QueuedThreadInfo data) { - this.list.add(data); - } - } - - private static final class QueuedThreadInfo { - private final String threadName; - private final StackTraceElement[] stack; - - QueuedThreadInfo(String threadName, StackTraceElement[] stack) { - this.threadName = threadName; - this.stack = stack; - } - } -} diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar new file mode 100644 index 0000000..1948b90 Binary files /dev/null and b/gradle/wrapper/gradle-wrapper.jar differ diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties new file mode 100644 index 0000000..d2c45a4 --- /dev/null +++ b/gradle/wrapper/gradle-wrapper.properties @@ -0,0 +1,5 @@ +distributionBase=GRADLE_USER_HOME +distributionPath=wrapper/dists +distributionUrl=https\://services.gradle.org/distributions/gradle-4.8-bin.zip +zipStoreBase=GRADLE_USER_HOME +zipStorePath=wrapper/dists diff --git a/gradlew b/gradlew new file mode 100644 index 0000000..cccdd3d --- /dev/null +++ b/gradlew @@ -0,0 +1,172 @@ +#!/usr/bin/env sh + +############################################################################## +## +## Gradle start up script for UN*X +## +############################################################################## + +# Attempt to set APP_HOME +# Resolve links: $0 may be a link +PRG="$0" +# Need this for relative symlinks. +while [ -h "$PRG" ] ; do + ls=`ls -ld "$PRG"` + link=`expr "$ls" : '.*-> \(.*\)$'` + if expr "$link" : '/.*' > /dev/null; then + PRG="$link" + else + PRG=`dirname "$PRG"`"/$link" + fi +done +SAVED="`pwd`" +cd "`dirname \"$PRG\"`/" >/dev/null +APP_HOME="`pwd -P`" +cd "$SAVED" >/dev/null + +APP_NAME="Gradle" +APP_BASE_NAME=`basename "$0"` + +# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +DEFAULT_JVM_OPTS="" + +# Use the maximum available, or set MAX_FD != -1 to use that value. +MAX_FD="maximum" + +warn () { + echo "$*" +} + +die () { + echo + echo "$*" + echo + exit 1 +} + +# OS specific support (must be 'true' or 'false'). +cygwin=false +msys=false +darwin=false +nonstop=false +case "`uname`" in + CYGWIN* ) + cygwin=true + ;; + Darwin* ) + darwin=true + ;; + MINGW* ) + msys=true + ;; + NONSTOP* ) + nonstop=true + ;; +esac + +CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar + +# Determine the Java command to use to start the JVM. 
+if [ -n "$JAVA_HOME" ] ; then + if [ -x "$JAVA_HOME/jre/sh/java" ] ; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD="$JAVA_HOME/jre/sh/java" + else + JAVACMD="$JAVA_HOME/bin/java" + fi + if [ ! -x "$JAVACMD" ] ; then + die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." + fi +else + JAVACMD="java" + which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." +fi + +# Increase the maximum file descriptors if we can. +if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then + MAX_FD_LIMIT=`ulimit -H -n` + if [ $? -eq 0 ] ; then + if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then + MAX_FD="$MAX_FD_LIMIT" + fi + ulimit -n $MAX_FD + if [ $? -ne 0 ] ; then + warn "Could not set maximum file descriptor limit: $MAX_FD" + fi + else + warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" + fi +fi + +# For Darwin, add options to specify how the application appears in the dock +if $darwin; then + GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" +fi + +# For Cygwin, switch paths to Windows format before running java +if $cygwin ; then + APP_HOME=`cygpath --path --mixed "$APP_HOME"` + CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` + JAVACMD=`cygpath --unix "$JAVACMD"` + + # We build the pattern for arguments to be converted via cygpath + ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` + SEP="" + for dir in $ROOTDIRSRAW ; do + ROOTDIRS="$ROOTDIRS$SEP$dir" + SEP="|" + done + OURCYGPATTERN="(^($ROOTDIRS))" + # Add a user-defined pattern to the cygpath arguments + if [ "$GRADLE_CYGPATTERN" != "" ] ; then + OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" + fi + # Now convert the arguments - kludge to limit ourselves to /bin/sh + i=0 + for arg in "$@" ; do + CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` + CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option + + if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition + eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` + else + eval `echo args$i`="\"$arg\"" + fi + i=$((i+1)) + done + case $i in + (0) set -- ;; + (1) set -- "$args0" ;; + (2) set -- "$args0" "$args1" ;; + (3) set -- "$args0" "$args1" "$args2" ;; + (4) set -- "$args0" "$args1" "$args2" "$args3" ;; + (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; + (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; + (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; + (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; + (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; + esac +fi + +# Escape application args +save () { + for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done + echo " " +} +APP_ARGS=$(save "$@") + +# Collect all arguments for the java command, following the shell quoting and substitution rules +eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS" + +# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong +if [ 
"$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then + cd "$(dirname "$0")" +fi + +exec "$JAVACMD" "$@" diff --git a/gradlew.bat b/gradlew.bat new file mode 100644 index 0000000..f955316 --- /dev/null +++ b/gradlew.bat @@ -0,0 +1,84 @@ +@if "%DEBUG%" == "" @echo off +@rem ########################################################################## +@rem +@rem Gradle startup script for Windows +@rem +@rem ########################################################################## + +@rem Set local scope for the variables with windows NT shell +if "%OS%"=="Windows_NT" setlocal + +set DIRNAME=%~dp0 +if "%DIRNAME%" == "" set DIRNAME=. +set APP_BASE_NAME=%~n0 +set APP_HOME=%DIRNAME% + +@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +set DEFAULT_JVM_OPTS= + +@rem Find java.exe +if defined JAVA_HOME goto findJavaFromJavaHome + +set JAVA_EXE=java.exe +%JAVA_EXE% -version >NUL 2>&1 +if "%ERRORLEVEL%" == "0" goto init + +echo. +echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:findJavaFromJavaHome +set JAVA_HOME=%JAVA_HOME:"=% +set JAVA_EXE=%JAVA_HOME%/bin/java.exe + +if exist "%JAVA_EXE%" goto init + +echo. +echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:init +@rem Get command-line arguments, handling Windows variants + +if not "%OS%" == "Windows_NT" goto win9xME_args + +:win9xME_args +@rem Slurp the command line arguments. +set CMD_LINE_ARGS= +set _SKIP=2 + +:win9xME_args_slurp +if "x%~1" == "x" goto execute + +set CMD_LINE_ARGS=%* + +:execute +@rem Setup the command line + +set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar + +@rem Execute Gradle +"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS% + +:end +@rem End local scope for the variables with windows NT shell +if "%ERRORLEVEL%"=="0" goto mainEnd + +:fail +rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of +rem the _cmd.exe /c_ return code! +if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 +exit /b 1 + +:mainEnd +if "%OS%"=="Windows_NT" endlocal + +:omega diff --git a/pom.xml b/pom.xml deleted file mode 100644 index 766d814..0000000 --- a/pom.xml +++ /dev/null @@ -1,108 +0,0 @@ - - - 4.0.0 - - me.lucko - spark-parent - 1.0-SNAPSHOT - - - common - bukkit - bungeecord - sponge - universal - - - spark - Spark is a CPU profiling plugin based on sk89q's WarmRoast profiler. 
- https://github.com/lucko/spark - - - scm:git:https://github.com/lucko/spark.git - scm:git:git@github.com:lucko/spark.git - https://github.com/lucko/spark - - - pom - - - UTF-8 - - - true - true - - - 3.7.0 - 3.1.0 - - - - - luck-snapshots - https://nexus.lucko.me/repository/maven-snapshots/ - - - luck-releases - https://nexus.lucko.me/repository/maven-releases/ - - - - - Jenkins - https://ci.lucko.me/job/spark - - - - GitHub - https://github.com/lucko/spark/issues - - - - - sign - - - - org.apache.maven.plugins - maven-gpg-plugin - 1.6 - - - sign-artifacts - verify - - sign - - - - - - - - - ossrh - - - ossrh - https://oss.sonatype.org/content/repositories/snapshots - - - ossrh - https://oss.sonatype.org/service/local/staging/deploy/maven2/ - - - - - - - - luck-repo - https://repo.lucko.me/ - - - - diff --git a/settings.gradle b/settings.gradle new file mode 100644 index 0000000..13da274 --- /dev/null +++ b/settings.gradle @@ -0,0 +1,2 @@ +rootProject.name = 'spark' +include 'spark-common', 'spark-bukkit', 'spark-bungeecord', 'spark-sponge', 'spark-universal' \ No newline at end of file diff --git a/spark-bukkit/build.gradle b/spark-bukkit/build.gradle new file mode 100644 index 0000000..e2f4da0 --- /dev/null +++ b/spark-bukkit/build.gradle @@ -0,0 +1,11 @@ +dependencies { + compile project(':spark-common') + compileOnly 'org.spigotmc:spigot-api:1.12.2-R0.1-SNAPSHOT' +} + +processResources { + from(sourceSets.main.resources.srcDirs) { + expand 'pluginVersion': project.pluginVersion + include 'plugin.yml' + } +} \ No newline at end of file diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitTickCounter.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitTickCounter.java new file mode 100644 index 0000000..61a7690 --- /dev/null +++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitTickCounter.java @@ -0,0 +1,55 @@ +package me.lucko.spark.bukkit; + +import me.lucko.spark.profiler.TickCounter; + +import org.bukkit.plugin.Plugin; +import org.bukkit.scheduler.BukkitTask; + +import java.util.HashSet; +import java.util.Set; +import java.util.concurrent.atomic.LongAdder; + +public class BukkitTickCounter implements TickCounter, Runnable { + private final Plugin plugin; + private BukkitTask task; + + private final Set tasks = new HashSet<>(); + private final LongAdder tick = new LongAdder(); + + public BukkitTickCounter(Plugin plugin) { + this.plugin = plugin; + } + + @Override + public void run() { + this.tick.increment(); + for (Runnable r : this.tasks){ + r.run(); + } + } + + @Override + public void start() { + this.task = this.plugin.getServer().getScheduler().runTaskTimer(this.plugin, this, 1, 1); + } + + @Override + public void close() { + this.task.cancel(); + } + + @Override + public long getCurrentTick() { + return this.tick.longValue(); + } + + @Override + public void addTickTask(Runnable runnable) { + this.tasks.add(runnable); + } + + @Override + public void removeTickTask(Runnable runnable) { + this.tasks.remove(runnable); + } +} diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/SparkBukkitPlugin.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/SparkBukkitPlugin.java new file mode 100644 index 0000000..dc432c5 --- /dev/null +++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/SparkBukkitPlugin.java @@ -0,0 +1,73 @@ +package me.lucko.spark.bukkit; + +import me.lucko.spark.common.CommandHandler; +import me.lucko.spark.profiler.ThreadDumper; +import me.lucko.spark.profiler.TickCounter; + +import org.bukkit.ChatColor; +import 
org.bukkit.command.Command; +import org.bukkit.command.CommandSender; +import org.bukkit.entity.Player; +import org.bukkit.plugin.java.JavaPlugin; + +public class SparkBukkitPlugin extends JavaPlugin { + + private final CommandHandler commandHandler = new CommandHandler() { + + private String colorize(String message) { + return ChatColor.translateAlternateColorCodes('&', message); + } + + private void broadcast(String msg) { + getServer().getConsoleSender().sendMessage(msg); + for (Player player : getServer().getOnlinePlayers()) { + if (player.hasPermission("spark.profiler")) { + player.sendMessage(msg); + } + } + } + + @Override + protected void sendMessage(CommandSender sender, String message) { + sender.sendMessage(colorize(message)); + } + + @Override + protected void sendMessage(String message) { + String msg = colorize(message); + broadcast(msg); + } + + @Override + protected void sendLink(String url) { + String msg = colorize("&7" + url); + broadcast(msg); + } + + @Override + protected void runAsync(Runnable r) { + getServer().getScheduler().runTaskAsynchronously(SparkBukkitPlugin.this, r); + } + + @Override + protected ThreadDumper getDefaultThreadDumper() { + return new ThreadDumper.Specific(new long[]{Thread.currentThread().getId()}); + } + + @Override + protected TickCounter newTickCounter() { + return new BukkitTickCounter(SparkBukkitPlugin.this); + } + }; + + @Override + public boolean onCommand(CommandSender sender, Command command, String label, String[] args) { + if (!sender.hasPermission("spark.profiler")) { + sender.sendMessage(ChatColor.RED + "You do not have permission to use this command."); + return true; + } + + this.commandHandler.handleCommand(sender, args); + return true; + } +} diff --git a/spark-bukkit/src/main/resources/plugin.yml b/spark-bukkit/src/main/resources/plugin.yml new file mode 100644 index 0000000..94c61a3 --- /dev/null +++ b/spark-bukkit/src/main/resources/plugin.yml @@ -0,0 +1,10 @@ +name: spark +version: ${pluginVersion} +description: Spark is a CPU profiling plugin based on sk89q's WarmRoast profiler +authors: [Luck, sk89q] +main: me.lucko.spark.bukkit.SparkBukkitPlugin + +commands: + spark: + description: Main plugin command + aliases: [profiler] \ No newline at end of file diff --git a/spark-bungeecord/build.gradle b/spark-bungeecord/build.gradle new file mode 100644 index 0000000..bb87a73 --- /dev/null +++ b/spark-bungeecord/build.gradle @@ -0,0 +1,11 @@ +dependencies { + compile project(':spark-common') + compileOnly 'net.md-5:bungeecord-api:1.12-SNAPSHOT' +} + +processResources { + from(sourceSets.main.resources.srcDirs) { + expand 'pluginVersion': project.pluginVersion + include 'bungee.yml' + } +} \ No newline at end of file diff --git a/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/SparkBungeeCordPlugin.java b/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/SparkBungeeCordPlugin.java new file mode 100644 index 0000000..59bab67 --- /dev/null +++ b/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/SparkBungeeCordPlugin.java @@ -0,0 +1,82 @@ +package me.lucko.spark.bungeecord; + +import me.lucko.spark.common.CommandHandler; +import me.lucko.spark.profiler.ThreadDumper; +import me.lucko.spark.profiler.TickCounter; + +import net.md_5.bungee.api.ChatColor; +import net.md_5.bungee.api.CommandSender; +import net.md_5.bungee.api.chat.BaseComponent; +import net.md_5.bungee.api.chat.ClickEvent; +import net.md_5.bungee.api.chat.TextComponent; +import net.md_5.bungee.api.connection.ProxiedPlayer; +import 
net.md_5.bungee.api.plugin.Command; +import net.md_5.bungee.api.plugin.Plugin; + +public class SparkBungeeCordPlugin extends Plugin { + + private final CommandHandler commandHandler = new CommandHandler() { + private BaseComponent[] colorize(String message) { + return TextComponent.fromLegacyText(ChatColor.translateAlternateColorCodes('&', message)); + } + + private void broadcast(BaseComponent... msg) { + getProxy().getConsole().sendMessage(msg); + for (ProxiedPlayer player : getProxy().getPlayers()) { + if (player.hasPermission("spark.profiler")) { + player.sendMessage(msg); + } + } + } + + @Override + protected void sendMessage(CommandSender sender, String message) { + sender.sendMessage(colorize(message)); + } + + @Override + protected void sendMessage(String message) { + broadcast(colorize(message)); + } + + @Override + protected void sendLink(String url) { + TextComponent component = new TextComponent(url); + component.setColor(ChatColor.GRAY); + component.setClickEvent(new ClickEvent(ClickEvent.Action.OPEN_URL, url)); + broadcast(component); + } + + @Override + protected void runAsync(Runnable r) { + getProxy().getScheduler().runAsync(SparkBungeeCordPlugin.this, r); + } + + @Override + protected ThreadDumper getDefaultThreadDumper() { + return new ThreadDumper.All(); + } + + @Override + protected TickCounter newTickCounter() { + throw new UnsupportedOperationException(); + } + }; + + @Override + public void onEnable() { + getProxy().getPluginManager().registerCommand(this, new Command("sparkbungee", null, "gprofiler") { + @Override + public void execute(CommandSender sender, String[] args) { + if (!sender.hasPermission("spark.profiler")) { + TextComponent msg = new TextComponent("You do not have permission to use this command."); + msg.setColor(ChatColor.RED); + sender.sendMessage(msg); + return; + } + + SparkBungeeCordPlugin.this.commandHandler.handleCommand(sender, args); + } + }); + } +} diff --git a/spark-bungeecord/src/main/resources/bungee.yml b/spark-bungeecord/src/main/resources/bungee.yml new file mode 100644 index 0000000..fa65fbc --- /dev/null +++ b/spark-bungeecord/src/main/resources/bungee.yml @@ -0,0 +1,5 @@ +name: spark +version: ${pluginVersion} +description: Spark is a CPU profiling plugin based on sk89q's WarmRoast profiler +author: Luck, sk89q +main: me.lucko.spark.bungeecord.SparkBungeeCordPlugin diff --git a/spark-common/build.gradle b/spark-common/build.gradle new file mode 100644 index 0000000..d59ce24 --- /dev/null +++ b/spark-common/build.gradle @@ -0,0 +1,6 @@ +dependencies { + compile 'com.squareup.okhttp3:okhttp:3.10.0' + compile 'com.squareup.okio:okio:1.14.0' + compileOnly 'com.google.code.gson:gson:2.7' + compileOnly 'com.google.guava:guava:19.0' +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/CommandHandler.java b/spark-common/src/main/java/me/lucko/spark/common/CommandHandler.java new file mode 100644 index 0000000..898bba7 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/CommandHandler.java @@ -0,0 +1,372 @@ +package me.lucko.spark.common; + +import com.google.common.collect.HashMultimap; +import com.google.common.collect.SetMultimap; +import com.google.common.collect.Sets; + +import me.lucko.spark.common.http.Bytebin; +import me.lucko.spark.profiler.Sampler; +import me.lucko.spark.profiler.SamplerBuilder; +import me.lucko.spark.profiler.ThreadDumper; +import me.lucko.spark.profiler.ThreadGrouper; +import me.lucko.spark.profiler.TickCounter; + +import java.io.IOException; +import java.util.ArrayList; +import 
java.util.Arrays; +import java.util.Iterator; +import java.util.List; +import java.util.Set; +import java.util.Timer; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.TimeUnit; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.stream.Collectors; + +/** + * Abstract command handling class used by all platforms. + * + * @param <T> the sender (e.g. CommandSender) type used by the platform + */ +public abstract class CommandHandler<T> { + + /** The URL of the viewer frontend */ + private static final String VIEWER_URL = "https://sparkprofiler.github.io/?"; + /** The prefix used in all messages */ + private static final String PREFIX = "&8[&fspark&8] &7"; + + /** + * The {@link Timer} being used by the {@link #activeSampler}. + */ + private final Timer samplingThread = new Timer("spark-sampling-thread", true); + + /** Guards {@link #activeSampler} */ + private final Object[] activeSamplerMutex = new Object[0]; + /** The WarmRoast instance currently running, if any */ + private Sampler activeSampler = null; + /** The tick monitor instance currently running, if any */ + private ReportingTickMonitor activeTickMonitor = null; + + + // abstract methods implemented by each platform + + protected abstract void sendMessage(T sender, String message); + protected abstract void sendMessage(String message); + protected abstract void sendLink(String url); + protected abstract void runAsync(Runnable r); + protected abstract ThreadDumper getDefaultThreadDumper(); + protected abstract TickCounter newTickCounter(); + + private void sendPrefixedMessage(T sender, String message) { + sendMessage(sender, PREFIX + message); + } + + private void sendPrefixedMessage(String message) { + sendMessage(PREFIX + message); + }
+ + public void handleCommand(T sender, String[] args) { + try { + if (args.length == 0) { + sendInfo(sender); + return; + } + + List<String> arguments = new ArrayList<>(Arrays.asList(args)); + switch (arguments.remove(0).toLowerCase()) { + case "start": + handleStart(sender, arguments); + break; + case "info": + handleInfo(sender); + break; + case "cancel": + handleCancel(sender); + break; + case "stop": + case "upload": + case "paste": + handleStop(sender); + break; + case "monitoring": + handleMonitoring(sender, arguments); + break; + default: + sendInfo(sender); + break; + } + } catch (IllegalArgumentException e) { + sendMessage(sender, "&c" + e.getMessage()); + } + }
+ + private void sendInfo(T sender) { + sendPrefixedMessage(sender, "&fspark profiler &7v1.0"); + sendMessage(sender, "&b&l> &7/profiler start"); + sendMessage(sender, " &8[&7--timeout&8 <seconds>]"); + sendMessage(sender, " &8[&7--thread&8 <thread names>]"); + sendMessage(sender, " &8[&7--not-combined]"); + sendMessage(sender, " &8[&7--interval&8 <interval millis>]"); + sendMessage(sender, " &8[&7--only-ticks-over&8 <tick length millis>]"); + sendMessage(sender, "&b&l> &7/profiler info"); + sendMessage(sender, "&b&l> &7/profiler stop"); + sendMessage(sender, "&b&l> &7/profiler cancel"); + sendMessage(sender, "&b&l> &7/profiler monitoring"); + sendMessage(sender, " &8[&7--threshold&8 <percentage increase>]"); + }
+ + private void handleStart(T sender, List<String> args) { + SetMultimap<String, String> arguments = parseArguments(args); + + int timeoutSeconds = parseInt(arguments, "timeout", "d"); + if (timeoutSeconds != -1 && timeoutSeconds <= 10) { + sendPrefixedMessage(sender, "&cThe specified timeout is not long enough for accurate results to be formed. Please choose a value greater than 10."); + return; + } + + if (timeoutSeconds != -1 && timeoutSeconds < 30) { + sendPrefixedMessage(sender, "&7The accuracy of the output will significantly improve when sampling is able to run for longer periods. Consider setting a timeout value over 30 seconds."); + } + + int intervalMillis = parseInt(arguments, "interval", "i"); + if (intervalMillis <= 0) { + intervalMillis = 4; + } + + Set<String> threads = Sets.union(arguments.get("thread"), arguments.get("t")); + ThreadDumper threadDumper; + if (threads.isEmpty()) { + // use the server thread + threadDumper = getDefaultThreadDumper(); + } else if (threads.contains("*")) { + threadDumper = ThreadDumper.ALL; + } else { + threadDumper = new ThreadDumper.Specific(threads); + } + + ThreadGrouper threadGrouper; + if (arguments.containsKey("not-combined")) { + threadGrouper = ThreadGrouper.BY_NAME; + } else { + threadGrouper = ThreadGrouper.BY_POOL; + } + + int ticksOver = parseInt(arguments, "only-ticks-over", "o"); + TickCounter tickCounter = null; + if (ticksOver != -1) { + try { + tickCounter = newTickCounter(); + } catch (UnsupportedOperationException e) { + sendPrefixedMessage(sender, "&cTick counting is not supported on BungeeCord!"); + return; + } + }
+ + Sampler sampler; + synchronized (this.activeSamplerMutex) { + if (this.activeSampler != null) { + sendPrefixedMessage(sender, "&7An active sampler is already running."); + return; + } + + sendPrefixedMessage("&7Initializing a new profiler, please wait..."); + + SamplerBuilder builder = new SamplerBuilder(); + builder.threadDumper(threadDumper); + builder.threadGrouper(threadGrouper); + if (timeoutSeconds != -1) { + builder.completeAfter(timeoutSeconds, TimeUnit.SECONDS); + } + builder.samplingInterval(intervalMillis); + if (ticksOver != -1) { + builder.ticksOver(ticksOver, tickCounter); + } + sampler = this.activeSampler = builder.start(this.samplingThread); + + sendPrefixedMessage("&bProfiler now active!"); + if (timeoutSeconds == -1) { + sendPrefixedMessage("&7Use '/profiler stop' to stop profiling and upload the results."); + } else { + sendPrefixedMessage("&7The results will be automatically returned after the profiler has been running for " + timeoutSeconds + " seconds."); + } + } + + CompletableFuture<Sampler> future = sampler.getFuture(); + + // send message if profiling fails + future.whenCompleteAsync((s, throwable) -> { + if (throwable != null) { + sendPrefixedMessage("&cSampling operation failed unexpectedly. Error: " + throwable.toString()); + throwable.printStackTrace(); + } + }); + + // set activeSampler to null when complete. + future.whenCompleteAsync((s, throwable) -> { + synchronized (this.activeSamplerMutex) { + if (sampler == this.activeSampler) { + this.activeSampler = null; + } + } + }); + + // await the result + if (timeoutSeconds != -1) { + future.thenAcceptAsync(s -> { + sendPrefixedMessage("&7The active sampling operation has completed! Uploading results..."); + handleUpload(s); + }); + } + }
+ + private void handleInfo(T sender) { + synchronized (this.activeSamplerMutex) { + if (this.activeSampler == null) { + sendPrefixedMessage(sender, "&7There isn't an active sampling task running."); + } else { + long timeout = this.activeSampler.getEndTime(); + if (timeout == -1) { + sendPrefixedMessage(sender, "&7There is an active sampler currently running, with no defined timeout."); + } else { + long timeoutDiff = (timeout - System.currentTimeMillis()) / 1000L; + sendPrefixedMessage(sender, "&7There is an active sampler currently running, due to timeout in " + timeoutDiff + " seconds."); + } + + long runningTime = (System.currentTimeMillis() - this.activeSampler.getStartTime()) / 1000L; + sendPrefixedMessage(sender, "&7It has been sampling for " + runningTime + " seconds so far."); + } + } + }
+ + private void handleStop(T sender) { + synchronized (this.activeSamplerMutex) { + if (this.activeSampler == null) { + sendPrefixedMessage(sender, "&7There isn't an active sampling task running."); + } else { + this.activeSampler.cancel(); + sendPrefixedMessage("&7The active sampling operation has been stopped! Uploading results..."); + handleUpload(this.activeSampler); + this.activeSampler = null; + } + } + } + + private void handleCancel(T sender) { + synchronized (this.activeSamplerMutex) { + if (this.activeSampler == null) { + sendPrefixedMessage(sender, "&7There isn't an active sampling task running."); + } else { + this.activeSampler.cancel(); + this.activeSampler = null; + sendPrefixedMessage("&bThe active sampling task has been cancelled."); + } + } + } + + private void handleUpload(Sampler sampler) { + runAsync(() -> { + byte[] output = sampler.formCompressedDataPayload(); + try { + String pasteId = Bytebin.postCompressedContent(output); + sendPrefixedMessage("&bSampling results:"); + sendLink(VIEWER_URL + pasteId); + } catch (IOException e) { + sendPrefixedMessage("&cAn error occurred whilst uploading the results."); + e.printStackTrace(); + } + }); + }
+ + private void handleMonitoring(T sender, List<String> args) { + SetMultimap<String, String> arguments = parseArguments(args); + + if (this.activeTickMonitor == null) { + + int threshold = parseInt(arguments, "threshold", "t"); + if (threshold == -1) { + threshold = 100; + } + + try { + TickCounter tickCounter = newTickCounter(); + this.activeTickMonitor = new ReportingTickMonitor(tickCounter, threshold); + } catch (UnsupportedOperationException e) { + sendPrefixedMessage(sender, "&cNot supported on BungeeCord!"); + } + } else { + this.activeTickMonitor.close(); + this.activeTickMonitor = null; + sendPrefixedMessage("&7Tick monitor disabled."); + } + } + + private class ReportingTickMonitor extends TickMonitor { + public ReportingTickMonitor(TickCounter tickCounter, int percentageChangeThreshold) { + super(tickCounter, percentageChangeThreshold); + } + + @Override + protected void sendMessage(String message) { + sendPrefixedMessage(message); + } + }
+ + private int parseInt(SetMultimap<String, String> arguments, String longArg, String shortArg) { + Iterator<String> it = Sets.union(arguments.get(longArg), arguments.get(shortArg)).iterator(); + if (it.hasNext()) { + try { + return Math.abs(Integer.parseInt(it.next())); + } catch (NumberFormatException e) { + throw new IllegalArgumentException("Invalid input for '" + longArg + "' argument. Please specify a number!"); + } + } + return -1; // undefined + } + + private static final Pattern FLAG_REGEX = Pattern.compile("--(.+)$|-([a-zA-Z])$"); + + private static SetMultimap<String, String> parseArguments(List<String> args) { + SetMultimap<String, String> arguments = HashMultimap.create(); + + String flag = null; + List<String> value = null; + + for (int i = 0; i < args.size(); i++) { + String arg = args.get(i); + + Matcher matcher = FLAG_REGEX.matcher(arg); + boolean matches = matcher.matches(); + + if (flag == null || matches) { + if (!matches) { + throw new IllegalArgumentException("Expected flag at position " + i + " but got '" + arg + "' instead!"); + } + + String match = matcher.group(1); + if (match == null) { + match = matcher.group(2); + } + + // store existing value, if present + if (flag != null) { + arguments.put(flag, value.stream().collect(Collectors.joining(" "))); + } + + flag = match.toLowerCase(); + value = new ArrayList<>(); + } else { + // part of a value + value.add(arg); + } + } + + // store remaining value, if present + if (flag != null) { + arguments.put(flag, value.stream().collect(Collectors.joining(" "))); + } + + return arguments; + } + +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/TickMonitor.java b/spark-common/src/main/java/me/lucko/spark/common/TickMonitor.java new file mode 100644 index 0000000..a30a4db --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/TickMonitor.java @@ -0,0 +1,88 @@ +package me.lucko.spark.common; + +import me.lucko.spark.profiler.TickCounter; + +import java.text.DecimalFormat; +import java.util.DoubleSummaryStatistics; + +public abstract class TickMonitor implements Runnable { + private static final DecimalFormat df = new DecimalFormat("#.##"); + + private final TickCounter tickCounter; + private final int percentageChangeThreshold; + + // data + private double lastTickTime = 0; + private State state = null; + private DoubleSummaryStatistics averageTickTime = new DoubleSummaryStatistics(); + private double avg; + + public TickMonitor(TickCounter tickCounter, int percentageChangeThreshold) { + this.tickCounter = tickCounter; + this.percentageChangeThreshold = percentageChangeThreshold; + + this.tickCounter.start(); + this.tickCounter.addTickTask(this); + } + + protected abstract void sendMessage(String message); + + public void close() { + this.tickCounter.close(); + } + + @Override + public void run() { + double now = ((double) System.nanoTime()) / 1000000d; + + // init + if (this.state == null) { + this.state = State.SETUP; + this.lastTickTime = now; + sendMessage("Tick monitor started.
Before the monitor becomes fully active, the server's " + + "average tick rate will be calculated over a period of 120 ticks (approx 6 seconds)."); + return; + } + + // find the diff + double diff = now - this.lastTickTime; + this.lastTickTime = now; + + // form averages + if (this.state == State.SETUP) { + this.averageTickTime.accept(diff); + + // move onto the next state + if (this.averageTickTime.getCount() >= 120) { + + sendMessage("&bAnalysis is now complete."); + sendMessage("&f> &7Max: " + df.format(this.averageTickTime.getMax()) + "ms"); + sendMessage("&f> &7Min: " + df.format(this.averageTickTime.getMin()) + "ms"); + sendMessage("&f> &7Avg: " + df.format(this.averageTickTime.getAverage()) + "ms"); + sendMessage("Starting now, any ticks with >" + this.percentageChangeThreshold + "% increase in " + + "duration compared to the average will be reported."); + + this.avg = this.averageTickTime.getAverage(); + this.state = State.MONITORING; + } + } + + if (this.state == State.MONITORING) { + double increase = diff - this.avg; + if (increase <= 0) { + return; + } + + double percentageChange = (increase * 100d) / this.avg; + if (percentageChange > this.percentageChangeThreshold) { + sendMessage("&7Tick &8#" + this.tickCounter.getCurrentTick() + " &7lasted &b" + df.format(diff) + "&7 milliseconds. " + + "&7(&b" + df.format(percentageChange) + "% &7increase from average)"); + } + } + } + + private enum State { + SETUP, + MONITORING + } +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/http/Bytebin.java b/spark-common/src/main/java/me/lucko/spark/common/http/Bytebin.java new file mode 100644 index 0000000..3cd5e4c --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/http/Bytebin.java @@ -0,0 +1,54 @@ +package me.lucko.spark.common.http; + +import com.google.gson.Gson; +import com.google.gson.JsonObject; + +import okhttp3.MediaType; +import okhttp3.Request; +import okhttp3.RequestBody; +import okhttp3.Response; +import okhttp3.ResponseBody; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.charset.StandardCharsets; + +/** + * Utility for uploading JSON data to bytebin. 
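+ * + * <p>Illustrative usage, as a hedged sketch (assumes the payload is already gzip-compressed JSON, e.g. from {@code Sampler#formCompressedDataPayload()}):</p> + * <pre> + * byte[] payload = sampler.formCompressedDataPayload(); + * String key = Bytebin.postCompressedContent(payload); // may throw IOException + * String viewerUrl = "https://sparkprofiler.github.io/?" + key; + * </pre>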
+ */ +public final class Bytebin { + + /** Media type for JSON data */ + private static final MediaType JSON_TYPE = MediaType.parse("application/json; charset=utf-8"); + /** The URL used to upload sampling data */ + private static final String UPLOAD_ENDPOINT = "https://bytebin.lucko.me/post"; + + public static String postCompressedContent(byte[] buf) throws IOException { + RequestBody body = RequestBody.create(JSON_TYPE, buf); + + Request.Builder requestBuilder = new Request.Builder() + .url(UPLOAD_ENDPOINT) + .header("Content-Encoding", "gzip") + .post(body); + + Request request = requestBuilder.build(); + try (Response response = HttpClient.makeCall(request)) { + try (ResponseBody responseBody = response.body()) { + if (responseBody == null) { + throw new RuntimeException("No response"); + } + + try (InputStream inputStream = responseBody.byteStream()) { + try (BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8))) { + JsonObject object = new Gson().fromJson(reader, JsonObject.class); + return object.get("key").getAsString(); + } + } + } + } + } + + private Bytebin() {}
+} diff --git a/spark-common/src/main/java/me/lucko/spark/common/http/HttpClient.java b/spark-common/src/main/java/me/lucko/spark/common/http/HttpClient.java new file mode 100644 index 0000000..61db597 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/http/HttpClient.java @@ -0,0 +1,113 @@ +/* + * This file is part of LuckPerms, licensed under the MIT License. + * + * Copyright (c) lucko (Luck) <luck@lucko.me> + * Copyright (c) contributors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package me.lucko.spark.common.http; + +import okhttp3.Interceptor; +import okhttp3.OkHttpClient; +import okhttp3.Request; +import okhttp3.Response; +import okhttp3.ResponseBody; + +import java.io.IOException; +import java.net.Proxy; +import java.net.ProxySelector; +import java.net.SocketAddress; +import java.net.URI; +import java.util.Collections; +import java.util.List; + +/** + * Utility class for making http requests.
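+ * + * <p>A minimal usage sketch (the URL here is a placeholder); responses are returned open, so callers should close them, e.g. with try-with-resources:</p> + * <pre> + * Request request = new Request.Builder().url("https://example.com").build(); + * try (Response response = HttpClient.makeCall(request)) { + *     // makeCall has already thrown if the response was not successful + * } + * </pre>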
+ */ +public final class HttpClient { + private static OkHttpClient client = null; + + private static synchronized OkHttpClient getClient() { + if (client == null) { + client = new OkHttpClient.Builder() + .proxySelector(new NullSafeProxySelector()) + .addInterceptor(new UserAgentInterceptor()) + .build(); + } + return client; + } + + public static Response makeCall(Request request) throws IOException { + Response response = getClient().newCall(request).execute(); + if (!response.isSuccessful()) { + throw exceptionForUnsuccessfulResponse(response); + } + return response; + } + + private static RuntimeException exceptionForUnsuccessfulResponse(Response response) { + String msg = ""; + try (ResponseBody responseBody = response.body()) { + if (responseBody != null) { + msg = responseBody.string(); + } + } catch (IOException e) { + // ignore + } + return new RuntimeException("Got response: " + response.code() + " - " + response.message() + " - " + msg); + }
+ + private static final class UserAgentInterceptor implements Interceptor { + @Override + public Response intercept(Chain chain) throws IOException { + Request orig = chain.request(); + Request modified = orig.newBuilder() + .header("User-Agent", "spark-plugin") + .build(); + + return chain.proceed(modified); + } + } + + // sometimes ProxySelector#getDefault returns null, and okhttp doesn't like that + private static final class NullSafeProxySelector extends ProxySelector { + private static final List<Proxy> DIRECT = Collections.singletonList(Proxy.NO_PROXY); + + @Override + public List<Proxy> select(URI uri) { + ProxySelector def = ProxySelector.getDefault(); + if (def == null) { + return DIRECT; + } + return def.select(uri); + } + + @Override + public void connectFailed(URI uri, SocketAddress sa, IOException ioe) { + ProxySelector def = ProxySelector.getDefault(); + if (def != null) { + def.connectFailed(uri, sa, ioe); + } + } + } + + private HttpClient() {}
+} \ No newline at end of file diff --git a/spark-common/src/main/java/me/lucko/spark/profiler/AsyncDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/profiler/AsyncDataAggregator.java new file mode 100644 index 0000000..9a4090e --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/profiler/AsyncDataAggregator.java @@ -0,0 +1,77 @@ +package me.lucko.spark.profiler; + +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.TimeUnit; + +/** + * Implementation of {@link DataAggregator} that makes use of a "worker" thread pool for inserting + * data.
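+ * + * <p>A rough sketch of the intended wiring (the pool size, interval and thread name are illustrative; {@link Sampler} normally performs this setup):</p> + * <pre> + * ExecutorService pool = Executors.newFixedThreadPool(6); + * DataAggregator aggregator = new AsyncDataAggregator(pool, ThreadGrouper.BY_POOL, 4); + * aggregator.insertData("Server thread", stackTrace); + * Map&lt;String, StackNode&gt; data = aggregator.getData(); // shuts the pool down + * </pre>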
+ */ +public class AsyncDataAggregator implements DataAggregator { + + /** A map of root stack nodes for each thread with sampling data */ + private final Map<String, StackNode> threadData = new ConcurrentHashMap<>(); + + /** The worker pool for inserting stack nodes */ + private final ExecutorService workerPool; + + /** The instance used to group threads together */ + private final ThreadGrouper threadGrouper; + + /** The interval to wait between sampling, in milliseconds */ + private final int interval; + + public AsyncDataAggregator(ExecutorService workerPool, ThreadGrouper threadGrouper, int interval) { + this.workerPool = workerPool; + this.threadGrouper = threadGrouper; + this.interval = interval; + } + + @Override + public void insertData(String threadName, StackTraceElement[] stack) { + // form the queued data + QueuedThreadInfo queuedData = new QueuedThreadInfo(threadName, stack); + // schedule insertion of the data + this.workerPool.execute(queuedData); + } + + @Override + public Map<String, StackNode> getData() { + // wait for all pending data to be inserted + this.workerPool.shutdown(); + try { + this.workerPool.awaitTermination(15, TimeUnit.SECONDS); + } catch (InterruptedException e) { + e.printStackTrace(); + } + + return this.threadData; + } + + void insertData(QueuedThreadInfo data) { + try { + String group = this.threadGrouper.getGroup(data.threadName); + StackNode node = this.threadData.computeIfAbsent(group, StackNode::new); + node.log(data.stack, this.interval); + } catch (Exception e) { + e.printStackTrace(); + } + } + + private final class QueuedThreadInfo implements Runnable { + private final String threadName; + private final StackTraceElement[] stack; + + QueuedThreadInfo(String threadName, StackTraceElement[] stack) { + this.threadName = threadName; + this.stack = stack; + } + + @Override + public void run() { + insertData(this); + } + }
+} diff --git a/spark-common/src/main/java/me/lucko/spark/profiler/DataAggregator.java b/spark-common/src/main/java/me/lucko/spark/profiler/DataAggregator.java new file mode 100644 index 0000000..1afa52c --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/profiler/DataAggregator.java @@ -0,0 +1,32 @@ +package me.lucko.spark.profiler; + +import java.util.Map; + +/** + * Aggregates sampling data. + */ +public interface DataAggregator { + + /** + * Called before the sampler begins to insert data + */ + default void start() { + + } + + /** + * Forms the output data + * + * @return the output data + */ + Map<String, StackNode> getData(); + + /** + * Inserts sampling data into this aggregator + * + * @param threadName the name of the thread + * @param stack the call stack + */ + void insertData(String threadName, StackTraceElement[] stack); + +} diff --git a/spark-common/src/main/java/me/lucko/spark/profiler/Sampler.java b/spark-common/src/main/java/me/lucko/spark/profiler/Sampler.java new file mode 100644 index 0000000..3476f03 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/profiler/Sampler.java @@ -0,0 +1,170 @@ +/* + * WarmRoast + * Copyright (C) 2013 Albert Pham <http://www.sk89q.com> + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. +*/ + +package me.lucko.spark.profiler; + +import com.google.common.util.concurrent.ThreadFactoryBuilder; +import com.google.gson.stream.JsonWriter; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.OutputStreamWriter; +import java.io.Writer; +import java.lang.management.ManagementFactory; +import java.lang.management.ThreadInfo; +import java.lang.management.ThreadMXBean; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Timer; +import java.util.TimerTask; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.zip.GZIPOutputStream; + +/** + * Main sampler class. + */ +public class Sampler extends TimerTask { + private static final AtomicInteger THREAD_ID = new AtomicInteger(0); + + /** The worker pool for inserting stack nodes */ + private final ExecutorService workerPool = Executors.newFixedThreadPool( + 6, new ThreadFactoryBuilder().setNameFormat("spark-worker-" + THREAD_ID.getAndIncrement()).build() + ); + + /** The thread management interface for the current JVM */ + private final ThreadMXBean threadBean = ManagementFactory.getThreadMXBean(); + /** The instance used to generate thread information for use in sampling */ + private final ThreadDumper threadDumper; + /** Responsible for aggregating and then outputting collected sampling data */ + private final DataAggregator dataAggregator; + + /** A future to encapsulate the completion of this sampler instance */ + private final CompletableFuture<Sampler> future = new CompletableFuture<>(); + + /** The interval to wait between sampling, in milliseconds */ + private final int interval; + /** The time when sampling first began */ + private long startTime = -1; + /** The unix timestamp (in millis) when this sampler should automatically complete. */ + private final long endTime; // -1 for nothing + + public Sampler(int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime) { + this.threadDumper = threadDumper; + this.dataAggregator = new AsyncDataAggregator(this.workerPool, threadGrouper, interval); + this.interval = interval; + this.endTime = endTime; + } + + public Sampler(int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime, TickCounter tickCounter, int tickLengthThreshold) { + this.threadDumper = threadDumper; + this.dataAggregator = new TickedDataAggregator(this.workerPool, tickCounter, threadGrouper, interval, tickLengthThreshold); + this.interval = interval; + this.endTime = endTime; + } + + /** + * Starts the sampler.
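+ * Sampling is scheduled on the given timer at a fixed rate of the configured interval, and {@link DataAggregator#start()} is invoked first so that tick counting (if used) begins before the first samples arrive.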
+ * + * @param samplingThread the timer to schedule the sampling on + */ + public void start(Timer samplingThread) { + this.startTime = System.currentTimeMillis(); + this.dataAggregator.start(); + samplingThread.scheduleAtFixedRate(this, 0, this.interval); + } + + public long getStartTime() { + if (this.startTime == -1) { + throw new IllegalStateException("Not yet started"); + } + return this.startTime; + } + + public long getEndTime() { + return this.endTime; + } + + public CompletableFuture<Sampler> getFuture() { + return this.future; + } + + @Override + public void run() { + try { + if (this.endTime != -1 && this.endTime <= System.currentTimeMillis()) { + this.future.complete(this); + cancel(); + return; + } + + ThreadInfo[] threadDumps = this.threadDumper.dumpThreads(this.threadBean); + for (ThreadInfo threadInfo : threadDumps) { + String threadName = threadInfo.getThreadName(); + StackTraceElement[] stack = threadInfo.getStackTrace(); + + if (threadName == null || stack == null) { + continue; + } + + this.dataAggregator.insertData(threadName, stack); + } + } catch (Throwable t) { + this.future.completeExceptionally(t); + cancel(); + } + }
+ + private void writeOutput(JsonWriter writer) throws IOException { + writer.beginObject(); + + writer.name("threads").beginArray(); + + List<Map.Entry<String, StackNode>> data = new ArrayList<>(this.dataAggregator.getData().entrySet()); + data.sort(Map.Entry.comparingByKey()); + + for (Map.Entry<String, StackNode> entry : data) { + writer.beginObject(); + writer.name("threadName").value(entry.getKey()); + writer.name("totalTime").value(entry.getValue().getTotalTime()); + writer.name("rootNode"); + entry.getValue().serializeTo(writer); + writer.endObject(); + } + + writer.endArray(); + writer.endObject(); + } + + public byte[] formCompressedDataPayload() { + ByteArrayOutputStream byteOut = new ByteArrayOutputStream(); + try (Writer writer = new OutputStreamWriter(new GZIPOutputStream(byteOut), StandardCharsets.UTF_8)) { + try (JsonWriter jsonWriter = new JsonWriter(writer)) { + writeOutput(jsonWriter); + } + } catch (IOException e) { + throw new RuntimeException(e); + } + return byteOut.toByteArray(); + } + +} diff --git a/spark-common/src/main/java/me/lucko/spark/profiler/SamplerBuilder.java b/spark-common/src/main/java/me/lucko/spark/profiler/SamplerBuilder.java new file mode 100644 index 0000000..7db0515 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/profiler/SamplerBuilder.java @@ -0,0 +1,63 @@ +package me.lucko.spark.profiler; + +import java.util.Timer; +import java.util.concurrent.TimeUnit; + +/** + * Builds {@link Sampler} instances.
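+ * + * <p>A hedged sketch of the builder flow, mirroring what the command handler does (the timeout value and timer name here are illustrative):</p> + * <pre> + * Sampler sampler = new SamplerBuilder() + *         .threadDumper(ThreadDumper.ALL) + *         .threadGrouper(ThreadGrouper.BY_POOL) + *         .completeAfter(60, TimeUnit.SECONDS) + *         .samplingInterval(4) + *         .start(new Timer("spark-sampling-thread", true)); + * </pre>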
+ */ +public class SamplerBuilder { + + private int samplingInterval = 4; + private long timeout = -1; + private ThreadDumper threadDumper = ThreadDumper.ALL; + private ThreadGrouper threadGrouper = ThreadGrouper.BY_NAME; + + private int ticksOver = -1; + private TickCounter tickCounter = null; + + public SamplerBuilder() { + } + + public SamplerBuilder samplingInterval(int samplingInterval) { + this.samplingInterval = samplingInterval; + return this; + } + + public SamplerBuilder completeAfter(long timeout, TimeUnit unit) { + if (timeout <= 0) { + throw new IllegalArgumentException("timeout > 0"); + } + this.timeout = System.currentTimeMillis() + unit.toMillis(timeout); + return this; + } + + public SamplerBuilder threadDumper(ThreadDumper threadDumper) { + this.threadDumper = threadDumper; + return this; + } + + public SamplerBuilder threadGrouper(ThreadGrouper threadGrouper) { + this.threadGrouper = threadGrouper; + return this; + } + + public SamplerBuilder ticksOver(int ticksOver, TickCounter tickCounter) { + this.ticksOver = ticksOver; + this.tickCounter = tickCounter; + return this; + } + + public Sampler start(Timer samplingThread) { + Sampler sampler; + if (this.ticksOver != -1 && this.tickCounter != null) { + sampler = new Sampler(this.samplingInterval, this.threadDumper, this.threadGrouper, this.timeout, this.tickCounter, this.ticksOver); + } else { + sampler = new Sampler(this.samplingInterval, this.threadDumper, this.threadGrouper, this.timeout); + } + + sampler.start(samplingThread); + return sampler; + } + +} diff --git a/spark-common/src/main/java/me/lucko/spark/profiler/StackNode.java b/spark-common/src/main/java/me/lucko/spark/profiler/StackNode.java new file mode 100644 index 0000000..575400a --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/profiler/StackNode.java @@ -0,0 +1,141 @@ +/* + * WarmRoast + * Copyright (C) 2013 Albert Pham <http://www.sk89q.com> + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. +*/ + +package me.lucko.spark.profiler; + +import com.google.gson.stream.JsonWriter; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.atomic.LongAdder; + +/** + * Represents a node in the overall sampling stack. + * + *

<p>The base implementation of this class is only used for the root of node structures. The + * {@link StackTraceNode} class is used for representing method calls in the structure.</p>
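+ * + * <p>For reference, {@link #serializeTo(JsonWriter)} emits JSON of the shape shown below (field values illustrative; {@link StackTraceNode} children carry {@code className} and {@code methodName} instead of {@code name}): + * <pre> + * {"name": "Server thread", "totalTime": 40, "children": [ ... ]} + * </pre>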

+ */ +public class StackNode implements Comparable<StackNode> { + + private static final int MAX_STACK_DEPTH = 300; + + /** + * The name of this node + */ + private final String name; + + /** + * A map of this node's children + */ + private final Map<String, StackNode> children = new ConcurrentHashMap<>(); + + /** + * The accumulated sample time for this node + */ + private final LongAdder totalTime = new LongAdder(); + + public StackNode(String name) { + this.name = name; + } + + public String getName() { + return this.name; + } + + public Collection<StackNode> getChildren() { + if (this.children.isEmpty()) { + return Collections.emptyList(); + } + + List<StackNode> list = new ArrayList<>(this.children.values()); + list.sort(null); + return list; + } + + private StackNode resolveChild(String name) { + return this.children.computeIfAbsent(name, StackNode::new); + } + + private StackNode resolveChild(String className, String methodName) { + return this.children.computeIfAbsent(StackTraceNode.formName(className, methodName), name -> new StackTraceNode(className, methodName)); + } + + public long getTotalTime() { + return this.totalTime.longValue(); + } + + public void accumulateTime(long time) { + this.totalTime.add(time); + } + + private void log(StackTraceElement[] elements, int skip, long time) { + accumulateTime(time); + + if (skip >= MAX_STACK_DEPTH) { + return; + } + + if (elements.length - skip == 0) { + return; + } + + StackTraceElement bottom = elements[elements.length - (skip + 1)]; + resolveChild(bottom.getClassName(), bottom.getMethodName()).log(elements, skip + 1, time); + } + + public void log(StackTraceElement[] elements, long time) { + log(elements, 0, time); + } + + @Override + public int compareTo(StackNode o) { + return getName().compareTo(o.getName()); + } + + public void serializeTo(JsonWriter writer) throws IOException { + writer.beginObject(); + + // append metadata about this node + appendMetadata(writer); + + // include the total time recorded for this node + writer.name("totalTime").value(getTotalTime()); + + // append child nodes, if any are present + Collection<StackNode> childNodes = getChildren(); + if (!childNodes.isEmpty()) { + writer.name("children").beginArray(); + for (StackNode child : childNodes) { + child.serializeTo(writer); + } + writer.endArray(); + } + + writer.endObject(); + } + + protected void appendMetadata(JsonWriter writer) throws IOException { + writer.name("name").value(getName()); + } + +} diff --git a/spark-common/src/main/java/me/lucko/spark/profiler/StackTraceNode.java b/spark-common/src/main/java/me/lucko/spark/profiler/StackTraceNode.java new file mode 100644 index 0000000..d46a547 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/profiler/StackTraceNode.java @@ -0,0 +1,71 @@ +/* + * WarmRoast + * Copyright (C) 2013 Albert Pham <http://www.sk89q.com> + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>.
+*/ + +package me.lucko.spark.profiler; + +import com.google.gson.stream.JsonWriter; + +import java.io.IOException; + +/** + * Represents a {@link StackNode node} for a method call. + */ +public class StackTraceNode extends StackNode { + + /** + * Forms the {@link StackNode#getName()} for a {@link StackTraceNode}. + * + * @param className the name of the class + * @param methodName the name of the method + * @return the name + */ + static String formName(String className, String methodName) { + return className + "." + methodName + "()"; + } + + /** The name of the class */ + private final String className; + /** The name of the method */ + private final String methodName; + + public StackTraceNode(String className, String methodName) { + super(formName(className, methodName)); + this.className = className; + this.methodName = methodName; + } + + public String getClassName() { + return this.className; + } + + public String getMethodName() { + return this.methodName; + } + + @Override + protected void appendMetadata(JsonWriter writer) throws IOException { + writer.name("className").value(this.className); + writer.name("methodName").value(this.methodName); + } + + @Override + public int compareTo(StackNode that) { + return Long.compare(that.getTotalTime(), this.getTotalTime()); + } + +} diff --git a/spark-common/src/main/java/me/lucko/spark/profiler/ThreadDumper.java b/spark-common/src/main/java/me/lucko/spark/profiler/ThreadDumper.java new file mode 100644 index 0000000..68d7dc9 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/profiler/ThreadDumper.java @@ -0,0 +1,77 @@ +/* + * WarmRoast + * Copyright (C) 2013 Albert Pham <http://www.sk89q.com> + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. + */ + +package me.lucko.spark.profiler; + +import java.lang.management.ThreadInfo; +import java.lang.management.ThreadMXBean; +import java.util.Set; +import java.util.stream.Collectors; + +/** + * Uses the {@link ThreadMXBean} to generate {@link ThreadInfo} instances for the threads being + * sampled. + */ +@FunctionalInterface +public interface ThreadDumper { + + /** + * Generates {@link ThreadInfo} data for the sampled threads. + * + * @param threadBean the thread bean instance to obtain the data from + * @return an array of generated thread info instances + */ + ThreadInfo[] dumpThreads(ThreadMXBean threadBean); + + /** + * Implementation of {@link ThreadDumper} that generates data for all threads. + */ + ThreadDumper ALL = new All(); + + final class All implements ThreadDumper { + @Override + public ThreadInfo[] dumpThreads(ThreadMXBean threadBean) { + return threadBean.dumpAllThreads(false, false); + } + } + + /** + * Implementation of {@link ThreadDumper} that generates data for a specific set of threads.
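+ * + * <p>For example, {@code new ThreadDumper.Specific(Collections.singleton("Server thread"))} captures just the thread with that name; matching is case-insensitive, and the name-based constructor resolves thread IDs once, at construction time.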
+ */ + final class Specific implements ThreadDumper { + private final long[] ids; + + public Specific(long[] ids) { + this.ids = ids; + } + + public Specific(Set<String> names) { + Set<String> threadNamesLower = names.stream().map(String::toLowerCase).collect(Collectors.toSet()); + this.ids = Thread.getAllStackTraces().keySet().stream() + .filter(t -> threadNamesLower.contains(t.getName().toLowerCase())) + .mapToLong(Thread::getId) + .toArray(); + } + + @Override + public ThreadInfo[] dumpThreads(ThreadMXBean threadBean) { + return threadBean.getThreadInfo(this.ids, Integer.MAX_VALUE); + } + } + +} diff --git a/spark-common/src/main/java/me/lucko/spark/profiler/ThreadGrouper.java b/spark-common/src/main/java/me/lucko/spark/profiler/ThreadGrouper.java new file mode 100644 index 0000000..56a6cc4 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/profiler/ThreadGrouper.java @@ -0,0 +1,52 @@ +package me.lucko.spark.profiler; + +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** + * Function for grouping threads together + */ +@FunctionalInterface +public interface ThreadGrouper { + + /** + * Gets the group for the given thread. + * + * @param threadName the name of the thread + * @return the group + */ + String getGroup(String threadName); + + /** + * Implementation of {@link ThreadGrouper} that just groups by thread name. + */ + ThreadGrouper BY_NAME = new ByName(); + + final class ByName implements ThreadGrouper { + @Override + public String getGroup(String threadName) { + return threadName; + } + } + + /** + * Implementation of {@link ThreadGrouper} that attempts to group by the name of the pool + * the thread originated from. + */ + ThreadGrouper BY_POOL = new ByPool(); + + final class ByPool implements ThreadGrouper { + private static final Pattern THREAD_POOL_PATTERN = Pattern.compile("^(.*)[-#] ?\\d+$"); + + @Override + public String getGroup(String threadName) { + Matcher matcher = THREAD_POOL_PATTERN.matcher(threadName); + if (!matcher.matches()) { + return threadName; + } + + return matcher.group(1).trim() + " (Combined)"; + } + } + +} diff --git a/spark-common/src/main/java/me/lucko/spark/profiler/TickCounter.java b/spark-common/src/main/java/me/lucko/spark/profiler/TickCounter.java new file mode 100644 index 0000000..53a9c27 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/profiler/TickCounter.java @@ -0,0 +1,39 @@ +package me.lucko.spark.profiler; + +/** + * A hook with the game's "tick loop".
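+ * + * <p>Implementations should increment the tick count once per server tick, and run each task registered via {@link #addTickTask(Runnable)} on every increment.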
+ */ +public interface TickCounter { + + /** + * Starts the counter + */ + void start(); + + /** + * Stops the counter + */ + void close(); + + /** + * Gets the current tick number + * + * @return the current tick + */ + long getCurrentTick(); + + /** + * Adds a task to be called each time the tick increments + * + * @param runnable the task + */ + void addTickTask(Runnable runnable); + + /** + * Removes a tick task + * + * @param runnable the task + */ + void removeTickTask(Runnable runnable); + +} diff --git a/spark-common/src/main/java/me/lucko/spark/profiler/TickedDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/profiler/TickedDataAggregator.java new file mode 100644 index 0000000..abca4b3 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/profiler/TickedDataAggregator.java @@ -0,0 +1,147 @@ +package me.lucko.spark.profiler; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.TimeUnit; + +/** + * Implementation of {@link DataAggregator} which supports only including sampling data from "ticks" + * which exceed a certain threshold in duration.
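+ * + * <p>Tick length is approximated as {@code number-of-samples * interval}: with a 4ms sampling interval, for instance, a tick that produced 30 samples is treated as roughly 120ms long and would pass a 100ms threshold, while a 10-sample (~40ms) tick would be discarded. The figures here are illustrative; both the interval and the threshold are supplied by the caller.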
+ */ +public class TickedDataAggregator implements DataAggregator { + + /** A map of root stack nodes for each thread with sampling data */ + private final Map<String, StackNode> threadData = new ConcurrentHashMap<>(); + + /** The worker pool for inserting stack nodes */ + private final ExecutorService workerPool; + + /** Used to monitor the current "tick" of the server */ + private final TickCounter tickCounter; + + /** The instance used to group threads together */ + private final ThreadGrouper threadGrouper; + + /** The interval to wait between sampling, in milliseconds */ + private final int interval; + + /** Tick durations under this threshold will not be inserted */ + private final int tickLengthThreshold; + + /** The expected number of samples in each tick */ + private final int expectedSize; + + // state + private long currentTick = -1; + private TickList currentData = new TickList(0); + + public TickedDataAggregator(ExecutorService workerPool, TickCounter tickCounter, ThreadGrouper threadGrouper, int interval, int tickLengthThreshold) { + this.workerPool = workerPool; + this.tickCounter = tickCounter; + this.threadGrouper = threadGrouper; + this.interval = interval; + this.tickLengthThreshold = tickLengthThreshold; + // 50 millis in a tick, plus 10 so we have a bit of room to go over + this.expectedSize = (50 / interval) + 10; + } + + // this is effectively synchronized by the Timer instance in Sampler + @Override + public void insertData(String threadName, StackTraceElement[] stack) { + long tick = this.tickCounter.getCurrentTick(); + if (this.currentTick != tick) { + pushCurrentTick(); + this.currentTick = tick; + this.currentData = new TickList(this.expectedSize); + } + + // form the queued data + QueuedThreadInfo queuedData = new QueuedThreadInfo(threadName, stack); + // insert it + this.currentData.addData(queuedData); + } + + private void pushCurrentTick() { + TickList currentData = this.currentData; + + // approximate how long the tick lasted + int tickLengthMillis = currentData.getList().size() * this.interval; + + // don't push data below the threshold + if (tickLengthMillis < this.tickLengthThreshold) { + return; + } + + this.workerPool.submit(currentData); + } + + @Override + public void start() { + this.tickCounter.start(); + } + + @Override + public Map<String, StackNode> getData() { + // push the current tick + pushCurrentTick(); + + // close the tick counter + this.tickCounter.close(); + + // wait for all pending data to be inserted + this.workerPool.shutdown(); + try { + this.workerPool.awaitTermination(15, TimeUnit.SECONDS); + } catch (InterruptedException e) { + e.printStackTrace(); + } + + return this.threadData; + } + + void insertData(List<QueuedThreadInfo> dataList) { + for (QueuedThreadInfo data : dataList) { + try { + String group = this.threadGrouper.getGroup(data.threadName); + StackNode node = this.threadData.computeIfAbsent(group, StackNode::new); + node.log(data.stack, this.interval); + } catch (Exception e) { + e.printStackTrace(); + } + } + } + + private final class TickList implements Runnable { + private final List<QueuedThreadInfo> list; + + TickList(int expectedSize) { + this.list = new ArrayList<>(expectedSize); + } + + @Override + public void run() { + insertData(this.list); + } + + public List<QueuedThreadInfo> getList() { + return this.list; + } + + public void addData(QueuedThreadInfo data) { + this.list.add(data); + } + } + + private static final class QueuedThreadInfo { + private final String threadName; + private final StackTraceElement[] stack; + + QueuedThreadInfo(String threadName, StackTraceElement[] stack) { + this.threadName = threadName; + this.stack = stack; + } + } +} diff --git a/spark-sponge/build.gradle b/spark-sponge/build.gradle new file mode 100644 index 0000000..c2f0efc --- /dev/null +++ b/spark-sponge/build.gradle @@ -0,0 +1,14 @@ +plugins { + id 'ninja.miserable.blossom' version '1.0.1' +} + +dependencies { + compile project(':spark-common') + compileOnly 'org.spongepowered:spongeapi:8.0.0-SNAPSHOT' + annotationProcessor 'org.spongepowered:spongeapi:8.0.0-SNAPSHOT' +} + +blossom { + replaceTokenIn('src/main/java/me/lucko/spark/sponge/SparkSpongePlugin.java') + replaceToken '@version@', project.pluginVersion +} \ No newline at end of file diff --git a/spark-sponge/src/main/java/me/lucko/spark/sponge/SparkSpongePlugin.java b/spark-sponge/src/main/java/me/lucko/spark/sponge/SparkSpongePlugin.java new file mode 100644 index 0000000..845d2c1 --- /dev/null +++ b/spark-sponge/src/main/java/me/lucko/spark/sponge/SparkSpongePlugin.java @@ -0,0 +1,145 @@ +package me.lucko.spark.sponge; + +import com.google.inject.Inject; + +import me.lucko.spark.common.CommandHandler; +import me.lucko.spark.profiler.ThreadDumper; +import me.lucko.spark.profiler.TickCounter; + +import org.spongepowered.api.Game; +import org.spongepowered.api.Sponge; +import org.spongepowered.api.command.CommandCallable; +import org.spongepowered.api.command.CommandResult; +import org.spongepowered.api.command.CommandSource; +import org.spongepowered.api.entity.living.player.Player; +import org.spongepowered.api.event.Listener; +import org.spongepowered.api.event.game.state.GameStartedServerEvent; +import org.spongepowered.api.plugin.Plugin; +import org.spongepowered.api.scheduler.AsynchronousExecutor; +import org.spongepowered.api.scheduler.SpongeExecutorService; +import org.spongepowered.api.text.Text; +import org.spongepowered.api.text.action.TextActions; +import org.spongepowered.api.text.format.TextColors; +import org.spongepowered.api.text.serializer.TextSerializers; +import org.spongepowered.api.world.Location; +import org.spongepowered.api.world.World; + +import java.net.MalformedURLException; +import java.net.URL; +import java.util.Collections; +import java.util.List; +import java.util.Optional; + +import javax.annotation.Nullable; + +@Plugin( + id = "spark", + name = "spark", + version = "@version@", + description = "Spark is a CPU profiling plugin based on sk89q's WarmRoast profiler",
= "Spark is a CPU profiling plugin based on sk89q's WarmRoast profiler", + authors = {"Luck", "sk89q"} +) +public class SparkSpongePlugin implements CommandCallable { + + private final CommandHandler commandHandler = new CommandHandler() { + private Text colorize(String message) { + return TextSerializers.FORMATTING_CODE.deserialize(message); + } + + private void broadcast(Text msg) { + Sponge.getServer().getConsole().sendMessage(msg); + for (Player player : Sponge.getServer().getOnlinePlayers()) { + if (player.hasPermission("spark.profiler")) { + player.sendMessage(msg); + } + } + } + + @Override + protected void sendMessage(CommandSource sender, String message) { + sender.sendMessage(colorize(message)); + } + + @Override + protected void sendMessage(String message) { + Text msg = colorize(message); + broadcast(msg); + } + + @Override + protected void sendLink(String url) { + try { + Text msg = Text.builder(url) + .color(TextColors.GRAY) + .onClick(TextActions.openUrl(new URL(url))) + .build(); + broadcast(msg); + } catch (MalformedURLException e) { + e.printStackTrace(); + } + } + + @Override + protected void runAsync(Runnable r) { + asyncExecutor.execute(r); + } + + @Override + protected ThreadDumper getDefaultThreadDumper() { + return new ThreadDumper.Specific(new long[]{Thread.currentThread().getId()}); + } + + @Override + protected TickCounter newTickCounter() { + return new SpongeTickCounter(SparkSpongePlugin.this); + } + }; + + @Inject + @AsynchronousExecutor + private SpongeExecutorService asyncExecutor; + + @Inject + private Game game; + + @Listener + public void onServerStart(GameStartedServerEvent event) { + game.getCommandManager().register(this, this, "spark", "profiler"); + } + + @Override + public CommandResult process(CommandSource source, String arguments) { + if (!testPermission(source)) { + source.sendMessage(Text.builder("You do not have permission to use this command.").color(TextColors.RED).build()); + return CommandResult.empty(); + } + + commandHandler.handleCommand(source, arguments.split(" ")); + return CommandResult.empty(); + } + + @Override + public List getSuggestions(CommandSource source, String arguments, @Nullable Location targetPosition) { + return Collections.emptyList(); + } + + @Override + public boolean testPermission(CommandSource source) { + return source.hasPermission("spark.profiler"); + } + + @Override + public Optional getShortDescription(CommandSource source) { + return Optional.of(Text.of("Main spark plugin command")); + } + + @Override + public Optional getHelp(CommandSource source) { + return Optional.of(Text.of("Run '/profiler' to view usage.")); + } + + @Override + public Text getUsage(CommandSource source) { + return Text.of("Run '/profiler' to view usage."); + } +} diff --git a/spark-sponge/src/main/java/me/lucko/spark/sponge/SpongeTickCounter.java b/spark-sponge/src/main/java/me/lucko/spark/sponge/SpongeTickCounter.java new file mode 100644 index 0000000..bda2a69 --- /dev/null +++ b/spark-sponge/src/main/java/me/lucko/spark/sponge/SpongeTickCounter.java @@ -0,0 +1,54 @@ +package me.lucko.spark.sponge; + +import me.lucko.spark.profiler.TickCounter; + +import org.spongepowered.api.scheduler.Task; + +import java.util.HashSet; +import java.util.Set; +import java.util.concurrent.atomic.LongAdder; + +public class SpongeTickCounter implements TickCounter, Runnable { + private final SparkSpongePlugin plugin; + private Task task; + + private final Set tasks = new HashSet<>(); + private final LongAdder tick = new LongAdder(); + + public 
SpongeTickCounter(SparkSpongePlugin plugin) { + this.plugin = plugin; + } + + @Override + public void run() { + this.tick.increment(); + for (Runnable r : this.tasks){ + r.run(); + } + } + + @Override + public void start() { + this.task = Task.builder().intervalTicks(1).name("spark-ticker").execute(this).submit(this.plugin); + } + + @Override + public void close() { + this.task.cancel(); + } + + @Override + public long getCurrentTick() { + return this.tick.longValue(); + } + + @Override + public void addTickTask(Runnable runnable) { + this.tasks.add(runnable); + } + + @Override + public void removeTickTask(Runnable runnable) { + this.tasks.remove(runnable); + } +} diff --git a/spark-universal/build.gradle b/spark-universal/build.gradle new file mode 100644 index 0000000..d0583ac --- /dev/null +++ b/spark-universal/build.gradle @@ -0,0 +1,29 @@ +buildscript { + repositories { + jcenter() + } + dependencies { + classpath 'com.github.jengelman.gradle.plugins:shadow:2.0.4' + } +} + +apply plugin: 'com.github.johnrengelman.shadow' + +dependencies { + compile project(':spark-common') + compile project(':spark-bukkit') + compile project(':spark-bungeecord') + compile project(':spark-sponge') +} + +shadowJar { + archiveName = 'spark.jar' + + relocate 'okio', 'me.lucko.spark.lib.okio' + relocate 'okhttp3', 'me.lucko.spark.lib.okhttp3' +} + +artifacts { + archives shadowJar + shadow shadowJar +} \ No newline at end of file diff --git a/sponge/pom.xml b/sponge/pom.xml deleted file mode 100644 index d926108..0000000 --- a/sponge/pom.xml +++ /dev/null @@ -1,75 +0,0 @@ - - - - spark-parent - me.lucko - 1.0-SNAPSHOT - - 4.0.0 - - spark-sponge - jar - - - clean package - spark-sponge - - - src/main/resources - true - - - - - org.apache.maven.plugins - maven-compiler-plugin - ${compiler.version} - - 1.8 - 1.8 - - - - org.codehaus.mojo - templating-maven-plugin - 1.0.0 - - - filter-src - - filter-sources - - - - - - - - - - - me.lucko - spark-common - ${project.version} - provided - - - - - org.spongepowered - spongeapi - 8.0.0-SNAPSHOT - provided - - - - - - sponge-repo - https://repo.spongepowered.org/maven - - - - diff --git a/sponge/src/main/java-templates/me/lucko/spark/sponge/utils/PomData.java b/sponge/src/main/java-templates/me/lucko/spark/sponge/utils/PomData.java deleted file mode 100644 index d81e8f5..0000000 --- a/sponge/src/main/java-templates/me/lucko/spark/sponge/utils/PomData.java +++ /dev/null @@ -1,8 +0,0 @@ -package me.lucko.spark.sponge.utils; - -public class PomData { - - public static final String DESCRIPTION = "${project.description}"; - public static final String VERSION = "${project.version}"; - -} diff --git a/sponge/src/main/java/me/lucko/spark/sponge/SparkSpongePlugin.java b/sponge/src/main/java/me/lucko/spark/sponge/SparkSpongePlugin.java deleted file mode 100644 index c6e1e18..0000000 --- a/sponge/src/main/java/me/lucko/spark/sponge/SparkSpongePlugin.java +++ /dev/null @@ -1,146 +0,0 @@ -package me.lucko.spark.sponge; - -import com.google.inject.Inject; - -import me.lucko.spark.common.CommandHandler; -import me.lucko.spark.profiler.ThreadDumper; -import me.lucko.spark.profiler.TickCounter; -import me.lucko.spark.sponge.utils.PomData; - -import org.spongepowered.api.Game; -import org.spongepowered.api.Sponge; -import org.spongepowered.api.command.CommandCallable; -import org.spongepowered.api.command.CommandResult; -import org.spongepowered.api.command.CommandSource; -import org.spongepowered.api.entity.living.player.Player; -import org.spongepowered.api.event.Listener; -import 
org.spongepowered.api.event.game.state.GameStartedServerEvent; -import org.spongepowered.api.plugin.Plugin; -import org.spongepowered.api.scheduler.AsynchronousExecutor; -import org.spongepowered.api.scheduler.SpongeExecutorService; -import org.spongepowered.api.text.Text; -import org.spongepowered.api.text.action.TextActions; -import org.spongepowered.api.text.format.TextColors; -import org.spongepowered.api.text.serializer.TextSerializers; -import org.spongepowered.api.world.Location; -import org.spongepowered.api.world.World; - -import java.net.MalformedURLException; -import java.net.URL; -import java.util.Collections; -import java.util.List; -import java.util.Optional; - -import javax.annotation.Nullable; - -@Plugin( - id = "spark", - name = "spark", - version = "1.0.4", - description = PomData.DESCRIPTION, - authors = {"Luck", "sk89q"} -) -public class SparkSpongePlugin implements CommandCallable { - - private final CommandHandler<CommandSource> commandHandler = new CommandHandler<CommandSource>() { - private Text colorize(String message) { - return TextSerializers.FORMATTING_CODE.deserialize(message); - } - - private void broadcast(Text msg) { - Sponge.getServer().getConsole().sendMessage(msg); - for (Player player : Sponge.getServer().getOnlinePlayers()) { - if (player.hasPermission("spark.profiler")) { - player.sendMessage(msg); - } - } - } - - @Override - protected void sendMessage(CommandSource sender, String message) { - sender.sendMessage(colorize(message)); - } - - @Override - protected void sendMessage(String message) { - Text msg = colorize(message); - broadcast(msg); - } - - @Override - protected void sendLink(String url) { - try { - Text msg = Text.builder(url) - .color(TextColors.GRAY) - .onClick(TextActions.openUrl(new URL(url))) - .build(); - broadcast(msg); - } catch (MalformedURLException e) { - e.printStackTrace(); - } - } - - @Override - protected void runAsync(Runnable r) { - asyncExecutor.execute(r); - } - - @Override - protected ThreadDumper getDefaultThreadDumper() { - return new ThreadDumper.Specific(new long[]{Thread.currentThread().getId()}); - } - - @Override - protected TickCounter newTickCounter() { - return new SpongeTickCounter(SparkSpongePlugin.this); - } - }; - - @Inject - @AsynchronousExecutor - private SpongeExecutorService asyncExecutor; - - @Inject - private Game game; - - @Listener - public void onServerStart(GameStartedServerEvent event) { - game.getCommandManager().register(this, this, "spark", "profiler"); - } - - @Override - public CommandResult process(CommandSource source, String arguments) { - if (!testPermission(source)) { - source.sendMessage(Text.builder("You do not have permission to use this command.").color(TextColors.RED).build()); - return CommandResult.empty(); - } - - commandHandler.handleCommand(source, arguments.split(" ")); - return CommandResult.empty(); - } - - @Override - public List<String> getSuggestions(CommandSource source, String arguments, @Nullable Location<World> targetPosition) { - return Collections.emptyList(); - } - - @Override - public boolean testPermission(CommandSource source) { - return source.hasPermission("spark.profiler"); - } - - @Override - public Optional<Text> getShortDescription(CommandSource source) { - return Optional.of(Text.of("Main spark plugin command")); - } - - @Override - public Optional<Text> getHelp(CommandSource source) { - return Optional.of(Text.of("Run '/profiler' to view usage.")); - } - - @Override - public Text getUsage(CommandSource source) { - return Text.of("Run '/profiler' to view usage."); - } -} diff --git
a/sponge/src/main/java/me/lucko/spark/sponge/SpongeTickCounter.java b/sponge/src/main/java/me/lucko/spark/sponge/SpongeTickCounter.java deleted file mode 100644 index bda2a69..0000000 --- a/sponge/src/main/java/me/lucko/spark/sponge/SpongeTickCounter.java +++ /dev/null @@ -1,54 +0,0 @@ -package me.lucko.spark.sponge; - -import me.lucko.spark.profiler.TickCounter; - -import org.spongepowered.api.scheduler.Task; - -import java.util.HashSet; -import java.util.Set; -import java.util.concurrent.atomic.LongAdder; - -public class SpongeTickCounter implements TickCounter, Runnable { - private final SparkSpongePlugin plugin; - private Task task; - - private final Set<Runnable> tasks = new HashSet<>(); - private final LongAdder tick = new LongAdder(); - - public SpongeTickCounter(SparkSpongePlugin plugin) { - this.plugin = plugin; - } - - @Override - public void run() { - this.tick.increment(); - for (Runnable r : this.tasks){ - r.run(); - } - } - - @Override - public void start() { - this.task = Task.builder().intervalTicks(1).name("spark-ticker").execute(this).submit(this.plugin); - } - - @Override - public void close() { - this.task.cancel(); - } - - @Override - public long getCurrentTick() { - return this.tick.longValue(); - } - - @Override - public void addTickTask(Runnable runnable) { - this.tasks.add(runnable); - } - - @Override - public void removeTickTask(Runnable runnable) { - this.tasks.remove(runnable); - } -} diff --git a/universal/pom.xml b/universal/pom.xml deleted file mode 100644 index 507df7b..0000000 --- a/universal/pom.xml +++ /dev/null @@ -1,102 +0,0 @@ - - - - spark-parent - me.lucko - 1.0-SNAPSHOT - - 4.0.0 - - spark-universal - jar - - - clean package - spark - - - org.apache.maven.plugins - maven-shade-plugin - ${shade.version} - - - package - - shade - - - false - false - - - - okio - me.lucko.spark.lib.okio - - - okhttp3 - me.lucko.spark.lib.okhttp3 - - - - - - - - - - - - sign - - - - org.apache.maven.plugins - maven-gpg-plugin - 1.6 - - - sign-artifacts - verify - - sign - - - - - - - - - - - - me.lucko - spark-common - ${project.version} - compile - - - - me.lucko - spark-bukkit - 1.0-SNAPSHOT - compile - - - me.lucko - spark-bungeecord - 1.0-SNAPSHOT - compile - - - me.lucko - spark-sponge - 1.0-SNAPSHOT - compile - - - - -- cgit