-rw-r--r--  spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java                   |   4
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java                       |  13
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java        |  38
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java  | 148
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java       |  38
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java                |  20
-rw-r--r--  spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricClientSparkPlugin.java      |   1
-rw-r--r--  spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java      |   1
-rw-r--r--  spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricSparkPlugin.java            |   3
-rw-r--r--  spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeClientSparkPlugin.java         |   1
-rw-r--r--  spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java         |   1
-rw-r--r--  spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeSparkPlugin.java               |   3
-rw-r--r--  spark-forge1122/src/main/java/me/lucko/spark/forge/plugin/ForgeSparkPlugin.java           |   4
-rw-r--r--  spark-sponge/src/main/java/me/lucko/spark/sponge/SpongeSparkPlugin.java                   |   4
14 files changed, 158 insertions(+), 121 deletions(-)
diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java
index 53a9a7d..77fa64c 100644
--- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java
+++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java
@@ -46,6 +46,7 @@ public class BukkitSparkPlugin extends JavaPlugin implements SparkPlugin {
private SparkPlatform platform;
private CommandExecutor tpsCommand = null;
+ private final ThreadDumper.GameThread threadDumper = new ThreadDumper.GameThread();
@Override
public void onEnable() {
@@ -94,6 +95,7 @@ public class BukkitSparkPlugin extends JavaPlugin implements SparkPlugin {
@Override
public boolean onCommand(CommandSender sender, Command command, String label, String[] args) {
+ this.threadDumper.ensureSetup();
this.platform.executeCommand(new BukkitCommandSender(sender, this.audienceFactory), args);
return true;
}
@@ -133,7 +135,7 @@ public class BukkitSparkPlugin extends JavaPlugin implements SparkPlugin {
@Override
public ThreadDumper getDefaultThreadDumper() {
- return new ThreadDumper.Specific(new long[]{Thread.currentThread().getId()});
+ return this.threadDumper.get();
}
@Override
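
Why ensureSetup() is called from onCommand rather than inside getDefaultThreadDumper(): once executeCommand is handed off to an async executor (see the SparkPlatform change below), Thread.currentThread() observed later belongs to the executor's worker, not the server thread. A minimal standalone sketch of that pitfall, using hypothetical names and a plain JDK executor (not part of this patch):

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;

    public class ThreadIdCaptureExample {
        public static void main(String[] args) throws Exception {
            // Captured eagerly on the calling thread, as ensureSetup() does in the command handler.
            long gameThreadId = Thread.currentThread().getId();
            ExecutorService async = Executors.newSingleThreadExecutor();
            async.submit(() -> {
                // On the executor's worker thread, the "current" thread is no longer the game thread,
                // so reading the id here would point the profiler at the wrong thread.
                System.out.println(Thread.currentThread().getId() == gameThreadId); // prints false
            }).get();
            async.shutdown();
        }
    }
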
diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
index a5dadba..c9aa030 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
@@ -53,6 +53,7 @@ import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
+import java.util.concurrent.locks.ReentrantLock;
import java.util.stream.Collectors;
import static net.kyori.adventure.text.Component.space;
@@ -80,6 +81,7 @@ public class SparkPlatform {
private final SparkPlugin plugin;
private final List<CommandModule> commandModules;
private final List<Command> commands;
+ private final ReentrantLock commandExecuteLock = new ReentrantLock(true);
private final ActivityLog activityLog;
private final TickHook tickHook;
private final TickReporter tickReporter;
@@ -186,6 +188,17 @@ public class SparkPlatform {
}
public void executeCommand(CommandSender sender, String[] args) {
+ this.plugin.executeAsync(() -> {
+ this.commandExecuteLock.lock();
+ try {
+ executeCommand0(sender, args);
+ } finally {
+ this.commandExecuteLock.unlock();
+ }
+ });
+ }
+
+ private void executeCommand0(CommandSender sender, String[] args) {
CommandResponseHandler resp = new CommandResponseHandler(this, sender);
List<Command> commands = getAvailableCommands(sender);
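
The change above moves command handling onto the plugin's async executor and serializes it with a fair ReentrantLock, so concurrent invocations run one at a time, in arrival order, without blocking the caller. A minimal sketch of the same pattern, assuming a plain JDK executor stands in for SparkPlugin#executeAsync (illustrative only, not the project's actual implementation):

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.locks.ReentrantLock;

    public class SerializedAsyncCommands {
        private final ExecutorService async = Executors.newCachedThreadPool();
        private final ReentrantLock lock = new ReentrantLock(true); // fair: waiting tasks acquire in FIFO order

        public void executeCommand(String[] args) {
            // Hand the work off immediately so the calling (game) thread never blocks.
            this.async.execute(() -> {
                this.lock.lock();
                try {
                    handle(args); // at most one command is processed at a time
                } finally {
                    this.lock.unlock();
                }
            });
        }

        private void handle(String[] args) {
            System.out.println("handling: " + String.join(" ", args));
        }
    }
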
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java
index c8f25c7..547131c 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java
@@ -133,32 +133,30 @@ public class HealthModule implements CommandModule {
private static void healthReport(SparkPlatform platform, CommandSender sender, CommandResponseHandler resp, Arguments arguments) {
resp.replyPrefixed(text("Generating server health report..."));
- platform.getPlugin().executeAsync(() -> {
- List<Component> report = new LinkedList<>();
- report.add(empty());
+ List<Component> report = new LinkedList<>();
+ report.add(empty());
- TickStatistics tickStatistics = platform.getTickStatistics();
- if (tickStatistics != null) {
- addTickStats(report, tickStatistics);
- }
+ TickStatistics tickStatistics = platform.getTickStatistics();
+ if (tickStatistics != null) {
+ addTickStats(report, tickStatistics);
+ }
- addCpuStats(report);
+ addCpuStats(report);
- MemoryMXBean memoryMXBean = ManagementFactory.getMemoryMXBean();
- addBasicMemoryStats(report, memoryMXBean);
+ MemoryMXBean memoryMXBean = ManagementFactory.getMemoryMXBean();
+ addBasicMemoryStats(report, memoryMXBean);
- if (arguments.boolFlag("memory")) {
- addDetailedMemoryStats(report, memoryMXBean);
- }
+ if (arguments.boolFlag("memory")) {
+ addDetailedMemoryStats(report, memoryMXBean);
+ }
- try {
- addDiskStats(report);
- } catch (IOException e) {
- e.printStackTrace();
- }
+ try {
+ addDiskStats(report);
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
- report.forEach(resp::reply);
- });
+ report.forEach(resp::reply);
}
private static void addTickStats(List<Component> report, TickStatistics tickStatistics) {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java
index 94e44a6..b0a3c31 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java
@@ -83,98 +83,94 @@ public class HeapAnalysisModule implements CommandModule {
}
private static void heapSummary(SparkPlatform platform, CommandSender sender, CommandResponseHandler resp, Arguments arguments) {
- platform.getPlugin().executeAsync(() -> {
- if (arguments.boolFlag("run-gc-before")) {
- resp.broadcastPrefixed(text("Running garbage collector..."));
- System.gc();
- }
+ if (arguments.boolFlag("run-gc-before")) {
+ resp.broadcastPrefixed(text("Running garbage collector..."));
+ System.gc();
+ }
- resp.broadcastPrefixed(text("Creating a new heap dump summary, please wait..."));
+ resp.broadcastPrefixed(text("Creating a new heap dump summary, please wait..."));
- HeapDumpSummary heapDump;
- try {
- heapDump = HeapDumpSummary.createNew();
- } catch (Exception e) {
- resp.broadcastPrefixed(text("An error occurred whilst inspecting the heap.", RED));
- e.printStackTrace();
- return;
- }
+ HeapDumpSummary heapDump;
+ try {
+ heapDump = HeapDumpSummary.createNew();
+ } catch (Exception e) {
+ resp.broadcastPrefixed(text("An error occurred whilst inspecting the heap.", RED));
+ e.printStackTrace();
+ return;
+ }
- byte[] output = heapDump.formCompressedDataPayload(platform.getPlugin().getPlatformInfo(), sender);
- try {
- String key = SparkPlatform.BYTEBIN_CLIENT.postContent(output, SPARK_HEAP_MEDIA_TYPE).key();
- String url = SparkPlatform.VIEWER_URL + key;
-
- resp.broadcastPrefixed(text("Heap dump summmary output:", GOLD));
- resp.broadcast(text()
- .content(url)
- .color(GRAY)
- .clickEvent(ClickEvent.openUrl(url))
- .build()
- );
-
- platform.getActivityLog().addToLog(Activity.urlActivity(sender, System.currentTimeMillis(), "Heap dump summary", url));
- } catch (IOException e) {
- resp.broadcastPrefixed(text("An error occurred whilst uploading the data.", RED));
- e.printStackTrace();
- }
- });
+ byte[] output = heapDump.formCompressedDataPayload(platform.getPlugin().getPlatformInfo(), sender);
+ try {
+ String key = SparkPlatform.BYTEBIN_CLIENT.postContent(output, SPARK_HEAP_MEDIA_TYPE).key();
+ String url = SparkPlatform.VIEWER_URL + key;
+
+ resp.broadcastPrefixed(text("Heap dump summmary output:", GOLD));
+ resp.broadcast(text()
+ .content(url)
+ .color(GRAY)
+ .clickEvent(ClickEvent.openUrl(url))
+ .build()
+ );
+
+ platform.getActivityLog().addToLog(Activity.urlActivity(sender, System.currentTimeMillis(), "Heap dump summary", url));
+ } catch (IOException e) {
+ resp.broadcastPrefixed(text("An error occurred whilst uploading the data.", RED));
+ e.printStackTrace();
+ }
}
private static void heapDump(SparkPlatform platform, CommandSender sender, CommandResponseHandler resp, Arguments arguments) {
- platform.getPlugin().executeAsync(() -> {
- Path pluginFolder = platform.getPlugin().getPluginDirectory();
- try {
- Files.createDirectories(pluginFolder);
- } catch (IOException e) {
- // ignore
- }
+ Path pluginFolder = platform.getPlugin().getPluginDirectory();
+ try {
+ Files.createDirectories(pluginFolder);
+ } catch (IOException e) {
+ // ignore
+ }
- Path file = pluginFolder.resolve("heap-" + DateTimeFormatter.ofPattern("yyyy-MM-dd_HH.mm.ss").format(LocalDateTime.now()) + (HeapDump.isOpenJ9() ? ".phd" : ".hprof"));
- boolean liveOnly = !arguments.boolFlag("include-non-live");
+ Path file = pluginFolder.resolve("heap-" + DateTimeFormatter.ofPattern("yyyy-MM-dd_HH.mm.ss").format(LocalDateTime.now()) + (HeapDump.isOpenJ9() ? ".phd" : ".hprof"));
+ boolean liveOnly = !arguments.boolFlag("include-non-live");
- if (arguments.boolFlag("run-gc-before")) {
- resp.broadcastPrefixed(text("Running garbage collector..."));
- System.gc();
- }
+ if (arguments.boolFlag("run-gc-before")) {
+ resp.broadcastPrefixed(text("Running garbage collector..."));
+ System.gc();
+ }
- resp.broadcastPrefixed(text("Creating a new heap dump, please wait..."));
+ resp.broadcastPrefixed(text("Creating a new heap dump, please wait..."));
- try {
- HeapDump.dumpHeap(file, liveOnly);
- } catch (Exception e) {
- resp.broadcastPrefixed(text("An error occurred whilst creating a heap dump.", RED));
- e.printStackTrace();
- return;
- }
+ try {
+ HeapDump.dumpHeap(file, liveOnly);
+ } catch (Exception e) {
+ resp.broadcastPrefixed(text("An error occurred whilst creating a heap dump.", RED));
+ e.printStackTrace();
+ return;
+ }
- resp.broadcastPrefixed(text()
- .content("Heap dump written to: ")
- .color(GOLD)
- .append(text(file.toString(), GRAY))
- .build()
- );
- platform.getActivityLog().addToLog(Activity.fileActivity(sender, System.currentTimeMillis(), "Heap dump", file.toString()));
+ resp.broadcastPrefixed(text()
+ .content("Heap dump written to: ")
+ .color(GOLD)
+ .append(text(file.toString(), GRAY))
+ .build()
+ );
+ platform.getActivityLog().addToLog(Activity.fileActivity(sender, System.currentTimeMillis(), "Heap dump", file.toString()));
- CompressionMethod compressionMethod = null;
- Iterator<String> compressArgs = arguments.stringFlag("compress").iterator();
- if (compressArgs.hasNext()) {
- try {
- compressionMethod = CompressionMethod.valueOf(compressArgs.next().toUpperCase());
- } catch (IllegalArgumentException e) {
- // ignore
- }
+ CompressionMethod compressionMethod = null;
+ Iterator<String> compressArgs = arguments.stringFlag("compress").iterator();
+ if (compressArgs.hasNext()) {
+ try {
+ compressionMethod = CompressionMethod.valueOf(compressArgs.next().toUpperCase());
+ } catch (IllegalArgumentException e) {
+ // ignore
}
+ }
- if (compressionMethod != null) {
- try {
- heapDumpCompress(platform, resp, file, compressionMethod);
- } catch (IOException e) {
- e.printStackTrace();
- }
+ if (compressionMethod != null) {
+ try {
+ heapDumpCompress(platform, resp, file, compressionMethod);
+ } catch (IOException e) {
+ e.printStackTrace();
}
- });
+ }
}
private static void heapDumpCompress(SparkPlatform platform, CommandResponseHandler resp, Path file, CompressionMethod method) throws IOException {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
index ff577d5..856a182 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
@@ -299,25 +299,23 @@ public class SamplerModule implements CommandModule {
}
private void handleUpload(SparkPlatform platform, CommandResponseHandler resp, Sampler sampler, ThreadNodeOrder threadOrder, String comment, MergeMode mergeMode) {
- platform.getPlugin().executeAsync(() -> {
- byte[] output = sampler.formCompressedDataPayload(platform.getPlugin().getPlatformInfo(), resp.sender(), threadOrder, comment, mergeMode);
- try {
- String key = SparkPlatform.BYTEBIN_CLIENT.postContent(output, SPARK_SAMPLER_MEDIA_TYPE).key();
- String url = SparkPlatform.VIEWER_URL + key;
-
- resp.broadcastPrefixed(text("Profiler results:", GOLD));
- resp.broadcast(text()
- .content(url)
- .color(GRAY)
- .clickEvent(ClickEvent.openUrl(url))
- .build()
- );
-
- platform.getActivityLog().addToLog(Activity.urlActivity(resp.sender(), System.currentTimeMillis(), "Profiler", url));
- } catch (IOException e) {
- resp.broadcastPrefixed(text("An error occurred whilst uploading the results.", RED));
- e.printStackTrace();
- }
- });
+ byte[] output = sampler.formCompressedDataPayload(platform.getPlugin().getPlatformInfo(), resp.sender(), threadOrder, comment, mergeMode);
+ try {
+ String key = SparkPlatform.BYTEBIN_CLIENT.postContent(output, SPARK_SAMPLER_MEDIA_TYPE).key();
+ String url = SparkPlatform.VIEWER_URL + key;
+
+ resp.broadcastPrefixed(text("Profiler results:", GOLD));
+ resp.broadcast(text()
+ .content(url)
+ .color(GRAY)
+ .clickEvent(ClickEvent.openUrl(url))
+ .build()
+ );
+
+ platform.getActivityLog().addToLog(Activity.urlActivity(resp.sender(), System.currentTimeMillis(), "Profiler", url));
+ } catch (IOException e) {
+ resp.broadcastPrefixed(text("An error occurred whilst uploading the results.", RED));
+ e.printStackTrace();
+ }
}
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java
index 4863482..e99114a 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java
@@ -31,6 +31,7 @@ import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
+import java.util.function.Supplier;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;
import java.util.stream.Collectors;
@@ -72,6 +73,25 @@ public interface ThreadDumper {
};
/**
+ * Utility to cache the creation of a {@link ThreadDumper} targeting
+ * the game (server/client) thread.
+ */
+ final class GameThread implements Supplier<ThreadDumper> {
+ private Specific dumper = null;
+
+ @Override
+ public ThreadDumper get() {
+ return Objects.requireNonNull(this.dumper, "dumper");
+ }
+
+ public void ensureSetup() {
+ if (this.dumper == null) {
+ this.dumper = new Specific(new long[]{Thread.currentThread().getId()});
+ }
+ }
+ }
+
+ /**
* Implementation of {@link ThreadDumper} that generates data for a specific set of threads.
*/
final class Specific implements ThreadDumper {
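
A sketch of how the new GameThread helper is intended to be used, pieced together from the plugin changes elsewhere in this commit; the plugin class below is hypothetical and assumes spark-common is on the classpath:

    import me.lucko.spark.common.sampler.ThreadDumper;

    public class ExampleSparkPlugin {
        private final ThreadDumper.GameThread threadDumper = new ThreadDumper.GameThread();

        // Called by the platform's command handler, which runs on the game (server/client) thread.
        public void onCommand(String[] args) {
            this.threadDumper.ensureSetup(); // first call records the current (game) thread's id
            // ... hand the command off to the platform, which executes it asynchronously ...
        }

        // May be called later from any thread, e.g. the async command executor.
        public ThreadDumper getDefaultThreadDumper() {
            return this.threadDumper.get(); // throws NullPointerException if no command has run yet
        }
    }
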
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricClientSparkPlugin.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricClientSparkPlugin.java
index eec7b73..c7efb4e 100644
--- a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricClientSparkPlugin.java
+++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricClientSparkPlugin.java
@@ -105,6 +105,7 @@ public class FabricClientSparkPlugin extends FabricSparkPlugin implements Sugges
return false;
}
+ this.threadDumper.ensureSetup();
this.platform.executeCommand(new FabricCommandSender(this.minecraft.player, this), args);
this.minecraft.inGameHud.getChatHud().addToMessageHistory(chat);
return true;
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java
index b8f94f5..7e88a24 100644
--- a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java
+++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java
@@ -81,6 +81,7 @@ public class FabricServerSparkPlugin extends FabricSparkPlugin implements Comman
return 0;
}
+ this.threadDumper.ensureSetup();
CommandOutput source = context.getSource().getEntity() != null ? context.getSource().getEntity() : context.getSource().getMinecraftServer();
this.platform.executeCommand(new FabricCommandSender(source, this), args);
return Command.SINGLE_SUCCESS;
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricSparkPlugin.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricSparkPlugin.java
index 3454365..59610b1 100644
--- a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricSparkPlugin.java
+++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricSparkPlugin.java
@@ -44,6 +44,7 @@ public abstract class FabricSparkPlugin implements SparkPlugin {
private final FabricSparkMod mod;
protected final ScheduledExecutorService scheduler;
protected final SparkPlatform platform;
+ protected final ThreadDumper.GameThread threadDumper = new ThreadDumper.GameThread();
protected FabricSparkPlugin(FabricSparkMod mod) {
this.mod = mod;
@@ -85,7 +86,7 @@ public abstract class FabricSparkPlugin implements SparkPlugin {
@Override
public ThreadDumper getDefaultThreadDumper() {
- return new ThreadDumper.Specific(new long[]{Thread.currentThread().getId()});
+ return this.threadDumper.get();
}
protected static <T> void registerCommands(CommandDispatcher<T> dispatcher, Command<T> executor, SuggestionProvider<T> suggestor, String... aliases) {
diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeClientSparkPlugin.java b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeClientSparkPlugin.java
index 681035b..a97a0bf 100644
--- a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeClientSparkPlugin.java
+++ b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeClientSparkPlugin.java
@@ -104,6 +104,7 @@ public class ForgeClientSparkPlugin extends ForgeSparkPlugin implements Suggesti
return;
}
+ this.threadDumper.ensureSetup();
this.platform.executeCommand(new ForgeCommandSender(this.minecraft.player, this), args);
this.minecraft.ingameGUI.getChatGUI().addToSentMessages(event.getMessage());
event.setCanceled(true);
diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java
index 25c34ec..f6ea346 100644
--- a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java
+++ b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java
@@ -90,6 +90,7 @@ public class ForgeServerSparkPlugin extends ForgeSparkPlugin implements Command<
return 0;
}
+ this.threadDumper.ensureSetup();
this.platform.executeCommand(new ForgeCommandSender(context.getSource().source, this), args);
return Command.SINGLE_SUCCESS;
}
diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeSparkPlugin.java b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeSparkPlugin.java
index c739102..35e6d7b 100644
--- a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeSparkPlugin.java
+++ b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeSparkPlugin.java
@@ -63,6 +63,7 @@ public abstract class ForgeSparkPlugin implements SparkPlugin {
private final ForgeSparkMod mod;
protected final ScheduledExecutorService scheduler;
protected final SparkPlatform platform;
+ protected final ThreadDumper.GameThread threadDumper = new ThreadDumper.GameThread();
protected ForgeSparkPlugin(ForgeSparkMod mod) {
this.mod = mod;
@@ -104,7 +105,7 @@ public abstract class ForgeSparkPlugin implements SparkPlugin {
@Override
public ThreadDumper getDefaultThreadDumper() {
- return new ThreadDumper.Specific(new long[]{Thread.currentThread().getId()});
+ return this.threadDumper.get();
}
}
diff --git a/spark-forge1122/src/main/java/me/lucko/spark/forge/plugin/ForgeSparkPlugin.java b/spark-forge1122/src/main/java/me/lucko/spark/forge/plugin/ForgeSparkPlugin.java
index 0b02469..ed32638 100644
--- a/spark-forge1122/src/main/java/me/lucko/spark/forge/plugin/ForgeSparkPlugin.java
+++ b/spark-forge1122/src/main/java/me/lucko/spark/forge/plugin/ForgeSparkPlugin.java
@@ -44,6 +44,7 @@ public abstract class ForgeSparkPlugin implements SparkPlugin, ICommand {
private final ForgeSparkMod mod;
protected final ScheduledExecutorService scheduler;
protected final SparkPlatform platform;
+ protected final ThreadDumper.GameThread threadDumper = new ThreadDumper.GameThread();
protected ForgeSparkPlugin(ForgeSparkMod mod) {
this.mod = mod;
@@ -85,7 +86,7 @@ public abstract class ForgeSparkPlugin implements SparkPlugin, ICommand {
@Override
public ThreadDumper getDefaultThreadDumper() {
- return new ThreadDumper.Specific(new long[]{Thread.currentThread().getId()});
+ return this.threadDumper.get();
}
// implement ICommand
@@ -107,6 +108,7 @@ public abstract class ForgeSparkPlugin implements SparkPlugin, ICommand {
@Override
public void execute(MinecraftServer server, ICommandSender sender, String[] args) {
+ this.threadDumper.ensureSetup();
this.platform.executeCommand(new ForgeCommandSender(sender, this), args);
}
diff --git a/spark-sponge/src/main/java/me/lucko/spark/sponge/SpongeSparkPlugin.java b/spark-sponge/src/main/java/me/lucko/spark/sponge/SpongeSparkPlugin.java
index 9437207..dbd0193 100644
--- a/spark-sponge/src/main/java/me/lucko/spark/sponge/SpongeSparkPlugin.java
+++ b/spark-sponge/src/main/java/me/lucko/spark/sponge/SpongeSparkPlugin.java
@@ -74,6 +74,7 @@ public class SpongeSparkPlugin implements SparkPlugin {
private SpongeAudiences audienceFactory;
private SparkPlatform platform;
+ private final ThreadDumper.GameThread threadDumper = new ThreadDumper.GameThread();
@Inject
public SpongeSparkPlugin(PluginContainer pluginContainer, Game game, @ConfigDir(sharedRoot = false) Path configDirectory, @AsynchronousExecutor SpongeExecutorService asyncExecutor) {
@@ -126,7 +127,7 @@ public class SpongeSparkPlugin implements SparkPlugin {
@Override
public ThreadDumper getDefaultThreadDumper() {
- return new ThreadDumper.Specific(new long[]{Thread.currentThread().getId()});
+ return this.threadDumper.get();
}
@Override
@@ -148,6 +149,7 @@ public class SpongeSparkPlugin implements SparkPlugin {
@Override
public CommandResult process(CommandSource source, String arguments) {
+ this.plugin.threadDumper.ensureSetup();
this.plugin.platform.executeCommand(new SpongeCommandSender(source, this.plugin.audienceFactory), arguments.split(" "));
return CommandResult.empty();
}