Diffstat (limited to 'spark-common/src/main/java')
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java                      |  13
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java       |  38
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java | 148
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java      |  38
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java               |  20
5 files changed, 141 insertions(+), 116 deletions(-)
diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
index a5dadba..c9aa030 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
@@ -53,6 +53,7 @@ import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
+import java.util.concurrent.locks.ReentrantLock;
import java.util.stream.Collectors;
import static net.kyori.adventure.text.Component.space;
@@ -80,6 +81,7 @@ public class SparkPlatform {
private final SparkPlugin plugin;
private final List<CommandModule> commandModules;
private final List<Command> commands;
+ private final ReentrantLock commandExecuteLock = new ReentrantLock(true);
private final ActivityLog activityLog;
private final TickHook tickHook;
private final TickReporter tickReporter;
@@ -186,6 +188,17 @@ public class SparkPlatform {
}
public void executeCommand(CommandSender sender, String[] args) {
+ this.plugin.executeAsync(() -> {
+ this.commandExecuteLock.lock();
+ try {
+ executeCommand0(sender, args);
+ } finally {
+ this.commandExecuteLock.unlock();
+ }
+ });
+ }
+
+ private void executeCommand0(CommandSender sender, String[] args) {
CommandResponseHandler resp = new CommandResponseHandler(this, sender);
List<Command> commands = getAvailableCommands(sender);
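The change above moves all /spark command handling onto an async thread and serializes it with a fair lock, so two commands can no longer run their handlers concurrently. Below is a minimal, self-contained sketch of that dispatch pattern; the names (CommandDispatcher, execute) are illustrative stand-ins, not spark's API.

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.locks.ReentrantLock;

    public class CommandDispatcher {
        private final ExecutorService executor = Executors.newCachedThreadPool();
        // 'true' requests a fair lock: waiting tasks acquire it in FIFO order
        private final ReentrantLock lock = new ReentrantLock(true);

        public void execute(Runnable command) {
            executor.execute(() -> {
                lock.lock();
                try {
                    command.run(); // at most one command body runs at a time
                } finally {
                    lock.unlock();
                }
            });
        }
    }

The fair (FIFO) mode trades a little lock throughput for predictable ordering: a long-running command cannot be repeatedly overtaken by later ones once they are queued on the lock.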
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java
index c8f25c7..547131c 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java
@@ -133,32 +133,30 @@ public class HealthModule implements CommandModule {
private static void healthReport(SparkPlatform platform, CommandSender sender, CommandResponseHandler resp, Arguments arguments) {
resp.replyPrefixed(text("Generating server health report..."));
- platform.getPlugin().executeAsync(() -> {
- List<Component> report = new LinkedList<>();
- report.add(empty());
+ List<Component> report = new LinkedList<>();
+ report.add(empty());
- TickStatistics tickStatistics = platform.getTickStatistics();
- if (tickStatistics != null) {
- addTickStats(report, tickStatistics);
- }
+ TickStatistics tickStatistics = platform.getTickStatistics();
+ if (tickStatistics != null) {
+ addTickStats(report, tickStatistics);
+ }
- addCpuStats(report);
+ addCpuStats(report);
- MemoryMXBean memoryMXBean = ManagementFactory.getMemoryMXBean();
- addBasicMemoryStats(report, memoryMXBean);
+ MemoryMXBean memoryMXBean = ManagementFactory.getMemoryMXBean();
+ addBasicMemoryStats(report, memoryMXBean);
- if (arguments.boolFlag("memory")) {
- addDetailedMemoryStats(report, memoryMXBean);
- }
+ if (arguments.boolFlag("memory")) {
+ addDetailedMemoryStats(report, memoryMXBean);
+ }
- try {
- addDiskStats(report);
- } catch (IOException e) {
- e.printStackTrace();
- }
+ try {
+ addDiskStats(report);
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
- report.forEach(resp::reply);
- });
+ report.forEach(resp::reply);
}
private static void addTickStats(List<Component> report, TickStatistics tickStatistics) {
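Note: the executeAsync(...) wrapper is dropped here (and in the HeapAnalysisModule and SamplerModule hunks below) because SparkPlatform.executeCommand() now schedules every command on an async thread under the command lock, making per-handler scheduling redundant; the handler bodies are only re-indented.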
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java
index 94e44a6..b0a3c31 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java
@@ -83,98 +83,94 @@ public class HeapAnalysisModule implements CommandModule {
}
private static void heapSummary(SparkPlatform platform, CommandSender sender, CommandResponseHandler resp, Arguments arguments) {
- platform.getPlugin().executeAsync(() -> {
- if (arguments.boolFlag("run-gc-before")) {
- resp.broadcastPrefixed(text("Running garbage collector..."));
- System.gc();
- }
+ if (arguments.boolFlag("run-gc-before")) {
+ resp.broadcastPrefixed(text("Running garbage collector..."));
+ System.gc();
+ }
- resp.broadcastPrefixed(text("Creating a new heap dump summary, please wait..."));
+ resp.broadcastPrefixed(text("Creating a new heap dump summary, please wait..."));
- HeapDumpSummary heapDump;
- try {
- heapDump = HeapDumpSummary.createNew();
- } catch (Exception e) {
- resp.broadcastPrefixed(text("An error occurred whilst inspecting the heap.", RED));
- e.printStackTrace();
- return;
- }
+ HeapDumpSummary heapDump;
+ try {
+ heapDump = HeapDumpSummary.createNew();
+ } catch (Exception e) {
+ resp.broadcastPrefixed(text("An error occurred whilst inspecting the heap.", RED));
+ e.printStackTrace();
+ return;
+ }
- byte[] output = heapDump.formCompressedDataPayload(platform.getPlugin().getPlatformInfo(), sender);
- try {
- String key = SparkPlatform.BYTEBIN_CLIENT.postContent(output, SPARK_HEAP_MEDIA_TYPE).key();
- String url = SparkPlatform.VIEWER_URL + key;
-
-            resp.broadcastPrefixed(text("Heap dump summary output:", GOLD));
- resp.broadcast(text()
- .content(url)
- .color(GRAY)
- .clickEvent(ClickEvent.openUrl(url))
- .build()
- );
-
- platform.getActivityLog().addToLog(Activity.urlActivity(sender, System.currentTimeMillis(), "Heap dump summary", url));
- } catch (IOException e) {
- resp.broadcastPrefixed(text("An error occurred whilst uploading the data.", RED));
- e.printStackTrace();
- }
- });
+ byte[] output = heapDump.formCompressedDataPayload(platform.getPlugin().getPlatformInfo(), sender);
+ try {
+ String key = SparkPlatform.BYTEBIN_CLIENT.postContent(output, SPARK_HEAP_MEDIA_TYPE).key();
+ String url = SparkPlatform.VIEWER_URL + key;
+
+        resp.broadcastPrefixed(text("Heap dump summary output:", GOLD));
+ resp.broadcast(text()
+ .content(url)
+ .color(GRAY)
+ .clickEvent(ClickEvent.openUrl(url))
+ .build()
+ );
+
+ platform.getActivityLog().addToLog(Activity.urlActivity(sender, System.currentTimeMillis(), "Heap dump summary", url));
+ } catch (IOException e) {
+ resp.broadcastPrefixed(text("An error occurred whilst uploading the data.", RED));
+ e.printStackTrace();
+ }
}
private static void heapDump(SparkPlatform platform, CommandSender sender, CommandResponseHandler resp, Arguments arguments) {
- platform.getPlugin().executeAsync(() -> {
- Path pluginFolder = platform.getPlugin().getPluginDirectory();
- try {
- Files.createDirectories(pluginFolder);
- } catch (IOException e) {
- // ignore
- }
+ Path pluginFolder = platform.getPlugin().getPluginDirectory();
+ try {
+ Files.createDirectories(pluginFolder);
+ } catch (IOException e) {
+ // ignore
+ }
- Path file = pluginFolder.resolve("heap-" + DateTimeFormatter.ofPattern("yyyy-MM-dd_HH.mm.ss").format(LocalDateTime.now()) + (HeapDump.isOpenJ9() ? ".phd" : ".hprof"));
- boolean liveOnly = !arguments.boolFlag("include-non-live");
+ Path file = pluginFolder.resolve("heap-" + DateTimeFormatter.ofPattern("yyyy-MM-dd_HH.mm.ss").format(LocalDateTime.now()) + (HeapDump.isOpenJ9() ? ".phd" : ".hprof"));
+ boolean liveOnly = !arguments.boolFlag("include-non-live");
- if (arguments.boolFlag("run-gc-before")) {
- resp.broadcastPrefixed(text("Running garbage collector..."));
- System.gc();
- }
+ if (arguments.boolFlag("run-gc-before")) {
+ resp.broadcastPrefixed(text("Running garbage collector..."));
+ System.gc();
+ }
- resp.broadcastPrefixed(text("Creating a new heap dump, please wait..."));
+ resp.broadcastPrefixed(text("Creating a new heap dump, please wait..."));
- try {
- HeapDump.dumpHeap(file, liveOnly);
- } catch (Exception e) {
- resp.broadcastPrefixed(text("An error occurred whilst creating a heap dump.", RED));
- e.printStackTrace();
- return;
- }
+ try {
+ HeapDump.dumpHeap(file, liveOnly);
+ } catch (Exception e) {
+ resp.broadcastPrefixed(text("An error occurred whilst creating a heap dump.", RED));
+ e.printStackTrace();
+ return;
+ }
- resp.broadcastPrefixed(text()
- .content("Heap dump written to: ")
- .color(GOLD)
- .append(text(file.toString(), GRAY))
- .build()
- );
- platform.getActivityLog().addToLog(Activity.fileActivity(sender, System.currentTimeMillis(), "Heap dump", file.toString()));
+ resp.broadcastPrefixed(text()
+ .content("Heap dump written to: ")
+ .color(GOLD)
+ .append(text(file.toString(), GRAY))
+ .build()
+ );
+ platform.getActivityLog().addToLog(Activity.fileActivity(sender, System.currentTimeMillis(), "Heap dump", file.toString()));
- CompressionMethod compressionMethod = null;
- Iterator<String> compressArgs = arguments.stringFlag("compress").iterator();
- if (compressArgs.hasNext()) {
- try {
- compressionMethod = CompressionMethod.valueOf(compressArgs.next().toUpperCase());
- } catch (IllegalArgumentException e) {
- // ignore
- }
+ CompressionMethod compressionMethod = null;
+ Iterator<String> compressArgs = arguments.stringFlag("compress").iterator();
+ if (compressArgs.hasNext()) {
+ try {
+ compressionMethod = CompressionMethod.valueOf(compressArgs.next().toUpperCase());
+ } catch (IllegalArgumentException e) {
+ // ignore
}
+ }
- if (compressionMethod != null) {
- try {
- heapDumpCompress(platform, resp, file, compressionMethod);
- } catch (IOException e) {
- e.printStackTrace();
- }
+ if (compressionMethod != null) {
+ try {
+ heapDumpCompress(platform, resp, file, compressionMethod);
+ } catch (IOException e) {
+ e.printStackTrace();
}
- });
+ }
}
private static void heapDumpCompress(SparkPlatform platform, CommandResponseHandler resp, Path file, CompressionMethod method) throws IOException {
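The "compress" flag above is parsed leniently: an unrecognised value is silently ignored and the dump is left uncompressed. A small sketch of that enum-parsing idiom, using a stand-in enum rather than spark's actual CompressionMethod:

    import java.util.Optional;

    enum CompressionMethod { GZIP, XZ, LZMA } // stand-in values for illustration

    final class CompressFlag {
        // returns empty for unknown names instead of propagating the exception
        static Optional<CompressionMethod> parse(String raw) {
            try {
                return Optional.of(CompressionMethod.valueOf(raw.toUpperCase()));
            } catch (IllegalArgumentException e) {
                return Optional.empty();
            }
        }

        public static void main(String[] args) {
            System.out.println(parse("gzip"));   // Optional[GZIP]
            System.out.println(parse("brotli")); // Optional.empty
        }
    }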
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
index ff577d5..856a182 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
@@ -299,25 +299,23 @@ public class SamplerModule implements CommandModule {
}
private void handleUpload(SparkPlatform platform, CommandResponseHandler resp, Sampler sampler, ThreadNodeOrder threadOrder, String comment, MergeMode mergeMode) {
- platform.getPlugin().executeAsync(() -> {
- byte[] output = sampler.formCompressedDataPayload(platform.getPlugin().getPlatformInfo(), resp.sender(), threadOrder, comment, mergeMode);
- try {
- String key = SparkPlatform.BYTEBIN_CLIENT.postContent(output, SPARK_SAMPLER_MEDIA_TYPE).key();
- String url = SparkPlatform.VIEWER_URL + key;
-
- resp.broadcastPrefixed(text("Profiler results:", GOLD));
- resp.broadcast(text()
- .content(url)
- .color(GRAY)
- .clickEvent(ClickEvent.openUrl(url))
- .build()
- );
-
- platform.getActivityLog().addToLog(Activity.urlActivity(resp.sender(), System.currentTimeMillis(), "Profiler", url));
- } catch (IOException e) {
- resp.broadcastPrefixed(text("An error occurred whilst uploading the results.", RED));
- e.printStackTrace();
- }
- });
+ byte[] output = sampler.formCompressedDataPayload(platform.getPlugin().getPlatformInfo(), resp.sender(), threadOrder, comment, mergeMode);
+ try {
+ String key = SparkPlatform.BYTEBIN_CLIENT.postContent(output, SPARK_SAMPLER_MEDIA_TYPE).key();
+ String url = SparkPlatform.VIEWER_URL + key;
+
+ resp.broadcastPrefixed(text("Profiler results:", GOLD));
+ resp.broadcast(text()
+ .content(url)
+ .color(GRAY)
+ .clickEvent(ClickEvent.openUrl(url))
+ .build()
+ );
+
+ platform.getActivityLog().addToLog(Activity.urlActivity(resp.sender(), System.currentTimeMillis(), "Profiler", url));
+ } catch (IOException e) {
+ resp.broadcastPrefixed(text("An error occurred whilst uploading the results.", RED));
+ e.printStackTrace();
+ }
}
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java
index 4863482..e99114a 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java
@@ -31,6 +31,7 @@ import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
+import java.util.function.Supplier;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;
import java.util.stream.Collectors;
@@ -72,6 +73,25 @@ public interface ThreadDumper {
};
/**
+ * Utility to cache the creation of a {@link ThreadDumper} targeting
+ * the game (server/client) thread.
+ */
+ final class GameThread implements Supplier<ThreadDumper> {
+ private Specific dumper = null;
+
+ @Override
+ public ThreadDumper get() {
+ return Objects.requireNonNull(this.dumper, "dumper");
+ }
+
+ public void ensureSetup() {
+ if (this.dumper == null) {
+ this.dumper = new Specific(new long[]{Thread.currentThread().getId()});
+ }
+ }
+ }
+
+ /**
* Implementation of {@link ThreadDumper} that generates data for a specific set of threads.
*/
final class Specific implements ThreadDumper {
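The new GameThread supplier only works if ensureSetup() runs on the game thread itself, since it captures Thread.currentThread().getId(); with commands now executing asynchronously, that call has to happen in the synchronous command entry point before the async hand-off. A minimal usage sketch, assuming a hypothetical plugin class with spark's ThreadDumper on the classpath:

    public class ExamplePlugin {
        private final ThreadDumper.GameThread gameThreadDumper = new ThreadDumper.GameThread();

        // invoked synchronously on the server/client main thread
        public void onCommand(String[] args) {
            this.gameThreadDumper.ensureSetup(); // caches a Specific dumper once
            // ... hand off to SparkPlatform.executeCommand, which goes async ...
        }

        // later, e.g. when the profiler starts on the async command thread
        public ThreadDumper defaultThreadDumper() {
            return this.gameThreadDumper.get(); // throws NPE if setup never ran
        }
    }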