-rw-r--r--  build.gradle                                                                              2
-rw-r--r--  spark-bukkit/src/main/java/me/lucko/spark/bukkit/SparkBukkitPlugin.java                   8
-rw-r--r--  spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/SparkBungeeCordPlugin.java       8
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java                      10
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/command/modules/MemoryModule.java (renamed from spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapModule.java)  60
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/command/modules/TickMonitoringModule.java (renamed from spark-common/src/main/java/me/lucko/spark/common/command/modules/MonitoringModule.java)  2
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/memory/HeapDump.java                          145
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/memory/HeapDumpSummary.java                   173
-rw-r--r--  spark-forge/src/main/java/me/lucko/spark/forge/ForgeClientSparkPlatform.java              8
-rw-r--r--  spark-forge/src/main/java/me/lucko/spark/forge/ForgeServerSparkPlatform.java              4
-rw-r--r--  spark-forge/src/main/java/me/lucko/spark/forge/ForgeSparkPlatform.java                   12
-rw-r--r--  spark-forge/src/main/java/me/lucko/spark/forge/SparkForgeMod.java                        20
-rw-r--r--  spark-sponge/src/main/java/me/lucko/spark/sponge/SparkSpongePlugin.java                  26
-rw-r--r--  spark-velocity/src/main/java/me/lucko/spark/velocity/SparkVelocityPlugin.java            12
14 files changed, 334 insertions, 156 deletions
diff --git a/build.gradle b/build.gradle
index 45c54a8..f262c3f 100644
--- a/build.gradle
+++ b/build.gradle
@@ -8,7 +8,7 @@ subprojects {
apply plugin: 'maven'
ext {
- pluginVersion = '1.1.0'
+ pluginVersion = '1.2.0'
pluginDescription = 'spark is a performance profiling plugin based on sk89q\'s WarmRoast profiler'
}
diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/SparkBukkitPlugin.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/SparkBukkitPlugin.java
index aebf9a7..f1395c4 100644
--- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/SparkBukkitPlugin.java
+++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/SparkBukkitPlugin.java
@@ -30,6 +30,7 @@ import org.bukkit.command.CommandSender;
import org.bukkit.entity.Player;
import org.bukkit.plugin.java.JavaPlugin;
+import java.nio.file.Path;
import java.util.Collections;
import java.util.List;
@@ -52,7 +53,12 @@ public class SparkBukkitPlugin extends JavaPlugin {
@Override
public String getVersion() {
- return SparkBukkitPlugin.this.getDescription().getVersion();
+ return getDescription().getVersion();
+ }
+
+ @Override
+ public Path getPluginFolder() {
+ return getDataFolder().toPath();
}
@Override
diff --git a/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/SparkBungeeCordPlugin.java b/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/SparkBungeeCordPlugin.java
index da8ebf9..6d23683 100644
--- a/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/SparkBungeeCordPlugin.java
+++ b/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/SparkBungeeCordPlugin.java
@@ -34,6 +34,7 @@ import net.md_5.bungee.api.plugin.Command;
import net.md_5.bungee.api.plugin.Plugin;
import net.md_5.bungee.api.plugin.TabExecutor;
+import java.nio.file.Path;
import java.util.Collections;
public class SparkBungeeCordPlugin extends Plugin {
@@ -54,7 +55,12 @@ public class SparkBungeeCordPlugin extends Plugin {
@Override
public String getVersion() {
- return SparkBungeeCordPlugin.this.getDescription().getVersion();
+ return getDescription().getVersion();
+ }
+
+ @Override
+ public Path getPluginFolder() {
+ return getDataFolder().toPath();
}
@Override
diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
index f73e3e4..ef21d1c 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
@@ -24,15 +24,16 @@ import com.google.common.collect.ImmutableList;
import me.lucko.spark.common.command.Arguments;
import me.lucko.spark.common.command.Command;
-import me.lucko.spark.common.command.modules.HeapModule;
-import me.lucko.spark.common.command.modules.MonitoringModule;
+import me.lucko.spark.common.command.modules.MemoryModule;
import me.lucko.spark.common.command.modules.SamplerModule;
+import me.lucko.spark.common.command.modules.TickMonitoringModule;
import me.lucko.spark.common.command.tabcomplete.CompletionSupplier;
import me.lucko.spark.common.command.tabcomplete.TabCompleter;
import me.lucko.spark.sampler.ThreadDumper;
import me.lucko.spark.sampler.TickCounter;
import me.lucko.spark.util.BytebinClient;
+import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@@ -57,8 +58,8 @@ public abstract class SparkPlatform<S> {
private static <T> List<Command<T>> prepareCommands() {
ImmutableList.Builder<Command<T>> builder = ImmutableList.builder();
new SamplerModule<T>().registerCommands(builder::add);
- new MonitoringModule<T>().registerCommands(builder::add);
- new HeapModule<T>().registerCommands(builder::add);
+ new TickMonitoringModule<T>().registerCommands(builder::add);
+ new MemoryModule<T>().registerCommands(builder::add);
return builder.build();
}
@@ -66,6 +67,7 @@ public abstract class SparkPlatform<S> {
// abstract methods implemented by each platform
public abstract String getVersion();
+ public abstract Path getPluginFolder();
public abstract String getLabel();
public abstract void sendMessage(S sender, String message);
public abstract void sendMessage(String message);
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/MemoryModule.java
index 318ce25..405b3d3 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/MemoryModule.java
@@ -24,26 +24,37 @@ import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.command.Command;
import me.lucko.spark.common.command.CommandModule;
import me.lucko.spark.memory.HeapDump;
+import me.lucko.spark.memory.HeapDumpSummary;
import okhttp3.MediaType;
import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.time.LocalDateTime;
+import java.time.format.DateTimeFormatter;
import java.util.function.Consumer;
-public class HeapModule<S> implements CommandModule<S> {
+public class MemoryModule<S> implements CommandModule<S> {
private static final MediaType JSON_TYPE = MediaType.parse("application/json; charset=utf-8");
@Override
public void registerCommands(Consumer<Command<S>> consumer) {
consumer.accept(Command.<S>builder()
- .aliases("heap", "memory")
+ .aliases("heapsummary")
+ .argumentUsage("run-gc-before", null)
.executor((platform, sender, arguments) -> {
platform.runAsync(() -> {
- platform.sendPrefixedMessage("&7Creating a new heap dump, please wait...");
+ if (arguments.boolFlag("run-gc-before")) {
+ platform.sendPrefixedMessage("&7Running garbage collector...");
+ System.gc();
+ }
- HeapDump heapDump;
+ platform.sendPrefixedMessage("&7Creating a new heap dump summary, please wait...");
+
+ HeapDumpSummary heapDump;
try {
- heapDump = HeapDump.createNew();
+ heapDump = HeapDumpSummary.createNew();
} catch (Exception e) {
platform.sendPrefixedMessage("&cAn error occurred whilst inspecting the heap.");
e.printStackTrace();
@@ -53,7 +64,7 @@ public class HeapModule<S> implements CommandModule<S> {
byte[] output = heapDump.formCompressedDataPayload();
try {
String key = SparkPlatform.BYTEBIN_CLIENT.postGzippedContent(output, JSON_TYPE);
- platform.sendPrefixedMessage("&bHeap dump output:");
+ platform.sendPrefixedMessage("&bHeap dump summmary output:");
platform.sendLink(SparkPlatform.VIEWER_URL + key);
} catch (IOException e) {
platform.sendPrefixedMessage("&cAn error occurred whilst uploading the data.");
@@ -63,6 +74,43 @@ public class HeapModule<S> implements CommandModule<S> {
})
.build()
);
+
+ consumer.accept(Command.<S>builder()
+ .aliases("heapdump")
+ .argumentUsage("run-gc-before", null)
+ .argumentUsage("include-non-live", null)
+ .executor((platform, sender, arguments) -> {
+ platform.runAsync(() -> {
+ Path pluginFolder = platform.getPluginFolder();
+ try {
+ Files.createDirectories(pluginFolder);
+ } catch (IOException e) {
+ // ignore
+ }
+
+ Path file = pluginFolder.resolve("heap-" + DateTimeFormatter.ofPattern("yyyy-MM-dd_HH.mm.ss").format(LocalDateTime.now()) + ".hprof");
+ boolean liveOnly = !arguments.boolFlag("include-non-live");
+
+ if (arguments.boolFlag("run-gc-before")) {
+ platform.sendPrefixedMessage("&7Running garbage collector...");
+ System.gc();
+ }
+
+ platform.sendPrefixedMessage("&7Creating a new heap dump, please wait...");
+
+ try {
+ HeapDump.dumpHeap(file, liveOnly);
+ } catch (Exception e) {
+ platform.sendPrefixedMessage("&cAn error occurred whilst creating a heap dump.");
+ e.printStackTrace();
+ return;
+ }
+
+ platform.sendPrefixedMessage("&bHeap dump written to: " + file.toString());
+ });
+ })
+ .build()
+ );
}
}
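
Note: the module now exposes two separate commands in place of the old combined "heap"/"memory" alias. Assuming the usual --flag syntax of spark's Arguments parser (the command label itself comes from each platform's getLabel()), invocations would look roughly like:

    /spark heapsummary --run-gc-before
    /spark heapdump --run-gc-before --include-non-live

heapsummary uploads a gzipped JSON class histogram to the viewer, while heapdump writes a full .hprof snapshot into the plugin folder; --include-non-live flips the liveOnly flag so unreachable objects are included in the snapshot too.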
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/MonitoringModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/TickMonitoringModule.java
index 608d6b4..d0513ab 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/MonitoringModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/TickMonitoringModule.java
@@ -33,7 +33,7 @@ import java.util.Arrays;
import java.util.List;
import java.util.function.Consumer;
-public class MonitoringModule<S> implements CommandModule<S> {
+public class TickMonitoringModule<S> implements CommandModule<S> {
/** The tick monitor instance currently running, if any */
private ReportingTickMonitor activeTickMonitor = null;
diff --git a/spark-common/src/main/java/me/lucko/spark/memory/HeapDump.java b/spark-common/src/main/java/me/lucko/spark/memory/HeapDump.java
index 4007bad..d91c476 100644
--- a/spark-common/src/main/java/me/lucko/spark/memory/HeapDump.java
+++ b/spark-common/src/main/java/me/lucko/spark/memory/HeapDump.java
@@ -20,154 +20,41 @@
package me.lucko.spark.memory;
-import com.google.gson.stream.JsonWriter;
-
-import me.lucko.spark.util.TypeDescriptors;
-
-import java.io.ByteArrayOutputStream;
import java.io.IOException;
-import java.io.OutputStreamWriter;
-import java.io.Writer;
import java.lang.management.ManagementFactory;
-import java.nio.charset.StandardCharsets;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Objects;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-import java.util.stream.Collectors;
-import java.util.zip.GZIPOutputStream;
+import java.nio.file.Path;
import javax.management.JMX;
import javax.management.MBeanServer;
import javax.management.ObjectName;
/**
- * Represents a "heap dump" from the VM.
- *
- * <p>Contains a number of entries, corresponding to types of objects in the virtual machine
- * and their recorded impact on memory usage.</p>
+ * Utility for creating .hprof memory heap snapshots.
*/
-public class HeapDump {
+public final class HeapDump {
+
+ private HeapDump() {}
- /** The object name of the com.sun.management.DiagnosticCommandMBean */
- private static final String DIAGNOSTIC_BEAN = "com.sun.management:type=DiagnosticCommand";
- /** A regex pattern representing the expected format of the raw heap output */
- private static final Pattern OUTPUT_FORMAT = Pattern.compile("^\\s*(\\d+):\\s*(\\d+)\\s*(\\d+)\\s*([^\\s]+).*$");
+ /** The object name of the com.sun.management.HotSpotDiagnosticMXBean */
+ private static final String DIAGNOSTIC_BEAN = "com.sun.management:type=HotSpotDiagnostic";
/**
- * Obtains the raw heap data output from the DiagnosticCommandMBean.
+ * Creates a heap dump at the given output path.
*
- * @return the raw output
- * @throws Exception lots could go wrong!
+ * @param outputPath the path to write the snapshot to
+ * @param live if true, dump only live objects, i.e. objects that are still reachable
+ * @throws Exception if the diagnostic bean could not be accessed or the dump failed
*/
- private static String getRawHeapData() throws Exception {
+ public static void dumpHeap(Path outputPath, boolean live) throws Exception {
MBeanServer beanServer = ManagementFactory.getPlatformMBeanServer();
ObjectName diagnosticBeanName = ObjectName.getInstance(DIAGNOSTIC_BEAN);
- DiagnosticCommandMXBean proxy = JMX.newMXBeanProxy(beanServer, diagnosticBeanName, DiagnosticCommandMXBean.class);
- return proxy.gcClassHistogram(new String[0]);
- }
-
- /**
- * Creates a new heap dump based on the current VM.
- *
- * @return the created heap dump
- * @throws RuntimeException if an error occurred whilst requesting a heap dump from the VM
- */
- public static HeapDump createNew() {
- String rawOutput;
- try {
- rawOutput = getRawHeapData();
- } catch (Exception e) {
- throw new RuntimeException("Unable to get heap dump", e);
- }
-
- return new HeapDump(Arrays.stream(rawOutput.split("\n"))
- .map(line -> {
- Matcher matcher = OUTPUT_FORMAT.matcher(line);
- if (!matcher.matches()) {
- return null;
- }
-
- return new Entry(
- Integer.parseInt(matcher.group(1)),
- Integer.parseInt(matcher.group(2)),
- Long.parseLong(matcher.group(3)),
- TypeDescriptors.getJavaType(matcher.group(4))
- );
- })
- .filter(Objects::nonNull)
- .collect(Collectors.toList()));
- }
-
- /** The entries in this heap dump */
- private final List<Entry> entries;
-
- private HeapDump(List<Entry> entries) {
- this.entries = entries;
- }
-
- private void writeOutput(JsonWriter writer) throws IOException {
- writer.beginObject();
- writer.name("type").value("heap");
- writer.name("entries").beginArray();
- for (Entry entry : this.entries) {
- writer.beginObject();
- writer.name("#").value(entry.getOrder());
- writer.name("i").value(entry.getInstances());
- writer.name("s").value(entry.getBytes());
- writer.name("t").value(entry.getType());
- writer.endObject();
- }
- writer.endArray();
- writer.endObject();
- }
-
- public byte[] formCompressedDataPayload() {
- ByteArrayOutputStream byteOut = new ByteArrayOutputStream();
- try (Writer writer = new OutputStreamWriter(new GZIPOutputStream(byteOut), StandardCharsets.UTF_8)) {
- try (JsonWriter jsonWriter = new JsonWriter(writer)) {
- writeOutput(jsonWriter);
- }
- } catch (IOException e) {
- throw new RuntimeException(e);
- }
- return byteOut.toByteArray();
- }
-
- public static final class Entry {
- private final int order;
- private final int instances;
- private final long bytes;
- private final String type;
-
- Entry(int order, int instances, long bytes, String type) {
- this.order = order;
- this.instances = instances;
- this.bytes = bytes;
- this.type = type;
- }
-
- public int getOrder() {
- return this.order;
- }
-
- public int getInstances() {
- return this.instances;
- }
-
- public long getBytes() {
- return this.bytes;
- }
-
- public String getType() {
- return this.type;
- }
+ HotSpotDiagnosticMXBean proxy = JMX.newMXBeanProxy(beanServer, diagnosticBeanName, HotSpotDiagnosticMXBean.class);
+ proxy.dumpHeap(outputPath.toAbsolutePath().normalize().toString(), live);
}
- public interface DiagnosticCommandMXBean {
- String gcClassHistogram(String[] args);
+ public interface HotSpotDiagnosticMXBean {
+ void dumpHeap(String outputFile, boolean live) throws IOException;
}
}
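
For context, the rewritten HeapDump delegates to the JDK's built-in HotSpotDiagnostic MBean instead of parsing histogram text. A minimal standalone sketch of the same JMX call, runnable outside spark (the output file name here is illustrative):

    import java.io.IOException;
    import java.lang.management.ManagementFactory;

    import javax.management.JMX;
    import javax.management.MBeanServer;
    import javax.management.ObjectName;

    public class HeapDumpSketch {
        // Same minimal proxy interface the diff declares; the JDK also ships
        // com.sun.management.HotSpotDiagnosticMXBean with this operation.
        public interface HotSpotDiagnosticMXBean {
            void dumpHeap(String outputFile, boolean live) throws IOException;
        }

        public static void main(String[] args) throws Exception {
            MBeanServer server = ManagementFactory.getPlatformMBeanServer();
            ObjectName name = ObjectName.getInstance("com.sun.management:type=HotSpotDiagnostic");
            HotSpotDiagnosticMXBean bean = JMX.newMXBeanProxy(server, name, HotSpotDiagnosticMXBean.class);
            // live=true matches spark's default; note dumpHeap throws an
            // IOException if the target file already exists.
            bean.dumpHeap("example-heap.hprof", true);
        }
    }

The timestamped file name built in MemoryModule keeps successive dumps from tripping over that exists-check.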
diff --git a/spark-common/src/main/java/me/lucko/spark/memory/HeapDumpSummary.java b/spark-common/src/main/java/me/lucko/spark/memory/HeapDumpSummary.java
new file mode 100644
index 0000000..402b89e
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/memory/HeapDumpSummary.java
@@ -0,0 +1,173 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.memory;
+
+import com.google.gson.stream.JsonWriter;
+
+import me.lucko.spark.util.TypeDescriptors;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.OutputStreamWriter;
+import java.io.Writer;
+import java.lang.management.ManagementFactory;
+import java.nio.charset.StandardCharsets;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Objects;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+import java.util.stream.Collectors;
+import java.util.zip.GZIPOutputStream;
+
+import javax.management.JMX;
+import javax.management.MBeanServer;
+import javax.management.ObjectName;
+
+/**
+ * Represents a "heap dump summary" from the VM.
+ *
+ * <p>Contains a number of entries, corresponding to types of objects in the virtual machine
+ * and their recorded impact on memory usage.</p>
+ */
+public final class HeapDumpSummary {
+
+ /** The object name of the com.sun.management.DiagnosticCommandMBean */
+ private static final String DIAGNOSTIC_BEAN = "com.sun.management:type=DiagnosticCommand";
+ /** A regex pattern representing the expected format of the raw heap output */
+ private static final Pattern OUTPUT_FORMAT = Pattern.compile("^\\s*(\\d+):\\s*(\\d+)\\s*(\\d+)\\s*([^\\s]+).*$");
+
+ /**
+ * Obtains the raw heap data output from the DiagnosticCommandMBean.
+ *
+ * @return the raw output
+ * @throws Exception lots could go wrong!
+ */
+ private static String getRawHeapData() throws Exception {
+ MBeanServer beanServer = ManagementFactory.getPlatformMBeanServer();
+ ObjectName diagnosticBeanName = ObjectName.getInstance(DIAGNOSTIC_BEAN);
+
+ DiagnosticCommandMXBean proxy = JMX.newMXBeanProxy(beanServer, diagnosticBeanName, DiagnosticCommandMXBean.class);
+ return proxy.gcClassHistogram(new String[0]);
+ }
+
+ /**
+ * Creates a new heap dump summary based on the current VM.
+ *
+ * @return the created heap dump summary
+ * @throws RuntimeException if an error occurred whilst requesting a heap dump summary from the VM
+ */
+ public static HeapDumpSummary createNew() {
+ String rawOutput;
+ try {
+ rawOutput = getRawHeapData();
+ } catch (Exception e) {
+ throw new RuntimeException("Unable to get heap dump", e);
+ }
+
+ return new HeapDumpSummary(Arrays.stream(rawOutput.split("\n"))
+ .map(line -> {
+ Matcher matcher = OUTPUT_FORMAT.matcher(line);
+ if (!matcher.matches()) {
+ return null;
+ }
+
+ return new Entry(
+ Integer.parseInt(matcher.group(1)),
+ Integer.parseInt(matcher.group(2)),
+ Long.parseLong(matcher.group(3)),
+ TypeDescriptors.getJavaType(matcher.group(4))
+ );
+ })
+ .filter(Objects::nonNull)
+ .collect(Collectors.toList()));
+ }
+
+ /** The entries in this heap dump summary */
+ private final List<Entry> entries;
+
+ private HeapDumpSummary(List<Entry> entries) {
+ this.entries = entries;
+ }
+
+ private void writeOutput(JsonWriter writer) throws IOException {
+ writer.beginObject();
+ writer.name("type").value("heap");
+ writer.name("entries").beginArray();
+ for (Entry entry : this.entries) {
+ writer.beginObject();
+ writer.name("#").value(entry.getOrder());
+ writer.name("i").value(entry.getInstances());
+ writer.name("s").value(entry.getBytes());
+ writer.name("t").value(entry.getType());
+ writer.endObject();
+ }
+ writer.endArray();
+ writer.endObject();
+ }
+
+ public byte[] formCompressedDataPayload() {
+ ByteArrayOutputStream byteOut = new ByteArrayOutputStream();
+ try (Writer writer = new OutputStreamWriter(new GZIPOutputStream(byteOut), StandardCharsets.UTF_8)) {
+ try (JsonWriter jsonWriter = new JsonWriter(writer)) {
+ writeOutput(jsonWriter);
+ }
+ } catch (IOException e) {
+ throw new RuntimeException(e);
+ }
+ return byteOut.toByteArray();
+ }
+
+ public static final class Entry {
+ private final int order;
+ private final int instances;
+ private final long bytes;
+ private final String type;
+
+ Entry(int order, int instances, long bytes, String type) {
+ this.order = order;
+ this.instances = instances;
+ this.bytes = bytes;
+ this.type = type;
+ }
+
+ public int getOrder() {
+ return this.order;
+ }
+
+ public int getInstances() {
+ return this.instances;
+ }
+
+ public long getBytes() {
+ return this.bytes;
+ }
+
+ public String getType() {
+ return this.type;
+ }
+ }
+
+ public interface DiagnosticCommandMXBean {
+ String gcClassHistogram(String[] args);
+ }
+
+}
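
The gcClassHistogram operation above is the JMX mapping of the GC.class_histogram diagnostic command, whose output resembles jmap -histo. A small sketch of how OUTPUT_FORMAT picks that text apart, using an invented sample row:

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class HistogramParseSketch {
        private static final Pattern OUTPUT_FORMAT =
                Pattern.compile("^\\s*(\\d+):\\s*(\\d+)\\s*(\\d+)\\s*([^\\s]+).*$");

        public static void main(String[] args) {
            // Typical histogram row: rank, instance count, total bytes, type descriptor.
            String line = "   1:         41824        5092000  [C";
            Matcher m = OUTPUT_FORMAT.matcher(line);
            if (m.matches()) {
                System.out.println("order     = " + m.group(1)); // 1
                System.out.println("instances = " + m.group(2)); // 41824
                System.out.println("bytes     = " + m.group(3)); // 5092000
                System.out.println("type      = " + m.group(4)); // [C, mapped to char[] by TypeDescriptors
            }
        }
    }

Header and separator rows of the histogram simply fail to match, so they become nulls that the Objects::nonNull filter in createNew() discards.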
diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeClientSparkPlatform.java b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeClientSparkPlatform.java
index b56dd70..c383636 100644
--- a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeClientSparkPlatform.java
+++ b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeClientSparkPlatform.java
@@ -34,8 +34,12 @@ import java.util.List;
public class ForgeClientSparkPlatform extends ForgeSparkPlatform {
- public static void register() {
- ClientCommandHandler.instance.registerCommand(new ForgeClientSparkPlatform());
+ public static void register(SparkForgeMod mod) {
+ ClientCommandHandler.instance.registerCommand(new ForgeClientSparkPlatform(mod));
+ }
+
+ public ForgeClientSparkPlatform(SparkForgeMod mod) {
+ super(mod);
}
@Override
diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeServerSparkPlatform.java b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeServerSparkPlatform.java
index d667234..a5c6c01 100644
--- a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeServerSparkPlatform.java
+++ b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeServerSparkPlatform.java
@@ -34,6 +34,10 @@ import java.util.List;
public class ForgeServerSparkPlatform extends ForgeSparkPlatform {
+ public ForgeServerSparkPlatform(SparkForgeMod mod) {
+ super(mod);
+ }
+
@Override
protected void broadcast(ITextComponent msg) {
FMLCommonHandler.instance().getMinecraftServerInstance().sendMessage(msg);
diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeSparkPlatform.java b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeSparkPlatform.java
index f5a2c9e..1f4c173 100644
--- a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeSparkPlatform.java
+++ b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeSparkPlatform.java
@@ -38,6 +38,7 @@ import net.minecraft.util.text.TextFormatting;
import net.minecraft.util.text.event.ClickEvent;
import net.minecraftforge.fml.common.Mod;
+import java.nio.file.Path;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ExecutorService;
@@ -48,15 +49,26 @@ import javax.annotation.Nullable;
@SuppressWarnings("NullableProblems")
public abstract class ForgeSparkPlatform extends SparkPlatform<ICommandSender> implements ICommand {
+ private final SparkForgeMod mod;
+
private final ExecutorService worker = Executors.newSingleThreadExecutor(
new ThreadFactoryBuilder().setNameFormat("spark-forge-async-worker").build()
);
+ protected ForgeSparkPlatform(SparkForgeMod mod) {
+ this.mod = mod;
+ }
+
@Override
public String getVersion() {
return SparkForgeMod.class.getAnnotation(Mod.class).version();
}
+ @Override
+ public Path getPluginFolder() {
+ return this.mod.getConfigDirectory();
+ }
+
@SuppressWarnings("deprecation")
protected ITextComponent colorize(String message) {
TextComponent component = ComponentSerializers.LEGACY.deserialize(message, '&');
diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/SparkForgeMod.java b/spark-forge/src/main/java/me/lucko/spark/forge/SparkForgeMod.java
index 133ec09..aa4f379 100644
--- a/spark-forge/src/main/java/me/lucko/spark/forge/SparkForgeMod.java
+++ b/spark-forge/src/main/java/me/lucko/spark/forge/SparkForgeMod.java
@@ -24,9 +24,12 @@ import net.minecraftforge.fml.common.FMLCommonHandler;
import net.minecraftforge.fml.common.Mod;
import net.minecraftforge.fml.common.Mod.EventHandler;
import net.minecraftforge.fml.common.event.FMLInitializationEvent;
+import net.minecraftforge.fml.common.event.FMLPreInitializationEvent;
import net.minecraftforge.fml.common.event.FMLServerStartingEvent;
import net.minecraftforge.fml.relauncher.Side;
+import java.nio.file.Path;
+
@Mod(
modid = "spark",
name = "spark",
@@ -35,16 +38,29 @@ import net.minecraftforge.fml.relauncher.Side;
)
public class SparkForgeMod {
+ private Path configDirectory = null;
+
+ @EventHandler
+ public void preInit(FMLPreInitializationEvent e) {
+ this.configDirectory = e.getModConfigurationDirectory().toPath();
+ }
+
@EventHandler
public void init(FMLInitializationEvent e) {
if (FMLCommonHandler.instance().getSide() == Side.CLIENT) {
- ForgeClientSparkPlatform.register();
+ ForgeClientSparkPlatform.register(this);
}
}
@EventHandler
public void serverInit(FMLServerStartingEvent e) {
- e.registerServerCommand(new ForgeServerSparkPlatform());
+ e.registerServerCommand(new ForgeServerSparkPlatform(this));
}
+ public Path getConfigDirectory() {
+ if (this.configDirectory == null) {
+ throw new IllegalStateException("Config directory not set");
+ }
+ return this.configDirectory;
+ }
}
diff --git a/spark-sponge/src/main/java/me/lucko/spark/sponge/SparkSpongePlugin.java b/spark-sponge/src/main/java/me/lucko/spark/sponge/SparkSpongePlugin.java
index 01052cb..2abaf3f 100644
--- a/spark-sponge/src/main/java/me/lucko/spark/sponge/SparkSpongePlugin.java
+++ b/spark-sponge/src/main/java/me/lucko/spark/sponge/SparkSpongePlugin.java
@@ -27,10 +27,10 @@ import me.lucko.spark.sampler.ThreadDumper;
import me.lucko.spark.sampler.TickCounter;
import org.spongepowered.api.Game;
-import org.spongepowered.api.Sponge;
import org.spongepowered.api.command.CommandCallable;
import org.spongepowered.api.command.CommandResult;
import org.spongepowered.api.command.CommandSource;
+import org.spongepowered.api.config.ConfigDir;
import org.spongepowered.api.entity.living.player.Player;
import org.spongepowered.api.event.Listener;
import org.spongepowered.api.event.game.state.GameStartedServerEvent;
@@ -47,6 +47,7 @@ import org.spongepowered.api.world.World;
import java.net.MalformedURLException;
import java.net.URL;
+import java.nio.file.Path;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
@@ -66,14 +67,18 @@ import javax.annotation.Nullable;
)
public class SparkSpongePlugin implements CommandCallable {
+ private final Game game;
+ private final Path configDirectory;
+ private final SpongeExecutorService asyncExecutor;
+
private final SparkPlatform<CommandSource> sparkPlatform = new SparkPlatform<CommandSource>() {
private Text colorize(String message) {
return TextSerializers.FORMATTING_CODE.deserialize(message);
}
private void broadcast(Text msg) {
- Sponge.getServer().getConsole().sendMessage(msg);
- for (Player player : Sponge.getServer().getOnlinePlayers()) {
+ SparkSpongePlugin.this.game.getServer().getConsole().sendMessage(msg);
+ for (Player player : SparkSpongePlugin.this.game.getServer().getOnlinePlayers()) {
if (player.hasPermission("spark")) {
player.sendMessage(msg);
}
@@ -86,6 +91,11 @@ public class SparkSpongePlugin implements CommandCallable {
}
@Override
+ public Path getPluginFolder() {
+ return SparkSpongePlugin.this.configDirectory;
+ }
+
+ @Override
public String getLabel() {
return "spark";
}
@@ -131,11 +141,11 @@ public class SparkSpongePlugin implements CommandCallable {
};
@Inject
- @AsynchronousExecutor
- private SpongeExecutorService asyncExecutor;
-
- @Inject
- private Game game;
+ public SparkSpongePlugin(Game game, @ConfigDir(sharedRoot = false) Path configDirectory, @AsynchronousExecutor SpongeExecutorService asyncExecutor) {
+ this.game = game;
+ this.configDirectory = configDirectory;
+ this.asyncExecutor = asyncExecutor;
+ }
@Listener
public void onServerStart(GameStartedServerEvent event) {
diff --git a/spark-velocity/src/main/java/me/lucko/spark/velocity/SparkVelocityPlugin.java b/spark-velocity/src/main/java/me/lucko/spark/velocity/SparkVelocityPlugin.java
index 8cc10e1..cf5ed79 100644
--- a/spark-velocity/src/main/java/me/lucko/spark/velocity/SparkVelocityPlugin.java
+++ b/spark-velocity/src/main/java/me/lucko/spark/velocity/SparkVelocityPlugin.java
@@ -26,6 +26,7 @@ import com.velocitypowered.api.event.PostOrder;
import com.velocitypowered.api.event.Subscribe;
import com.velocitypowered.api.event.proxy.ProxyInitializeEvent;
import com.velocitypowered.api.plugin.Plugin;
+import com.velocitypowered.api.plugin.annotation.DataDirectory;
import com.velocitypowered.api.proxy.Player;
import com.velocitypowered.api.proxy.ProxyServer;
@@ -39,6 +40,8 @@ import net.kyori.text.event.ClickEvent;
import net.kyori.text.format.TextColor;
import net.kyori.text.serializer.ComponentSerializers;
+import java.nio.file.Path;
+
@Plugin(
id = "spark",
name = "spark",
@@ -49,6 +52,7 @@ import net.kyori.text.serializer.ComponentSerializers;
public class SparkVelocityPlugin {
private final ProxyServer proxy;
+ private final Path configDirectory;
private final SparkPlatform<CommandSource> sparkPlatform = new SparkPlatform<CommandSource>() {
@SuppressWarnings("deprecation")
@@ -71,6 +75,11 @@ public class SparkVelocityPlugin {
}
@Override
+ public Path getPluginFolder() {
+ return SparkVelocityPlugin.this.configDirectory;
+ }
+
+ @Override
public String getLabel() {
return "sparkv";
}
@@ -111,8 +120,9 @@ public class SparkVelocityPlugin {
};
@Inject
- public SparkVelocityPlugin(ProxyServer proxy) {
+ public SparkVelocityPlugin(ProxyServer proxy, @DataDirectory Path configDirectory) {
this.proxy = proxy;
+ this.configDirectory = configDirectory;
}
@Subscribe(order = PostOrder.FIRST)