-rw-r--r--  settings.gradle  1
-rw-r--r--  spark-bukkit/build.gradle  2
-rw-r--r--  spark-bungeecord/build.gradle  2
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java  24
-rwxr-xr-x  spark-common/src/main/resources/spark-native/linux/aarch64/libasyncProfiler.so (renamed from spark-common/src/main/resources/spark/linux/aarch64/libasyncProfiler.so)  bin 343408 -> 343408 bytes
-rwxr-xr-x  spark-common/src/main/resources/spark-native/linux/amd64-musl/libasyncProfiler.so (renamed from spark-common/src/main/resources/spark/linux/amd64-musl/libasyncProfiler.so)  bin 317560 -> 317560 bytes
-rwxr-xr-x  spark-common/src/main/resources/spark-native/linux/amd64/libasyncProfiler.so (renamed from spark-common/src/main/resources/spark/linux/amd64/libasyncProfiler.so)  bin 361312 -> 361312 bytes
-rwxr-xr-x  spark-common/src/main/resources/spark-native/macos/libasyncProfiler.so (renamed from spark-common/src/main/resources/spark/macos/libasyncProfiler.so)  bin 724576 -> 724576 bytes
-rw-r--r--  spark-fabric/build.gradle  2
-rw-r--r--  spark-forge/build.gradle  2
-rw-r--r--  spark-minestom/build.gradle  2
-rw-r--r--  spark-neoforge/build.gradle  2
-rw-r--r--  spark-nukkit/build.gradle  2
-rw-r--r--  spark-paper/build.gradle  75
-rw-r--r--  spark-paper/src/main/java/me/lucko/spark/paper/PaperClassSourceLookup.java  61
-rw-r--r--  spark-paper/src/main/java/me/lucko/spark/paper/PaperCommandSender.java  58
-rw-r--r--  spark-paper/src/main/java/me/lucko/spark/paper/PaperPlatformInfo.java  53
-rw-r--r--  spark-paper/src/main/java/me/lucko/spark/paper/PaperPlayerPingProvider.java  45
-rw-r--r--  spark-paper/src/main/java/me/lucko/spark/paper/PaperServerConfigProvider.java  159
-rw-r--r--  spark-paper/src/main/java/me/lucko/spark/paper/PaperSparkPlugin.java  208
-rw-r--r--  spark-paper/src/main/java/me/lucko/spark/paper/PaperTickHook.java  46
-rw-r--r--  spark-paper/src/main/java/me/lucko/spark/paper/PaperTickReporter.java  46
-rw-r--r--  spark-paper/src/main/java/me/lucko/spark/paper/PaperWorldInfoProvider.java  105
-rw-r--r--  spark-paper/src/main/java/me/lucko/spark/paper/api/Compatibility.java  36
-rw-r--r--  spark-paper/src/main/java/me/lucko/spark/paper/api/PaperClassLookup.java  27
-rw-r--r--  spark-paper/src/main/java/me/lucko/spark/paper/api/PaperScheduler.java  29
-rw-r--r--  spark-paper/src/main/java/me/lucko/spark/paper/api/PaperSparkModule.java  109
-rw-r--r--  spark-sponge7/build.gradle  2
-rw-r--r--  spark-sponge8/build.gradle  2
-rw-r--r--  spark-velocity/build.gradle  2
-rw-r--r--  spark-velocity4/build.gradle  2
-rw-r--r--  spark-waterdog/build.gradle  2
32 files changed, 1103 insertions, 3 deletions
diff --git a/settings.gradle b/settings.gradle
index 7075391..6931c9f 100644
--- a/settings.gradle
+++ b/settings.gradle
@@ -25,6 +25,7 @@ include (
'spark-api',
'spark-common',
'spark-bukkit',
+ 'spark-paper',
'spark-bungeecord',
'spark-velocity',
'spark-velocity4',
diff --git a/spark-bukkit/build.gradle b/spark-bukkit/build.gradle
index de30294..58b6eba 100644
--- a/spark-bukkit/build.gradle
+++ b/spark-bukkit/build.gradle
@@ -43,6 +43,8 @@ shadowJar {
exclude 'module-info.class'
exclude 'META-INF/maven/**'
exclude 'META-INF/proguard/**'
+ exclude '**/*.proto'
+ exclude '**/*.proto.bin'
}
artifacts {
diff --git a/spark-bungeecord/build.gradle b/spark-bungeecord/build.gradle
index 1a279eb..dd7f1c8 100644
--- a/spark-bungeecord/build.gradle
+++ b/spark-bungeecord/build.gradle
@@ -33,6 +33,8 @@ shadowJar {
exclude 'module-info.class'
exclude 'META-INF/maven/**'
exclude 'META-INF/proguard/**'
+ exclude '**/*.proto'
+ exclude '**/*.proto.bin'
}
artifacts {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java
index 192275b..7dcb131 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java
@@ -114,13 +114,19 @@ public class AsyncProfilerAccess {
if (this.setupException instanceof UnsupportedSystemException) {
platform.getPlugin().log(Level.INFO, "The async-profiler engine is not supported for your os/arch (" +
this.setupException.getMessage() + "), so the built-in Java engine will be used instead.");
+ } else if (this.setupException instanceof UnsupportedJvmException) {
+ platform.getPlugin().log(Level.INFO, "The async-profiler engine is not supported for your JVM (" +
+ this.setupException.getMessage() + "), so the built-in Java engine will be used instead.");
} else if (this.setupException instanceof NativeLoadingException && this.setupException.getCause().getMessage().contains("libstdc++")) {
platform.getPlugin().log(Level.WARNING, "Unable to initialise the async-profiler engine because libstdc++ is not installed.");
platform.getPlugin().log(Level.WARNING, "Please see here for more information: https://spark.lucko.me/docs/misc/Using-async-profiler#install-libstdc");
} else {
- platform.getPlugin().log(Level.WARNING, "Unable to initialise the async-profiler engine: " + this.setupException.getMessage());
+ String error = this.setupException.getMessage();
+ if (this.setupException.getCause() != null) {
+ error += " (" + this.setupException.getCause().getMessage() + ")";
+ }
+ platform.getPlugin().log(Level.WARNING, "Unable to initialise the async-profiler engine: " + error);
platform.getPlugin().log(Level.WARNING, "Please see here for more information: https://spark.lucko.me/docs/misc/Using-async-profiler");
- this.setupException.printStackTrace();
}
}
@@ -140,6 +146,12 @@ public class AsyncProfilerAccess {
// check compatibility
String os = System.getProperty("os.name").toLowerCase(Locale.ROOT).replace(" ", "");
String arch = System.getProperty("os.arch").toLowerCase(Locale.ROOT);
+ String jvm = System.getProperty("java.vm.name");
+
+ // openj9 not supported by async-profiler at the moment
+ if (jvm.contains("OpenJ9")) {
+ throw new UnsupportedJvmException(jvm);
+ }
if (os.equals("linux") && arch.equals("amd64") && isLinuxMusl()) {
arch = "amd64-musl";
@@ -159,7 +171,7 @@ public class AsyncProfilerAccess {
}
// extract the profiler binary from the spark jar file
- String resource = "spark/" + libPath + "/libasyncProfiler.so";
+ String resource = "spark-native/" + libPath + "/libasyncProfiler.so";
URL profilerResource = AsyncProfilerAccess.class.getClassLoader().getResource(resource);
if (profilerResource == null) {
throw new IllegalStateException("Could not find " + resource + " in spark jar file");
@@ -224,6 +236,12 @@ public class AsyncProfilerAccess {
}
}
+ private static final class UnsupportedJvmException extends UnsupportedOperationException {
+ public UnsupportedJvmException(String jvm) {
+ super(jvm);
+ }
+ }
+
private static final class NativeLoadingException extends RuntimeException {
public NativeLoadingException(Throwable cause) {
super("A runtime error occurred whilst loading the native library", cause);
diff --git a/spark-common/src/main/resources/spark/linux/aarch64/libasyncProfiler.so b/spark-common/src/main/resources/spark-native/linux/aarch64/libasyncProfiler.so
index 800cf91..800cf91 100755
--- a/spark-common/src/main/resources/spark/linux/aarch64/libasyncProfiler.so
+++ b/spark-common/src/main/resources/spark-native/linux/aarch64/libasyncProfiler.so
Binary files differ
diff --git a/spark-common/src/main/resources/spark/linux/amd64-musl/libasyncProfiler.so b/spark-common/src/main/resources/spark-native/linux/amd64-musl/libasyncProfiler.so
index 3c81d1c..3c81d1c 100755
--- a/spark-common/src/main/resources/spark/linux/amd64-musl/libasyncProfiler.so
+++ b/spark-common/src/main/resources/spark-native/linux/amd64-musl/libasyncProfiler.so
Binary files differ
diff --git a/spark-common/src/main/resources/spark/linux/amd64/libasyncProfiler.so b/spark-common/src/main/resources/spark-native/linux/amd64/libasyncProfiler.so
index 5af5071..5af5071 100755
--- a/spark-common/src/main/resources/spark/linux/amd64/libasyncProfiler.so
+++ b/spark-common/src/main/resources/spark-native/linux/amd64/libasyncProfiler.so
Binary files differ
diff --git a/spark-common/src/main/resources/spark/macos/libasyncProfiler.so b/spark-common/src/main/resources/spark-native/macos/libasyncProfiler.so
index 4930c67..4930c67 100755
--- a/spark-common/src/main/resources/spark/macos/libasyncProfiler.so
+++ b/spark-common/src/main/resources/spark-native/macos/libasyncProfiler.so
Binary files differ
diff --git a/spark-fabric/build.gradle b/spark-fabric/build.gradle
index 0114912..711b1f9 100644
--- a/spark-fabric/build.gradle
+++ b/spark-fabric/build.gradle
@@ -86,6 +86,8 @@ shadowJar {
exclude 'module-info.class'
exclude 'META-INF/maven/**'
exclude 'META-INF/proguard/**'
+ exclude '**/*.proto'
+ exclude '**/*.proto.bin'
dependencies {
exclude(dependency('org.ow2.asm::'))
diff --git a/spark-forge/build.gradle b/spark-forge/build.gradle
index 79c99b0..bde35e1 100644
--- a/spark-forge/build.gradle
+++ b/spark-forge/build.gradle
@@ -53,6 +53,8 @@ shadowJar {
exclude 'module-info.class'
exclude 'META-INF/maven/**'
exclude 'META-INF/proguard/**'
+ exclude '**/*.proto'
+ exclude '**/*.proto.bin'
}
artifacts {
diff --git a/spark-minestom/build.gradle b/spark-minestom/build.gradle
index 1422df7..c08baf5 100644
--- a/spark-minestom/build.gradle
+++ b/spark-minestom/build.gradle
@@ -41,6 +41,8 @@ shadowJar {
exclude 'module-info.class'
exclude 'META-INF/maven/**'
exclude 'META-INF/proguard/**'
+ exclude '**/*.proto'
+ exclude '**/*.proto.bin'
}
artifacts {
diff --git a/spark-neoforge/build.gradle b/spark-neoforge/build.gradle
index dc0514e..a08a7ce 100644
--- a/spark-neoforge/build.gradle
+++ b/spark-neoforge/build.gradle
@@ -60,6 +60,8 @@ shadowJar {
exclude 'module-info.class'
exclude 'META-INF/maven/**'
exclude 'META-INF/proguard/**'
+ exclude '**/*.proto'
+ exclude '**/*.proto.bin'
}
artifacts {
diff --git a/spark-nukkit/build.gradle b/spark-nukkit/build.gradle
index f8f443f..5c709e3 100644
--- a/spark-nukkit/build.gradle
+++ b/spark-nukkit/build.gradle
@@ -37,6 +37,8 @@ shadowJar {
exclude 'module-info.class'
exclude 'META-INF/maven/**'
exclude 'META-INF/proguard/**'
+ exclude '**/*.proto'
+ exclude '**/*.proto.bin'
}
artifacts {
diff --git a/spark-paper/build.gradle b/spark-paper/build.gradle
new file mode 100644
index 0000000..be0aa0d
--- /dev/null
+++ b/spark-paper/build.gradle
@@ -0,0 +1,75 @@
+plugins {
+ id 'io.github.goooler.shadow' version '8.1.7'
+ id 'maven-publish'
+}
+
+tasks.withType(JavaCompile) {
+ // override, compile targeting J21
+ options.release = 21
+}
+
+tasks.jar {
+ archiveClassifier = 'original'
+}
+
+dependencies {
+ implementation project(':spark-common')
+ compileOnly 'io.papermc.paper:paper-api:1.21-R0.1-SNAPSHOT'
+}
+
+repositories {
+ maven { url "https://repo.papermc.io/repository/maven-public/" }
+}
+
+shadowJar {
+ archiveFileName = "spark-${project.pluginVersion}-paper.jar"
+ archiveClassifier = ''
+
+ dependencies {
+ exclude(dependency('net.kyori:^(?!adventure-text-feature-pagination).+$'))
+ exclude(dependency('net.bytebuddy:byte-buddy-agent'))
+ }
+
+ relocate 'net.kyori.adventure.text.feature.pagination', 'me.lucko.spark.paper.lib.adventure.pagination'
+ relocate 'com.google.protobuf', 'me.lucko.spark.paper.lib.protobuf'
+ relocate 'org.objectweb.asm', 'me.lucko.spark.paper.lib.asm'
+ relocate 'one.profiler', 'me.lucko.spark.paper.lib.asyncprofiler'
+ relocate 'me.lucko.bytesocks.client', 'me.lucko.spark.paper.lib.bytesocks'
+ relocate 'org.java_websocket', 'me.lucko.spark.paper.lib.bytesocks.ws'
+
+ // nest common classes beneath the paper package to avoid conflicts with spark-bukkit
+ relocate 'me.lucko.spark.common', 'me.lucko.spark.paper.common'
+ relocate 'me.lucko.spark.proto', 'me.lucko.spark.paper.proto'
+ relocate 'spark-native', 'spark-paper-native'
+
+ exclude 'module-info.class'
+ exclude 'META-INF/maven/**'
+ exclude 'META-INF/proguard/**'
+ exclude '**/*.proto'
+ exclude '**/*.proto.bin'
+ exclude '**/*.proto'
+ exclude '**/*.proto.bin'
+}
+
+artifacts {
+ archives shadowJar
+ shadow shadowJar
+}
+
+publishing {
+ //repositories {
+ // maven {
+ // url = 'https://oss.sonatype.org/content/repositories/snapshots'
+ // credentials {
+ // username = sonatypeUsername
+ // password = sonatypePassword
+ // }
+ // }
+ //}
+ publications {
+ shadow(MavenPublication) { publication ->
+ project.shadow.component(publication)
+ version = "${project.pluginVersion}-SNAPSHOT"
+ }
+ }
+}
diff --git a/spark-paper/src/main/java/me/lucko/spark/paper/PaperClassSourceLookup.java b/spark-paper/src/main/java/me/lucko/spark/paper/PaperClassSourceLookup.java
new file mode 100644
index 0000000..2c5f7c0
--- /dev/null
+++ b/spark-paper/src/main/java/me/lucko/spark/paper/PaperClassSourceLookup.java
@@ -0,0 +1,61 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.paper;
+
+import me.lucko.spark.common.sampler.source.ClassSourceLookup;
+import org.bukkit.plugin.java.JavaPlugin;
+
+import java.lang.reflect.Field;
+
+public class PaperClassSourceLookup extends ClassSourceLookup.ByClassLoader {
+ private static final Class<?> PLUGIN_CLASS_LOADER;
+ private static final Field PLUGIN_FIELD;
+
+ private static final Class<?> PAPER_PLUGIN_CLASS_LOADER;
+ private static final Field PAPER_PLUGIN_FIELD;
+
+ static {
+ try {
+ PLUGIN_CLASS_LOADER = Class.forName("org.bukkit.plugin.java.PluginClassLoader");
+ PLUGIN_FIELD = PLUGIN_CLASS_LOADER.getDeclaredField("plugin");
+ PLUGIN_FIELD.setAccessible(true);
+
+ PAPER_PLUGIN_CLASS_LOADER = Class.forName("io.papermc.paper.plugin.entrypoint.classloader.PaperPluginClassLoader");
+ PAPER_PLUGIN_FIELD = PAPER_PLUGIN_CLASS_LOADER.getDeclaredField("loadedJavaPlugin");
+ PAPER_PLUGIN_FIELD.setAccessible(true);
+ } catch (ReflectiveOperationException e) {
+ throw new ExceptionInInitializerError(e);
+ }
+ }
+
+ @Override
+ public String identify(ClassLoader loader) throws ReflectiveOperationException {
+ if (PLUGIN_CLASS_LOADER.isInstance(loader)) {
+ JavaPlugin plugin = (JavaPlugin) PLUGIN_FIELD.get(loader);
+ return plugin.getName();
+ } else if (PAPER_PLUGIN_CLASS_LOADER.isInstance(loader)) {
+ JavaPlugin plugin = (JavaPlugin) PAPER_PLUGIN_FIELD.get(loader);
+ return plugin.getName();
+ }
+ return null;
+ }
+}
+
diff --git a/spark-paper/src/main/java/me/lucko/spark/paper/PaperCommandSender.java b/spark-paper/src/main/java/me/lucko/spark/paper/PaperCommandSender.java
new file mode 100644
index 0000000..c3b569d
--- /dev/null
+++ b/spark-paper/src/main/java/me/lucko/spark/paper/PaperCommandSender.java
@@ -0,0 +1,58 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.paper;
+
+import me.lucko.spark.common.command.sender.AbstractCommandSender;
+import net.kyori.adventure.text.Component;
+import org.bukkit.command.CommandSender;
+import org.bukkit.entity.Player;
+
+import java.util.UUID;
+
+public class PaperCommandSender extends AbstractCommandSender<CommandSender> {
+
+ public PaperCommandSender(CommandSender sender) {
+ super(sender);
+ }
+
+ @Override
+ public String getName() {
+ return this.delegate.getName();
+ }
+
+ @Override
+ public UUID getUniqueId() {
+ if (super.delegate instanceof Player player) {
+ return player.getUniqueId();
+ }
+ return null;
+ }
+
+ @Override
+ public void sendMessage(Component message) {
+ super.delegate.sendMessage(message);
+ }
+
+ @Override
+ public boolean hasPermission(String permission) {
+ return super.delegate.hasPermission(permission);
+ }
+}
diff --git a/spark-paper/src/main/java/me/lucko/spark/paper/PaperPlatformInfo.java b/spark-paper/src/main/java/me/lucko/spark/paper/PaperPlatformInfo.java
new file mode 100644
index 0000000..114175e
--- /dev/null
+++ b/spark-paper/src/main/java/me/lucko/spark/paper/PaperPlatformInfo.java
@@ -0,0 +1,53 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.paper;
+
+import io.papermc.paper.ServerBuildInfo;
+import me.lucko.spark.common.platform.PlatformInfo;
+
+public enum PaperPlatformInfo implements PlatformInfo {
+ INSTANCE;
+
+ @Override
+ public Type getType() {
+ return Type.SERVER;
+ }
+
+ @Override
+ public String getName() {
+ return "Paper";
+ }
+
+ @Override
+ public String getBrand() {
+ return ServerBuildInfo.buildInfo().brandName();
+ }
+
+ @Override
+ public String getVersion() {
+ return ServerBuildInfo.buildInfo().asString(ServerBuildInfo.StringRepresentation.VERSION_SIMPLE);
+ }
+
+ @Override
+ public String getMinecraftVersion() {
+ return ServerBuildInfo.buildInfo().minecraftVersionId();
+ }
+}
diff --git a/spark-paper/src/main/java/me/lucko/spark/paper/PaperPlayerPingProvider.java b/spark-paper/src/main/java/me/lucko/spark/paper/PaperPlayerPingProvider.java
new file mode 100644
index 0000000..e896b21
--- /dev/null
+++ b/spark-paper/src/main/java/me/lucko/spark/paper/PaperPlayerPingProvider.java
@@ -0,0 +1,45 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.paper;
+
+import com.google.common.collect.ImmutableMap;
+import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
+import org.bukkit.Server;
+import org.bukkit.entity.Player;
+
+import java.util.Map;
+
+public class PaperPlayerPingProvider implements PlayerPingProvider {
+ private final Server server;
+
+ public PaperPlayerPingProvider(Server server) {
+ this.server = server;
+ }
+
+ @Override
+ public Map<String, Integer> poll() {
+ ImmutableMap.Builder<String, Integer> builder = ImmutableMap.builder();
+ for (Player player : this.server.getOnlinePlayers()) {
+ builder.put(player.getName(), player.getPing());
+ }
+ return builder.build();
+ }
+}
diff --git a/spark-paper/src/main/java/me/lucko/spark/paper/PaperServerConfigProvider.java b/spark-paper/src/main/java/me/lucko/spark/paper/PaperServerConfigProvider.java
new file mode 100644
index 0000000..d1301f8
--- /dev/null
+++ b/spark-paper/src/main/java/me/lucko/spark/paper/PaperServerConfigProvider.java
@@ -0,0 +1,159 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.paper;
+
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.ImmutableSet;
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
+import com.google.gson.JsonSerializer;
+import me.lucko.spark.common.platform.serverconfig.ConfigParser;
+import me.lucko.spark.common.platform.serverconfig.ExcludedConfigFilter;
+import me.lucko.spark.common.platform.serverconfig.PropertiesConfigParser;
+import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider;
+import org.bukkit.Bukkit;
+import org.bukkit.World;
+import org.bukkit.configuration.MemorySection;
+import org.bukkit.configuration.file.YamlConfiguration;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.Collection;
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+public class PaperServerConfigProvider extends ServerConfigProvider {
+
+ /** A map of provided files and their type */
+ private static final Map<String, ConfigParser> FILES;
+ /** A collection of paths to be excluded from the files */
+ private static final Collection<String> HIDDEN_PATHS;
+
+ public PaperServerConfigProvider() {
+ super(FILES, HIDDEN_PATHS);
+ }
+
+ private static class YamlConfigParser implements ConfigParser {
+ public static final YamlConfigParser INSTANCE = new YamlConfigParser();
+ protected static final Gson GSON = new GsonBuilder()
+ .registerTypeAdapter(MemorySection.class, (JsonSerializer<MemorySection>) (obj, type, ctx) -> ctx.serialize(obj.getValues(false)))
+ .create();
+
+ @Override
+ public JsonElement load(String file, ExcludedConfigFilter filter) throws IOException {
+ Map<String, Object> values = this.parse(Paths.get(file));
+ if (values == null) {
+ return null;
+ }
+
+ return filter.apply(GSON.toJsonTree(values));
+ }
+
+ @Override
+ public Map<String, Object> parse(BufferedReader reader) throws IOException {
+ YamlConfiguration config = YamlConfiguration.loadConfiguration(reader);
+ return config.getValues(false);
+ }
+ }
+
+ // Paper 1.19+ split config layout
+ private static class SplitYamlConfigParser extends YamlConfigParser {
+ public static final SplitYamlConfigParser INSTANCE = new SplitYamlConfigParser();
+
+ @Override
+ public JsonElement load(String group, ExcludedConfigFilter filter) throws IOException {
+ String prefix = group.replace("/", "");
+
+ Path configDir = Paths.get("config");
+ if (!Files.exists(configDir)) {
+ return null;
+ }
+
+ JsonObject root = new JsonObject();
+
+ for (Map.Entry<String, Path> entry : getNestedFiles(configDir, prefix).entrySet()) {
+ String fileName = entry.getKey();
+ Path path = entry.getValue();
+
+ Map<String, Object> values = this.parse(path);
+ if (values == null) {
+ continue;
+ }
+
+ // apply the filter individually to each nested file
+ root.add(fileName, filter.apply(GSON.toJsonTree(values)));
+ }
+
+ return root;
+ }
+
+ private static Map<String, Path> getNestedFiles(Path configDir, String prefix) {
+ Map<String, Path> files = new LinkedHashMap<>();
+ files.put("global.yml", configDir.resolve(prefix + "-global.yml"));
+ files.put("world-defaults.yml", configDir.resolve(prefix + "-world-defaults.yml"));
+ for (World world : Bukkit.getWorlds()) {
+ files.put(world.getName() + ".yml", world.getWorldFolder().toPath().resolve(prefix + "-world.yml"));
+ }
+ return files;
+ }
+ }
+
+ static {
+ ImmutableMap.Builder<String, ConfigParser> files = ImmutableMap.<String, ConfigParser>builder()
+ .put("server.properties", PropertiesConfigParser.INSTANCE)
+ .put("bukkit.yml", YamlConfigParser.INSTANCE)
+ .put("spigot.yml", YamlConfigParser.INSTANCE)
+ .put("paper.yml", YamlConfigParser.INSTANCE)
+ .put("paper/", SplitYamlConfigParser.INSTANCE)
+ .put("purpur.yml", YamlConfigParser.INSTANCE)
+ .put("pufferfish.yml", YamlConfigParser.INSTANCE);
+
+ for (String config : getSystemPropertyList("spark.serverconfigs.extra")) {
+ files.put(config, YamlConfigParser.INSTANCE);
+ }
+
+ ImmutableSet.Builder<String> hiddenPaths = ImmutableSet.<String>builder()
+ .add("database")
+ .add("settings.bungeecord-addresses")
+ .add("settings.velocity-support.secret")
+ .add("proxies.velocity.secret")
+ .add("server-ip")
+ .add("motd")
+ .add("resource-pack")
+ .add("rcon<dot>password")
+ .add("rcon<dot>ip")
+ .add("level-seed")
+ .add("world-settings.*.feature-seeds")
+ .add("world-settings.*.seed-*")
+ .add("feature-seeds")
+ .add("seed-*")
+ .addAll(getSystemPropertyList("spark.serverconfigs.hiddenpaths"));
+
+ FILES = files.build();
+ HIDDEN_PATHS = hiddenPaths.build();
+ }
+
+}
diff --git a/spark-paper/src/main/java/me/lucko/spark/paper/PaperSparkPlugin.java b/spark-paper/src/main/java/me/lucko/spark/paper/PaperSparkPlugin.java
new file mode 100644
index 0000000..4924812
--- /dev/null
+++ b/spark-paper/src/main/java/me/lucko/spark/paper/PaperSparkPlugin.java
@@ -0,0 +1,208 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or