aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--settings.gradle1
-rw-r--r--spark-bukkit/build.gradle2
-rw-r--r--spark-bungeecord/build.gradle2
-rw-r--r--spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java24
-rwxr-xr-xspark-common/src/main/resources/spark-native/linux/aarch64/libasyncProfiler.so (renamed from spark-common/src/main/resources/spark/linux/aarch64/libasyncProfiler.so)bin343408 -> 343408 bytes
-rwxr-xr-xspark-common/src/main/resources/spark-native/linux/amd64-musl/libasyncProfiler.so (renamed from spark-common/src/main/resources/spark/linux/amd64-musl/libasyncProfiler.so)bin317560 -> 317560 bytes
-rwxr-xr-xspark-common/src/main/resources/spark-native/linux/amd64/libasyncProfiler.so (renamed from spark-common/src/main/resources/spark/linux/amd64/libasyncProfiler.so)bin361312 -> 361312 bytes
-rwxr-xr-xspark-common/src/main/resources/spark-native/macos/libasyncProfiler.so (renamed from spark-common/src/main/resources/spark/macos/libasyncProfiler.so)bin724576 -> 724576 bytes
-rw-r--r--spark-fabric/build.gradle2
-rw-r--r--spark-forge/build.gradle2
-rw-r--r--spark-minestom/build.gradle2
-rw-r--r--spark-neoforge/build.gradle2
-rw-r--r--spark-nukkit/build.gradle2
-rw-r--r--spark-paper/build.gradle75
-rw-r--r--spark-paper/src/main/java/me/lucko/spark/paper/PaperClassSourceLookup.java61
-rw-r--r--spark-paper/src/main/java/me/lucko/spark/paper/PaperCommandSender.java58
-rw-r--r--spark-paper/src/main/java/me/lucko/spark/paper/PaperPlatformInfo.java53
-rw-r--r--spark-paper/src/main/java/me/lucko/spark/paper/PaperPlayerPingProvider.java45
-rw-r--r--spark-paper/src/main/java/me/lucko/spark/paper/PaperServerConfigProvider.java159
-rw-r--r--spark-paper/src/main/java/me/lucko/spark/paper/PaperSparkPlugin.java208
-rw-r--r--spark-paper/src/main/java/me/lucko/spark/paper/PaperTickHook.java46
-rw-r--r--spark-paper/src/main/java/me/lucko/spark/paper/PaperTickReporter.java46
-rw-r--r--spark-paper/src/main/java/me/lucko/spark/paper/PaperWorldInfoProvider.java105
-rw-r--r--spark-paper/src/main/java/me/lucko/spark/paper/api/Compatibility.java36
-rw-r--r--spark-paper/src/main/java/me/lucko/spark/paper/api/PaperClassLookup.java27
-rw-r--r--spark-paper/src/main/java/me/lucko/spark/paper/api/PaperScheduler.java29
-rw-r--r--spark-paper/src/main/java/me/lucko/spark/paper/api/PaperSparkModule.java109
-rw-r--r--spark-sponge7/build.gradle2
-rw-r--r--spark-sponge8/build.gradle2
-rw-r--r--spark-velocity/build.gradle2
-rw-r--r--spark-velocity4/build.gradle2
-rw-r--r--spark-waterdog/build.gradle2
32 files changed, 1103 insertions, 3 deletions
diff --git a/settings.gradle b/settings.gradle
index 7075391..6931c9f 100644
--- a/settings.gradle
+++ b/settings.gradle
@@ -25,6 +25,7 @@ include (
'spark-api',
'spark-common',
'spark-bukkit',
+ 'spark-paper',
'spark-bungeecord',
'spark-velocity',
'spark-velocity4',
diff --git a/spark-bukkit/build.gradle b/spark-bukkit/build.gradle
index de30294..58b6eba 100644
--- a/spark-bukkit/build.gradle
+++ b/spark-bukkit/build.gradle
@@ -43,6 +43,8 @@ shadowJar {
exclude 'module-info.class'
exclude 'META-INF/maven/**'
exclude 'META-INF/proguard/**'
+ exclude '**/*.proto'
+ exclude '**/*.proto.bin'
}
artifacts {
diff --git a/spark-bungeecord/build.gradle b/spark-bungeecord/build.gradle
index 1a279eb..dd7f1c8 100644
--- a/spark-bungeecord/build.gradle
+++ b/spark-bungeecord/build.gradle
@@ -33,6 +33,8 @@ shadowJar {
exclude 'module-info.class'
exclude 'META-INF/maven/**'
exclude 'META-INF/proguard/**'
+ exclude '**/*.proto'
+ exclude '**/*.proto.bin'
}
artifacts {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java
index 192275b..7dcb131 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java
@@ -114,13 +114,19 @@ public class AsyncProfilerAccess {
if (this.setupException instanceof UnsupportedSystemException) {
platform.getPlugin().log(Level.INFO, "The async-profiler engine is not supported for your os/arch (" +
this.setupException.getMessage() + "), so the built-in Java engine will be used instead.");
+ } else if (this.setupException instanceof UnsupportedJvmException) {
+ platform.getPlugin().log(Level.INFO, "The async-profiler engine is not supported for your JVM (" +
+ this.setupException.getMessage() + "), so the built-in Java engine will be used instead.");
} else if (this.setupException instanceof NativeLoadingException && this.setupException.getCause().getMessage().contains("libstdc++")) {
platform.getPlugin().log(Level.WARNING, "Unable to initialise the async-profiler engine because libstdc++ is not installed.");
platform.getPlugin().log(Level.WARNING, "Please see here for more information: https://spark.lucko.me/docs/misc/Using-async-profiler#install-libstdc");
} else {
- platform.getPlugin().log(Level.WARNING, "Unable to initialise the async-profiler engine: " + this.setupException.getMessage());
+ String error = this.setupException.getMessage();
+ if (this.setupException.getCause() != null) {
+ error += " (" + this.setupException.getCause().getMessage() + ")";
+ }
+ platform.getPlugin().log(Level.WARNING, "Unable to initialise the async-profiler engine: " + error);
platform.getPlugin().log(Level.WARNING, "Please see here for more information: https://spark.lucko.me/docs/misc/Using-async-profiler");
- this.setupException.printStackTrace();
}
}
@@ -140,6 +146,12 @@ public class AsyncProfilerAccess {
// check compatibility
String os = System.getProperty("os.name").toLowerCase(Locale.ROOT).replace(" ", "");
String arch = System.getProperty("os.arch").toLowerCase(Locale.ROOT);
+ String jvm = System.getProperty("java.vm.name");
+
+ // openj9 not supported by async-profiler at the moment
+ if (jvm.contains("OpenJ9")) {
+ throw new UnsupportedJvmException(jvm);
+ }
if (os.equals("linux") && arch.equals("amd64") && isLinuxMusl()) {
arch = "amd64-musl";
@@ -159,7 +171,7 @@ public class AsyncProfilerAccess {
}
// extract the profiler binary from the spark jar file
- String resource = "spark/" + libPath + "/libasyncProfiler.so";
+ String resource = "spark-native/" + libPath + "/libasyncProfiler.so";
URL profilerResource = AsyncProfilerAccess.class.getClassLoader().getResource(resource);
if (profilerResource == null) {
throw new IllegalStateException("Could not find " + resource + " in spark jar file");
@@ -224,6 +236,12 @@ public class AsyncProfilerAccess {
}
}
+ private static final class UnsupportedJvmException extends UnsupportedOperationException {
+ public UnsupportedJvmException(String jvm) {
+ super(jvm);
+ }
+ }
+
private static final class NativeLoadingException extends RuntimeException {
public NativeLoadingException(Throwable cause) {
super("A runtime error occurred whilst loading the native library", cause);
diff --git a/spark-common/src/main/resources/spark/linux/aarch64/libasyncProfiler.so b/spark-common/src/main/resources/spark-native/linux/aarch64/libasyncProfiler.so
index 800cf91..800cf91 100755
--- a/spark-common/src/main/resources/spark/linux/aarch64/libasyncProfiler.so
+++ b/spark-common/src/main/resources/spark-native/linux/aarch64/libasyncProfiler.so
Binary files differ
diff --git a/spark-common/src/main/resources/spark/linux/amd64-musl/libasyncProfiler.so b/spark-common/src/main/resources/spark-native/linux/amd64-musl/libasyncProfiler.so
index 3c81d1c..3c81d1c 100755
--- a/spark-common/src/main/resources/spark/linux/amd64-musl/libasyncProfiler.so
+++ b/spark-common/src/main/resources/spark-native/linux/amd64-musl/libasyncProfiler.so
Binary files differ
diff --git a/spark-common/src/main/resources/spark/linux/amd64/libasyncProfiler.so b/spark-common/src/main/resources/spark-native/linux/amd64/libasyncProfiler.so
index 5af5071..5af5071 100755
--- a/spark-common/src/main/resources/spark/linux/amd64/libasyncProfiler.so
+++ b/spark-common/src/main/resources/spark-native/linux/amd64/libasyncProfiler.so
Binary files differ
diff --git a/spark-common/src/main/resources/spark/macos/libasyncProfiler.so b/spark-common/src/main/resources/spark-native/macos/libasyncProfiler.so
index 4930c67..4930c67 100755
--- a/spark-common/src/main/resources/spark/macos/libasyncProfiler.so
+++ b/spark-common/src/main/resources/spark-native/macos/libasyncProfiler.so
Binary files differ
diff --git a/spark-fabric/build.gradle b/spark-fabric/build.gradle
index 0114912..711b1f9 100644
--- a/spark-fabric/build.gradle
+++ b/spark-fabric/build.gradle
@@ -86,6 +86,8 @@ shadowJar {
exclude 'module-info.class'
exclude 'META-INF/maven/**'
exclude 'META-INF/proguard/**'
+ exclude '**/*.proto'
+ exclude '**/*.proto.bin'
dependencies {
exclude(dependency('org.ow2.asm::'))
diff --git a/spark-forge/build.gradle b/spark-forge/build.gradle
index 79c99b0..bde35e1 100644
--- a/spark-forge/build.gradle
+++ b/spark-forge/build.gradle
@@ -53,6 +53,8 @@ shadowJar {
exclude 'module-info.class'
exclude 'META-INF/maven/**'
exclude 'META-INF/proguard/**'
+ exclude '**/*.proto'
+ exclude '**/*.proto.bin'
}
artifacts {
diff --git a/spark-minestom/build.gradle b/spark-minestom/build.gradle
index 1422df7..c08baf5 100644
--- a/spark-minestom/build.gradle
+++ b/spark-minestom/build.gradle
@@ -41,6 +41,8 @@ shadowJar {
exclude 'module-info.class'
exclude 'META-INF/maven/**'
exclude 'META-INF/proguard/**'
+ exclude '**/*.proto'
+ exclude '**/*.proto.bin'
}
artifacts {
diff --git a/spark-neoforge/build.gradle b/spark-neoforge/build.gradle
index dc0514e..a08a7ce 100644
--- a/spark-neoforge/build.gradle
+++ b/spark-neoforge/build.gradle
@@ -60,6 +60,8 @@ shadowJar {
exclude 'module-info.class'
exclude 'META-INF/maven/**'
exclude 'META-INF/proguard/**'
+ exclude '**/*.proto'
+ exclude '**/*.proto.bin'
}
artifacts {
diff --git a/spark-nukkit/build.gradle b/spark-nukkit/build.gradle
index f8f443f..5c709e3 100644
--- a/spark-nukkit/build.gradle
+++ b/spark-nukkit/build.gradle
@@ -37,6 +37,8 @@ shadowJar {
exclude 'module-info.class'
exclude 'META-INF/maven/**'
exclude 'META-INF/proguard/**'
+ exclude '**/*.proto'
+ exclude '**/*.proto.bin'
}
artifacts {
diff --git a/spark-paper/build.gradle b/spark-paper/build.gradle
new file mode 100644
index 0000000..be0aa0d
--- /dev/null
+++ b/spark-paper/build.gradle
@@ -0,0 +1,75 @@
+plugins {
+ id 'io.github.goooler.shadow' version '8.1.7'
+ id 'maven-publish'
+}
+
+tasks.withType(JavaCompile) {
+ // override, compile targeting J21
+ options.release = 21
+}
+
+tasks.jar {
+ archiveClassifier = 'original'
+}
+
+dependencies {
+ implementation project(':spark-common')
+ compileOnly 'io.papermc.paper:paper-api:1.21-R0.1-SNAPSHOT'
+}
+
+repositories {
+ maven { url "https://repo.papermc.io/repository/maven-public/" }
+}
+
+shadowJar {
+ archiveFileName = "spark-${project.pluginVersion}-paper.jar"
+ archiveClassifier = ''
+
+ dependencies {
+ exclude(dependency('net.kyori:^(?!adventure-text-feature-pagination).+$'))
+ exclude(dependency('net.bytebuddy:byte-buddy-agent'))
+ }
+
+ relocate 'net.kyori.adventure.text.feature.pagination', 'me.lucko.spark.paper.lib.adventure.pagination'
+ relocate 'com.google.protobuf', 'me.lucko.spark.paper.lib.protobuf'
+ relocate 'org.objectweb.asm', 'me.lucko.spark.paper.lib.asm'
+ relocate 'one.profiler', 'me.lucko.spark.paper.lib.asyncprofiler'
+ relocate 'me.lucko.bytesocks.client', 'me.lucko.spark.paper.lib.bytesocks'
+ relocate 'org.java_websocket', 'me.lucko.spark.paper.lib.bytesocks.ws'
+
+ // nest common classes beneath the paper package to avoid conflicts with spark-bukkit
+ relocate 'me.lucko.spark.common', 'me.lucko.spark.paper.common'
+ relocate 'me.lucko.spark.proto', 'me.lucko.spark.paper.proto'
+ relocate 'spark-native', 'spark-paper-native'
+
+ exclude 'module-info.class'
+ exclude 'META-INF/maven/**'
+ exclude 'META-INF/proguard/**'
+ exclude '**/*.proto'
+ exclude '**/*.proto.bin'
+}
+
+artifacts {
+ archives shadowJar
+ shadow shadowJar
+}
+
+publishing {
+ //repositories {
+ // maven {
+ // url = 'https://oss.sonatype.org/content/repositories/snapshots'
+ // credentials {
+ // username = sonatypeUsername
+ // password = sonatypePassword
+ // }
+ // }
+ //}
+ publications {
+ shadow(MavenPublication) { publication ->
+ project.shadow.component(publication)
+ version = "${project.pluginVersion}-SNAPSHOT"
+ }
+ }
+}
diff --git a/spark-paper/src/main/java/me/lucko/spark/paper/PaperClassSourceLookup.java b/spark-paper/src/main/java/me/lucko/spark/paper/PaperClassSourceLookup.java
new file mode 100644
index 0000000..2c5f7c0
--- /dev/null
+++ b/spark-paper/src/main/java/me/lucko/spark/paper/PaperClassSourceLookup.java
@@ -0,0 +1,61 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.paper;
+
+import me.lucko.spark.common.sampler.source.ClassSourceLookup;
+import org.bukkit.plugin.java.JavaPlugin;
+
+import java.lang.reflect.Field;
+
+public class PaperClassSourceLookup extends ClassSourceLookup.ByClassLoader {
+ private static final Class<?> PLUGIN_CLASS_LOADER;
+ private static final Field PLUGIN_FIELD;
+
+ private static final Class<?> PAPER_PLUGIN_CLASS_LOADER;
+ private static final Field PAPER_PLUGIN_FIELD;
+
+ static {
+ try {
+ PLUGIN_CLASS_LOADER = Class.forName("org.bukkit.plugin.java.PluginClassLoader");
+ PLUGIN_FIELD = PLUGIN_CLASS_LOADER.getDeclaredField("plugin");
+ PLUGIN_FIELD.setAccessible(true);
+
+ PAPER_PLUGIN_CLASS_LOADER = Class.forName("io.papermc.paper.plugin.entrypoint.classloader.PaperPluginClassLoader");
+ PAPER_PLUGIN_FIELD = PAPER_PLUGIN_CLASS_LOADER.getDeclaredField("loadedJavaPlugin");
+ PAPER_PLUGIN_FIELD.setAccessible(true);
+ } catch (ReflectiveOperationException e) {
+ throw new ExceptionInInitializerError(e);
+ }
+ }
+
+ @Override
+ public String identify(ClassLoader loader) throws ReflectiveOperationException {
+ if (PLUGIN_CLASS_LOADER.isInstance(loader)) {
+ JavaPlugin plugin = (JavaPlugin) PLUGIN_FIELD.get(loader);
+ return plugin.getName();
+ } else if (PAPER_PLUGIN_CLASS_LOADER.isInstance(loader)) {
+ JavaPlugin plugin = (JavaPlugin) PAPER_PLUGIN_FIELD.get(loader);
+ return plugin.getName();
+ }
+ return null;
+ }
+}
+
diff --git a/spark-paper/src/main/java/me/lucko/spark/paper/PaperCommandSender.java b/spark-paper/src/main/java/me/lucko/spark/paper/PaperCommandSender.java
new file mode 100644
index 0000000..c3b569d
--- /dev/null
+++ b/spark-paper/src/main/java/me/lucko/spark/paper/PaperCommandSender.java
@@ -0,0 +1,58 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.paper;
+
+import me.lucko.spark.common.command.sender.AbstractCommandSender;
+import net.kyori.adventure.text.Component;
+import org.bukkit.command.CommandSender;
+import org.bukkit.entity.Player;
+
+import java.util.UUID;
+
+public class PaperCommandSender extends AbstractCommandSender<CommandSender> {
+
+ public PaperCommandSender(CommandSender sender) {
+ super(sender);
+ }
+
+ @Override
+ public String getName() {
+ return this.delegate.getName();
+ }
+
+ @Override
+ public UUID getUniqueId() {
+ if (super.delegate instanceof Player player) {
+ return player.getUniqueId();
+ }
+ return null;
+ }
+
+ @Override
+ public void sendMessage(Component message) {
+ super.delegate.sendMessage(message);
+ }
+
+ @Override
+ public boolean hasPermission(String permission) {
+ return super.delegate.hasPermission(permission);
+ }
+}
diff --git a/spark-paper/src/main/java/me/lucko/spark/paper/PaperPlatformInfo.java b/spark-paper/src/main/java/me/lucko/spark/paper/PaperPlatformInfo.java
new file mode 100644
index 0000000..114175e
--- /dev/null
+++ b/spark-paper/src/main/java/me/lucko/spark/paper/PaperPlatformInfo.java
@@ -0,0 +1,53 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.paper;
+
+import io.papermc.paper.ServerBuildInfo;
+import me.lucko.spark.common.platform.PlatformInfo;
+
+public enum PaperPlatformInfo implements PlatformInfo {
+ INSTANCE;
+
+ @Override
+ public Type getType() {
+ return Type.SERVER;
+ }
+
+ @Override
+ public String getName() {
+ return "Paper";
+ }
+
+ @Override
+ public String getBrand() {
+ return ServerBuildInfo.buildInfo().brandName();
+ }
+
+ @Override
+ public String getVersion() {
+ return ServerBuildInfo.buildInfo().asString(ServerBuildInfo.StringRepresentation.VERSION_SIMPLE);
+ }
+
+ @Override
+ public String getMinecraftVersion() {
+ return ServerBuildInfo.buildInfo().minecraftVersionId();
+ }
+}
diff --git a/spark-paper/src/main/java/me/lucko/spark/paper/PaperPlayerPingProvider.java b/spark-paper/src/main/java/me/lucko/spark/paper/PaperPlayerPingProvider.java
new file mode 100644
index 0000000..e896b21
--- /dev/null
+++ b/spark-paper/src/main/java/me/lucko/spark/paper/PaperPlayerPingProvider.java
@@ -0,0 +1,45 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.paper;
+
+import com.google.common.collect.ImmutableMap;
+import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
+import org.bukkit.Server;
+import org.bukkit.entity.Player;
+
+import java.util.Map;
+
+public class PaperPlayerPingProvider implements PlayerPingProvider {
+ private final Server server;
+
+ public PaperPlayerPingProvider(Server server) {
+ this.server = server;
+ }
+
+ @Override
+ public Map<String, Integer> poll() {
+ ImmutableMap.Builder<String, Integer> builder = ImmutableMap.builder();
+ for (Player player : this.server.getOnlinePlayers()) {
+ builder.put(player.getName(), player.getPing());
+ }
+ return builder.build();
+ }
+}
diff --git a/spark-paper/src/main/java/me/lucko/spark/paper/PaperServerConfigProvider.java b/spark-paper/src/main/java/me/lucko/spark/paper/PaperServerConfigProvider.java
new file mode 100644
index 0000000..d1301f8
--- /dev/null
+++ b/spark-paper/src/main/java/me/lucko/spark/paper/PaperServerConfigProvider.java
@@ -0,0 +1,159 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.paper;
+
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.ImmutableSet;
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
+import com.google.gson.JsonSerializer;
+import me.lucko.spark.common.platform.serverconfig.ConfigParser;
+import me.lucko.spark.common.platform.serverconfig.ExcludedConfigFilter;
+import me.lucko.spark.common.platform.serverconfig.PropertiesConfigParser;
+import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider;
+import org.bukkit.Bukkit;
+import org.bukkit.World;
+import org.bukkit.configuration.MemorySection;
+import org.bukkit.configuration.file.YamlConfiguration;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.Collection;
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+public class PaperServerConfigProvider extends ServerConfigProvider {
+
+ /** A map of provided files and their type */
+ private static final Map<String, ConfigParser> FILES;
+ /** A collection of paths to be excluded from the files */
+ private static final Collection<String> HIDDEN_PATHS;
+
+ public PaperServerConfigProvider() {
+ super(FILES, HIDDEN_PATHS);
+ }
+
+ private static class YamlConfigParser implements ConfigParser {
+ public static final YamlConfigParser INSTANCE = new YamlConfigParser();
+ protected static final Gson GSON = new GsonBuilder()
+ .registerTypeAdapter(MemorySection.class, (JsonSerializer<MemorySection>) (obj, type, ctx) -> ctx.serialize(obj.getValues(false)))
+ .create();
+
+ @Override
+ public JsonElement load(String file, ExcludedConfigFilter filter) throws IOException {
+ Map<String, Object> values = this.parse(Paths.get(file));
+ if (values == null) {
+ return null;
+ }
+
+ return filter.apply(GSON.toJsonTree(values));
+ }
+
+ @Override
+ public Map<String, Object> parse(BufferedReader reader) throws IOException {
+ YamlConfiguration config = YamlConfiguration.loadConfiguration(reader);
+ return config.getValues(false);
+ }
+ }
+
+ // Paper 1.19+ split config layout
+ private static class SplitYamlConfigParser extends YamlConfigParser {
+ public static final SplitYamlConfigParser INSTANCE = new SplitYamlConfigParser();
+
+ @Override
+ public JsonElement load(String group, ExcludedConfigFilter filter) throws IOException {
+ String prefix = group.replace("/", "");
+
+ Path configDir = Paths.get("config");
+ if (!Files.exists(configDir)) {
+ return null;
+ }
+
+ JsonObject root = new JsonObject();
+
+ for (Map.Entry<String, Path> entry : getNestedFiles(configDir, prefix).entrySet()) {
+ String fileName = entry.getKey();
+ Path path = entry.getValue();
+
+ Map<String, Object> values = this.parse(path);
+ if (values == null) {
+ continue;
+ }
+
+ // apply the filter individually to each nested file
+ root.add(fileName, filter.apply(GSON.toJsonTree(values)));
+ }
+
+ return root;
+ }
+
+ private static Map<String, Path> getNestedFiles(Path configDir, String prefix) {
+ Map<String, Path> files = new LinkedHashMap<>();
+ files.put("global.yml", configDir.resolve(prefix + "-global.yml"));
+ files.put("world-defaults.yml", configDir.resolve(prefix + "-world-defaults.yml"));
+ for (World world : Bukkit.getWorlds()) {
+ files.put(world.getName() + ".yml", world.getWorldFolder().toPath().resolve(prefix + "-world.yml"));
+ }
+ return files;
+ }
+ }
+
+ static {
+ ImmutableMap.Builder<String, ConfigParser> files = ImmutableMap.<String, ConfigParser>builder()
+ .put("server.properties", PropertiesConfigParser.INSTANCE)
+ .put("bukkit.yml", YamlConfigParser.INSTANCE)
+ .put("spigot.yml", YamlConfigParser.INSTANCE)
+ .put("paper.yml", YamlConfigParser.INSTANCE)
+ .put("paper/", SplitYamlConfigParser.INSTANCE)
+ .put("purpur.yml", YamlConfigParser.INSTANCE)
+ .put("pufferfish.yml", YamlConfigParser.INSTANCE);
+
+ for (String config : getSystemPropertyList("spark.serverconfigs.extra")) {
+ files.put(config, YamlConfigParser.INSTANCE);
+ }
+
+ ImmutableSet.Builder<String> hiddenPaths = ImmutableSet.<String>builder()
+ .add("database")
+ .add("settings.bungeecord-addresses")
+ .add("settings.velocity-support.secret")
+ .add("proxies.velocity.secret")
+ .add("server-ip")
+ .add("motd")
+ .add("resource-pack")
+ .add("rcon<dot>password")
+ .add("rcon<dot>ip")
+ .add("level-seed")
+ .add("world-settings.*.feature-seeds")
+ .add("world-settings.*.seed-*")
+ .add("feature-seeds")
+ .add("seed-*")
+ .addAll(getSystemPropertyList("spark.serverconfigs.hiddenpaths"));
+
+ FILES = files.build();
+ HIDDEN_PATHS = hiddenPaths.build();
+ }
+
+}
diff --git a/spark-paper/src/main/java/me/lucko/spark/paper/PaperSparkPlugin.java b/spark-paper/src/main/java/me/lucko/spark/paper/PaperSparkPlugin.java
new file mode 100644
index 0000000..4924812
--- /dev/null
+++ b/spark-paper/src/main/java/me/lucko/spark/paper/PaperSparkPlugin.java
@@ -0,0 +1,208 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.paper;
+
+import me.lucko.spark.api.Spark;
+import me.lucko.spark.common.SparkPlatform;
+import me.lucko.spark.common.SparkPlugin;
+import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
+import me.lucko.spark.common.platform.PlatformInfo;
+import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
+import me.lucko.spark.common.sampler.ThreadDumper;
+import me.lucko.spark.common.sampler.source.ClassSourceLookup;
+import me.lucko.spark.common.sampler.source.SourceMetadata;
+import me.lucko.spark.common.tick.TickHook;
+import me.lucko.spark.common.tick.TickReporter;
+import me.lucko.spark.common.util.classfinder.ClassFinder;
+import me.lucko.spark.paper.api.PaperClassLookup;
+import me.lucko.spark.paper.api.PaperScheduler;
+import me.lucko.spark.paper.api.PaperSparkModule;
+import org.bukkit.Server;
+import org.bukkit.command.CommandSender;
+import org.bukkit.plugin.Plugin;
+import org.bukkit.plugin.ServicePriority;
+
+import java.nio.file.Path;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.List;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import java.util.stream.Stream;
+
+public class PaperSparkPlugin implements PaperSparkModule, SparkPlugin {
+ private final Server server;
+ private final Logger logger;
+ private final PaperScheduler scheduler;
+ private final PaperClassLookup classLookup;
+
+ private final PaperTickHook tickHook;
+ private final PaperTickReporter tickReporter;
+ private final ThreadDumper gameThreadDumper;
+ private final SparkPlatform platform;
+
+ public PaperSparkPlugin(Server server, Logger logger, PaperScheduler scheduler, PaperClassLookup classLookup) {
+ this.server = server;
+ this.logger = logger;
+ this.scheduler = scheduler;
+ this.classLookup = classLookup;
+ this.tickHook = new PaperTickHook();
+ this.tickReporter = new PaperTickReporter();
+ this.gameThreadDumper = new ThreadDumper.Specific(Thread.currentThread());
+ this.platform = new SparkPlatform(this);
+ }
+
+ @Override
+ public void enable() {
+ this.platform.enable();
+ }
+
+ @Override
+ public void disable() {
+ this.platform.disable();
+ }
+
+ @Override
+ public void executeCommand(CommandSender sender, String[] args) {
+ this.platform.executeCommand(new PaperCommandSender(sender), args);
+ }
+
+ @Override
+ public List<String> tabComplete(CommandSender sender, String[] args) {
+ return this.platform.tabCompleteCommand(new PaperCommandSender(sender), args);
+ }
+
+ @Override
+ public void onServerTickStart() {
+ this.tickHook.onTick();
+ }
+
+ @Override
+ public void onServerTickEnd(double duration) {
+ this.tickReporter.onTick(duration);
+ }
+
+ @Override
+ public String getVersion() {
+ return "TODO"; // TODO - get from package implementation version?
+ }
+
+ @Override
+ public Path getPluginDirectory() {
+ return this.server.getPluginsFolder().toPath().resolve("spark");
+ }
+
+ @Override
+ public String getCommandName() {
+ return "spark";
+ }
+
+ @Override
+ public Stream<PaperCommandSender> getCommandSenders() {
+ return Stream.concat(
+ this.server.getOnlinePlayers().stream(),
+ Stream.of(this.server.getConsoleSender())
+ ).map(PaperCommandSender::new);
+ }
+
+ @Override
+ public void executeAsync(Runnable task) {
+ this.scheduler.executeAsync(task);
+ }
+
+ @Override
+ public void executeSync(Runnable task) {
+ this.scheduler.executeSync(task);
+ }
+
+ @Override
+ public void log(Level level, String msg) {
+ this.logger.log(level, msg);
+ }
+
+ @Override
+ public ThreadDumper getDefaultThreadDumper() {
+ return this.gameThreadDumper;
+ }
+
+ @Override
+ public TickHook createTickHook() {
+ return this.tickHook;
+ }
+
+ @Override
+ public TickReporter createTickReporter() {
+ return this.tickReporter;
+ }
+
+ @Override
+ public ClassSourceLookup createClassSourceLookup() {
+ return new PaperClassSourceLookup();
+ }
+
+ @Override
+ public ClassFinder createClassFinder() {
+ return className -> {
+ try {
+ return this.classLookup.lookup(className);
+ } catch (Exception e) {
+ return null;
+ }
+ };
+ }
+
+ @Override
+ public Collection<SourceMetadata> getKnownSources() {
+ return SourceMetadata.gather(
+ Arrays.asList(this.server.getPluginManager().getPlugins()),
+ Plugin::getName,
+ plugin -> plugin.getPluginMeta().getVersion(),
+ plugin -> String.join(", ", plugin.getPluginMeta().getAuthors())
+ );
+ }
+
+ @Override
+ public PlayerPingProvider createPlayerPingProvider() {
+ return new PaperPlayerPingProvider(this.server);
+ }
+
+ @Override
+ public ServerConfigProvider createServerConfigProvider() {
+ return new PaperServerConfigProvider();
+ }
+
+ @Override
+ public WorldInfoProvider createWorldInfoProvider() {
+ return new PaperWorldInfoProvider(this.server);
+ }
+
+ @Override
+ public PlatformInfo getPlatformInfo() {
+ return PaperPlatformInfo.INSTANCE;
+ }
+
+ @SuppressWarnings("DataFlowIssue") // null plugin
+ @Override
+ public void registerApi(Spark api) {
+ this.server.getServicesManager().register(Spark.class, api, null, ServicePriority.Normal);
+ }
+}
diff --git a/spark-paper/src/main/java/me/lucko/spark/paper/PaperTickHook.java b/spark-paper/src/main/java/me/lucko/spark/paper/PaperTickHook.java
new file mode 100644
index 0000000..06126e1
--- /dev/null
+++ b/spark-paper/src/main/java/me/lucko/spark/paper/PaperTickHook.java
@@ -0,0 +1,46 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.paper;
+
+import me.lucko.spark.common.tick.AbstractTickHook;
+import me.lucko.spark.common.tick.TickHook;
+import org.bukkit.event.Listener;
+
+public class PaperTickHook extends AbstractTickHook implements TickHook, Listener {
+ private boolean open = false;
+
+ @Override
+ public void start() {
+ this.open = true;
+ }
+
+ @Override
+ public void close() {
+ this.open = false;
+ }
+
+ @Override
+ public void onTick() {
+ if (this.open) {
+ super.onTick();
+ }
+ }
+}
diff --git a/spark-paper/src/main/java/me/lucko/spark/paper/PaperTickReporter.java b/spark-paper/src/main/java/me/lucko/spark/paper/PaperTickReporter.java
new file mode 100644
index 0000000..4db1f16
--- /dev/null
+++ b/spark-paper/src/main/java/me/lucko/spark/paper/PaperTickReporter.java
@@ -0,0 +1,46 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.paper;
+
+import me.lucko.spark.common.tick.AbstractTickReporter;
+import me.lucko.spark.common.tick.TickReporter;
+import org.bukkit.event.Listener;
+
+public class PaperTickReporter extends AbstractTickReporter implements TickReporter, Listener {
+ private boolean open = false;
+
+ @Override
+ public void start() {
+ this.open = true;
+ }
+
+ @Override
+ public void close() {
+ this.open = false;
+ }
+
+ @Override
+ public void onTick(double duration) {
+ if (this.open) {
+ super.onTick(duration);
+ }
+ }
+}
diff --git a/spark-paper/src/main/java/me/lucko/spark/paper/PaperWorldInfoProvider.java b/spark-paper/src/main/java/me/lucko/spark/paper/PaperWorldInfoProvider.java
new file mode 100644
index 0000000..29ab1ef
--- /dev/null
+++ b/spark-paper/src/main/java/me/lucko/spark/paper/PaperWorldInfoProvider.java
@@ -0,0 +1,105 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.paper;
+
+import me.lucko.spark.common.platform.world.AbstractChunkInfo;
+import me.lucko.spark.common.platform.world.CountMap;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
+import org.bukkit.Chunk;
+import org.bukkit.Server;
+import org.bukkit.World;
+import org.bukkit.entity.Entity;
+import org.bukkit.entity.EntityType;
+
+import java.util.ArrayList;
+import java.util.List;
+
+public class PaperWorldInfoProvider implements WorldInfoProvider {
+ private final Server server;
+
+ public PaperWorldInfoProvider(Server server) {
+ this.server = server;
+ }
+
+ @Override
+ public CountsResult pollCounts() {
+ int players = this.server.getOnlinePlayers().size();
+ int entities = 0;
+ int tileEntities = 0;
+ int chunks = 0;
+
+ for (World world : this.server.getWorlds()) {
+ entities += world.getEntityCount();
+ tileEntities += world.getTileEntityCount();
+ chunks += world.getChunkCount();
+ }
+
+ return new CountsResult(players, entities, tileEntities, chunks);
+ }
+
+ @Override
+ public ChunksResult<PaperChunkInfo> pollChunks() {
+ ChunksResult<PaperChunkInfo> data = new ChunksResult<>();
+
+ for (World world : this.server.getWorlds()) {
+ Chunk[] chunks = world.getLoadedChunks();
+
+ List<PaperChunkInfo> list = new ArrayList<>(chunks.length);
+ for (Chunk chunk : chunks) {
+ if (chunk != null) {
+ list.add(new PaperChunkInfo(chunk));
+ }
+ }
+
+ data.put(world.getName(), list);
+ }
+
+ return data;
+ }
+
+ static final class PaperChunkInfo extends AbstractChunkInfo<EntityType> {
+ private final CountMap<EntityType> entityCounts;
+
+ PaperChunkInfo(Chunk chunk) {
+ super(chunk.getX(), chunk.getZ());
+
+ this.entityCounts = new CountMap.EnumKeyed<>(EntityType.class);
+ for (Entity entity : chunk.getEntities()) {
+ if (entity != null) {
+ this.entityCounts.increment(entity.getType());
+ }
+ }
+ }
+
+ @Override
+ public CountMap<EntityType> getEntityCounts() {
+ return this.entityCounts;
+ }
+
+ @SuppressWarnings("deprecation")
+ @Override
+ public String entityTypeName(EntityType type) {
+ return type.getName();
+ }
+
+ }
+
+}
diff --git a/spark-paper/src/main/java/me/lucko/spark/paper/api/Compatibility.java b/spark-paper/src/main/java/me/lucko/spark/paper/api/Compatibility.java
new file mode 100644
index 0000000..deca337
--- /dev/null
+++ b/spark-paper/src/main/java/me/lucko/spark/paper/api/Compatibility.java
@@ -0,0 +1,36 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.paper.api;
+
+/**
+ * Used to indicate the version of the spark module API supported by the Paper server.
+ *
+ * <p>This allows spark to add or deprecate callback methods in the {@link PaperSparkModule} interface
+ * while knowing at runtime whether Paper will actually call them.</p>
+ */
+public enum Compatibility {
+
+ /**
+ * Indicates that Paper supports version 1.0 of the spark module API.
+ */
+ VERSION_1_0
+
+}
diff --git a/spark-paper/src/main/java/me/lucko/spark/paper/api/PaperClassLookup.java b/spark-paper/src/main/java/me/lucko/spark/paper/api/PaperClassLookup.java
new file mode 100644
index 0000000..280e4d9
--- /dev/null
+++ b/spark-paper/src/main/java/me/lucko/spark/paper/api/PaperClassLookup.java
@@ -0,0 +1,27 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.paper.api;
+
+public interface PaperClassLookup {
+
+ Class<?> lookup(String className) throws Exception;
+
+}
diff --git a/spark-paper/src/main/java/me/lucko/spark/paper/api/PaperScheduler.java b/spark-paper/src/main/java/me/lucko/spark/paper/api/PaperScheduler.java
new file mode 100644
index 0000000..7f4fdd3
--- /dev/null
+++ b/spark-paper/src/main/java/me/lucko/spark/paper/api/PaperScheduler.java
@@ -0,0 +1,29 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.paper.api;
+
+public interface PaperScheduler {
+
+ void executeAsync(Runnable task);
+
+ void executeSync(Runnable task);
+
+}
diff --git a/spark-paper/src/main/java/me/lucko/spark/paper/api/PaperSparkModule.java b/spark-paper/src/main/java/me/lucko/spark/paper/api/PaperSparkModule.java
new file mode 100644
index 0000000..011841d
--- /dev/null
+++ b/spark-paper/src/main/java/me/lucko/spark/paper/api/PaperSparkModule.java
@@ -0,0 +1,109 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.paper.api;
+
+import me.lucko.spark.paper.PaperSparkPlugin;
+import org.bukkit.Server;
+import org.bukkit.command.CommandSender;
+
+import java.util.List;
+import java.util.logging.Logger;
+
+/**
+ * Spark module for use as a library within the Paper server implementation.
+ *
+ * <p>Paper provides:</p>
+ * <ul>
+ * <li>a {@link Server} instance</li>
+ * <li>a {@link Logger} instance</li>
+ * <li>a {@link PaperScheduler} instance</li>
+ * <li>a {@link PaperClassLookup} instance</li>
+ * </ul>
+ *
+ * <p>Paper is expected to:</p>
+ * <ul>
+ * <li>call {@link #enable()} to enable spark, either immediately or when the server has finished starting</li>
+ * <li>call {@link #disable()} to disable spark when the server is stopping</li>
+ * <li>call {@link #executeCommand(CommandSender, String[])} when the spark command is executed</li>
+ * <li>call {@link #tabComplete(CommandSender, String[])} when the spark command is tab completed</li>
+ * <li>call {@link #onServerTickStart()} at the start of each server tick</li>
+ * <li>call {@link #onServerTickEnd(double)} at the end of each server tick</li>
+ * </ul>
+ *
+ * <p>This interface and the other interfaces in this package define the API between Paper and spark. All other classes
+ * are subject to change, even between minor versions.</p>
+ */
+public interface PaperSparkModule {
+
+ /**
+ * Creates a new PaperSparkModule.
+ *
+ * @param compatibility the Paper/spark compatibility version
+ * @param server the server
+ * @param logger a logger that can be used by spark
+ * @param scheduler the scheduler
+ * @param classLookup a class lookup utility
+ * @return a new PaperSparkModule
+ */
+ static PaperSparkModule create(Compatibility compatibility, Server server, Logger logger, PaperScheduler scheduler, PaperClassLookup classLookup) {
+ return new PaperSparkPlugin(server, logger, scheduler, classLookup);
+ }
+
+ /**
+ * Enables the spark module.
+ */
+ void enable();
+
+ /**
+ * Disables the spark module.
+ */
+ void disable();
+
+ /**
+ * Handles a command execution.
+ *
+ * @param sender the sender
+ * @param args the command arguments
+ */
+ void executeCommand(CommandSender sender, String[] args);
+
+ /**
+ * Handles a tab completion request.
+ *
+ * @param sender the sender
+ * @param args the command arguments
+ * @return a list of completions
+ */
+ List<String> tabComplete(CommandSender sender, String[] args);
+
+ /**
+ * Called by Paper at the start of each server tick.
+ */
+ void onServerTickStart();
+
+ /**
+ * Called by Paper at the end of each server tick.
+ *
+ * @param duration the duration of the tick
+ */
+ void onServerTickEnd(double duration);
+
+}
diff --git a/spark-sponge7/build.gradle b/spark-sponge7/build.gradle
index 796c5a3..aaaab74 100644
--- a/spark-sponge7/build.gradle
+++ b/spark-sponge7/build.gradle
@@ -35,6 +35,8 @@ shadowJar {
exclude 'META-INF/maven/**'
exclude 'META-INF/proguard/**'
exclude 'META-INF/versions/**'
+ exclude '**/*.proto'
+ exclude '**/*.proto.bin'
}
artifacts {
diff --git a/spark-sponge8/build.gradle b/spark-sponge8/build.gradle
index cdd2330..56222dd 100644
--- a/spark-sponge8/build.gradle
+++ b/spark-sponge8/build.gradle
@@ -39,6 +39,8 @@ shadowJar {
exclude 'module-info.class'
exclude 'META-INF/maven/**'
exclude 'META-INF/proguard/**'
+ exclude '**/*.proto'
+ exclude '**/*.proto.bin'
}
artifacts {
diff --git a/spark-velocity/build.gradle b/spark-velocity/build.gradle
index 0507cbd..4ed64e7 100644
--- a/spark-velocity/build.gradle
+++ b/spark-velocity/build.gradle
@@ -37,6 +37,8 @@ shadowJar {
exclude 'module-info.class'
exclude 'META-INF/maven/**'
exclude 'META-INF/proguard/**'
+ exclude '**/*.proto'
+ exclude '**/*.proto.bin'
}
artifacts {
diff --git a/spark-velocity4/build.gradle b/spark-velocity4/build.gradle
index 9f0a7de..76c9e38 100644
--- a/spark-velocity4/build.gradle
+++ b/spark-velocity4/build.gradle
@@ -42,6 +42,8 @@ shadowJar {
exclude 'module-info.class'
exclude 'META-INF/maven/**'
exclude 'META-INF/proguard/**'
+ exclude '**/*.proto'
+ exclude '**/*.proto.bin'
}
artifacts {
diff --git a/spark-waterdog/build.gradle b/spark-waterdog/build.gradle
index 6f36bf7..9eb4a53 100644
--- a/spark-waterdog/build.gradle
+++ b/spark-waterdog/build.gradle
@@ -44,6 +44,8 @@ shadowJar {
exclude 'module-info.class'
exclude 'META-INF/maven/**'
exclude 'META-INF/proguard/**'
+ exclude '**/*.proto'
+ exclude '**/*.proto.bin'
}
artifacts {