-rw-r--r--    1  settings.gradle
-rw-r--r--    2  spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
-rw-r--r--    3  spark-common/src/main/java/me/lucko/spark/common/platform/PlatformInfo.java
-rw-r--r--    2  spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java
-rw-r--r--    5  spark-common/src/main/java/me/lucko/spark/common/util/classfinder/InstrumentationClassFinder.java
-rw-r--r--    1  spark-common/src/main/proto/spark/spark.proto
-rw-r--r--   62  spark-standalone-agent/build.gradle
-rw-r--r--   61  spark-standalone-agent/src/main/java/me/lucko/spark/standalone/StandaloneCommandSender.java
-rw-r--r--   84  spark-standalone-agent/src/main/java/me/lucko/spark/standalone/StandalonePlatformInfo.java
-rw-r--r--   98  spark-standalone-agent/src/main/java/me/lucko/spark/standalone/StandaloneSparkAgent.java
-rw-r--r--  160  spark-standalone-agent/src/main/java/me/lucko/spark/standalone/StandaloneSparkPlugin.java
-rw-r--r--   98  spark-standalone-agent/src/main/java/me/lucko/spark/standalone/remote/AbstractRemoteInterface.java
-rw-r--r--   28  spark-standalone-agent/src/main/java/me/lucko/spark/standalone/remote/RemoteInterface.java
-rw-r--r--   78  spark-standalone-agent/src/main/java/me/lucko/spark/standalone/remote/SshRemoteInterface.java
14 files changed, 679 insertions, 4 deletions
diff --git a/settings.gradle b/settings.gradle
index d3e1b45..d689d80 100644
--- a/settings.gradle
+++ b/settings.gradle
@@ -31,5 +31,6 @@ include (
'spark-neoforge',
'spark-paper',
'spark-sponge',
+ 'spark-standalone-agent',
'spark-velocity',
)
diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
index 0466e75..5e4e053 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
@@ -369,7 +369,7 @@ public class SparkPlatform {
try {
executeCommand0(sender, args);
future.complete(null);
- } catch (Exception e) {
+ } catch (Throwable e) {
this.plugin.log(Level.SEVERE, "Exception occurred whilst executing a spark command", e);
future.completeExceptionally(e);
} finally {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformInfo.java b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformInfo.java
index 1d71d53..96549a1 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformInfo.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformInfo.java
@@ -48,7 +48,8 @@ public interface PlatformInfo {
enum Type {
SERVER(PlatformMetadata.Type.SERVER),
CLIENT(PlatformMetadata.Type.CLIENT),
- PROXY(PlatformMetadata.Type.PROXY);
+ PROXY(PlatformMetadata.Type.PROXY),
+ APPLICATION(PlatformMetadata.Type.APPLICATION);
private final PlatformMetadata.Type type;
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java
index 93bd59d..d9fbff2 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java
@@ -219,7 +219,7 @@ public class PlatformStatisticsProvider {
List<CommandSender> senders = this.platform.getPlugin().getCommandSenders().collect(Collectors.toList());
PlatformInfo.Type platformType = this.platform.getPlugin().getPlatformInfo().getType();
- if (platformType != PlatformInfo.Type.CLIENT) {
+ if (platformType == PlatformInfo.Type.SERVER || platformType == PlatformInfo.Type.PROXY) {
long playerCount = senders.size() - 1; // includes console
builder.setPlayerCount(playerCount);
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/classfinder/InstrumentationClassFinder.java b/spark-common/src/main/java/me/lucko/spark/common/util/classfinder/InstrumentationClassFinder.java
index 5f06d64..1381d4d 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/util/classfinder/InstrumentationClassFinder.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/classfinder/InstrumentationClassFinder.java
@@ -57,7 +57,10 @@ public class InstrumentationClassFinder implements ClassFinder {
private final Map<String, Class<?>> classes = new HashMap<>();
public InstrumentationClassFinder(SparkPlugin plugin) {
- Instrumentation instrumentation = loadInstrumentation(plugin);
+ this(loadInstrumentation(plugin));
+ }
+
+ public InstrumentationClassFinder(Instrumentation instrumentation) {
if (instrumentation == null) {
return;
}
diff --git a/spark-common/src/main/proto/spark/spark.proto b/spark-common/src/main/proto/spark/spark.proto
index f8d7988..4b95a4a 100644
--- a/spark-common/src/main/proto/spark/spark.proto
+++ b/spark-common/src/main/proto/spark/spark.proto
@@ -21,6 +21,7 @@ message PlatformMetadata {
SERVER = 0;
CLIENT = 1;
PROXY = 2;
+ APPLICATION = 3;
}
}
diff --git a/spark-standalone-agent/build.gradle b/spark-standalone-agent/build.gradle
new file mode 100644
index 0000000..d0efd2b
--- /dev/null
+++ b/spark-standalone-agent/build.gradle
@@ -0,0 +1,62 @@
+plugins {
+ id 'net.kyori.blossom' version '1.3.0'
+ id 'com.gradleup.shadow' version '8.3.0'
+}
+
+dependencies {
+ implementation project(':spark-common')
+ implementation('net.kyori:adventure-text-serializer-ansi:4.17.0') {
+ exclude(module: 'adventure-bom')
+ exclude(module: 'adventure-api')
+ exclude(module: 'annotations')
+ }
+ implementation 'org.slf4j:slf4j-simple:2.0.16'
+ implementation 'com.google.code.gson:gson:2.9.0'
+ implementation 'com.google.guava:guava:31.1-jre'
+
+ implementation 'org.jline:jline-remote-ssh:3.28.0'
+ implementation 'org.apache.sshd:sshd-core:2.14.0'
+}
+
+tasks.withType(JavaCompile).configureEach {
+ options.compilerArgs += ['--add-modules', 'jdk.attach']
+ options.release = 11
+}
+
+blossom {
+ replaceTokenIn('src/main/java/me/lucko/spark/standalone/StandaloneSparkPlugin.java')
+ replaceToken '@version@', project.pluginVersion
+}
+
+jar {
+ manifest {
+ attributes(
+ 'Main-Class': 'me.lucko.spark.standalone.StandaloneSparkAgent',
+ 'Agent-Class': 'me.lucko.spark.standalone.StandaloneSparkAgent',
+ 'Premain-Class': 'me.lucko.spark.standalone.StandaloneSparkAgent'
+ )
+ }
+}
+
+shadowJar {
+ archiveFileName = "spark-${project.pluginVersion}-standalone-agent.jar"
+
+ relocate 'net.kyori.adventure', 'me.lucko.spark.lib.adventure'
+ relocate 'net.kyori.examination', 'me.lucko.spark.lib.adventure.examination'
+ relocate 'net.kyori.option', 'me.lucko.spark.lib.adventure.option'
+ relocate 'net.bytebuddy', 'me.lucko.spark.lib.bytebuddy'
+ relocate 'com.google.protobuf', 'me.lucko.spark.lib.protobuf'
+ relocate 'org.objectweb.asm', 'me.lucko.spark.lib.asm'
+ relocate 'one.profiler', 'me.lucko.spark.lib.asyncprofiler'
+ relocate 'me.lucko.bytesocks.client', 'me.lucko.spark.lib.bytesocks'
+ relocate 'org.java_websocket', 'me.lucko.spark.lib.bytesocks.ws'
+ relocate 'com.google.gson', 'me.lucko.spark.lib.gson'
+ relocate 'com.google.common', 'me.lucko.spark.lib.guava'
+
+ project.applyExcludes(delegate)
+}
+
+artifacts {
+ archives shadowJar
+ shadow shadowJar
+}
\ No newline at end of file
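
Note: the manifest above points Main-Class, Premain-Class and Agent-Class at the same class, and the `--add-modules jdk.attach` compiler flag exposes the `com.sun.tools.attach` API used when the jar is run directly. As a reference only, the entry-point signatures the JVM expects for those three manifest attributes are sketched below; the real implementations follow in StandaloneSparkAgent.java later in this patch.

    // Sketch: entry points implied by the manifest attributes above (reference only).
    import java.lang.instrument.Instrumentation;

    public class EntryPointsSketch {
        // Main-Class: run as a plain jar, attaches to another JVM via the jdk.attach API
        public static void main(String[] args) {}

        // Premain-Class: loaded at JVM startup with -javaagent:spark-<version>-standalone-agent.jar
        public static void premain(String agentArgs, Instrumentation instrumentation) {}

        // Agent-Class: injected into a running JVM via VirtualMachine#loadAgent
        public static void agentmain(String agentArgs, Instrumentation instrumentation) {}
    }
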
diff --git a/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/StandaloneCommandSender.java b/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/StandaloneCommandSender.java
new file mode 100644
index 0000000..92ace6b
--- /dev/null
+++ b/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/StandaloneCommandSender.java
@@ -0,0 +1,61 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.standalone;
+
+import me.lucko.spark.common.command.sender.AbstractCommandSender;
+import net.kyori.adventure.text.Component;
+import net.kyori.adventure.text.serializer.ansi.ANSIComponentSerializer;
+
+import java.util.UUID;
+
+public class StandaloneCommandSender extends AbstractCommandSender<StandaloneCommandSender.Output> {
+ public static final StandaloneCommandSender NO_OP = new StandaloneCommandSender(msg -> {});
+ public static final StandaloneCommandSender SYSTEM_OUT = new StandaloneCommandSender(System.out::println);
+
+ public StandaloneCommandSender(Output output) {
+ super(output);
+ }
+
+ @Override
+ public String getName() {
+ return "Standalone";
+ }
+
+ @Override
+ public UUID getUniqueId() {
+ return null;
+ }
+
+ @Override
+ public void sendMessage(Component message) {
+ this.delegate.sendMessage(ANSIComponentSerializer.ansi().serialize(message));
+ }
+
+ @Override
+ public boolean hasPermission(String permission) {
+ return true;
+ }
+
+ public interface Output {
+ void sendMessage(String message);
+ }
+
+}
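
StandaloneCommandSender.Output has a single method, so any String consumer can act as a sender; the NO_OP and SYSTEM_OUT constants above are built that way. A small hypothetical usage sketch (the logger name is illustrative, not part of this patch):

    // Hypothetical: route spark output to a java.util.logging Logger instead of stdout.
    import java.util.logging.Logger;

    import me.lucko.spark.standalone.StandaloneCommandSender;
    import net.kyori.adventure.text.Component;

    public class SenderSketch {
        public static void main(String[] args) {
            Logger logger = Logger.getLogger("spark-example"); // illustrative name
            StandaloneCommandSender sender = new StandaloneCommandSender(logger::info);
            // messages are serialised to ANSI text before reaching the Output delegate
            sender.sendMessage(Component.text("hello from spark"));
        }
    }
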
diff --git a/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/StandalonePlatformInfo.java b/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/StandalonePlatformInfo.java
new file mode 100644
index 0000000..48b8d21
--- /dev/null
+++ b/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/StandalonePlatformInfo.java
@@ -0,0 +1,84 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.standalone;
+
+import com.google.gson.Gson;
+import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
+import me.lucko.spark.common.platform.PlatformInfo;
+
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.net.URL;
+
+public class StandalonePlatformInfo implements PlatformInfo {
+ private final String version;
+ private final String minecraftVersion;
+
+ public StandalonePlatformInfo(String version) {
+ this.version = version;
+ this.minecraftVersion = detectVanillaMinecraftVersion();
+ }
+
+ @Override
+ public Type getType() {
+ return Type.APPLICATION;
+ }
+
+ @Override
+ public String getName() {
+ return "Standalone";
+ }
+
+ @Override
+ public String getBrand() {
+ return this.minecraftVersion != null ? "Vanilla Minecraft" : "Unknown";
+ }
+
+ @Override
+ public String getVersion() {
+ return this.version;
+ }
+
+ @Override
+ public String getMinecraftVersion() {
+ return this.minecraftVersion;
+ }
+
+ private static String detectVanillaMinecraftVersion() {
+ try {
+ Class<?> clazz = Class.forName("net.minecraft.bundler.Main");
+ URL resource = clazz.getClassLoader().getResource("version.json");
+ if (resource != null) {
+ try (InputStream stream = resource.openStream(); InputStreamReader reader = new InputStreamReader(stream)) {
+ JsonObject obj = new Gson().fromJson(reader, JsonObject.class);
+ JsonElement name = obj.get("name");
+ if (name.isJsonPrimitive() && name.getAsJsonPrimitive().isString()) {
+ return name.getAsString();
+ }
+ }
+ }
+ } catch (Exception e) {
+ // ignore
+ }
+ return null;
+ }
+}
diff --git a/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/StandaloneSparkAgent.java b/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/StandaloneSparkAgent.java
new file mode 100644
index 0000000..2820b85
--- /dev/null
+++ b/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/StandaloneSparkAgent.java
@@ -0,0 +1,98 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.standalone;
+
+import com.sun.tools.attach.VirtualMachine;
+import com.sun.tools.attach.VirtualMachineDescriptor;
+
+import java.lang.instrument.Instrumentation;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+public class StandaloneSparkAgent {
+
+ // Entry point when the agent is run as a normal jar
+ public static void main(String[] args) {
+ if (args.length == 0) {
+ System.err.println("Usage: java -jar spark-standalone-agent.jar <pid> [args...]");
+
+ List<VirtualMachineDescriptor> vms = VirtualMachine.list();
+ if (vms.isEmpty()) {
+ return;
+ }
+
+ System.out.println("Current JVM processes:");
+ for (VirtualMachineDescriptor vm : vms) {
+ System.out.println(" pid=" + vm.id() + " (" + vm.displayName() + ")");
+ }
+
+ return;
+ }
+
+ try {
+ VirtualMachine vm = VirtualMachine.attach(args[0]);
+ String agentPath = StandaloneSparkAgent.class.getProtectionDomain().getCodeSource().getLocation().getPath();
+ String arguments = String.join(",", Arrays.copyOfRange(args, 1, args.length));
+ vm.loadAgent(agentPath, arguments);
+ System.out.println("[spark] Agent loaded successfully.");
+ vm.detach();
+ } catch (Throwable e) {
+ System.err.println("Failed to attach agent to process " + args[0]);
+ e.printStackTrace(System.err);
+ }
+ }
+
+ // Entry point when the agent is loaded via -javaagent
+ public static void premain(String agentArgs, Instrumentation instrumentation) {
+ System.out.println("[spark] Loading standalone agent... (premain)");
+ init(agentArgs, instrumentation);
+ }
+
+ // Entry point when the agent is loaded via VirtualMachine#loadAgent
+ public static void agentmain(String agentArgs, Instrumentation instrumentation) {
+ System.out.println("[spark] Loading standalone agent... (agentmain)");
+ init(agentArgs, instrumentation);
+ }
+
+ private static void init(String agentArgs, Instrumentation instrumentation) {
+ try {
+ Map<String, String> arguments = new HashMap<>();
+ if (agentArgs == null) {
+ agentArgs = "";
+ }
+ for (String arg : agentArgs.split(",")) {
+ if (arg.contains("=")) {
+ String[] parts = arg.split("=", 2);
+ arguments.put(parts[0], parts[1]);
+ } else {
+ arguments.put(arg, "true");
+ }
+ }
+ new StandaloneSparkPlugin(instrumentation, arguments);
+ } catch (Throwable e) {
+ System.err.println("[spark] Loading failed :(");
+ e.printStackTrace(System.err);
+ }
+ }
+
+}
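
The agent options arrive as a single comma-separated string: in jar mode, main above joins the trailing arguments with ","; in -javaagent form, everything after "=" is passed straight through. Bare keys are stored as "true". A minimal sketch of how a string like "port=2222,start,open" ends up in the arguments map consumed by StandaloneSparkPlugin (the port value is illustrative):

    // Sketch of the option parsing performed by init() above, e.g.
    //   java -javaagent:spark-<version>-standalone-agent.jar=port=2222,start,open ...
    //   java -jar spark-<version>-standalone-agent.jar <pid> port=2222 start open
    import java.util.HashMap;
    import java.util.Map;

    public class AgentArgsSketch {
        public static void main(String[] args) {
            String agentArgs = "port=2222,start,open"; // illustrative values
            Map<String, String> arguments = new HashMap<>();
            for (String arg : agentArgs.split(",")) {
                String[] parts = arg.split("=", 2);
                arguments.put(parts[0], parts.length == 2 ? parts[1] : "true");
            }
            // -> port=2222, start=true, open=true (map iteration order not guaranteed)
            System.out.println(arguments);
        }
    }
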
diff --git a/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/StandaloneSparkPlugin.java b/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/StandaloneSparkPlugin.java
new file mode 100644
index 0000000..dd59f99
--- /dev/null
+++ b/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/StandaloneSparkPlugin.java
@@ -0,0 +1,160 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.standalone;
+
+import me.lucko.spark.common.SparkPlatform;
+import me.lucko.spark.common.SparkPlugin;
+import me.lucko.spark.common.command.CommandResponseHandler;
+import me.lucko.spark.common.platform.PlatformInfo;
+import me.lucko.spark.common.util.SparkThreadFactory;
+import me.lucko.spark.common.util.classfinder.ClassFinder;
+import me.lucko.spark.common.util.classfinder.FallbackClassFinder;
+import me.lucko.spark.common.util.classfinder.InstrumentationClassFinder;
+import me.lucko.spark.standalone.remote.RemoteInterface;
+import me.lucko.spark.standalone.remote.SshRemoteInterface;
+import net.kyori.adventure.text.Component;
+import net.kyori.adventure.text.format.NamedTextColor;
+
+import java.io.PrintWriter;
+import java.io.StringWriter;
+import java.lang.instrument.Instrumentation;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.logging.Level;
+import java.util.stream.Stream;
+
+public class StandaloneSparkPlugin implements SparkPlugin {
+ private final Instrumentation instrumentation;
+ private final Set<StandaloneCommandSender> senders;
+ private final ScheduledExecutorService scheduler;
+ private final SparkPlatform platform;
+
+ private final RemoteInterface remoteInterface;
+
+ public StandaloneSparkPlugin(Instrumentation instrumentation, Map<String, String> arguments) {
+ this.instrumentation = instrumentation;
+ this.senders = ConcurrentHashMap.newKeySet();
+ this.senders.add(StandaloneCommandSender.SYSTEM_OUT);
+ this.scheduler = Executors.newScheduledThreadPool(4, new SparkThreadFactory());
+ this.platform = new SparkPlatform(this);
+ this.platform.enable();
+ this.remoteInterface = new SshRemoteInterface(this, Integer.parseInt(arguments.getOrDefault("port", "0")));
+
+ if (arguments.containsKey("start")) {
+ execute(new String[]{"profiler", "start"}, StandaloneCommandSender.SYSTEM_OUT).join();
+
+ if (arguments.containsKey("open")) {
+ execute(new String[]{"profiler", "open"}, StandaloneCommandSender.SYSTEM_OUT).join();
+ }
+ }
+ }
+
+ public void disable() {
+ this.platform.disable();
+ this.scheduler.shutdown();
+ this.remoteInterface.close();
+ }
+
+ public CompletableFuture<Void> execute(String[] args, StandaloneCommandSender sender) {
+ return this.platform.executeCommand(sender, args);
+ }
+
+ public List<String> suggest(String[] args, StandaloneCommandSender sender) {
+ return this.platform.tabCompleteCommand(sender, args);
+ }
+
+ public void addSender(StandaloneCommandSender sender) {
+ this.senders.add(sender);
+ }
+
+ public void removeSender(StandaloneCommandSender sender) {
+ this.senders.remove(sender);
+ }
+
+ public CommandResponseHandler createResponseHandler(StandaloneCommandSender sender) {
+ return new CommandResponseHandler(this.platform, sender);
+ }
+
+ @Override
+ public String getVersion() {
+ return "@version@";
+ }
+
+ @Override
+ public Path getPluginDirectory() {
+ return Paths.get("spark");
+ }
+
+ @Override
+ public String getCommandName() {
+ return "spark";
+ }
+
+ @Override
+ public Stream<StandaloneCommandSender> getCommandSenders() {
+ return this.senders.stream();
+ }
+
+ @Override
+ public void executeAsync(Runnable task) {
+ this.scheduler.execute(task);
+ }
+
+ @Override
+ public void log(Level level, String msg) {
+ log(level, msg, null);
+ }
+
+ @Override
+ public void log(Level level, String msg, Throwable throwable) {
+ CommandResponseHandler resp = createResponseHandler(StandaloneCommandSender.SYSTEM_OUT);
+ if (level.intValue() >= 900 || throwable != null) { // severe/warning
+ resp.replyPrefixed(Component.text(msg, NamedTextColor.RED));
+ if (throwable != null) {
+ StringWriter stringWriter = new StringWriter();
+ throwable.printStackTrace(new PrintWriter(stringWriter));
+ resp.replyPrefixed(Component.text(stringWriter.toString(), NamedTextColor.YELLOW));
+ }
+ } else {
+ resp.replyPrefixed(Component.text(msg));
+ }
+ }
+
+ @Override
+ public PlatformInfo getPlatformInfo() {
+ return new StandalonePlatformInfo(getVersion());
+ }
+
+ @Override
+ public ClassFinder createClassFinder() {
+ return ClassFinder.combining(
+ new InstrumentationClassFinder(this.instrumentation),
+ FallbackClassFinder.INSTANCE
+ );
+ }
+}
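
execute and suggest are the command surface this plugin exposes; the remote interface below and the "start"/"open" agent options both go through them. A hedged sketch of driving the same flow directly, assuming an already-constructed plugin instance from the agent:

    // Sketch: what the "start" + "open" agent options do, expressed as direct calls.
    import java.util.concurrent.CompletableFuture;

    import me.lucko.spark.standalone.StandaloneCommandSender;
    import me.lucko.spark.standalone.StandaloneSparkPlugin;

    public class ExecuteSketch {
        static CompletableFuture<Void> startAndOpenProfiler(StandaloneSparkPlugin plugin) {
            StandaloneCommandSender sender = new StandaloneCommandSender(System.out::println);
            return plugin.execute(new String[]{"profiler", "start"}, sender)
                    .thenCompose(unused -> plugin.execute(new String[]{"profiler", "open"}, sender));
        }
    }
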
diff --git a/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/remote/AbstractRemoteInterface.java b/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/remote/AbstractRemoteInterface.java
new file mode 100644
index 0000000..1c03aa8
--- /dev/null
+++ b/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/remote/AbstractRemoteInterface.java
@@ -0,0 +1,98 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.standalone.remote;
+
+import me.lucko.spark.common.command.CommandResponseHandler;
+import me.lucko.spark.standalone.StandaloneCommandSender;
+import me.lucko.spark.standalone.StandaloneSparkPlugin;
+import net.kyori.adventure.text.Component;
+import org.jline.reader.Candidate;
+import org.jline.reader.EndOfFileException;
+import org.jline.reader.LineReader;
+import org.jline.reader.LineReaderBuilder;
+import org.jline.reader.UserInterruptException;
+import org.jline.terminal.Terminal;
+import org.jline.terminal.impl.AbstractTerminal;
+
+public abstract class AbstractRemoteInterface implements RemoteInterface {
+
+ protected final StandaloneSparkPlugin spark;
+
+ public AbstractRemoteInterface(StandaloneSparkPlugin spark) {
+ this.spark = spark;
+ }
+
+ private static String stripSlashSpark(String command) {
+ if (command.startsWith("/")) {
+ command = command.substring(1);
+ }
+ if (command.startsWith("spark ")) {
+ command = command.substring(6);
+ }
+ return command;
+ }
+
+ public void processSession(Terminal terminal, Runnable closer) {
+ LineReader reader = LineReaderBuilder.builder()
+ .terminal(terminal)
+ .completer((lineReader, parsedLine, list) -> {
+ String command = stripSlashSpark(parsedLine.line());
+ String[] args = command.split(" ", -1);
+ for (String suggestion : this.spark.suggest(args, StandaloneCommandSender.NO_OP)) {
+ list.add(new Candidate(suggestion));
+ }
+ })
+ .build();
+
+ StandaloneCommandSender sender = new StandaloneCommandSender(reader::printAbove);
+
+ this.spark.addSender(sender);
+ ((AbstractTerminal) terminal).setOnClose(() -> this.spark.removeSender(sender));
+
+ CommandResponseHandler resp = this.spark.createResponseHandler(sender);
+ resp.replyPrefixed(Component.text("spark remote interface - " + this.spark.getVersion()));
+ resp.replyPrefixed(Component.text("Use '/spark' commands as usual, or run 'exit' to exit."));
+
+ while (true) {
+ try {
+ String line = reader.readLine("> ");
+ if (line.trim().isEmpty()) {
+ continue;
+ }
+
+ String command = stripSlashSpark(line);
+ if (command.equals("exit")) {
+ closer.run();
+ return;
+ }
+
+ this.spark.execute(command.split(" ", 0), sender);
+
+ } catch (UserInterruptException e) {
+ // ignore
+ } catch (EndOfFileException e) {
+ this.spark.removeSender(sender);
+ return;
+ }
+ }
+ }
+
+}
diff --git a/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/remote/RemoteInterface.java b/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/remote/RemoteInterface.java
new file mode 100644
index 0000000..ce6a8dc
--- /dev/null
+++ b/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/remote/RemoteInterface.java
@@ -0,0 +1,28 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.standalone.remote;
+
+public interface RemoteInterface extends AutoCloseable {
+
+ @Override
+ void close();
+
+}
diff --git a/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/remote/SshRemoteInterface.java b/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/remote/SshRemoteInterface.java
new file mode 100644
index 0000000..025dadb
--- /dev/null
+++ b/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/remote/SshRemoteInterface.java
@@ -0,0 +1,78 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.standalone.remote;
+
+import me.lucko.spark.standalone.StandaloneSparkPlugin;
+import org.apache.sshd.server.SshServer;
+import org.apache.sshd.server.keyprovider.SimpleGeneratorHostKeyProvider;
+import org.jline.builtins.ssh.ShellFactoryImpl;
+
+import java.io.IOException;
+import java.security.MessageDigest;
+import java.security.SecureRandom;
+import java.util.logging.Level;
+
+public class SshRemoteInterface extends AbstractRemoteInterface {
+ private final String password;
+ private final SshServer sshd;
+
+ public SshRemoteInterface(StandaloneSparkPlugin spark, int port) {
+ super(spark);
+ this.password = new SecureRandom().ints(48, 122)
+ .filter(i -> (i <= 57 || i >= 65) && (i <= 90 || i >= 97))
+ .limit(32)
+ .collect(StringBuilder::new, StringBuilder::appendCodePoint, StringBuilder::append)
+ .toString();
+
+ this.sshd = SshServer.setUpDefaultServer();
+ if (port > 0) {
+ this.sshd.setPort(port);
+ }
+ this.sshd.setKeyPairProvider(new SimpleGeneratorHostKeyProvider());
+ this.sshd.setPasswordAuthenticator((username, password, session) -> "spark".equals(username) && MessageDigest.isEqual(this.password.getBytes(), password.getBytes()));
+ this.sshd.setShellFactory(new ShellFactoryImpl(shellParams -> this.processSession(shellParams.getTerminal(), shellParams.getCloser())));
+
+ new Thread(() -> {
+ try {
+ this.start();
+ } catch (IOException e) {
+ this.spark.log(Level.SEVERE, "Error whilst starting SSH server", e);
+ }
+ }, "spark-ssh-server").start();
+ }
+
+ private void start() throws IOException {
+ this.sshd.start();
+ this.spark.log(Level.INFO, "SSH Server started on port " + this.sshd.getPort());
+ this.spark.log(Level.INFO, "Connect using: ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -p " + this.sshd.getPort() + " spark@localhost");
+ this.spark.log(Level.INFO, "When prompted, enter the password: " + this.password);
+ }
+
+ @Override
+ public void close() {
+ try {
+ this.sshd.stop();
+ } catch (IOException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+}
\ No newline at end of file
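
For clarity: the generated password is 32 characters drawn from 0-9, A-Z and a-y; the upper bound of `ints(48, 122)` is exclusive, so 'z' (code point 122) is never produced. A standalone restatement of the generator, under that reading:

    // Restatement of the password generation above: 32 random characters from
    // digits, upper case letters, and lower case letters up to 'y' (the bound is exclusive).
    import java.security.SecureRandom;

    public class PasswordSketch {
        public static void main(String[] args) {
            String password = new SecureRandom().ints(48, 122)
                    .filter(i -> (i <= 57 || i >= 65) && (i <= 90 || i >= 97))
                    .limit(32)
                    .collect(StringBuilder::new, StringBuilder::appendCodePoint, StringBuilder::append)
                    .toString();
            System.out.println(password);
        }
    }
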