| author | lucko <git@lucko.me> | 2025-01-12 21:59:55 +0000 |
|---|---|---|
| committer | GitHub <noreply@github.com> | 2025-01-12 21:59:55 +0000 |
| commit | 273f9bc8aa6501d7de908f12e26dc68657304704 (patch) | |
| tree | 25abc4bea5da94746c4fea3b1999bd998ba5af7a /spark-standalone-agent/src/main/java/me/lucko/spark | |
| parent | 336102f88b38900b60888ab85ea13b388d4fe0dc (diff) | |
Implement standalone profiling agent (#480)
Diffstat (limited to 'spark-standalone-agent/src/main/java/me/lucko/spark')
 spark-standalone-agent/src/main/java/me/lucko/spark/standalone/StandaloneCommandSender.java        |  61
 spark-standalone-agent/src/main/java/me/lucko/spark/standalone/StandalonePlatformInfo.java         |  84
 spark-standalone-agent/src/main/java/me/lucko/spark/standalone/StandaloneSparkAgent.java           |  98
 spark-standalone-agent/src/main/java/me/lucko/spark/standalone/StandaloneSparkPlugin.java          | 160
 spark-standalone-agent/src/main/java/me/lucko/spark/standalone/remote/AbstractRemoteInterface.java |  98
 spark-standalone-agent/src/main/java/me/lucko/spark/standalone/remote/RemoteInterface.java         |  28
 spark-standalone-agent/src/main/java/me/lucko/spark/standalone/remote/SshRemoteInterface.java      |  78
7 files changed, 607 insertions, 0 deletions
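The new module is a self-contained agent that embeds spark's profiler into an already-running JVM (or one started with -javaagent). Going by the entry points and the argument parser in the diff below, invocation looks roughly like this — the jar name is whatever the build produces, and 12345 is a placeholder pid:

    # attach to a running JVM by pid, start the profiler immediately, open an SSH console on port 2222
    java -jar spark-standalone-agent.jar 12345 port=2222 start

    # or load at JVM startup; agent arguments are comma-separated key=value pairs
    java -javaagent:spark-standalone-agent.jar=port=2222,start -jar your-app.jar

Bare keys such as start and open are treated as boolean flags; port selects the SSH listen port (with the default of 0, the server appears to bind an ephemeral port, which is then printed to the console).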
diff --git a/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/StandaloneCommandSender.java b/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/StandaloneCommandSender.java
new file mode 100644
index 0000000..92ace6b
--- /dev/null
+++ b/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/StandaloneCommandSender.java
@@ -0,0 +1,61 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.standalone;
+
+import me.lucko.spark.common.command.sender.AbstractCommandSender;
+import net.kyori.adventure.text.Component;
+import net.kyori.adventure.text.serializer.ansi.ANSIComponentSerializer;
+
+import java.util.UUID;
+
+public class StandaloneCommandSender extends AbstractCommandSender<StandaloneCommandSender.Output> {
+    public static final StandaloneCommandSender NO_OP = new StandaloneCommandSender(msg -> {});
+    public static final StandaloneCommandSender SYSTEM_OUT = new StandaloneCommandSender(System.out::println);
+
+    public StandaloneCommandSender(Output output) {
+        super(output);
+    }
+
+    @Override
+    public String getName() {
+        return "Standalone";
+    }
+
+    @Override
+    public UUID getUniqueId() {
+        return null;
+    }
+
+    @Override
+    public void sendMessage(Component message) {
+        this.delegate.sendMessage(ANSIComponentSerializer.ansi().serialize(message));
+    }
+
+    @Override
+    public boolean hasPermission(String permission) {
+        return true;
+    }
+
+    public interface Output {
+        void sendMessage(String message);
+    }
+
+}
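StandaloneCommandSender adapts spark's Adventure Component messages to a plain String consumer, serialising them with the ANSI serializer so colours survive on a terminal. Because Output has a single method, any String consumer can act as a sink — a minimal sketch (the logging target here is hypothetical, not part of the change):

    // hypothetical: route spark's console feedback into java.util.logging instead of stdout
    java.util.logging.Logger logger = java.util.logging.Logger.getLogger("spark");
    StandaloneCommandSender logSender = new StandaloneCommandSender(logger::info);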
diff --git a/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/StandalonePlatformInfo.java b/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/StandalonePlatformInfo.java
new file mode 100644
index 0000000..48b8d21
--- /dev/null
+++ b/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/StandalonePlatformInfo.java
@@ -0,0 +1,84 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.standalone;
+
+import com.google.gson.Gson;
+import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
+import me.lucko.spark.common.platform.PlatformInfo;
+
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.net.URL;
+
+public class StandalonePlatformInfo implements PlatformInfo {
+    private final String version;
+    private final String minecraftVersion;
+
+    public StandalonePlatformInfo(String version) {
+        this.version = version;
+        this.minecraftVersion = detectVanillaMinecraftVersion();
+    }
+
+    @Override
+    public Type getType() {
+        return Type.APPLICATION;
+    }
+
+    @Override
+    public String getName() {
+        return "Standalone";
+    }
+
+    @Override
+    public String getBrand() {
+        return this.minecraftVersion != null ? "Vanilla Minecraft" : "Unknown";
+    }
+
+    @Override
+    public String getVersion() {
+        return this.version;
+    }
+
+    @Override
+    public String getMinecraftVersion() {
+        return this.minecraftVersion;
+    }
+
+    private static String detectVanillaMinecraftVersion() {
+        try {
+            Class<?> clazz = Class.forName("net.minecraft.bundler.Main");
+            URL resource = clazz.getClassLoader().getResource("version.json");
+            if (resource != null) {
+                try (InputStream stream = resource.openStream(); InputStreamReader reader = new InputStreamReader(stream)) {
+                    JsonObject obj = new Gson().fromJson(reader, JsonObject.class);
+                    JsonElement name = obj.get("name");
+                    if (name.isJsonPrimitive() && name.getAsJsonPrimitive().isString()) {
+                        return name.getAsString();
+                    }
+                }
+            }
+        } catch (Exception e) {
+            // ignore
+        }
+        return null;
+    }
+}
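detectVanillaMinecraftVersion only yields a value when the target JVM was launched from the vanilla server bundler (net.minecraft.bundler.Main) and a version.json resource is visible on its classpath; otherwise getBrand() reports "Unknown". The parser only requires a top-level string field named "name" — an illustrative minimal document (the version number is made up, and real files carry additional fields):

    {"name": "1.21.4"}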
diff --git a/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/StandaloneSparkAgent.java b/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/StandaloneSparkAgent.java
new file mode 100644
index 0000000..2820b85
--- /dev/null
+++ b/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/StandaloneSparkAgent.java
@@ -0,0 +1,98 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.standalone;
+
+import com.sun.tools.attach.VirtualMachine;
+import com.sun.tools.attach.VirtualMachineDescriptor;
+
+import java.lang.instrument.Instrumentation;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+public class StandaloneSparkAgent {
+
+    // Entry point when the agent is run as a normal jar
+    public static void main(String[] args) {
+        if (args.length == 0) {
+            System.err.println("Usage: java -jar spark-standalone-agent.jar <pid> [args...]");
+
+            List<VirtualMachineDescriptor> vms = VirtualMachine.list();
+            if (vms.isEmpty()) {
+                return;
+            }
+
+            System.out.println("Current JVM processes:");
+            for (VirtualMachineDescriptor vm : vms) {
+                System.out.println(" pid=" + vm.id() + " (" + vm.displayName() + ")");
+            }
+
+            return;
+        }
+
+        try {
+            VirtualMachine vm = VirtualMachine.attach(args[0]);
+            String agentPath = StandaloneSparkAgent.class.getProtectionDomain().getCodeSource().getLocation().getPath();
+            String arguments = String.join(",", Arrays.copyOfRange(args, 1, args.length));
+            vm.loadAgent(agentPath, arguments);
+            System.out.println("[spark] Agent loaded successfully.");
+            vm.detach();
+        } catch (Throwable e) {
+            System.err.println("Failed to attach agent to process " + args[0]);
+            e.printStackTrace(System.err);
+        }
+    }
+
+    // Entry point when the agent is loaded via -javaagent
+    public static void premain(String agentArgs, Instrumentation instrumentation) {
+        System.out.println("[spark] Loading standalone agent... (premain)");
+        init(agentArgs, instrumentation);
+    }
+
+    // Entry point when the agent is loaded via VirtualMachine#loadAgent
+    public static void agentmain(String agentArgs, Instrumentation instrumentation) {
+        System.out.println("[spark] Loading standalone agent... (agentmain)");
+        init(agentArgs, instrumentation);
+    }
+
+    private static void init(String agentArgs, Instrumentation instrumentation) {
+        try {
+            Map<String, String> arguments = new HashMap<>();
+            if (agentArgs == null) {
+                agentArgs = "";
+            }
+            for (String arg : agentArgs.split(",")) {
+                if (arg.contains("=")) {
+                    String[] parts = arg.split("=", 2);
+                    arguments.put(parts[0], parts[1]);
+                } else {
+                    arguments.put(arg, "true");
+                }
+            }
+            new StandaloneSparkPlugin(instrumentation, arguments);
+        } catch (Throwable e) {
+            System.err.println("[spark] Loading failed :(");
+            e.printStackTrace(System.err);
+        }
+    }
+
+}
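Run with no arguments, main() prints the usage line and then enumerates attachable JVMs via the attach API; a typical invocation and output might look like the following (the pid and display name are illustrative):

    $ java -jar spark-standalone-agent.jar
    Usage: java -jar spark-standalone-agent.jar <pid> [args...]
    Current JVM processes:
     pid=12345 (net.minecraft.bundler.Main)

Everything after the pid is joined with commas and forwarded to agentmain, so the attach form and the -javaagent form end up parsing the same argument map.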
diff --git a/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/StandaloneSparkPlugin.java b/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/StandaloneSparkPlugin.java
new file mode 100644
index 0000000..dd59f99
--- /dev/null
+++ b/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/StandaloneSparkPlugin.java
@@ -0,0 +1,160 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.standalone;
+
+import me.lucko.spark.common.SparkPlatform;
+import me.lucko.spark.common.SparkPlugin;
+import me.lucko.spark.common.command.CommandResponseHandler;
+import me.lucko.spark.common.platform.PlatformInfo;
+import me.lucko.spark.common.util.SparkThreadFactory;
+import me.lucko.spark.common.util.classfinder.ClassFinder;
+import me.lucko.spark.common.util.classfinder.FallbackClassFinder;
+import me.lucko.spark.common.util.classfinder.InstrumentationClassFinder;
+import me.lucko.spark.standalone.remote.RemoteInterface;
+import me.lucko.spark.standalone.remote.SshRemoteInterface;
+import net.kyori.adventure.text.Component;
+import net.kyori.adventure.text.format.NamedTextColor;
+
+import java.io.PrintWriter;
+import java.io.StringWriter;
+import java.lang.instrument.Instrumentation;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.logging.Level;
+import java.util.stream.Stream;
+
+public class StandaloneSparkPlugin implements SparkPlugin {
+    private final Instrumentation instrumentation;
+    private final Set<StandaloneCommandSender> senders;
+    private final ScheduledExecutorService scheduler;
+    private final SparkPlatform platform;
+
+    private final RemoteInterface remoteInterface;
+
+    public StandaloneSparkPlugin(Instrumentation instrumentation, Map<String, String> arguments) {
+        this.instrumentation = instrumentation;
+        this.senders = ConcurrentHashMap.newKeySet();
+        this.senders.add(StandaloneCommandSender.SYSTEM_OUT);
+        this.scheduler = Executors.newScheduledThreadPool(4, new SparkThreadFactory());
+        this.platform = new SparkPlatform(this);
+        this.platform.enable();
+        this.remoteInterface = new SshRemoteInterface(this, Integer.parseInt(arguments.getOrDefault("port", "0")));
+
+        if (arguments.containsKey("start")) {
+            execute(new String[]{"profiler", "start"}, StandaloneCommandSender.SYSTEM_OUT).join();
+
+            if (arguments.containsKey("open")) {
+                execute(new String[]{"profiler", "open"}, StandaloneCommandSender.SYSTEM_OUT).join();
+            }
+        }
+    }
+
+    public void disable() {
+        this.platform.disable();
+        this.scheduler.shutdown();
+        this.remoteInterface.close();
+    }
+
+    public CompletableFuture<Void> execute(String[] args, StandaloneCommandSender sender) {
+        return this.platform.executeCommand(sender, args);
+    }
+
+    public List<String> suggest(String[] args, StandaloneCommandSender sender) {
+        return this.platform.tabCompleteCommand(sender, args);
+    }
+
+    public void addSender(StandaloneCommandSender sender) {
+        this.senders.add(sender);
+    }
+
+    public void removeSender(StandaloneCommandSender sender) {
+        this.senders.remove(sender);
+    }
+
+    public CommandResponseHandler createResponseHandler(StandaloneCommandSender sender) {
+        return new CommandResponseHandler(this.platform, sender);
+    }
+
+    @Override
+    public String getVersion() {
+        return "@version@";
+    }
+
+    @Override
+    public Path getPluginDirectory() {
+        return Paths.get("spark");
+    }
+
+    @Override
+    public String getCommandName() {
+        return "spark";
+    }
+
+    @Override
+    public Stream<StandaloneCommandSender> getCommandSenders() {
+        return this.senders.stream();
+    }
+
+    @Override
+    public void executeAsync(Runnable task) {
+        this.scheduler.execute(task);
+    }
+
+    @Override
+    public void log(Level level, String msg) {
+        log(level, msg, null);
+    }
+
+    @Override
+    public void log(Level level, String msg, Throwable throwable) {
+        CommandResponseHandler resp = createResponseHandler(StandaloneCommandSender.SYSTEM_OUT);
+        if (level.intValue() >= 900 || throwable != null) { // severe/warning
+            resp.replyPrefixed(Component.text(msg, NamedTextColor.RED));
+            if (throwable != null) {
+                StringWriter stringWriter = new StringWriter();
+                throwable.printStackTrace(new PrintWriter(stringWriter));
+                resp.replyPrefixed(Component.text(stringWriter.toString(), NamedTextColor.YELLOW));
+            }
+        } else {
+            resp.replyPrefixed(Component.text(msg));
+        }
+    }
+
+    @Override
+    public PlatformInfo getPlatformInfo() {
+        return new StandalonePlatformInfo(getVersion());
+    }
+
+    @Override
+    public ClassFinder createClassFinder() {
+        return ClassFinder.combining(
+                new InstrumentationClassFinder(this.instrumentation),
+                FallbackClassFinder.INSTANCE
+        );
+    }
+}
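Besides the SSH console, the plugin exposes execute and suggest so that the host process (or the agent's own start/open flags, as in the constructor above) can drive spark programmatically. A brief sketch, assuming a StandaloneSparkPlugin reference named plugin:

    // run a spark command, sending feedback to the process stdout, and wait for it to finish
    plugin.execute(new String[]{"profiler", "start"}, StandaloneCommandSender.SYSTEM_OUT).join();

    // tab-completion candidates for a partially typed command
    List<String> candidates = plugin.suggest(new String[]{"profiler", ""}, StandaloneCommandSender.NO_OP);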
diff --git a/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/remote/AbstractRemoteInterface.java b/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/remote/AbstractRemoteInterface.java
new file mode 100644
index 0000000..1c03aa8
--- /dev/null
+++ b/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/remote/AbstractRemoteInterface.java
@@ -0,0 +1,98 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.standalone.remote;
+
+import me.lucko.spark.common.command.CommandResponseHandler;
+import me.lucko.spark.standalone.StandaloneCommandSender;
+import me.lucko.spark.standalone.StandaloneSparkPlugin;
+import net.kyori.adventure.text.Component;
+import org.jline.reader.Candidate;
+import org.jline.reader.EndOfFileException;
+import org.jline.reader.LineReader;
+import org.jline.reader.LineReaderBuilder;
+import org.jline.reader.UserInterruptException;
+import org.jline.terminal.Terminal;
+import org.jline.terminal.impl.AbstractTerminal;
+
+public abstract class AbstractRemoteInterface implements RemoteInterface {
+
+    protected final StandaloneSparkPlugin spark;
+
+    public AbstractRemoteInterface(StandaloneSparkPlugin spark) {
+        this.spark = spark;
+    }
+
+    private static String stripSlashSpark(String command) {
+        if (command.startsWith("/")) {
+            command = command.substring(1);
+        }
+        if (command.startsWith("spark ")) {
+            command = command.substring(6);
+        }
+        return command;
+    }
+
+    public void processSession(Terminal terminal, Runnable closer) {
+        LineReader reader = LineReaderBuilder.builder()
+                .terminal(terminal)
+                .completer((lineReader, parsedLine, list) -> {
+                    String command = stripSlashSpark(parsedLine.line());
+                    String[] args = command.split(" ", -1);
+                    for (String suggestion : this.spark.suggest(args, StandaloneCommandSender.NO_OP)) {
+                        list.add(new Candidate(suggestion));
+                    }
+                })
+                .build();
+
+        StandaloneCommandSender sender = new StandaloneCommandSender(reader::printAbove);
+
+        this.spark.addSender(sender);
+        ((AbstractTerminal) terminal).setOnClose(() -> this.spark.removeSender(sender));
+
+        CommandResponseHandler resp = this.spark.createResponseHandler(sender);
+        resp.replyPrefixed(Component.text("spark remote interface - " + this.spark.getVersion()));
+        resp.replyPrefixed(Component.text("Use '/spark' commands as usual, or run 'exit' to exit."));
+
+        while (true) {
+            try {
+                String line = reader.readLine("> ");
+                if (line.trim().isEmpty()) {
+                    continue;
+                }
+
+                String command = stripSlashSpark(line);
+                if (command.equals("exit")) {
+                    closer.run();
+                    return;
+                }
+
+                this.spark.execute(command.split(" ", 0), sender);
+
+            } catch (UserInterruptException e) {
+                // ignore
+            } catch (EndOfFileException e) {
+                this.spark.removeSender(sender);
+                return;
+            }
+        }
+    }
+
+}
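Because stripSlashSpark drops an optional leading "/" and "spark " prefix, the remote console accepts the same command in several spellings; each of these illustrative inputs runs the profiler start command, and the literal input exit ends the session:

    > /spark profiler start
    > spark profiler start
    > profiler start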
diff --git a/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/remote/RemoteInterface.java b/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/remote/RemoteInterface.java
new file mode 100644
index 0000000..ce6a8dc
--- /dev/null
+++ b/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/remote/RemoteInterface.java
@@ -0,0 +1,28 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.standalone.remote;
+
+public interface RemoteInterface extends AutoCloseable {
+
+    @Override
+    void close();
+
+}
diff --git a/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/remote/SshRemoteInterface.java b/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/remote/SshRemoteInterface.java
new file mode 100644
index 0000000..025dadb
--- /dev/null
+++ b/spark-standalone-agent/src/main/java/me/lucko/spark/standalone/remote/SshRemoteInterface.java
@@ -0,0 +1,78 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.standalone.remote;
+
+import me.lucko.spark.standalone.StandaloneSparkPlugin;
+import org.apache.sshd.server.SshServer;
+import org.apache.sshd.server.keyprovider.SimpleGeneratorHostKeyProvider;
+import org.jline.builtins.ssh.ShellFactoryImpl;
+
+import java.io.IOException;
+import java.security.MessageDigest;
+import java.security.SecureRandom;
+import java.util.logging.Level;
+
+public class SshRemoteInterface extends AbstractRemoteInterface {
+    private final String password;
+    private final SshServer sshd;
+
+    public SshRemoteInterface(StandaloneSparkPlugin spark, int port) {
+        super(spark);
+        this.password = new SecureRandom().ints(48, 122)
+                .filter(i -> (i <= 57 || i >= 65) && (i <= 90 || i >= 97))
+                .limit(32)
+                .collect(StringBuilder::new, StringBuilder::appendCodePoint, StringBuilder::append)
+                .toString();
+
+        this.sshd = SshServer.setUpDefaultServer();
+        if (port > 0) {
+            this.sshd.setPort(port);
+        }
+        this.sshd.setKeyPairProvider(new SimpleGeneratorHostKeyProvider());
+        this.sshd.setPasswordAuthenticator((username, password, session) -> "spark".equals(username) && MessageDigest.isEqual(this.password.getBytes(), password.getBytes()));
+        this.sshd.setShellFactory(new ShellFactoryImpl(shellParams -> this.processSession(shellParams.getTerminal(), shellParams.getCloser())));
+
+        new Thread(() -> {
+            try {
+                this.start();
+            } catch (IOException e) {
+                this.spark.log(Level.SEVERE, "Error whilst starting SSH server", e);
+            }
+        }, "spark-ssh-server").start();
+    }
+
+    private void start() throws IOException {
+        this.sshd.start();
+        this.spark.log(Level.INFO, "SSH Server started on port " + this.sshd.getPort());
+        this.spark.log(Level.INFO, "Connect using: ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -p " + this.sshd.getPort() + " spark@localhost");
+        this.spark.log(Level.INFO, "When prompted, enter the password: " + this.password);
+    }
+
+    @Override
+    public void close() {
+        try {
+            this.sshd.stop();
+        } catch (IOException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+}
\ No newline at end of file
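SshRemoteInterface generates a random 32-character alphanumeric password on each run and prints the port, a ready-made connect command, and the password to the host process's console (see start() above). A connection then looks like the following, where 36121 stands in for whatever port the log reports; the relaxed host-key options are the ones suggested by the agent itself, since the key comes from SimpleGeneratorHostKeyProvider and appears to be held in memory only, so it changes between runs:

    ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -p 36121 spark@localhost

Log in as user spark with the password printed by the agent; the session then drops into the JLine-backed remote console implemented in AbstractRemoteInterface.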