Diffstat (limited to 'spark-common')
10 files changed, 110 insertions, 151 deletions
diff --git a/spark-common/build.gradle b/spark-common/build.gradle
index bc493f3..ce09d51 100644
--- a/spark-common/build.gradle
+++ b/spark-common/build.gradle
@@ -8,13 +8,10 @@ license {
 
 dependencies {
     api project(':spark-api')
-    implementation 'com.github.jvm-profiling-tools:async-profiler:v2.7'
+    implementation 'com.github.jvm-profiling-tools:async-profiler:v2.8.1'
     implementation 'org.ow2.asm:asm:9.1'
     implementation 'com.google.protobuf:protobuf-javalite:3.15.6'
-    implementation 'com.squareup.okhttp3:okhttp:3.14.1'
-    implementation 'com.squareup.okio:okio:1.17.3'
     implementation 'net.bytebuddy:byte-buddy-agent:1.11.0'
-    implementation 'org.tukaani:xz:1.8'
     api('net.kyori:adventure-api:4.11.0') {
         exclude(module: 'adventure-bom')
         exclude(module: 'checker-qual')
@@ -37,13 +34,6 @@ dependencies {
     compileOnly 'org.checkerframework:checker-qual:3.8.0'
 }
 
-processResources {
-    from(sourceSets.main.resources.srcDirs) {
-        include 'spark/linux/libasyncProfiler.so'
-        include 'spark/macosx/libasyncProfiler.so'
-    }
-}
-
 protobuf {
     protoc {
         if (System.getProperty("os.name") == "Mac OS X" && System.getProperty("os.arch") == "aarch64") {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
index 0ef4556..f92abf3 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
@@ -54,8 +54,6 @@ import me.lucko.spark.common.util.TemporaryFiles;
 
 import net.kyori.adventure.text.event.ClickEvent;
 
-import okhttp3.OkHttpClient;
-
 import java.io.IOException;
 import java.nio.file.Files;
 import java.nio.file.Path;
@@ -93,7 +91,6 @@ public class SparkPlatform {
     private final SparkPlugin plugin;
     private final Configuration configuration;
     private final String viewerUrl;
-    private final OkHttpClient httpClient;
     private final BytebinClient bytebinClient;
     private final boolean disableResponseBroadcast;
     private final List<CommandModule> commandModules;
@@ -116,9 +113,7 @@ public class SparkPlatform {
 
         this.viewerUrl = this.configuration.getString("viewerUrl", "https://spark.lucko.me/");
         String bytebinUrl = this.configuration.getString("bytebinUrl", "https://bytebin.lucko.me/");
-
-        this.httpClient = new OkHttpClient();
-        this.bytebinClient = new BytebinClient(this.httpClient, bytebinUrl, "spark-plugin");
+        this.bytebinClient = new BytebinClient(bytebinUrl, "spark-plugin");
 
         this.disableResponseBroadcast = this.configuration.getBoolean("disableResponseBroadcast", false);
 
@@ -198,11 +193,6 @@ public class SparkPlatform {
         SparkApi.unregister();
 
         TemporaryFiles.deleteTemporaryFiles();
-
-        // shutdown okhttp
-        // see: https://github.com/square/okhttp/issues/4029
-        this.httpClient.dispatcher().executorService().shutdown();
-        this.httpClient.connectionPool().evictAll();
     }
 
     public SparkPlugin getPlugin() {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java
index 1030f35..5bd62a8 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java
@@ -36,8 +36,6 @@ import me.lucko.spark.proto.SparkHeapProtos;
 
 import net.kyori.adventure.text.event.ClickEvent;
 
-import okhttp3.MediaType;
-
 import java.io.IOException;
 import java.nio.file.Files;
 import java.nio.file.Path;
@@ -54,7 +52,7 @@ import static net.kyori.adventure.text.format.NamedTextColor.GREEN;
 import static net.kyori.adventure.text.format.NamedTextColor.RED;
 
 public class HeapAnalysisModule implements CommandModule {
-    private static final MediaType SPARK_HEAP_MEDIA_TYPE = MediaType.parse("application/x-spark-heap");
+    private static final String SPARK_HEAP_MEDIA_TYPE = "application/x-spark-heap";
 
     @Override
     public void registerCommands(Consumer<Command> consumer) {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
index fd5cd67..0a80c31 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
@@ -44,8 +44,6 @@ import me.lucko.spark.proto.SparkSamplerProtos;
 
 import net.kyori.adventure.text.event.ClickEvent;
 
-import okhttp3.MediaType;
-
 import java.io.IOException;
 import java.nio.file.Files;
 import java.nio.file.Path;
@@ -66,7 +64,7 @@ import static net.kyori.adventure.text.format.NamedTextColor.GRAY;
 import static net.kyori.adventure.text.format.NamedTextColor.RED;
 
 public class SamplerModule implements CommandModule {
-    private static final MediaType SPARK_SAMPLER_MEDIA_TYPE = MediaType.parse("application/x-spark-sampler");
+    private static final String SPARK_SAMPLER_MEDIA_TYPE = "application/x-spark-sampler";
 
     /** The sampler instance currently running, if any */
     private Sampler activeSampler = null;
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java
index 9d54f50..fd0c413 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java
@@ -76,17 +76,29 @@ public interface ThreadDumper {
      * the game (server/client) thread.
      */
     final class GameThread implements Supplier<ThreadDumper> {
+        private Supplier<Thread> threadSupplier;
         private Specific dumper = null;
 
+        public GameThread() {
+
+        }
+
+        public GameThread(Supplier<Thread> threadSupplier) {
+            this.threadSupplier = threadSupplier;
+        }
+
         @Override
         public ThreadDumper get() {
+            if (this.dumper == null) {
+                setThread(this.threadSupplier.get());
+                this.threadSupplier = null;
+            }
+
             return Objects.requireNonNull(this.dumper, "dumper");
         }
 
-        public void ensureSetup() {
-            if (this.dumper == null) {
-                this.dumper = new Specific(new long[]{Thread.currentThread().getId()});
-            }
+        public void setThread(Thread thread) {
+            this.dumper = new Specific(new long[]{thread.getId()});
         }
     }
 
@@ -98,6 +110,10 @@ public interface ThreadDumper {
         private Set<Thread> threads;
         private Set<String> threadNamesLowerCase;
 
+        public Specific(Thread thread) {
+            this.ids = new long[]{thread.getId()};
+        }
+
         public Specific(long[] ids) {
             this.ids = ids;
         }
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java
index d642a53..ef2c035 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java
@@ -29,13 +29,16 @@ import me.lucko.spark.common.util.TemporaryFiles;
 import one.profiler.AsyncProfiler;
 import one.profiler.Events;
 
+import java.io.BufferedReader;
 import java.io.InputStream;
+import java.io.InputStreamReader;
 import java.net.URL;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.StandardCopyOption;
 import java.util.Locale;
 import java.util.logging.Level;
+import java.util.stream.Collectors;
 
 /**
  * Provides a bridge between spark and async-profiler.
@@ -108,8 +111,13 @@ public enum AsyncProfilerAccess {
         String os = System.getProperty("os.name").toLowerCase(Locale.ROOT).replace(" ", "");
         String arch = System.getProperty("os.arch").toLowerCase(Locale.ROOT);
 
+        if (os.equals("linux") && arch.equals("amd64") && isLinuxMusl()) {
+            arch = "amd64-musl";
+        }
+
         Table<String, String, String> supported = ImmutableTable.<String, String, String>builder()
                 .put("linux", "amd64", "linux/amd64")
+                .put("linux", "amd64-musl", "linux/amd64-musl")
                 .put("linux", "aarch64", "linux/aarch64")
                 .put("macosx", "amd64", "macos")
                 .put("macosx", "aarch64", "macos")
@@ -190,4 +198,20 @@ public enum AsyncProfilerAccess {
             super("A runtime error occurred whilst loading the native library", cause);
         }
     }
+
+    // Checks if the system is using musl instead of glibc
+    private static boolean isLinuxMusl() {
+        try {
+            InputStream stream = new ProcessBuilder("sh", "-c", "ldd `which ls`")
+                    .start()
+                    .getInputStream();
+
+            BufferedReader reader = new BufferedReader(new InputStreamReader(stream));
+            String output = reader.lines().collect(Collectors.joining());
+            return output.contains("musl"); // shrug
+        } catch (Throwable e) {
+            // ignore
+            return false;
+        }
+    }
 }
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/AbstractHttpClient.java b/spark-common/src/main/java/me/lucko/spark/common/util/AbstractHttpClient.java
deleted file mode 100644
index 8ece3d4..0000000
--- a/spark-common/src/main/java/me/lucko/spark/common/util/AbstractHttpClient.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * This file is part of spark.
- *
- * Copyright (c) lucko (Luck) <luck@lucko.me>
- * Copyright (c) contributors
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-package me.lucko.spark.common.util;
-
-import okhttp3.OkHttpClient;
-import okhttp3.Request;
-import okhttp3.Response;
-
-import java.io.IOException;
-
-public class AbstractHttpClient {
-
-    /** The http client */
-    protected final OkHttpClient okHttp;
-
-    public AbstractHttpClient(OkHttpClient okHttp) {
-        this.okHttp = okHttp;
-    }
-
-    protected Response makeHttpRequest(Request request) throws IOException {
-        Response response = this.okHttp.newCall(request).execute();
-        if (!response.isSuccessful()) {
-            response.close();
-            throw new RuntimeException("Request was unsuccessful: " + response.code() + " - " + response.message());
-        }
-        return response;
-    }
-}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/BytebinClient.java b/spark-common/src/main/java/me/lucko/spark/common/util/BytebinClient.java
index c2ca1b1..e69b94e 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/util/BytebinClient.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/BytebinClient.java
@@ -22,73 +22,66 @@ package me.lucko.spark.common.util;
 
 import com.google.protobuf.AbstractMessageLite;
 
-import okhttp3.MediaType;
-import okhttp3.OkHttpClient;
-import okhttp3.Request;
-import okhttp3.RequestBody;
-import okhttp3.Response;
-
-import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.OutputStream;
+import java.net.HttpURLConnection;
+import java.net.URL;
+import java.util.concurrent.TimeUnit;
+import java.util.function.Consumer;
 import java.util.zip.GZIPOutputStream;
 
 /**
  * Utility for posting content to bytebin.
 */
-public class BytebinClient extends AbstractHttpClient {
+public class BytebinClient {
 
     /** The bytebin URL */
     private final String url;
     /** The client user agent */
     private final String userAgent;
 
-    /**
-     * Creates a new bytebin instance
-     *
-     * @param url the bytebin url
-     * @param userAgent the client user agent string
-     */
-    public BytebinClient(OkHttpClient okHttpClient, String url, String userAgent) {
-        super(okHttpClient);
+    public BytebinClient(String url, String userAgent) {
         this.url = url + (url.endsWith("/") ? "" : "/");
         this.userAgent = userAgent;
     }
 
-    /**
-     * POSTs GZIP compressed content to bytebin.
-     *
-     * @param buf the compressed content
-     * @param contentType the type of the content
-     * @return the key of the resultant content
-     * @throws IOException if an error occurs
-     */
-    public Content postContent(byte[] buf, MediaType contentType) throws IOException {
-        RequestBody body = RequestBody.create(contentType, buf);
+    private Content postContent(String contentType, Consumer<OutputStream> consumer) throws IOException {
+        URL url = new URL(this.url + "post");
+        HttpURLConnection connection = (HttpURLConnection) url.openConnection();
+        try {
+            connection.setConnectTimeout((int) TimeUnit.SECONDS.toMillis(10));
+            connection.setReadTimeout((int) TimeUnit.SECONDS.toMillis(10));
+
+            connection.setDoOutput(true);
+            connection.setRequestMethod("POST");
+            connection.setRequestProperty("Content-Type", contentType);
+            connection.setRequestProperty("User-Agent", this.userAgent);
+            connection.setRequestProperty("Content-Encoding", "gzip");
 
-        Request.Builder requestBuilder = new Request.Builder()
-                .url(this.url + "post")
-                .header("User-Agent", this.userAgent)
-                .header("Content-Encoding", "gzip");
+            connection.connect();
+            try (OutputStream output = connection.getOutputStream()) {
+                consumer.accept(output);
+            }
 
-        Request request = requestBuilder.post(body).build();
-        try (Response response = makeHttpRequest(request)) {
-            String key = response.header("Location");
+            String key = connection.getHeaderField("Location");
             if (key == null) {
                 throw new IllegalStateException("Key not returned");
             }
             return new Content(key);
+        } finally {
+            connection.getInputStream().close();
+            connection.disconnect();
         }
     }
 
-    public Content postContent(AbstractMessageLite<?, ?> proto, MediaType contentType) throws IOException {
-        ByteArrayOutputStream byteOut = new ByteArrayOutputStream();
-        try (OutputStream out = new GZIPOutputStream(byteOut)) {
-            proto.writeTo(out);
-        } catch (IOException e) {
-            throw new RuntimeException(e);
-        }
-        return postContent(byteOut.toByteArray(), contentType);
+    public Content postContent(AbstractMessageLite<?, ?> proto, String contentType) throws IOException {
+        return postContent(contentType, outputStream -> {
+            try (OutputStream out = new GZIPOutputStream(outputStream)) {
+                proto.writeTo(out);
+            } catch (IOException e) {
+                throw new RuntimeException(e);
+            }
+        });
     }
 
     public static final class Content {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/Compression.java b/spark-common/src/main/java/me/lucko/spark/common/util/Compression.java
index 9295c25..c8100e1 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/util/Compression.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/Compression.java
@@ -20,10 +20,6 @@
 
 package me.lucko.spark.common.util;
 
-import org.tukaani.xz.LZMA2Options;
-import org.tukaani.xz.LZMAOutputStream;
-import org.tukaani.xz.XZOutputStream;
-
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
@@ -46,35 +42,35 @@ public enum Compression {
             }
             return compressedFile;
         }
-    },
-    XZ {
-        @Override
-        public Path compress(Path file, LongConsumer progressHandler) throws IOException {
-            Path compressedFile = file.getParent().resolve(file.getFileName().toString() + ".xz");
-            try (InputStream in = Files.newInputStream(file)) {
-                try (OutputStream out = Files.newOutputStream(compressedFile)) {
-                    try (XZOutputStream compressionOut = new XZOutputStream(out, new LZMA2Options())) {
-                        copy(in, compressionOut, progressHandler);
-                    }
-                }
-            }
-            return compressedFile;
-        }
-    },
-    LZMA {
-        @Override
-        public Path compress(Path file, LongConsumer progressHandler) throws IOException {
-            Path compressedFile = file.getParent().resolve(file.getFileName().toString() + ".lzma");
-            try (InputStream in = Files.newInputStream(file)) {
-                try (OutputStream out = Files.newOutputStream(compressedFile)) {
-                    try (LZMAOutputStream compressionOut = new LZMAOutputStream(out, new LZMA2Options(), true)) {
-                        copy(in, compressionOut, progressHandler);
-                    }
-                }
-            }
-            return compressedFile;
-        }
     };
+    // XZ {
+    //     @Override
+    //     public Path compress(Path file, LongConsumer progressHandler) throws IOException {
+    //         Path compressedFile = file.getParent().resolve(file.getFileName().toString() + ".xz");
+    //         try (InputStream in = Files.newInputStream(file)) {
+    //             try (OutputStream out = Files.newOutputStream(compressedFile)) {
+    //                 try (XZOutputStream compressionOut = new XZOutputStream(out, new LZMA2Options())) {
+    //                     copy(in, compressionOut, progressHandler);
+    //                 }
+    //             }
+    //         }
+    //         return compressedFile;
+    //     }
+    // },
+    // LZMA {
+    //     @Override
+    //     public Path compress(Path file, LongConsumer progressHandler) throws IOException {
+    //         Path compressedFile = file.getParent().resolve(file.getFileName().toString() + ".lzma");
+    //         try (InputStream in = Files.newInputStream(file)) {
+    //             try (OutputStream out = Files.newOutputStream(compressedFile)) {
+    //                 try (LZMAOutputStream compressionOut = new LZMAOutputStream(out, new LZMA2Options(), true)) {
+    //                     copy(in, compressionOut, progressHandler);
+    //                 }
+    //             }
+    //         }
+    //         return compressedFile;
+    //     }
+    // };
 
     public abstract Path compress(Path file, LongConsumer progressHandler) throws IOException;
diff --git a/spark-common/src/main/resources/spark/linux/amd64-musl/libasyncProfiler.so b/spark-common/src/main/resources/spark/linux/amd64-musl/libasyncProfiler.so
new file mode 100755
index 0000000..0a08f7c
--- /dev/null
+++ b/spark-common/src/main/resources/spark/linux/amd64-musl/libasyncProfiler.so
Binary files differ
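
The reworked ThreadDumper.GameThread resolves the game thread lazily from a supplier on the first get() call, instead of requiring an explicit ensureSetup() call from the game thread itself. A minimal usage sketch (the wrapper class and the stand-in thread are illustrative, not from the commit):

    import me.lucko.spark.common.sampler.ThreadDumper;

    public class GameThreadUsageSketch {
        public static void main(String[] args) {
            // stand-in for the real server/client main thread (hypothetical)
            Thread gameThread = Thread.currentThread();

            // the supplier is not invoked until the first get() call, so the
            // dumper can be constructed before the game thread is known
            ThreadDumper.GameThread lazy = new ThreadDumper.GameThread(() -> gameThread);

            // resolves the thread once, then caches a Specific dumper for its id
            ThreadDumper dumper = lazy.get();
            System.out.println(dumper);
        }
    }

This lets platform plugins build the dumper at construction time and defer thread resolution until a profiler actually starts.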
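The new isLinuxMusl() check shells out to ldd and scans its output for "musl". A hypothetical alternative (not what the commit uses) would be to probe for musl's dynamic loader at its conventional install path, avoiding the subprocess:

    import java.nio.file.Files;
    import java.nio.file.Paths;

    public class MuslCheckSketch {
        // musl-based distros such as Alpine ship the loader at /lib/ld-musl-<arch>.so.1;
        // the exact paths checked here are an assumption, not exhaustive
        static boolean isLinuxMusl() {
            return Files.exists(Paths.get("/lib/ld-musl-x86_64.so.1"))
                    || Files.exists(Paths.get("/lib/ld-musl-aarch64.so.1"));
        }

        public static void main(String[] args) {
            System.out.println("musl libc detected: " + isLinuxMusl());
        }
    }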
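With OkHttp removed, uploads go through the JDK's built-in HttpURLConnection. A rough standalone sketch of the same request pattern follows; the class name, payload, content type, and target URL are illustrative assumptions, not code from the commit:

    import java.io.OutputStream;
    import java.net.HttpURLConnection;
    import java.net.URL;
    import java.nio.charset.StandardCharsets;
    import java.util.concurrent.TimeUnit;
    import java.util.zip.GZIPOutputStream;

    public class BytebinPostSketch {
        public static void main(String[] args) throws Exception {
            byte[] payload = "hello".getBytes(StandardCharsets.UTF_8);

            HttpURLConnection connection =
                    (HttpURLConnection) new URL("https://bytebin.lucko.me/post").openConnection();
            try {
                connection.setConnectTimeout((int) TimeUnit.SECONDS.toMillis(10));
                connection.setReadTimeout((int) TimeUnit.SECONDS.toMillis(10));
                connection.setDoOutput(true);
                connection.setRequestMethod("POST");
                connection.setRequestProperty("Content-Type", "text/plain");
                connection.setRequestProperty("User-Agent", "spark-plugin");
                connection.setRequestProperty("Content-Encoding", "gzip");

                // the body is written gzip-compressed, matching the Content-Encoding header
                try (OutputStream out = new GZIPOutputStream(connection.getOutputStream())) {
                    out.write(payload);
                }

                // bytebin returns the key of the stored content in the Location header
                String key = connection.getHeaderField("Location");
                System.out.println("posted: " + key);
            } finally {
                connection.getInputStream().close();
                connection.disconnect();
            }
        }
    }

Reading the Location header is how bytebin hands back the generated key; closing the input stream in the finally block drains the response so the connection can be reused, mirroring what the rewritten client does.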