author     lucko <git@lucko.me>                      2022-12-27 09:17:54 +0000
committer  GitHub <noreply@github.com>               2022-12-27 09:17:54 +0000
commit     e5b278047ccb7bc6b301d787474c51d162911867 (patch)
tree       11bba64e8f28ce8b83adc05252b75f17e2ccbf6a /spark-common/src/main/java/me/lucko/spark/common/util
parent     4a16a1a2f4eb09f706b4a541e3d31618de29420b (diff)
parent     1075665def4a41cf0064255a6da1d1a652f5d473 (diff)
download   spark-e5b278047ccb7bc6b301d787474c51d162911867.tar.gz
           spark-e5b278047ccb7bc6b301d787474c51d162911867.tar.bz2
           spark-e5b278047ccb7bc6b301d787474c51d162911867.zip
Merge pull request #284 from embeddedt/forge-1.7.10
Align 1.7.10 with master
Diffstat (limited to 'spark-common/src/main/java/me/lucko/spark/common/util')
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/util/BytebinClient.java        75
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/util/ClassSourceLookup.java   241
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/util/Compression.java          60
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/util/Configuration.java        70
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/util/FormatUtil.java           20
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/util/IndexedListBuilder.java   33  (renamed from spark-common/src/main/java/me/lucko/spark/common/util/AbstractHttpClient.java)
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/util/LinuxProc.java            84
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/util/SparkPlaceholder.java    191
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/util/TemporaryFiles.java       81
9 files changed, 423 insertions, 432 deletions
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/BytebinClient.java b/spark-common/src/main/java/me/lucko/spark/common/util/BytebinClient.java
index c2ca1b1..e69b94e 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/util/BytebinClient.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/BytebinClient.java
@@ -22,73 +22,66 @@ package me.lucko.spark.common.util;
 
 import com.google.protobuf.AbstractMessageLite;
 
-import okhttp3.MediaType;
-import okhttp3.OkHttpClient;
-import okhttp3.Request;
-import okhttp3.RequestBody;
-import okhttp3.Response;
-
-import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.OutputStream;
+import java.net.HttpURLConnection;
+import java.net.URL;
+import java.util.concurrent.TimeUnit;
+import java.util.function.Consumer;
 import java.util.zip.GZIPOutputStream;
 
 /**
  * Utility for posting content to bytebin.
  */
-public class BytebinClient extends AbstractHttpClient {
+public class BytebinClient {
 
     /** The bytebin URL */
     private final String url;
     /** The client user agent */
     private final String userAgent;
 
-    /**
-     * Creates a new bytebin instance
-     *
-     * @param url the bytebin url
-     * @param userAgent the client user agent string
-     */
-    public BytebinClient(OkHttpClient okHttpClient, String url, String userAgent) {
-        super(okHttpClient);
+    public BytebinClient(String url, String userAgent) {
         this.url = url + (url.endsWith("/") ? "" : "/");
         this.userAgent = userAgent;
     }
 
-    /**
-     * POSTs GZIP compressed content to bytebin.
-     *
-     * @param buf the compressed content
-     * @param contentType the type of the content
-     * @return the key of the resultant content
-     * @throws IOException if an error occurs
-     */
-    public Content postContent(byte[] buf, MediaType contentType) throws IOException {
-        RequestBody body = RequestBody.create(contentType, buf);
+    private Content postContent(String contentType, Consumer<OutputStream> consumer) throws IOException {
+        URL url = new URL(this.url + "post");
+        HttpURLConnection connection = (HttpURLConnection) url.openConnection();
+        try {
+            connection.setConnectTimeout((int) TimeUnit.SECONDS.toMillis(10));
+            connection.setReadTimeout((int) TimeUnit.SECONDS.toMillis(10));
+
+            connection.setDoOutput(true);
+            connection.setRequestMethod("POST");
+            connection.setRequestProperty("Content-Type", contentType);
+            connection.setRequestProperty("User-Agent", this.userAgent);
+            connection.setRequestProperty("Content-Encoding", "gzip");
 
-        Request.Builder requestBuilder = new Request.Builder()
-                .url(this.url + "post")
-                .header("User-Agent", this.userAgent)
-                .header("Content-Encoding", "gzip");
+            connection.connect();
+            try (OutputStream output = connection.getOutputStream()) {
+                consumer.accept(output);
+            }
 
-        Request request = requestBuilder.post(body).build();
-        try (Response response = makeHttpRequest(request)) {
-            String key = response.header("Location");
+            String key = connection.getHeaderField("Location");
             if (key == null) {
                 throw new IllegalStateException("Key not returned");
             }
             return new Content(key);
+        } finally {
+            connection.getInputStream().close();
+            connection.disconnect();
         }
     }
 
-    public Content postContent(AbstractMessageLite<?, ?> proto, MediaType contentType) throws IOException {
-        ByteArrayOutputStream byteOut = new ByteArrayOutputStream();
-        try (OutputStream out = new GZIPOutputStream(byteOut)) {
-            proto.writeTo(out);
-        } catch (IOException e) {
-            throw new RuntimeException(e);
-        }
-        return postContent(byteOut.toByteArray(), contentType);
+    public Content postContent(AbstractMessageLite<?, ?> proto, String contentType) throws IOException {
+        return postContent(contentType, outputStream -> {
+            try (OutputStream out = new GZIPOutputStream(outputStream)) {
+                proto.writeTo(out);
+            } catch (IOException e) {
+                throw new RuntimeException(e);
+            }
+        });
     }
 
     public static final class Content {
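The rewrite above drops the OkHttp dependency in favour of java.net.HttpURLConnection and streams the gzipped protobuf body straight onto the connection instead of buffering it. A minimal usage sketch (the URL, user agent, and content-type strings are illustrative, `proto` is an assumed in-scope protobuf message, and Content's accessor is not shown in this hunk):

    BytebinClient client = new BytebinClient("https://bytebin.lucko.me", "spark-plugin");
    BytebinClient.Content content = client.postContent(proto, "application/x-spark-sampler");
    // `content` wraps the key that bytebin returned in its "Location" response header
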
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/ClassSourceLookup.java b/spark-common/src/main/java/me/lucko/spark/common/util/ClassSourceLookup.java
deleted file mode 100644
index bd9ec37..0000000
--- a/spark-common/src/main/java/me/lucko/spark/common/util/ClassSourceLookup.java
+++ /dev/null
@@ -1,241 +0,0 @@
-/*
- * This file is part of spark.
- *
- * Copyright (c) lucko (Luck) <luck@lucko.me>
- * Copyright (c) contributors
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-package me.lucko.spark.common.util;
-
-import me.lucko.spark.common.sampler.node.StackTraceNode;
-import me.lucko.spark.common.sampler.node.ThreadNode;
-
-import org.checkerframework.checker.nullness.qual.Nullable;
-
-import java.io.IOException;
-import java.net.MalformedURLException;
-import java.net.URISyntaxException;
-import java.net.URL;
-import java.net.URLClassLoader;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.security.CodeSource;
-import java.security.ProtectionDomain;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Objects;
-
-/**
- * A function which defines the source of given {@link Class}es.
- */
-public interface ClassSourceLookup {
-
-    /**
-     * Identify the given class.
-     *
-     * @param clazz the class
-     * @return the source of the class
-     */
-    @Nullable String identify(Class<?> clazz) throws Exception;
-
-    /**
-     * A no-operation {@link ClassSourceLookup}.
-     */
-    ClassSourceLookup NO_OP = new ClassSourceLookup() {
-        @Override
-        public @Nullable String identify(Class<?> clazz) {
-            return null;
-        }
-    };
-
-    /**
-     * A {@link ClassSourceLookup} which identifies classes based on their {@link ClassLoader}.
-     */
-    abstract class ByClassLoader implements ClassSourceLookup {
-
-        public abstract @Nullable String identify(ClassLoader loader) throws Exception;
-
-        @Override
-        public final @Nullable String identify(Class<?> clazz) throws Exception {
-            ClassLoader loader = clazz.getClassLoader();
-            while (loader != null) {
-                String source = identify(loader);
-                if (source != null) {
-                    return source;
-                }
-                loader = loader.getParent();
-            }
-            return null;
-        }
-    }
-
-    /**
-     * A {@link ClassSourceLookup} which identifies classes based on URL.
-     */
-    interface ByUrl extends ClassSourceLookup {
-
-        default String identifyUrl(URL url) throws URISyntaxException, MalformedURLException {
-            Path path = null;
-
-            String protocol = url.getProtocol();
-            if (protocol.equals("file")) {
-                path = Paths.get(url.toURI());
-            } else if (protocol.equals("jar")) {
-                URL innerUrl = new URL(url.getPath());
-                path = Paths.get(innerUrl.getPath().split("!")[0]);
-            }
-
-            if (path != null) {
-                return identifyFile(path.toAbsolutePath().normalize());
-            }
-
-            return null;
-        }
-
-        default String identifyFile(Path path) {
-            return identifyFileName(path.getFileName().toString());
-        }
-
-        default String identifyFileName(String fileName) {
-            return fileName.endsWith(".jar") ? fileName.substring(0, fileName.length() - 4) : null;
-        }
-    }
-
-    /**
-     * A {@link ClassSourceLookup} which identifies classes based on the first URL in a {@link URLClassLoader}.
-     */
-    class ByFirstUrlSource extends ByClassLoader implements ByUrl {
-        @Override
-        public @Nullable String identify(ClassLoader loader) throws IOException, URISyntaxException {
-            if (loader instanceof URLClassLoader) {
-                URLClassLoader urlClassLoader = (URLClassLoader) loader;
-                URL[] urls = urlClassLoader.getURLs();
-                if (urls.length == 0) {
-                    return null;
-                }
-                return identifyUrl(urls[0]);
-            }
-            return null;
-        }
-    }
-
-    /**
-     * A {@link ClassSourceLookup} which identifies classes based on their {@link ProtectionDomain#getCodeSource()}.
-     */
-    class ByCodeSource implements ClassSourceLookup, ByUrl {
-        @Override
-        public @Nullable String identify(Class<?> clazz) throws URISyntaxException, MalformedURLException {
-            ProtectionDomain protectionDomain = clazz.getProtectionDomain();
-            if (protectionDomain == null) {
-                return null;
-            }
-            CodeSource codeSource = protectionDomain.getCodeSource();
-            if (codeSource == null) {
-                return null;
-            }
-
-            URL url = codeSource.getLocation();
-            return url == null ? null : identifyUrl(url);
-        }
-    }
-
-    interface Visitor {
-        void visit(ThreadNode node);
-
-        boolean hasMappings();
-
-        Map<String, String> getMapping();
-    }
-
-    static Visitor createVisitor(ClassSourceLookup lookup) {
-        if (lookup == ClassSourceLookup.NO_OP) {
-            return NoOpVisitor.INSTANCE; // don't bother!
-        }
-        return new VisitorImpl(lookup);
-    }
-
-    enum NoOpVisitor implements Visitor {
-        INSTANCE;
-
-        @Override
-        public void visit(ThreadNode node) {
-
-        }
-
-        @Override
-        public boolean hasMappings() {
-            return false;
-        }
-
-        @Override
-        public Map<String, String> getMapping() {
-            return Collections.emptyMap();
-        }
-    }
-
-    /**
-     * Visitor which scans {@link StackTraceNode}s and accumulates class identities.
-     */
-    class VisitorImpl implements Visitor {
-        private final ClassSourceLookup lookup;
-        private final ClassFinder classFinder = new ClassFinder();
-
-        // class name --> identifier (plugin name)
-        private final Map<String, String> map = new HashMap<>();
-
-        VisitorImpl(ClassSourceLookup lookup) {
-            this.lookup = lookup;
-        }
-
-        @Override
-        public void visit(ThreadNode node) {
-            for (StackTraceNode child : node.getChildren()) {
-                visitStackNode(child);
-            }
-        }
-
-        @Override
-        public boolean hasMappings() {
-            return !this.map.isEmpty();
-        }
-
-        @Override
-        public Map<String, String> getMapping() {
-            this.map.values().removeIf(Objects::isNull);
-            return this.map;
-        }
-
-        private void visitStackNode(StackTraceNode node) {
-            String className = node.getClassName();
-            if (!this.map.containsKey(className)) {
-                try {
-                    Class<?> clazz = this.classFinder.findClass(className);
-                    Objects.requireNonNull(clazz);
-                    this.map.put(className, this.lookup.identify(clazz));
-                } catch (Throwable e) {
-                    this.map.put(className, null);
-                }
-            }
-
-            // recursively
-            for (StackTraceNode child : node.getChildren()) {
-                visitStackNode(child);
-            }
-        }
-    }
-
-}
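The core trick the deleted ByCodeSource lookup relied on, mapping a class to the jar it was loaded from via its ProtectionDomain, can be sketched independently of spark (a hedged, simplified re-implementation; it skips the jar:/file: URL handling the original performed):

    import java.io.File;
    import java.security.CodeSource;

    static String sourceJarOf(Class<?> clazz) {
        CodeSource cs = clazz.getProtectionDomain().getCodeSource();
        if (cs == null || cs.getLocation() == null) {
            return null; // e.g. JDK core classes have no code source
        }
        String name = new File(cs.getLocation().getPath()).getName();
        // strip the ".jar" suffix, as identifyFileName() did
        return name.endsWith(".jar") ? name.substring(0, name.length() - 4) : null;
    }
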
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/Compression.java b/spark-common/src/main/java/me/lucko/spark/common/util/Compression.java
index 9295c25..c8100e1 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/util/Compression.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/Compression.java
@@ -20,10 +20,6 @@ package me.lucko.spark.common.util;
 
-import org.tukaani.xz.LZMA2Options;
-import org.tukaani.xz.LZMAOutputStream;
-import org.tukaani.xz.XZOutputStream;
-
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
@@ -46,35 +42,35 @@ public enum Compression {
             }
             return compressedFile;
         }
-    },
-    XZ {
-        @Override
-        public Path compress(Path file, LongConsumer progressHandler) throws IOException {
-            Path compressedFile = file.getParent().resolve(file.getFileName().toString() + ".xz");
-            try (InputStream in = Files.newInputStream(file)) {
-                try (OutputStream out = Files.newOutputStream(compressedFile)) {
-                    try (XZOutputStream compressionOut = new XZOutputStream(out, new LZMA2Options())) {
-                        copy(in, compressionOut, progressHandler);
-                    }
-                }
-            }
-            return compressedFile;
-        }
-    },
-    LZMA {
-        @Override
-        public Path compress(Path file, LongConsumer progressHandler) throws IOException {
-            Path compressedFile = file.getParent().resolve(file.getFileName().toString() + ".lzma");
-            try (InputStream in = Files.newInputStream(file)) {
-                try (OutputStream out = Files.newOutputStream(compressedFile)) {
-                    try (LZMAOutputStream compressionOut = new LZMAOutputStream(out, new LZMA2Options(), true)) {
-                        copy(in, compressionOut, progressHandler);
-                    }
-                }
-            }
-            return compressedFile;
-        }
     };
+    // XZ {
+    //     @Override
+    //     public Path compress(Path file, LongConsumer progressHandler) throws IOException {
+    //         Path compressedFile = file.getParent().resolve(file.getFileName().toString() + ".xz");
+    //         try (InputStream in = Files.newInputStream(file)) {
+    //             try (OutputStream out = Files.newOutputStream(compressedFile)) {
+    //                 try (XZOutputStream compressionOut = new XZOutputStream(out, new LZMA2Options())) {
+    //                     copy(in, compressionOut, progressHandler);
+    //                 }
+    //             }
+    //         }
+    //         return compressedFile;
+    //     }
+    // },
+    // LZMA {
+    //     @Override
+    //     public Path compress(Path file, LongConsumer progressHandler) throws IOException {
+    //         Path compressedFile = file.getParent().resolve(file.getFileName().toString() + ".lzma");
+    //         try (InputStream in = Files.newInputStream(file)) {
+    //             try (OutputStream out = Files.newOutputStream(compressedFile)) {
+    //                 try (LZMAOutputStream compressionOut = new LZMAOutputStream(out, new LZMA2Options(), true)) {
+    //                     copy(in, compressionOut, progressHandler);
+    //                 }
+    //             }
+    //         }
+    //         return compressedFile;
+    //     }
+    // };
 
     public abstract Path compress(Path file, LongConsumer progressHandler) throws IOException;
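This change comments out the XZ and LZMA variants (which need the org.tukaani.xz library), leaving GZIP as the only active constant. The GZIP path itself is outside the hunk; a standalone sketch of what it presumably does, written by analogy with the commented-out variants and the copy() helper (assumed, not copied from the diff):

    static Path gzipCompress(Path file, LongConsumer progressHandler) throws IOException {
        Path compressedFile = file.getParent().resolve(file.getFileName().toString() + ".gz");
        try (InputStream in = Files.newInputStream(file);
             OutputStream out = new GZIPOutputStream(Files.newOutputStream(compressedFile))) {
            byte[] buf = new byte[8192];
            long total = 0;
            int read;
            while ((read = in.read(buf)) != -1) {
                out.write(buf, 0, read);
                total += read;
                progressHandler.accept(total); // mirrors copy()'s progress reporting
            }
        }
        return compressedFile;
    }
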
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/Configuration.java b/spark-common/src/main/java/me/lucko/spark/common/util/Configuration.java
index 7588645..32f3bc6 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/util/Configuration.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/Configuration.java
@@ -20,32 +20,58 @@ package me.lucko.spark.common.util;
 
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
 import com.google.gson.JsonElement;
 import com.google.gson.JsonObject;
-import com.google.gson.JsonParser;
 import com.google.gson.JsonPrimitive;
 
 import java.io.BufferedReader;
+import java.io.BufferedWriter;
 import java.io.IOException;
 import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Path;
 
 public final class Configuration {
-    private static final JsonParser PARSER = new JsonParser();
+    private static final Gson GSON = new GsonBuilder().setPrettyPrinting().create();
 
-    private final JsonObject root;
+    private final Path file;
+    private JsonObject root;
 
     public Configuration(Path file) {
+        this.file = file;
+        load();
+    }
+
+    public void load() {
         JsonObject root = null;
-        if (Files.exists(file)) {
-            try (BufferedReader reader = Files.newBufferedReader(file, StandardCharsets.UTF_8)) {
-                root = PARSER.parse(reader).getAsJsonObject();
+        if (Files.exists(this.file)) {
+            try (BufferedReader reader = Files.newBufferedReader(this.file, StandardCharsets.UTF_8)) {
+                root = GSON.fromJson(reader, JsonObject.class);
             } catch (IOException e) {
                 e.printStackTrace();
             }
         }
-        this.root = root != null ? root : new JsonObject();
+        if (root == null) {
+            root = new JsonObject();
+            root.addProperty("_header", "spark configuration file - https://spark.lucko.me/docs/Configuration");
+        }
+        this.root = root;
+    }
+
+    public void save() {
+        try {
+            Files.createDirectories(this.file.getParent());
+        } catch (IOException e) {
+            // ignore
+        }
+
+        try (BufferedWriter writer = Files.newBufferedWriter(this.file, StandardCharsets.UTF_8)) {
+            GSON.toJson(this.root, writer);
+        } catch (IOException e) {
+            e.printStackTrace();
+        }
     }
 
     public String getString(String path, String def) {
@@ -67,4 +93,34 @@ public final class Configuration {
         return val.isBoolean() ? val.getAsBoolean() : def;
     }
 
+    public int getInteger(String path, int def) {
+        JsonElement el = this.root.get(path);
+        if (el == null || !el.isJsonPrimitive()) {
+            return def;
+        }
+
+        JsonPrimitive val = el.getAsJsonPrimitive();
+        return val.isNumber() ? val.getAsInt() : def;
+    }
+
+    public void setString(String path, String value) {
+        this.root.add(path, new JsonPrimitive(value));
+    }
+
+    public void setBoolean(String path, boolean value) {
+        this.root.add(path, new JsonPrimitive(value));
+    }
+
+    public void setInteger(String path, int value) {
+        this.root.add(path, new JsonPrimitive(value));
+    }
+
+    public boolean contains(String path) {
+        return this.root.has(path);
+    }
+
+    public void remove(String path) {
+        this.root.remove(path);
+    }
+
 }
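With the new load()/save() pair the configuration becomes a mutable, persistent JSON file rather than a read-only one. A usage sketch (the path and keys are illustrative; getBoolean's signature is inferred from the visible tail of its body):

    Configuration config = new Configuration(Paths.get("plugins/spark/config.json"));
    boolean enabled = config.getBoolean("backgroundProfiler", true);
    config.setInteger("backgroundProfilerMaxDuration", 3600);
    config.save(); // pretty-printed by the shared Gson instance
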
".lzma"); + // try (InputStream in = Files.newInputStream(file)) { + // try (OutputStream out = Files.newOutputStream(compressedFile)) { + // try (LZMAOutputStream compressionOut = new LZMAOutputStream(out, new LZMA2Options(), true)) { + // copy(in, compressionOut, progressHandler); + // } + // } + // } + // return compressedFile; + // } + // }; public abstract Path compress(Path file, LongConsumer progressHandler) throws IOException; diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/Configuration.java b/spark-common/src/main/java/me/lucko/spark/common/util/Configuration.java index 7588645..32f3bc6 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/util/Configuration.java +++ b/spark-common/src/main/java/me/lucko/spark/common/util/Configuration.java @@ -20,32 +20,58 @@ package me.lucko.spark.common.util; +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; import com.google.gson.JsonElement; import com.google.gson.JsonObject; -import com.google.gson.JsonParser; import com.google.gson.JsonPrimitive; import java.io.BufferedReader; +import java.io.BufferedWriter; import java.io.IOException; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; public final class Configuration { - private static final JsonParser PARSER = new JsonParser(); + private static final Gson GSON = new GsonBuilder().setPrettyPrinting().create(); - private final JsonObject root; + private final Path file; + private JsonObject root; public Configuration(Path file) { + this.file = file; + load(); + } + + public void load() { JsonObject root = null; - if (Files.exists(file)) { - try (BufferedReader reader = Files.newBufferedReader(file, StandardCharsets.UTF_8)) { - root = PARSER.parse(reader).getAsJsonObject(); + if (Files.exists(this.file)) { + try (BufferedReader reader = Files.newBufferedReader(this.file, StandardCharsets.UTF_8)) { + root = GSON.fromJson(reader, JsonObject.class); } catch (IOException e) { e.printStackTrace(); } } - this.root = root != null ? root : new JsonObject(); + if (root == null) { + root = new JsonObject(); + root.addProperty("_header", "spark configuration file - https://spark.lucko.me/docs/Configuration"); + } + this.root = root; + } + + public void save() { + try { + Files.createDirectories(this.file.getParent()); + } catch (IOException e) { + // ignore + } + + try (BufferedWriter writer = Files.newBufferedWriter(this.file, StandardCharsets.UTF_8)) { + GSON.toJson(this.root, writer); + } catch (IOException e) { + e.printStackTrace(); + } } public String getString(String path, String def) { @@ -67,4 +93,34 @@ public final class Configuration { return val.isBoolean() ? val.getAsBoolean() : def; } + public int getInteger(String path, int def) { + JsonElement el = this.root.get(path); + if (el == null || !el.isJsonPrimitive()) { + return def; + } + + JsonPrimitive val = el.getAsJsonPrimitive(); + return val.isBoolean() ? 
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/AbstractHttpClient.java b/spark-common/src/main/java/me/lucko/spark/common/util/IndexedListBuilder.java
index 8ece3d4..b2315f9 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/util/AbstractHttpClient.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/IndexedListBuilder.java
@@ -20,27 +20,24 @@ package me.lucko.spark.common.util;
 
-import okhttp3.OkHttpClient;
-import okhttp3.Request;
-import okhttp3.Response;
+import java.util.ArrayList;
+import java.util.List;
 
-import java.io.IOException;
-
-public class AbstractHttpClient {
-
-    /** The http client */
-    protected final OkHttpClient okHttp;
+/**
+ * List builder that returns the index of the inserted element.
+ *
+ * @param <T> generic type
+ */
+public class IndexedListBuilder<T> {
+    private int i = 0;
+    private final List<T> nodes = new ArrayList<>();
 
-    public AbstractHttpClient(OkHttpClient okHttp) {
-        this.okHttp = okHttp;
+    public int add(T node) {
+        this.nodes.add(node);
+        return this.i++;
     }
 
-    protected Response makeHttpRequest(Request request) throws IOException {
-        Response response = this.okHttp.newCall(request).execute();
-        if (!response.isSuccessful()) {
-            response.close();
-            throw new RuntimeException("Request was unsuccessful: " + response.code() + " - " + response.message());
-        }
-        return response;
+    public List<T> build() {
+        return this.nodes;
     }
 }
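The rename replaces the now-unused OkHttp wrapper with a small builder whose add() hands back the index the element landed at, useful when serializing node trees that cross-reference entries by position. For example:

    IndexedListBuilder<String> builder = new IndexedListBuilder<>();
    int first = builder.add("alpha");   // 0
    int second = builder.add("beta");   // 1
    List<String> nodes = builder.build(); // [alpha, beta]
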
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/LinuxProc.java b/spark-common/src/main/java/me/lucko/spark/common/util/LinuxProc.java
deleted file mode 100644
index 7d688d7..0000000
--- a/spark-common/src/main/java/me/lucko/spark/common/util/LinuxProc.java
+++ /dev/null
@@ -1,84 +0,0 @@
-/*
- * This file is part of spark.
- *
- * Copyright (c) lucko (Luck) <luck@lucko.me>
- * Copyright (c) contributors
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-package me.lucko.spark.common.util;
-
-import org.checkerframework.checker.nullness.qual.NonNull;
-import org.checkerframework.checker.nullness.qual.Nullable;
-
-import java.io.IOException;
-import java.nio.charset.StandardCharsets;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.util.Collections;
-import java.util.List;
-
-/**
- * Utility for reading from /proc/ on Linux systems.
- */
-public enum LinuxProc {
-
-    /**
-     * Information about the system CPU.
-     */
-    CPUINFO("/proc/cpuinfo"),
-
-    /**
-     * Information about the system memory.
-     */
-    MEMINFO("/proc/meminfo"),
-
-    /**
-     * Information about the system network usage.
-     */
-    NET_DEV("/proc/net/dev");
-
-    private final Path path;
-
-    LinuxProc(String path) {
-        this.path = resolvePath(path);
-    }
-
-    private static @Nullable Path resolvePath(String path) {
-        try {
-            Path p = Paths.get(path);
-            if (Files.isReadable(p)) {
-                return p;
-            }
-        } catch (Exception e) {
-            // ignore
-        }
-        return null;
-    }
-
-    public @NonNull List<String> read() {
-        if (this.path != null) {
-            try {
-                return Files.readAllLines(this.path, StandardCharsets.UTF_8);
-            } catch (IOException e) {
-                // ignore
-            }
-        }
-
-        return Collections.emptyList();
-    }
-
-}
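The deleted LinuxProc enum was a fail-soft reader for /proc pseudo-files: it returned an empty list rather than throwing when the platform or file was unavailable. The same contract in plain java.nio, as a hedged sketch:

    List<String> cpuinfo;
    try {
        cpuinfo = Files.readAllLines(Paths.get("/proc/cpuinfo"), StandardCharsets.UTF_8);
    } catch (IOException | RuntimeException e) {
        cpuinfo = Collections.emptyList(); // mirrors LinuxProc's fail-soft behaviour
    }
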
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/SparkPlaceholder.java b/spark-common/src/main/java/me/lucko/spark/common/util/SparkPlaceholder.java
new file mode 100644
index 0000000..be5bbc2
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/SparkPlaceholder.java
@@ -0,0 +1,191 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.util;
+
+import me.lucko.spark.common.SparkPlatform;
+import me.lucko.spark.common.monitor.cpu.CpuMonitor;
+import me.lucko.spark.common.monitor.tick.TickStatistics;
+
+import net.kyori.adventure.text.Component;
+import net.kyori.adventure.text.TextComponent;
+import net.kyori.adventure.text.serializer.legacy.LegacyComponentSerializer;
+
+import java.util.Locale;
+import java.util.function.BiFunction;
+
+public enum SparkPlaceholder {
+
+    TPS((platform, arg) -> {
+        TickStatistics tickStatistics = platform.getTickStatistics();
+        if (tickStatistics == null) {
+            return null;
+        }
+
+        if (arg == null) {
+            return Component.text()
+                    .append(StatisticFormatter.formatTps(tickStatistics.tps5Sec())).append(Component.text(", "))
+                    .append(StatisticFormatter.formatTps(tickStatistics.tps10Sec())).append(Component.text(", "))
+                    .append(StatisticFormatter.formatTps(tickStatistics.tps1Min())).append(Component.text(", "))
+                    .append(StatisticFormatter.formatTps(tickStatistics.tps5Min())).append(Component.text(", "))
+                    .append(StatisticFormatter.formatTps(tickStatistics.tps15Min()))
+                    .build();
+        }
+
+        switch (arg) {
+            case "5s":
+                return StatisticFormatter.formatTps(tickStatistics.tps5Sec());
+            case "10s":
+                return StatisticFormatter.formatTps(tickStatistics.tps10Sec());
+            case "1m":
+                return StatisticFormatter.formatTps(tickStatistics.tps1Min());
+            case "5m":
+                return StatisticFormatter.formatTps(tickStatistics.tps5Min());
+            case "15m":
+                return StatisticFormatter.formatTps(tickStatistics.tps15Min());
+        }
+
+        return null;
+    }),
+
+    TICKDURATION((platform, arg) -> {
+        TickStatistics tickStatistics = platform.getTickStatistics();
+        if (tickStatistics == null || !tickStatistics.isDurationSupported()) {
+            return null;
+        }
+
+        if (arg == null) {
+            return Component.text()
+                    .append(StatisticFormatter.formatTickDurations(tickStatistics.duration10Sec())).append(Component.text(";  "))
+                    .append(StatisticFormatter.formatTickDurations(tickStatistics.duration1Min()))
+                    .build();
+        }
+
+        switch (arg) {
+            case "10s":
+                return StatisticFormatter.formatTickDurations(tickStatistics.duration10Sec());
+            case "1m":
+                return StatisticFormatter.formatTickDurations(tickStatistics.duration1Min());
+        }
+
+        return null;
+    }),
+
+    CPU_SYSTEM((platform, arg) -> {
+        if (arg == null) {
+            return Component.text()
+                    .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad10SecAvg())).append(Component.text(", "))
+                    .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad1MinAvg())).append(Component.text(", "))
+                    .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad15MinAvg()))
+                    .build();
+        }
+
+        switch (arg) {
+            case "10s":
+                return StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad10SecAvg());
+            case "1m":
+                return StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad1MinAvg());
+            case "15m":
+                return StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad15MinAvg());
+        }
+
+        return null;
+    }),
+
+    CPU_PROCESS((platform, arg) -> {
+        if (arg == null) {
+            return Component.text()
+                    .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad10SecAvg())).append(Component.text(", "))
+                    .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad1MinAvg())).append(Component.text(", "))
+                    .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad15MinAvg()))
+                    .build();
+        }
+
+        switch (arg) {
+            case "10s":
+                return StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad10SecAvg());
+            case "1m":
+                return StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad1MinAvg());
+            case "15m":
+                return StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad15MinAvg());
+        }
+
+        return null;
+    });
+
+    private final String name;
+    private final BiFunction<SparkPlatform, String, TextComponent> function;
+
+    SparkPlaceholder(BiFunction<SparkPlatform, String, TextComponent> function) {
+        this.name = name().toLowerCase(Locale.ROOT);
+        this.function = function;
+    }
+
+    public String getName() {
+        return this.name;
+    }
+
+    public TextComponent resolve(SparkPlatform platform, String arg) {
+        return this.function.apply(platform, arg);
+    }
+
+    public static TextComponent resolveComponent(SparkPlatform platform, String placeholder) {
+        String[] parts = placeholder.split("_");
+
+        if (parts.length == 0) {
+            return null;
+        }
+
+        String label = parts[0];
+
+        if (label.equals("tps")) {
+            String arg = parts.length < 2 ? null : parts[1];
+            return TPS.resolve(platform, arg);
+        }
+
+        if (label.equals("tickduration")) {
+            String arg = parts.length < 2 ? null : parts[1];
+            return TICKDURATION.resolve(platform, arg);
+        }
+
+        if (label.equals("cpu") && parts.length >= 2) {
+            String type = parts[1];
+            String arg = parts.length < 3 ? null : parts[2];
+
+            if (type.equals("system")) {
+                return CPU_SYSTEM.resolve(platform, arg);
+            }
+            if (type.equals("process")) {
+                return CPU_PROCESS.resolve(platform, arg);
+            }
+        }
+
+        return null;
+    }
+
+    public static String resolveFormattingCode(SparkPlatform platform, String placeholder) {
+        TextComponent result = resolveComponent(platform, placeholder);
+        if (result == null) {
+            return null;
+        }
+        return LegacyComponentSerializer.legacySection().serialize(result);
+    }
+
+}
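resolveComponent splits the placeholder string on underscores: the first token selects the statistic and the remainder selects the time window, with no window meaning "all windows". A resolution sketch (`platform` is an assumed in-scope SparkPlatform):

    String tps1m = SparkPlaceholder.resolveFormattingCode(platform, "tps_1m");        // one window
    String tpsAll = SparkPlaceholder.resolveFormattingCode(platform, "tps");          // all five windows
    String sysCpu = SparkPlaceholder.resolveFormattingCode(platform, "cpu_system_10s");
    // returns null for unknown placeholders, or when tick statistics are unavailable
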
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/TemporaryFiles.java b/spark-common/src/main/java/me/lucko/spark/common/util/TemporaryFiles.java
index 8a4a621..91a474c 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/util/TemporaryFiles.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/TemporaryFiles.java
@@ -20,10 +20,18 @@ package me.lucko.spark.common.util;
 
+import com.google.common.collect.ImmutableList;
+
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.FileSystems;
 import java.nio.file.Files;
 import java.nio.file.Path;
+import java.nio.file.attribute.FileAttribute;
+import java.nio.file.attribute.PosixFilePermission;
+import java.nio.file.attribute.PosixFilePermissions;
 import java.util.Collections;
+import java.util.EnumSet;
 import java.util.HashSet;
 import java.util.Iterator;
 import java.util.Set;
@@ -32,23 +40,47 @@ import java.util.Set;
 
 /**
  * Utility for handling temporary files.
 */
 public final class TemporaryFiles {
 
-    private TemporaryFiles() {}
-
-    private static final Set<Path> DELETE_SET = Collections.synchronizedSet(new HashSet<>());
+    public static final FileAttribute<?>[] OWNER_ONLY_FILE_PERMISSIONS;
+
+    static {
+        boolean isPosix = FileSystems.getDefault().supportedFileAttributeViews().contains("posix");
+        if (isPosix) {
+            OWNER_ONLY_FILE_PERMISSIONS = new FileAttribute[]{PosixFilePermissions.asFileAttribute(EnumSet.of(
+                    PosixFilePermission.OWNER_READ, PosixFilePermission.OWNER_WRITE
+            ))};
+        } else {
+            OWNER_ONLY_FILE_PERMISSIONS = new FileAttribute[0];
+        }
+    }
+
+    private final Path tmpDirectory;
+    private final Set<Path> files = Collections.synchronizedSet(new HashSet<>());
 
-    public static Path create(String prefix, String suffix) throws IOException {
-        return register(Files.createTempFile(prefix, suffix));
+    public TemporaryFiles(Path tmpDirectory) {
+        this.tmpDirectory = tmpDirectory;
     }
 
-    public static Path register(Path path) {
+    public Path create(String prefix, String suffix) throws IOException {
+        Path file;
+        if (ensureDirectoryIsReady()) {
+            String name = prefix + Long.toHexString(System.nanoTime()) + suffix;
+            file = Files.createFile(this.tmpDirectory.resolve(name), OWNER_ONLY_FILE_PERMISSIONS);
+        } else {
+            file = Files.createTempFile(prefix, suffix);
+        }
+        return register(file);
+    }
+
+    public Path register(Path path) {
         path.toFile().deleteOnExit();
-        DELETE_SET.add(path);
+        this.files.add(path);
         return path;
     }
 
-    public static void deleteTemporaryFiles() {
-        synchronized (DELETE_SET) {
-            for (Iterator<Path> iterator = DELETE_SET.iterator(); iterator.hasNext(); ) {
+    public void deleteTemporaryFiles() {
+        synchronized (this.files) {
+            for (Iterator<Path> iterator = this.files.iterator(); iterator.hasNext(); ) {
                 Path path = iterator.next();
                 try {
                     Files.deleteIfExists(path);
@@ -60,4 +92,35 @@ public final class TemporaryFiles {
             }
         }
     }
 
+    private boolean ensureDirectoryIsReady() {
+        if (Boolean.parseBoolean(System.getProperty("spark.useOsTmpDir", "false"))) {
+            return false;
+        }
+
+        if (Files.isDirectory(this.tmpDirectory)) {
+            return true;
+        }
+
+        try {
+            Files.createDirectories(this.tmpDirectory);
+
+            Files.write(this.tmpDirectory.resolve("about.txt"), ImmutableList.of(
+                    "# What is this directory?",
+                    "",
+                    "* In order to perform certain functions, spark sometimes needs to write temporary data to the disk. ",
+                    "* Previously, a temporary directory provided by the operating system was used for this purpose. ",
+                    "* However, this proved to be unreliable in some circumstances, so spark now stores temporary data here instead!",
+                    "",
+                    "spark will automatically cleanup the contents of this directory. ",
+                    "(but if for some reason it doesn't, if the server is stopped, you can freely delete any files ending in .tmp)",
+                    "",
+                    "tl;dr: spark uses this folder to store some temporary data."
+            ), StandardCharsets.UTF_8);
+
+            return true;
+        } catch (IOException e) {
+            return false;
+        }
+    }
+
 }
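TemporaryFiles changes from a static utility into a per-instance manager rooted in a plugin-controlled directory, with owner-only permissions on POSIX systems and a fallback to the OS temp directory (or the spark.useOsTmpDir system property). A lifecycle sketch (the directory path is illustrative):

    TemporaryFiles tmp = new TemporaryFiles(Paths.get("plugins/spark/tmp"));
    Path file = tmp.create("spark-", "-sampler.tmp"); // owner-only perms where supported
    // ... write profiler data to `file` ...
    tmp.deleteTemporaryFiles(); // e.g. invoked when the plugin is disabled
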