| field | value |
|---|---|
| author | embeddedt <42941056+embeddedt@users.noreply.github.com>, 2022-12-26 18:58:46 -0500 |
| committer | embeddedt <42941056+embeddedt@users.noreply.github.com>, 2022-12-26 18:58:46 -0500 |
| commit | 1075665def4a41cf0064255a6da1d1a652f5d473 (patch) |
| tree | 11bba64e8f28ce8b83adc05252b75f17e2ccbf6a /spark-common/src/main/java/me/lucko/spark/common/util |
| parent | d9550259c1995d21fc345c58f2e531fdecf75acd (diff) |
| parent | d9655c40c02aef137c7a6a00a1cc90a1e6fb08d1 (diff) |
| download | spark-1075665def4a41cf0064255a6da1d1a652f5d473.tar.gz, spark-1075665def4a41cf0064255a6da1d1a652f5d473.tar.bz2, spark-1075665def4a41cf0064255a6da1d1a652f5d473.zip |
Merge remote-tracking branch 'lucko/master' into forge-1.7.10
Diffstat (limited to 'spark-common/src/main/java/me/lucko/spark/common/util')
6 files changed, 198 insertions, 341 deletions
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/ClassSourceLookup.java b/spark-common/src/main/java/me/lucko/spark/common/util/ClassSourceLookup.java
deleted file mode 100644
index bd9ec37..0000000
--- a/spark-common/src/main/java/me/lucko/spark/common/util/ClassSourceLookup.java
+++ /dev/null
@@ -1,241 +0,0 @@
-/*
- * This file is part of spark.
- *
- * Copyright (c) lucko (Luck) <luck@lucko.me>
- * Copyright (c) contributors
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-package me.lucko.spark.common.util;
-
-import me.lucko.spark.common.sampler.node.StackTraceNode;
-import me.lucko.spark.common.sampler.node.ThreadNode;
-
-import org.checkerframework.checker.nullness.qual.Nullable;
-
-import java.io.IOException;
-import java.net.MalformedURLException;
-import java.net.URISyntaxException;
-import java.net.URL;
-import java.net.URLClassLoader;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.security.CodeSource;
-import java.security.ProtectionDomain;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Objects;
-
-/**
- * A function which defines the source of given {@link Class}es.
- */
-public interface ClassSourceLookup {
-
-    /**
-     * Identify the given class.
-     *
-     * @param clazz the class
-     * @return the source of the class
-     */
-    @Nullable String identify(Class<?> clazz) throws Exception;
-
-    /**
-     * A no-operation {@link ClassSourceLookup}.
-     */
-    ClassSourceLookup NO_OP = new ClassSourceLookup() {
-        @Override
-        public @Nullable String identify(Class<?> clazz) {
-            return null;
-        }
-    };
-
-    /**
-     * A {@link ClassSourceLookup} which identifies classes based on their {@link ClassLoader}.
-     */
-    abstract class ByClassLoader implements ClassSourceLookup {
-
-        public abstract @Nullable String identify(ClassLoader loader) throws Exception;
-
-        @Override
-        public final @Nullable String identify(Class<?> clazz) throws Exception {
-            ClassLoader loader = clazz.getClassLoader();
-            while (loader != null) {
-                String source = identify(loader);
-                if (source != null) {
-                    return source;
-                }
-                loader = loader.getParent();
-            }
-            return null;
-        }
-    }
-
-    /**
-     * A {@link ClassSourceLookup} which identifies classes based on URL.
-     */
-    interface ByUrl extends ClassSourceLookup {
-
-        default String identifyUrl(URL url) throws URISyntaxException, MalformedURLException {
-            Path path = null;
-
-            String protocol = url.getProtocol();
-            if (protocol.equals("file")) {
-                path = Paths.get(url.toURI());
-            } else if (protocol.equals("jar")) {
-                URL innerUrl = new URL(url.getPath());
-                path = Paths.get(innerUrl.getPath().split("!")[0]);
-            }
-
-            if (path != null) {
-                return identifyFile(path.toAbsolutePath().normalize());
-            }
-
-            return null;
-        }
-
-        default String identifyFile(Path path) {
-            return identifyFileName(path.getFileName().toString());
-        }
-
-        default String identifyFileName(String fileName) {
-            return fileName.endsWith(".jar") ? fileName.substring(0, fileName.length() - 4) : null;
-        }
-    }
-
-    /**
-     * A {@link ClassSourceLookup} which identifies classes based on the first URL in a {@link URLClassLoader}.
-     */
-    class ByFirstUrlSource extends ByClassLoader implements ByUrl {
-        @Override
-        public @Nullable String identify(ClassLoader loader) throws IOException, URISyntaxException {
-            if (loader instanceof URLClassLoader) {
-                URLClassLoader urlClassLoader = (URLClassLoader) loader;
-                URL[] urls = urlClassLoader.getURLs();
-                if (urls.length == 0) {
-                    return null;
-                }
-                return identifyUrl(urls[0]);
-            }
-            return null;
-        }
-    }
-
-    /**
-     * A {@link ClassSourceLookup} which identifies classes based on their {@link ProtectionDomain#getCodeSource()}.
-     */
-    class ByCodeSource implements ClassSourceLookup, ByUrl {
-        @Override
-        public @Nullable String identify(Class<?> clazz) throws URISyntaxException, MalformedURLException {
-            ProtectionDomain protectionDomain = clazz.getProtectionDomain();
-            if (protectionDomain == null) {
-                return null;
-            }
-            CodeSource codeSource = protectionDomain.getCodeSource();
-            if (codeSource == null) {
-                return null;
-            }
-
-            URL url = codeSource.getLocation();
-            return url == null ? null : identifyUrl(url);
-        }
-    }
-
-    interface Visitor {
-        void visit(ThreadNode node);
-
-        boolean hasMappings();
-
-        Map<String, String> getMapping();
-    }
-
-    static Visitor createVisitor(ClassSourceLookup lookup) {
-        if (lookup == ClassSourceLookup.NO_OP) {
-            return NoOpVisitor.INSTANCE; // don't bother!
-        }
-        return new VisitorImpl(lookup);
-    }
-
-    enum NoOpVisitor implements Visitor {
-        INSTANCE;
-
-        @Override
-        public void visit(ThreadNode node) {
-
-        }
-
-        @Override
-        public boolean hasMappings() {
-            return false;
-        }
-
-        @Override
-        public Map<String, String> getMapping() {
-            return Collections.emptyMap();
-        }
-    }
-
-    /**
-     * Visitor which scans {@link StackTraceNode}s and accumulates class identities.
-     */
-    class VisitorImpl implements Visitor {
-        private final ClassSourceLookup lookup;
-        private final ClassFinder classFinder = new ClassFinder();
-
-        // class name --> identifier (plugin name)
-        private final Map<String, String> map = new HashMap<>();
-
-        VisitorImpl(ClassSourceLookup lookup) {
-            this.lookup = lookup;
-        }
-
-        @Override
-        public void visit(ThreadNode node) {
-            for (StackTraceNode child : node.getChildren()) {
-                visitStackNode(child);
-            }
-        }
-
-        @Override
-        public boolean hasMappings() {
-            return !this.map.isEmpty();
-        }
-
-        @Override
-        public Map<String, String> getMapping() {
-            this.map.values().removeIf(Objects::isNull);
-            return this.map;
-        }
-
-        private void visitStackNode(StackTraceNode node) {
-            String className = node.getClassName();
-            if (!this.map.containsKey(className)) {
-                try {
-                    Class<?> clazz = this.classFinder.findClass(className);
-                    Objects.requireNonNull(clazz);
-                    this.map.put(className, this.lookup.identify(clazz));
-                } catch (Throwable e) {
-                    this.map.put(className, null);
-                }
-            }
-
-            // recursively
-            for (StackTraceNode child : node.getChildren()) {
-                visitStackNode(child);
-            }
-        }
-    }
-
-}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/Configuration.java b/spark-common/src/main/java/me/lucko/spark/common/util/Configuration.java
index 7588645..32f3bc6 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/util/Configuration.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/Configuration.java
@@ -20,32 +20,58 @@
 
 package me.lucko.spark.common.util;
 
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
 import com.google.gson.JsonElement;
 import com.google.gson.JsonObject;
-import com.google.gson.JsonParser;
 import com.google.gson.JsonPrimitive;
 
 import java.io.BufferedReader;
+import java.io.BufferedWriter;
 import java.io.IOException;
 import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Path;
 
 public final class Configuration {
-    private static final JsonParser PARSER = new JsonParser();
+    private static final Gson GSON = new GsonBuilder().setPrettyPrinting().create();
 
-    private final JsonObject root;
+    private final Path file;
+    private JsonObject root;
 
     public Configuration(Path file) {
+        this.file = file;
+        load();
+    }
+
+    public void load() {
         JsonObject root = null;
-        if (Files.exists(file)) {
-            try (BufferedReader reader = Files.newBufferedReader(file, StandardCharsets.UTF_8)) {
-                root = PARSER.parse(reader).getAsJsonObject();
+        if (Files.exists(this.file)) {
+            try (BufferedReader reader = Files.newBufferedReader(this.file, StandardCharsets.UTF_8)) {
+                root = GSON.fromJson(reader, JsonObject.class);
             } catch (IOException e) {
                 e.printStackTrace();
             }
         }
-        this.root = root != null ? root : new JsonObject();
+        if (root == null) {
+            root = new JsonObject();
+            root.addProperty("_header", "spark configuration file - https://spark.lucko.me/docs/Configuration");
+        }
+        this.root = root;
+    }
+
+    public void save() {
+        try {
+            Files.createDirectories(this.file.getParent());
+        } catch (IOException e) {
+            // ignore
+        }
+
+        try (BufferedWriter writer = Files.newBufferedWriter(this.file, StandardCharsets.UTF_8)) {
+            GSON.toJson(this.root, writer);
+        } catch (IOException e) {
+            e.printStackTrace();
+        }
     }
 
     public String getString(String path, String def) {
@@ -67,4 +93,34 @@ public final class Configuration {
         return val.isBoolean() ? val.getAsBoolean() : def;
     }
 
+    public int getInteger(String path, int def) {
+        JsonElement el = this.root.get(path);
+        if (el == null || !el.isJsonPrimitive()) {
+            return def;
+        }
+
+        JsonPrimitive val = el.getAsJsonPrimitive();
+        return val.isBoolean() ? val.getAsInt() : def;
+    }
+
+    public void setString(String path, String value) {
+        this.root.add(path, new JsonPrimitive(value));
+    }
+
+    public void setBoolean(String path, boolean value) {
+        this.root.add(path, new JsonPrimitive(value));
+    }
+
+    public void setInteger(String path, int value) {
+        this.root.add(path, new JsonPrimitive(value));
+    }
+
+    public boolean contains(String path) {
+        return this.root.has(path);
+    }
+
+    public void remove(String path) {
+        this.root.remove(path);
+    }
+
 }
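As a quick orientation for the reworked Configuration class above, here is a minimal usage sketch. It only exercises the accessors and the new load()/save()/setter methods visible in the diff; the config path and keys below are hypothetical, not taken from this commit.

```java
import me.lucko.spark.common.util.Configuration;

import java.nio.file.Path;
import java.nio.file.Paths;

public class ConfigurationUsageExample {
    public static void main(String[] args) {
        // hypothetical location; each spark platform passes its own config path
        Path path = Paths.get("config", "spark", "config.json");
        Configuration config = new Configuration(path); // the constructor now calls load()

        // reads fall back to the supplied default when a key is absent
        boolean backgroundProfiler = config.getBoolean("backgroundProfiler", true);

        // setters only mutate the in-memory JsonObject...
        config.setBoolean("backgroundProfiler", !backgroundProfiler);
        config.setString("note", "changed by the usage example");

        // ...until save() writes it back to disk via the pretty-printing Gson instance
        config.save();
    }
}
```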
val.getAsBoolean() : def; } + public int getInteger(String path, int def) { + JsonElement el = this.root.get(path); + if (el == null || !el.isJsonPrimitive()) { + return def; + } + + JsonPrimitive val = el.getAsJsonPrimitive(); + return val.isBoolean() ? val.getAsInt() : def; + } + + public void setString(String path, String value) { + this.root.add(path, new JsonPrimitive(value)); + } + + public void setBoolean(String path, boolean value) { + this.root.add(path, new JsonPrimitive(value)); + } + + public void setInteger(String path, int value) { + this.root.add(path, new JsonPrimitive(value)); + } + + public boolean contains(String path) { + return this.root.has(path); + } + + public void remove(String path) { + this.root.remove(path); + } + } diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/FormatUtil.java b/spark-common/src/main/java/me/lucko/spark/common/util/FormatUtil.java index c4a3d66..1ee3b0f 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/util/FormatUtil.java +++ b/spark-common/src/main/java/me/lucko/spark/common/util/FormatUtil.java @@ -62,4 +62,24 @@ public enum FormatUtil { .append(Component.text(unit)) .build(); } + + public static String formatSeconds(long seconds) { + if (seconds <= 0) { + return "0s"; + } + + long second = seconds; + long minute = second / 60; + second = second % 60; + + StringBuilder sb = new StringBuilder(); + if (minute != 0) { + sb.append(minute).append("m "); + } + if (second != 0) { + sb.append(second).append("s "); + } + + return sb.toString().trim(); + } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/IndexedListBuilder.java b/spark-common/src/main/java/me/lucko/spark/common/util/IndexedListBuilder.java new file mode 100644 index 0000000..b2315f9 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/util/IndexedListBuilder.java @@ -0,0 +1,43 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) <luck@lucko.me> + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. + */ + +package me.lucko.spark.common.util; + +import java.util.ArrayList; +import java.util.List; + +/** + * List builder that returns the index of the inserted element. + * + * @param <T> generic type + */ +public class IndexedListBuilder<T> { + private int i = 0; + private final List<T> nodes = new ArrayList<>(); + + public int add(T node) { + this.nodes.add(node); + return this.i++; + } + + public List<T> build() { + return this.nodes; + } +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/LinuxProc.java b/spark-common/src/main/java/me/lucko/spark/common/util/LinuxProc.java deleted file mode 100644 index 7d688d7..0000000 --- a/spark-common/src/main/java/me/lucko/spark/common/util/LinuxProc.java +++ /dev/null @@ -1,84 +0,0 @@ -/* - * This file is part of spark. 
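The formatSeconds helper added to FormatUtil above collapses a duration in seconds into a compact "Xm Ys" string, dropping zero components. A small illustrative snippet showing the expected output (the wrapper class is hypothetical):

```java
import static me.lucko.spark.common.util.FormatUtil.formatSeconds;

public class FormatSecondsExample {
    public static void main(String[] args) {
        System.out.println(formatSeconds(0));   // "0s"
        System.out.println(formatSeconds(45));  // "45s"
        System.out.println(formatSeconds(60));  // "1m"  (zero components are dropped)
        System.out.println(formatSeconds(125)); // "2m 5s"
    }
}
```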
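Likewise, a minimal sketch of the new IndexedListBuilder, whose add() returns the index at which each element was stored, in insertion order (the wrapper class below is hypothetical):

```java
import me.lucko.spark.common.util.IndexedListBuilder;

import java.util.List;

public class IndexedListBuilderExample {
    public static void main(String[] args) {
        IndexedListBuilder<String> builder = new IndexedListBuilder<>();

        // add() returns the index the element was stored at
        int first = builder.add("alpha");  // 0
        int second = builder.add("beta");  // 1
        int third = builder.add("gamma");  // 2

        List<String> list = builder.build(); // [alpha, beta, gamma]
        System.out.println(first + " " + second + " " + third + " -> " + list);
    }
}
```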
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/LinuxProc.java b/spark-common/src/main/java/me/lucko/spark/common/util/LinuxProc.java
deleted file mode 100644
index 7d688d7..0000000
--- a/spark-common/src/main/java/me/lucko/spark/common/util/LinuxProc.java
+++ /dev/null
@@ -1,84 +0,0 @@
-/*
- * This file is part of spark.
- *
- * Copyright (c) lucko (Luck) <luck@lucko.me>
- * Copyright (c) contributors
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-package me.lucko.spark.common.util;
-
-import org.checkerframework.checker.nullness.qual.NonNull;
-import org.checkerframework.checker.nullness.qual.Nullable;
-
-import java.io.IOException;
-import java.nio.charset.StandardCharsets;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.util.Collections;
-import java.util.List;
-
-/**
- * Utility for reading from /proc/ on Linux systems.
- */
-public enum LinuxProc {
-
-    /**
-     * Information about the system CPU.
-     */
-    CPUINFO("/proc/cpuinfo"),
-
-    /**
-     * Information about the system memory.
-     */
-    MEMINFO("/proc/meminfo"),
-
-    /**
-     * Information about the system network usage.
-     */
-    NET_DEV("/proc/net/dev");
-
-    private final Path path;
-
-    LinuxProc(String path) {
-        this.path = resolvePath(path);
-    }
-
-    private static @Nullable Path resolvePath(String path) {
-        try {
-            Path p = Paths.get(path);
-            if (Files.isReadable(p)) {
-                return p;
-            }
-        } catch (Exception e) {
-            // ignore
-        }
-        return null;
-    }
-
-    public @NonNull List<String> read() {
-        if (this.path != null) {
-            try {
-                return Files.readAllLines(this.path, StandardCharsets.UTF_8);
-            } catch (IOException e) {
-                // ignore
-            }
-        }
-
-        return Collections.emptyList();
-    }
-
-}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/TemporaryFiles.java b/spark-common/src/main/java/me/lucko/spark/common/util/TemporaryFiles.java
index 8a4a621..91a474c 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/util/TemporaryFiles.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/TemporaryFiles.java
@@ -20,10 +20,18 @@
 
 package me.lucko.spark.common.util;
 
+import com.google.common.collect.ImmutableList;
+
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.FileSystems;
 import java.nio.file.Files;
 import java.nio.file.Path;
+import java.nio.file.attribute.FileAttribute;
+import java.nio.file.attribute.PosixFilePermission;
+import java.nio.file.attribute.PosixFilePermissions;
 import java.util.Collections;
+import java.util.EnumSet;
 import java.util.HashSet;
 import java.util.Iterator;
 import java.util.Set;
@@ -32,23 +40,47 @@ import java.util.Set;
 /**
  * Utility for handling temporary files.
  */
 public final class TemporaryFiles {
 
-    private TemporaryFiles() {}
-
-    private static final Set<Path> DELETE_SET = Collections.synchronizedSet(new HashSet<>());
+    public static final FileAttribute<?>[] OWNER_ONLY_FILE_PERMISSIONS;
+
+    static {
+        boolean isPosix = FileSystems.getDefault().supportedFileAttributeViews().contains("posix");
+        if (isPosix) {
+            OWNER_ONLY_FILE_PERMISSIONS = new FileAttribute[]{PosixFilePermissions.asFileAttribute(EnumSet.of(
+                    PosixFilePermission.OWNER_READ, PosixFilePermission.OWNER_WRITE
+            ))};
+        } else {
+            OWNER_ONLY_FILE_PERMISSIONS = new FileAttribute[0];
+        }
+    }
+
+    private final Path tmpDirectory;
+    private final Set<Path> files = Collections.synchronizedSet(new HashSet<>());
 
-    public static Path create(String prefix, String suffix) throws IOException {
-        return register(Files.createTempFile(prefix, suffix));
+    public TemporaryFiles(Path tmpDirectory) {
+        this.tmpDirectory = tmpDirectory;
     }
 
-    public static Path register(Path path) {
+    public Path create(String prefix, String suffix) throws IOException {
+        Path file;
+        if (ensureDirectoryIsReady()) {
+            String name = prefix + Long.toHexString(System.nanoTime()) + suffix;
+            file = Files.createFile(this.tmpDirectory.resolve(name), OWNER_ONLY_FILE_PERMISSIONS);
+        } else {
+            file = Files.createTempFile(prefix, suffix);
+        }
+        return register(file);
+    }
+
+    public Path register(Path path) {
         path.toFile().deleteOnExit();
-        DELETE_SET.add(path);
+        this.files.add(path);
         return path;
     }
 
-    public static void deleteTemporaryFiles() {
-        synchronized (DELETE_SET) {
-            for (Iterator<Path> iterator = DELETE_SET.iterator(); iterator.hasNext(); ) {
+    public void deleteTemporaryFiles() {
+        synchronized (this.files) {
+            for (Iterator<Path> iterator = this.files.iterator(); iterator.hasNext(); ) {
                 Path path = iterator.next();
                 try {
                     Files.deleteIfExists(path);
@@ -60,4 +92,35 @@ public final class TemporaryFiles {
         }
     }
 
+    private boolean ensureDirectoryIsReady() {
+        if (Boolean.parseBoolean(System.getProperty("spark.useOsTmpDir", "false"))) {
+            return false;
+        }
+
+        if (Files.isDirectory(this.tmpDirectory)) {
+            return true;
+        }
+
+        try {
+            Files.createDirectories(this.tmpDirectory);
+
+            Files.write(this.tmpDirectory.resolve("about.txt"), ImmutableList.of(
+                    "# What is this directory?",
+                    "",
+                    "* In order to perform certain functions, spark sometimes needs to write temporary data to the disk. ",
+                    "* Previously, a temporary directory provided by the operating system was used for this purpose. ",
+                    "* However, this proved to be unreliable in some circumstances, so spark now stores temporary data here instead!",
+                    "",
+                    "spark will automatically cleanup the contents of this directory. ",
+                    "(but if for some reason it doesn't, if the server is stopped, you can freely delete any files ending in .tmp)",
+                    "",
+                    "tl;dr: spark uses this folder to store some temporary data."
+            ), StandardCharsets.UTF_8);
+
+            return true;
+        } catch (IOException e) {
+            return false;
+        }
+    }
+
 }
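To round things off, a rough sketch of how the reworked, instance-based TemporaryFiles above might be driven. The base directory and file names here are made up for illustration; real platforms supply their own temporary directory when constructing the instance.

```java
import me.lucko.spark.common.util.TemporaryFiles;

import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;

public class TemporaryFilesExample {
    public static void main(String[] args) throws IOException {
        // hypothetical base directory for temporary data
        TemporaryFiles temporaryFiles = new TemporaryFiles(Paths.get("plugins", "spark", "tmp"));

        // files are created inside the managed directory (owner-only permissions on POSIX),
        // unless -Dspark.useOsTmpDir=true falls back to the old Files.createTempFile behaviour
        Path data = temporaryFiles.create("spark-", "-example.tmp");
        System.out.println("wrote temporary file: " + data);

        // on shutdown, the instance cleans up everything it registered
        temporaryFiles.deleteTemporaryFiles();
    }
}
```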