author    Luck <git@lucko.me>    2024-07-29 18:33:08 +0100
committer Luck <git@lucko.me>    2024-07-29 18:33:08 +0100
commit    60d54cc4df05e3328f8b8d64ea3b44d5d22c9ed7 (patch)
tree      2bf8fcf914ac57466549d35dcd89ef96d3a2d65f /spark-common/src/main/java
parent    4c0149b6a15fa887328bbd88c8055c2138cc4d72 (diff)
Add some unit tests
Diffstat (limited to 'spark-common/src/main/java')
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java                     18
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java      3
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java          16
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/monitor/MacosSysctl.java               67
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuInfo.java                7
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkInterfaceInfo.java   4
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/BackgroundSamplerManager.java   5
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java             7
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java             32
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/util/JavaVersion.java                   5
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/util/config/CombinedConfiguration.java 132
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/util/config/Configuration.java          54
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/util/config/FileConfiguration.java (renamed from spark-common/src/main/java/me/lucko/spark/common/util/Configuration.java)  18
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/util/config/RuntimeConfiguration.java 106
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/ws/TrustedKeyStore.java                  2
15 files changed, 453 insertions, 23 deletions
diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
index 733510d..5e25d91 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
@@ -52,10 +52,12 @@ import me.lucko.spark.common.sampler.source.ClassSourceLookup;
import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.common.tick.TickReporter;
import me.lucko.spark.common.util.BytebinClient;
-import me.lucko.spark.common.util.Configuration;
import me.lucko.spark.common.util.SparkStaticLogger;
import me.lucko.spark.common.util.TemporaryFiles;
import me.lucko.spark.common.util.classfinder.ClassFinder;
+import me.lucko.spark.common.util.config.Configuration;
+import me.lucko.spark.common.util.config.FileConfiguration;
+import me.lucko.spark.common.util.config.RuntimeConfiguration;
import me.lucko.spark.common.ws.TrustedKeyStore;
import net.kyori.adventure.text.Component;
import net.kyori.adventure.text.event.ClickEvent;
@@ -71,6 +73,7 @@ import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
+import java.util.concurrent.CompletableFuture;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.ReentrantLock;
@@ -121,7 +124,11 @@ public class SparkPlatform {
SparkStaticLogger.setLogger(plugin::log);
this.temporaryFiles = new TemporaryFiles(this.plugin.getPluginDirectory().resolve("tmp"));
- this.configuration = new Configuration(this.plugin.getPluginDirectory().resolve("config.json"));
+ this.configuration = Configuration.combining(
+ RuntimeConfiguration.SYSTEM_PROPERTIES,
+ RuntimeConfiguration.ENVIRONMENT_VARIABLES,
+ new FileConfiguration(this.plugin.getPluginDirectory().resolve("config.json"))
+ );
this.viewerUrl = this.configuration.getString("viewerUrl", "https://spark.lucko.me/");
String bytebinUrl = this.configuration.getString("bytebinUrl", "https://spark-usercontent.lucko.me/");
@@ -330,7 +337,8 @@ public class SparkPlatform {
return !getAvailableCommands(sender).isEmpty();
}
- public void executeCommand(CommandSender sender, String[] args) {
+ public CompletableFuture<Void> executeCommand(CommandSender sender, String[] args) {
+ CompletableFuture<Void> future = new CompletableFuture<>();
AtomicReference<Thread> executorThread = new AtomicReference<>();
AtomicReference<Thread> timeoutThread = new AtomicReference<>();
AtomicBoolean completed = new AtomicBoolean(false);
@@ -341,9 +349,11 @@ public class SparkPlatform {
this.commandExecuteLock.lock();
try {
executeCommand0(sender, args);
+ future.complete(null);
} catch (Exception e) {
this.plugin.log(Level.SEVERE, "Exception occurred whilst executing a spark command");
e.printStackTrace();
+ future.completeExceptionally(e);
} finally {
this.commandExecuteLock.unlock();
executorThread.set(null);
@@ -393,6 +403,8 @@ public class SparkPlatform {
timeoutThread.set(null);
}
});
+
+ return future;
}
private void executeCommand0(CommandSender sender, String[] args) {
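
Since executeCommand now returns a CompletableFuture<Void>, callers (including the unit tests this commit adds) can wait for a command to finish instead of firing and forgetting. A minimal sketch of that usage, assuming an already-constructed SparkPlatform and CommandSender; the "health" argument is illustrative only:

    import me.lucko.spark.common.SparkPlatform;
    import me.lucko.spark.common.command.sender.CommandSender;

    import java.util.concurrent.CompletableFuture;
    import java.util.concurrent.TimeUnit;

    class ExecuteCommandSketch {
        // Runs a spark command and blocks until executeCommand0 has completed (or failed).
        static void runAndWait(SparkPlatform platform, CommandSender sender) throws Exception {
            CompletableFuture<Void> future = platform.executeCommand(sender, new String[]{"health"});
            future.get(30, TimeUnit.SECONDS); // propagates any exception raised by the command
        }
    }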
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
index 9e2647a..334e416 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
@@ -59,6 +59,7 @@ import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;
+import java.util.function.Supplier;
import static net.kyori.adventure.text.Component.empty;
import static net.kyori.adventure.text.Component.space;
@@ -208,7 +209,7 @@ public class SamplerModule implements CommandModule {
}
}
- ThreadGrouper threadGrouper;
+ Supplier<ThreadGrouper> threadGrouper;
if (arguments.boolFlag("combine-all")) {
threadGrouper = ThreadGrouper.AS_ONE;
} else if (arguments.boolFlag("not-combined")) {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java b/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java
index 364edd6..4d34d4a 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java
@@ -20,6 +20,7 @@
package me.lucko.spark.common.heapdump;
+import com.google.common.annotations.VisibleForTesting;
import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.command.sender.CommandSender;
import me.lucko.spark.proto.SparkHeapProtos.HeapData;
@@ -123,6 +124,11 @@ public final class HeapDumpSummary {
this.entries = entries;
}
+ @VisibleForTesting
+ List<Entry> getEntries() {
+ return this.entries;
+ }
+
public HeapData toProto(SparkPlatform platform, CommandSender.Data creator) {
HeapMetadata.Builder metadata = HeapMetadata.newBuilder()
.setPlatformMetadata(platform.getPlugin().getPlatformInfo().toData().toProto())
@@ -186,6 +192,16 @@ public final class HeapDumpSummary {
.setType(this.type)
.build();
}
+
+ @Override
+ public String toString() {
+ return "Entry{" +
+ "order=" + this.order +
+ ", instances=" + this.instances +
+ ", bytes=" + this.bytes +
+ ", type='" + this.type + '\'' +
+ '}';
+ }
}
public interface DiagnosticCommandMXBean {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/MacosSysctl.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/MacosSysctl.java
new file mode 100644
index 0000000..c279f31
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/MacosSysctl.java
@@ -0,0 +1,67 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.monitor;
+
+import org.checkerframework.checker.nullness.qual.NonNull;
+
+import java.io.BufferedReader;
+import java.io.InputStreamReader;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Locale;
+
+/**
+ * Utility for reading from sysctl on macOS systems.
+ */
+public enum MacosSysctl {
+
+ SYSCTL("sysctl", "-a"),;
+
+ private static final boolean SUPPORTED = System.getProperty("os.name").toLowerCase(Locale.ROOT).replace(" ", "").equals("macosx");
+
+ private final String[] cmdArgs;
+
+ MacosSysctl(String... cmdArgs) {
+ this.cmdArgs = cmdArgs;
+ }
+
+ public @NonNull List<String> read() {
+ if (SUPPORTED) {
+ ProcessBuilder process = new ProcessBuilder(this.cmdArgs).redirectErrorStream(true);
+ try (BufferedReader buf = new BufferedReader(new InputStreamReader(process.start().getInputStream()))) {
+ List<String> lines = new ArrayList<>();
+
+ String line;
+ while ((line = buf.readLine()) != null) {
+ lines.add(line);
+ }
+
+ return lines;
+ } catch (Exception e) {
+ // ignore
+ }
+ }
+
+ return Collections.emptyList();
+ }
+}
+
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuInfo.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuInfo.java
index 9954bd5..07875cc 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuInfo.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuInfo.java
@@ -21,6 +21,7 @@
package me.lucko.spark.common.monitor.cpu;
import me.lucko.spark.common.monitor.LinuxProc;
+import me.lucko.spark.common.monitor.MacosSysctl;
import me.lucko.spark.common.monitor.WindowsWmic;
import java.util.regex.Pattern;
@@ -52,6 +53,12 @@ public enum CpuInfo {
}
}
+ for (String line : MacosSysctl.SYSCTL.read()) {
+ if (line.startsWith("machdep.cpu.brand_string:")) {
+ return line.substring("machdep.cpu.brand_string:".length()).trim();
+ }
+ }
+
return "";
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkInterfaceInfo.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkInterfaceInfo.java
index eed695e..c2ba1da 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkInterfaceInfo.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkInterfaceInfo.java
@@ -20,6 +20,7 @@
package me.lucko.spark.common.monitor.net;
+import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableMap;
import me.lucko.spark.common.monitor.LinuxProc;
import org.checkerframework.checker.nullness.qual.NonNull;
@@ -200,7 +201,8 @@ public final class NetworkInterfaceInfo {
private static final Pattern PROC_NET_DEV_PATTERN = Pattern.compile("^\\s*(\\w+):([\\d\\s]+)$");
- private static @NonNull Map<String, NetworkInterfaceInfo> read(List<String> output) {
+ @VisibleForTesting
+ static @NonNull Map<String, NetworkInterfaceInfo> read(List<String> output) {
// Inter-| Receive | Transmit
// face |bytes packets errs drop fifo frame compressed multicast|bytes packets errs drop fifo colls carrier compressed
// lo: 2776770 11307 0 0 0 0 0 0 2776770 11307 0 0 0 0 0 0
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/BackgroundSamplerManager.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/BackgroundSamplerManager.java
index 4e9ca9e..1889304 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/BackgroundSamplerManager.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/BackgroundSamplerManager.java
@@ -22,8 +22,9 @@ package me.lucko.spark.common.sampler;
import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.platform.PlatformInfo;
-import me.lucko.spark.common.util.Configuration;
+import me.lucko.spark.common.util.config.Configuration;
+import java.util.function.Supplier;
import java.util.logging.Level;
public class BackgroundSamplerManager {
@@ -103,7 +104,7 @@ public class BackgroundSamplerManager {
private void startSampler() {
boolean forceJavaEngine = this.configuration.getString(OPTION_ENGINE, "async").equals("java");
- ThreadGrouper threadGrouper = ThreadGrouper.parseConfigSetting(this.configuration.getString(OPTION_THREAD_GROUPER, "by-pool"));
+ Supplier<ThreadGrouper> threadGrouper = ThreadGrouper.parseConfigSetting(this.configuration.getString(OPTION_THREAD_GROUPER, "by-pool"));
ThreadDumper threadDumper = ThreadDumper.parseConfigSetting(this.configuration.getString(OPTION_THREAD_DUMPER, "default"));
if (threadDumper == null) {
threadDumper = this.platform.getPlugin().getDefaultThreadDumper();
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java
index b6895ce..3046d92 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java
@@ -28,6 +28,7 @@ import me.lucko.spark.common.sampler.java.JavaSampler;
import me.lucko.spark.common.tick.TickHook;
import java.util.concurrent.TimeUnit;
+import java.util.function.Supplier;
/**
* Builds {@link Sampler} instances.
@@ -44,7 +45,7 @@ public class SamplerBuilder {
private long autoEndTime = -1;
private boolean background = false;
private ThreadDumper threadDumper = ThreadDumper.ALL;
- private ThreadGrouper threadGrouper = ThreadGrouper.BY_NAME;
+ private Supplier<ThreadGrouper> threadGrouper = ThreadGrouper.BY_NAME;
private int ticksOver = -1;
private TickHook tickHook = null;
@@ -80,7 +81,7 @@ public class SamplerBuilder {
return this;
}
- public SamplerBuilder threadGrouper(ThreadGrouper threadGrouper) {
+ public SamplerBuilder threadGrouper(Supplier<ThreadGrouper> threadGrouper) {
this.threadGrouper = threadGrouper;
return this;
}
@@ -131,7 +132,7 @@ public class SamplerBuilder {
this.samplingInterval
);
- SamplerSettings settings = new SamplerSettings(interval, this.threadDumper, this.threadGrouper, this.autoEndTime, this.background);
+ SamplerSettings settings = new SamplerSettings(interval, this.threadDumper, this.threadGrouper.get(), this.autoEndTime, this.background);
Sampler sampler;
if (this.mode == SamplerMode.ALLOCATION) {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java
index b6cfbea..c8d5b3c 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java
@@ -26,6 +26,7 @@ import java.util.Collections;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
+import java.util.function.Supplier;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@@ -64,7 +65,7 @@ public interface ThreadGrouper {
* @param setting the config setting
* @return the thread grouper
*/
- static ThreadGrouper parseConfigSetting(String setting) {
+ static Supplier<ThreadGrouper> parseConfigSetting(String setting) {
switch (setting) {
case "as-one":
return AS_ONE;
@@ -76,9 +77,14 @@ public interface ThreadGrouper {
}
/**
+ * Supplier for {@link ByName} thread groupers.
+ */
+ Supplier<ThreadGrouper> BY_NAME = ByName::new;
+
+ /**
* Implementation of {@link ThreadGrouper} that just groups by thread name.
*/
- ThreadGrouper BY_NAME = new ThreadGrouper() {
+ class ByName implements ThreadGrouper {
@Override
public String getGroup(long threadId, String threadName) {
return threadName;
@@ -93,7 +99,12 @@ public interface ThreadGrouper {
public SamplerMetadata.DataAggregator.ThreadGrouper asProto() {
return SamplerMetadata.DataAggregator.ThreadGrouper.BY_NAME;
}
- };
+ }
+
+ /**
+ * Supplier for {@link ByPool} thread groupers.
+ */
+ Supplier<ThreadGrouper> BY_POOL = ByPool::new;
/**
* Implementation of {@link ThreadGrouper} that attempts to group by the name of the pool
@@ -102,8 +113,8 @@ public interface ThreadGrouper {
* <p>The regex pattern used to match pools expects a digit at the end of the thread name,
* separated from the pool name with any of one or more of ' ', '-', or '#'.</p>
*/
- ThreadGrouper BY_POOL = new ThreadGrouper() {
- private /* static */ final Pattern pattern = Pattern.compile("^(.*?)[-# ]+\\d+$");
+ class ByPool implements ThreadGrouper {
+ private static final Pattern PATTERN = Pattern.compile("^(.*?)[-# ]+\\d+$");
// thread id -> group
private final Map<Long, String> cache = new ConcurrentHashMap<>();
@@ -117,7 +128,7 @@ public interface ThreadGrouper {
return cached;
}
- Matcher matcher = this.pattern.matcher(threadName);
+ Matcher matcher = PATTERN.matcher(threadName);
if (!matcher.matches()) {
return threadName;
}
@@ -141,13 +152,18 @@ public interface ThreadGrouper {
public SamplerMetadata.DataAggregator.ThreadGrouper asProto() {
return SamplerMetadata.DataAggregator.ThreadGrouper.BY_POOL;
}
- };
+ }
+
+ /**
+ * Supplier for {@link AsOne} thread groupers.
+ */
+ Supplier<ThreadGrouper> AS_ONE = AsOne::new;
/**
* Implementation of {@link ThreadGrouper} which groups all threads as one, under
* the name "All".
*/
- ThreadGrouper AS_ONE = new ThreadGrouper() {
+ class AsOne implements ThreadGrouper {
private final Set<Long> seen = ConcurrentHashMap.newKeySet();
@Override
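
The BY_NAME/BY_POOL/AS_ONE constants are now Supplier<ThreadGrouper> values backed by the new ByName/ByPool/AsOne classes, so each sampler obtains its own grouper instance (ByPool and AsOne carry per-instance state). A minimal sketch of the new call pattern; the thread name is made up for illustration:

    import me.lucko.spark.common.sampler.ThreadGrouper;

    import java.util.function.Supplier;

    class ThreadGrouperSketch {
        static void example() {
            Supplier<ThreadGrouper> supplier = ThreadGrouper.BY_POOL;
            ThreadGrouper grouper = supplier.get(); // a fresh ByPool with its own cache
            // Threads named like "Worker-Thread-1", "Worker-Thread-2" resolve to one shared pool group.
            String group = grouper.getGroup(1L, "Worker-Thread-2");
        }
    }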
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/JavaVersion.java b/spark-common/src/main/java/me/lucko/spark/common/util/JavaVersion.java
index efeabfc..1dad75b 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/util/JavaVersion.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/JavaVersion.java
@@ -20,6 +20,8 @@
package me.lucko.spark.common.util;
+import com.google.common.annotations.VisibleForTesting;
+
public class JavaVersion {
;
@@ -28,7 +30,8 @@ public class JavaVersion {
JAVA_VERSION = parseJavaVersion(System.getProperty("java.version"));
}
- private static int parseJavaVersion(String version) {
+ @VisibleForTesting
+ static int parseJavaVersion(String version) {
if (version.startsWith("1.")) {
// Java 8 and below
return Integer.parseInt(version.substring(2, 3));
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/config/CombinedConfiguration.java b/spark-common/src/main/java/me/lucko/spark/common/util/config/CombinedConfiguration.java
new file mode 100644
index 0000000..ff7388a
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/config/CombinedConfiguration.java
@@ -0,0 +1,132 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.util.config;
+
+import com.google.common.collect.ImmutableList;
+
+import java.util.Collections;
+import java.util.List;
+
+class CombinedConfiguration implements Configuration {
+
+ private final List<Configuration> configurations;
+
+ CombinedConfiguration(Configuration... configurations) {
+ this.configurations = ImmutableList.copyOf(configurations).reverse();
+ }
+
+ @Override
+ public void load() {
+ for (Configuration configuration : this.configurations) {
+ configuration.load();
+ }
+ }
+
+ @Override
+ public void save() {
+ for (Configuration configuration : this.configurations) {
+ configuration.save();
+ }
+ }
+
+ @Override
+ public String getString(String path, String def) {
+ String result = def;
+ for (Configuration configuration : this.configurations) {
+ result = configuration.getString(path, result);
+ }
+ return result;
+ }
+
+ @Override
+ public boolean getBoolean(String path, boolean def) {
+ boolean result = def;
+ for (Configuration configuration : this.configurations) {
+ result = configuration.getBoolean(path, result);
+ }
+ return result;
+ }
+
+ @Override
+ public int getInteger(String path, int def) {
+ int result = def;
+ for (Configuration configuration : this.configurations) {
+ result = configuration.getInteger(path, result);
+ }
+ return result;
+ }
+
+ @Override
+ public List<String> getStringList(String path) {
+ for (Configuration configuration : this.configurations) {
+ List<String> result = configuration.getStringList(path);
+ if (!result.isEmpty()) {
+ return result;
+ }
+ }
+ return Collections.emptyList();
+ }
+
+ @Override
+ public void setString(String path, String value) {
+ for (Configuration configuration : this.configurations) {
+ configuration.setString(path, value);
+ }
+ }
+
+ @Override
+ public void setBoolean(String path, boolean value) {
+ for (Configuration configuration : this.configurations) {
+ configuration.setBoolean(path, value);
+ }
+ }
+
+ @Override
+ public void setInteger(String path, int value) {
+ for (Configuration configuration : this.configurations) {
+ configuration.setInteger(path, value);
+ }
+ }
+
+ @Override
+ public void setStringList(String path, List<String> value) {
+ for (Configuration configuration : this.configurations) {
+ configuration.setStringList(path, value);
+ }
+ }
+
+ @Override
+ public boolean contains(String path) {
+ for (Configuration configuration : this.configurations) {
+ if (configuration.contains(path)) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ @Override
+ public void remove(String path) {
+ for (Configuration configuration : this.configurations) {
+ configuration.remove(path);
+ }
+ }
+}
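
Because the constructor reverses the supplied list, the getters walk the configurations back-to-front and each present value overwrites the previous result, so earlier arguments to Configuration.combining(...) take precedence. A minimal sketch of how the ordering used in SparkPlatform (see the hunk above) resolves a key; the fileConfig parameter stands in for the FileConfiguration built there:

    import me.lucko.spark.common.util.config.Configuration;
    import me.lucko.spark.common.util.config.RuntimeConfiguration;

    class CombinedConfigSketch {
        static String resolveViewerUrl(Configuration fileConfig) {
            Configuration config = Configuration.combining(
                    RuntimeConfiguration.SYSTEM_PROPERTIES,     // consulted last, so it wins
                    RuntimeConfiguration.ENVIRONMENT_VARIABLES,
                    fileConfig                                  // consulted first, lowest precedence
            );
            // Uses config.json's value unless SPARK_VIEWERURL or -Dspark.viewerUrl is set.
            return config.getString("viewerUrl", "https://spark.lucko.me/");
        }
    }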
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/config/Configuration.java b/spark-common/src/main/java/me/lucko/spark/common/util/config/Configuration.java
new file mode 100644
index 0000000..c2c2d88
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/config/Configuration.java
@@ -0,0 +1,54 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.util.config;
+
+import java.util.List;
+
+public interface Configuration {
+
+ static Configuration combining(Configuration... configurations) {
+ return new CombinedConfiguration(configurations);
+ }
+
+ void load();
+
+ void save();
+
+ String getString(String path, String def);
+
+ boolean getBoolean(String path, boolean def);
+
+ int getInteger(String path, int def);
+
+ List<String> getStringList(String path);
+
+ void setString(String path, String value);
+
+ void setBoolean(String path, boolean value);
+
+ void setInteger(String path, int value);
+
+ void setStringList(String path, List<String> value);
+
+ boolean contains(String path);
+
+ void remove(String path);
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/Configuration.java b/spark-common/src/main/java/me/lucko/spark/common/util/config/FileConfiguration.java
index 586a845..72a4681 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/util/Configuration.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/config/FileConfiguration.java
@@ -18,7 +18,7 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-package me.lucko.spark.common.util;
+package me.lucko.spark.common.util.config;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
@@ -37,17 +37,18 @@ import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
-public final class Configuration {
+public class FileConfiguration implements Configuration {
private static final Gson GSON = new GsonBuilder().setPrettyPrinting().create();
private final Path file;
private JsonObject root;
- public Configuration(Path file) {
+ public FileConfiguration(Path file) {
this.file = file;
load();
}
+ @Override
public void load() {
JsonObject root = null;
if (Files.exists(this.file)) {
@@ -64,6 +65,7 @@ public final class Configuration {
this.root = root;
}
+ @Override
public void save() {
try {
Files.createDirectories(this.file.getParent());
@@ -78,6 +80,7 @@ public final class Configuration {
}
}
+ @Override
public String getString(String path, String def) {
JsonElement el = this.root.get(path);
if (el == null || !el.isJsonPrimitive()) {
@@ -87,6 +90,7 @@ public final class Configuration {
return el.getAsJsonPrimitive().getAsString();
}
+ @Override
public boolean getBoolean(String path, boolean def) {
JsonElement el = this.root.get(path);
if (el == null || !el.isJsonPrimitive()) {
@@ -97,6 +101,7 @@ public final class Configuration {
return val.isBoolean() ? val.getAsBoolean() : def;
}
+ @Override
public int getInteger(String path, int def) {
JsonElement el = this.root.get(path);
if (el == null || !el.isJsonPrimitive()) {
@@ -107,6 +112,7 @@ public final class Configuration {
return val.isNumber() ? val.getAsInt() : def;
}
+ @Override
public List<String> getStringList(String path) {
JsonElement el = this.root.get(path);
if (el == null || !el.isJsonArray()) {
@@ -122,18 +128,22 @@ public final class Configuration {
return list;
}
+ @Override
public void setString(String path, String value) {
this.root.add(path, new JsonPrimitive(value));
}
+ @Override
public void setBoolean(String path, boolean value) {
this.root.add(path, new JsonPrimitive(value));
}
+ @Override
public void setInteger(String path, int value) {
this.root.add(path, new JsonPrimitive(value));
}
+ @Override
public void setStringList(String path, List<String> value) {
JsonArray array = new JsonArray();
for (String str : value) {
@@ -142,10 +152,12 @@ public final class Configuration {
this.root.add(path, array);
}
+ @Override
public boolean contains(String path) {
return this.root.has(path);
}
+ @Override
public void remove(String path) {
this.root.remove(path);
}
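
FileConfiguration keeps the old Configuration behaviour: a flat JSON object read and written with Gson at the supplied path. A minimal usage sketch; the path and key/value are illustrative:

    import me.lucko.spark.common.util.config.FileConfiguration;

    import java.nio.file.Paths;

    class FileConfigSketch {
        public static void main(String[] args) {
            FileConfiguration config = new FileConfiguration(Paths.get("config.json")); // loads on construction
            config.setString("viewerUrl", "https://spark.lucko.me/");
            config.save(); // writes pretty-printed JSON via Gson
            System.out.println(config.getString("viewerUrl", "unset"));
        }
    }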
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/config/RuntimeConfiguration.java b/spark-common/src/main/java/me/lucko/spark/common/util/config/RuntimeConfiguration.java
new file mode 100644
index 0000000..d076554
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/config/RuntimeConfiguration.java
@@ -0,0 +1,106 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.util.config;
+
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+
+public enum RuntimeConfiguration implements Configuration {
+ SYSTEM_PROPERTIES {
+ @Override
+ public String getString(String path, String def) {
+ return System.getProperty("spark." + path, def);
+ }
+ },
+
+ ENVIRONMENT_VARIABLES {
+ @Override
+ public String getString(String path, String def) {
+ String name = "SPARK_" + path.replace(".", "_").replace("-", "_").toUpperCase();
+ String value = System.getenv(name);
+ return value != null ? value : def;
+ }
+ };
+
+ @Override
+ public boolean getBoolean(String path, boolean def) {
+ return Boolean.parseBoolean(getString(path, Boolean.toString(def)));
+ }
+
+ @Override
+ public int getInteger(String path, int def) {
+ try {
+ return Integer.parseInt(getString(path, Integer.toString(def)));
+ } catch (NumberFormatException e) {
+ return def;
+ }
+ }
+
+ @Override
+ public List<String> getStringList(String path) {
+ String value = getString(path, "");
+ if (value.isEmpty()) {
+ return Collections.emptyList();
+ }
+ return Arrays.asList(value.split(","));
+ }
+
+ @Override
+ public boolean contains(String path) {
+ return getString(path, null) != null;
+ }
+
+ @Override
+ public void load() {
+ // no-op
+ }
+
+ @Override
+ public void save() {
+ // no-op
+ }
+
+ @Override
+ public void setString(String path, String value) {
+ // no-op
+ }
+
+ @Override
+ public void setBoolean(String path, boolean value) {
+ // no-op
+ }
+
+ @Override
+ public void setInteger(String path, int value) {
+ // no-op
+ }
+
+ @Override
+ public void setStringList(String path, List<String> value) {
+ // no-op
+ }
+
+ @Override
+ public void remove(String path) {
+ // no-op
+ }
+}
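
RuntimeConfiguration is read-only: getString maps a key onto either a "spark."-prefixed system property or a "SPARK_"-prefixed, upper-cased environment variable (dots and dashes become underscores), and all setters are no-ops. A minimal sketch of the mapping; the "backgroundProfiler" key is illustrative rather than taken from this diff:

    import me.lucko.spark.common.util.config.RuntimeConfiguration;

    class RuntimeConfigSketch {
        static void example() {
            // Reads the JVM flag -Dspark.backgroundProfiler=false, if present.
            boolean fromProps = RuntimeConfiguration.SYSTEM_PROPERTIES.getBoolean("backgroundProfiler", true);

            // Reads the environment variable SPARK_BACKGROUNDPROFILER=false, if present.
            boolean fromEnv = RuntimeConfiguration.ENVIRONMENT_VARIABLES.getBoolean("backgroundProfiler", true);
        }
    }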
diff --git a/spark-common/src/main/java/me/lucko/spark/common/ws/TrustedKeyStore.java b/spark-common/src/main/java/me/lucko/spark/common/ws/TrustedKeyStore.java
index 1605a38..0d82514 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/ws/TrustedKeyStore.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/ws/TrustedKeyStore.java
@@ -20,7 +20,7 @@
package me.lucko.spark.common.ws;
-import me.lucko.spark.common.util.Configuration;
+import me.lucko.spark.common.util.config.Configuration;
import java.security.KeyPair;
import java.security.PrivateKey;