-rw-r--r--  spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java  70
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java  63
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ConfigParser.java  31
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesConfigParser.java (renamed from spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesFileReader.java)  17
4 files changed, 111 insertions, 70 deletions
diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java
index 953e171..ff1b55f 100644
--- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java
+++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitServerConfigProvider.java
@@ -22,13 +22,12 @@ package me.lucko.spark.bukkit;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
-import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
-import com.google.gson.JsonElement;
import com.google.gson.JsonSerializer;
import me.lucko.spark.common.platform.serverconfig.AbstractServerConfigProvider;
-import me.lucko.spark.common.platform.serverconfig.PropertiesFileReader;
+import me.lucko.spark.common.platform.serverconfig.ConfigParser;
+import me.lucko.spark.common.platform.serverconfig.PropertiesConfigParser;
import org.bukkit.configuration.MemorySection;
import org.bukkit.configuration.file.YamlConfiguration;
@@ -37,23 +36,16 @@ import co.aikar.timings.TimingsManager;
import java.io.BufferedReader;
import java.io.IOException;
-import java.nio.charset.StandardCharsets;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.Paths;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
-public class BukkitServerConfigProvider extends AbstractServerConfigProvider<BukkitServerConfigProvider.FileType> {
- private static final Gson GSON = new GsonBuilder()
- .registerTypeAdapter(MemorySection.class, (JsonSerializer<MemorySection>) (obj, type, ctx) -> ctx.serialize(obj.getValues(false)))
- .create();
+public class BukkitServerConfigProvider extends AbstractServerConfigProvider {
/** A map of provided files and their type */
- private static final Map<String, FileType> FILES;
+ private static final Map<String, ConfigParser> FILES;
/** A collection of paths to be excluded from the files */
private static final Collection<String> HIDDEN_PATHS;
@@ -62,50 +54,46 @@ public class BukkitServerConfigProvider extends AbstractServerConfigProvider<Buk
}
@Override
- protected JsonElement load(String path, FileType type) throws IOException {
- Path filePath = Paths.get(path);
- if (!Files.exists(filePath)) {
- return null;
- }
+ protected void customiseGson(GsonBuilder gson) {
+ gson.registerTypeAdapter(MemorySection.class, (JsonSerializer<MemorySection>) (obj, type, ctx) -> ctx.serialize(obj.getValues(false)));
+ }
- try (BufferedReader reader = Files.newBufferedReader(filePath, StandardCharsets.UTF_8)) {
- Map<String, Object> values;
+ @Override
+ protected String rewriteConfigPath(String path) {
+ return path.startsWith("config/")
+ ? path.substring("config/".length())
+ : path;
+ }
- if (type == FileType.PROPERTIES) {
- PropertiesFileReader propertiesReader = new PropertiesFileReader(reader);
- values = propertiesReader.readProperties();
- } else if (type == FileType.YAML) {
- YamlConfiguration config = YamlConfiguration.loadConfiguration(reader);
- values = config.getValues(false);
- } else {
- throw new IllegalArgumentException("Unknown file type: " + type);
- }
+ private enum YamlConfigParser implements ConfigParser {
+ INSTANCE;
- return GSON.toJsonTree(values);
+ @Override
+ public Map<String, Object> parse(BufferedReader reader) throws IOException {
+ YamlConfiguration config = YamlConfiguration.loadConfiguration(reader);
+ return config.getValues(false);
}
}
- enum FileType {
- PROPERTIES,
- YAML
- }
-
static {
- ImmutableMap.Builder<String, FileType> files = ImmutableMap.<String, FileType>builder()
- .put("server.properties", FileType.PROPERTIES)
- .put("bukkit.yml", FileType.YAML)
- .put("spigot.yml", FileType.YAML)
- .put("paper.yml", FileType.YAML)
- .put("purpur.yml", FileType.YAML);
+ ImmutableMap.Builder<String, ConfigParser> files = ImmutableMap.<String, ConfigParser>builder()
+ .put("server.properties", PropertiesConfigParser.INSTANCE)
+ .put("bukkit.yml", YamlConfigParser.INSTANCE)
+ .put("spigot.yml", YamlConfigParser.INSTANCE)
+ .put("paper.yml", YamlConfigParser.INSTANCE)
+ .put("config/paper-global.yml", YamlConfigParser.INSTANCE)
+ .put("config/paper-world-defaults.yml", YamlConfigParser.INSTANCE)
+ .put("purpur.yml", YamlConfigParser.INSTANCE);
for (String config : getSystemPropertyList("spark.serverconfigs.extra")) {
- files.put(config, FileType.YAML);
+ files.put(config, YamlConfigParser.INSTANCE);
}
ImmutableSet.Builder<String> hiddenPaths = ImmutableSet.<String>builder()
.add("database")
.add("settings.bungeecord-addresses")
.add("settings.velocity-support.secret")
+ .add("proxies.velocity.secret")
.add("server-ip")
.add("motd")
.add("resource-pack")
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java
index ead2131..0eef111 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java
@@ -21,10 +21,17 @@
package me.lucko.spark.common.platform.serverconfig;
import com.google.common.collect.ImmutableMap;
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
+import java.io.BufferedReader;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
@@ -38,29 +45,37 @@ import java.util.stream.Collectors;
*
* <p>This implementation is able to delete hidden paths from
* the configurations before they are sent to the viewer.</p>
- *
- * @param <T> the file type
*/
-public abstract class AbstractServerConfigProvider<T extends Enum<T>> implements ServerConfigProvider {
- private final Map<String, T> files;
+public abstract class AbstractServerConfigProvider implements ServerConfigProvider {
+ private final Map<String, ConfigParser> files;
private final Collection<String> hiddenPaths;
- protected AbstractServerConfigProvider(Map<String, T> files, Collection<String> hiddenPaths) {
+ private final Gson gson;
+
+ protected AbstractServerConfigProvider(Map<String, ConfigParser> files, Collection<String> hiddenPaths) {
this.files = files;
this.hiddenPaths = hiddenPaths;
+
+ GsonBuilder gson = new GsonBuilder();
+ customiseGson(gson);
+ this.gson = gson.create();
}
@Override
public final Map<String, JsonElement> loadServerConfigurations() {
ImmutableMap.Builder<String, JsonElement> builder = ImmutableMap.builder();
- this.files.forEach((path, type) -> {
+ this.files.forEach((path, reader) -> {
try {
- JsonElement json = load(path, type);
- if (json != null) {
- delete(json, this.hiddenPaths);
- builder.put(path, json);
+ JsonElement json = load(path, reader);
+ if (json == null) {
+ return;
}
+
+ delete(json, this.hiddenPaths);
+
+ String name = rewriteConfigPath(path);
+ builder.put(name, json);
} catch (Exception e) {
e.printStackTrace();
}
@@ -69,15 +84,25 @@ public abstract class AbstractServerConfigProvider<T extends Enum<T>> implements
return builder.build();
}
- /**
- * Loads a file from the system.
- *
- * @param path the name of the file to load
- * @param type the type of the file
- * @return the loaded file
- * @throws IOException if an error occurs performing i/o
- */
- protected abstract JsonElement load(String path, T type) throws IOException;
+ private JsonElement load(String path, ConfigParser parser) throws IOException {
+ Path filePath = Paths.get(path);
+ if (!Files.exists(filePath)) {
+ return null;
+ }
+
+ try (BufferedReader reader = Files.newBufferedReader(filePath, StandardCharsets.UTF_8)) {
+ Map<String, Object> values = parser.parse(reader);
+ return this.gson.toJsonTree(values);
+ }
+ }
+
+ protected void customiseGson(GsonBuilder gson) {
+
+ }
+
+ protected String rewriteConfigPath(String path) {
+ return path;
+ }
/**
* Deletes the given paths from the json element.
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ConfigParser.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ConfigParser.java
new file mode 100644
index 0000000..2dd15fe
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ConfigParser.java
@@ -0,0 +1,31 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform.serverconfig;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.util.Map;
+
+public interface ConfigParser {
+
+ Map<String, Object> parse(BufferedReader reader) throws IOException;
+
+}
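
The new ConfigParser interface above is the extension point that replaces the old per-platform FileType enum: each platform now maps config file names to parser instances. As a minimal sketch of what an additional parser could look like — assuming a hypothetical JsonConfigParser that is not part of this commit, reusing the Gson dependency already present in spark-common and following the same enum-singleton pattern as PropertiesConfigParser:

package me.lucko.spark.common.platform.serverconfig;

import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;

import java.io.BufferedReader;
import java.io.IOException;
import java.util.Map;

/**
 * Hypothetical {@link ConfigParser} for .json configuration files.
 */
public enum JsonConfigParser implements ConfigParser {
    INSTANCE;

    private static final Gson GSON = new Gson();

    @Override
    public Map<String, Object> parse(BufferedReader reader) throws IOException {
        // read the whole document into the generic Map<String, Object> shape
        // that AbstractServerConfigProvider converts to a JsonElement
        return GSON.fromJson(reader, new TypeToken<Map<String, Object>>() {}.getType());
    }
}

Such a parser would be registered like the others, e.g. files.put("some-config.json", JsonConfigParser.INSTANCE) in a platform's static initialiser; the file name and class here are purely illustrative.
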
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesFileReader.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesConfigParser.java
index 8fc89d7..4c7c2c1 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesFileReader.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesConfigParser.java
@@ -20,25 +20,22 @@
package me.lucko.spark.common.platform.serverconfig;
-import java.io.FilterReader;
+import java.io.BufferedReader;
import java.io.IOException;
-import java.io.Reader;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
/**
- * A {@link Reader} that can parse a .properties file.
+ * A {@link ConfigParser} that can parse a .properties file.
*/
-public class PropertiesFileReader extends FilterReader {
+public enum PropertiesConfigParser implements ConfigParser {
+ INSTANCE;
- public PropertiesFileReader(Reader in) {
- super(in);
- }
-
- public Map<String, Object> readProperties() throws IOException {
+ @Override
+ public Map<String, Object> parse(BufferedReader reader) throws IOException {
Properties properties = new Properties();
- properties.load(this);
+ properties.load(reader);
Map<String, Object> values = new HashMap<>();
properties.forEach((k, v) -> {