author    Luck <git@lucko.me>  2022-09-22 22:06:10 +0100
committer Luck <git@lucko.me>  2022-09-22 22:06:10 +0100
commit    dbdd3eb1344b837abb13538b9c55d1d99e697e54 (patch)
tree      f37a53d07262641fcec9624db9410396932ddfd1 /spark-common/src/main/java/me/lucko/spark/common
parent    a42dda9eebdc8db6c310978d138708c367f95096 (diff)
Allow platforms to pass extra misc metadata to the viewer
Diffstat (limited to 'spark-common/src/main/java/me/lucko/spark/common')
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java                                          | 12
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/platform/MetadataProvider.java                            | 47
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java   | 73
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java           | 66
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java                              | 10
5 files changed, 110 insertions(+), 98 deletions(-)
diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java
index e2a2dbd..b7aef2a 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java
@@ -23,6 +23,7 @@ package me.lucko.spark.common;
import me.lucko.spark.api.Spark;
import me.lucko.spark.common.command.sender.CommandSender;
import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
+import me.lucko.spark.common.platform.MetadataProvider;
import me.lucko.spark.common.platform.PlatformInfo;
import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider;
import me.lucko.spark.common.platform.world.WorldInfoProvider;
@@ -161,7 +162,16 @@ public interface SparkPlugin {
     * @return the server config provider function
     */
    default ServerConfigProvider createServerConfigProvider() {
-        return ServerConfigProvider.NO_OP;
+        return null;
+    }
+
+    /**
+     * Creates a metadata provider for the platform.
+     *
+     * @return the platform extra metadata provider
+     */
+    default MetadataProvider createExtraMetadataProvider() {
+        return null;
    }

    /**
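
As an illustration of the new hook, a platform's SparkPlugin implementation could supply extra metadata roughly as sketched below. The class name, keys and values are hypothetical and only show the intended shape; MetadataProvider itself is added in the next file.

    import com.google.gson.JsonElement;
    import com.google.gson.JsonPrimitive;

    import me.lucko.spark.common.SparkPlugin;
    import me.lucko.spark.common.platform.MetadataProvider;

    import java.util.LinkedHashMap;
    import java.util.Map;

    // Declared abstract only so this sketch compiles without restating the rest
    // of the SparkPlugin interface; a real platform plugin implements it fully.
    public abstract class ExamplePlatformPlugin implements SparkPlugin {

        @Override
        public MetadataProvider createExtraMetadataProvider() {
            // MetadataProvider is a @FunctionalInterface, so a lambda is enough
            return () -> {
                Map<String, JsonElement> metadata = new LinkedHashMap<>();
                metadata.put("exampleLoaderVersion", new JsonPrimitive("1.2.3")); // hypothetical key/value
                metadata.put("exampleModCount", new JsonPrimitive(42));           // hypothetical key/value
                return metadata;
            };
        }
    }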
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/MetadataProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/MetadataProvider.java
new file mode 100644
index 0000000..39022b4
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/MetadataProvider.java
@@ -0,0 +1,47 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform;
+
+import com.google.gson.JsonElement;
+
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+/**
+ * Function to export dynamic metadata to be displayed within the spark viewer.
+ */
+@FunctionalInterface
+public interface MetadataProvider {
+
+    /**
+     * Produces a map of the metadata.
+     *
+     * @return the metadata
+     */
+    Map<String, JsonElement> get();
+
+    default Map<String, String> export() {
+        Map<String, String> map = new LinkedHashMap<>();
+        get().forEach((key, value) -> map.put(key, value.toString()));
+        return map;
+    }
+
+}
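
A rough usage sketch of the new interface (the key and value here are made up): get() exposes the raw JsonElements, while the export() default serialises each value to its JSON string form, which is what ends up attached to the profile metadata.

    import com.google.gson.JsonElement;
    import com.google.gson.JsonPrimitive;

    import me.lucko.spark.common.platform.MetadataProvider;

    import java.util.Collections;
    import java.util.Map;

    public final class MetadataProviderDemo {
        public static void main(String[] args) {
            // "exampleKey" is an illustrative name, not something spark defines
            MetadataProvider provider = () ->
                    Collections.<String, JsonElement>singletonMap("exampleKey", new JsonPrimitive("hello"));

            // export() turns each JsonElement into its JSON string representation
            Map<String, String> exported = provider.export();
            System.out.println(exported); // prints {exampleKey="hello"}
        }
    }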
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java
deleted file mode 100644
index 559ae95..0000000
--- a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java
+++ /dev/null
@@ -1,73 +0,0 @@
-/*
- * This file is part of spark.
- *
- * Copyright (c) lucko (Luck) <luck@lucko.me>
- * Copyright (c) contributors
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-package me.lucko.spark.common.platform.serverconfig;
-
-import com.google.common.collect.ImmutableMap;
-import com.google.gson.JsonElement;
-
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-
-/**
- * Abstract implementation of {@link ServerConfigProvider}.
- *
- * <p>This implementation is able to delete hidden paths from
- * the configurations before they are sent to the viewer.</p>
- */
-public abstract class AbstractServerConfigProvider implements ServerConfigProvider {
-    private final Map<String, ConfigParser> files;
-    private final ExcludedConfigFilter hiddenPathFilters;
-
-    protected AbstractServerConfigProvider(Map<String, ConfigParser> files, Collection<String> hiddenPaths) {
-        this.files = files;
-        this.hiddenPathFilters = new ExcludedConfigFilter(hiddenPaths);
-    }
-
-    @Override
-    public final Map<String, JsonElement> loadServerConfigurations() {
-        ImmutableMap.Builder<String, JsonElement> builder = ImmutableMap.builder();
-
-        this.files.forEach((path, parser) -> {
-            try {
-                JsonElement json = parser.load(path, this.hiddenPathFilters);
-                if (json == null) {
-                    return;
-                }
-                builder.put(path, json);
-            } catch (Exception e) {
-                e.printStackTrace();
-            }
-        });
-
-        return builder.build();
-    }
-
-    protected static List<String> getSystemPropertyList(String property) {
-        String value = System.getProperty(property);
-        return value == null
-                ? Collections.emptyList()
-                : Arrays.asList(value.split(","));
-    }
-
-}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java
index c66305f..485f215 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java
@@ -20,37 +20,57 @@
package me.lucko.spark.common.platform.serverconfig;
+import com.google.common.collect.ImmutableMap;
import com.google.gson.JsonElement;
+import me.lucko.spark.common.platform.MetadataProvider;
+
+import java.util.Arrays;
+import java.util.Collection;
import java.util.Collections;
-import java.util.LinkedHashMap;
+import java.util.List;
import java.util.Map;
/**
- * Function to export server configuration files for access within the spark viewer.
+ * Abstract implementation of {@link MetadataProvider} which
+ * provides server configuration data.
+ *
+ * <p>This implementation is able to delete hidden paths from
+ * the configurations before they are sent to the viewer.</p>
*/
-@FunctionalInterface
-public interface ServerConfigProvider {
-
-    /**
-     * Loads a map of the server configuration files.
-     *
-     * <p>The key is the name of the file and the value is a
-     * {@link JsonElement} of the contents.</p>
-     *
-     * @return the exported server configurations
-     */
-    Map<String, JsonElement> loadServerConfigurations();
-
-    default Map<String, String> exportServerConfigurations() {
-        Map<String, String> map = new LinkedHashMap<>();
-        loadServerConfigurations().forEach((key, value) -> map.put(key, value.toString()));
-        return map;
+public abstract class ServerConfigProvider implements MetadataProvider {
+    private final Map<String, ConfigParser> files;
+    private final ExcludedConfigFilter hiddenPathFilters;
+
+    protected ServerConfigProvider(Map<String, ConfigParser> files, Collection<String> hiddenPaths) {
+        this.files = files;
+        this.hiddenPathFilters = new ExcludedConfigFilter(hiddenPaths);
+    }
+
+    @Override
+    public final Map<String, JsonElement> get() {
+        ImmutableMap.Builder<String, JsonElement> builder = ImmutableMap.builder();
+
+        this.files.forEach((path, parser) -> {
+            try {
+                JsonElement json = parser.load(path, this.hiddenPathFilters);
+                if (json == null) {
+                    return;
+                }
+                builder.put(path, json);
+            } catch (Exception e) {
+                e.printStackTrace();
+            }
+        });
+
+        return builder.build();
    }

-    /**
-     * A no-op implementation
-     */
-    ServerConfigProvider NO_OP = Collections::emptyMap;
+    protected static List<String> getSystemPropertyList(String property) {
+        String value = System.getProperty(property);
+        return value == null
+                ? Collections.emptyList()
+                : Arrays.asList(value.split(","));
+    }
}
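
With this change, a platform-specific provider extends ServerConfigProvider directly instead of the old AbstractServerConfigProvider. A minimal sketch is below; it assumes spark's existing PropertiesConfigParser from the same package (not shown in this diff), and the file name and hidden paths are illustrative only.

    import com.google.common.collect.ImmutableMap;

    import me.lucko.spark.common.platform.serverconfig.ConfigParser;
    import me.lucko.spark.common.platform.serverconfig.PropertiesConfigParser;
    import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider;

    import java.util.Arrays;
    import java.util.Collection;
    import java.util.Map;

    public class ExampleServerConfigProvider extends ServerConfigProvider {

        // PropertiesConfigParser.INSTANCE is assumed from the existing serverconfig
        // package; swap in whichever ConfigParser matches the file being exposed.
        private static final Map<String, ConfigParser> FILES = ImmutableMap.of(
                "server.properties", PropertiesConfigParser.INSTANCE);

        // Illustrative hidden paths: values under these keys are stripped by
        // ExcludedConfigFilter before the config reaches the viewer.
        private static final Collection<String> HIDDEN_PATHS = Arrays.asList(
                "rcon.password", "level-seed");

        public ExampleServerConfigProvider() {
            super(FILES, HIDDEN_PATHS);
        }
    }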
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java
index 7b57504..e20a2a8 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java
@@ -23,6 +23,7 @@ package me.lucko.spark.common.sampler;
import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.command.sender.CommandSender;
import me.lucko.spark.common.monitor.memory.GarbageCollectorStatistics;
+import me.lucko.spark.common.platform.MetadataProvider;
import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider;
import me.lucko.spark.common.sampler.aggregator.DataAggregator;
import me.lucko.spark.common.sampler.node.MergeMode;
@@ -148,7 +149,14 @@ public abstract class AbstractSampler implements Sampler {
        try {
            ServerConfigProvider serverConfigProvider = platform.getPlugin().createServerConfigProvider();
-            metadata.putAllServerConfigurations(serverConfigProvider.exportServerConfigurations());
+            metadata.putAllServerConfigurations(serverConfigProvider.export());
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        try {
+            MetadataProvider extraMetadataProvider = platform.getPlugin().createExtraMetadataProvider();
+            metadata.putAllExtraPlatformMetadata(extraMetadataProvider.export());
        } catch (Exception e) {
            e.printStackTrace();
        }