Diffstat (limited to 'spark-common/src/main/java')
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java | 19
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/command/modules/ActivityLogModule.java | 1
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java | 2
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java | 15
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java | 95
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ConfigParser.java | 48
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ExcludedConfigFilter.java | 97
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesConfigParser.java (renamed from spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesFileReader.java) | 29
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java | 11
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/platform/world/AbstractChunkInfo.java | 55
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/platform/world/ChunkInfo.java | 44
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/platform/world/CountMap.java | 110
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldInfoProvider.java | 57
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java | 220
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java | 36
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java | 2
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java | 6
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java | 10
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java | 12
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java | 15
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/util/SparkPlaceholder.java | 191
21 files changed, 960 insertions, 115 deletions
diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java
index b817df1..1116b04 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java
@@ -25,6 +25,7 @@ import me.lucko.spark.common.command.sender.CommandSender;
import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
import me.lucko.spark.common.platform.PlatformInfo;
import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
import me.lucko.spark.common.sampler.ThreadDumper;
import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.common.tick.TickReporter;
@@ -75,6 +76,15 @@ public interface SparkPlugin {
void executeAsync(Runnable task);
/**
+ * Executes the given {@link Runnable} on the server/client main thread.
+ *
+ * @param task the task
+ */
+ default void executeSync(Runnable task) {
+ throw new UnsupportedOperationException();
+ }
+
+ /**
* Print to the plugin logger.
*
* @param level the log level
@@ -143,6 +153,15 @@ public interface SparkPlugin {
}
/**
+ * Creates a world info provider.
+ *
+ * @return the world info provider
+ */
+ default WorldInfoProvider createWorldInfoProvider() {
+ return WorldInfoProvider.NO_OP;
+ }
+
+ /**
* Gets information for the platform.
*
* @return information about the platform
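The two defaults added above are opt-in capabilities: executeSync(...) throws unless the platform can schedule main-thread work, and createWorldInfoProvider() falls back to a no-op provider. A minimal sketch of how a platform module might override them; the scheduler call and provider class are illustrative assumptions, not part of this commit:

    public class ExamplePlatformPlugin implements SparkPlugin {
        @Override
        public void executeSync(Runnable task) {
            // assumption: the host platform exposes some main-thread scheduler
            ExampleScheduler.runOnMainThread(task);
        }

        @Override
        public WorldInfoProvider createWorldInfoProvider() {
            return new ExampleWorldInfoProvider(); // hypothetical platform-specific provider
        }

        // ...remaining SparkPlugin members omitted...
    }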
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/ActivityLogModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/ActivityLogModule.java
index b777f3e..6252ac7 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/ActivityLogModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/ActivityLogModule.java
@@ -50,6 +50,7 @@ import static net.kyori.adventure.text.format.TextDecoration.BOLD;
public class ActivityLogModule implements CommandModule, RowRenderer<Activity> {
private final Pagination.Builder pagination = Pagination.builder()
+ .width(45)
.renderer(new Renderer() {
@Override
public Component renderEmpty() {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
index 970d062..fd5cd67 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
@@ -266,7 +266,7 @@ public class SamplerModule implements CommandModule {
if (this.activeSampler == null) {
resp.replyPrefixed(text("There isn't an active profiler running."));
} else {
- long timeout = this.activeSampler.getEndTime();
+ long timeout = this.activeSampler.getAutoEndTime();
if (timeout == -1) {
resp.replyPrefixed(text("There is an active profiler currently running, with no defined timeout."));
} else {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java
index f35bbbe..49cfed5 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java
@@ -30,8 +30,11 @@ import me.lucko.spark.common.monitor.net.NetworkInterfaceAverages;
import me.lucko.spark.common.monitor.net.NetworkMonitor;
import me.lucko.spark.common.monitor.ping.PingStatistics;
import me.lucko.spark.common.monitor.tick.TickStatistics;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
+import me.lucko.spark.common.platform.world.WorldStatisticsProvider;
import me.lucko.spark.proto.SparkProtos.PlatformStatistics;
import me.lucko.spark.proto.SparkProtos.SystemStatistics;
+import me.lucko.spark.proto.SparkProtos.WorldStatistics;
import java.lang.management.ManagementFactory;
import java.lang.management.MemoryUsage;
@@ -182,6 +185,18 @@ public class PlatformStatisticsProvider {
builder.setPlayerCount(playerCount);
}
+ try {
+ WorldInfoProvider worldInfo = this.platform.getPlugin().createWorldInfoProvider();
+ WorldStatisticsProvider worldStatisticsProvider = new WorldStatisticsProvider(this.platform, worldInfo);
+ WorldStatistics worldStatistics = worldStatisticsProvider.getWorldStatistics();
+ if (worldStatistics != null) {
+ builder.setWorld(worldStatistics);
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+
+
return builder.build();
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java
index ead2131..559ae95 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java
@@ -22,45 +22,39 @@ package me.lucko.spark.common.platform.serverconfig;
import com.google.common.collect.ImmutableMap;
import com.google.gson.JsonElement;
-import com.google.gson.JsonObject;
-import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
-import java.util.Deque;
-import java.util.LinkedList;
+import java.util.List;
import java.util.Map;
-import java.util.stream.Collectors;
/**
* Abstract implementation of {@link ServerConfigProvider}.
*
* <p>This implementation is able to delete hidden paths from
* the configurations before they are sent to the viewer.</p>
- *
- * @param <T> the file type
*/
-public abstract class AbstractServerConfigProvider<T extends Enum<T>> implements ServerConfigProvider {
- private final Map<String, T> files;
- private final Collection<String> hiddenPaths;
+public abstract class AbstractServerConfigProvider implements ServerConfigProvider {
+ private final Map<String, ConfigParser> files;
+ private final ExcludedConfigFilter hiddenPathFilters;
- protected AbstractServerConfigProvider(Map<String, T> files, Collection<String> hiddenPaths) {
+ protected AbstractServerConfigProvider(Map<String, ConfigParser> files, Collection<String> hiddenPaths) {
this.files = files;
- this.hiddenPaths = hiddenPaths;
+ this.hiddenPathFilters = new ExcludedConfigFilter(hiddenPaths);
}
@Override
public final Map<String, JsonElement> loadServerConfigurations() {
ImmutableMap.Builder<String, JsonElement> builder = ImmutableMap.builder();
- this.files.forEach((path, type) -> {
+ this.files.forEach((path, parser) -> {
try {
- JsonElement json = load(path, type);
- if (json != null) {
- delete(json, this.hiddenPaths);
- builder.put(path, json);
+ JsonElement json = parser.load(path, this.hiddenPathFilters);
+ if (json == null) {
+ return;
}
+ builder.put(path, json);
} catch (Exception e) {
e.printStackTrace();
}
@@ -69,68 +63,11 @@ public abstract class AbstractServerConfigProvider<T extends Enum<T>> implements
return builder.build();
}
- /**
- * Loads a file from the system.
- *
- * @param path the name of the file to load
- * @param type the type of the file
- * @return the loaded file
- * @throws IOException if an error occurs performing i/o
- */
- protected abstract JsonElement load(String path, T type) throws IOException;
-
- /**
- * Deletes the given paths from the json element.
- *
- * @param json the json element
- * @param paths the paths to delete
- */
- private static void delete(JsonElement json, Collection<String> paths) {
- for (String path : paths) {
- Deque<String> pathDeque = new LinkedList<>(Arrays.asList(path.split("\\.")));
- delete(json, pathDeque);
- }
- }
-
- private static void delete(JsonElement json, Deque<String> path) {
- if (path.isEmpty()) {
- return;
- }
- if (!json.isJsonObject()) {
- return;
- }
-
- JsonObject jsonObject = json.getAsJsonObject();
- String expected = path.removeFirst().replace("<dot>", ".");
-
- Collection<String> keys;
- if (expected.equals("*")) {
- keys = jsonObject.entrySet().stream()
- .map(Map.Entry::getKey)
- .collect(Collectors.toList());
- } else if (expected.endsWith("*")) {
- String pattern = expected.substring(0, expected.length() - 1);
- keys = jsonObject.entrySet().stream()
- .map(Map.Entry::getKey)
- .filter(key -> key.startsWith(pattern))
- .collect(Collectors.toList());
- } else if (jsonObject.has(expected)) {
- keys = Collections.singletonList(expected);
- } else {
- keys = Collections.emptyList();
- }
-
- for (String key : keys) {
- if (path.isEmpty()) {
- jsonObject.remove(key);
- } else {
- Deque<String> pathCopy = keys.size() > 1
- ? new LinkedList<>(path)
- : path;
-
- delete(jsonObject.get(key), pathCopy);
- }
- }
+ protected static List<String> getSystemPropertyList(String property) {
+ String value = System.getProperty(property);
+ return value == null
+ ? Collections.emptyList()
+ : Arrays.asList(value.split(","));
}
}
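The new getSystemPropertyList helper splits a comma-separated system property into a list, or returns an empty list when the property is unset. A self-contained check of that behaviour (the property names are invented for the demo):

    import java.util.Arrays;
    import java.util.Collections;
    import java.util.List;

    public class SystemPropertyListDemo {
        // same logic as the protected helper above
        static List<String> getSystemPropertyList(String property) {
            String value = System.getProperty(property);
            return value == null ? Collections.emptyList() : Arrays.asList(value.split(","));
        }

        public static void main(String[] args) {
            System.setProperty("demo.hidden-paths", "database,rcon<dot>password");
            System.out.println(getSystemPropertyList("demo.hidden-paths")); // [database, rcon<dot>password]
            System.out.println(getSystemPropertyList("demo.unset"));        // []
        }
    }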
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ConfigParser.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ConfigParser.java
new file mode 100644
index 0000000..675a32e
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ConfigParser.java
@@ -0,0 +1,48 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform.serverconfig;
+
+import com.google.gson.JsonElement;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.Map;
+
+public interface ConfigParser {
+
+ JsonElement load(String file, ExcludedConfigFilter filter) throws IOException;
+
+ default Map<String, Object> parse(Path file) throws IOException {
+ if (!Files.exists(file)) {
+ return null;
+ }
+
+ try (BufferedReader reader = Files.newBufferedReader(file, StandardCharsets.UTF_8)) {
+ return this.parse(reader);
+ }
+ }
+
+ Map<String, Object> parse(BufferedReader reader) throws IOException;
+
+}
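The interface splits responsibilities: load() produces filtered JSON for the viewer, parse(BufferedReader) handles the raw format, and the default parse(Path) supplies the file handling (returning null for missing files). A minimal illustrative implementation for a plain key=value format, not part of this commit:

    import com.google.gson.Gson;
    import com.google.gson.JsonElement;

    import me.lucko.spark.common.platform.serverconfig.ConfigParser;
    import me.lucko.spark.common.platform.serverconfig.ExcludedConfigFilter;

    import java.io.BufferedReader;
    import java.io.IOException;
    import java.nio.file.Paths;
    import java.util.LinkedHashMap;
    import java.util.Map;

    public enum SimpleKeyValueParser implements ConfigParser {
        INSTANCE;

        private static final Gson GSON = new Gson();

        @Override
        public JsonElement load(String file, ExcludedConfigFilter filter) throws IOException {
            Map<String, Object> values = this.parse(Paths.get(file)); // default method: null if missing
            return values == null ? null : filter.apply(GSON.toJsonTree(values));
        }

        @Override
        public Map<String, Object> parse(BufferedReader reader) throws IOException {
            Map<String, Object> values = new LinkedHashMap<>();
            String line;
            while ((line = reader.readLine()) != null) {
                int idx = line.indexOf('=');
                if (idx > 0) {
                    values.put(line.substring(0, idx).trim(), line.substring(idx + 1).trim());
                }
            }
            return values;
        }
    }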
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ExcludedConfigFilter.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ExcludedConfigFilter.java
new file mode 100644
index 0000000..c11c7f8
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ExcludedConfigFilter.java
@@ -0,0 +1,97 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform.serverconfig;
+
+import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
+
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Deque;
+import java.util.LinkedList;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+/**
+ * Filters excluded paths from {@link JsonElement}s (parsed configuration files).
+ */
+public class ExcludedConfigFilter {
+ private final Collection<String> pathsToExclude;
+
+ public ExcludedConfigFilter(Collection<String> pathsToExclude) {
+ this.pathsToExclude = pathsToExclude;
+ }
+
+ /**
+ * Deletes the excluded paths from the json element.
+ *
+ * @param json the json element
+ * @return the same element, with the excluded paths removed
+ */
+ public JsonElement apply(JsonElement json) {
+ for (String path : this.pathsToExclude) {
+ Deque<String> pathDeque = new LinkedList<>(Arrays.asList(path.split("\\.")));
+ delete(json, pathDeque);
+ }
+ return json;
+ }
+
+ private static void delete(JsonElement json, Deque<String> path) {
+ if (path.isEmpty()) {
+ return;
+ }
+ if (!json.isJsonObject()) {
+ return;
+ }
+
+ JsonObject jsonObject = json.getAsJsonObject();
+ String expected = path.removeFirst().replace("<dot>", ".");
+
+ Collection<String> keys;
+ if (expected.equals("*")) {
+ keys = jsonObject.entrySet().stream()
+ .map(Map.Entry::getKey)
+ .collect(Collectors.toList());
+ } else if (expected.endsWith("*")) {
+ String pattern = expected.substring(0, expected.length() - 1);
+ keys = jsonObject.entrySet().stream()
+ .map(Map.Entry::getKey)
+ .filter(key -> key.startsWith(pattern))
+ .collect(Collectors.toList());
+ } else if (jsonObject.has(expected)) {
+ keys = Collections.singletonList(expected);
+ } else {
+ keys = Collections.emptyList();
+ }
+
+ for (String key : keys) {
+ if (path.isEmpty()) {
+ jsonObject.remove(key);
+ } else {
+ Deque<String> pathCopy = keys.size() > 1
+ ? new LinkedList<>(path)
+ : path;
+
+ delete(jsonObject.get(key), pathCopy);
+ }
+ }
+ }
+}
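The filter handles three pattern forms: an exact dotted path, a trailing "*" wildcard on the last segment, and a bare "*" matching every key, with "<dot>" escaping a literal dot inside a key. A runnable demonstration (the JSON content is invented for the demo):

    import com.google.gson.Gson;
    import com.google.gson.JsonElement;

    import me.lucko.spark.common.platform.serverconfig.ExcludedConfigFilter;

    import java.util.Arrays;

    public class ExcludedConfigFilterDemo {
        public static void main(String[] args) {
            JsonElement json = new Gson().fromJson(
                    "{\"database\":{\"host\":\"localhost\",\"password\":\"secret\"},"
                    + "\"rcon.password\":\"secret\",\"motd\":\"hello\"}",
                    JsonElement.class);

            ExcludedConfigFilter filter = new ExcludedConfigFilter(
                    Arrays.asList("database.pass*", "rcon<dot>password"));

            System.out.println(filter.apply(json));
            // {"database":{"host":"localhost"},"motd":"hello"}
        }
    }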
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesFileReader.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesConfigParser.java
index 8fc89d7..344ba1c 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesFileReader.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesConfigParser.java
@@ -20,25 +20,38 @@
package me.lucko.spark.common.platform.serverconfig;
-import java.io.FilterReader;
+import com.google.gson.Gson;
+import com.google.gson.JsonElement;
+
+import java.io.BufferedReader;
import java.io.IOException;
-import java.io.Reader;
+import java.nio.file.Paths;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
/**
- * A {@link Reader} that can parse a .properties file.
+ * A {@link ConfigParser} that can parse a .properties file.
*/
-public class PropertiesFileReader extends FilterReader {
+public enum PropertiesConfigParser implements ConfigParser {
+ INSTANCE;
+
+ private static final Gson GSON = new Gson();
+
+ @Override
+ public JsonElement load(String file, ExcludedConfigFilter filter) throws IOException {
+ Map<String, Object> values = this.parse(Paths.get(file));
+ if (values == null) {
+ return null;
+ }
- public PropertiesFileReader(Reader in) {
- super(in);
+ return filter.apply(GSON.toJsonTree(values));
}
- public Map<String, Object> readProperties() throws IOException {
+ @Override
+ public Map<String, Object> parse(BufferedReader reader) throws IOException {
Properties properties = new Properties();
- properties.load(this);
+ properties.load(reader);
Map<String, Object> values = new HashMap<>();
properties.forEach((k, v) -> {
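End to end, the renamed parser is now a stateless singleton: given a file name it parses the properties, converts them to a JSON tree via Gson, and applies the exclusion filter in one call. A usage sketch, assuming a server.properties file in the working directory:

    import com.google.gson.JsonElement;

    import me.lucko.spark.common.platform.serverconfig.ExcludedConfigFilter;
    import me.lucko.spark.common.platform.serverconfig.PropertiesConfigParser;

    import java.io.IOException;
    import java.util.Collections;

    public class PropertiesParserDemo {
        public static void main(String[] args) throws IOException {
            ExcludedConfigFilter filter =
                    new ExcludedConfigFilter(Collections.singletonList("rcon<dot>password"));
            JsonElement json = PropertiesConfigParser.INSTANCE.load("server.properties", filter);
            System.out.println(json); // null if the file does not exist
        }
    }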
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java
index 1fc2391..c66305f 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java
@@ -23,8 +23,8 @@ package me.lucko.spark.common.platform.serverconfig;
import com.google.gson.JsonElement;
import java.util.Collections;
+import java.util.LinkedHashMap;
import java.util.Map;
-import java.util.stream.Collectors;
/**
* Function to export server configuration files for access within the spark viewer.
@@ -43,12 +43,9 @@ public interface ServerConfigProvider {
Map<String, JsonElement> loadServerConfigurations();
default Map<String, String> exportServerConfigurations() {
- return loadServerConfigurations().entrySet()
- .stream()
- .collect(Collectors.toMap(
- Map.Entry::getKey,
- e -> e.getValue().toString()
- ));
+ Map<String, String> map = new LinkedHashMap<>();
+ loadServerConfigurations().forEach((key, value) -> map.put(key, value.toString()));
+ return map;
}
/**
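A plausible motivation for dropping Collectors.toMap here (the commit message is not shown): toMap throws a NullPointerException on null values and collects into an unordered HashMap, while the explicit LinkedHashMap preserves the iteration order of loadServerConfigurations(). A small check of the ordering behaviour:

    import java.util.LinkedHashMap;
    import java.util.Map;

    public class ExportOrderDemo {
        public static void main(String[] args) {
            Map<String, String> map = new LinkedHashMap<>();
            map.put("server.properties", "{}");
            map.put("bukkit.yml", "{}");
            map.put("spigot.yml", "{}");
            // insertion order is preserved: [server.properties, bukkit.yml, spigot.yml]
            System.out.println(map.keySet());
        }
    }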
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/AbstractChunkInfo.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/AbstractChunkInfo.java
new file mode 100644
index 0000000..80026cd
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/AbstractChunkInfo.java
@@ -0,0 +1,55 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform.world;
+
+public abstract class AbstractChunkInfo<E> implements ChunkInfo<E> {
+ private final int x;
+ private final int z;
+
+ protected AbstractChunkInfo(int x, int z) {
+ this.x = x;
+ this.z = z;
+ }
+
+ @Override
+ public int getX() {
+ return this.x;
+ }
+
+ @Override
+ public int getZ() {
+ return this.z;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (obj == this) return true;
+ if (!(obj instanceof AbstractChunkInfo)) return false;
+ AbstractChunkInfo<?> that = (AbstractChunkInfo<?>) obj;
+ return this.x == that.x && this.z == that.z;
+ }
+
+ @Override
+ public int hashCode() {
+ return this.x ^ this.z;
+ }
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/ChunkInfo.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/ChunkInfo.java
new file mode 100644
index 0000000..2193a50
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/ChunkInfo.java
@@ -0,0 +1,44 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform.world;
+
+/**
+ * Information about a given chunk.
+ *
+ * @param <E> the type used to describe entities
+ */
+public interface ChunkInfo<E> {
+
+ int getX();
+
+ int getZ();
+
+ CountMap<E> getEntityCounts();
+
+ /**
+ * Converts entity type {@link E} to a string.
+ *
+ * @param type the entity type
+ * @return a string
+ */
+ String entityTypeName(E type);
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/CountMap.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/CountMap.java
new file mode 100644
index 0000000..3083266
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/CountMap.java
@@ -0,0 +1,110 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform.world;
+
+import java.util.EnumMap;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicInteger;
+
+/**
+ * A map of (key) -> count.
+ *
+ * @param <T> the key type
+ */
+public interface CountMap<T> {
+
+ /**
+ * Increment the counter for the given key
+ *
+ * @param key the key
+ */
+ void increment(T key);
+
+ /**
+ * Add to the counter for the given key.
+ *
+ * @param key the key
+ * @param delta the amount to add
+ */
+ void add(T key, int delta);
+
+ AtomicInteger total();
+
+ Map<T, AtomicInteger> asMap();
+
+ /**
+ * A simple {@link CountMap} backed by the provided {@link Map}
+ *
+ * @param <T> the key type
+ */
+ class Simple<T> implements CountMap<T> {
+ private final Map<T, AtomicInteger> counts;
+ private final AtomicInteger total;
+
+ public Simple(Map<T, AtomicInteger> counts) {
+ this.counts = counts;
+ this.total = new AtomicInteger();
+ }
+
+ @Override
+ public void increment(T key) {
+ AtomicInteger counter = this.counts.get(key);
+ if (counter == null) {
+ counter = new AtomicInteger();
+ this.counts.put(key, counter);
+ }
+ counter.incrementAndGet();
+ this.total.incrementAndGet();
+ }
+
+ @Override
+ public void add(T key, int delta) {
+ AtomicInteger counter = this.counts.get(key);
+ if (counter == null) {
+ counter = new AtomicInteger();
+ this.counts.put(key, counter);
+ }
+ counter.addAndGet(delta);
+ this.total.addAndGet(delta);
+ }
+
+ @Override
+ public AtomicInteger total() {
+ return this.total;
+ }
+
+ @Override
+ public Map<T, AtomicInteger> asMap() {
+ return this.counts;
+ }
+ }
+
+ /**
+ * A {@link CountMap} backed by an {@link EnumMap}.
+ *
+ * @param <T> the key type - must be an enum
+ */
+ class EnumKeyed<T extends Enum<T>> extends Simple<T> {
+ public EnumKeyed(Class<T> keyClass) {
+ super(new EnumMap<>(keyClass));
+ }
+ }
+
+}
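Note that the get/put sequence in increment() and add() is not atomic, so despite the AtomicInteger values a Simple instance is presumably meant for single-threaded aggregation during a poll. A quick illustrative use, pairing CountMap with a toy ChunkInfo built on AbstractChunkInfo (names invented for the demo):

    import me.lucko.spark.common.platform.world.AbstractChunkInfo;
    import me.lucko.spark.common.platform.world.CountMap;

    import java.util.HashMap;

    public class ChunkCountDemo {
        static class StringChunkInfo extends AbstractChunkInfo<String> {
            private final CountMap<String> counts = new CountMap.Simple<>(new HashMap<>());

            StringChunkInfo(int x, int z) {
                super(x, z);
            }

            @Override
            public CountMap<String> getEntityCounts() {
                return this.counts;
            }

            @Override
            public String entityTypeName(String type) {
                return type; // keys are already strings
            }
        }

        public static void main(String[] args) {
            StringChunkInfo chunk = new StringChunkInfo(0, 0);
            chunk.getEntityCounts().increment("minecraft:cow");
            chunk.getEntityCounts().add("minecraft:zombie", 4);
            System.out.println(chunk.getEntityCounts().asMap()); // counts per type (iteration order unspecified)
            System.out.println(chunk.getEntityCounts().total()); // 5
        }
    }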
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldInfoProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldInfoProvider.java
new file mode 100644
index 0000000..9494816
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldInfoProvider.java
@@ -0,0 +1,57 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform.world;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Provides information about worlds.
+ */
+public interface WorldInfoProvider {
+
+ WorldInfoProvider NO_OP = () -> null;
+
+ /**
+ * Polls for information.
+ *
+ * @return the information
+ */
+ Result<? extends ChunkInfo<?>> poll();
+
+ default boolean mustCallSync() {
+ return true;
+ }
+
+ final class Result<T> {
+ private final Map<String, List<T>> worlds = new HashMap<>();
+
+ public void put(String worldName, List<T> chunks) {
+ this.worlds.put(worldName, chunks);
+ }
+
+ public Map<String, List<T>> getWorlds() {
+ return this.worlds;
+ }
+ }
+
+}
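A toy implementation for illustration, reusing the StringChunkInfo sketch from the CountMap example above: one world containing one chunk. mustCallSync() keeps its default of true, so callers poll it on the main thread via SparkPlugin#executeSync.

    import me.lucko.spark.common.platform.world.ChunkInfo;
    import me.lucko.spark.common.platform.world.WorldInfoProvider;

    import java.util.Collections;

    public class FixedWorldInfoProvider implements WorldInfoProvider {
        @Override
        public Result<ChunkInfo<String>> poll() {
            Result<ChunkInfo<String>> result = new Result<>();
            ChunkInfo<String> chunk = new ChunkCountDemo.StringChunkInfo(0, 0);
            chunk.getEntityCounts().add("minecraft:cow", 3);
            result.put("world", Collections.singletonList(chunk));
            return result;
        }
    }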
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java
new file mode 100644
index 0000000..80c35a6
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java
@@ -0,0 +1,220 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform.world;
+
+import me.lucko.spark.common.SparkPlatform;
+import me.lucko.spark.common.SparkPlugin;
+import me.lucko.spark.proto.SparkProtos.WorldStatistics;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Set;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.logging.Level;
+
+public class WorldStatisticsProvider {
+ private final SparkPlatform platform;
+ private final WorldInfoProvider provider;
+
+ public WorldStatisticsProvider(SparkPlatform platform, WorldInfoProvider provider) {
+ this.platform = platform;
+ this.provider = provider;
+ }
+
+ public WorldStatistics getWorldStatistics() {
+ if (this.provider == WorldInfoProvider.NO_OP) {
+ return null;
+ }
+
+ CompletableFuture<WorldInfoProvider.Result<? extends ChunkInfo<?>>> future;
+
+ if (this.provider.mustCallSync()) {
+ SparkPlugin plugin = this.platform.getPlugin();
+ future = CompletableFuture.supplyAsync(this.provider::poll, plugin::executeSync);
+ } else {
+ future = CompletableFuture.completedFuture(this.provider.poll());
+ }
+
+ WorldInfoProvider.Result<? extends ChunkInfo<?>> result;
+ try {
+ result = future.get(5, TimeUnit.SECONDS);
+ } catch (InterruptedException | ExecutionException e) {
+ throw new RuntimeException(e);
+ } catch (TimeoutException e) {
+ this.platform.getPlugin().log(Level.WARNING, "Timed out waiting for world statistics");
+ return null;
+ }
+
+ if (result == null) {
+ return null;
+ }
+
+ WorldStatistics.Builder stats = WorldStatistics.newBuilder();
+
+ AtomicInteger combinedTotal = new AtomicInteger();
+ CountMap<String> combined = new CountMap.Simple<>(new HashMap<>());
+
+ result.getWorlds().forEach((worldName, chunks) -> {
+ WorldStatistics.World.Builder builder = WorldStatistics.World.newBuilder();
+ builder.setName(worldName);
+
+ List<Region> regions = groupIntoRegions(chunks);
+
+ int total = 0;
+
+ for (Region region : regions) {
+ builder.addRegions(regionToProto(region, combined));
+ total += region.getTotalEntities().get();
+ }
+
+ builder.setTotalEntities(total);
+ combinedTotal.addAndGet(total);
+
+ stats.addWorlds(builder.build());
+ });
+
+ stats.setTotalEntities(combinedTotal.get());
+ combined.asMap().forEach((key, value) -> stats.putEntityCounts(key, value.get()));
+
+ return stats.build();
+ }
+
+ private static WorldStatistics.Region regionToProto(Region region, CountMap<String> combined) {
+ WorldStatistics.Region.Builder builder = WorldStatistics.Region.newBuilder();
+ builder.setTotalEntities(region.getTotalEntities().get());
+ for (ChunkInfo<?> chunk : region.getChunks()) {
+ builder.addChunks(chunkToProto(chunk, combined));
+ }
+ return builder.build();
+ }
+
+ private static <E> WorldStatistics.Chunk chunkToProto(ChunkInfo<E> chunk, CountMap<String> combined) {
+ WorldStatistics.Chunk.Builder builder = WorldStatistics.Chunk.newBuilder();
+ builder.setX(chunk.getX());
+ builder.setZ(chunk.getZ());
+ builder.setTotalEntities(chunk.getEntityCounts().total().get());
+ chunk.getEntityCounts().asMap().forEach((key, value) -> {
+ String name = chunk.entityTypeName(key);
+ int count = value.get();
+
+ if (name == null) {
+ name = "unknown[" + key.toString() + "]";
+ }
+
+ builder.putEntityCounts(name, count);
+ combined.add(name, count);
+ });
+ return builder.build();
+ }
+
+ private static List<Region> groupIntoRegions(List<? extends ChunkInfo<?>> chunks) {
+ List<Region> regions = new ArrayList<>();
+
+ for (ChunkInfo<?> chunk : chunks) {
+ CountMap<?> counts = chunk.getEntityCounts();
+ if (counts.total().get() == 0) {
+ continue;
+ }
+
+ boolean found = false;
+
+ for (Region region : regions) {
+ if (region.isAdjacent(chunk)) {
+ found = true;
+ region.add(chunk);
+
+ // if the chunk is adjacent to more than one region, merge the regions together
+ for (Iterator<Region> iterator = regions.iterator(); iterator.hasNext(); ) {
+ Region otherRegion = iterator.next();
+ if (region != otherRegion && otherRegion.isAdjacent(chunk)) {
+ iterator.remove();
+ region.merge(otherRegion);
+ }
+ }
+
+ break;
+ }
+ }
+
+ if (!found) {
+ regions.add(new Region(chunk));
+ }
+ }
+
+ return regions;
+ }
+
+ /**
+ * A group of nearby chunks, clustered together by Euclidean distance.
+ */
+ private static final class Region {
+ private static final int DISTANCE_THRESHOLD = 2;
+ private final Set<ChunkInfo<?>> chunks;
+ private final AtomicInteger totalEntities;
+
+ private Region(ChunkInfo<?> initial) {
+ this.chunks = new HashSet<>();
+ this.chunks.add(initial);
+ this.totalEntities = new AtomicInteger(initial.getEntityCounts().total().get());
+ }
+
+ public Set<ChunkInfo<?>> getChunks() {
+ return this.chunks;
+ }
+
+ public AtomicInteger getTotalEntities() {
+ return this.totalEntities;
+ }
+
+ public boolean isAdjacent(ChunkInfo<?> chunk) {
+ for (ChunkInfo<?> el : this.chunks) {
+ if (squaredEuclideanDistance(el, chunk) <= DISTANCE_THRESHOLD) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ public void add(ChunkInfo<?> chunk) {
+ this.chunks.add(chunk);
+ this.totalEntities.addAndGet(chunk.getEntityCounts().total().get());
+ }
+
+ public void merge(Region group) {
+ this.chunks.addAll(group.getChunks());
+ this.totalEntities.addAndGet(group.getTotalEntities().get());
+ }
+
+ private static long squaredEuclideanDistance(ChunkInfo<?> a, ChunkInfo<?> b) {
+ long dx = a.getX() - b.getX();
+ long dz = a.getZ() - b.getZ();
+ return (dx * dx) + (dz * dz);
+ }
+ }
+
+}
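Note that isAdjacent() compares the squared Euclidean distance against DISTANCE_THRESHOLD without squaring the threshold, so "adjacent" effectively means within one chunk on each axis: dx² + dz² ≤ 2 admits (±1, 0), (0, ±1) and the diagonals (±1, ±1), but not (±2, 0), where dx² is already 4. A quick check of the arithmetic:

    public class AdjacencyDemo {
        static long squaredDistance(long dx, long dz) {
            return (dx * dx) + (dz * dz);
        }

        public static void main(String[] args) {
            System.out.println(squaredDistance(1, 1) <= 2); // true: a diagonal neighbour joins the region
            System.out.println(squaredDistance(2, 0) <= 2); // false: two chunks apart starts a new region
        }
    }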
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java
index ce466a0..1c217db 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java
@@ -27,6 +27,7 @@ import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider;
import me.lucko.spark.common.sampler.aggregator.DataAggregator;
import me.lucko.spark.common.sampler.node.MergeMode;
import me.lucko.spark.common.sampler.node.ThreadNode;
+import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.common.util.ClassSourceLookup;
import me.lucko.spark.proto.SparkSamplerProtos.SamplerData;
import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata;
@@ -41,6 +42,9 @@ import java.util.concurrent.CompletableFuture;
*/
public abstract class AbstractSampler implements Sampler {
+ /** The spark platform instance */
+ protected final SparkPlatform platform;
+
/** The interval to wait between sampling, in microseconds */
protected final int interval;
@@ -50,8 +54,11 @@ public abstract class AbstractSampler implements Sampler {
/** The time when sampling first began */
protected long startTime = -1;
+ /** The game tick when sampling first began */
+ protected int startTick = -1;
+
/** The unix timestamp (in millis) when this sampler should automatically complete. */
- protected final long endTime; // -1 for nothing
+ protected final long autoEndTime; // -1 for nothing
/** A future to encapsulate the completion of this sampler instance */
protected final CompletableFuture<Sampler> future = new CompletableFuture<>();
@@ -59,10 +66,11 @@ public abstract class AbstractSampler implements Sampler {
/** The garbage collector statistics when profiling started */
protected Map<String, GarbageCollectorStatistics> initialGcStats;
- protected AbstractSampler(int interval, ThreadDumper threadDumper, long endTime) {
+ protected AbstractSampler(SparkPlatform platform, int interval, ThreadDumper threadDumper, long autoEndTime) {
+ this.platform = platform;
this.interval = interval;
this.threadDumper = threadDumper;
- this.endTime = endTime;
+ this.autoEndTime = autoEndTime;
}
@Override
@@ -74,8 +82,8 @@ public abstract class AbstractSampler implements Sampler {
}
@Override
- public long getEndTime() {
- return this.endTime;
+ public long getAutoEndTime() {
+ return this.autoEndTime;
}
@Override
@@ -91,6 +99,16 @@ public abstract class AbstractSampler implements Sampler {
return this.initialGcStats;
}
+ @Override
+ public void start() {
+ this.startTime = System.currentTimeMillis();
+
+ TickHook tickHook = this.platform.getTickHook();
+ if (tickHook != null) {
+ this.startTick = tickHook.getCurrentTick();
+ }
+ }
+
protected void writeMetadataToProto(SamplerData.Builder proto, SparkPlatform platform, CommandSender creator, String comment, DataAggregator dataAggregator) {
SamplerMetadata.Builder metadata = SamplerMetadata.newBuilder()
.setPlatformMetadata(platform.getPlugin().getPlatformInfo().toData().toProto())
@@ -105,6 +123,14 @@ public abstract class AbstractSampler implements Sampler {
metadata.setComment(comment);
}
+ if (this.startTick != -1) {
+ TickHook tickHook = this.platform.getTickHook();
+ if (tickHook != null) {
+ int numberOfTicks = tickHook.getCurrentTick() - this.startTick;
+ metadata.setNumberOfTicks(numberOfTicks);
+ }
+ }
+
try {
metadata.setPlatformStatistics(platform.getStatisticsProvider().getPlatformStatistics(getInitialGcStats()));
} catch (Exception e) {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
index 845043f..84f2da1 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
@@ -57,7 +57,7 @@ public interface Sampler {
*
* @return the end time, or -1 if undefined
*/
- long getEndTime();
+ long getAutoEndTime();
/**
* Gets a future to encapsulate the completion of the sampler
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java
index 88cf018..88b9d91 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java
@@ -97,11 +97,11 @@ public class SamplerBuilder {
Sampler sampler;
if (this.ticksOver != -1 && this.tickHook != null) {
- sampler = new JavaSampler(intervalMicros, this.threadDumper, this.threadGrouper, this.timeout, this.ignoreSleeping, this.ignoreNative, this.tickHook, this.ticksOver);
+ sampler = new JavaSampler(platform, intervalMicros, this.threadDumper, this.threadGrouper, this.timeout, this.ignoreSleeping, this.ignoreNative, this.tickHook, this.ticksOver);
} else if (this.useAsyncProfiler && !(this.threadDumper instanceof ThreadDumper.Regex) && AsyncProfilerAccess.INSTANCE.checkSupported(platform)) {
- sampler = new AsyncSampler(intervalMicros, this.threadDumper, this.threadGrouper, this.timeout);
+ sampler = new AsyncSampler(platform, intervalMicros, this.threadDumper, this.threadGrouper, this.timeout);
} else {
- sampler = new JavaSampler(intervalMicros, this.threadDumper, this.threadGrouper, this.timeout, this.ignoreSleeping, this.ignoreNative);
+ sampler = new JavaSampler(platform, intervalMicros, this.threadDumper, this.threadGrouper, this.timeout, this.ignoreSleeping, this.ignoreNative);
}
sampler.start();
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java
index 5cb7fdc..d8288da 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java
@@ -65,8 +65,8 @@ public class AsyncSampler extends AbstractSampler {
/** The executor used for timeouts */
private ScheduledExecutorService timeoutExecutor;
- public AsyncSampler(int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime) {
- super(interval, threadDumper, endTime);
+ public AsyncSampler(SparkPlatform platform, int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime) {
+ super(platform, interval, threadDumper, endTime);
this.profiler = AsyncProfilerAccess.INSTANCE.getProfiler();
this.dataAggregator = new AsyncDataAggregator(threadGrouper);
}
@@ -90,7 +90,7 @@ public class AsyncSampler extends AbstractSampler {
*/
@Override
public void start() {
- this.startTime = System.currentTimeMillis();
+ super.start();
try {
this.outputFile = TemporaryFiles.create("spark-profile-", ".jfr.tmp");
@@ -120,11 +120,11 @@ public class AsyncSampler extends AbstractSampler {
}
private void scheduleTimeout() {
- if (this.endTime == -1) {
+ if (this.autoEndTime == -1) {
return;
}
- long delay = this.endTime - System.currentTimeMillis();
+ long delay = this.autoEndTime - System.currentTimeMillis();
if (delay <= 0) {
return;
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java
index cfa0a0f..913faee 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java
@@ -63,19 +63,19 @@ public class JavaSampler extends AbstractSampler implements Runnable {
/** Responsible for aggregating and then outputting collected sampling data */
private final JavaDataAggregator dataAggregator;
- public JavaSampler(int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime, boolean ignoreSleeping, boolean ignoreNative) {
- super(interval, threadDumper, endTime);
+ public JavaSampler(SparkPlatform platform, int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime, boolean ignoreSleeping, boolean ignoreNative) {
+ super(platform, interval, threadDumper, endTime);
this.dataAggregator = new SimpleDataAggregator(this.workerPool, threadGrouper, interval, ignoreSleeping, ignoreNative);
}
- public JavaSampler(int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime, boolean ignoreSleeping, boolean ignoreNative, TickHook tickHook, int tickLengthThreshold) {
- super(interval, threadDumper, endTime);
+ public JavaSampler(SparkPlatform platform, int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime, boolean ignoreSleeping, boolean ignoreNative, TickHook tickHook, int tickLengthThreshold) {
+ super(platform, interval, threadDumper, endTime);
this.dataAggregator = new TickedDataAggregator(this.workerPool, threadGrouper, interval, ignoreSleeping, ignoreNative, tickHook, tickLengthThreshold);
}
@Override
public void start() {
- this.startTime = System.currentTimeMillis();
+ super.start();
this.task = this.workerPool.scheduleAtFixedRate(this, 0, this.interval, TimeUnit.MICROSECONDS);
}
@@ -89,7 +89,7 @@ public class JavaSampler extends AbstractSampler implements Runnable {
// this is effectively synchronized, the worker pool will not allow this task
// to concurrently execute.
try {
- if (this.endTime != -1 && this.endTime <= System.currentTimeMillis()) {
+ if (this.autoEndTime != -1 && this.autoEndTime <= System.currentTimeMillis()) {
this.future.complete(this);
stop();
return;
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java
index e817828..e062f31 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java
@@ -31,6 +31,7 @@ import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicInteger;
/**
* Implementation of {@link DataAggregator} which supports only including sampling data from "ticks"
@@ -47,6 +48,9 @@ public class TickedDataAggregator extends JavaDataAggregator {
/** The expected number of samples in each tick */
private final int expectedSize;
+ /** The number of ticks aggregated so far */
+ private final AtomicInteger numberOfTicks = new AtomicInteger();
+
private final Object mutex = new Object();
// state
@@ -64,10 +68,16 @@ public class TickedDataAggregator extends JavaDataAggregator {
@Override
public SamplerMetadata.DataAggregator getMetadata() {
+ // push the current tick (so numberOfTicks is accurate)
+ synchronized (this.mutex) {
+ pushCurrentTick();
+ }
+
return SamplerMetadata.DataAggregator.newBuilder()
.setType(SamplerMetadata.DataAggregator.Type.TICKED)
.setThreadGrouper(this.threadGrouper.asProto())
.setTickLengthThreshold(this.tickLengthThreshold)
+ .setNumberOfIncludedTicks(this.numberOfTicks.get())
.build();
}
@@ -97,6 +107,7 @@ public class TickedDataAggregator extends JavaDataAggregator {
return;
}
+ this.numberOfTicks.incrementAndGet();
this.workerPool.submit(currentData);
}
@@ -110,6 +121,10 @@ public class TickedDataAggregator extends JavaDataAggregator {
return super.exportData();
}
+ public int getNumberOfTicks() {
+ return this.numberOfTicks.get();
+ }
+
private final class TickList implements Runnable {
private final List<ThreadInfo> list;
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/SparkPlaceholder.java b/spark-common/src/main/java/me/lucko/spark/common/util/SparkPlaceholder.java
new file mode 100644
index 0000000..be5bbc2
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/SparkPlaceholder.java
@@ -0,0 +1,191 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.util;
+
+import me.lucko.spark.common.SparkPlatform;
+import me.lucko.spark.common.monitor.cpu.CpuMonitor;
+import me.lucko.spark.common.monitor.tick.TickStatistics;
+
+import net.kyori.adventure.text.Component;
+import net.kyori.adventure.text.TextComponent;
+import net.kyori.adventure.text.serializer.legacy.LegacyComponentSerializer;
+
+import java.util.Locale;
+import java.util.function.BiFunction;
+
+public enum SparkPlaceholder {
+
+ TPS((platform, arg) -> {
+ TickStatistics tickStatistics = platform.getTickStatistics();
+ if (tickStatistics == null) {
+ return null;
+ }
+
+ if (arg == null) {
+ return Component.text()
+ .append(StatisticFormatter.formatTps(tickStatistics.tps5Sec())).append(Component.text(", "))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps10Sec())).append(Component.text(", "))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps1Min())).append(Component.text(", "))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps5Min())).append(Component.text(", "))
+ .append(StatisticFormatter.formatTps(tickStatistics.tps15Min()))
+ .build();
+ }
+
+ switch (arg) {
+ case "5s":
+ return StatisticFormatter.formatTps(tickStatistics.tps5Sec());
+ case "10s":
+ return StatisticFormatter.formatTps(tickStatistics.tps10Sec());
+ case "1m":
+ return StatisticFormatter.formatTps(tickStatistics.tps1Min());
+ case "5m":
+ return StatisticFormatter.formatTps(tickStatistics.tps5Min());
+ case "15m":
+ return StatisticFormatter.formatTps(tickStatistics.tps15Min());
+ }
+
+ return null;
+ }),
+
+ TICKDURATION((platform, arg) -> {
+ TickStatistics tickStatistics = platform.getTickStatistics();
+ if (tickStatistics == null || !tickStatistics.isDurationSupported()) {
+ return null;
+ }
+
+ if (arg == null) {
+ return Component.text()
+ .append(StatisticFormatter.formatTickDurations(tickStatistics.duration10Sec())).append(Component.text("; "))
+ .append(StatisticFormatter.formatTickDurations(tickStatistics.duration1Min()))
+ .build();
+ }
+
+ switch (arg) {
+ case "10s":
+ return StatisticFormatter.formatTickDurations(tickStatistics.duration10Sec());
+ case "1m":
+ return StatisticFormatter.formatTickDurations(tickStatistics.duration1Min());
+ }
+
+ return null;
+ }),
+
+ CPU_SYSTEM((platform, arg) -> {
+ if (arg == null) {
+ return Component.text()
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad10SecAvg())).append(Component.text(", "))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad1MinAvg())).append(Component.text(", "))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad15MinAvg()))
+ .build();
+ }
+
+ switch (arg) {
+ case "10s":
+ return StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad10SecAvg());
+ case "1m":
+ return StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad1MinAvg());
+ case "15m":
+ return StatisticFormatter.formatCpuUsage(CpuMonitor.systemLoad15MinAvg());
+ }
+
+ return null;
+ }),
+
+ CPU_PROCESS((platform, arg) -> {
+ if (arg == null) {
+ return Component.text()
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad10SecAvg())).append(Component.text(", "))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad1MinAvg())).append(Component.text(", "))
+ .append(StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad15MinAvg()))
+ .build();
+ }
+
+ switch (arg) {
+ case "10s":
+ return StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad10SecAvg());
+ case "1m":
+ return StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad1MinAvg());
+ case "15m":
+ return StatisticFormatter.formatCpuUsage(CpuMonitor.processLoad15MinAvg());
+ }
+
+ return null;
+ });
+
+ private final String name;
+ private final BiFunction<SparkPlatform, String, TextComponent> function;
+
+ SparkPlaceholder(BiFunction<SparkPlatform, String, TextComponent> function) {
+ this.name = name().toLowerCase(Locale.ROOT);
+ this.function = function;
+ }
+
+ public String getName() {
+ return this.name;
+ }
+
+ public TextComponent resolve(SparkPlatform platform, String arg) {
+ return this.function.apply(platform, arg);
+ }
+
+ public static TextComponent resolveComponent(SparkPlatform platform, String placeholder) {
+ String[] parts = placeholder.split("_");
+
+ if (parts.length == 0) {
+ return null;
+ }
+
+ String label = parts[0];
+
+ if (label.equals("tps")) {
+ String arg = parts.length < 2 ? null : parts[1];
+ return TPS.resolve(platform, arg);
+ }
+
+ if (label.equals("tickduration")) {
+ String arg = parts.length < 2 ? null : parts[1];
+ return TICKDURATION.resolve(platform, arg);
+ }
+
+ if (label.equals("cpu") && parts.length >= 2) {
+ String type = parts[1];
+ String arg = parts.length < 3 ? null : parts[2];
+
+ if (type.equals("system")) {
+ return CPU_SYSTEM.resolve(platform, arg);
+ }
+ if (type.equals("process")) {
+ return CPU_PROCESS.resolve(platform, arg);
+ }
+ }
+
+ return null;
+ }
+
+ public static String resolveFormattingCode(SparkPlatform platform, String placeholder) {
+ TextComponent result = resolveComponent(platform, placeholder);
+ if (result == null) {
+ return null;
+ }
+ return LegacyComponentSerializer.legacySection().serialize(result);
+ }
+
+}
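Placeholder strings decompose on '_': the first token selects the statistic ("tps", "tickduration", or "cpu" followed by "system"/"process"), and an optional trailing token selects a time window. A usage sketch, assuming the host plugin's integration code supplies the SparkPlatform instance:

    import me.lucko.spark.common.SparkPlatform;
    import me.lucko.spark.common.util.SparkPlaceholder;

    public class PlaceholderDemo {
        // "platform" is assumed to be provided by the host plugin
        public static void printExamples(SparkPlatform platform) {
            System.out.println(SparkPlaceholder.resolveFormattingCode(platform, "tps_5s"));     // one window
            System.out.println(SparkPlaceholder.resolveFormattingCode(platform, "cpu_system")); // all three averages
            System.out.println(SparkPlaceholder.resolveFormattingCode(platform, "memory"));     // null: unknown label
        }
    }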