author    Luck <git@lucko.me>  2022-06-25 22:48:55 +0100
committer Luck <git@lucko.me>  2022-06-25 22:49:41 +0100
commit    4d45579d2bf57b417d5d3eca041c2131177183e4 (patch)
tree      c51f46d0efb323b8d7a878f383b44dbaac129fb6 /spark-common/src/main
parent    28cf3185c1374c4b5af277ef28482299694209a3 (diff)
Add providers for world (entity/chunk) statistics
Diffstat (limited to 'spark-common/src/main')
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java | 19
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java | 15
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/platform/world/AbstractChunkInfo.java | 55
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/platform/world/ChunkInfo.java | 44
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/platform/world/CountMap.java | 110
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldInfoProvider.java | 57
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java | 216
-rw-r--r--  spark-common/src/main/proto/spark/spark.proto | 27
8 files changed, 542 insertions(+), 1 deletion(-)
diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java
index b817df1..1116b04 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java
@@ -25,6 +25,7 @@ import me.lucko.spark.common.command.sender.CommandSender;
import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
import me.lucko.spark.common.platform.PlatformInfo;
import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
import me.lucko.spark.common.sampler.ThreadDumper;
import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.common.tick.TickReporter;
@@ -75,6 +76,15 @@ public interface SparkPlugin {
void executeAsync(Runnable task);
/**
+ * Executes the given {@link Runnable} on the server/client main thread.
+ *
+ * @param task the task
+ */
+ default void executeSync(Runnable task) {
+ throw new UnsupportedOperationException();
+ }
+
+ /**
* Print to the plugin logger.
*
* @param level the log level
@@ -143,6 +153,15 @@ public interface SparkPlugin {
}
/**
+ * Creates a world info provider.
+ *
+ * @return the world info provider function
+ */
+ default WorldInfoProvider createWorldInfoProvider() {
+ return WorldInfoProvider.NO_OP;
+ }
+
+ /**
* Gets information for the platform.
*
* @return information about the platform
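
For a platform module, wiring up these two new defaults might look roughly like the sketch below. ExamplePlugin, ExampleServer, scheduleOnMainThread and ExampleWorldInfoProvider are illustrative names only, not part of this commit; the diff here is limited to spark-common, so the real per-platform implementations are not shown.

    import me.lucko.spark.common.SparkPlugin;
    import me.lucko.spark.common.platform.world.WorldInfoProvider;

    public class ExamplePlugin implements SparkPlugin {
        private final ExampleServer server; // hypothetical platform handle

        public ExamplePlugin(ExampleServer server) {
            this.server = server;
        }

        @Override
        public void executeSync(Runnable task) {
            // hand the task to the platform's main-thread scheduler
            this.server.scheduleOnMainThread(task);
        }

        @Override
        public WorldInfoProvider createWorldInfoProvider() {
            // replace the NO_OP default with a real provider
            return new ExampleWorldInfoProvider(this.server);
        }

        // ... remaining SparkPlugin methods (getVersion(), executeAsync(...), etc.)
    }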
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java
index f35bbbe..49cfed5 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java
@@ -30,8 +30,11 @@ import me.lucko.spark.common.monitor.net.NetworkInterfaceAverages;
import me.lucko.spark.common.monitor.net.NetworkMonitor;
import me.lucko.spark.common.monitor.ping.PingStatistics;
import me.lucko.spark.common.monitor.tick.TickStatistics;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
+import me.lucko.spark.common.platform.world.WorldStatisticsProvider;
import me.lucko.spark.proto.SparkProtos.PlatformStatistics;
import me.lucko.spark.proto.SparkProtos.SystemStatistics;
+import me.lucko.spark.proto.SparkProtos.WorldStatistics;
import java.lang.management.ManagementFactory;
import java.lang.management.MemoryUsage;
@@ -182,6 +185,18 @@ public class PlatformStatisticsProvider {
builder.setPlayerCount(playerCount);
}
+ try {
+ WorldInfoProvider worldInfo = this.platform.getPlugin().createWorldInfoProvider();
+ WorldStatisticsProvider worldStatisticsProvider = new WorldStatisticsProvider(this.platform, worldInfo);
+ WorldStatistics worldStatistics = worldStatisticsProvider.getWorldStatistics();
+ if (worldStatistics != null) {
+ builder.setWorld(worldStatistics);
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+
+
return builder.build();
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/AbstractChunkInfo.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/AbstractChunkInfo.java
new file mode 100644
index 0000000..80026cd
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/AbstractChunkInfo.java
@@ -0,0 +1,55 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform.world;
+
+public abstract class AbstractChunkInfo<E> implements ChunkInfo<E> {
+ private final int x;
+ private final int z;
+
+ protected AbstractChunkInfo(int x, int z) {
+ this.x = x;
+ this.z = z;
+ }
+
+ @Override
+ public int getX() {
+ return this.x;
+ }
+
+ @Override
+ public int getZ() {
+ return this.z;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (obj == this) return true;
+ if (!(obj instanceof AbstractChunkInfo)) return false;
+ AbstractChunkInfo<?> that = (AbstractChunkInfo<?>) obj;
+ return this.x == that.x && this.z == that.z;
+ }
+
+ @Override
+ public int hashCode() {
+ return this.x ^ this.z;
+ }
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/ChunkInfo.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/ChunkInfo.java
new file mode 100644
index 0000000..2193a50
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/ChunkInfo.java
@@ -0,0 +1,44 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform.world;
+
+/**
+ * Information about a given chunk.
+ *
+ * @param <E> the type used to describe entities
+ */
+public interface ChunkInfo<E> {
+
+ int getX();
+
+ int getZ();
+
+ CountMap<E> getEntityCounts();
+
+ /**
+ * Converts entity type {@link E} to a string.
+ *
+ * @param type the entity type
+ * @return a string
+ */
+ String entityTypeName(E type);
+
+}
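
A concrete implementation would normally extend AbstractChunkInfo (above) so the coordinates, equals and hashCode come for free, and only supply the entity counting. A minimal sketch that keys counts by plain type-name strings; ExampleChunkInfo is an illustrative name, not part of this commit.

    import me.lucko.spark.common.platform.world.AbstractChunkInfo;
    import me.lucko.spark.common.platform.world.CountMap;

    import java.util.HashMap;
    import java.util.List;

    public class ExampleChunkInfo extends AbstractChunkInfo<String> {
        private final CountMap<String> entityCounts;

        public ExampleChunkInfo(int x, int z, List<String> entityTypeNames) {
            super(x, z);
            this.entityCounts = new CountMap.Simple<>(new HashMap<>());
            // tally one count per entity present in the chunk
            for (String type : entityTypeNames) {
                this.entityCounts.increment(type);
            }
        }

        @Override
        public CountMap<String> getEntityCounts() {
            return this.entityCounts;
        }

        @Override
        public String entityTypeName(String type) {
            // the key type is already a display name here
            return type;
        }
    }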
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/CountMap.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/CountMap.java
new file mode 100644
index 0000000..3083266
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/CountMap.java
@@ -0,0 +1,110 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform.world;
+
+import java.util.EnumMap;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicInteger;
+
+/**
+ * A map of (key) -> count.
+ *
+ * @param <T> the key type
+ */
+public interface CountMap<T> {
+
+ /**
+ * Increment the counter for the given key
+ *
+ * @param key the key
+ */
+ void increment(T key);
+
+ /**
+ * Add to the counter for the given key
+ *
+ * @param key the key
+ */
+ void add(T key, int delta);
+
+ AtomicInteger total();
+
+ Map<T, AtomicInteger> asMap();
+
+ /**
+ * A simple {@link CountMap} backed by the provided {@link Map}
+ *
+ * @param <T> the key type
+ */
+ class Simple<T> implements CountMap<T> {
+ private final Map<T, AtomicInteger> counts;
+ private final AtomicInteger total;
+
+ public Simple(Map<T, AtomicInteger> counts) {
+ this.counts = counts;
+ this.total = new AtomicInteger();
+ }
+
+ @Override
+ public void increment(T key) {
+ AtomicInteger counter = this.counts.get(key);
+ if (counter == null) {
+ counter = new AtomicInteger();
+ this.counts.put(key, counter);
+ }
+ counter.incrementAndGet();
+ this.total.incrementAndGet();
+ }
+
+ @Override
+ public void add(T key, int delta) {
+ AtomicInteger counter = this.counts.get(key);
+ if (counter == null) {
+ counter = new AtomicInteger();
+ this.counts.put(key, counter);
+ }
+ counter.addAndGet(delta);
+ this.total.addAndGet(delta);
+ }
+
+ @Override
+ public AtomicInteger total() {
+ return this.total;
+ }
+
+ @Override
+ public Map<T, AtomicInteger> asMap() {
+ return this.counts;
+ }
+ }
+
+ /**
+ * A {@link CountMap} backed by an {@link EnumMap}.
+ *
+ * @param <T> the key type - must be an enum
+ */
+ class EnumKeyed<T extends Enum<T>> extends Simple<T> {
+ public EnumKeyed(Class<T> keyClass) {
+ super(new EnumMap<>(keyClass));
+ }
+ }
+
+}
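
To illustrate the counting semantics: values are AtomicInteger holders, total() tracks the sum across all keys, and the EnumKeyed variant just swaps in an EnumMap. TimeUnit is used below purely as a stand-in enum.

    CountMap<String> counts = new CountMap.Simple<>(new HashMap<>());
    counts.increment("zombie");
    counts.increment("zombie");
    counts.add("creeper", 3);

    int zombies = counts.asMap().get("zombie").get(); // 2
    int total = counts.total().get();                 // 5

    // enum keys can use the EnumMap-backed variant
    CountMap<TimeUnit> byUnit = new CountMap.EnumKeyed<>(TimeUnit.class);
    byUnit.increment(TimeUnit.SECONDS);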
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldInfoProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldInfoProvider.java
new file mode 100644
index 0000000..9494816
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldInfoProvider.java
@@ -0,0 +1,57 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform.world;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Provides information about worlds.
+ */
+public interface WorldInfoProvider {
+
+ WorldInfoProvider NO_OP = () -> null;
+
+ /**
+ * Polls for information.
+ *
+ * @return the information
+ */
+ Result<? extends ChunkInfo<?>> poll();
+
+ default boolean mustCallSync() {
+ return true;
+ }
+
+ final class Result<T> {
+ private final Map<String, List<T>> worlds = new HashMap<>();
+
+ public void put(String worldName, List<T> chunks) {
+ this.worlds.put(worldName, chunks);
+ }
+
+ public Map<String, List<T>> getWorlds() {
+ return this.worlds;
+ }
+ }
+
+}
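
A platform implementation of this interface walks every loaded world and emits one ChunkInfo per loaded chunk. A rough sketch, reusing the hypothetical ExampleChunkInfo and ExampleServer names from earlier; none of these platform types are part of this commit.

    import me.lucko.spark.common.platform.world.WorldInfoProvider;

    import java.util.ArrayList;
    import java.util.List;

    public class ExampleWorldInfoProvider implements WorldInfoProvider {
        private final ExampleServer server; // hypothetical platform handle

        public ExampleWorldInfoProvider(ExampleServer server) {
            this.server = server;
        }

        @Override
        public Result<ExampleChunkInfo> poll() {
            Result<ExampleChunkInfo> result = new Result<>();
            for (ExampleWorld world : this.server.getWorlds()) {
                List<ExampleChunkInfo> chunks = new ArrayList<>();
                for (ExampleChunk chunk : world.getLoadedChunks()) {
                    chunks.add(new ExampleChunkInfo(chunk.getX(), chunk.getZ(), chunk.getEntityTypeNames()));
                }
                result.put(world.getName(), chunks);
            }
            return result;
        }

        // mustCallSync() is left at its default of true: chunk and entity access
        // usually has to happen on the main thread, so WorldStatisticsProvider
        // will dispatch poll() via SparkPlugin#executeSync.
    }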
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java
new file mode 100644
index 0000000..864a296
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java
@@ -0,0 +1,216 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform.world;
+
+import me.lucko.spark.common.SparkPlatform;
+import me.lucko.spark.common.SparkPlugin;
+import me.lucko.spark.proto.SparkProtos.WorldStatistics;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Set;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.logging.Level;
+
+public class WorldStatisticsProvider {
+ private final SparkPlatform platform;
+ private final WorldInfoProvider provider;
+
+ public WorldStatisticsProvider(SparkPlatform platform, WorldInfoProvider provider) {
+ this.platform = platform;
+ this.provider = provider;
+ }
+
+ public WorldStatistics getWorldStatistics() {
+ if (this.provider == WorldInfoProvider.NO_OP) {
+ return null;
+ }
+
+ CompletableFuture<WorldInfoProvider.Result<? extends ChunkInfo<?>>> future;
+
+ if (this.provider.mustCallSync()) {
+ SparkPlugin plugin = this.platform.getPlugin();
+ future = CompletableFuture.supplyAsync(this.provider::poll, plugin::executeSync);
+ } else {
+ future = CompletableFuture.completedFuture(this.provider.poll());
+ }
+
+ WorldInfoProvider.Result<? extends ChunkInfo<?>> result;
+ try {
+ result = future.get(5, TimeUnit.SECONDS);
+ } catch (InterruptedException | ExecutionException e) {
+ throw new RuntimeException(e);
+ } catch (TimeoutException e) {
+ this.platform.getPlugin().log(Level.WARNING, "Timed out waiting for world statistics");
+ return null;
+ }
+
+ if (result == null) {
+ return null;
+ }
+
+ WorldStatistics.Builder stats = WorldStatistics.newBuilder();
+
+ AtomicInteger combinedTotal = new AtomicInteger();
+ CountMap<String> combined = new CountMap.Simple<>(new HashMap<>());
+
+ result.getWorlds().forEach((worldName, chunks) -> {
+ WorldStatistics.World.Builder builder = WorldStatistics.World.newBuilder();
+ builder.setName(worldName);
+
+ List<Region> regions = groupIntoRegions(chunks);
+
+ int total = 0;
+
+ for (Region region : regions) {
+ builder.addRegions(regionToProto(region, combined));
+ total += region.getTotalEntities().get();
+ }
+
+ builder.setTotalEntities(total);
+ combinedTotal.addAndGet(total);
+
+ stats.addWorlds(builder.build());
+ });
+
+ stats.setTotalEntities(combinedTotal.get());
+ combined.asMap().forEach((key, value) -> stats.putEntityCounts(key, value.get()));
+
+ return stats.build();
+ }
+
+ private static WorldStatistics.Region regionToProto(Region region, CountMap<String> combined) {
+ WorldStatistics.Region.Builder builder = WorldStatistics.Region.newBuilder();
+ builder.setTotalEntities(region.getTotalEntities().get());
+ for (ChunkInfo<?> chunk : region.getChunks()) {
+ builder.addChunks(chunkToProto(chunk, combined));
+ }
+ return builder.build();
+ }
+
+ private static <E> WorldStatistics.Chunk chunkToProto(ChunkInfo<E> chunk, CountMap<String> combined) {
+ WorldStatistics.Chunk.Builder builder = WorldStatistics.Chunk.newBuilder();
+ builder.setX(chunk.getX());
+ builder.setZ(chunk.getZ());
+ builder.setTotalEntities(chunk.getEntityCounts().total().get());
+ chunk.getEntityCounts().asMap().forEach((key, value) -> {
+ String name = chunk.entityTypeName(key);
+ int count = value.get();
+
+ builder.putEntityCounts(name, count);
+ combined.add(name, count);
+ });
+ return builder.build();
+ }
+
+ private static List<Region> groupIntoRegions(List<? extends ChunkInfo<?>> chunks) {
+ List<Region> regions = new ArrayList<>();
+
+ for (ChunkInfo<?> chunk : chunks) {
+ CountMap<?> counts = chunk.getEntityCounts();
+ if (counts.total().get() == 0) {
+ continue;
+ }
+
+ boolean found = false;
+
+ for (Region region : regions) {
+ if (region.isAdjacent(chunk)) {
+ found = true;
+ region.add(chunk);
+
+ // if the chunk is adjacent to more than one region, merge the regions together
+ for (Iterator<Region> iterator = regions.iterator(); iterator.hasNext(); ) {
+ Region otherRegion = iterator.next();
+ if (region != otherRegion && otherRegion.isAdjacent(chunk)) {
+ iterator.remove();
+ region.merge(otherRegion);
+ }
+ }
+
+ break;
+ }
+ }
+
+ if (!found) {
+ regions.add(new Region(chunk));
+ }
+ }
+
+ return regions;
+ }
+
+ /**
+ * A map of nearby chunks grouped together by Euclidean distance.
+ */
+ private static final class Region {
+ private static final int DISTANCE_THRESHOLD = 2;
+ private final Set<ChunkInfo<?>> chunks;
+ private final AtomicInteger totalEntities;
+
+ private Region(ChunkInfo<?> initial) {
+ this.chunks = new HashSet<>();
+ this.chunks.add(initial);
+ this.totalEntities = new AtomicInteger(initial.getEntityCounts().total().get());
+ }
+
+ public Set<ChunkInfo<?>> getChunks() {
+ return this.chunks;
+ }
+
+ public AtomicInteger getTotalEntities() {
+ return this.totalEntities;
+ }
+
+ public boolean isAdjacent(ChunkInfo<?> chunk) {
+ for (ChunkInfo<?> el : this.chunks) {
+ if (squaredEuclideanDistance(el, chunk) <= DISTANCE_THRESHOLD) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ public void add(ChunkInfo<?> chunk) {
+ this.chunks.add(chunk);
+ this.totalEntities.addAndGet(chunk.getEntityCounts().total().get());
+ }
+
+ public void merge(Region group) {
+ this.chunks.addAll(group.getChunks());
+ this.totalEntities.addAndGet(group.getTotalEntities().get());
+ }
+
+ private static long squaredEuclideanDistance(ChunkInfo<?> a, ChunkInfo<?> b) {
+ long dx = a.getX() - b.getX();
+ long dz = a.getZ() - b.getZ();
+ return (dx * dx) + (dz * dz);
+ }
+ }
+
+}
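
To make the grouping rule concrete: DISTANCE_THRESHOLD = 2 is compared against the squared distance, so a chunk joins a region if it is within one chunk of any member, diagonals included, and a chunk that bridges two regions causes them to merge. A tiny standalone sketch of just that check; the real logic is the private Region.isAdjacent above.

    // same rule as Region.squaredEuclideanDistance + DISTANCE_THRESHOLD
    static boolean adjacent(int ax, int az, int bx, int bz) {
        long dx = ax - bx;
        long dz = az - bz;
        return (dx * dx) + (dz * dz) <= 2;
    }

    // adjacent(0, 0, 1, 1) -> true  (squared distance 2: same region)
    // adjacent(0, 0, 2, 0) -> false (squared distance 4: separate regions)
    // a chunk at (1, 0) is adjacent to both, so adding it merges the two regions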
diff --git a/spark-common/src/main/proto/spark/spark.proto b/spark-common/src/main/proto/spark/spark.proto
index ec0aa88..2ea341f 100644
--- a/spark-common/src/main/proto/spark/spark.proto
+++ b/spark-common/src/main/proto/spark/spark.proto
@@ -94,7 +94,8 @@ message PlatformStatistics {
Tps tps = 4; // optional
Mspt mspt = 5; // optional
Ping ping = 6; // optional
- int64 player_count = 7;
+ int64 player_count = 7; // optional
+ WorldStatistics world = 8; // optional
message Memory {
MemoryPool heap = 1;
@@ -127,6 +128,30 @@ message PlatformStatistics {
}
}
+message WorldStatistics {
+ int32 total_entities = 1;
+ map<string, int32> entity_counts = 2;
+ repeated World worlds = 3;
+
+ message World {
+ string name = 1;
+ int32 total_entities = 2;
+ repeated Region regions = 3;
+ }
+
+ message Region {
+ int32 total_entities = 1;
+ repeated Chunk chunks = 2;
+ }
+
+ message Chunk {
+ int32 x = 1;
+ int32 z = 2;
+ int32 total_entities = 3;
+ map<string, int32> entity_counts = 4;
+ }
+}
+
message RollingAverageValues {
double mean = 1;
double max = 2;
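
On the consuming side, the generated protobuf-java classes expose the usual accessors for these new fields. A sketch of reading the world section back out of an assembled PlatformStatistics message, using the method names that standard protobuf-java code generation produces:

    import me.lucko.spark.proto.SparkProtos.PlatformStatistics;
    import me.lucko.spark.proto.SparkProtos.WorldStatistics;

    static void printWorldStatistics(PlatformStatistics statistics) {
        if (!statistics.hasWorld()) {
            return; // field 8 is optional: platforms without a provider won't set it
        }
        WorldStatistics world = statistics.getWorld();
        System.out.println("total entities: " + world.getTotalEntities());
        for (WorldStatistics.World w : world.getWorldsList()) {
            System.out.println(w.getName() + ": " + w.getTotalEntities()
                    + " entities across " + w.getRegionsCount() + " regions");
        }
        world.getEntityCountsMap().forEach((type, count) ->
                System.out.println("  " + type + " x" + count));
    }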