-rw-r--r-- spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitWorldInfoProvider.java | 44
-rw-r--r-- spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java | 6
-rw-r--r-- spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java | 7
-rw-r--r-- spark-common/src/main/java/me/lucko/spark/common/platform/world/AsyncWorldInfoProvider.java | 90
-rw-r--r-- spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldInfoProvider.java | 57
-rw-r--r-- spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java | 37
-rw-r--r-- spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java | 2
-rw-r--r-- spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java | 2
-rw-r--r-- spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerContainer.java | 6
-rw-r--r-- spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerJob.java | 6
-rw-r--r-- spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java | 14
-rw-r--r-- spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java | 14
-rw-r--r-- spark-common/src/main/java/me/lucko/spark/common/sampler/window/WindowStatisticsCollector.java | 15
-rw-r--r-- spark-common/src/main/proto/spark/spark.proto | 6
-rw-r--r-- spark-fabric/src/main/java/me/lucko/spark/fabric/FabricWorldInfoProvider.java | 42
-rw-r--r-- spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientEntityManagerAccessor.java | 4
-rw-r--r-- spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerEntityManagerAccessor.java | 4
-rw-r--r-- spark-forge/src/main/java/me/lucko/spark/forge/ForgeWorldInfoProvider.java | 42
-rw-r--r-- spark-forge/src/main/resources/META-INF/accesstransformer.cfg | 4
-rw-r--r-- spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7WorldInfoProvider.java | 21
-rw-r--r-- spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8WorldInfoProvider.java | 21
21 files changed, 366 insertions, 78 deletions
diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitWorldInfoProvider.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitWorldInfoProvider.java
index 79c2715..8f876cf 100644
--- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitWorldInfoProvider.java
+++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitWorldInfoProvider.java
@@ -34,6 +34,21 @@ import java.util.ArrayList;
import java.util.List;
public class BukkitWorldInfoProvider implements WorldInfoProvider {
+ private static final boolean SUPPORTS_PAPER_COUNT_METHODS;
+
+ static {
+ boolean supportsPaperCountMethods = false;
+ try {
+ World.class.getMethod("getEntityCount");
+ World.class.getMethod("getTileEntityCount");
+ World.class.getMethod("getChunkCount");
+ supportsPaperCountMethods = true;
+ } catch (Exception e) {
+ // ignored
+ }
+ SUPPORTS_PAPER_COUNT_METHODS = supportsPaperCountMethods;
+ }
+
private final Server server;
public BukkitWorldInfoProvider(Server server) {
@@ -41,8 +56,33 @@ public class BukkitWorldInfoProvider implements WorldInfoProvider {
}
@Override
- public Result<BukkitChunkInfo> poll() {
- Result<BukkitChunkInfo> data = new Result<>();
+ public CountsResult pollCounts() {
+ int players = this.server.getOnlinePlayers().size();
+ int entities = 0;
+ int tileEntities = 0;
+ int chunks = 0;
+
+ for (World world : this.server.getWorlds()) {
+ if (SUPPORTS_PAPER_COUNT_METHODS) {
+ entities += world.getEntityCount();
+ tileEntities += world.getTileEntityCount();
+ chunks += world.getChunkCount();
+ } else {
+ entities += world.getEntities().size();
+ Chunk[] chunksArray = world.getLoadedChunks();
+ for (Chunk chunk : chunksArray) {
+ tileEntities += chunk.getTileEntities().length;
+ }
+ chunks += chunksArray.length;
+ }
+ }
+
+ return new CountsResult(players, entities, tileEntities, chunks);
+ }
+
+ @Override
+ public ChunksResult<BukkitChunkInfo> pollChunks() {
+ ChunksResult<BukkitChunkInfo> data = new ChunksResult<>();
for (World world : this.server.getWorlds()) {
Chunk[] chunks = world.getLoadedChunks();
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
index 00cd4fa..f576eac 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
@@ -145,7 +145,7 @@ public class SamplerModule implements CommandModule {
if (previousSampler.isRunningInBackground()) {
// there is a background profiler running - stop that first
resp.replyPrefixed(text("Stopping the background profiler before starting... please wait"));
- previousSampler.stop();
+ previousSampler.stop(true);
platform.getSamplerContainer().unsetActiveSampler(previousSampler);
} else {
// there is a non-background profiler running - tell the user
@@ -310,7 +310,7 @@ public class SamplerModule implements CommandModule {
if (sampler == null) {
resp.replyPrefixed(text("There isn't an active profiler running."));
} else {
- platform.getSamplerContainer().stopActiveSampler();
+ platform.getSamplerContainer().stopActiveSampler(true);
resp.broadcastPrefixed(text("Profiler has been cancelled.", GOLD));
}
}
@@ -322,7 +322,7 @@ public class SamplerModule implements CommandModule {
resp.replyPrefixed(text("There isn't an active profiler running."));
} else {
platform.getSamplerContainer().unsetActiveSampler(sampler);
- sampler.stop();
+ sampler.stop(false);
boolean saveToFile = arguments.boolFlag("save-to-file");
if (saveToFile) {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java
index 1eb9753..fc7e78a 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java
@@ -31,7 +31,7 @@ import me.lucko.spark.common.monitor.net.NetworkMonitor;
import me.lucko.spark.common.monitor.os.OperatingSystemInfo;
import me.lucko.spark.common.monitor.ping.PingStatistics;
import me.lucko.spark.common.monitor.tick.TickStatistics;
-import me.lucko.spark.common.platform.world.WorldInfoProvider;
+import me.lucko.spark.common.platform.world.AsyncWorldInfoProvider;
import me.lucko.spark.common.platform.world.WorldStatisticsProvider;
import me.lucko.spark.proto.SparkProtos.PlatformStatistics;
import me.lucko.spark.proto.SparkProtos.SystemStatistics;
@@ -188,8 +188,9 @@ public class PlatformStatisticsProvider {
}
try {
- WorldInfoProvider worldInfo = this.platform.getPlugin().createWorldInfoProvider();
- WorldStatisticsProvider worldStatisticsProvider = new WorldStatisticsProvider(this.platform, worldInfo);
+ WorldStatisticsProvider worldStatisticsProvider = new WorldStatisticsProvider(
+ new AsyncWorldInfoProvider(this.platform, this.platform.getPlugin().createWorldInfoProvider())
+ );
WorldStatistics worldStatistics = worldStatisticsProvider.getWorldStatistics();
if (worldStatistics != null) {
builder.setWorld(worldStatistics);
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/AsyncWorldInfoProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/AsyncWorldInfoProvider.java
new file mode 100644
index 0000000..82cddef
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/AsyncWorldInfoProvider.java
@@ -0,0 +1,90 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform.world;
+
+import me.lucko.spark.common.SparkPlatform;
+import me.lucko.spark.common.SparkPlugin;
+
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+import java.util.function.Function;
+import java.util.logging.Level;
+
+/**
+ * Async-friendly wrapper around {@link WorldInfoProvider}.
+ */
+public class AsyncWorldInfoProvider {
+ private static final int TIMEOUT_SECONDS = 5;
+
+ private final SparkPlatform platform;
+ private final WorldInfoProvider provider;
+
+ public AsyncWorldInfoProvider(SparkPlatform platform, WorldInfoProvider provider) {
+ this.platform = platform;
+ this.provider = provider == WorldInfoProvider.NO_OP ? null : provider;
+ }
+
+ private <T> CompletableFuture<T> async(Function<WorldInfoProvider, T> function) {
+ if (this.provider == null) {
+ return null;
+ }
+
+ if (this.provider.mustCallSync()) {
+ SparkPlugin plugin = this.platform.getPlugin();
+ return CompletableFuture.supplyAsync(() -> function.apply(this.provider), plugin::executeSync);
+ } else {
+ return CompletableFuture.completedFuture(function.apply(this.provider));
+ }
+ }
+
+ private <T> T get(CompletableFuture<T> future) {
+ if (future == null) {
+ return null;
+ }
+
+ try {
+ return future.get(TIMEOUT_SECONDS, TimeUnit.SECONDS);
+ } catch (InterruptedException | ExecutionException e) {
+ throw new RuntimeException(e);
+ } catch (TimeoutException e) {
+ this.platform.getPlugin().log(Level.WARNING, "Timed out waiting for world statistics");
+ return null;
+ }
+ }
+
+ public CompletableFuture<WorldInfoProvider.CountsResult> pollCounts() {
+ return async(WorldInfoProvider::pollCounts);
+ }
+
+ public CompletableFuture<WorldInfoProvider.ChunksResult<? extends ChunkInfo<?>>> pollChunks() {
+ return async(WorldInfoProvider::pollChunks);
+ }
+
+ public WorldInfoProvider.CountsResult getCounts() {
+ return get(pollCounts());
+ }
+
+ public WorldInfoProvider.ChunksResult<? extends ChunkInfo<?>> getChunks() {
+ return get(pollChunks());
+ }
+}
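
Note: a minimal caller sketch (not part of the diff) for the new wrapper, assuming a SparkPlatform reference is available. The constructor, getCounts() and the CountsResult accessors are taken from the class above; -1 is the sentinel the Fabric/Forge providers return for counts they cannot measure.

import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.platform.world.AsyncWorldInfoProvider;
import me.lucko.spark.common.platform.world.WorldInfoProvider;

class WorldCountsExample {
    static void logCounts(SparkPlatform platform) {
        // wrap the platform-specific provider; calls are dispatched to the main thread if mustCallSync() is true
        AsyncWorldInfoProvider worldInfo = new AsyncWorldInfoProvider(
                platform, platform.getPlugin().createWorldInfoProvider());

        // returns null if the provider is NO_OP or the 5 second timeout elapses
        WorldInfoProvider.CountsResult counts = worldInfo.getCounts();
        if (counts != null) {
            System.out.println(counts.players() + " players, " + counts.entities() + " entities, "
                    + counts.tileEntities() + " tile entities, " + counts.chunks() + " chunks");
        }
    }
}
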
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldInfoProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldInfoProvider.java
index 9494816..7fb581d 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldInfoProvider.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldInfoProvider.java
@@ -29,20 +29,37 @@ import java.util.Map;
*/
public interface WorldInfoProvider {
- WorldInfoProvider NO_OP = () -> null;
+ WorldInfoProvider NO_OP = new WorldInfoProvider() {
+ @Override
+ public CountsResult pollCounts() {
+ return null;
+ }
+
+ @Override
+ public ChunksResult<? extends ChunkInfo<?>> pollChunks() {
+ return null;
+ }
+ };
+
+ /**
+ * Polls for counts.
+ *
+ * @return the counts
+ */
+ CountsResult pollCounts();
/**
- * Polls for information.
+ * Polls for chunk information.
*
- * @return the information
+ * @return the chunk information
*/
- Result<? extends ChunkInfo<?>> poll();
+ ChunksResult<? extends ChunkInfo<?>> pollChunks();
default boolean mustCallSync() {
return true;
}
- final class Result<T> {
+ final class ChunksResult<T extends ChunkInfo<?>> {
private final Map<String, List<T>> worlds = new HashMap<>();
public void put(String worldName, List<T> chunks) {
@@ -54,4 +71,34 @@ public interface WorldInfoProvider {
}
}
+ final class CountsResult {
+ private final int players;
+ private final int entities;
+ private final int tileEntities;
+ private final int chunks;
+
+ public CountsResult(int players, int entities, int tileEntities, int chunks) {
+ this.players = players;
+ this.entities = entities;
+ this.tileEntities = tileEntities;
+ this.chunks = chunks;
+ }
+
+ public int players() {
+ return this.players;
+ }
+
+ public int entities() {
+ return this.entities;
+ }
+
+ public int tileEntities() {
+ return this.tileEntities;
+ }
+
+ public int chunks() {
+ return this.chunks;
+ }
+ }
+
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java
index 80c35a6..7e63222 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java
@@ -20,8 +20,6 @@
package me.lucko.spark.common.platform.world;
-import me.lucko.spark.common.SparkPlatform;
-import me.lucko.spark.common.SparkPlugin;
import me.lucko.spark.proto.SparkProtos.WorldStatistics;
import java.util.ArrayList;
@@ -30,46 +28,17 @@ import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
-import java.util.concurrent.CompletableFuture;
-import java.util.concurrent.ExecutionException;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicInteger;
-import java.util.logging.Level;
public class WorldStatisticsProvider {
- private final SparkPlatform platform;
- private final WorldInfoProvider provider;
+ private final AsyncWorldInfoProvider provider;
- public WorldStatisticsProvider(SparkPlatform platform, WorldInfoProvider provider) {
- this.platform = platform;
+ public WorldStatisticsProvider(AsyncWorldInfoProvider provider) {
this.provider = provider;
}
public WorldStatistics getWorldStatistics() {
- if (this.provider == WorldInfoProvider.NO_OP) {
- return null;
- }
-
- CompletableFuture<WorldInfoProvider.Result<? extends ChunkInfo<?>>> future;
-
- if (this.provider.mustCallSync()) {
- SparkPlugin plugin = this.platform.getPlugin();
- future = CompletableFuture.supplyAsync(this.provider::poll, plugin::executeSync);
- } else {
- future = CompletableFuture.completedFuture(this.provider.poll());
- }
-
- WorldInfoProvider.Result<? extends ChunkInfo<?>> result;
- try {
- result = future.get(5, TimeUnit.SECONDS);
- } catch (InterruptedException | ExecutionException e) {
- throw new RuntimeException(e);
- } catch (TimeoutException e) {
- this.platform.getPlugin().log(Level.WARNING, "Timed out waiting for world statistics");
- return null;
- }
-
+ WorldInfoProvider.ChunksResult<? extends ChunkInfo<?>> result = provider.getChunks();
if (result == null) {
return null;
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java
index 59e873c..e324fd3 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java
@@ -120,7 +120,7 @@ public abstract class AbstractSampler implements Sampler {
}
@Override
- public void stop() {
+ public void stop(boolean cancelled) {
this.windowStatisticsCollector.stop();
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
index 5d2026d..36a63f1 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
@@ -41,7 +41,7 @@ public interface Sampler {
/**
* Stops the sampler.
*/
- void stop();
+ void stop(boolean cancelled);
/**
* Gets the time when the sampler started (unix timestamp in millis)
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerContainer.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerContainer.java
index f56dee5..d55909c 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerContainer.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerContainer.java
@@ -66,10 +66,10 @@ public class SamplerContainer implements AutoCloseable {
/**
* Stops the active sampler, if there is one.
*/
- public void stopActiveSampler() {
+ public void stopActiveSampler(boolean cancelled) {
Sampler sampler = this.activeSampler.getAndSet(null);
if (sampler != null) {
- sampler.stop();
+ sampler.stop(cancelled);
}
}
@@ -79,7 +79,7 @@ public class SamplerContainer implements AutoCloseable {
@Override
public void close() {
- stopActiveSampler();
+ stopActiveSampler(true);
}
}
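
Note: a short sketch (not part of the diff) of the two stop paths the new boolean distinguishes, mirroring the SamplerModule calls above.

import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.sampler.Sampler;

class StopSemanticsExample {
    // normal completion: the final window is measured and the profiler data is kept
    static void finish(SparkPlatform platform, Sampler sampler) {
        platform.getSamplerContainer().unsetActiveSampler(sampler);
        sampler.stop(false);
    }

    // cancellation: no final window is measured and the async profiler output file is deleted
    static void cancel(SparkPlatform platform) {
        platform.getSamplerContainer().stopActiveSampler(true);
    }
}
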
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerJob.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerJob.java
index db1808c..d74b75f 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerJob.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerJob.java
@@ -224,13 +224,15 @@ public class AsyncProfilerJob {
}
}
- // delete the output file after reading
+ deleteOutputFile();
+ }
+
+ public void deleteOutputFile() {
try {
Files.deleteIfExists(this.outputFile);
} catch (IOException e) {
// ignore
}
-
}
private void readSegments(JfrReader reader, Predicate<String> threadFilter, AsyncDataAggregator dataAggregator, int window) throws IOException {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java
index f2e7191..178f055 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java
@@ -144,7 +144,7 @@ public class AsyncSampler extends AbstractSampler {
}
this.scheduler.schedule(() -> {
- stop();
+ stop(false);
this.future.complete(this);
}, delay, TimeUnit.MILLISECONDS);
}
@@ -153,13 +153,17 @@ public class AsyncSampler extends AbstractSampler {
* Stops the profiler.
*/
@Override
- public void stop() {
- super.stop();
+ public void stop(boolean cancelled) {
+ super.stop(cancelled);
synchronized (this.currentJobMutex) {
this.currentJob.stop();
- this.windowStatisticsCollector.measureNow(this.currentJob.getWindow());
- this.currentJob.aggregate(this.dataAggregator);
+ if (!cancelled) {
+ this.windowStatisticsCollector.measureNow(this.currentJob.getWindow());
+ this.currentJob.aggregate(this.dataAggregator);
+ } else {
+ this.currentJob.deleteOutputFile();
+ }
this.currentJob = null;
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java
index 42a457d..72a37e8 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java
@@ -94,13 +94,15 @@ public class JavaSampler extends AbstractSampler implements Runnable {
}
@Override
- public void stop() {
- super.stop();
+ public void stop(boolean cancelled) {
+ super.stop(cancelled);
this.task.cancel(false);
- // collect statistics for the final window
- this.windowStatisticsCollector.measureNow(this.lastWindow.get());
+ if (!cancelled) {
+ // collect statistics for the final window
+ this.windowStatisticsCollector.measureNow(this.lastWindow.get());
+ }
}
@Override
@@ -111,7 +113,7 @@ public class JavaSampler extends AbstractSampler implements Runnable {
long time = System.currentTimeMillis();
if (this.autoEndTime != -1 && this.autoEndTime <= time) {
- stop();
+ stop(false);
this.future.complete(this);
return;
}
@@ -120,7 +122,7 @@ public class JavaSampler extends AbstractSampler implements Runnable {
ThreadInfo[] threadDumps = this.threadDumper.dumpThreads(this.threadBean);
this.workerPool.execute(new InsertDataTask(threadDumps, window));
} catch (Throwable t) {
- stop();
+ stop(false);
this.future.completeExceptionally(t);
}
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/window/WindowStatisticsCollector.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/window/WindowStatisticsCollector.java
index 7da62fa..ce65013 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/window/WindowStatisticsCollector.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/window/WindowStatisticsCollector.java
@@ -23,6 +23,8 @@ package me.lucko.spark.common.sampler.window;
import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.monitor.cpu.CpuMonitor;
import me.lucko.spark.common.monitor.tick.TickStatistics;
+import me.lucko.spark.common.platform.world.AsyncWorldInfoProvider;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.common.util.RollingAverage;
import me.lucko.spark.proto.SparkProtos;
@@ -152,6 +154,19 @@ public class WindowStatisticsCollector {
builder.setCpuProcess(CpuMonitor.processLoad1MinAvg());
builder.setCpuSystem(CpuMonitor.systemLoad1MinAvg());
+ try {
+ AsyncWorldInfoProvider worldInfoProvider = new AsyncWorldInfoProvider(this.platform, this.platform.getPlugin().createWorldInfoProvider());
+ WorldInfoProvider.CountsResult counts = worldInfoProvider.getCounts();
+ if (counts != null) {
+ builder.setPlayers(counts.players());
+ builder.setEntities(counts.entities());
+ builder.setTileEntities(counts.tileEntities());
+ builder.setChunks(counts.chunks());
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+
return builder.build();
}
diff --git a/spark-common/src/main/proto/spark/spark.proto b/spark-common/src/main/proto/spark/spark.proto
index be76bd7..f61e585 100644
--- a/spark-common/src/main/proto/spark/spark.proto
+++ b/spark-common/src/main/proto/spark/spark.proto
@@ -159,6 +159,12 @@ message WindowStatistics {
double tps = 4;
double mspt_median = 5;
double mspt_max = 6;
+
+ // world
+ int32 players = 7;
+ int32 entities = 8;
+ int32 tile_entities = 9;
+ int32 chunks = 10;
}
message RollingAverageValues {
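
Note: the new int32 fields follow the usual protobuf-java codegen, so a consumer reads them with plain getters; a sketch (not part of the diff), assuming the generated SparkProtos outer class used elsewhere in spark-common.

import me.lucko.spark.proto.SparkProtos;

class WindowStatisticsReadExample {
    static String describe(SparkProtos.WindowStatistics stats) {
        return stats.getPlayers() + " players, " + stats.getEntities() + " entities, "
                + stats.getTileEntities() + " tile entities, " + stats.getChunks() + " chunks";
    }
}
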
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricWorldInfoProvider.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricWorldInfoProvider.java
index f2f7b96..156db89 100644
--- a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricWorldInfoProvider.java
+++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricWorldInfoProvider.java
@@ -40,6 +40,7 @@ import net.minecraft.server.MinecraftServer;
import net.minecraft.server.world.ServerEntityManager;
import net.minecraft.server.world.ServerWorld;
import net.minecraft.util.math.ChunkPos;
+import net.minecraft.world.entity.EntityIndex;
import net.minecraft.world.entity.EntityTrackingSection;
import net.minecraft.world.entity.SectionedEntityCache;
@@ -72,8 +73,25 @@ public abstract class FabricWorldInfoProvider implements WorldInfoProvider {
}
@Override
- public Result<FabricChunkInfo> poll() {
- Result<FabricChunkInfo> data = new Result<>();
+ public CountsResult pollCounts() {
+ int players = this.server.getCurrentPlayerCount();
+ int entities = 0;
+ int chunks = 0;
+
+ for (ServerWorld world : this.server.getWorlds()) {
+ ServerEntityManager<Entity> entityManager = ((ServerWorldAccessor) world).getEntityManager();
+ EntityIndex<?> entityIndex = ((ServerEntityManagerAccessor) entityManager).getIndex();
+
+ entities += entityIndex.size();
+ chunks += world.getChunkManager().getLoadedChunkCount();
+ }
+
+ return new CountsResult(players, entities, -1, chunks);
+ }
+
+ @Override
+ public ChunksResult<FabricChunkInfo> pollChunks() {
+ ChunksResult<FabricChunkInfo> data = new ChunksResult<>();
for (ServerWorld world : this.server.getWorlds()) {
ServerEntityManager<Entity> entityManager = ((ServerWorldAccessor) world).getEntityManager();
@@ -95,8 +113,24 @@ public abstract class FabricWorldInfoProvider implements WorldInfoProvider {
}
@Override
- public Result<FabricChunkInfo> poll() {
- Result<FabricChunkInfo> data = new Result<>();
+ public CountsResult pollCounts() {
+ ClientWorld world = this.client.world;
+ if (world == null) {
+ return null;
+ }
+
+ ClientEntityManager<Entity> entityManager = ((ClientWorldAccessor) world).getEntityManager();
+ EntityIndex<?> entityIndex = ((ClientEntityManagerAccessor) entityManager).getIndex();
+
+ int entities = entityIndex.size();
+ int chunks = world.getChunkManager().getLoadedChunkCount();
+
+ return new CountsResult(-1, entities, -1, chunks);
+ }
+
+ @Override
+ public ChunksResult<FabricChunkInfo> pollChunks() {
+ ChunksResult<FabricChunkInfo> data = new ChunksResult<>();
ClientWorld world = this.client.world;
if (world == null) {
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientEntityManagerAccessor.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientEntityManagerAccessor.java
index 88c9521..994c9a3 100644
--- a/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientEntityManagerAccessor.java
+++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientEntityManagerAccessor.java
@@ -22,6 +22,7 @@ package me.lucko.spark.fabric.mixin;
import net.minecraft.client.world.ClientEntityManager;
import net.minecraft.entity.Entity;
+import net.minecraft.world.entity.EntityIndex;
import net.minecraft.world.entity.SectionedEntityCache;
import org.spongepowered.asm.mixin.Mixin;
@@ -33,4 +34,7 @@ public interface ClientEntityManagerAccessor {
@Accessor
SectionedEntityCache<Entity> getCache();
+ @Accessor
+ EntityIndex<?> getIndex();
+
}
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerEntityManagerAccessor.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerEntityManagerAccessor.java
index 160a12b..2c67502 100644
--- a/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerEntityManagerAccessor.java
+++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerEntityManagerAccessor.java
@@ -22,6 +22,7 @@ package me.lucko.spark.fabric.mixin;
import net.minecraft.entity.Entity;
import net.minecraft.server.world.ServerEntityManager;
+import net.minecraft.world.entity.EntityIndex;
import net.minecraft.world.entity.SectionedEntityCache;
import org.spongepowered.asm.mixin.Mixin;
@@ -33,4 +34,7 @@ public interface ServerEntityManagerAccessor {
@Accessor
SectionedEntityCache<Entity> getCache();
+ @Accessor
+ EntityIndex<?> getIndex();
+
}
diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeWorldInfoProvider.java b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeWorldInfoProvider.java
index 1d65d6a..4750c08 100644
--- a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeWorldInfoProvider.java
+++ b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeWorldInfoProvider.java
@@ -34,6 +34,7 @@ import net.minecraft.server.level.ServerLevel;
import net.minecraft.world.entity.Entity;
import net.minecraft.world.entity.EntityType;
import net.minecraft.world.level.ChunkPos;
+import net.minecraft.world.level.entity.EntityLookup;
import net.minecraft.world.level.entity.EntitySection;
import net.minecraft.world.level.entity.EntitySectionStorage;
import net.minecraft.world.level.entity.PersistentEntitySectionManager;
@@ -68,8 +69,25 @@ public abstract class ForgeWorldInfoProvider implements WorldInfoProvider {
}
@Override
- public Result<ForgeChunkInfo> poll() {
- Result<ForgeChunkInfo> data = new Result<>();
+ public CountsResult pollCounts() {
+ int players = this.server.getPlayerCount();
+ int entities = 0;
+ int chunks = 0;
+
+ for (ServerLevel level : this.server.getAllLevels()) {
+ PersistentEntitySectionManager<Entity> entityManager = level.entityManager;
+ EntityLookup<Entity> entityIndex = entityManager.visibleEntityStorage;
+
+ entities += entityIndex.count();
+ chunks += level.getChunkSource().getLoadedChunksCount();
+ }
+
+ return new CountsResult(players, entities, -1, chunks);
+ }
+
+ @Override
+ public ChunksResult<ForgeChunkInfo> pollChunks() {
+ ChunksResult<ForgeChunkInfo> data = new ChunksResult<>();
for (ServerLevel level : this.server.getAllLevels()) {
PersistentEntitySectionManager<Entity> entityManager = level.entityManager;
@@ -91,8 +109,24 @@ public abstract class ForgeWorldInfoProvider implements WorldInfoProvider {
}
@Override
- public Result<ForgeChunkInfo> poll() {
- Result<ForgeChunkInfo> data = new Result<>();
+ public CountsResult pollCounts() {
+ ClientLevel level = this.client.level;
+ if (level == null) {
+ return null;
+ }
+
+ TransientEntitySectionManager<Entity> entityManager = level.entityStorage;
+ EntityLookup<Entity> entityIndex = entityManager.entityStorage;
+
+ int entities = entityIndex.count();
+ int chunks = level.getChunkSource().getLoadedChunksCount();
+
+ return new CountsResult(-1, entities, -1, chunks);
+ }
+
+ @Override
+ public ChunksResult<ForgeChunkInfo> pollChunks() {
+ ChunksResult<ForgeChunkInfo> data = new ChunksResult<>();
ClientLevel level = this.client.level;
if (level == null) {
diff --git a/spark-forge/src/main/resources/META-INF/accesstransformer.cfg b/spark-forge/src/main/resources/META-INF/accesstransformer.cfg
index 39e9c1a..2699a0e 100644
--- a/spark-forge/src/main/resources/META-INF/accesstransformer.cfg
+++ b/spark-forge/src/main/resources/META-INF/accesstransformer.cfg
@@ -1,5 +1,7 @@
public net.minecraft.server.level.ServerLevel f_143244_ # entityManager
public net.minecraft.world.level.entity.PersistentEntitySectionManager f_157495_ # sectionStorage
+public net.minecraft.world.level.entity.PersistentEntitySectionManager f_157494_ # visibleEntityStorage
public net.minecraft.client.multiplayer.ClientLevel f_171631_ # entityStorage
public net.minecraft.world.level.entity.TransientEntitySectionManager f_157638_ # sectionStorage
-public net.minecraft.client.Minecraft f_91018_ # gameThread
\ No newline at end of file
+public net.minecraft.world.level.entity.TransientEntitySectionManager f_157637_ # entityStorage
+public net.minecraft.client.Minecraft f_91018_ # gameThread
diff --git a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7WorldInfoProvider.java b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7WorldInfoProvider.java
index fa6fa6b..df58028 100644
--- a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7WorldInfoProvider.java
+++ b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7WorldInfoProvider.java
@@ -20,6 +20,7 @@
package me.lucko.spark.sponge;
+import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import me.lucko.spark.common.platform.world.AbstractChunkInfo;
@@ -44,8 +45,24 @@ public class Sponge7WorldInfoProvider implements WorldInfoProvider {
}
@Override
- public Result<Sponge7ChunkInfo> poll() {
- Result<Sponge7ChunkInfo> data = new Result<>();
+ public CountsResult pollCounts() {
+ int players = this.server.getOnlinePlayers().size();
+ int entities = 0;
+ int tileEntities = 0;
+ int chunks = 0;
+
+ for (World world : this.server.getWorlds()) {
+ entities += world.getEntities().size();
+ tileEntities += world.getTileEntities().size();
+ chunks += Iterables.size(world.getLoadedChunks());
+ }
+
+ return new CountsResult(players, entities, tileEntities, chunks);
+ }
+
+ @Override
+ public ChunksResult<Sponge7ChunkInfo> pollChunks() {
+ ChunksResult<Sponge7ChunkInfo> data = new ChunksResult<>();
for (World world : this.server.getWorlds()) {
List<Chunk> chunks = Lists.newArrayList(world.getLoadedChunks());
diff --git a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8WorldInfoProvider.java b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8WorldInfoProvider.java
index bff4d6e..69b4515 100644
--- a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8WorldInfoProvider.java
+++ b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8WorldInfoProvider.java
@@ -20,6 +20,7 @@
package me.lucko.spark.sponge;
+import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import me.lucko.spark.common.platform.world.AbstractChunkInfo;
@@ -45,8 +46,24 @@ public class Sponge8WorldInfoProvider implements WorldInfoProvider {
}
@Override
- public Result<Sponge7ChunkInfo> poll() {
- Result<Sponge7ChunkInfo> data = new Result<>();
+ public CountsResult pollCounts() {
+ int players = this.server.onlinePlayers().size();
+ int entities = 0;
+ int tileEntities = 0;
+ int chunks = 0;
+
+ for (ServerWorld world : this.server.worldManager().worlds()) {
+ entities += world.entities().size();
+ tileEntities += world.blockEntities().size();
+ chunks += Iterables.size(world.loadedChunks());
+ }
+
+ return new CountsResult(players, entities, tileEntities, chunks);
+ }
+
+ @Override
+ public ChunksResult<Sponge7ChunkInfo> pollChunks() {
+ ChunksResult<Sponge7ChunkInfo> data = new ChunksResult<>();
for (ServerWorld world : this.server.worldManager().worlds()) {
List<WorldChunk> chunks = Lists.newArrayList(world.loadedChunks());