| author | lucko <git@lucko.me> | 2023-06-08 21:35:54 +0100 |
|---|---|---|
| committer | GitHub <noreply@github.com> | 2023-06-08 21:35:54 +0100 |
| commit | 7e1fe4231d3b098f08ee9ebb68a551016e345a73 (patch) | |
| tree | d4f6c92ce81256dbda05a3005534e0c0f0265d9c | |
| parent | e5b278047ccb7bc6b301d787474c51d162911867 (diff) | |
| parent | a89e8d3cc42702e80e2f973e79aab6090e74a72e (diff) | |
| download | spark-7e1fe4231d3b098f08ee9ebb68a551016e345a73.tar.gz spark-7e1fe4231d3b098f08ee9ebb68a551016e345a73.tar.bz2 spark-7e1fe4231d3b098f08ee9ebb68a551016e345a73.zip | |
Merge pull request #332 from embeddedt/forge-1.7.10
1.7.10 update
64 files changed, 2292 insertions, 416 deletions
```diff
diff --git a/spark-api/src/main/java/me/lucko/spark/api/statistic/StatisticWindow.java b/spark-api/src/main/java/me/lucko/spark/api/statistic/StatisticWindow.java
index cdf4d01..cdaa2b8 100644
--- a/spark-api/src/main/java/me/lucko/spark/api/statistic/StatisticWindow.java
+++ b/spark-api/src/main/java/me/lucko/spark/api/statistic/StatisticWindow.java
@@ -92,7 +92,8 @@ public interface StatisticWindow {
 
     enum MillisPerTick implements StatisticWindow {
 
         SECONDS_10(Duration.ofSeconds(10)),
-        MINUTES_1(Duration.ofMinutes(1));
+        MINUTES_1(Duration.ofMinutes(1)),
+        MINUTES_5(Duration.ofMinutes(5));
 
         private final Duration value;
diff --git a/spark-bukkit/build.gradle b/spark-bukkit/build.gradle
index 92b65cc..4c33dfc 100644
--- a/spark-bukkit/build.gradle
+++ b/spark-bukkit/build.gradle
@@ -15,7 +15,7 @@ dependencies {
 }
 
 repositories {
-    maven { url 'https://papermc.io/repo/repository/maven-public/' }
+    maven { url "https://repo.papermc.io/repository/maven-public/" }
 }
 
 processResources {
@@ -37,6 +37,7 @@ shadowJar {
     relocate 'com.google.protobuf', 'me.lucko.spark.lib.protobuf'
     relocate 'org.objectweb.asm', 'me.lucko.spark.lib.asm'
     relocate 'one.profiler', 'me.lucko.spark.lib.asyncprofiler'
+    relocate 'me.lucko.bytesocks.client', 'me.lucko.spark.lib.bytesocks'
 
     exclude 'module-info.class'
     exclude 'META-INF/maven/**'
diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitWorldInfoProvider.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitWorldInfoProvider.java
index 8f876cf..babb0bc 100644
--- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitWorldInfoProvider.java
+++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitWorldInfoProvider.java
@@ -27,6 +27,7 @@ import me.lucko.spark.common.platform.world.WorldInfoProvider;
 import org.bukkit.Chunk;
 import org.bukkit.Server;
 import org.bukkit.World;
+import org.bukkit.block.BlockState;
 import org.bukkit.entity.Entity;
 import org.bukkit.entity.EntityType;
 
@@ -69,11 +70,21 @@ public class BukkitWorldInfoProvider implements WorldInfoProvider {
                 chunks += world.getChunkCount();
             } else {
                 entities += world.getEntities().size();
+
                 Chunk[] chunksArray = world.getLoadedChunks();
+                int nullChunks = 0;
+
                 for (Chunk chunk : chunksArray) {
-                    tileEntities += chunk.getTileEntities().length;
+                    if (chunk == null) {
+                        ++nullChunks;
+                        continue;
+                    }
+
+                    BlockState[] tileEntitiesArray = chunk.getTileEntities();
+                    tileEntities += tileEntitiesArray != null ? tileEntitiesArray.length : 0;
                 }
-                chunks += chunksArray.length;
+
+                chunks += chunksArray.length - nullChunks;
             }
         }
diff --git a/spark-bungeecord/build.gradle b/spark-bungeecord/build.gradle
index 885de55..7e6b93f 100644
--- a/spark-bungeecord/build.gradle
+++ b/spark-bungeecord/build.gradle
@@ -27,6 +27,7 @@ shadowJar {
     relocate 'com.google.protobuf', 'me.lucko.spark.lib.protobuf'
     relocate 'org.objectweb.asm', 'me.lucko.spark.lib.asm'
     relocate 'one.profiler', 'me.lucko.spark.lib.asyncprofiler'
+    relocate 'me.lucko.bytesocks.client', 'me.lucko.spark.lib.bytesocks'
 
     exclude 'module-info.class'
     exclude 'META-INF/maven/**'
diff --git a/spark-common/build.gradle b/spark-common/build.gradle
index c3d960d..514c5dd 100644
--- a/spark-common/build.gradle
+++ b/spark-common/build.gradle
@@ -18,8 +18,11 @@ dependencies {
     api project(':spark-api')
     implementation 'com.github.jvm-profiling-tools:async-profiler:v2.8.3'
     implementation 'org.ow2.asm:asm:9.1'
-    implementation 'com.google.protobuf:protobuf-javalite:3.21.11'
     implementation 'net.bytebuddy:byte-buddy-agent:1.11.0'
+    implementation 'com.google.protobuf:protobuf-javalite:3.21.11'
+    implementation 'me.lucko:bytesocks-java-client-api:1.0-SNAPSHOT'
+    implementation 'com.neovisionaries:nv-websocket-client:2.14'
+
     api('net.kyori:adventure-api:4.12.0') {
         exclude(module: 'adventure-bom')
         exclude(module: 'checker-qual')
diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
index dae04ff..24b879a 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
@@ -23,6 +23,7 @@ package me.lucko.spark.common;
 
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
+import me.lucko.bytesocks.client.BytesocksClient;
 import me.lucko.spark.common.activitylog.ActivityLog;
 import me.lucko.spark.common.api.SparkApi;
 import me.lucko.spark.common.command.Arguments;
@@ -38,11 +39,13 @@ import me.lucko.spark.common.command.modules.TickMonitoringModule;
 import me.lucko.spark.common.command.sender.CommandSender;
 import me.lucko.spark.common.command.tabcomplete.CompletionSupplier;
 import me.lucko.spark.common.command.tabcomplete.TabCompleter;
+import me.lucko.spark.common.legacy.LegacyBytesocksClientFactory;
 import me.lucko.spark.common.monitor.cpu.CpuMonitor;
 import me.lucko.spark.common.monitor.memory.GarbageCollectorStatistics;
 import me.lucko.spark.common.monitor.net.NetworkMonitor;
 import me.lucko.spark.common.monitor.ping.PingStatistics;
 import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
+import me.lucko.spark.common.monitor.tick.SparkTickStatistics;
 import me.lucko.spark.common.monitor.tick.TickStatistics;
 import me.lucko.spark.common.platform.PlatformStatisticsProvider;
 import me.lucko.spark.common.sampler.BackgroundSamplerManager;
@@ -53,6 +56,7 @@ import me.lucko.spark.common.tick.TickReporter;
 import me.lucko.spark.common.util.BytebinClient;
 import me.lucko.spark.common.util.Configuration;
 import me.lucko.spark.common.util.TemporaryFiles;
+import me.lucko.spark.common.ws.TrustedKeyStore;
 
 import net.kyori.adventure.text.Component;
 import net.kyori.adventure.text.event.ClickEvent;
@@ -95,6 +99,8 @@ public class SparkPlatform {
     private final Configuration configuration;
     private final String viewerUrl;
     private final BytebinClient bytebinClient;
+    private final BytesocksClient bytesocksClient;
+    private final TrustedKeyStore trustedKeyStore;
     private final boolean disableResponseBroadcast;
     private final List<CommandModule> commandModules;
     private final List<Command> commands;
@@ -118,8 +124,12 @@ public class SparkPlatform {
         this.configuration = new Configuration(this.plugin.getPluginDirectory().resolve("config.json"));
 
         this.viewerUrl = this.configuration.getString("viewerUrl", "https://spark.lucko.me/");
-        String bytebinUrl = this.configuration.getString("bytebinUrl", "https://bytebin.lucko.me/");
+        String bytebinUrl = this.configuration.getString("bytebinUrl", "https://spark-usercontent.lucko.me/");
+        String bytesocksHost = this.configuration.getString("bytesocksHost", "spark-usersockets.lucko.me");
+
         this.bytebinClient = new BytebinClient(bytebinUrl, "spark-plugin");
+        this.bytesocksClient = LegacyBytesocksClientFactory.newClient(bytesocksHost, "spark-plugin");
+        this.trustedKeyStore = new TrustedKeyStore(this.configuration);
 
         this.disableResponseBroadcast = this.configuration.getBoolean("disableResponseBroadcast", false);
 
@@ -144,9 +154,13 @@ public class SparkPlatform {
         this.samplerContainer = new SamplerContainer();
         this.backgroundSamplerManager = new BackgroundSamplerManager(this, this.configuration);
 
+        TickStatistics tickStatistics = plugin.createTickStatistics();
         this.tickHook = plugin.createTickHook();
         this.tickReporter = plugin.createTickReporter();
-        this.tickStatistics = this.tickHook != null || this.tickReporter != null ? new TickStatistics() : null;
+        if (tickStatistics == null && (this.tickHook != null || this.tickReporter != null)) {
+            tickStatistics = new SparkTickStatistics();
+        }
+        this.tickStatistics = tickStatistics;
 
         PlayerPingProvider pingProvider = plugin.createPlayerPingProvider();
         this.pingStatistics = pingProvider != null ? new PingStatistics(pingProvider) : null;
@@ -159,12 +173,12 @@ public class SparkPlatform {
             throw new RuntimeException("Platform has already been enabled!");
         }
 
-        if (this.tickHook != null) {
-            this.tickHook.addCallback(this.tickStatistics);
+        if (this.tickHook != null && this.tickStatistics instanceof SparkTickStatistics) {
+            this.tickHook.addCallback((TickHook.Callback) this.tickStatistics);
             this.tickHook.start();
         }
-        if (this.tickReporter != null) {
-            this.tickReporter.addCallback(this.tickStatistics);
+        if (this.tickReporter != null && this.tickStatistics instanceof SparkTickStatistics) {
+            this.tickReporter.addCallback((TickReporter.Callback) this.tickStatistics);
             this.tickReporter.start();
         }
         if (this.pingStatistics != null) {
@@ -228,6 +242,14 @@ public class SparkPlatform {
         return this.bytebinClient;
     }
 
+    public BytesocksClient getBytesocksClient() {
+        return this.bytesocksClient;
+    }
+
+    public TrustedKeyStore getTrustedKeyStore() {
+        return this.trustedKeyStore;
+    }
+
     public boolean shouldBroadcastResponse() {
         return !this.disableResponseBroadcast;
     }
diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java
index b7aef2a..a3bdceb 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java
@@ -23,6 +23,7 @@ package me.lucko.spark.common;
 import me.lucko.spark.api.Spark;
 import me.lucko.spark.common.command.sender.CommandSender;
 import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
+import me.lucko.spark.common.monitor.tick.TickStatistics;
 import me.lucko.spark.common.platform.MetadataProvider;
 import me.lucko.spark.common.platform.PlatformInfo;
 import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider;
@@ -128,6 +129,18 @@ public interface SparkPlugin {
     }
 
     /**
+     * Creates tick statistics for the platform, if supported.
+     *
+     * <p>Spark is able to provide a default implementation for platforms that
+     * provide a {@link TickHook} and {@link TickReporter}.</p>
+     *
+     * @return a new tick statistics instance
+     */
+    default TickStatistics createTickStatistics() {
+        return null;
+    }
+
+    /**
      * Creates a class source lookup function.
      *
      * @return the class source lookup function
diff --git a/spark-common/src/main/java/me/lucko/spark/common/api/SparkApi.java b/spark-common/src/main/java/me/lucko/spark/common/api/SparkApi.java
index 5b1ec2b..9e4eee4 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/api/SparkApi.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/api/SparkApi.java
@@ -151,6 +151,8 @@ public class SparkApi implements Spark {
                 return stats.duration10Sec();
             case MINUTES_1:
                 return stats.duration1Min();
+            case MINUTES_5:
+                return stats.duration5Min();
             default:
                 throw new AssertionError(window);
         }
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java
index 5bd62a8..6ac3b2f 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java
@@ -32,6 +32,7 @@ import me.lucko.spark.common.heapdump.HeapDump;
 import me.lucko.spark.common.heapdump.HeapDumpSummary;
 import me.lucko.spark.common.util.Compression;
 import me.lucko.spark.common.util.FormatUtil;
+import me.lucko.spark.common.util.MediaTypes;
 import me.lucko.spark.proto.SparkHeapProtos;
 
 import net.kyori.adventure.text.event.ClickEvent;
@@ -52,7 +53,6 @@ import static net.kyori.adventure.text.format.NamedTextColor.GREEN;
 import static net.kyori.adventure.text.format.NamedTextColor.RED;
 
 public class HeapAnalysisModule implements CommandModule {
-    private static final String SPARK_HEAP_MEDIA_TYPE = "application/x-spark-heap";
 
     @Override
     public void registerCommands(Consumer<Command> consumer) {
@@ -97,7 +97,7 @@ public class HeapAnalysisModule implements CommandModule {
             saveToFile = true;
         } else {
             try {
-                String key = platform.getBytebinClient().postContent(output, SPARK_HEAP_MEDIA_TYPE).key();
+                String key = platform.getBytebinClient().postContent(output, MediaTypes.SPARK_HEAP_MEDIA_TYPE).key();
                 String url = platform.getViewerUrl() + key;
 
                 resp.broadcastPrefixed(text("Heap dump summmary output:", GOLD));
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
index cd00f0d..27e790f 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
@@ -22,6 +22,7 @@ package me.lucko.spark.common.command.modules;
 
 import com.google.common.collect.Iterables;
 
+import me.lucko.bytesocks.client.BytesocksClient;
 import me.lucko.spark.common.SparkPlatform;
 import me.lucko.spark.common.activitylog.Activity;
 import me.lucko.spark.common.command.Arguments;
@@ -33,6 +34,7 @@ import me.lucko.spark.common.command.tabcomplete.CompletionSupplier;
 import me.lucko.spark.common.command.tabcomplete.TabCompleter;
 import me.lucko.spark.common.sampler.Sampler;
 import me.lucko.spark.common.sampler.SamplerBuilder;
+import me.lucko.spark.common.sampler.SamplerMode;
 import me.lucko.spark.common.sampler.ThreadDumper;
 import me.lucko.spark.common.sampler.ThreadGrouper;
 import me.lucko.spark.common.sampler.async.AsyncSampler;
@@ -40,7 +42,9 @@ import me.lucko.spark.common.sampler.node.MergeMode;
 import me.lucko.spark.common.sampler.source.ClassSourceLookup;
 import me.lucko.spark.common.tick.TickHook;
 import me.lucko.spark.common.util.FormatUtil;
+import me.lucko.spark.common.util.MediaTypes;
 import me.lucko.spark.common.util.MethodDisambiguator;
+import me.lucko.spark.common.ws.ViewerSocket;
 import me.lucko.spark.proto.SparkSamplerProtos;
 
 import net.kyori.adventure.text.Component;
@@ -67,7 +71,6 @@ import static net.kyori.adventure.text.format.NamedTextColor.RED;
 import static net.kyori.adventure.text.format.NamedTextColor.WHITE;
 
 public class SamplerModule implements CommandModule {
-    private static final String SPARK_SAMPLER_MEDIA_TYPE = "application/x-spark-sampler";
 
     @Override
     public void registerCommands(Consumer<Command> consumer) {
@@ -75,11 +78,13 @@ public class SamplerModule implements CommandModule {
                 .aliases("profiler", "sampler")
                 .allowSubCommand(true)
                 .argumentUsage("info", "", null)
+                .argumentUsage("open", "", null)
                 .argumentUsage("start", "timeout", "timeout seconds")
                 .argumentUsage("start", "thread *", null)
                 .argumentUsage("start", "thread", "thread name")
                 .argumentUsage("start", "only-ticks-over", "tick length millis")
                 .argumentUsage("start", "interval", "interval millis")
+                .argumentUsage("start", "alloc", null)
                 .argumentUsage("stop", "", null)
                 .argumentUsage("cancel", "", null)
                 .executor(this::profiler)
@@ -94,14 +99,14 @@ public class SamplerModule implements CommandModule {
             }
             if (subCommand.equals("start")) {
                 opts = new ArrayList<>(Arrays.asList("--timeout", "--regex", "--combine-all",
-                        "--not-combined", "--interval", "--only-ticks-over", "--force-java-sampler"));
+                        "--not-combined", "--interval", "--only-ticks-over", "--force-java-sampler", "--alloc", "--alloc-live-only"));
                 opts.removeAll(arguments);
                 opts.add("--thread"); // allowed multiple times
             }
         }
 
         return TabCompleter.create()
-                .at(0, CompletionSupplier.startsWith(Arrays.asList("info", "start", "stop", "cancel")))
+                .at(0, CompletionSupplier.startsWith(Arrays
```
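The StatisticWindow hunk above adds a MINUTES_5 window to MillisPerTick, and the SparkApi hunk maps it onto `stats.duration5Min()`. As a rough illustration of how a plugin might consume the new window through the public spark-api, here is a minimal sketch; it is not part of this commit, and it assumes the published `SparkProvider`, `GenericStatistic` and `DoubleAverageInfo` types behave as documented.

```java
// Illustrative sketch only (not from this diff): polling the new MINUTES_5
// MSPT window via the public spark-api. Assumes spark is installed and that
// the spark-api types used below exist with these signatures.
import me.lucko.spark.api.Spark;
import me.lucko.spark.api.SparkProvider;
import me.lucko.spark.api.statistic.StatisticWindow;
import me.lucko.spark.api.statistic.misc.DoubleAverageInfo;
import me.lucko.spark.api.statistic.types.GenericStatistic;

public final class MsptWindowExample {

    public static void logMspt() {
        Spark spark = SparkProvider.get(); // throws if spark is not loaded

        // mspt() is nullable: not every platform exposes tick statistics
        GenericStatistic<DoubleAverageInfo, StatisticWindow.MillisPerTick> mspt = spark.mspt();
        if (mspt == null) {
            return;
        }

        // poll the 5-minute window introduced by this commit
        DoubleAverageInfo info = mspt.poll(StatisticWindow.MillisPerTick.MINUTES_5);
        System.out.println("MSPT over the last 5 minutes: mean=" + info.mean()
                + "ms, max=" + info.max() + "ms");
    }

    private MsptWindowExample() {
    }
}
```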
