16 files changed, 520 insertions, 78 deletions
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java
index 15722d7..fbf79ef 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java
@@ -21,6 +21,7 @@ package me.lucko.spark.common.command.modules;
 
 import me.lucko.spark.common.SparkPlatform;
+import me.lucko.spark.common.activitylog.Activity;
 import me.lucko.spark.common.command.Arguments;
 import me.lucko.spark.common.command.Command;
 import me.lucko.spark.common.command.CommandModule;
@@ -35,10 +36,15 @@ import me.lucko.spark.common.monitor.net.NetworkMonitor;
 import me.lucko.spark.common.monitor.ping.PingStatistics;
 import me.lucko.spark.common.monitor.ping.PingSummary;
 import me.lucko.spark.common.monitor.tick.TickStatistics;
+import me.lucko.spark.common.platform.SparkMetadata;
+import me.lucko.spark.common.sampler.Sampler;
 import me.lucko.spark.common.util.FormatUtil;
+import me.lucko.spark.common.util.MediaTypes;
 import me.lucko.spark.common.util.RollingAverage;
 import me.lucko.spark.common.util.StatisticFormatter;
+import me.lucko.spark.proto.SparkProtos;
 import net.kyori.adventure.text.Component;
+import net.kyori.adventure.text.event.ClickEvent;
 
 import java.lang.management.ManagementFactory;
 import java.lang.management.MemoryMXBean;
@@ -84,10 +90,11 @@ public class HealthModule implements CommandModule {
 
         consumer.accept(Command.builder()
                 .aliases("healthreport", "health", "ht")
+                .argumentUsage("upload", null)
                 .argumentUsage("memory", null)
                 .argumentUsage("network", null)
                 .executor(HealthModule::healthReport)
-                .tabCompleter((platform, sender, arguments) -> TabCompleter.completeForOpts(arguments, "--memory", "--network"))
+                .tabCompleter((platform, sender, arguments) -> TabCompleter.completeForOpts(arguments, "--upload", "--memory", "--network"))
                 .build()
         );
     }
@@ -185,6 +192,12 @@ public class HealthModule implements CommandModule {
 
     private static void healthReport(SparkPlatform platform, CommandSender sender, CommandResponseHandler resp, Arguments arguments) {
         resp.replyPrefixed(text("Generating server health report..."));
+
+        if (arguments.boolFlag("upload")) {
+            uploadHealthReport(platform, sender, resp, arguments);
+            return;
+        }
+
         List<Component> report = new LinkedList<>();
         report.add(empty());
 
@@ -209,6 +222,37 @@
         resp.reply(report);
     }
 
+    private static void uploadHealthReport(SparkPlatform platform, CommandSender sender, CommandResponseHandler resp, Arguments arguments) {
+        SparkProtos.HealthMetadata.Builder metadata = SparkProtos.HealthMetadata.newBuilder();
+        SparkMetadata.gather(platform, sender.toData(), platform.getStartupGcStatistics()).writeTo(metadata);
+
+        SparkProtos.HealthData.Builder data = SparkProtos.HealthData.newBuilder()
+                .setMetadata(metadata);
+
+        Sampler activeSampler = platform.getSamplerContainer().getActiveSampler();
+        if (activeSampler != null) {
+            data.putAllTimeWindowStatistics(activeSampler.exportWindowStatistics());
+        }
+
+        try {
+            String key = platform.getBytebinClient().postContent(data.build(), MediaTypes.SPARK_HEALTH_MEDIA_TYPE).key();
+            String url = platform.getViewerUrl() + key;
+
+            resp.broadcastPrefixed(text("Health report:", GOLD));
+            resp.broadcast(text()
+                    .content(url)
+                    .color(GRAY)
+                    .clickEvent(ClickEvent.openUrl(url))
+                    .build()
+            );
+
+            platform.getActivityLog().addToLog(Activity.urlActivity(resp.senderData(), System.currentTimeMillis(), "Health report", url));
+        } catch (Exception e) {
+            resp.broadcastPrefixed(text("An error occurred whilst uploading the data.", RED));
+            e.printStackTrace();
+        }
+    }
+
     private static void addTickStats(List<Component> report, TickStatistics tickStatistics) {
         report.add(text()
                 .append(text(">", DARK_GRAY, BOLD))
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/sender/CommandSender.java b/spark-common/src/main/java/me/lucko/spark/common/command/sender/CommandSender.java
index a1df627..4dc53eb 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/sender/CommandSender.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/sender/CommandSender.java
@@ -34,6 +34,10 @@ public interface CommandSender {
 
     UUID getUniqueId();
 
+    default boolean isPlayer() {
+        return getUniqueId() != null;
+    }
+
     void sendMessage(Component message);
 
     boolean hasPermission(String permission);
diff --git a/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java b/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java
index 52140d5..a5e7039 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java
@@ -22,6 +22,7 @@ package me.lucko.spark.common.heapdump;
 
 import me.lucko.spark.common.SparkPlatform;
 import me.lucko.spark.common.command.sender.CommandSender;
+import me.lucko.spark.common.platform.SparkMetadata;
 import me.lucko.spark.proto.SparkHeapProtos.HeapData;
 import me.lucko.spark.proto.SparkHeapProtos.HeapEntry;
 import me.lucko.spark.proto.SparkHeapProtos.HeapMetadata;
@@ -130,20 +131,8 @@ public final class HeapDumpSummary {
     }
 
     public HeapData toProto(SparkPlatform platform, CommandSender.Data creator) {
-        HeapMetadata.Builder metadata = HeapMetadata.newBuilder()
-                .setPlatformMetadata(platform.getPlugin().getPlatformInfo().toData().toProto())
-                .setCreator(creator.toProto());
-        try {
-            metadata.setPlatformStatistics(platform.getStatisticsProvider().getPlatformStatistics(null, true));
-        } catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            metadata.setSystemStatistics(platform.getStatisticsProvider().getSystemStatistics());
-        } catch (Exception e) {
-            e.printStackTrace();
-        }
+        HeapMetadata.Builder metadata = HeapMetadata.newBuilder();
+        SparkMetadata.gather(platform, creator, platform.getStartupGcStatistics()).writeTo(metadata);
 
         HeapData.Builder proto = HeapData.newBuilder();
         proto.setMetadata(metadata);
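Reviewer note (not part of the diff): the new "--upload" branch in healthReport() keys off Arguments#boolFlag. A minimal JUnit-style sketch of the expected parse, assuming only the Arguments behaviour exercised by ArgumentsTest further down this patch (spark-common test classpath assumed):

    @Test
    public void testUploadFlagParse() {
        // "/spark health --upload" reaches the command executor as a single flag with no value
        Arguments arguments = new Arguments(ImmutableList.of("--upload"), false);
        assertTrue(arguments.boolFlag("upload"));   // healthReport() then delegates to uploadHealthReport()
        assertFalse(arguments.boolFlag("memory"));  // flags that were not passed stay false
    }
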
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java
index 11111cd..a17c924 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java
@@ -22,6 +22,7 @@ package me.lucko.spark.common.platform;
 
 import me.lucko.spark.api.statistic.misc.DoubleAverageInfo;
 import me.lucko.spark.common.SparkPlatform;
+import me.lucko.spark.common.command.sender.CommandSender;
 import me.lucko.spark.common.monitor.cpu.CpuInfo;
 import me.lucko.spark.common.monitor.cpu.CpuMonitor;
 import me.lucko.spark.common.monitor.disk.DiskUsage;
@@ -40,10 +41,16 @@ import me.lucko.spark.proto.SparkProtos.SystemStatistics;
 import me.lucko.spark.proto.SparkProtos.WorldStatistics;
 
 import java.lang.management.ManagementFactory;
+import java.lang.management.MemoryPoolMXBean;
+import java.lang.management.MemoryType;
 import java.lang.management.MemoryUsage;
 import java.lang.management.RuntimeMXBean;
+import java.util.List;
 import java.util.Map;
+import java.util.UUID;
+import java.util.logging.Level;
 import java.util.regex.Pattern;
+import java.util.stream.Collectors;
 
 public class PlatformStatisticsProvider {
     private final SparkPlatform platform;
@@ -139,18 +146,37 @@
         return builder.build();
     }
 
-    public PlatformStatistics getPlatformStatistics(Map<String, GarbageCollectorStatistics> startingGcStatistics, boolean includeWorld) {
+    public PlatformStatistics getPlatformStatistics(Map<String, GarbageCollectorStatistics> startingGcStatistics, boolean detailed) {
         PlatformStatistics.Builder builder = PlatformStatistics.newBuilder();
 
-        MemoryUsage memoryUsage = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage();
-        builder.setMemory(PlatformStatistics.Memory.newBuilder()
-                .setHeap(PlatformStatistics.Memory.MemoryPool.newBuilder()
-                        .setUsed(memoryUsage.getUsed())
-                        .setTotal(memoryUsage.getCommitted())
+        PlatformStatistics.Memory.Builder memory = PlatformStatistics.Memory.newBuilder()
+                .setHeap(memoryUsageProto(ManagementFactory.getMemoryMXBean().getHeapMemoryUsage()))
+                .setNonHeap(memoryUsageProto(ManagementFactory.getMemoryMXBean().getNonHeapMemoryUsage()));
+
+        if (detailed) {
+            List<MemoryPoolMXBean> memoryPoolMXBeans = ManagementFactory.getMemoryPoolMXBeans();
+            for (MemoryPoolMXBean memoryPool : memoryPoolMXBeans) {
+                if (memoryPool.getType() != MemoryType.HEAP) {
+                    continue;
+                }
+
+                MemoryUsage usage = memoryPool.getUsage();
+                MemoryUsage collectionUsage = memoryPool.getCollectionUsage();
+
+                if (usage.getMax() == -1) {
+                    usage = new MemoryUsage(usage.getInit(), usage.getUsed(), usage.getCommitted(), usage.getCommitted());
+                }
+
+                memory.addPools(PlatformStatistics.Memory.MemoryPool.newBuilder()
+                        .setName(memoryPool.getName())
+                        .setUsage(memoryUsageProto(usage))
+                        .setCollectionUsage(memoryUsageProto(collectionUsage))
                         .build()
-                )
-                .build()
-        );
+                );
+            }
+        }
+
+        builder.setMemory(memory.build());
 
         long uptime = System.currentTimeMillis() - this.platform.getServerNormalOperationStartTime();
         builder.setUptime(uptime);
@@ -192,13 +218,29 @@
             );
         }
 
+        List<CommandSender> senders = this.platform.getPlugin().getCommandSenders().collect(Collectors.toList());
+
         PlatformInfo.Type platformType = this.platform.getPlugin().getPlatformInfo().getType();
         if (platformType != PlatformInfo.Type.CLIENT) {
-            long playerCount = this.platform.getPlugin().getCommandSenders().count() - 1; // includes console
+            long playerCount = senders.size() - 1; // includes console
             builder.setPlayerCount(playerCount);
         }
 
-        if (includeWorld) {
+        UUID anyOnlinePlayerUniqueId = senders.stream()
+                .filter(CommandSender::isPlayer)
+                .map(CommandSender::getUniqueId)
+                .filter(uniqueId -> uniqueId.version() == 4 || uniqueId.version() == 3)
+                .findAny()
+                .orElse(null);
+
+        builder.setOnlineMode(anyOnlinePlayerUniqueId == null
+                ? PlatformStatistics.OnlineMode.UNKNOWN
+                : anyOnlinePlayerUniqueId.version() == 4
+                        ? PlatformStatistics.OnlineMode.ONLINE
+                        : PlatformStatistics.OnlineMode.OFFLINE
+        );
+
+        if (detailed) {
             try {
                 WorldStatisticsProvider worldStatisticsProvider = new WorldStatisticsProvider(
                         new AsyncWorldInfoProvider(this.platform, this.platform.getPlugin().createWorldInfoProvider())
@@ -208,7 +250,7 @@
                     builder.setWorld(worldStatistics);
                 }
             } catch (Exception e) {
-                e.printStackTrace();
+                this.platform.getPlugin().log(Level.WARNING, "Failed to gather world statistics - " + e);
             }
         }
 
@@ -225,6 +267,15 @@
                 .build();
     }
 
+    public static PlatformStatistics.Memory.MemoryUsage memoryUsageProto(MemoryUsage usage) {
+        return PlatformStatistics.Memory.MemoryUsage.newBuilder()
+                .setUsed(usage.getUsed())
+                .setCommitted(usage.getCommitted())
+                .setInit(usage.getInit())
+                .setMax(usage.getMax())
+                .build();
+    }
+
     static final class VmArgRedactor {
         private static final Pattern WINDOWS_USERNAME = Pattern.compile("C:\\\\Users\\\\\\w+");
         private static final Pattern MACOS_USERNAME = Pattern.compile("/Users/\\w+");
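Reviewer note (not part of the diff): the online_mode value added above is inferred from the UUID version of any connected player. This relies on the Minecraft convention that online-mode (Mojang-authenticated) profiles carry random version-4 UUIDs, while offline-mode servers derive name-based version-3 UUIDs (from "OfflinePlayer:<name>"). A self-contained sketch of the same check, with hypothetical names:

    import java.nio.charset.StandardCharsets;
    import java.util.UUID;

    class OnlineModeSketch {
        enum OnlineMode { UNKNOWN, OFFLINE, ONLINE }

        static OnlineMode infer(UUID playerId) {
            if (playerId == null) {
                return OnlineMode.UNKNOWN;           // no players online, or UUID unavailable
            }
            switch (playerId.version()) {
                case 4:  return OnlineMode.ONLINE;   // random UUID -> authenticated account
                case 3:  return OnlineMode.OFFLINE;  // name-based UUID -> offline mode
                default: return OnlineMode.UNKNOWN;
            }
        }

        public static void main(String[] args) {
            UUID offline = UUID.nameUUIDFromBytes("OfflinePlayer:Notch".getBytes(StandardCharsets.UTF_8));
            System.out.println(infer(offline)); // OFFLINE - nameUUIDFromBytes always yields a version 3 UUID
        }
    }
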
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/SparkMetadata.java b/spark-common/src/main/java/me/lucko/spark/common/platform/SparkMetadata.java
new file mode 100644
index 0000000..4b68921
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/SparkMetadata.java
@@ -0,0 +1,154 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform;
+
+import me.lucko.spark.common.SparkPlatform;
+import me.lucko.spark.common.command.sender.CommandSender;
+import me.lucko.spark.common.monitor.memory.GarbageCollectorStatistics;
+import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider;
+import me.lucko.spark.common.sampler.source.SourceMetadata;
+import me.lucko.spark.proto.SparkHeapProtos.HeapMetadata;
+import me.lucko.spark.proto.SparkProtos.HealthMetadata;
+import me.lucko.spark.proto.SparkProtos.PlatformMetadata;
+import me.lucko.spark.proto.SparkProtos.PlatformStatistics;
+import me.lucko.spark.proto.SparkProtos.SystemStatistics;
+import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata;
+
+import java.util.Collection;
+import java.util.Locale;
+import java.util.Map;
+import java.util.logging.Level;
+
+public class SparkMetadata {
+
+    public static SparkMetadata gather(SparkPlatform platform, CommandSender.Data creator, Map<String, GarbageCollectorStatistics> initialGcStats) {
+        PlatformMetadata platformMetadata = platform.getPlugin().getPlatformInfo().toData().toProto();
+
+        PlatformStatistics platformStatistics = null;
+        try {
+            platformStatistics = platform.getStatisticsProvider().getPlatformStatistics(initialGcStats, true);
+        } catch (Exception e) {
+            platform.getPlugin().log(Level.WARNING, "Failed to gather platform statistics - " + e);
+        }
+
+        SystemStatistics systemStatistics = null;
+        try {
+            systemStatistics = platform.getStatisticsProvider().getSystemStatistics();
+        } catch (Exception e) {
+            platform.getPlugin().log(Level.WARNING, "Failed to gather system statistics - " + e);
+        }
+
+        long generatedTime = System.currentTimeMillis();
+
+        Map<String, String> serverConfigurations = null;
+        try {
+            ServerConfigProvider serverConfigProvider = platform.getPlugin().createServerConfigProvider();
+            if (serverConfigProvider != null) {
+                serverConfigurations = serverConfigProvider.export();
+            }
+        } catch (Exception e) {
+            platform.getPlugin().log(Level.WARNING, "Failed to gather server configurations - " + e);
+        }
+
+        Collection<SourceMetadata> sources = platform.getPlugin().getKnownSources();
+
+        Map<String, String> extraPlatformMetadata = null;
+        try {
+            MetadataProvider extraMetadataProvider = platform.getPlugin().createExtraMetadataProvider();
+            if (extraMetadataProvider != null) {
+                extraPlatformMetadata = extraMetadataProvider.export();
+            }
+        } catch (Exception e) {
+            platform.getPlugin().log(Level.WARNING, "Failed to gather extra platform metadata - " + e);
+        }
+
+        return new SparkMetadata(creator, platformMetadata, platformStatistics, systemStatistics, generatedTime, serverConfigurations, sources, extraPlatformMetadata);
+    }
+
+    private final CommandSender.Data creator;
+    private final PlatformMetadata platformMetadata;
+    private final PlatformStatistics platformStatistics;
+    private final SystemStatistics systemStatistics;
+    private final long generatedTime;
+    private final Map<String, String> serverConfigurations;
+    private final Collection<SourceMetadata> sources;
+    private final Map<String, String> extraPlatformMetadata;
+
+    public SparkMetadata(CommandSender.Data creator, PlatformMetadata platformMetadata, PlatformStatistics platformStatistics, SystemStatistics systemStatistics, long generatedTime, Map<String, String> serverConfigurations, Collection<SourceMetadata> sources, Map<String, String> extraPlatformMetadata) {
+        this.creator = creator;
+        this.platformMetadata = platformMetadata;
+        this.platformStatistics = platformStatistics;
+        this.systemStatistics = systemStatistics;
+        this.generatedTime = generatedTime;
+        this.serverConfigurations = serverConfigurations;
+        this.sources = sources;
+        this.extraPlatformMetadata = extraPlatformMetadata;
+    }
+
+    @SuppressWarnings("DuplicatedCode")
+    public void writeTo(HealthMetadata.Builder builder) {
+        if (this.creator != null) builder.setCreator(this.creator.toProto());
+        if (this.platformMetadata != null) builder.setPlatformMetadata(this.platformMetadata);
+        if (this.platformStatistics != null) builder.setPlatformStatistics(this.platformStatistics);
+        if (this.systemStatistics != null) builder.setSystemStatistics(this.systemStatistics);
+        builder.setGeneratedTime(this.generatedTime);
+        if (this.serverConfigurations != null) builder.putAllServerConfigurations(this.serverConfigurations);
+        if (this.sources != null) {
+            for (SourceMetadata source : this.sources) {
+                builder.putSources(source.getName().toLowerCase(Locale.ROOT), source.toProto());
+            }
+        }
+        if (this.extraPlatformMetadata != null) builder.putAllExtraPlatformMetadata(this.extraPlatformMetadata);
+    }
+
+    @SuppressWarnings("DuplicatedCode")
+    public void writeTo(SamplerMetadata.Builder builder) {
+        if (this.creator != null) builder.setCreator(this.creator.toProto());
+        if (this.platformMetadata != null) builder.setPlatformMetadata(this.platformMetadata);
+        if (this.platformStatistics != null) builder.setPlatformStatistics(this.platformStatistics);
+        if (this.systemStatistics != null) builder.setSystemStatistics(this.systemStatistics);
+        builder.setEndTime(this.generatedTime);
+        if (this.serverConfigurations != null) builder.putAllServerConfigurations(this.serverConfigurations);
+        if (this.sources != null) {
+            for (SourceMetadata source : this.sources) {
+                builder.putSources(source.getName().toLowerCase(Locale.ROOT), source.toProto());
+            }
+        }
+        if (this.extraPlatformMetadata != null) builder.putAllExtraPlatformMetadata(this.extraPlatformMetadata);
+    }
+
+    @SuppressWarnings("DuplicatedCode")
+    public void writeTo(HeapMetadata.Builder builder) {
+        if (this.creator != null) builder.setCreator(this.creator.toProto());
+        if (this.platformMetadata != null) builder.setPlatformMetadata(this.platformMetadata);
+        if (this.platformStatistics != null) builder.setPlatformStatistics(this.platformStatistics);
+        if (this.systemStatistics != null) builder.setSystemStatistics(this.systemStatistics);
+        builder.setGeneratedTime(this.generatedTime);
+        if (this.serverConfigurations != null) builder.putAllServerConfigurations(this.serverConfigurations);
+        if (this.sources != null) {
+            for (SourceMetadata source : this.sources) {
+                builder.putSources(source.getName().toLowerCase(Locale.ROOT), source.toProto());
+            }
+        }
+        if (this.extraPlatformMetadata != null) builder.putAllExtraPlatformMetadata(this.extraPlatformMetadata);
+    }
+
+}
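Reviewer note (not part of the diff): SparkMetadata centralises the metadata gathering that HealthModule, HeapDumpSummary and AbstractSampler previously duplicated, each with its own try/catch blocks. A fragment of the intended call pattern, using only names introduced in this patch (spark classes assumed on the classpath):

    // gather once: platform info, platform/system statistics, server configs, sources, extra metadata
    SparkMetadata gathered = SparkMetadata.gather(platform, sender.toData(), platform.getStartupGcStatistics());

    // then write the same snapshot onto whichever proto builder is being exported;
    // each writeTo overload skips fields whose gathering failed (left null)
    SparkProtos.HealthMetadata.Builder health = SparkProtos.HealthMetadata.newBuilder();
    gathered.writeTo(health);
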
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java
index 8a9c05f..7453074 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java
@@ -24,6 +24,7 @@ import me.lucko.spark.common.SparkPlatform;
 import me.lucko.spark.common.command.sender.CommandSender;
 import me.lucko.spark.common.monitor.memory.GarbageCollectorStatistics;
 import me.lucko.spark.common.platform.MetadataProvider;
+import me.lucko.spark.common.platform.SparkMetadata;
 import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider;
 import me.lucko.spark.common.sampler.aggregator.DataAggregator;
 import me.lucko.spark.common.sampler.node.MergeMode;
@@ -123,6 +124,11 @@ public abstract class AbstractSampler implements Sampler {
     }
 
     @Override
+    public Map<Integer, SparkProtos.WindowStatistics> exportWindowStatistics() {
+        return this.windowStatisticsCollector.export();
+    }
+
+    @Override
     public void start() {
         this.startTime = System.currentTimeMillis();
     }
@@ -177,14 +183,13 @@
     protected void writeMetadataToProto(SamplerData.Builder proto, SparkPlatform platform, CommandSender.Data creator, String comment, DataAggregator dataAggregator) {
         SamplerMetadata.Builder metadata = SamplerMetadata.newBuilder()
                 .setSamplerMode(getMode().asProto())
-                .setPlatformMetadata(platform.getPlugin().getPlatformInfo().toData().toProto())
-                .setCreator(creator.toProto())
                 .setStartTime(this.startTime)
-                .setEndTime(System.currentTimeMillis())
                 .setInterval(this.interval)
                 .setThreadDumper(this.threadDumper.getMetadata())
                 .setDataAggregator(dataAggregator.getMetadata());
 
+        SparkMetadata.gather(platform, creator, getInitialGcStats()).writeTo(metadata);
+
         if (comment != null) {
             metadata.setComment(comment);
         }
@@ -194,41 +199,6 @@
             metadata.setNumberOfTicks(totalTicks);
         }
 
-        try {
-            metadata.setPlatformStatistics(platform.getStatisticsProvider().getPlatformStatistics(getInitialGcStats(), true));
-        } catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            metadata.setSystemStatistics(platform.getStatisticsProvider().getSystemStatistics());
-        } catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            ServerConfigProvider serverConfigProvider = platform.getPlugin().createServerConfigProvider();
-            if (serverConfigProvider != null) {
-                metadata.putAllServerConfigurations(serverConfigProvider.export());
-            }
-        } catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        try {
-            MetadataProvider extraMetadataProvider = platform.getPlugin().createExtraMetadataProvider();
-            if (extraMetadataProvider != null) {
-                metadata.putAllExtraPlatformMetadata(extraMetadataProvider.export());
-            }
-        } catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        Collection<SourceMetadata> knownSources = platform.getPlugin().getKnownSources();
-        for (SourceMetadata source : knownSources) {
-            metadata.putSources(source.getName().toLowerCase(Locale.ROOT), source.toProto());
-        }
-
         proto.setMetadata(metadata);
     }
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
index 844ab0b..bb74cd2 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
@@ -25,10 +25,12 @@ import me.lucko.spark.common.command.sender.CommandSender;
 import me.lucko.spark.common.sampler.node.MergeMode;
 import me.lucko.spark.common.sampler.source.ClassSourceLookup;
 import me.lucko.spark.common.ws.ViewerSocket;
+import me.lucko.spark.proto.SparkProtos;
 import me.lucko.spark.proto.SparkSamplerProtos.SamplerData;
 import me.lucko.spark.proto.SparkSamplerProtos.SocketChannelInfo;
 
 import java.util.Collection;
+import java.util.Map;
 import java.util.concurrent.CompletableFuture;
 import java.util.function.Supplier;
 
@@ -96,6 +98,13 @@ public interface Sampler {
      */
     CompletableFuture<Sampler> getFuture();
 
+    /**
+     * Exports the current set of window statistics.
+     *
+     * @return the window statistics
+     */
+    Map<Integer, SparkProtos.WindowStatistics> exportWindowStatistics();
+
     // Methods used to export the sampler data to the web viewer.
     SamplerData toProto(SparkPlatform platform, ExportProps exportProps);
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java
index c8d5b3c..7a791bc 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java
@@ -181,6 +181,6 @@ public interface ThreadGrouper {
         public SamplerMetadata.DataAggregator.ThreadGrouper asProto() {
             return SamplerMetadata.DataAggregator.ThreadGrouper.AS_ONE;
         }
-    };
+    }
 
 }
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/source/SourceMetadata.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/source/SourceMetadata.java
index 13dc66d..e5f2b48 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/source/SourceMetadata.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/source/SourceMetadata.java
@@ -21,7 +21,7 @@ package me.lucko.spark.common.sampler.source;
 
 import com.google.common.collect.ImmutableList;
 
-import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata;
+import me.lucko.spark.proto.SparkProtos.PluginOrModMetadata;
 
 import java.util.Collection;
 import java.util.List;
@@ -70,10 +70,11 @@ public class SourceMetadata {
         return this.author;
     }
 
-    public SamplerMetadata.SourceMetadata toProto() {
-        return SamplerMetadata.SourceMetadata.newBuilder()
+    public PluginOrModMetadata toProto() {
+        return PluginOrModMetadata.newBuilder()
                 .setName(this.name)
                 .setVersion(this.version)
+                .setAuthor(this.author)
                 .build();
     }
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/MediaTypes.java b/spark-common/src/main/java/me/lucko/spark/common/util/MediaTypes.java
index 2c49540..47a3395 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/util/MediaTypes.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/MediaTypes.java
@@ -25,5 +25,6 @@ public enum MediaTypes {
 
     public static final String SPARK_SAMPLER_MEDIA_TYPE = "application/x-spark-sampler";
     public static final String SPARK_HEAP_MEDIA_TYPE = "application/x-spark-heap";
+    public static final String SPARK_HEALTH_MEDIA_TYPE = "application/x-spark-health";
 
 }
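Reviewer note (not part of the diff): Sampler#exportWindowStatistics is the hook that lets the health upload attach profiler time-window data while a sampler is running; AbstractSampler simply delegates to its window statistics collector. A fragment of the consuming pattern, mirroring HealthModule above (spark classes assumed on the classpath):

    Sampler active = platform.getSamplerContainer().getActiveSampler();
    Map<Integer, SparkProtos.WindowStatistics> windows =
            active != null ? active.exportWindowStatistics() : Collections.emptyMap();
    // keys are profiling window indexes; values are the per-window statistics protos
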
diff --git a/spark-common/src/main/proto/spark/spark.proto b/spark-common/src/main/proto/spark/spark.proto
index 6ffd4e9..779b3c6 100644
--- a/spark-common/src/main/proto/spark/spark.proto
+++ b/spark-common/src/main/proto/spark/spark.proto
@@ -104,13 +104,24 @@ message PlatformStatistics {
   Ping ping = 6; // optional
   int64 player_count = 7; // optional
   WorldStatistics world = 8; // optional
+  OnlineMode online_mode = 9; // optional
 
   message Memory {
-    MemoryPool heap = 1;
+    MemoryUsage heap = 1;
+    MemoryUsage non_heap = 2;
+    repeated MemoryPool pools = 3;
 
     message MemoryPool {
+      string name = 1;
+      MemoryUsage usage = 2;
+      MemoryUsage collection_usage = 3;
+    }
+
+    message MemoryUsage {
       int64 used = 1;
-      int64 total = 2;
+      int64 committed = 2; // previously called 'total'
+      int64 init = 3; // optional
+      int64 max = 4; // optional
     }
   }
 
@@ -134,6 +145,12 @@ message PlatformStatistics {
   message Ping {
     RollingAverageValues last15m = 1;
   }
+
+  enum OnlineMode {
+    UNKNOWN = 0;
+    OFFLINE = 1;
+    ONLINE = 2;
+  }
 }
 
 message WorldStatistics {
@@ -205,3 +222,25 @@ message CommandSenderMetadata {
     PLAYER = 1;
   }
 }
+
+message PluginOrModMetadata {
+  string name = 1;
+  string version = 2;
+  string author = 3;
+}
+
+message HealthData {
+  HealthMetadata metadata = 1;
+  map<int32, WindowStatistics> time_window_statistics = 2;
+}
+
+message HealthMetadata {
+  CommandSenderMetadata creator = 1;
+  PlatformMetadata platform_metadata = 2;
+  PlatformStatistics platform_statistics = 3;
+  SystemStatistics system_statistics = 4;
+  int64 generated_time = 5;
+  map<string, string> server_configurations = 6;
+  map<string, PluginOrModMetadata> sources = 7;
+  map<string, string> extra_platform_metadata = 8;
+}
diff --git a/spark-common/src/main/proto/spark/spark_heap.proto b/spark-common/src/main/proto/spark/spark_heap.proto
index 59f2b85..aef7888 100644
--- a/spark-common/src/main/proto/spark/spark_heap.proto
+++ b/spark-common/src/main/proto/spark/spark_heap.proto
@@ -17,6 +17,10 @@ message HeapMetadata {
   PlatformMetadata platform_metadata = 2;
   PlatformStatistics platform_statistics = 3;
   SystemStatistics system_statistics = 4;
+  int64 generated_time = 5;
+  map<string, string> server_configurations = 6;
+  map<string, PluginOrModMetadata> sources = 7;
+  map<string, string> extra_platform_metadata = 8;
 }
 
 message HeapEntry {
diff --git a/spark-common/src/main/proto/spark/spark_sampler.proto b/spark-common/src/main/proto/spark/spark_sampler.proto
index dbc336a..10cc6d1 100644
--- a/spark-common/src/main/proto/spark/spark_sampler.proto
+++ b/spark-common/src/main/proto/spark/spark_sampler.proto
@@ -31,7 +31,7 @@ message SamplerMetadata {
   map<string, string> server_configurations = 10;
   int64 end_time = 11;
   int32 number_of_ticks = 12;
-  map<string, SourceMetadata> sources = 13;
+  map<string, PluginOrModMetadata> sources = 13;
   map<string, string> extra_platform_metadata = 14;
   SamplerMode sampler_mode = 15;
 
@@ -65,11 +65,6 @@ message SamplerMetadata {
     }
   }
 
-  message SourceMetadata {
-    string name = 1;
-    string version = 2;
-  }
-
   enum SamplerMode {
     EXECUTION = 0;
     ALLOCATION = 1;
diff --git a/spark-common/src/test/java/me/lucko/spark/common/command/ArgumentsTest.java b/spark-common/src/test/java/me/lucko/spark/common/command/ArgumentsTest.java
new file mode 100644
index 0000000..dda6c5d
--- /dev/null
+++ b/spark-common/src/test/java/me/lucko/spark/common/command/ArgumentsTest.java
@@ -0,0 +1,137 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.command;
+
+import com.google.common.collect.ImmutableList;
+import org.junit.jupiter.api.Test;
+
+import java.util.Set;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertThrowsExactly;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
+public class ArgumentsTest {
+
+    @Test
+    public void testInitialParse() {
+        Arguments arguments = new Arguments(ImmutableList.of("hello"), true);
+        assertEquals("hello", arguments.subCommand());
+
+        Arguments.ParseException exception = assertThrowsExactly(
+                Arguments.ParseException.class,
+                () -> new Arguments(ImmutableList.of("hello"), false)
+        );
+        assertEquals("Expected flag at position 0 but got 'hello' instead!", exception.getMessage());
+
+        exception = assertThrowsExactly(
+                Arguments.ParseException.class,
+                () -> new Arguments(ImmutableList.of("hello", "world"), true)
+        );
+        assertEquals("Expected flag at position 1 but got 'world' instead!", exception.getMessage());
+    }
+
+    @Test
+    public void testStringFlag() {
+        Arguments arguments = new Arguments(ImmutableList.of("--test-flag", "hello"), false);
+
+        Set<String> values = arguments.stringFlag("test-flag");
+        assertEquals(1, values.size());
+        assertEquals("hello", values.iterator().next());
+    }
+
+    @Test
+    public void testStringFlagWithSpace() {
+        Arguments arguments = new Arguments(ImmutableList.of("--test-flag", "hello", "world"), false);
+
+        Set<String> values = arguments.stringFlag("test-flag");
+        assertEquals(1, values.size());
+        assertEquals("hello world", values.iterator().next());
+    }
+
+    @Test
+    public void testStringFlagWithMultipleValues() {
+        Arguments arguments = new Arguments(ImmutableList.of("--test-flag", "hello", "--test-flag", "world"), false);
+
+        Set<String> values = arguments.stringFlag("test-flag");
+        assertEquals(2, values.size());
+        assertEquals(ImmutableList.of("hello", "world"), ImmutableList.copyOf(values));
+    }
+
+    @Test
+    public void testMissingStringFlag() {
+        Arguments arguments = new Arguments(ImmutableList.of("--test-flag", "hello"), false);
+
+        Set<String> values = arguments.stringFlag("missing-flag");
+        assertEquals(0, values.size());
+    }
+
+    @Test
+    public void testIntFlag() {
+        Arguments arguments = new Arguments(ImmutableList.of("--test-flag", "123", "--negative-test", "-100"), false);
+
+        int value = arguments.intFlag("test-flag");
+        assertEquals(123, value);
+
+        value = arguments.intFlag("negative-test");
+        assertEquals(100, value);
+    }
+
+    @Test
+    public void testMissingIntFlag() {
+        Arguments arguments = new Arguments(ImmutableList.of("--test-flag", "hello"), false);
+
+        int value = arguments.intFlag("missing-flag");
+        assertEquals(-1, value);
+    }
+
+    @Test
+    public void testDoubleFlag() {
+        Arguments arguments = new Arguments(ImmutableList.of("--test-flag", "123.45", "--negative-test", "-100.5"), false);
+
+        double value = arguments.doubleFlag("test-flag");
+        assertEquals(123.45, value, 0.0001);
+
+        value = arguments.doubleFlag("negative-test");
+        assertEquals(100.5, value, 0.0001);
+    }
+
+    @Test
+    public void testMissingDoubleFlag() {
+        Arguments arguments = new Arguments(ImmutableList.of("--test-flag", "hello"), false);
+
+        double value = arguments.doubleFlag("missing-flag");
+        assertEquals(-1, value);
+    }
+
+    @Test
+    public void testBooleanFlag() {
+        Arguments arguments = new Arguments(ImmutableList.of("--test-flag"), false);
+
+        boolean value = arguments.boolFlag("test-flag");
+        assertTrue(value);
+
+        value = arguments.boolFlag("negative-test");
+        assertFalse(value);
+    }
+
+}
diff --git a/spark-common/src/test/java/me/lucko/spark/common/platform/PlatformStatisticsProviderTest.java b/spark-common/src/test/java/me/lucko/spark/common/platform/PlatformStatisticsProviderTest.java
index 63e62d9..4b26322 100644
--- a/spark-common/src/test/java/me/lucko/spark/common/platform/PlatformStatisticsProviderTest.java
+++ b/spark-common/src/test/java/me/lucko/spark/common/platform/PlatformStatisticsProviderTest.java
@@ -26,6 +26,7 @@ import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.io.TempDir;
 
 import java.nio.file.Path;
+import java.util.Collections;
 
 import static org.junit.jupiter.api.Assertions.assertNotNull;
 
@@ -42,7 +43,7 @@ public class PlatformStatisticsProviderTest {
     @Test
     public void testPlatformStatistics(@TempDir Path directory) {
         try (TestSparkPlugin plugin = new TestSparkPlugin(directory)) {
-            SparkProtos.PlatformStatistics platformStatistics = new PlatformStatisticsProvider(plugin.platform()).getPlatformStatistics(null, true);
+            SparkProtos.PlatformStatistics platformStatistics = new PlatformStatisticsProvider(plugin.platform()).getPlatformStatistics(Collections.emptyMap(), true);
             assertNotNull(platformStatistics);
         }
     }
diff --git a/spark-common/src/test/java/me/lucko/spark/common/platform/SparkMetadataTest.java b/spark-common/src/test/java/me/lucko/spark/common/platform/SparkMetadataTest.java
new file mode 100644
index 0000000..e2b8374
--- /dev/null
+++ b/spark-common/src/test/java/me/lucko/spark/common/platform/SparkMetadataTest.java
@@ -0,0 +1,43 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform;
+
+import me.lucko.spark.test.plugin.TestCommandSender;
+import me.lucko.spark.test.plugin.TestSparkPlugin;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.io.TempDir;
+
+import java.nio.file.Path;
+import java.util.Collections;
+
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+
+public class SparkMetadataTest {
+
+    @Test
+    public void testGather(@TempDir Path directory) {
+        try (TestSparkPlugin plugin = new TestSparkPlugin(directory)) {
+            SparkMetadata metadata = SparkMetadata.gather(plugin.platform(), TestCommandSender.INSTANCE.toData(), Collections.emptyMap());
+            assertNotNull(metadata);
+        }
+    }
+
+}
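Reviewer note (not part of the diff): a consumer-side sketch of the new HealthData payload defined in spark.proto above, using the standard protobuf-java generated API (parseFrom and the getters follow directly from the message definitions; the local file path is a hypothetical stand-in for fetching the bytes from bytebin, and real code would handle IOException/InvalidProtocolBufferException):

    byte[] payload = java.nio.file.Files.readAllBytes(java.nio.file.Paths.get("health-report.bin"));
    SparkProtos.HealthData health = SparkProtos.HealthData.parseFrom(payload);

    SparkProtos.PlatformStatistics.Memory memory = health.getMetadata().getPlatformStatistics().getMemory();
    long heapUsed = memory.getHeap().getUsed();           // heap is now a MemoryUsage message
    long heapCommitted = memory.getHeap().getCommitted(); // field was previously called 'total'
    SparkProtos.PlatformStatistics.OnlineMode mode = health.getMetadata().getPlatformStatistics().getOnlineMode();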