From bfbbcb3e68e019da4657ef0da22b889de656ae3f Mon Sep 17 00:00:00 2001
From: Luck
Date: Tue, 28 Dec 2021 18:12:33 +0000
Subject: Include platform and system statistics in profiler viewer payload

---
 .../spark/common/command/modules/HealthModule.java | 35 ++++-----
 .../common/command/modules/HeapAnalysisModule.java | 85 ++--------------------
 .../common/command/modules/SamplerModule.java      |  4 +-
 3 files changed, 23 insertions(+), 101 deletions(-)

(limited to 'spark-common/src/main/java/me/lucko/spark/common/command')

diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java
index 51fa905..b036d21 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java
@@ -30,6 +30,7 @@ import me.lucko.spark.common.command.CommandResponseHandler;
 import me.lucko.spark.common.command.sender.CommandSender;
 import me.lucko.spark.common.command.tabcomplete.TabCompleter;
 import me.lucko.spark.common.monitor.cpu.CpuMonitor;
+import me.lucko.spark.common.monitor.disk.DiskUsage;
 import me.lucko.spark.common.monitor.tick.TickStatistics;
 import me.lucko.spark.common.util.FormatUtil;
 import me.lucko.spark.common.util.RollingAverage;
@@ -38,15 +39,11 @@ import net.kyori.adventure.text.Component;
 import net.kyori.adventure.text.TextComponent;
 import net.kyori.adventure.text.format.TextColor;
 
-import java.io.IOException;
 import java.lang.management.ManagementFactory;
 import java.lang.management.MemoryMXBean;
 import java.lang.management.MemoryPoolMXBean;
 import java.lang.management.MemoryType;
 import java.lang.management.MemoryUsage;
-import java.nio.file.FileStore;
-import java.nio.file.Files;
-import java.nio.file.Paths;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.function.Consumer;
@@ -65,8 +62,6 @@ import static net.kyori.adventure.text.format.TextDecoration.BOLD;
 
 public class HealthModule implements CommandModule {
 
-    private static final double MSPT_95_PERCENTILE = 0.95d;
-
     @Override
     public void registerCommands(Consumer<Command> consumer) {
         consumer.accept(Command.builder()
@@ -150,11 +145,7 @@ public class HealthModule implements CommandModule {
             addDetailedMemoryStats(report, memoryMXBean);
         }
 
-        try {
-            addDiskStats(report);
-        } catch (IOException e) {
-            e.printStackTrace();
-        }
+        addDiskStats(report);
 
         resp.reply(report);
     }
@@ -309,10 +300,14 @@ public class HealthModule implements CommandModule {
         }
     }
 
-    private static void addDiskStats(List<Component> report) throws IOException {
-        FileStore fileStore = Files.getFileStore(Paths.get("."));
-        long totalSpace = fileStore.getTotalSpace();
-        long usedSpace = totalSpace - fileStore.getUsableSpace();
+    private static void addDiskStats(List<Component> report) {
+        long total = DiskUsage.getTotal();
+        long used = DiskUsage.getUsed();
+
+        if (total == 0 || used == 0) {
+            return;
+        }
+
         report.add(text()
                 .append(text(">", DARK_GRAY, BOLD))
                 .append(space())
@@ -321,18 +316,18 @@ public class HealthModule implements CommandModule {
         );
         report.add(text()
                 .content(" ")
-                .append(text(FormatUtil.formatBytes(usedSpace), WHITE))
+                .append(text(FormatUtil.formatBytes(used), WHITE))
                 .append(space())
                 .append(text("/", GRAY))
                 .append(space())
-                .append(text(FormatUtil.formatBytes(totalSpace), WHITE))
+                .append(text(FormatUtil.formatBytes(total), WHITE))
                 .append(text(" "))
                 .append(text("(", GRAY))
-                .append(text(FormatUtil.percent(usedSpace, totalSpace), GREEN))
+                .append(text(FormatUtil.percent(used, total), GREEN))
                 .append(text(")", GRAY))
                 .build()
         );
-        report.add(text().content(" ").append(generateDiskUsageDiagram(usedSpace, totalSpace, 40)).build());
+        report.add(text().content(" ").append(generateDiskUsageDiagram(used, total, 40)).build());
         report.add(empty());
     }
 
@@ -355,7 +350,7 @@ public class HealthModule implements CommandModule {
                 .append(text('/', GRAY))
                 .append(formatTickDuration(average.median()))
                 .append(text('/', GRAY))
-                .append(formatTickDuration(average.percentile(MSPT_95_PERCENTILE)))
+                .append(formatTickDuration(average.percentile95th()))
                 .append(text('/', GRAY))
                 .append(formatTickDuration(average.max()))
                 .build();
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java
index 70f6c3c..491ec1e 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java
@@ -30,20 +30,15 @@ import me.lucko.spark.common.command.sender.CommandSender;
 import me.lucko.spark.common.command.tabcomplete.TabCompleter;
 import me.lucko.spark.common.heapdump.HeapDump;
 import me.lucko.spark.common.heapdump.HeapDumpSummary;
+import me.lucko.spark.common.util.Compression;
 import me.lucko.spark.common.util.FormatUtil;
-import me.lucko.spark.proto.SparkProtos;
+import me.lucko.spark.proto.SparkHeapProtos;
 
 import net.kyori.adventure.text.event.ClickEvent;
 
-import org.tukaani.xz.LZMA2Options;
-import org.tukaani.xz.LZMAOutputStream;
-import org.tukaani.xz.XZOutputStream;
-
 import okhttp3.MediaType;
 
 import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.util.Iterator;
@@ -51,7 +46,6 @@ import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.function.Consumer;
 import java.util.function.LongConsumer;
-import java.util.zip.GZIPOutputStream;
 
 import static net.kyori.adventure.text.Component.text;
 import static net.kyori.adventure.text.format.NamedTextColor.GOLD;
@@ -98,7 +92,7 @@ public class HeapAnalysisModule implements CommandModule {
             return;
         }
 
-        SparkProtos.HeapData output = heapDump.toProto(platform.getPlugin().getPlatformInfo(), sender);
+        SparkHeapProtos.HeapData output = heapDump.toProto(platform, sender);
 
         boolean saveToFile = false;
         if (arguments.boolFlag("save-to-file")) {
@@ -175,11 +169,11 @@ public class HeapAnalysisModule implements CommandModule {
 
         platform.getActivityLog().addToLog(Activity.fileActivity(sender, System.currentTimeMillis(), "Heap dump", file.toString()));
 
-        CompressionMethod compressionMethod = null;
+        Compression compressionMethod = null;
         Iterator<String> compressArgs = arguments.stringFlag("compress").iterator();
         if (compressArgs.hasNext()) {
             try {
-                compressionMethod = CompressionMethod.valueOf(compressArgs.next().toUpperCase());
+                compressionMethod = Compression.valueOf(compressArgs.next().toUpperCase());
             } catch (IllegalArgumentException e) {
                 // ignore
             }
@@ -194,7 +188,7 @@ public class HeapAnalysisModule implements CommandModule {
         }
     }
 
-    private static void heapDumpCompress(SparkPlatform platform, CommandResponseHandler resp, Path file, CompressionMethod method) throws IOException {
+    private static void heapDumpCompress(SparkPlatform platform, CommandResponseHandler resp, Path file, Compression method) throws IOException {
         resp.broadcastPrefixed(text("Compressing heap dump, please wait..."));
 
         long size = Files.size(file);
@@ -244,71 +238,4 @@ public class HeapAnalysisModule implements CommandModule {
         );
     }
 
-    public enum CompressionMethod {
-        GZIP {
-            @Override
-            public Path compress(Path file, LongConsumer progressHandler) throws IOException {
-                Path compressedFile = file.getParent().resolve(file.getFileName().toString() + ".gz");
-                try (InputStream in = Files.newInputStream(file)) {
-                    try (OutputStream out = Files.newOutputStream(compressedFile)) {
-                        try (GZIPOutputStream compressionOut = new GZIPOutputStream(out, 1024 * 64)) {
-                            copy(in, compressionOut, progressHandler);
-                        }
-                    }
-                }
-                return compressedFile;
-            }
-        },
-        XZ {
-            @Override
-            public Path compress(Path file, LongConsumer progressHandler) throws IOException {
-                Path compressedFile = file.getParent().resolve(file.getFileName().toString() + ".xz");
-                try (InputStream in = Files.newInputStream(file)) {
-                    try (OutputStream out = Files.newOutputStream(compressedFile)) {
-                        try (XZOutputStream compressionOut = new XZOutputStream(out, new LZMA2Options())) {
-                            copy(in, compressionOut, progressHandler);
-                        }
-                    }
-                }
-                return compressedFile;
-            }
-        },
-        LZMA {
-            @Override
-            public Path compress(Path file, LongConsumer progressHandler) throws IOException {
-                Path compressedFile = file.getParent().resolve(file.getFileName().toString() + ".lzma");
-                try (InputStream in = Files.newInputStream(file)) {
-                    try (OutputStream out = Files.newOutputStream(compressedFile)) {
-                        try (LZMAOutputStream compressionOut = new LZMAOutputStream(out, new LZMA2Options(), true)) {
-                            copy(in, compressionOut, progressHandler);
-                        }
-                    }
-                }
-                return compressedFile;
-            }
-        };
-
-        public abstract Path compress(Path file, LongConsumer progressHandler) throws IOException;
-
-        private static long copy(InputStream from, OutputStream to, LongConsumer progress) throws IOException {
-            byte[] buf = new byte[1024 * 64];
-            long total = 0;
-            long iterations = 0;
-            while (true) {
-                int r = from.read(buf);
-                if (r == -1) {
-                    break;
-                }
-                to.write(buf, 0, r);
-                total += r;
-
-                // report progress every 5MB
-                if (iterations++ % ((1024 / 64) * 5) == 0) {
-                    progress.accept(total);
-                }
-            }
-            return total;
-        }
-    }
-
 }
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
index 2dd07c9..26f20e7 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
@@ -40,7 +40,7 @@ import me.lucko.spark.common.sampler.async.AsyncSampler;
 import me.lucko.spark.common.sampler.node.MergeMode;
 import me.lucko.spark.common.tick.TickHook;
 import me.lucko.spark.common.util.MethodDisambiguator;
-import me.lucko.spark.proto.SparkProtos;
+import me.lucko.spark.proto.SparkSamplerProtos;
 
 import net.kyori.adventure.text.event.ClickEvent;
 
@@ -305,7 +305,7 @@ public class SamplerModule implements CommandModule {
     }
 
     private void handleUpload(SparkPlatform platform, CommandResponseHandler resp, Sampler sampler, ThreadNodeOrder threadOrder, String comment, MergeMode mergeMode, boolean saveToFileFlag) {
-        SparkProtos.SamplerData output = sampler.toProto(platform.getPlugin().getPlatformInfo(), resp.sender(), threadOrder, comment, mergeMode, platform.createClassSourceLookup());
+        SparkSamplerProtos.SamplerData output = sampler.toProto(platform, resp.sender(), threadOrder, comment, mergeMode, platform.createClassSourceLookup());
 
         boolean saveToFile = false;
         if (saveToFileFlag) {
--
cgit
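
Reviewer note (not part of the patch): the diff references two helpers that live outside the command package and are therefore not shown above, me.lucko.spark.common.monitor.disk.DiskUsage and me.lucko.spark.common.util.Compression. Judging by the new import and the unchanged GZIP/XZ/LZMA call sites, Compression is presumably the removed CompressionMethod inner enum promoted to a shared util class. For DiskUsage, the sketch below is an assumption based on the FileStore code removed from HealthModule and the zero check at the new call site; the class name is real, but the body is hypothetical, not the actual implementation.

    package me.lucko.spark.common.monitor.disk;

    import java.io.IOException;
    import java.nio.file.FileStore;
    import java.nio.file.Files;
    import java.nio.file.Paths;

    /**
     * Hypothetical sketch: exposes the disk statistics previously computed
     * inline in HealthModule#addDiskStats, returning 0 when they cannot be read.
     */
    public final class DiskUsage {
        private DiskUsage() {}

        public static long getTotal() {
            try {
                return store().getTotalSpace();
            } catch (IOException e) {
                return 0; // callers treat 0 as "statistics unavailable"
            }
        }

        public static long getUsed() {
            try {
                FileStore store = store();
                return store.getTotalSpace() - store.getUsableSpace();
            } catch (IOException e) {
                return 0; // callers treat 0 as "statistics unavailable"
            }
        }

        private static FileStore store() throws IOException {
            // same probe the old HealthModule code used: the file store of the working directory
            return Files.getFileStore(Paths.get("."));
        }
    }

Returning 0 on failure is what lets the new early return in addDiskStats skip the disk section instead of propagating an IOException, which in turn is why the throws clause and the try/catch around addDiskStats(report) could be dropped.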