From 7079484d428321c9b3db09394577efda4d591a4e Mon Sep 17 00:00:00 2001 From: Luck Date: Mon, 19 Sep 2022 18:57:02 +0100 Subject: Provide extra metadata about sources in sampler data --- .../main/java/me/lucko/spark/common/command/modules/SamplerModule.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) (limited to 'spark-common/src/main/java/me/lucko/spark/common/command') diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java index 0a80c31..2afed64 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java @@ -38,6 +38,7 @@ import me.lucko.spark.common.sampler.ThreadGrouper; import me.lucko.spark.common.sampler.ThreadNodeOrder; import me.lucko.spark.common.sampler.async.AsyncSampler; import me.lucko.spark.common.sampler.node.MergeMode; +import me.lucko.spark.common.sampler.source.ClassSourceLookup; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.util.MethodDisambiguator; import me.lucko.spark.proto.SparkSamplerProtos; @@ -303,7 +304,7 @@ public class SamplerModule implements CommandModule { } private void handleUpload(SparkPlatform platform, CommandResponseHandler resp, Sampler sampler, ThreadNodeOrder threadOrder, String comment, MergeMode mergeMode, boolean saveToFileFlag) { - SparkSamplerProtos.SamplerData output = sampler.toProto(platform, resp.sender(), threadOrder, comment, mergeMode, platform.createClassSourceLookup()); + SparkSamplerProtos.SamplerData output = sampler.toProto(platform, resp.sender(), threadOrder, comment, mergeMode, ClassSourceLookup.create(platform)); boolean saveToFile = false; if (saveToFileFlag) { -- cgit From d31f3c7bdf03c874ff9518d47d060adc18322d6b Mon Sep 17 00:00:00 2001 From: lucko Date: Fri, 7 Oct 2022 20:26:24 +0100 Subject: Split profiler output into windows (#253) --- .../common/command/modules/SamplerModule.java | 14 +- .../spark/common/sampler/AbstractSampler.java | 42 ++-- .../me/lucko/spark/common/sampler/Sampler.java | 4 +- .../spark/common/sampler/ThreadNodeOrder.java | 52 ---- .../common/sampler/async/AsyncDataAggregator.java | 4 +- .../common/sampler/async/AsyncProfilerAccess.java | 4 +- .../common/sampler/async/AsyncProfilerJob.java | 264 ++++++++++++++++++++ .../spark/common/sampler/async/AsyncSampler.java | 255 ++++++-------------- .../common/sampler/async/JfrParsingException.java | 27 +++ .../spark/common/sampler/async/ProfileSegment.java | 50 ++++ .../spark/common/sampler/async/jfr/Dictionary.java | 4 + .../common/sampler/java/JavaDataAggregator.java | 7 +- .../spark/common/sampler/java/JavaSampler.java | 56 ++++- .../common/sampler/java/SimpleDataAggregator.java | 4 +- .../common/sampler/java/TickedDataAggregator.java | 41 ++-- .../spark/common/sampler/node/AbstractNode.java | 70 +++--- .../spark/common/sampler/node/StackTraceNode.java | 77 +----- .../spark/common/sampler/node/ThreadNode.java | 42 +++- .../sampler/window/ProfilingWindowUtils.java | 36 +++ .../common/sampler/window/ProtoTimeEncoder.java | 94 ++++++++ .../sampler/window/WindowStatisticsCollector.java | 267 +++++++++++++++++++++ spark-common/src/main/proto/spark/spark.proto | 9 + .../src/main/proto/spark/spark_sampler.proto | 15 +- 23 files changed, 1027 insertions(+), 411 deletions(-) delete mode 100644 spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadNodeOrder.java create 
mode 100644 spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerJob.java create mode 100644 spark-common/src/main/java/me/lucko/spark/common/sampler/async/JfrParsingException.java create mode 100644 spark-common/src/main/java/me/lucko/spark/common/sampler/window/ProfilingWindowUtils.java create mode 100644 spark-common/src/main/java/me/lucko/spark/common/sampler/window/ProtoTimeEncoder.java create mode 100644 spark-common/src/main/java/me/lucko/spark/common/sampler/window/WindowStatisticsCollector.java (limited to 'spark-common/src/main/java/me/lucko/spark/common/command') diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java index 2afed64..c1e4981 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java @@ -35,7 +35,6 @@ import me.lucko.spark.common.sampler.Sampler; import me.lucko.spark.common.sampler.SamplerBuilder; import me.lucko.spark.common.sampler.ThreadDumper; import me.lucko.spark.common.sampler.ThreadGrouper; -import me.lucko.spark.common.sampler.ThreadNodeOrder; import me.lucko.spark.common.sampler.async.AsyncSampler; import me.lucko.spark.common.sampler.node.MergeMode; import me.lucko.spark.common.sampler.source.ClassSourceLookup; @@ -94,7 +93,6 @@ public class SamplerModule implements CommandModule { .argumentUsage("not-combined", null) .argumentUsage("force-java-sampler", null) .argumentUsage("stop --comment", "comment") - .argumentUsage("stop --order-by-time", null) .argumentUsage("stop --save-to-file", null) .executor(this::profiler) .tabCompleter((platform, sender, arguments) -> { @@ -103,7 +101,7 @@ public class SamplerModule implements CommandModule { } if (arguments.contains("--stop") || arguments.contains("--upload")) { - return TabCompleter.completeForOpts(arguments, "--order-by-time", "--comment", "--save-to-file"); + return TabCompleter.completeForOpts(arguments, "--comment", "--save-to-file"); } List opts = new ArrayList<>(Arrays.asList("--info", "--stop", "--cancel", @@ -249,14 +247,13 @@ public class SamplerModule implements CommandModule { // await the result if (timeoutSeconds != -1) { - ThreadNodeOrder threadOrder = arguments.boolFlag("order-by-time") ? ThreadNodeOrder.BY_TIME : ThreadNodeOrder.BY_NAME; String comment = Iterables.getFirst(arguments.stringFlag("comment"), null); MethodDisambiguator methodDisambiguator = new MethodDisambiguator(); MergeMode mergeMode = arguments.boolFlag("separate-parent-calls") ? MergeMode.separateParentCalls(methodDisambiguator) : MergeMode.sameMethod(methodDisambiguator); boolean saveToFile = arguments.boolFlag("save-to-file"); future.thenAcceptAsync(s -> { resp.broadcastPrefixed(text("The active profiler has completed! Uploading results...")); - handleUpload(platform, resp, s, threadOrder, comment, mergeMode, saveToFile); + handleUpload(platform, resp, s, comment, mergeMode, saveToFile); }); } } @@ -293,18 +290,17 @@ public class SamplerModule implements CommandModule { } else { this.activeSampler.stop(); resp.broadcastPrefixed(text("The active profiler has been stopped! Uploading results...")); - ThreadNodeOrder threadOrder = arguments.boolFlag("order-by-time") ? 
ThreadNodeOrder.BY_TIME : ThreadNodeOrder.BY_NAME; String comment = Iterables.getFirst(arguments.stringFlag("comment"), null); MethodDisambiguator methodDisambiguator = new MethodDisambiguator(); MergeMode mergeMode = arguments.boolFlag("separate-parent-calls") ? MergeMode.separateParentCalls(methodDisambiguator) : MergeMode.sameMethod(methodDisambiguator); boolean saveToFile = arguments.boolFlag("save-to-file"); - handleUpload(platform, resp, this.activeSampler, threadOrder, comment, mergeMode, saveToFile); + handleUpload(platform, resp, this.activeSampler, comment, mergeMode, saveToFile); this.activeSampler = null; } } - private void handleUpload(SparkPlatform platform, CommandResponseHandler resp, Sampler sampler, ThreadNodeOrder threadOrder, String comment, MergeMode mergeMode, boolean saveToFileFlag) { - SparkSamplerProtos.SamplerData output = sampler.toProto(platform, resp.sender(), threadOrder, comment, mergeMode, ClassSourceLookup.create(platform)); + private void handleUpload(SparkPlatform platform, CommandResponseHandler resp, Sampler sampler, String comment, MergeMode mergeMode, boolean saveToFileFlag) { + SparkSamplerProtos.SamplerData output = sampler.toProto(platform, resp.sender(), comment, mergeMode, ClassSourceLookup.create(platform)); boolean saveToFile = false; if (saveToFileFlag) { diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java index 6fc5a10..c650738 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java @@ -30,7 +30,10 @@ import me.lucko.spark.common.sampler.node.MergeMode; import me.lucko.spark.common.sampler.node.ThreadNode; import me.lucko.spark.common.sampler.source.ClassSourceLookup; import me.lucko.spark.common.sampler.source.SourceMetadata; +import me.lucko.spark.common.sampler.window.ProtoTimeEncoder; +import me.lucko.spark.common.sampler.window.WindowStatisticsCollector; import me.lucko.spark.common.tick.TickHook; +import me.lucko.spark.proto.SparkProtos; import me.lucko.spark.proto.SparkSamplerProtos.SamplerData; import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata; @@ -58,12 +61,12 @@ public abstract class AbstractSampler implements Sampler { /** The time when sampling first began */ protected long startTime = -1; - /** The game tick when sampling first began */ - protected int startTick = -1; - /** The unix timestamp (in millis) when this sampler should automatically complete. 
*/ protected final long autoEndTime; // -1 for nothing + /** Collects statistics for each window in the sample */ + protected final WindowStatisticsCollector windowStatisticsCollector; + /** A future to encapsulate the completion of this sampler instance */ protected final CompletableFuture future = new CompletableFuture<>(); @@ -75,6 +78,7 @@ public abstract class AbstractSampler implements Sampler { this.interval = interval; this.threadDumper = threadDumper; this.autoEndTime = autoEndTime; + this.windowStatisticsCollector = new WindowStatisticsCollector(platform); } @Override @@ -106,11 +110,11 @@ public abstract class AbstractSampler implements Sampler { @Override public void start() { this.startTime = System.currentTimeMillis(); + } - TickHook tickHook = this.platform.getTickHook(); - if (tickHook != null) { - this.startTick = tickHook.getCurrentTick(); - } + @Override + public void stop() { + this.windowStatisticsCollector.stop(); } protected void writeMetadataToProto(SamplerData.Builder proto, SparkPlatform platform, CommandSender creator, String comment, DataAggregator dataAggregator) { @@ -127,12 +131,9 @@ public abstract class AbstractSampler implements Sampler { metadata.setComment(comment); } - if (this.startTick != -1) { - TickHook tickHook = this.platform.getTickHook(); - if (tickHook != null) { - int numberOfTicks = tickHook.getCurrentTick() - this.startTick; - metadata.setNumberOfTicks(numberOfTicks); - } + int totalTicks = this.windowStatisticsCollector.getTotalTicks(); + if (totalTicks != -1) { + metadata.setNumberOfTicks(totalTicks); } try { @@ -171,14 +172,23 @@ public abstract class AbstractSampler implements Sampler { proto.setMetadata(metadata); } - protected void writeDataToProto(SamplerData.Builder proto, DataAggregator dataAggregator, Comparator outputOrder, MergeMode mergeMode, ClassSourceLookup classSourceLookup) { + protected void writeDataToProto(SamplerData.Builder proto, DataAggregator dataAggregator, MergeMode mergeMode, ClassSourceLookup classSourceLookup) { List data = dataAggregator.exportData(); - data.sort(outputOrder); + data.sort(Comparator.comparing(ThreadNode::getThreadLabel)); ClassSourceLookup.Visitor classSourceVisitor = ClassSourceLookup.createVisitor(classSourceLookup); + ProtoTimeEncoder timeEncoder = new ProtoTimeEncoder(data); + int[] timeWindows = timeEncoder.getKeys(); + for (int timeWindow : timeWindows) { + proto.addTimeWindows(timeWindow); + } + + this.windowStatisticsCollector.ensureHasStatisticsForAllWindows(timeWindows); + proto.putAllTimeWindowStatistics(this.windowStatisticsCollector.export()); + for (ThreadNode entry : data) { - proto.addThreads(entry.toProto(mergeMode)); + proto.addThreads(entry.toProto(mergeMode, timeEncoder)); classSourceVisitor.visit(entry); } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java index 98281de..e06cba6 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java @@ -23,11 +23,9 @@ package me.lucko.spark.common.sampler; import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.command.sender.CommandSender; import me.lucko.spark.common.sampler.node.MergeMode; -import me.lucko.spark.common.sampler.node.ThreadNode; import me.lucko.spark.common.sampler.source.ClassSourceLookup; import me.lucko.spark.proto.SparkSamplerProtos.SamplerData; -import java.util.Comparator; import 
java.util.concurrent.CompletableFuture; /** @@ -67,6 +65,6 @@ public interface Sampler { CompletableFuture getFuture(); // Methods used to export the sampler data to the web viewer. - SamplerData toProto(SparkPlatform platform, CommandSender creator, Comparator outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup); + SamplerData toProto(SparkPlatform platform, CommandSender creator, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup); } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadNodeOrder.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadNodeOrder.java deleted file mode 100644 index adcedcd..0000000 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadNodeOrder.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * This file is part of spark. - * - * Copyright (c) lucko (Luck) - * Copyright (c) contributors - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - */ - -package me.lucko.spark.common.sampler; - -import me.lucko.spark.common.sampler.node.ThreadNode; - -import java.util.Comparator; - -/** - * Methods of ordering {@link ThreadNode}s in the output data. - */ -public enum ThreadNodeOrder implements Comparator { - - /** - * Order by the name of the thread (alphabetically) - */ - BY_NAME { - @Override - public int compare(ThreadNode o1, ThreadNode o2) { - return o1.getThreadLabel().compareTo(o2.getThreadLabel()); - } - }, - - /** - * Order by the time taken by the thread (most time taken first) - */ - BY_TIME { - @Override - public int compare(ThreadNode o1, ThreadNode o2) { - return -Double.compare(o1.getTotalTime(), o2.getTotalTime()); - } - } - -} diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncDataAggregator.java index 3de3943..402330a 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncDataAggregator.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncDataAggregator.java @@ -47,10 +47,10 @@ public class AsyncDataAggregator extends AbstractDataAggregator { .build(); } - public void insertData(ProfileSegment element) { + public void insertData(ProfileSegment element, int window) { try { ThreadNode node = getNode(this.threadGrouper.getGroup(element.getNativeThreadId(), element.getThreadName())); - node.log(STACK_TRACE_DESCRIBER, element.getStackTrace(), element.getTime()); + node.log(STACK_TRACE_DESCRIBER, element.getStackTrace(), element.getTime(), window); } catch (Exception e) { e.printStackTrace(); } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java index abde21d..1480650 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java +++ 
b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java @@ -87,11 +87,11 @@ public class AsyncProfilerAccess { this.setupException = setupException; } - public AsyncProfiler getProfiler() { + public AsyncProfilerJob startNewProfilerJob() { if (this.profiler == null) { throw new UnsupportedOperationException("async-profiler not supported", this.setupException); } - return this.profiler; + return AsyncProfilerJob.createNew(this, this.profiler); } public ProfilingEvent getProfilingEvent() { diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerJob.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerJob.java new file mode 100644 index 0000000..7b123a7 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerJob.java @@ -0,0 +1,264 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.sampler.async; + +import me.lucko.spark.common.SparkPlatform; +import me.lucko.spark.common.sampler.ThreadDumper; +import me.lucko.spark.common.sampler.async.jfr.JfrReader; + +import one.profiler.AsyncProfiler; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.List; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Predicate; + +/** + * Represents a profiling job within async-profiler. + * + *

<p>Only one job can be running at a time. This is guarded by + * {@link #createNew(AsyncProfilerAccess, AsyncProfiler)}.
+ */ +public class AsyncProfilerJob { + + /** + * The currently active job. + */ + private static final AtomicReference<AsyncProfilerJob> ACTIVE = new AtomicReference<>(); + + /** + * Creates a new {@link AsyncProfilerJob}. + * + *

<p>Will throw an {@link IllegalStateException} if another job is already active.
+ * + * @param access the profiler access object + * @param profiler the profiler + * @return the job + */ + static AsyncProfilerJob createNew(AsyncProfilerAccess access, AsyncProfiler profiler) { + synchronized (ACTIVE) { + AsyncProfilerJob existing = ACTIVE.get(); + if (existing != null) { + throw new IllegalStateException("Another profiler is already active: " + existing); + } + + AsyncProfilerJob job = new AsyncProfilerJob(access, profiler); + ACTIVE.set(job); + return job; + } + } + + /** The async-profiler access object */ + private final AsyncProfilerAccess access; + /** The async-profiler instance */ + private final AsyncProfiler profiler; + + // Set on init + /** The platform */ + private SparkPlatform platform; + /** The sampling interval in microseconds */ + private int interval; + /** The thread dumper */ + private ThreadDumper threadDumper; + /** The profiling window */ + private int window; + + /** The file used by async-profiler to output data */ + private Path outputFile; + + private AsyncProfilerJob(AsyncProfilerAccess access, AsyncProfiler profiler) { + this.access = access; + this.profiler = profiler; + } + + /** + * Executes an async-profiler command. + * + * @param command the command + * @return the output + */ + private String execute(String command) { + try { + return this.profiler.execute(command); + } catch (IOException e) { + throw new RuntimeException("Exception whilst executing profiler command", e); + } + } + + /** + * Checks to ensure that this job is still active. + */ + private void checkActive() { + if (ACTIVE.get() != this) { + throw new IllegalStateException("Profiler job no longer active!"); + } + } + + // Initialise the job + public void init(SparkPlatform platform, int interval, ThreadDumper threadDumper, int window) { + this.platform = platform; + this.interval = interval; + this.threadDumper = threadDumper; + this.window = window; + } + + /** + * Starts the job. + */ + public void start() { + checkActive(); + + try { + // create a new temporary output file + try { + this.outputFile = this.platform.getTemporaryFiles().create("spark-", "-profile-data.jfr.tmp"); + } catch (IOException e) { + throw new RuntimeException("Unable to create temporary output file", e); + } + + // construct a command to send to async-profiler + String command = "start,event=" + this.access.getProfilingEvent() + ",interval=" + this.interval + "us,threads,jfr,file=" + this.outputFile.toString(); + if (this.threadDumper instanceof ThreadDumper.Specific) { + command += ",filter"; + } + + // start the profiler + String resp = execute(command).trim(); + + if (!resp.equalsIgnoreCase("profiling started")) { + throw new RuntimeException("Unexpected response: " + resp); + } + + // append threads to be profiled, if necessary + if (this.threadDumper instanceof ThreadDumper.Specific) { + ThreadDumper.Specific threadDumper = (ThreadDumper.Specific) this.threadDumper; + for (Thread thread : threadDumper.getThreads()) { + this.profiler.addThread(thread); + } + } + + } catch (Exception e) { + try { + this.profiler.stop(); + } catch (Exception e2) { + // ignore + } + close(); + + throw e; + } + } + + /** + * Stops the job. + */ + public void stop() { + checkActive(); + + try { + this.profiler.stop(); + } catch (IllegalStateException e) { + if (!e.getMessage().equals("Profiler is not active")) { // ignore + throw e; + } + } finally { + close(); + } + } + + /** + * Aggregates the collected data. 
+ */ + public void aggregate(AsyncDataAggregator dataAggregator) { + + Predicate threadFilter; + if (this.threadDumper instanceof ThreadDumper.Specific) { + ThreadDumper.Specific specificDumper = (ThreadDumper.Specific) this.threadDumper; + threadFilter = n -> specificDumper.getThreadNames().contains(n.toLowerCase()); + } else { + threadFilter = n -> true; + } + + // read the jfr file produced by async-profiler + try (JfrReader reader = new JfrReader(this.outputFile)) { + readSegments(reader, threadFilter, dataAggregator, this.window); + } catch (Exception e) { + boolean fileExists; + try { + fileExists = Files.exists(this.outputFile) && Files.size(this.outputFile) != 0; + } catch (IOException ex) { + fileExists = false; + } + + if (fileExists) { + throw new JfrParsingException("Error parsing JFR data from profiler output", e); + } else { + throw new JfrParsingException("Error parsing JFR data from profiler output - file " + this.outputFile + " does not exist!", e); + } + } + + // delete the output file after reading + try { + Files.deleteIfExists(this.outputFile); + } catch (IOException e) { + // ignore + } + + } + + private void readSegments(JfrReader reader, Predicate threadFilter, AsyncDataAggregator dataAggregator, int window) throws IOException { + List samples = reader.readAllEvents(JfrReader.ExecutionSample.class); + for (int i = 0; i < samples.size(); i++) { + JfrReader.ExecutionSample sample = samples.get(i); + + long duration; + if (i == 0) { + // we don't really know the duration of the first sample, so just use the sampling + // interval + duration = this.interval; + } else { + // calculate the duration of the sample by calculating the time elapsed since the + // previous sample + duration = TimeUnit.NANOSECONDS.toMicros(sample.time - samples.get(i - 1).time); + } + + String threadName = reader.threads.get(sample.tid); + if (!threadFilter.test(threadName)) { + continue; + } + + // parse the segment and give it to the data aggregator + ProfileSegment segment = ProfileSegment.parseSegment(reader, sample, threadName, duration); + dataAggregator.insertData(segment, window); + } + } + + public int getWindow() { + return this.window; + } + + private void close() { + ACTIVE.compareAndSet(this, null); + } +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java index 7d9cb81..2c9bb5f 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java @@ -27,61 +27,41 @@ import me.lucko.spark.common.command.sender.CommandSender; import me.lucko.spark.common.sampler.AbstractSampler; import me.lucko.spark.common.sampler.ThreadDumper; import me.lucko.spark.common.sampler.ThreadGrouper; -import me.lucko.spark.common.sampler.async.jfr.JfrReader; import me.lucko.spark.common.sampler.node.MergeMode; -import me.lucko.spark.common.sampler.node.ThreadNode; import me.lucko.spark.common.sampler.source.ClassSourceLookup; +import me.lucko.spark.common.sampler.window.ProfilingWindowUtils; +import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.proto.SparkSamplerProtos.SamplerData; -import one.profiler.AsyncProfiler; - -import java.io.IOException; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.Comparator; -import java.util.List; import java.util.concurrent.Executors; import 
java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; -import java.util.function.Predicate; /** * A sampler implementation using async-profiler. */ public class AsyncSampler extends AbstractSampler { - private final AsyncProfiler profiler; + private final AsyncProfilerAccess profilerAccess; /** Responsible for aggregating and then outputting collected sampling data */ private final AsyncDataAggregator dataAggregator; - /** Flag to mark if the output has been completed */ - private boolean outputComplete = false; + /** Mutex for the current profiler job */ + private final Object[] currentJobMutex = new Object[0]; - /** The temporary output file */ - private Path outputFile; + /** Current profiler job */ + private AsyncProfilerJob currentJob; - /** The executor used for timeouts */ - private ScheduledExecutorService timeoutExecutor; + /** The executor used for scheduling and management */ + private ScheduledExecutorService scheduler; public AsyncSampler(SparkPlatform platform, int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime) { super(platform, interval, threadDumper, endTime); - this.profiler = AsyncProfilerAccess.getInstance(platform).getProfiler(); + this.profilerAccess = AsyncProfilerAccess.getInstance(platform); this.dataAggregator = new AsyncDataAggregator(threadGrouper); - } - - /** - * Executes a profiler command. - * - * @param command the command to execute - * @return the response - */ - private String execute(String command) { - try { - return this.profiler.execute(command); - } catch (IOException e) { - throw new RuntimeException("Exception whilst executing profiler command", e); - } + this.scheduler = Executors.newSingleThreadScheduledExecutor( + new ThreadFactoryBuilder().setNameFormat("spark-asyncsampler-worker-thread").build() + ); } /** @@ -91,33 +71,58 @@ public class AsyncSampler extends AbstractSampler { public void start() { super.start(); - try { - this.outputFile = this.platform.getTemporaryFiles().create("spark-", "-profile-data.jfr.tmp"); - } catch (IOException e) { - throw new RuntimeException("Unable to create temporary output file", e); + TickHook tickHook = this.platform.getTickHook(); + if (tickHook != null) { + this.windowStatisticsCollector.startCountingTicks(tickHook); } - String command = "start,event=" + AsyncProfilerAccess.getInstance(this.platform).getProfilingEvent() + ",interval=" + this.interval + "us,threads,jfr,file=" + this.outputFile.toString(); - if (this.threadDumper instanceof ThreadDumper.Specific) { - command += ",filter"; - } + int window = ProfilingWindowUtils.unixMillisToWindow(System.currentTimeMillis()); - String resp = execute(command).trim(); - if (!resp.equalsIgnoreCase("profiling started")) { - throw new RuntimeException("Unexpected response: " + resp); - } + AsyncProfilerJob job = this.profilerAccess.startNewProfilerJob(); + job.init(this.platform, this.interval, this.threadDumper, window); + job.start(); + this.currentJob = job; - if (this.threadDumper instanceof ThreadDumper.Specific) { - ThreadDumper.Specific threadDumper = (ThreadDumper.Specific) this.threadDumper; - for (Thread thread : threadDumper.getThreads()) { - this.profiler.addThread(thread); - } - } + // rotate the sampler job every minute to put data into a new window + this.scheduler.scheduleAtFixedRate(this::rotateProfilerJob, 1, 1, TimeUnit.MINUTES); recordInitialGcStats(); scheduleTimeout(); } + private void rotateProfilerJob() { + try { + synchronized (this.currentJobMutex) { + AsyncProfilerJob 
previousJob = this.currentJob; + if (previousJob == null) { + return; + } + + try { + // stop the previous job + previousJob.stop(); + + // collect statistics for the window + this.windowStatisticsCollector.measureNow(previousJob.getWindow()); + } catch (Exception e) { + e.printStackTrace(); + } + + // start a new job + int window = previousJob.getWindow() + 1; + AsyncProfilerJob newJob = this.profilerAccess.startNewProfilerJob(); + newJob.init(this.platform, this.interval, this.threadDumper, window); + newJob.start(); + this.currentJob = newJob; + + // aggregate the output of the previous job + previousJob.aggregate(this.dataAggregator); + } + } catch (Throwable e) { + e.printStackTrace(); + } + } + private void scheduleTimeout() { if (this.autoEndTime == -1) { return; @@ -128,11 +133,7 @@ public class AsyncSampler extends AbstractSampler { return; } - this.timeoutExecutor = Executors.newSingleThreadScheduledExecutor( - new ThreadFactoryBuilder().setNameFormat("spark-asyncsampler-timeout-thread").build() - ); - - this.timeoutExecutor.schedule(() -> { + this.scheduler.schedule(() -> { stop(); this.future.complete(this); }, delay, TimeUnit.MILLISECONDS); @@ -143,145 +144,27 @@ public class AsyncSampler extends AbstractSampler { */ @Override public void stop() { - try { - this.profiler.stop(); - } catch (IllegalStateException e) { - if (!e.getMessage().equals("Profiler is not active")) { // ignore - throw e; - } - } + super.stop(); + synchronized (this.currentJobMutex) { + this.currentJob.stop(); + this.windowStatisticsCollector.measureNow(this.currentJob.getWindow()); + this.currentJob.aggregate(this.dataAggregator); + this.currentJob = null; + } - if (this.timeoutExecutor != null) { - this.timeoutExecutor.shutdown(); - this.timeoutExecutor = null; + if (this.scheduler != null) { + this.scheduler.shutdown(); + this.scheduler = null; } } @Override - public SamplerData toProto(SparkPlatform platform, CommandSender creator, Comparator outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup) { + public SamplerData toProto(SparkPlatform platform, CommandSender creator, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup) { SamplerData.Builder proto = SamplerData.newBuilder(); writeMetadataToProto(proto, platform, creator, comment, this.dataAggregator); - aggregateOutput(); - writeDataToProto(proto, this.dataAggregator, outputOrder, mergeMode, classSourceLookup); + writeDataToProto(proto, this.dataAggregator, mergeMode, classSourceLookup); return proto.build(); } - private void aggregateOutput() { - if (this.outputComplete) { - return; - } - this.outputComplete = true; - - Predicate threadFilter; - if (this.threadDumper instanceof ThreadDumper.Specific) { - ThreadDumper.Specific threadDumper = (ThreadDumper.Specific) this.threadDumper; - threadFilter = n -> threadDumper.getThreadNames().contains(n.toLowerCase()); - } else { - threadFilter = n -> true; - } - - // read the jfr file produced by async-profiler - try (JfrReader reader = new JfrReader(this.outputFile)) { - readSegments(reader, threadFilter); - } catch (Exception e) { - boolean fileExists; - try { - fileExists = Files.exists(this.outputFile) && Files.size(this.outputFile) != 0; - } catch (IOException ex) { - fileExists = false; - } - - if (fileExists) { - throw new JfrParsingException("Error parsing JFR data from profiler output", e); - } else { - throw new JfrParsingException("Error parsing JFR data from profiler output - file " + this.outputFile + " does not exist!", e); - } - } - - 
// delete the output file after reading - try { - Files.deleteIfExists(this.outputFile); - } catch (IOException e) { - // ignore - } - } - - private void readSegments(JfrReader reader, Predicate threadFilter) throws IOException { - List samples = reader.readAllEvents(JfrReader.ExecutionSample.class); - for (int i = 0; i < samples.size(); i++) { - JfrReader.ExecutionSample sample = samples.get(i); - - long duration; - if (i == 0) { - // we don't really know the duration of the first sample, so just use the sampling - // interval - duration = this.interval; - } else { - // calculate the duration of the sample by calculating the time elapsed since the - // previous sample - duration = TimeUnit.NANOSECONDS.toMicros(sample.time - samples.get(i - 1).time); - } - - String threadName = reader.threads.get(sample.tid); - if (!threadFilter.test(threadName)) { - continue; - } - - // parse the segment and give it to the data aggregator - ProfileSegment segment = parseSegment(reader, sample, threadName, duration); - this.dataAggregator.insertData(segment); - } - } - - private static ProfileSegment parseSegment(JfrReader reader, JfrReader.ExecutionSample sample, String threadName, long duration) { - JfrReader.StackTrace stackTrace = reader.stackTraces.get(sample.stackTraceId); - int len = stackTrace.methods.length; - - AsyncStackTraceElement[] stack = new AsyncStackTraceElement[len]; - for (int i = 0; i < len; i++) { - stack[i] = parseStackFrame(reader, stackTrace.methods[i]); - } - - return new ProfileSegment(sample.tid, threadName, stack, duration); - } - - private static AsyncStackTraceElement parseStackFrame(JfrReader reader, long methodId) { - AsyncStackTraceElement result = reader.stackFrames.get(methodId); - if (result != null) { - return result; - } - - JfrReader.MethodRef methodRef = reader.methods.get(methodId); - JfrReader.ClassRef classRef = reader.classes.get(methodRef.cls); - - byte[] className = reader.symbols.get(classRef.name); - byte[] methodName = reader.symbols.get(methodRef.name); - - if (className == null || className.length == 0) { - // native call - result = new AsyncStackTraceElement( - AsyncStackTraceElement.NATIVE_CALL, - new String(methodName, StandardCharsets.UTF_8), - null - ); - } else { - // java method - byte[] methodDesc = reader.symbols.get(methodRef.sig); - result = new AsyncStackTraceElement( - new String(className, StandardCharsets.UTF_8).replace('/', '.'), - new String(methodName, StandardCharsets.UTF_8), - new String(methodDesc, StandardCharsets.UTF_8) - ); - } - - reader.stackFrames.put(methodId, result); - return result; - } - - private static final class JfrParsingException extends RuntimeException { - public JfrParsingException(String message, Throwable cause) { - super(message, cause); - } - } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/JfrParsingException.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/JfrParsingException.java new file mode 100644 index 0000000..6dab359 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/JfrParsingException.java @@ -0,0 +1,27 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. 
+ * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.sampler.async; + +public class JfrParsingException extends RuntimeException { + public JfrParsingException(String message, Throwable cause) { + super(message, cause); + } +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/ProfileSegment.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/ProfileSegment.java index 154e6fe..26debaf 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/ProfileSegment.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/ProfileSegment.java @@ -20,6 +20,10 @@ package me.lucko.spark.common.sampler.async; +import me.lucko.spark.common.sampler.async.jfr.JfrReader; + +import java.nio.charset.StandardCharsets; + /** * Represents a profile "segment". * @@ -58,4 +62,50 @@ public class ProfileSegment { public long getTime() { return this.time; } + + public static ProfileSegment parseSegment(JfrReader reader, JfrReader.ExecutionSample sample, String threadName, long duration) { + JfrReader.StackTrace stackTrace = reader.stackTraces.get(sample.stackTraceId); + int len = stackTrace.methods.length; + + AsyncStackTraceElement[] stack = new AsyncStackTraceElement[len]; + for (int i = 0; i < len; i++) { + stack[i] = parseStackFrame(reader, stackTrace.methods[i]); + } + + return new ProfileSegment(sample.tid, threadName, stack, duration); + } + + private static AsyncStackTraceElement parseStackFrame(JfrReader reader, long methodId) { + AsyncStackTraceElement result = reader.stackFrames.get(methodId); + if (result != null) { + return result; + } + + JfrReader.MethodRef methodRef = reader.methods.get(methodId); + JfrReader.ClassRef classRef = reader.classes.get(methodRef.cls); + + byte[] className = reader.symbols.get(classRef.name); + byte[] methodName = reader.symbols.get(methodRef.name); + + if (className == null || className.length == 0) { + // native call + result = new AsyncStackTraceElement( + AsyncStackTraceElement.NATIVE_CALL, + new String(methodName, StandardCharsets.UTF_8), + null + ); + } else { + // java method + byte[] methodDesc = reader.symbols.get(methodRef.sig); + result = new AsyncStackTraceElement( + new String(className, StandardCharsets.UTF_8).replace('/', '.'), + new String(methodName, StandardCharsets.UTF_8), + new String(methodDesc, StandardCharsets.UTF_8) + ); + } + + reader.stackFrames.put(methodId, result); + return result; + } + } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/Dictionary.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/Dictionary.java index 23223a2..60f6543 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/Dictionary.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/jfr/Dictionary.java @@ -37,6 +37,10 @@ public class Dictionary { size = 0; } + public int size() { + return this.size; + } + public void put(long key, T value) { if (key == 0) { throw new IllegalArgumentException("Zero key not allowed"); diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaDataAggregator.java 
b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaDataAggregator.java index cc530d6..c51ec05 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaDataAggregator.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaDataAggregator.java @@ -66,10 +66,11 @@ public abstract class JavaDataAggregator extends AbstractDataAggregator { * Inserts sampling data into this aggregator * * @param threadInfo the thread info + * @param window the window */ - public abstract void insertData(ThreadInfo threadInfo); + public abstract void insertData(ThreadInfo threadInfo, int window); - protected void writeData(ThreadInfo threadInfo) { + protected void writeData(ThreadInfo threadInfo, int window) { if (this.ignoreSleeping && isSleeping(threadInfo)) { return; } @@ -79,7 +80,7 @@ public abstract class JavaDataAggregator extends AbstractDataAggregator { try { ThreadNode node = getNode(this.threadGrouper.getGroup(threadInfo.getThreadId(), threadInfo.getThreadName())); - node.log(STACK_TRACE_DESCRIBER, threadInfo.getStackTrace(), this.interval); + node.log(STACK_TRACE_DESCRIBER, threadInfo.getStackTrace(), this.interval, window); } catch (Exception e) { e.printStackTrace(); } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java index 0f73a9f..8c96fd3 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java @@ -28,15 +28,17 @@ import me.lucko.spark.common.sampler.AbstractSampler; import me.lucko.spark.common.sampler.ThreadDumper; import me.lucko.spark.common.sampler.ThreadGrouper; import me.lucko.spark.common.sampler.node.MergeMode; -import me.lucko.spark.common.sampler.node.ThreadNode; import me.lucko.spark.common.sampler.source.ClassSourceLookup; +import me.lucko.spark.common.sampler.window.ProfilingWindowUtils; +import me.lucko.spark.common.sampler.window.WindowStatisticsCollector; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.proto.SparkSamplerProtos.SamplerData; +import org.checkerframework.checker.units.qual.A; + import java.lang.management.ManagementFactory; import java.lang.management.ThreadInfo; import java.lang.management.ThreadMXBean; -import java.util.Comparator; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ScheduledFuture; @@ -62,6 +64,9 @@ public class JavaSampler extends AbstractSampler implements Runnable { /** Responsible for aggregating and then outputting collected sampling data */ private final JavaDataAggregator dataAggregator; + + /** The last window that was profiled */ + private final AtomicInteger lastWindow = new AtomicInteger(); public JavaSampler(SparkPlatform platform, int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime, boolean ignoreSleeping, boolean ignoreNative) { super(platform, interval, threadDumper, endTime); @@ -76,12 +81,28 @@ public class JavaSampler extends AbstractSampler implements Runnable { @Override public void start() { super.start(); + + TickHook tickHook = this.platform.getTickHook(); + if (tickHook != null) { + if (this.dataAggregator instanceof TickedDataAggregator) { + WindowStatisticsCollector.ExplicitTickCounter counter = this.windowStatisticsCollector.startCountingTicksExplicit(tickHook); + ((TickedDataAggregator) 
this.dataAggregator).setTickCounter(counter); + } else { + this.windowStatisticsCollector.startCountingTicks(tickHook); + } + } + this.task = this.workerPool.scheduleAtFixedRate(this, 0, this.interval, TimeUnit.MICROSECONDS); } @Override public void stop() { + super.stop(); + this.task.cancel(false); + + // collect statistics for the final window + this.windowStatisticsCollector.measureNow(this.lastWindow.get()); } @Override @@ -89,27 +110,30 @@ public class JavaSampler extends AbstractSampler implements Runnable { // this is effectively synchronized, the worker pool will not allow this task // to concurrently execute. try { - if (this.autoEndTime != -1 && this.autoEndTime <= System.currentTimeMillis()) { - this.future.complete(this); + long time = System.currentTimeMillis(); + + if (this.autoEndTime != -1 && this.autoEndTime <= time) { stop(); + this.future.complete(this); return; } + int window = ProfilingWindowUtils.unixMillisToWindow(time); ThreadInfo[] threadDumps = this.threadDumper.dumpThreads(this.threadBean); - this.workerPool.execute(new InsertDataTask(this.dataAggregator, threadDumps)); + this.workerPool.execute(new InsertDataTask(threadDumps, window)); } catch (Throwable t) { - this.future.completeExceptionally(t); stop(); + this.future.completeExceptionally(t); } } - private static final class InsertDataTask implements Runnable { - private final JavaDataAggregator dataAggregator; + private final class InsertDataTask implements Runnable { private final ThreadInfo[] threadDumps; + private final int window; - InsertDataTask(JavaDataAggregator dataAggregator, ThreadInfo[] threadDumps) { - this.dataAggregator = dataAggregator; + InsertDataTask(ThreadInfo[] threadDumps, int window) { this.threadDumps = threadDumps; + this.window = window; } @Override @@ -118,16 +142,22 @@ public class JavaSampler extends AbstractSampler implements Runnable { if (threadInfo.getThreadName() == null || threadInfo.getStackTrace() == null) { continue; } - this.dataAggregator.insertData(threadInfo); + JavaSampler.this.dataAggregator.insertData(threadInfo, this.window); + } + + // if we have just stepped over into a new window, collect statistics for the previous window + int previousWindow = JavaSampler.this.lastWindow.getAndSet(this.window); + if (previousWindow != 0 && previousWindow != this.window) { + JavaSampler.this.windowStatisticsCollector.measureNow(previousWindow); } } } @Override - public SamplerData toProto(SparkPlatform platform, CommandSender creator, Comparator outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup) { + public SamplerData toProto(SparkPlatform platform, CommandSender creator, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup) { SamplerData.Builder proto = SamplerData.newBuilder(); writeMetadataToProto(proto, platform, creator, comment, this.dataAggregator); - writeDataToProto(proto, this.dataAggregator, outputOrder, mergeMode, classSourceLookup); + writeDataToProto(proto, this.dataAggregator, mergeMode, classSourceLookup); return proto.build(); } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/SimpleDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/SimpleDataAggregator.java index 39e21aa..54173fe 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/SimpleDataAggregator.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/SimpleDataAggregator.java @@ -44,8 +44,8 @@ public class SimpleDataAggregator extends 
JavaDataAggregator { } @Override - public void insertData(ThreadInfo threadInfo) { - writeData(threadInfo); + public void insertData(ThreadInfo threadInfo, int window) { + writeData(threadInfo, window); } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java index e062f31..d537b96 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java @@ -23,6 +23,7 @@ package me.lucko.spark.common.sampler.java; import me.lucko.spark.common.sampler.ThreadGrouper; import me.lucko.spark.common.sampler.aggregator.DataAggregator; import me.lucko.spark.common.sampler.node.ThreadNode; +import me.lucko.spark.common.sampler.window.WindowStatisticsCollector; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata; @@ -31,7 +32,6 @@ import java.util.ArrayList; import java.util.List; import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicInteger; /** * Implementation of {@link DataAggregator} which supports only including sampling data from "ticks" @@ -48,14 +48,15 @@ public class TickedDataAggregator extends JavaDataAggregator { /** The expected number of samples in each tick */ private final int expectedSize; - /** The number of ticks aggregated so far */ - private final AtomicInteger numberOfTicks = new AtomicInteger(); - - private final Object mutex = new Object(); + /** Counts the number of ticks aggregated */ + private WindowStatisticsCollector.ExplicitTickCounter tickCounter; // state private int currentTick = -1; - private TickList currentData = new TickList(0); + private TickList currentData = null; + + // guards currentData + private final Object mutex = new Object(); public TickedDataAggregator(ExecutorService workerPool, ThreadGrouper threadGrouper, int interval, boolean ignoreSleeping, boolean ignoreNative, TickHook tickHook, int tickLengthThreshold) { super(workerPool, threadGrouper, interval, ignoreSleeping, ignoreNative); @@ -66,29 +67,34 @@ public class TickedDataAggregator extends JavaDataAggregator { this.expectedSize = (int) ((50 / intervalMilliseconds) + 10); } + public void setTickCounter(WindowStatisticsCollector.ExplicitTickCounter tickCounter) { + this.tickCounter = tickCounter; + } + @Override public SamplerMetadata.DataAggregator getMetadata() { // push the current tick (so numberOfTicks is accurate) synchronized (this.mutex) { pushCurrentTick(); + this.currentData = null; } return SamplerMetadata.DataAggregator.newBuilder() .setType(SamplerMetadata.DataAggregator.Type.TICKED) .setThreadGrouper(this.threadGrouper.asProto()) .setTickLengthThreshold(this.tickLengthThreshold) - .setNumberOfIncludedTicks(this.numberOfTicks.get()) + .setNumberOfIncludedTicks(this.tickCounter.getTotalCountedTicks()) .build(); } @Override - public void insertData(ThreadInfo threadInfo) { + public void insertData(ThreadInfo threadInfo, int window) { synchronized (this.mutex) { int tick = this.tickHook.getCurrentTick(); - if (this.currentTick != tick) { + if (this.currentTick != tick || this.currentData == null) { pushCurrentTick(); this.currentTick = tick; - this.currentData = new TickList(this.expectedSize); + this.currentData = new TickList(this.expectedSize, window); } this.currentData.addData(threadInfo); @@ -98,6 
+104,9 @@ public class TickedDataAggregator extends JavaDataAggregator { // guarded by 'mutex' private void pushCurrentTick() { TickList currentData = this.currentData; + if (currentData == null) { + return; + } // approximate how long the tick lasted int tickLengthMicros = currentData.getList().size() * this.interval; @@ -107,8 +116,8 @@ public class TickedDataAggregator extends JavaDataAggregator { return; } - this.numberOfTicks.incrementAndGet(); this.workerPool.submit(currentData); + this.tickCounter.increment(); } @Override @@ -121,21 +130,19 @@ public class TickedDataAggregator extends JavaDataAggregator { return super.exportData(); } - public int getNumberOfTicks() { - return this.numberOfTicks.get(); - } - private final class TickList implements Runnable { private final List list; + private final int window; - TickList(int expectedSize) { + TickList(int expectedSize, int window) { this.list = new ArrayList<>(expectedSize); + this.window = window; } @Override public void run() { for (ThreadInfo data : this.list) { - writeData(data); + writeData(data, this.window); } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java index fd2be8d..fe1afcd 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java @@ -20,6 +20,9 @@ package me.lucko.spark.common.sampler.node; +import me.lucko.spark.common.sampler.async.jfr.Dictionary; +import me.lucko.spark.common.sampler.window.ProtoTimeEncoder; + import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -27,62 +30,63 @@ import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.LongAdder; +import java.util.stream.IntStream; /** * Encapsulates a timed node in the sampling stack. */ public abstract class AbstractNode { - private static final int MAX_STACK_DEPTH = 300; + protected static final int MAX_STACK_DEPTH = 300; /** A map of the nodes children */ private final Map children = new ConcurrentHashMap<>(); /** The accumulated sample time for this node, measured in microseconds */ - private final LongAdder totalTime = new LongAdder(); + // long key = the window (effectively System.currentTimeMillis() / 60_000) + // LongAdder value = accumulated time in microseconds + private final Dictionary times = new Dictionary<>(); /** - * Gets the total sample time logged for this node in milliseconds. + * Gets the time accumulator for a given window * - * @return the total time + * @param window the window + * @return the accumulator */ - public double getTotalTime() { - return this.totalTime.longValue() / 1000d; + protected LongAdder getTimeAccumulator(int window) { + LongAdder adder = this.times.get(window); + if (adder == null) { + adder = new LongAdder(); + this.times.put(window, adder); + } + return adder; } - public Collection getChildren() { - return this.children.values(); + /** + * Gets the time windows that have been logged for this node. + * + * @return the time windows + */ + public IntStream getTimeWindows() { + IntStream.Builder keys = IntStream.builder(); + this.times.forEach((key, value) -> keys.add((int) key)); + return keys.build(); } /** - * Logs the given stack trace against this node and its children. + * Gets the encoded total sample times logged for this node in milliseconds. 
* - * @param describer the function that describes the elements of the stack - * @param stack the stack - * @param time the total time to log - * @param the stack trace element type + * @return the total times */ - public void log(StackTraceNode.Describer describer, T[] stack, long time) { - if (stack.length == 0) { - return; - } - - this.totalTime.add(time); - - AbstractNode node = this; - T previousElement = null; - - for (int offset = 0; offset < Math.min(MAX_STACK_DEPTH, stack.length); offset++) { - T element = stack[(stack.length - 1) - offset]; - - node = node.resolveChild(describer.describe(element, previousElement)); - node.totalTime.add(time); + protected double[] encodeTimesForProto(ProtoTimeEncoder encoder) { + return encoder.encode(this.times); + } - previousElement = element; - } + public Collection getChildren() { + return this.children.values(); } - private StackTraceNode resolveChild(StackTraceNode.Description description) { + protected StackTraceNode resolveChild(StackTraceNode.Description description) { StackTraceNode result = this.children.get(description); // fast path if (result != null) { return result; @@ -96,7 +100,7 @@ public abstract class AbstractNode { * @param other the other node */ protected void merge(AbstractNode other) { - this.totalTime.add(other.totalTime.longValue()); + other.times.forEach((key, value) -> getTimeAccumulator((int) key).add(value.longValue())); for (Map.Entry child : other.children.entrySet()) { resolveChild(child.getKey()).merge(child.getValue()); } @@ -123,7 +127,7 @@ public abstract class AbstractNode { list.add(child); } - list.sort(null); + //list.sort(null); return list; } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java index b0d9237..ed938d5 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/StackTraceNode.java @@ -20,6 +20,7 @@ package me.lucko.spark.common.sampler.node; +import me.lucko.spark.common.sampler.window.ProtoTimeEncoder; import me.lucko.spark.common.util.MethodDisambiguator; import me.lucko.spark.proto.SparkSamplerProtos; @@ -30,7 +31,7 @@ import java.util.Objects; /** * Represents a stack trace element within the {@link AbstractNode node} structure. */ -public final class StackTraceNode extends AbstractNode implements Comparable { +public final class StackTraceNode extends AbstractNode { /** * Magic number to denote "no present" line number for a node. 
@@ -64,12 +65,16 @@ public final class StackTraceNode extends AbstractNode implements Comparable= 0) { proto.setLineNumber(this.description.lineNumber); } @@ -87,26 +92,12 @@ public final class StackTraceNode extends AbstractNode implements Comparable { + public static final class Description { private final String className; private final String methodName; @@ -162,54 +153,6 @@ public final class StackTraceNode extends AbstractNode implements Comparable> int nullCompare(T a, T b) { - if (a == null && b == null) { - return 0; - } else if (a == null) { - return -1; - } else if (b == null) { - return 1; - } else { - return a.compareTo(b); - } - } - - @Override - public int compareTo(Description that) { - if (this == that) { - return 0; - } - - int i = this.className.compareTo(that.className); - if (i != 0) { - return i; - } - - i = this.methodName.compareTo(that.methodName); - if (i != 0) { - return i; - } - - i = nullCompare(this.methodDescription, that.methodDescription); - if (i != 0) { - return i; - } - - if (this.methodDescription != null && that.methodDescription != null) { - i = this.methodDescription.compareTo(that.methodDescription); - if (i != 0) { - return i; - } - } - - i = Integer.compare(this.lineNumber, that.lineNumber); - if (i != 0) { - return i; - } - - return Integer.compare(this.parentLineNumber, that.parentLineNumber); - } - @Override public boolean equals(Object o) { if (this == o) return true; diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java index ed97443..1dce523 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java @@ -20,6 +20,7 @@ package me.lucko.spark.common.sampler.node; +import me.lucko.spark.common.sampler.window.ProtoTimeEncoder; import me.lucko.spark.proto.SparkSamplerProtos; /** @@ -53,13 +54,46 @@ public final class ThreadNode extends AbstractNode { this.label = label; } - public SparkSamplerProtos.ThreadNode toProto(MergeMode mergeMode) { + /** + * Logs the given stack trace against this node and its children. 
+ * + * @param describer the function that describes the elements of the stack + * @param stack the stack + * @param time the total time to log + * @param window the window + * @param the stack trace element type + */ + public void log(StackTraceNode.Describer describer, T[] stack, long time, int window) { + if (stack.length == 0) { + return; + } + + getTimeAccumulator(window).add(time); + + AbstractNode node = this; + T previousElement = null; + + for (int offset = 0; offset < Math.min(MAX_STACK_DEPTH, stack.length); offset++) { + T element = stack[(stack.length - 1) - offset]; + + node = node.resolveChild(describer.describe(element, previousElement)); + node.getTimeAccumulator(window).add(time); + + previousElement = element; + } + } + + public SparkSamplerProtos.ThreadNode toProto(MergeMode mergeMode, ProtoTimeEncoder timeEncoder) { SparkSamplerProtos.ThreadNode.Builder proto = SparkSamplerProtos.ThreadNode.newBuilder() - .setName(getThreadLabel()) - .setTime(getTotalTime()); + .setName(getThreadLabel()); + + double[] times = encodeTimesForProto(timeEncoder); + for (double time : times) { + proto.addTimes(time); + } for (StackTraceNode child : exportChildren(mergeMode)) { - proto.addChildren(child.toProto(mergeMode)); + proto.addChildren(child.toProto(mergeMode, timeEncoder)); } return proto.build(); diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/window/ProfilingWindowUtils.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/window/ProfilingWindowUtils.java new file mode 100644 index 0000000..109adb3 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/window/ProfilingWindowUtils.java @@ -0,0 +1,36 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.sampler.window; + +public enum ProfilingWindowUtils { + ; + + /** + * Gets the profiling window for the given time in unix-millis. + * + * @param time the time in milliseconds + * @return the window + */ + public static int unixMillisToWindow(long time) { + // one window per minute + return (int) (time / 60_000); + } +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/window/ProtoTimeEncoder.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/window/ProtoTimeEncoder.java new file mode 100644 index 0000000..edb2309 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/window/ProtoTimeEncoder.java @@ -0,0 +1,94 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. 
+ * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.sampler.window; + +import me.lucko.spark.common.sampler.async.jfr.Dictionary; +import me.lucko.spark.common.sampler.node.AbstractNode; +import me.lucko.spark.common.sampler.node.ThreadNode; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.atomic.LongAdder; +import java.util.stream.IntStream; + +/** + * Encodes a map of int->double into a double array. + */ +public class ProtoTimeEncoder { + /** A sorted array of all possible keys to encode */ + private final int[] keys; + /** A map of key value -> index in the keys array */ + private final Map keysToIndex; + + public ProtoTimeEncoder(List sourceData) { + // get an array of all keys that show up in the source data + this.keys = sourceData.stream() + .map(AbstractNode::getTimeWindows) + .reduce(IntStream.empty(), IntStream::concat) + .distinct() + .sorted() + .toArray(); + + // construct a reverse index lookup + this.keysToIndex = new HashMap<>(this.keys.length); + for (int i = 0; i < this.keys.length; i++) { + this.keysToIndex.put(this.keys[i], i); + } + } + + /** + * Gets an array of the keys that could be encoded by this encoder. + * + * @return an array of keys + */ + public int[] getKeys() { + return this.keys; + } + + /** + * Encode a {@link Dictionary} (map) of times/durations into a double array. + * + * @param times a dictionary of times (unix-time millis -> duration in microseconds) + * @return the times encoded as a double array + */ + public double[] encode(Dictionary times) { + // construct an array of values - length needs to exactly match the + // number of keys, even if some values are zero. + double[] array = new double[this.keys.length]; + + times.forEach((key, value) -> { + // get the index for the given key + Integer idx = this.keysToIndex.get((int) key); + if (idx == null) { + throw new RuntimeException("No index for key " + key + " in " + this.keysToIndex.keySet()); + } + + // convert the duration from microseconds -> milliseconds + double durationInMilliseconds = value.longValue() / 1000d; + + // store in the array + array[idx] = durationInMilliseconds; + }); + + return array; + } +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/window/WindowStatisticsCollector.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/window/WindowStatisticsCollector.java new file mode 100644 index 0000000..47f739d --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/window/WindowStatisticsCollector.java @@ -0,0 +1,267 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. 
+ * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.sampler.window; + +import me.lucko.spark.common.SparkPlatform; +import me.lucko.spark.common.monitor.cpu.CpuMonitor; +import me.lucko.spark.common.monitor.tick.TickStatistics; +import me.lucko.spark.common.tick.TickHook; +import me.lucko.spark.common.util.RollingAverage; +import me.lucko.spark.proto.SparkProtos; + +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.atomic.AtomicInteger; + +/** + * Collects statistics for each profiling window. + */ +public class WindowStatisticsCollector { + private static final SparkProtos.WindowStatistics ZERO = SparkProtos.WindowStatistics.newBuilder().build(); + + /** The platform */ + private final SparkPlatform platform; + + /** Map of profiling window -> statistics */ + private final Map stats; + + private TickCounter tickCounter; + + public WindowStatisticsCollector(SparkPlatform platform) { + this.platform = platform; + this.stats = new ConcurrentHashMap<>(); + } + + /** + * Indicates to the statistics collector that it should count the number + * of ticks in each window using the provided {@link TickHook}. + * + * @param hook the tick hook + */ + public void startCountingTicks(TickHook hook) { + this.tickCounter = new NormalTickCounter(this.platform, hook); + } + + /** + * Indicates to the statistics collector that it should count the number + * of ticks in each window, according to how many times the + * {@link ExplicitTickCounter#increment()} method is called. + * + * @param hook the tick hook + * @return the counter + */ + public ExplicitTickCounter startCountingTicksExplicit(TickHook hook) { + ExplicitTickCounter counter = new ExplicitTickCounter(this.platform, hook); + this.tickCounter = counter; + return counter; + } + + public void stop() { + if (this.tickCounter != null) { + this.tickCounter.stop(); + } + } + + /** + * Gets the total number of ticks that have passed between the time + * when the profiler started and stopped. + * + *
<p>Importantly, note that this metric is different to the total number of ticks in a window + * (which is recorded by {@link SparkProtos.WindowStatistics#getTicks()}) or the total number + * of observed ticks if the 'only-ticks-over' aggregator is being used + * (which is recorded by {@link SparkProtos.WindowStatistics#getTicks()} + * and {@link ExplicitTickCounter#getTotalCountedTicks()}).</p>
+ * + * @return the total number of ticks in the profile + */ + public int getTotalTicks() { + return this.tickCounter == null ? -1 : this.tickCounter.getTotalTicks(); + } + + /** + * Measures statistics for the given window if none have been recorded yet. + * + * @param window the window + */ + public void measureNow(int window) { + this.stats.computeIfAbsent(window, w -> measure()); + } + + /** + * Ensures that the exported map has statistics (even if they are zeroed) for all windows. + * + * @param windows the expected windows + */ + public void ensureHasStatisticsForAllWindows(int[] windows) { + for (int window : windows) { + this.stats.computeIfAbsent(window, w -> ZERO); + } + } + + public Map export() { + return this.stats; + } + + /** + * Measures current statistics, where possible averaging over the last minute. (1 min = 1 window) + * + * @return the current statistics + */ + private SparkProtos.WindowStatistics measure() { + SparkProtos.WindowStatistics.Builder builder = SparkProtos.WindowStatistics.newBuilder(); + + TickStatistics tickStatistics = this.platform.getTickStatistics(); + if (tickStatistics != null) { + builder.setTps(tickStatistics.tps1Min()); + + RollingAverage mspt = tickStatistics.duration1Min(); + if (mspt != null) { + builder.setMsptMedian(mspt.median()); + builder.setMsptMax(mspt.max()); + } + } + + if (this.tickCounter != null) { + int ticks = this.tickCounter.getCountedTicksThisWindowAndReset(); + builder.setTicks(ticks); + } + + builder.setCpuProcess(CpuMonitor.processLoad1MinAvg()); + builder.setCpuSystem(CpuMonitor.systemLoad1MinAvg()); + + return builder.build(); + } + + /** + * Responsible for counting the number of ticks in a profile/window. + */ + public interface TickCounter { + + /** + * Stop the counter. + */ + void stop(); + + /** + * Get the total number of ticks. + * + *
<p>See {@link WindowStatisticsCollector#getTotalTicks()} for a longer explanation + * of what this means exactly.</p>
+ * + * @return the total ticks + */ + int getTotalTicks(); + + /** + * Gets the total number of ticks counted in the last window, + * and resets the counter to zero. + * + * @return the number of ticks counted since the last time this method was called + */ + int getCountedTicksThisWindowAndReset(); + } + + private static abstract class BaseTickCounter implements TickCounter { + protected final SparkPlatform platform; + protected final TickHook tickHook; + + /** The game tick when sampling first began */ + private final int startTick; + + /** The game tick when sampling stopped */ + private int stopTick = -1; + + BaseTickCounter(SparkPlatform platform, TickHook tickHook) { + this.platform = platform; + this.tickHook = tickHook; + this.startTick = this.tickHook.getCurrentTick(); + } + + @Override + public void stop() { + this.stopTick = this.tickHook.getCurrentTick(); + } + + @Override + public int getTotalTicks() { + if (this.startTick == -1) { + throw new IllegalStateException("start tick not recorded"); + } + if (this.stopTick == -1) { + throw new IllegalStateException("stop tick not recorded"); + } + + return this.stopTick - this.startTick; + } + } + + /** + * Counts the number of ticks in a window using a {@link TickHook}. + */ + public static final class NormalTickCounter extends BaseTickCounter { + private int last; + + NormalTickCounter(SparkPlatform platform, TickHook tickHook) { + super(platform, tickHook); + this.last = this.tickHook.getCurrentTick(); + } + + @Override + public int getCountedTicksThisWindowAndReset() { + synchronized (this) { + int now = this.tickHook.getCurrentTick(); + int ticks = now - this.last; + this.last = now; + return ticks; + } + } + } + + /** + * Counts the number of ticks in a window according to the number of times + * {@link #increment()} is called. + * + * Used by the {@link me.lucko.spark.common.sampler.java.TickedDataAggregator}. 
+ */ + public static final class ExplicitTickCounter extends BaseTickCounter { + private final AtomicInteger counted = new AtomicInteger(); + private final AtomicInteger total = new AtomicInteger(); + + ExplicitTickCounter(SparkPlatform platform, TickHook tickHook) { + super(platform, tickHook); + } + + public void increment() { + this.counted.incrementAndGet(); + this.total.incrementAndGet(); + } + + public int getTotalCountedTicks() { + return this.total.get(); + } + + @Override + public int getCountedTicksThisWindowAndReset() { + return this.counted.getAndSet(0); + } + } + +} diff --git a/spark-common/src/main/proto/spark/spark.proto b/spark-common/src/main/proto/spark/spark.proto index 2ea341f..be76bd7 100644 --- a/spark-common/src/main/proto/spark/spark.proto +++ b/spark-common/src/main/proto/spark/spark.proto @@ -152,6 +152,15 @@ message WorldStatistics { } } +message WindowStatistics { + int32 ticks = 1; + double cpu_process = 2; + double cpu_system = 3; + double tps = 4; + double mspt_median = 5; + double mspt_max = 6; +} + message RollingAverageValues { double mean = 1; double max = 2; diff --git a/spark-common/src/main/proto/spark/spark_sampler.proto b/spark-common/src/main/proto/spark/spark_sampler.proto index 3f30fb2..2cb08f1 100644 --- a/spark-common/src/main/proto/spark/spark_sampler.proto +++ b/spark-common/src/main/proto/spark/spark_sampler.proto @@ -13,6 +13,8 @@ message SamplerData { map class_sources = 3; // optional map method_sources = 4; // optional map line_sources = 5; // optional + repeated int32 time_windows = 6; + map time_window_statistics = 7; } message SamplerMetadata { @@ -69,16 +71,25 @@ message SamplerMetadata { message ThreadNode { string name = 1; - double time = 2; + + // replaced + reserved 2; + reserved "time"; + repeated StackTraceNode children = 3; + repeated double times = 4; } message StackTraceNode { - double time = 1; + // replaced + reserved 1; + reserved "time"; + repeated StackTraceNode children = 2; string class_name = 3; string method_name = 4; int32 parent_line_number = 5; // optional int32 line_number = 6; // optional string method_desc = 7; // optional + repeated double times = 8; } -- cgit From fafc14712fa78001b431241bd961ca429d6f74bc Mon Sep 17 00:00:00 2001 From: Luck Date: Thu, 27 Oct 2022 23:35:27 +0100 Subject: Tidy up command feedback messages --- .../java/me/lucko/spark/common/SparkPlatform.java | 23 ++++-- .../common/command/modules/SamplerModule.java | 91 +++++++++++++--------- 2 files changed, 74 insertions(+), 40 deletions(-) (limited to 'spark-common/src/main/java/me/lucko/spark/common/command') diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java index 2790a3c..4c3875c 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java +++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java @@ -52,6 +52,7 @@ import me.lucko.spark.common.util.BytebinClient; import me.lucko.spark.common.util.Configuration; import me.lucko.spark.common.util.TemporaryFiles; +import net.kyori.adventure.text.Component; import net.kyori.adventure.text.event.ClickEvent; import java.io.IOException; @@ -78,7 +79,6 @@ import static net.kyori.adventure.text.format.NamedTextColor.GRAY; import static net.kyori.adventure.text.format.NamedTextColor.RED; import static net.kyori.adventure.text.format.NamedTextColor.WHITE; import static net.kyori.adventure.text.format.TextDecoration.BOLD; -import static 
net.kyori.adventure.text.format.TextDecoration.UNDERLINED; /** * Abstract spark implementation used by all platforms. @@ -362,14 +362,15 @@ public class SparkPlatform { .append(text("v" + getPlugin().getVersion(), GRAY)) .build() ); + + String helpCmd = "/" + getPlugin().getCommandName() + " help"; resp.replyPrefixed(text() .color(GRAY) - .append(text("Use ")) + .append(text("Run ")) .append(text() - .content("/" + getPlugin().getCommandName() + " help") + .content(helpCmd) .color(WHITE) - .decoration(UNDERLINED, true) - .clickEvent(ClickEvent.runCommand("/" + getPlugin().getCommandName() + " help")) + .clickEvent(ClickEvent.runCommand(helpCmd)) .build() ) .append(text(" to view usage information.")) @@ -462,6 +463,18 @@ public class SparkPlatform { } } } + + sender.reply(Component.empty()); + sender.replyPrefixed(text() + .append(text("For full usage information, please go to: ")) + .append(text() + .content("https://spark.lucko.me/docs/Command-Usage") + .color(WHITE) + .clickEvent(ClickEvent.openUrl("https://spark.lucko.me/docs/Command-Usage")) + .build() + ) + .build() + ); } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java index c1e4981..6dbf913 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java @@ -42,6 +42,7 @@ import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.util.MethodDisambiguator; import me.lucko.spark.proto.SparkSamplerProtos; +import net.kyori.adventure.text.Component; import net.kyori.adventure.text.event.ClickEvent; import java.io.IOException; @@ -62,6 +63,7 @@ import static net.kyori.adventure.text.format.NamedTextColor.DARK_GRAY; import static net.kyori.adventure.text.format.NamedTextColor.GOLD; import static net.kyori.adventure.text.format.NamedTextColor.GRAY; import static net.kyori.adventure.text.format.NamedTextColor.RED; +import static net.kyori.adventure.text.format.NamedTextColor.WHITE; public class SamplerModule implements CommandModule { private static final String SPARK_SAMPLER_MEDIA_TYPE = "application/x-spark-sampler"; @@ -83,17 +85,11 @@ public class SamplerModule implements CommandModule { .aliases("profiler", "sampler") .argumentUsage("info", null) .argumentUsage("stop", null) - .argumentUsage("cancel", null) - .argumentUsage("interval", "interval millis") + .argumentUsage("timeout", "timeout seconds") + .argumentUsage("thread *", null) .argumentUsage("thread", "thread name") .argumentUsage("only-ticks-over", "tick length millis") - .argumentUsage("timeout", "timeout seconds") - .argumentUsage("regex --thread", "thread regex") - .argumentUsage("combine-all", null) - .argumentUsage("not-combined", null) - .argumentUsage("force-java-sampler", null) - .argumentUsage("stop --comment", "comment") - .argumentUsage("stop --save-to-file", null) + .argumentUsage("interval", "interval millis") .executor(this::profiler) .tabCompleter((platform, sender, arguments) -> { if (arguments.contains("--info") || arguments.contains("--cancel")) { @@ -120,7 +116,7 @@ public class SamplerModule implements CommandModule { private void profiler(SparkPlatform platform, CommandSender sender, CommandResponseHandler resp, Arguments arguments) { if (arguments.boolFlag("info")) { - profilerInfo(resp); + profilerInfo(platform, resp); return; } @@ -138,6 +134,11 @@ public class SamplerModule 
implements CommandModule { } private void profilerStart(SparkPlatform platform, CommandSender sender, CommandResponseHandler resp, Arguments arguments) { + if (this.activeSampler != null) { + profilerInfo(platform, resp); + return; + } + int timeoutSeconds = arguments.intFlag("timeout"); if (timeoutSeconds != -1 && timeoutSeconds <= 10) { resp.replyPrefixed(text("The specified timeout is not long enough for accurate results to be formed. " + @@ -194,12 +195,7 @@ public class SamplerModule implements CommandModule { } } - if (this.activeSampler != null) { - resp.replyPrefixed(text("An active profiler is already running.")); - return; - } - - resp.broadcastPrefixed(text("Initializing a new profiler, please wait...")); + resp.broadcastPrefixed(text("Starting a new profiler, please wait...")); SamplerBuilder builder = new SamplerBuilder(); builder.threadDumper(threadDumper); @@ -217,13 +213,16 @@ public class SamplerModule implements CommandModule { Sampler sampler = this.activeSampler = builder.start(platform); resp.broadcastPrefixed(text() - .append(text("Profiler now active!", GOLD)) + .append(text("Profiler is now running!", GOLD)) .append(space()) .append(text("(" + (sampler instanceof AsyncSampler ? "async" : "built-in java") + ")", DARK_GRAY)) .build() ); + if (timeoutSeconds == -1) { - resp.broadcastPrefixed(text("Use '/" + platform.getPlugin().getCommandName() + " profiler --stop' to stop profiling and upload the results.")); + resp.broadcastPrefixed(text("It will run in the background until it is stopped by an admin.")); + resp.broadcastPrefixed(text("To stop the profiler and upload the results, run:")); + resp.broadcastPrefixed(cmdPrompt("/" + platform.getPlugin().getCommandName() + " profiler --stop")); } else { resp.broadcastPrefixed(text("The results will be automatically returned after the profiler has been running for " + timeoutSeconds + " seconds.")); } @@ -258,20 +257,28 @@ public class SamplerModule implements CommandModule { } } - private void profilerInfo(CommandResponseHandler resp) { + private void profilerInfo(SparkPlatform platform, CommandResponseHandler resp) { if (this.activeSampler == null) { - resp.replyPrefixed(text("There isn't an active profiler running.")); + resp.replyPrefixed(text("The profiler isn't running!")); + resp.replyPrefixed(text("To start a new one, run:")); + resp.replyPrefixed(cmdPrompt("/" + platform.getPlugin().getCommandName() + " profiler")); } else { + resp.replyPrefixed(text("Profiler is already running!", GOLD)); + + long runningTime = (System.currentTimeMillis() - this.activeSampler.getStartTime()) / 1000L; + resp.replyPrefixed(text("So far, it has profiled for " + runningTime + " seconds.")); + long timeout = this.activeSampler.getAutoEndTime(); if (timeout == -1) { - resp.replyPrefixed(text("There is an active profiler currently running, with no defined timeout.")); + resp.replyPrefixed(text("To stop the profiler and upload the results, run:")); + resp.replyPrefixed(cmdPrompt("/" + platform.getPlugin().getCommandName() + " profiler --stop")); } else { long timeoutDiff = (timeout - System.currentTimeMillis()) / 1000L; - resp.replyPrefixed(text("There is an active profiler currently running, due to timeout in " + timeoutDiff + " seconds.")); + resp.replyPrefixed(text("It is due to complete automatically and upload results in " + timeoutDiff + " seconds.")); } - long runningTime = (System.currentTimeMillis() - this.activeSampler.getStartTime()) / 1000L; - resp.replyPrefixed(text("It has been profiling for " + runningTime + " seconds so 
far.")); + resp.replyPrefixed(text("To cancel the profiler without uploading the results, run:")); + resp.replyPrefixed(cmdPrompt("/" + platform.getPlugin().getCommandName() + " profiler --cancel")); } } @@ -280,7 +287,7 @@ public class SamplerModule implements CommandModule { resp.replyPrefixed(text("There isn't an active profiler running.")); } else { close(); - resp.broadcastPrefixed(text("The active profiler has been cancelled.", GOLD)); + resp.broadcastPrefixed(text("Profiler has been cancelled.", GOLD)); } } @@ -289,11 +296,17 @@ public class SamplerModule implements CommandModule { resp.replyPrefixed(text("There isn't an active profiler running.")); } else { this.activeSampler.stop(); - resp.broadcastPrefixed(text("The active profiler has been stopped! Uploading results...")); + + boolean saveToFile = arguments.boolFlag("save-to-file"); + if (saveToFile) { + resp.broadcastPrefixed(text("Stopping the profiler & saving results, please wait...")); + } else { + resp.broadcastPrefixed(text("Stopping the profiler & uploading results, please wait...")); + } + String comment = Iterables.getFirst(arguments.stringFlag("comment"), null); MethodDisambiguator methodDisambiguator = new MethodDisambiguator(); MergeMode mergeMode = arguments.boolFlag("separate-parent-calls") ? MergeMode.separateParentCalls(methodDisambiguator) : MergeMode.sameMethod(methodDisambiguator); - boolean saveToFile = arguments.boolFlag("save-to-file"); handleUpload(platform, resp, this.activeSampler, comment, mergeMode, saveToFile); this.activeSampler = null; } @@ -310,7 +323,7 @@ public class SamplerModule implements CommandModule { String key = platform.getBytebinClient().postContent(output, SPARK_SAMPLER_MEDIA_TYPE).key(); String url = platform.getViewerUrl() + key; - resp.broadcastPrefixed(text("Profiler results:", GOLD)); + resp.broadcastPrefixed(text("Profiler stopped & upload complete!", GOLD)); resp.broadcast(text() .content(url) .color(GRAY) @@ -331,13 +344,9 @@ public class SamplerModule implements CommandModule { try { Files.write(file, output.toByteArray()); - resp.broadcastPrefixed(text() - .content("Profile written to: ") - .color(GOLD) - .append(text(file.toString(), GRAY)) - .build() - ); - resp.broadcastPrefixed(text("You can read the profile file using the viewer web-app - " + platform.getViewerUrl(), GRAY)); + resp.broadcastPrefixed(text("Profiler stopped & save complete!", GOLD)); + resp.broadcastPrefixed(text("Data has been written to: " + file)); + resp.broadcastPrefixed(text("You can view the profile file using the web app @ " + platform.getViewerUrl(), GRAY)); platform.getActivityLog().addToLog(Activity.fileActivity(resp.sender(), System.currentTimeMillis(), "Profiler", file.toString())); } catch (IOException e) { @@ -346,4 +355,16 @@ public class SamplerModule implements CommandModule { } } } + + private static Component cmdPrompt(String cmd) { + return text() + .append(text(" ")) + .append(text() + .content(cmd) + .color(WHITE) + .clickEvent(ClickEvent.runCommand(cmd)) + .build() + ) + .build(); + } } -- cgit From 76f43ab59d3839600bd9e040ff2d09199ebe778a Mon Sep 17 00:00:00 2001 From: Luck Date: Sun, 13 Nov 2022 19:15:54 +0000 Subject: Limit profile length to 1 hour --- .../java/me/lucko/spark/common/SparkPlatform.java | 8 +++ .../common/command/modules/SamplerModule.java | 50 ++++++-------- .../lucko/spark/common/sampler/SamplerBuilder.java | 23 +++++-- .../spark/common/sampler/SamplerContainer.java | 76 ++++++++++++++++++++++ .../sampler/aggregator/AbstractDataAggregator.java | 6 ++ 
.../common/sampler/aggregator/DataAggregator.java | 8 +++ .../spark/common/sampler/async/AsyncSampler.java | 14 +++- .../spark/common/sampler/java/JavaSampler.java | 9 ++- .../spark/common/sampler/node/AbstractNode.java | 28 +++++--- .../spark/common/sampler/node/ThreadNode.java | 44 +++++++++++++ .../sampler/window/ProfilingWindowUtils.java | 38 ++++++++++- .../common/sampler/window/ProtoTimeEncoder.java | 7 +- 12 files changed, 254 insertions(+), 57 deletions(-) create mode 100644 spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerContainer.java (limited to 'spark-common/src/main/java/me/lucko/spark/common/command') diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java index 4c3875c..a015e42 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java +++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java @@ -45,6 +45,7 @@ import me.lucko.spark.common.monitor.ping.PingStatistics; import me.lucko.spark.common.monitor.ping.PlayerPingProvider; import me.lucko.spark.common.monitor.tick.TickStatistics; import me.lucko.spark.common.platform.PlatformStatisticsProvider; +import me.lucko.spark.common.sampler.SamplerContainer; import me.lucko.spark.common.sampler.source.ClassSourceLookup; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.tick.TickReporter; @@ -98,6 +99,7 @@ public class SparkPlatform { private final List commands; private final ReentrantLock commandExecuteLock = new ReentrantLock(true); private final ActivityLog activityLog; + private final SamplerContainer samplerContainer; private final TickHook tickHook; private final TickReporter tickReporter; private final TickStatistics tickStatistics; @@ -137,6 +139,8 @@ public class SparkPlatform { this.activityLog = new ActivityLog(plugin.getPluginDirectory().resolve("activity.json")); this.activityLog.load(); + this.samplerContainer = new SamplerContainer(); + this.tickHook = plugin.createTickHook(); this.tickReporter = plugin.createTickReporter(); this.tickStatistics = this.tickHook != null || this.tickReporter != null ? 
new TickStatistics() : null; @@ -229,6 +233,10 @@ public class SparkPlatform { return this.activityLog; } + public SamplerContainer getSamplerContainer() { + return this.samplerContainer; + } + public TickHook getTickHook() { return this.tickHook; } diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java index 6dbf913..00bf1a9 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java @@ -68,17 +68,6 @@ import static net.kyori.adventure.text.format.NamedTextColor.WHITE; public class SamplerModule implements CommandModule { private static final String SPARK_SAMPLER_MEDIA_TYPE = "application/x-spark-sampler"; - /** The sampler instance currently running, if any */ - private Sampler activeSampler = null; - - @Override - public void close() { - if (this.activeSampler != null) { - this.activeSampler.stop(); - this.activeSampler = null; - } - } - @Override public void registerCommands(Consumer consumer) { consumer.accept(Command.builder() @@ -121,7 +110,7 @@ public class SamplerModule implements CommandModule { } if (arguments.boolFlag("cancel")) { - profilerCancel(resp); + profilerCancel(platform, resp); return; } @@ -134,7 +123,7 @@ public class SamplerModule implements CommandModule { } private void profilerStart(SparkPlatform platform, CommandSender sender, CommandResponseHandler resp, Arguments arguments) { - if (this.activeSampler != null) { + if (platform.getSamplerContainer().getActiveSampler() != null) { profilerInfo(platform, resp); return; } @@ -210,7 +199,8 @@ public class SamplerModule implements CommandModule { if (ticksOver != -1) { builder.ticksOver(ticksOver, tickHook); } - Sampler sampler = this.activeSampler = builder.start(platform); + Sampler sampler = builder.start(platform); + platform.getSamplerContainer().setActiveSampler(sampler); resp.broadcastPrefixed(text() .append(text("Profiler is now running!", GOLD)) @@ -227,7 +217,7 @@ public class SamplerModule implements CommandModule { resp.broadcastPrefixed(text("The results will be automatically returned after the profiler has been running for " + timeoutSeconds + " seconds.")); } - CompletableFuture future = this.activeSampler.getFuture(); + CompletableFuture future = sampler.getFuture(); // send message if profiling fails future.whenCompleteAsync((s, throwable) -> { @@ -238,11 +228,7 @@ public class SamplerModule implements CommandModule { }); // set activeSampler to null when complete. 
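The replacement below hands ownership of the active sampler from the command module to the platform-level SamplerContainer, whose full source appears later in this log. The key rule is compare-and-set: unsetting only succeeds if the completed sampler is still the active one. A reduced sketch of that rule (illustrative names, not spark's class):

import java.util.concurrent.atomic.AtomicReference;

// Illustrative sketch of the container's CAS-based ownership rules.
public class ActiveHolder<S> {
    private final AtomicReference<S> active = new AtomicReference<>();

    public void set(S sampler) {
        // only one sampler may be active at a time
        if (!this.active.compareAndSet(null, sampler)) {
            throw new IllegalStateException("another sampler is already active");
        }
    }

    public void unset(S sampler) {
        // a safe no-op if a different sampler has since been installed
        this.active.compareAndSet(sampler, null);
    }
}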
- future.whenCompleteAsync((s, throwable) -> { - if (sampler == this.activeSampler) { - this.activeSampler = null; - } - }); + sampler.getFuture().whenCompleteAsync((s, throwable) -> platform.getSamplerContainer().unsetActiveSampler(s)); // await the result if (timeoutSeconds != -1) { @@ -258,17 +244,18 @@ public class SamplerModule implements CommandModule { } private void profilerInfo(SparkPlatform platform, CommandResponseHandler resp) { - if (this.activeSampler == null) { + Sampler sampler = platform.getSamplerContainer().getActiveSampler(); + if (sampler == null) { resp.replyPrefixed(text("The profiler isn't running!")); resp.replyPrefixed(text("To start a new one, run:")); resp.replyPrefixed(cmdPrompt("/" + platform.getPlugin().getCommandName() + " profiler")); } else { resp.replyPrefixed(text("Profiler is already running!", GOLD)); - long runningTime = (System.currentTimeMillis() - this.activeSampler.getStartTime()) / 1000L; + long runningTime = (System.currentTimeMillis() - sampler.getStartTime()) / 1000L; resp.replyPrefixed(text("So far, it has profiled for " + runningTime + " seconds.")); - long timeout = this.activeSampler.getAutoEndTime(); + long timeout = sampler.getAutoEndTime(); if (timeout == -1) { resp.replyPrefixed(text("To stop the profiler and upload the results, run:")); resp.replyPrefixed(cmdPrompt("/" + platform.getPlugin().getCommandName() + " profiler --stop")); @@ -282,20 +269,24 @@ public class SamplerModule implements CommandModule { } } - private void profilerCancel(CommandResponseHandler resp) { - if (this.activeSampler == null) { + private void profilerCancel(SparkPlatform platform, CommandResponseHandler resp) { + Sampler sampler = platform.getSamplerContainer().getActiveSampler(); + if (sampler == null) { resp.replyPrefixed(text("There isn't an active profiler running.")); } else { - close(); + platform.getSamplerContainer().stopActiveSampler(); resp.broadcastPrefixed(text("Profiler has been cancelled.", GOLD)); } } private void profilerStop(SparkPlatform platform, CommandSender sender, CommandResponseHandler resp, Arguments arguments) { - if (this.activeSampler == null) { + Sampler sampler = platform.getSamplerContainer().getActiveSampler(); + + if (sampler == null) { resp.replyPrefixed(text("There isn't an active profiler running.")); } else { - this.activeSampler.stop(); + platform.getSamplerContainer().unsetActiveSampler(sampler); + sampler.stop(); boolean saveToFile = arguments.boolFlag("save-to-file"); if (saveToFile) { @@ -307,8 +298,7 @@ public class SamplerModule implements CommandModule { String comment = Iterables.getFirst(arguments.stringFlag("comment"), null); MethodDisambiguator methodDisambiguator = new MethodDisambiguator(); MergeMode mergeMode = arguments.boolFlag("separate-parent-calls") ? 
MergeMode.separateParentCalls(methodDisambiguator) : MergeMode.sameMethod(methodDisambiguator); - handleUpload(platform, resp, this.activeSampler, comment, mergeMode, saveToFile); - this.activeSampler = null; + handleUpload(platform, resp, sampler, comment, mergeMode, saveToFile); } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java index 52a7387..382950a 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java @@ -93,15 +93,28 @@ public class SamplerBuilder { } public Sampler start(SparkPlatform platform) { + boolean onlyTicksOverMode = this.ticksOver != -1 && this.tickHook != null; + boolean canUseAsyncProfiler = this.useAsyncProfiler && + !(this.ignoreSleeping || this.ignoreNative) && + !(this.threadDumper instanceof ThreadDumper.Regex) && + AsyncProfilerAccess.getInstance(platform).checkSupported(platform); + + int intervalMicros = (int) (this.samplingInterval * 1000d); Sampler sampler; - if (this.ticksOver != -1 && this.tickHook != null) { - sampler = new JavaSampler(platform, intervalMicros, this.threadDumper, this.threadGrouper, this.timeout, this.ignoreSleeping, this.ignoreNative, this.tickHook, this.ticksOver); - } else if (this.useAsyncProfiler && !(this.threadDumper instanceof ThreadDumper.Regex) && AsyncProfilerAccess.getInstance(platform).checkSupported(platform)) { - sampler = new AsyncSampler(platform, intervalMicros, this.threadDumper, this.threadGrouper, this.timeout); + if (onlyTicksOverMode) { + sampler = new JavaSampler(platform, intervalMicros, this.threadDumper, + this.threadGrouper, this.timeout, this.ignoreSleeping, this.ignoreNative, + this.tickHook, this.ticksOver); + + } else if (canUseAsyncProfiler) { + sampler = new AsyncSampler(platform, intervalMicros, this.threadDumper, + this.threadGrouper, this.timeout); + } else { - sampler = new JavaSampler(platform, intervalMicros, this.threadDumper, this.threadGrouper, this.timeout, this.ignoreSleeping, this.ignoreNative); + sampler = new JavaSampler(platform, intervalMicros, this.threadDumper, + this.threadGrouper, this.timeout, this.ignoreSleeping, this.ignoreNative); } sampler.start(); diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerContainer.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerContainer.java new file mode 100644 index 0000000..55913d8 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerContainer.java @@ -0,0 +1,76 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.sampler; + +import java.util.concurrent.atomic.AtomicReference; + +/** + * Container for the active sampler. 
+ */ +public class SamplerContainer implements AutoCloseable { + + private final AtomicReference activeSampler = new AtomicReference<>(); + + /** + * Gets the active sampler, or null if a sampler is not active. + * + * @return the active sampler + */ + public Sampler getActiveSampler() { + return this.activeSampler.get(); + } + + /** + * Sets the active sampler, throwing an exception if another sampler is already active. + * + * @param sampler the sampler + */ + public void setActiveSampler(Sampler sampler) { + if (!this.activeSampler.compareAndSet(null, sampler)) { + throw new IllegalStateException("Attempted to set active sampler when another was already active!"); + } + } + + /** + * Unsets the active sampler, if the provided sampler is active. + * + * @param sampler the sampler + */ + public void unsetActiveSampler(Sampler sampler) { + this.activeSampler.compareAndSet(sampler, null); + } + + /** + * Stops the active sampler, if there is one. + */ + public void stopActiveSampler() { + Sampler sampler = this.activeSampler.getAndSet(null); + if (sampler != null) { + sampler.stop(); + } + } + + @Override + public void close() { + stopActiveSampler(); + } + +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/AbstractDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/AbstractDataAggregator.java index ad9dee4..2c003e5 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/AbstractDataAggregator.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/AbstractDataAggregator.java @@ -27,6 +27,7 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; +import java.util.function.IntPredicate; /** * Abstract implementation of {@link DataAggregator}. @@ -51,6 +52,11 @@ public abstract class AbstractDataAggregator implements DataAggregator { return this.threadData.computeIfAbsent(group, ThreadNode::new); } + @Override + public void pruneData(IntPredicate timeWindowPredicate) { + this.threadData.values().removeIf(node -> node.removeTimeWindowsRecursively(timeWindowPredicate)); + } + @Override public List exportData() { List data = new ArrayList<>(this.threadData.values()); diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/DataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/DataAggregator.java index 5590a96..ed33204 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/DataAggregator.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/DataAggregator.java @@ -24,6 +24,7 @@ import me.lucko.spark.common.sampler.node.ThreadNode; import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata; import java.util.List; +import java.util.function.IntPredicate; /** * Aggregates sampling data. @@ -37,6 +38,13 @@ public interface DataAggregator { */ List exportData(); + /** + * Prunes windows of data from this aggregator if the given {@code timeWindowPredicate} returns true. + * + * @param timeWindowPredicate the predicate + */ + void pruneData(IntPredicate timeWindowPredicate); + /** * Gets metadata about the data aggregator instance. 
*/ diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java index 2c9bb5f..cbc81c7 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java @@ -76,15 +76,20 @@ public class AsyncSampler extends AbstractSampler { this.windowStatisticsCollector.startCountingTicks(tickHook); } - int window = ProfilingWindowUtils.unixMillisToWindow(System.currentTimeMillis()); + int window = ProfilingWindowUtils.windowNow(); AsyncProfilerJob job = this.profilerAccess.startNewProfilerJob(); job.init(this.platform, this.interval, this.threadDumper, window); job.start(); this.currentJob = job; - // rotate the sampler job every minute to put data into a new window - this.scheduler.scheduleAtFixedRate(this::rotateProfilerJob, 1, 1, TimeUnit.MINUTES); + // rotate the sampler job to put data into a new window + this.scheduler.scheduleAtFixedRate( + this::rotateProfilerJob, + ProfilingWindowUtils.WINDOW_SIZE_SECONDS, + ProfilingWindowUtils.WINDOW_SIZE_SECONDS, + TimeUnit.SECONDS + ); recordInitialGcStats(); scheduleTimeout(); @@ -117,6 +122,9 @@ public class AsyncSampler extends AbstractSampler { // aggregate the output of the previous job previousJob.aggregate(this.dataAggregator); + + // prune data older than the history size + this.dataAggregator.pruneData(ProfilingWindowUtils.keepHistoryBefore(window)); } } catch (Throwable e) { e.printStackTrace(); diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java index 8c96fd3..6aad5e3 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java @@ -34,8 +34,6 @@ import me.lucko.spark.common.sampler.window.WindowStatisticsCollector; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.proto.SparkSamplerProtos.SamplerData; -import org.checkerframework.checker.units.qual.A; - import java.lang.management.ManagementFactory; import java.lang.management.ThreadInfo; import java.lang.management.ThreadMXBean; @@ -145,10 +143,15 @@ public class JavaSampler extends AbstractSampler implements Runnable { JavaSampler.this.dataAggregator.insertData(threadInfo, this.window); } - // if we have just stepped over into a new window, collect statistics for the previous window + // if we have just stepped over into a new window... 
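// (illustration, an assumption drawn from the surrounding diff, not commit
// content: a sampler that began window w and has just crossed into window w+1
// measures statistics for w, then prunes any window earlier than
// (w+1) - HISTORY_SIZE, i.e. data from more than an hour ago under the
// default 60-window history)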
int previousWindow = JavaSampler.this.lastWindow.getAndSet(this.window); if (previousWindow != 0 && previousWindow != this.window) { + + // collect statistics for the previous window JavaSampler.this.windowStatisticsCollector.measureNow(previousWindow); + + // prune data older than the history size + JavaSampler.this.dataAggregator.pruneData(ProfilingWindowUtils.keepHistoryBefore(this.window)); } } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java index e6f6cf5..2e4b055 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/AbstractNode.java @@ -20,17 +20,18 @@ package me.lucko.spark.common.sampler.node; -import me.lucko.spark.common.sampler.async.jfr.Dictionary; import me.lucko.spark.common.sampler.window.ProtoTimeEncoder; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.LongAdder; -import java.util.stream.IntStream; +import java.util.function.IntPredicate; /** * Encapsulates a timed node in the sampling stack. @@ -43,9 +44,9 @@ public abstract class AbstractNode { private final Map children = new ConcurrentHashMap<>(); /** The accumulated sample time for this node, measured in microseconds */ - // long key = the window (effectively System.currentTimeMillis() / 60_000) + // Integer key = the window (effectively System.currentTimeMillis() / 60_000) // LongAdder value = accumulated time in microseconds - private final Dictionary times = new Dictionary<>(); + private final Map times = new HashMap<>(); /** * Gets the time accumulator for a given window @@ -67,10 +68,18 @@ public abstract class AbstractNode { * * @return the time windows */ - public IntStream getTimeWindows() { - IntStream.Builder keys = IntStream.builder(); - this.times.forEach((key, value) -> keys.add((int) key)); - return keys.build(); + public Set getTimeWindows() { + return this.times.keySet(); + } + + /** + * Removes time windows from this node if they pass the given {@code predicate} test. 
+ * + * @param predicate the predicate + * @return true if any time windows were removed + */ + public boolean removeTimeWindows(IntPredicate predicate) { + return this.times.keySet().removeIf(predicate::test); } /** @@ -100,7 +109,7 @@ public abstract class AbstractNode { * @param other the other node */ protected void merge(AbstractNode other) { - other.times.forEach((key, value) -> getTimeAccumulator((int) key).add(value.longValue())); + other.times.forEach((key, value) -> getTimeAccumulator(key).add(value.longValue())); for (Map.Entry child : other.children.entrySet()) { resolveChild(child.getKey()).merge(child.getValue()); } @@ -127,7 +136,6 @@ public abstract class AbstractNode { list.add(child); } - //list.sort(null); return list; } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java index 9faece6..5035046 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java @@ -25,9 +25,13 @@ import me.lucko.spark.common.util.IndexedListBuilder; import me.lucko.spark.proto.SparkSamplerProtos; import java.util.ArrayDeque; +import java.util.Collection; import java.util.Deque; +import java.util.Iterator; import java.util.LinkedList; import java.util.List; +import java.util.Queue; +import java.util.function.IntPredicate; /** * The root of a sampling stack for a given thread / thread group. @@ -89,6 +93,46 @@ public final class ThreadNode extends AbstractNode { } } + /** + * Removes time windows that match the given {@code predicate}. + * + * @param predicate the predicate to use to test the time windows + * @return true if this node is now empty + */ + public boolean removeTimeWindowsRecursively(IntPredicate predicate) { + Queue queue = new ArrayDeque<>(); + queue.add(this); + + while (!queue.isEmpty()) { + AbstractNode node = queue.remove(); + Collection children = node.getChildren(); + + boolean needToProcessChildren = false; + + for (Iterator it = children.iterator(); it.hasNext(); ) { + StackTraceNode child = it.next(); + + boolean windowsWereRemoved = child.removeTimeWindows(predicate); + boolean childIsNowEmpty = child.getTimeWindows().isEmpty(); + + if (childIsNowEmpty) { + it.remove(); + continue; + } + + if (windowsWereRemoved) { + needToProcessChildren = true; + } + } + + if (needToProcessChildren) { + queue.addAll(children); + } + } + + return getTimeWindows().isEmpty(); + } + public SparkSamplerProtos.ThreadNode toProto(MergeMode mergeMode, ProtoTimeEncoder timeEncoder) { SparkSamplerProtos.ThreadNode.Builder proto = SparkSamplerProtos.ThreadNode.newBuilder() .setName(getThreadLabel()); diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/window/ProfilingWindowUtils.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/window/ProfilingWindowUtils.java index 109adb3..be6f08a 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/window/ProfilingWindowUtils.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/window/ProfilingWindowUtils.java @@ -20,9 +20,25 @@ package me.lucko.spark.common.sampler.window; +import me.lucko.spark.common.sampler.aggregator.DataAggregator; + +import java.util.function.IntPredicate; + public enum ProfilingWindowUtils { ; + /** + * The size/duration of a profiling window in seconds. 
+ * (1 window = 1 minute) + */ + public static final int WINDOW_SIZE_SECONDS = 60; + + /** + * The number of windows to record in continuous profiling before data is dropped. + * (60 windows * 1 minute = 1 hour of profiling data) + */ + public static final int HISTORY_SIZE = Integer.getInteger("spark.continuousProfilingHistorySize", 60); + /** * Gets the profiling window for the given time in unix-millis. * @@ -30,7 +46,25 @@ public enum ProfilingWindowUtils { * @return the window */ public static int unixMillisToWindow(long time) { - // one window per minute - return (int) (time / 60_000); + return (int) (time / (WINDOW_SIZE_SECONDS * 1000L)); + } + + /** + * Gets the window at the current time. + * + * @return the window + */ + public static int windowNow() { + return unixMillisToWindow(System.currentTimeMillis()); + } + + /** + * Gets a prune predicate that can be passed to {@link DataAggregator#pruneData(IntPredicate)}. + * + * @return the prune predicate + */ + public static IntPredicate keepHistoryBefore(int currentWindow) { + // windows that were earlier than (currentWindow minus history size) should be pruned + return window -> window < (currentWindow - HISTORY_SIZE); } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/window/ProtoTimeEncoder.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/window/ProtoTimeEncoder.java index edb2309..03da075 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/window/ProtoTimeEncoder.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/window/ProtoTimeEncoder.java @@ -21,7 +21,6 @@ package me.lucko.spark.common.sampler.window; import me.lucko.spark.common.sampler.async.jfr.Dictionary; -import me.lucko.spark.common.sampler.node.AbstractNode; import me.lucko.spark.common.sampler.node.ThreadNode; import java.util.HashMap; @@ -42,7 +41,7 @@ public class ProtoTimeEncoder { public ProtoTimeEncoder(List sourceData) { // get an array of all keys that show up in the source data this.keys = sourceData.stream() - .map(AbstractNode::getTimeWindows) + .map(n -> n.getTimeWindows().stream().mapToInt(i -> i)) .reduce(IntStream.empty(), IntStream::concat) .distinct() .sorted() @@ -70,14 +69,14 @@ public class ProtoTimeEncoder { * @param times a dictionary of times (unix-time millis -> duration in microseconds) * @return the times encoded as a double array */ - public double[] encode(Dictionary times) { + public double[] encode(Map times) { // construct an array of values - length needs to exactly match the // number of keys, even if some values are zero. 
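// (illustration, not commit content: keys = [100, 101, 103] with
// times = {101 -> 30_000us} encodes to [0.0, 30.0, 0.0] -
// sparse windows become explicit zeroes in the dense array)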
double[] array = new double[this.keys.length]; times.forEach((key, value) -> { // get the index for the given key - Integer idx = this.keysToIndex.get((int) key); + Integer idx = this.keysToIndex.get(key); if (idx == null) { throw new RuntimeException("No index for key " + key + " in " + this.keysToIndex.keySet()); } -- cgit From f2d77d875f32f107987c93da1f90529fc6812444 Mon Sep 17 00:00:00 2001 From: Luck Date: Sun, 13 Nov 2022 21:24:57 +0000 Subject: Background profiler --- .../java/me/lucko/spark/common/SparkPlatform.java | 82 ++++++++++----- .../me/lucko/spark/common/command/Arguments.java | 11 +- .../me/lucko/spark/common/command/Command.java | 58 ++++++++++- .../common/command/modules/GcMonitoringModule.java | 22 +--- .../common/command/modules/SamplerModule.java | 115 +++++++++++++++------ .../spark/common/sampler/AbstractSampler.java | 19 ++-- .../me/lucko/spark/common/sampler/Sampler.java | 7 ++ .../lucko/spark/common/sampler/SamplerBuilder.java | 28 ++--- .../spark/common/sampler/SamplerContainer.java | 9 ++ .../spark/common/sampler/SamplerSettings.java | 61 +++++++++++ .../spark/common/sampler/async/AsyncSampler.java | 14 +-- .../spark/common/sampler/java/JavaSampler.java | 20 ++-- .../spark/common/sampler/node/ThreadNode.java | 1 + .../sampler/window/WindowStatisticsCollector.java | 5 + .../me/lucko/spark/common/util/Configuration.java | 10 ++ .../me/lucko/spark/common/util/FormatUtil.java | 20 ++++ 16 files changed, 362 insertions(+), 120 deletions(-) create mode 100644 spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerSettings.java (limited to 'spark-common/src/main/java/me/lucko/spark/common/command') diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java index a015e42..5461ed4 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java +++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java @@ -45,7 +45,10 @@ import me.lucko.spark.common.monitor.ping.PingStatistics; import me.lucko.spark.common.monitor.ping.PlayerPingProvider; import me.lucko.spark.common.monitor.tick.TickStatistics; import me.lucko.spark.common.platform.PlatformStatisticsProvider; +import me.lucko.spark.common.sampler.Sampler; +import me.lucko.spark.common.sampler.SamplerBuilder; import me.lucko.spark.common.sampler.SamplerContainer; +import me.lucko.spark.common.sampler.ThreadGrouper; import me.lucko.spark.common.sampler.source.ClassSourceLookup; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.tick.TickReporter; @@ -64,6 +67,7 @@ import java.time.format.DateTimeFormatter; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicBoolean; @@ -74,7 +78,6 @@ import java.util.stream.Collectors; import static net.kyori.adventure.text.Component.space; import static net.kyori.adventure.text.Component.text; -import static net.kyori.adventure.text.format.NamedTextColor.DARK_GRAY; import static net.kyori.adventure.text.format.NamedTextColor.GOLD; import static net.kyori.adventure.text.format.NamedTextColor.GRAY; import static net.kyori.adventure.text.format.NamedTextColor.RED; @@ -139,7 +142,7 @@ public class SparkPlatform { this.activityLog = new ActivityLog(plugin.getPluginDirectory().resolve("activity.json")); this.activityLog.load(); - this.samplerContainer = new SamplerContainer(); + 
this.samplerContainer = new SamplerContainer(this.configuration.getBoolean("backgroundProfiler", true)); this.tickHook = plugin.createTickHook(); this.tickReporter = plugin.createTickReporter(); @@ -179,6 +182,16 @@ public class SparkPlatform { SparkApi api = new SparkApi(this); this.plugin.registerApi(api); SparkApi.register(api); + + if (this.samplerContainer.isBackgroundProfilerEnabled()) { + this.plugin.log(Level.INFO, "Starting background profiler..."); + try { + startBackgroundProfiler(); + this.plugin.log(Level.INFO, "... done!"); + } catch (Exception e) { + e.printStackTrace(); + } + } } public void disable() { @@ -196,6 +209,8 @@ public class SparkPlatform { module.close(); } + this.samplerContainer.close(); + SparkApi.unregister(); this.temporaryFiles.deleteTemporaryFiles(); @@ -269,6 +284,17 @@ public class SparkPlatform { return this.serverNormalOperationStartTime; } + public void startBackgroundProfiler() { + Sampler sampler = new SamplerBuilder() + .background(true) + .threadDumper(this.plugin.getDefaultThreadDumper()) + .threadGrouper(ThreadGrouper.BY_POOL) + .samplingInterval(this.configuration.getInteger("backgroundProfilerInterval", 10)) + .start(this); + + this.samplerContainer.setActiveSampler(sampler); + } + public Path resolveSaveFile(String prefix, String extension) { Path pluginFolder = this.plugin.getPluginDirectory(); try { @@ -394,7 +420,7 @@ public class SparkPlatform { if (command.aliases().contains(alias)) { resp.setCommandPrimaryAlias(command.primaryAlias()); try { - command.executor().execute(this, sender, resp, new Arguments(rawArgs)); + command.executor().execute(this, sender, resp, new Arguments(rawArgs, command.allowSubCommand())); } catch (Arguments.ParseException e) { resp.replyPrefixed(text(e.getMessage(), RED)); } @@ -442,32 +468,38 @@ public class SparkPlatform { ); for (Command command : commands) { String usage = "/" + getPlugin().getCommandName() + " " + command.primaryAlias(); - ClickEvent clickEvent = ClickEvent.suggestCommand(usage); - sender.reply(text() - .append(text(">", GOLD, BOLD)) - .append(space()) - .append(text().content(usage).color(GRAY).clickEvent(clickEvent).build()) - .build() - ); - for (Command.ArgumentInfo arg : command.arguments()) { - if (arg.requiresParameter()) { + + if (command.allowSubCommand()) { + Map> argumentsBySubCommand = command.arguments().stream() + .collect(Collectors.groupingBy(Command.ArgumentInfo::subCommandName, LinkedHashMap::new, Collectors.toList())); + + argumentsBySubCommand.forEach((subCommand, arguments) -> { + String subCommandUsage = usage + " " + subCommand; + sender.reply(text() - .content(" ") - .append(text("[", DARK_GRAY)) - .append(text("--" + arg.argumentName(), GRAY)) + .append(text(">", GOLD, BOLD)) .append(space()) - .append(text("<" + arg.parameterDescription() + ">", DARK_GRAY)) - .append(text("]", DARK_GRAY)) - .build() - ); - } else { - sender.reply(text() - .content(" ") - .append(text("[", DARK_GRAY)) - .append(text("--" + arg.argumentName(), GRAY)) - .append(text("]", DARK_GRAY)) + .append(text().content(subCommandUsage).color(GRAY).clickEvent(ClickEvent.suggestCommand(subCommandUsage)).build()) .build() ); + + for (Command.ArgumentInfo arg : arguments) { + if (arg.argumentName().isEmpty()) { + continue; + } + sender.reply(arg.toComponent(" ")); + } + }); + } else { + sender.reply(text() + .append(text(">", GOLD, BOLD)) + .append(space()) + .append(text().content(usage).color(GRAY).clickEvent(ClickEvent.suggestCommand(usage)).build()) + .build() + ); + + for (Command.ArgumentInfo 
arg : command.arguments()) { + sender.reply(arg.toComponent(" ")); } } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/Arguments.java b/spark-common/src/main/java/me/lucko/spark/common/command/Arguments.java index 17c49e2..ad8c777 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/Arguments.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/Arguments.java @@ -38,8 +38,9 @@ public class Arguments { private final List rawArgs; private final SetMultimap parsedArgs; + private String parsedSubCommand = null; - public Arguments(List rawArgs) { + public Arguments(List rawArgs, boolean allowSubCommand) { this.rawArgs = rawArgs; this.parsedArgs = HashMultimap.create(); @@ -52,7 +53,9 @@ public class Arguments { Matcher matcher = FLAG_REGEX.matcher(arg); boolean matches = matcher.matches(); - if (flag == null || matches) { + if (i == 0 && allowSubCommand && !matches) { + this.parsedSubCommand = arg; + } else if (flag == null || matches) { if (!matches) { throw new ParseException("Expected flag at position " + i + " but got '" + arg + "' instead!"); } @@ -80,6 +83,10 @@ public class Arguments { return this.rawArgs; } + public String subCommand() { + return this.parsedSubCommand; + } + public int intFlag(String key) { Iterator it = this.parsedArgs.get(key).iterator(); if (it.hasNext()) { diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/Command.java b/spark-common/src/main/java/me/lucko/spark/common/command/Command.java index dad15e6..c6871a9 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/Command.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/Command.java @@ -25,10 +25,17 @@ import com.google.common.collect.ImmutableList; import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.command.sender.CommandSender; +import net.kyori.adventure.text.Component; + import java.util.Collections; import java.util.List; import java.util.Objects; +import static net.kyori.adventure.text.Component.space; +import static net.kyori.adventure.text.Component.text; +import static net.kyori.adventure.text.format.NamedTextColor.DARK_GRAY; +import static net.kyori.adventure.text.format.NamedTextColor.GRAY; + public class Command { public static Builder builder() { @@ -39,12 +46,14 @@ public class Command { private final List arguments; private final Executor executor; private final TabCompleter tabCompleter; + private final boolean allowSubCommand; - private Command(List aliases, List arguments, Executor executor, TabCompleter tabCompleter) { + private Command(List aliases, List arguments, Executor executor, TabCompleter tabCompleter, boolean allowSubCommand) { this.aliases = aliases; this.arguments = arguments; this.executor = executor; this.tabCompleter = tabCompleter; + this.allowSubCommand = allowSubCommand; } public List aliases() { @@ -67,11 +76,16 @@ public class Command { return this.aliases.get(0); } + public boolean allowSubCommand() { + return this.allowSubCommand; + } + public static final class Builder { private final ImmutableList.Builder aliases = ImmutableList.builder(); private final ImmutableList.Builder arguments = ImmutableList.builder(); private Executor executor = null; private TabCompleter tabCompleter = null; + private boolean allowSubCommand = false; Builder() { @@ -82,8 +96,13 @@ public class Command { return this; } + public Builder argumentUsage(String subCommandName, String argumentName, String parameterDescription) { + this.arguments.add(new 
ArgumentInfo(subCommandName, argumentName, parameterDescription)); + return this; + } + public Builder argumentUsage(String argumentName, String parameterDescription) { - this.arguments.add(new ArgumentInfo(argumentName, parameterDescription)); + this.arguments.add(new ArgumentInfo("", argumentName, parameterDescription)); return this; } @@ -97,6 +116,11 @@ public class Command { return this; } + public Builder allowSubCommand(boolean allowSubCommand) { + this.allowSubCommand = allowSubCommand; + return this; + } + public Command build() { List aliases = this.aliases.build(); if (aliases.isEmpty()) { @@ -108,7 +132,7 @@ public class Command { if (this.tabCompleter == null) { this.tabCompleter = TabCompleter.empty(); } - return new Command(aliases, this.arguments.build(), this.executor, this.tabCompleter); + return new Command(aliases, this.arguments.build(), this.executor, this.tabCompleter, this.allowSubCommand); } } @@ -127,14 +151,20 @@ public class Command { } public static final class ArgumentInfo { + private final String subCommandName; private final String argumentName; private final String parameterDescription; - public ArgumentInfo(String argumentName, String parameterDescription) { + public ArgumentInfo(String subCommandName, String argumentName, String parameterDescription) { + this.subCommandName = subCommandName; this.argumentName = argumentName; this.parameterDescription = parameterDescription; } + public String subCommandName() { + return this.subCommandName; + } + public String argumentName() { return this.argumentName; } @@ -146,6 +176,26 @@ public class Command { public boolean requiresParameter() { return this.parameterDescription != null; } + + public Component toComponent(String padding) { + if (requiresParameter()) { + return text() + .content(padding) + .append(text("[", DARK_GRAY)) + .append(text("--" + argumentName(), GRAY)) + .append(space()) + .append(text("<" + parameterDescription() + ">", DARK_GRAY)) + .append(text("]", DARK_GRAY)) + .build(); + } else { + return text() + .content(padding) + .append(text("[", DARK_GRAY)) + .append(text("--" + argumentName(), GRAY)) + .append(text("]", DARK_GRAY)) + .build(); + } + } } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/GcMonitoringModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/GcMonitoringModule.java index 2ce83fd..a2da0a0 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/GcMonitoringModule.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/GcMonitoringModule.java @@ -123,7 +123,7 @@ public class GcMonitoringModule implements CommandModule { ); report.add(text() .content(" ") - .append(text(formatTime((long) averageFrequency), WHITE)) + .append(text(FormatUtil.formatSeconds((long) averageFrequency / 1000), WHITE)) .append(text(" avg frequency", GRAY)) .build() ); @@ -153,26 +153,6 @@ public class GcMonitoringModule implements CommandModule { ); } - private static String formatTime(long millis) { - if (millis <= 0) { - return "0s"; - } - - long second = millis / 1000; - long minute = second / 60; - second = second % 60; - - StringBuilder sb = new StringBuilder(); - if (minute != 0) { - sb.append(minute).append("m "); - } - if (second != 0) { - sb.append(second).append("s "); - } - - return sb.toString().trim(); - } - private static class ReportingGcMonitor extends GarbageCollectionMonitor implements GarbageCollectionMonitor.Listener { private final SparkPlatform platform; private final 
CommandResponseHandler resp; diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java index 00bf1a9..6a76748 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java @@ -39,6 +39,7 @@ import me.lucko.spark.common.sampler.async.AsyncSampler; import me.lucko.spark.common.sampler.node.MergeMode; import me.lucko.spark.common.sampler.source.ClassSourceLookup; import me.lucko.spark.common.tick.TickHook; +import me.lucko.spark.common.util.FormatUtil; import me.lucko.spark.common.util.MethodDisambiguator; import me.lucko.spark.proto.SparkSamplerProtos; @@ -72,31 +73,36 @@ public class SamplerModule implements CommandModule { public void registerCommands(Consumer consumer) { consumer.accept(Command.builder() .aliases("profiler", "sampler") - .argumentUsage("info", null) - .argumentUsage("stop", null) - .argumentUsage("timeout", "timeout seconds") - .argumentUsage("thread *", null) - .argumentUsage("thread", "thread name") - .argumentUsage("only-ticks-over", "tick length millis") - .argumentUsage("interval", "interval millis") + .allowSubCommand(true) + .argumentUsage("info", "", null) + .argumentUsage("start", "timeout", "timeout seconds") + .argumentUsage("start", "thread *", null) + .argumentUsage("start", "thread", "thread name") + .argumentUsage("start", "only-ticks-over", "tick length millis") + .argumentUsage("start", "interval", "interval millis") + .argumentUsage("stop", "", null) + .argumentUsage("cancel", "", null) .executor(this::profiler) .tabCompleter((platform, sender, arguments) -> { - if (arguments.contains("--info") || arguments.contains("--cancel")) { - return Collections.emptyList(); + List opts = Collections.emptyList(); + + if (arguments.size() > 0) { + String subCommand = arguments.get(0); + if (subCommand.equals("stop") || subCommand.equals("upload")) { + opts = new ArrayList<>(Arrays.asList("--comment", "--save-to-file")); + opts.removeAll(arguments); + } + if (subCommand.equals("start")) { + opts = new ArrayList<>(Arrays.asList("--timeout", "--regex", "--combine-all", + "--not-combined", "--interval", "--only-ticks-over", "--force-java-sampler")); + opts.removeAll(arguments); + opts.add("--thread"); // allowed multiple times + } } - if (arguments.contains("--stop") || arguments.contains("--upload")) { - return TabCompleter.completeForOpts(arguments, "--comment", "--save-to-file"); - } - - List opts = new ArrayList<>(Arrays.asList("--info", "--stop", "--cancel", - "--timeout", "--regex", "--combine-all", "--not-combined", "--interval", - "--only-ticks-over", "--force-java-sampler")); - opts.removeAll(arguments); - opts.add("--thread"); // allowed multiple times - return TabCompleter.create() - .from(0, CompletionSupplier.startsWith(opts)) + .at(0, CompletionSupplier.startsWith(Arrays.asList("info", "start", "stop", "cancel"))) + .from(1, CompletionSupplier.startsWith(opts)) .complete(arguments); }) .build() @@ -104,28 +110,48 @@ public class SamplerModule implements CommandModule { } private void profiler(SparkPlatform platform, CommandSender sender, CommandResponseHandler resp, Arguments arguments) { - if (arguments.boolFlag("info")) { + String subCommand = arguments.subCommand() == null ? 
"" : arguments.subCommand(); + + if (subCommand.equals("info") || arguments.boolFlag("info")) { profilerInfo(platform, resp); return; } - if (arguments.boolFlag("cancel")) { + if (subCommand.equals("cancel") || arguments.boolFlag("cancel")) { profilerCancel(platform, resp); return; } - if (arguments.boolFlag("stop") || arguments.boolFlag("upload")) { + if (subCommand.equals("stop") || arguments.boolFlag("stop") || arguments.boolFlag("upload")) { profilerStop(platform, sender, resp, arguments); return; } - profilerStart(platform, sender, resp, arguments); + if (subCommand.equals("start") || arguments.boolFlag("start")) { + profilerStart(platform, sender, resp, arguments); + return; + } + + if (arguments.raw().isEmpty()) { + profilerInfo(platform, resp); + } else { + profilerStart(platform, sender, resp, arguments); + } } private void profilerStart(SparkPlatform platform, CommandSender sender, CommandResponseHandler resp, Arguments arguments) { - if (platform.getSamplerContainer().getActiveSampler() != null) { - profilerInfo(platform, resp); - return; + Sampler previousSampler = platform.getSamplerContainer().getActiveSampler(); + if (previousSampler != null) { + if (previousSampler.isRunningInBackground()) { + // there is a background profiler running - stop that first + resp.replyPrefixed(text("Stopping the background profiler before starting... please wait")); + previousSampler.stop(); + platform.getSamplerContainer().unsetActiveSampler(previousSampler); + } else { + // there is a non-background profiler running - tell the user + profilerInfo(platform, resp); + return; + } } int timeoutSeconds = arguments.intFlag("timeout"); @@ -212,9 +238,9 @@ public class SamplerModule implements CommandModule { if (timeoutSeconds == -1) { resp.broadcastPrefixed(text("It will run in the background until it is stopped by an admin.")); resp.broadcastPrefixed(text("To stop the profiler and upload the results, run:")); - resp.broadcastPrefixed(cmdPrompt("/" + platform.getPlugin().getCommandName() + " profiler --stop")); + resp.broadcastPrefixed(cmdPrompt("/" + platform.getPlugin().getCommandName() + " profiler stop")); } else { - resp.broadcastPrefixed(text("The results will be automatically returned after the profiler has been running for " + timeoutSeconds + " seconds.")); + resp.broadcastPrefixed(text("The results will be automatically returned after the profiler has been running for " + FormatUtil.formatSeconds(timeoutSeconds) + ".")); } CompletableFuture future = sampler.getFuture(); @@ -248,24 +274,34 @@ public class SamplerModule implements CommandModule { if (sampler == null) { resp.replyPrefixed(text("The profiler isn't running!")); resp.replyPrefixed(text("To start a new one, run:")); - resp.replyPrefixed(cmdPrompt("/" + platform.getPlugin().getCommandName() + " profiler")); + resp.replyPrefixed(cmdPrompt("/" + platform.getPlugin().getCommandName() + " profiler start")); } else { resp.replyPrefixed(text("Profiler is already running!", GOLD)); long runningTime = (System.currentTimeMillis() - sampler.getStartTime()) / 1000L; - resp.replyPrefixed(text("So far, it has profiled for " + runningTime + " seconds.")); + + if (sampler.isRunningInBackground()) { + resp.replyPrefixed(text() + .append(text("It was started ")) + .append(text("automatically", WHITE)) + .append(text(" when spark enabled and has been running in the background for " + FormatUtil.formatSeconds(runningTime) + ".")) + .build() + ); + } else { + resp.replyPrefixed(text("So far, it has profiled for " + 
FormatUtil.formatSeconds(runningTime) + ".")); + } long timeout = sampler.getAutoEndTime(); if (timeout == -1) { resp.replyPrefixed(text("To stop the profiler and upload the results, run:")); - resp.replyPrefixed(cmdPrompt("/" + platform.getPlugin().getCommandName() + " profiler --stop")); + resp.replyPrefixed(cmdPrompt("/" + platform.getPlugin().getCommandName() + " profiler stop")); } else { long timeoutDiff = (timeout - System.currentTimeMillis()) / 1000L; - resp.replyPrefixed(text("It is due to complete automatically and upload results in " + timeoutDiff + " seconds.")); + resp.replyPrefixed(text("It is due to complete automatically and upload results in " + FormatUtil.formatSeconds(timeoutDiff) + ".")); } resp.replyPrefixed(text("To cancel the profiler without uploading the results, run:")); - resp.replyPrefixed(cmdPrompt("/" + platform.getPlugin().getCommandName() + " profiler --cancel")); + resp.replyPrefixed(cmdPrompt("/" + platform.getPlugin().getCommandName() + " profiler cancel")); } } @@ -299,6 +335,17 @@ public class SamplerModule implements CommandModule { MethodDisambiguator methodDisambiguator = new MethodDisambiguator(); MergeMode mergeMode = arguments.boolFlag("separate-parent-calls") ? MergeMode.separateParentCalls(methodDisambiguator) : MergeMode.sameMethod(methodDisambiguator); handleUpload(platform, resp, sampler, comment, mergeMode, saveToFile); + + // if the previous sampler was running in the background, create a new one + if (platform.getSamplerContainer().isBackgroundProfilerEnabled()) { + platform.startBackgroundProfiler(); + + resp.broadcastPrefixed(text() + .append(text("Restarted the background profiler. ")) + .append(text("(If you don't want this to happen, run: /" + platform.getPlugin().getCommandName() + " profiler cancel)", DARK_GRAY)) + .build() + ); + } } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java index c650738..feefd66 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java @@ -32,8 +32,6 @@ import me.lucko.spark.common.sampler.source.ClassSourceLookup; import me.lucko.spark.common.sampler.source.SourceMetadata; import me.lucko.spark.common.sampler.window.ProtoTimeEncoder; import me.lucko.spark.common.sampler.window.WindowStatisticsCollector; -import me.lucko.spark.common.tick.TickHook; -import me.lucko.spark.proto.SparkProtos; import me.lucko.spark.proto.SparkSamplerProtos.SamplerData; import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata; @@ -64,6 +62,9 @@ public abstract class AbstractSampler implements Sampler { /** The unix timestamp (in millis) when this sampler should automatically complete. 
*/ protected final long autoEndTime; // -1 for nothing + /** If the sampler is running in the background */ + protected boolean background; + /** Collects statistics for each window in the sample */ protected final WindowStatisticsCollector windowStatisticsCollector; @@ -73,11 +74,12 @@ public abstract class AbstractSampler implements Sampler { /** The garbage collector statistics when profiling started */ protected Map initialGcStats; - protected AbstractSampler(SparkPlatform platform, int interval, ThreadDumper threadDumper, long autoEndTime) { + protected AbstractSampler(SparkPlatform platform, SamplerSettings settings) { this.platform = platform; - this.interval = interval; - this.threadDumper = threadDumper; - this.autoEndTime = autoEndTime; + this.interval = settings.interval(); + this.threadDumper = settings.threadDumper(); + this.autoEndTime = settings.autoEndTime(); + this.background = settings.runningInBackground(); this.windowStatisticsCollector = new WindowStatisticsCollector(platform); } @@ -94,6 +96,11 @@ public abstract class AbstractSampler implements Sampler { return this.autoEndTime; } + @Override + public boolean isRunningInBackground() { + return this.background; + } + @Override public CompletableFuture getFuture() { return this.future; diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java index e06cba6..5d2026d 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java @@ -57,6 +57,13 @@ public interface Sampler { */ long getAutoEndTime(); + /** + * If this sampler is running in the background. (wasn't started by a specific user) + * + * @return true if the sampler is running in the background + */ + boolean isRunningInBackground(); + /** * Gets a future to encapsulate the completion of the sampler * diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java index 382950a..ec635ef 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java @@ -38,7 +38,8 @@ public class SamplerBuilder { private boolean ignoreSleeping = false; private boolean ignoreNative = false; private boolean useAsyncProfiler = true; - private long timeout = -1; + private long autoEndTime = -1; + private boolean background = false; private ThreadDumper threadDumper = ThreadDumper.ALL; private ThreadGrouper threadGrouper = ThreadGrouper.BY_NAME; @@ -57,7 +58,12 @@ public class SamplerBuilder { if (timeout <= 0) { throw new IllegalArgumentException("timeout > 0"); } - this.timeout = System.currentTimeMillis() + unit.toMillis(timeout); + this.autoEndTime = System.currentTimeMillis() + unit.toMillis(timeout); + return this; + } + + public SamplerBuilder background(boolean background) { + this.background = background; return this; } @@ -95,26 +101,22 @@ public class SamplerBuilder { public Sampler start(SparkPlatform platform) { boolean onlyTicksOverMode = this.ticksOver != -1 && this.tickHook != null; boolean canUseAsyncProfiler = this.useAsyncProfiler && + !onlyTicksOverMode && !(this.ignoreSleeping || this.ignoreNative) && !(this.threadDumper instanceof ThreadDumper.Regex) && AsyncProfilerAccess.getInstance(platform).checkSupported(platform); int intervalMicros = (int) 
(this.samplingInterval * 1000d); + SamplerSettings settings = new SamplerSettings(intervalMicros, this.threadDumper, this.threadGrouper, this.autoEndTime, this.background); Sampler sampler; - if (onlyTicksOverMode) { - sampler = new JavaSampler(platform, intervalMicros, this.threadDumper, - this.threadGrouper, this.timeout, this.ignoreSleeping, this.ignoreNative, - this.tickHook, this.ticksOver); - - } else if (canUseAsyncProfiler) { - sampler = new AsyncSampler(platform, intervalMicros, this.threadDumper, - this.threadGrouper, this.timeout); - + if (canUseAsyncProfiler) { + sampler = new AsyncSampler(platform, settings); + } else if (onlyTicksOverMode) { + sampler = new JavaSampler(platform, settings, this.ignoreSleeping, this.ignoreNative, this.tickHook, this.ticksOver); } else { - sampler = new JavaSampler(platform, intervalMicros, this.threadDumper, - this.threadGrouper, this.timeout, this.ignoreSleeping, this.ignoreNative); + sampler = new JavaSampler(platform, settings, this.ignoreSleeping, this.ignoreNative); } sampler.start(); diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerContainer.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerContainer.java index 55913d8..f56dee5 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerContainer.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerContainer.java @@ -28,6 +28,11 @@ import java.util.concurrent.atomic.AtomicReference; public class SamplerContainer implements AutoCloseable { private final AtomicReference activeSampler = new AtomicReference<>(); + private final boolean backgroundProfilerEnabled; + + public SamplerContainer(boolean backgroundProfilerEnabled) { + this.backgroundProfilerEnabled = backgroundProfilerEnabled; + } /** * Gets the active sampler, or null if a sampler is not active. @@ -68,6 +73,10 @@ public class SamplerContainer implements AutoCloseable { } } + public boolean isBackgroundProfilerEnabled() { + return this.backgroundProfilerEnabled; + } + @Override public void close() { stopActiveSampler(); diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerSettings.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerSettings.java new file mode 100644 index 0000000..6e55a43 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerSettings.java @@ -0,0 +1,61 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +package me.lucko.spark.common.sampler; + +/** + * Base settings for all samplers + */ +public class SamplerSettings { + + private final int interval; + private final ThreadDumper threadDumper; + private final ThreadGrouper threadGrouper; + private final long autoEndTime; + private final boolean runningInBackground; + + public SamplerSettings(int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long autoEndTime, boolean runningInBackground) { + this.interval = interval; + this.threadDumper = threadDumper; + this.threadGrouper = threadGrouper; + this.autoEndTime = autoEndTime; + this.runningInBackground = runningInBackground; + } + + public int interval() { + return this.interval; + } + + public ThreadDumper threadDumper() { + return this.threadDumper; + } + + public ThreadGrouper threadGrouper() { + return this.threadGrouper; + } + + public long autoEndTime() { + return this.autoEndTime; + } + + public boolean runningInBackground() { + return this.runningInBackground; + } +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java index cbc81c7..d6cfd4f 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java @@ -25,8 +25,7 @@ import com.google.common.util.concurrent.ThreadFactoryBuilder; import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.command.sender.CommandSender; import me.lucko.spark.common.sampler.AbstractSampler; -import me.lucko.spark.common.sampler.ThreadDumper; -import me.lucko.spark.common.sampler.ThreadGrouper; +import me.lucko.spark.common.sampler.SamplerSettings; import me.lucko.spark.common.sampler.node.MergeMode; import me.lucko.spark.common.sampler.source.ClassSourceLookup; import me.lucko.spark.common.sampler.window.ProfilingWindowUtils; @@ -36,6 +35,7 @@ import me.lucko.spark.proto.SparkSamplerProtos.SamplerData; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; +import java.util.function.IntPredicate; /** * A sampler implementation using async-profiler. 
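(A minimal usage sketch of the new settings object, given a SparkPlatform "platform". It assumes only the constructor and accessors shown above; the values are illustrative and mirror the background-profiler defaults: a 10ms interval expressed in microseconds, threads grouped by pool, no auto-end time.)

    SamplerSettings settings = new SamplerSettings(
            10_000,                // interval in microseconds (10ms)
            ThreadDumper.ALL,      // sample every thread
            ThreadGrouper.BY_POOL, // group threads by pool name
            -1,                    // autoEndTime: -1 = run until stopped
            true);                 // runningInBackground
    Sampler sampler = new AsyncSampler(platform, settings);
    sampler.start();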
@@ -55,10 +55,10 @@ public class AsyncSampler extends AbstractSampler { /** The executor used for scheduling and management */ private ScheduledExecutorService scheduler; - public AsyncSampler(SparkPlatform platform, int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime) { - super(platform, interval, threadDumper, endTime); + public AsyncSampler(SparkPlatform platform, SamplerSettings settings) { + super(platform, settings); this.profilerAccess = AsyncProfilerAccess.getInstance(platform); - this.dataAggregator = new AsyncDataAggregator(threadGrouper); + this.dataAggregator = new AsyncDataAggregator(settings.threadGrouper()); this.scheduler = Executors.newSingleThreadScheduledExecutor( new ThreadFactoryBuilder().setNameFormat("spark-asyncsampler-worker-thread").build() ); @@ -124,7 +124,9 @@ public class AsyncSampler extends AbstractSampler { previousJob.aggregate(this.dataAggregator); // prune data older than the history size - this.dataAggregator.pruneData(ProfilingWindowUtils.keepHistoryBefore(window)); + IntPredicate predicate = ProfilingWindowUtils.keepHistoryBefore(window); + this.dataAggregator.pruneData(predicate); + this.windowStatisticsCollector.pruneStatistics(predicate); } } catch (Throwable e) { e.printStackTrace(); diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java index 6aad5e3..95c3508 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java @@ -25,8 +25,7 @@ import com.google.common.util.concurrent.ThreadFactoryBuilder; import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.command.sender.CommandSender; import me.lucko.spark.common.sampler.AbstractSampler; -import me.lucko.spark.common.sampler.ThreadDumper; -import me.lucko.spark.common.sampler.ThreadGrouper; +import me.lucko.spark.common.sampler.SamplerSettings; import me.lucko.spark.common.sampler.node.MergeMode; import me.lucko.spark.common.sampler.source.ClassSourceLookup; import me.lucko.spark.common.sampler.window.ProfilingWindowUtils; @@ -42,6 +41,7 @@ import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.IntPredicate; /** * A sampler implementation using Java (WarmRoast). 
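(For orientation before the JavaSampler hunks below: both sampler implementations now perform the same window-rotation bookkeeping when a window closes. A condensed sketch using only the APIs added in this series, with the aggregator and statistics collector instances assumed to exist:)

    int window = ProfilingWindowUtils.windowNow();
    IntPredicate predicate = ProfilingWindowUtils.keepHistoryBefore(window);
    dataAggregator.pruneData(predicate);                  // drop samples older than HISTORY_SIZE windows
    windowStatisticsCollector.pruneStatistics(predicate); // keep the per-window stats in step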
@@ -66,14 +66,14 @@ public class JavaSampler extends AbstractSampler implements Runnable { /** The last window that was profiled */ private final AtomicInteger lastWindow = new AtomicInteger(); - public JavaSampler(SparkPlatform platform, int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime, boolean ignoreSleeping, boolean ignoreNative) { - super(platform, interval, threadDumper, endTime); - this.dataAggregator = new SimpleDataAggregator(this.workerPool, threadGrouper, interval, ignoreSleeping, ignoreNative); + public JavaSampler(SparkPlatform platform, SamplerSettings settings, boolean ignoreSleeping, boolean ignoreNative) { + super(platform, settings); + this.dataAggregator = new SimpleDataAggregator(this.workerPool, settings.threadGrouper(), settings.interval(), ignoreSleeping, ignoreNative); } - public JavaSampler(SparkPlatform platform, int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime, boolean ignoreSleeping, boolean ignoreNative, TickHook tickHook, int tickLengthThreshold) { - super(platform, interval, threadDumper, endTime); - this.dataAggregator = new TickedDataAggregator(this.workerPool, threadGrouper, interval, ignoreSleeping, ignoreNative, tickHook, tickLengthThreshold); + public JavaSampler(SparkPlatform platform, SamplerSettings settings, boolean ignoreSleeping, boolean ignoreNative, TickHook tickHook, int tickLengthThreshold) { + super(platform, settings); + this.dataAggregator = new TickedDataAggregator(this.workerPool, settings.threadGrouper(), settings.interval(), ignoreSleeping, ignoreNative, tickHook, tickLengthThreshold); } @Override @@ -151,7 +151,9 @@ public class JavaSampler extends AbstractSampler implements Runnable { JavaSampler.this.windowStatisticsCollector.measureNow(previousWindow); // prune data older than the history size - JavaSampler.this.dataAggregator.pruneData(ProfilingWindowUtils.keepHistoryBefore(this.window)); + IntPredicate predicate = ProfilingWindowUtils.keepHistoryBefore(this.window); + JavaSampler.this.dataAggregator.pruneData(predicate); + JavaSampler.this.windowStatisticsCollector.pruneStatistics(predicate); } } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java index 5035046..37ff359 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java @@ -130,6 +130,7 @@ public final class ThreadNode extends AbstractNode { } } + removeTimeWindows(predicate); return getTimeWindows().isEmpty(); } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/window/WindowStatisticsCollector.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/window/WindowStatisticsCollector.java index 47f739d..7da62fa 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/window/WindowStatisticsCollector.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/window/WindowStatisticsCollector.java @@ -30,6 +30,7 @@ import me.lucko.spark.proto.SparkProtos; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.IntPredicate; /** * Collects statistics for each profiling window. 
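(A worked example of the retention cut-off that the pruneStatistics hunk below hooks into, using the HISTORY_SIZE = 60 default from ProfilingWindowUtils; the window numbers are illustrative unix-minute indices:)

    IntPredicate prune = ProfilingWindowUtils.keepHistoryBefore(27_770_000);
    prune.test(27_769_939); // true  -> pruned, more than 60 windows (1 hour) old
    prune.test(27_769_940); // false -> kept, exactly 60 windows old
    prune.test(27_770_000); // false -> kept, the current window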
@@ -116,6 +117,10 @@ public class WindowStatisticsCollector {
         }
     }
 
+    public void pruneStatistics(IntPredicate predicate) {
+        this.stats.keySet().removeIf(predicate::test);
+    }
+
     public Map export() {
         return this.stats;
     }
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/Configuration.java b/spark-common/src/main/java/me/lucko/spark/common/util/Configuration.java
index 7588645..ce63878 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/util/Configuration.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/Configuration.java
@@ -67,4 +67,14 @@ public final class Configuration {
         return val.isBoolean() ? val.getAsBoolean() : def;
     }
 
+    public int getInteger(String path, int def) {
+        JsonElement el = this.root.get(path);
+        if (el == null || !el.isJsonPrimitive()) {
+            return def;
+        }
+
+        JsonPrimitive val = el.getAsJsonPrimitive();
+        return val.isNumber() ? val.getAsInt() : def;
+    }
+
 }
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/FormatUtil.java b/spark-common/src/main/java/me/lucko/spark/common/util/FormatUtil.java
index c4a3d66..1ee3b0f 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/util/FormatUtil.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/FormatUtil.java
@@ -62,4 +62,24 @@ public enum FormatUtil {
                 .append(Component.text(unit))
                 .build();
     }
+
+    public static String formatSeconds(long seconds) {
+        if (seconds <= 0) {
+            return "0s";
+        }
+
+        long second = seconds;
+        long minute = second / 60;
+        second = second % 60;
+
+        StringBuilder sb = new StringBuilder();
+        if (minute != 0) {
+            sb.append(minute).append("m ");
+        }
+        if (second != 0) {
+            sb.append(second).append("s ");
+        }
+
+        return sb.toString().trim();
+    }
 }
-- cgit

From b9f0e49ed17a7c32f36f31141c02529359944d03 Mon Sep 17 00:00:00 2001
From: Luck
Date: Sun, 13 Nov 2022 22:42:49 +0000
Subject: Add upload subcommand as per documentation oops

---
 .../main/java/me/lucko/spark/common/command/modules/SamplerModule.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
(limited to 'spark-common/src/main/java/me/lucko/spark/common/command')

diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
index 6a76748..00cd4fa 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
@@ -122,7 +122,7 @@ public class SamplerModule implements CommandModule {
             return;
         }
 
-        if (subCommand.equals("stop") || arguments.boolFlag("stop") || arguments.boolFlag("upload")) {
+        if (subCommand.equals("stop") || subCommand.equals("upload") || arguments.boolFlag("stop") || arguments.boolFlag("upload")) {
             profilerStop(platform, sender, resp, arguments);
             return;
         }
-- cgit

From 65f9460a1a27e930b3749525766fd44d57b65300 Mon Sep 17 00:00:00 2001
From: Luck
Date: Sat, 26 Nov 2022 23:00:53 +0000
Subject: Include player/entity/chunk counts in window statistics

---
 .../spark/bukkit/BukkitWorldInfoProvider.java      | 44 ++++++++++-
 .../common/command/modules/SamplerModule.java      |  6 +-
 .../platform/PlatformStatisticsProvider.java       |  7 +-
 .../platform/world/AsyncWorldInfoProvider.java     | 90 ++++++++++++++++++++++
 .../common/platform/world/WorldInfoProvider.java   | 57 ++++++++++++--
 .../platform/world/WorldStatisticsProvider.java    | 37 +--------
 .../spark/common/sampler/AbstractSampler.java      |  2 +-
 .../me/lucko/spark/common/sampler/Sampler.java     |  2
+- .../spark/common/sampler/SamplerContainer.java | 6 +- .../common/sampler/async/AsyncProfilerJob.java | 6 +- .../spark/common/sampler/async/AsyncSampler.java | 14 ++-- .../spark/common/sampler/java/JavaSampler.java | 14 ++-- .../sampler/window/WindowStatisticsCollector.java | 15 ++++ spark-common/src/main/proto/spark/spark.proto | 6 ++ .../spark/fabric/FabricWorldInfoProvider.java | 42 +++++++++- .../fabric/mixin/ClientEntityManagerAccessor.java | 4 + .../fabric/mixin/ServerEntityManagerAccessor.java | 4 + .../lucko/spark/forge/ForgeWorldInfoProvider.java | 42 +++++++++- .../main/resources/META-INF/accesstransformer.cfg | 4 +- .../spark/sponge/Sponge7WorldInfoProvider.java | 21 ++++- .../spark/sponge/Sponge8WorldInfoProvider.java | 21 ++++- 21 files changed, 366 insertions(+), 78 deletions(-) create mode 100644 spark-common/src/main/java/me/lucko/spark/common/platform/world/AsyncWorldInfoProvider.java (limited to 'spark-common/src/main/java/me/lucko/spark/common/command') diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitWorldInfoProvider.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitWorldInfoProvider.java index 79c2715..8f876cf 100644 --- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitWorldInfoProvider.java +++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitWorldInfoProvider.java @@ -34,6 +34,21 @@ import java.util.ArrayList; import java.util.List; public class BukkitWorldInfoProvider implements WorldInfoProvider { + private static final boolean SUPPORTS_PAPER_COUNT_METHODS; + + static { + boolean supportsPaperCountMethods = false; + try { + World.class.getMethod("getEntityCount"); + World.class.getMethod("getTileEntityCount"); + World.class.getMethod("getChunkCount"); + supportsPaperCountMethods = true; + } catch (Exception e) { + // ignored + } + SUPPORTS_PAPER_COUNT_METHODS = supportsPaperCountMethods; + } + private final Server server; public BukkitWorldInfoProvider(Server server) { @@ -41,8 +56,33 @@ public class BukkitWorldInfoProvider implements WorldInfoProvider { } @Override - public Result poll() { - Result data = new Result<>(); + public CountsResult pollCounts() { + int players = this.server.getOnlinePlayers().size(); + int entities = 0; + int tileEntities = 0; + int chunks = 0; + + for (World world : this.server.getWorlds()) { + if (SUPPORTS_PAPER_COUNT_METHODS) { + entities += world.getEntityCount(); + tileEntities += world.getTileEntityCount(); + chunks += world.getChunkCount(); + } else { + entities += world.getEntities().size(); + Chunk[] chunksArray = world.getLoadedChunks(); + for (Chunk chunk : chunksArray) { + tileEntities += chunk.getTileEntities().length; + } + chunks += chunksArray.length; + } + } + + return new CountsResult(players, entities, tileEntities, chunks); + } + + @Override + public ChunksResult pollChunks() { + ChunksResult data = new ChunksResult<>(); for (World world : this.server.getWorlds()) { Chunk[] chunks = world.getLoadedChunks(); diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java index 00cd4fa..f576eac 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java @@ -145,7 +145,7 @@ public class SamplerModule implements CommandModule { if (previousSampler.isRunningInBackground()) { // there is a background profiler running - stop that 
first resp.replyPrefixed(text("Stopping the background profiler before starting... please wait")); - previousSampler.stop(); + previousSampler.stop(true); platform.getSamplerContainer().unsetActiveSampler(previousSampler); } else { // there is a non-background profiler running - tell the user @@ -310,7 +310,7 @@ public class SamplerModule implements CommandModule { if (sampler == null) { resp.replyPrefixed(text("There isn't an active profiler running.")); } else { - platform.getSamplerContainer().stopActiveSampler(); + platform.getSamplerContainer().stopActiveSampler(true); resp.broadcastPrefixed(text("Profiler has been cancelled.", GOLD)); } } @@ -322,7 +322,7 @@ public class SamplerModule implements CommandModule { resp.replyPrefixed(text("There isn't an active profiler running.")); } else { platform.getSamplerContainer().unsetActiveSampler(sampler); - sampler.stop(); + sampler.stop(false); boolean saveToFile = arguments.boolFlag("save-to-file"); if (saveToFile) { diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java index 1eb9753..fc7e78a 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java @@ -31,7 +31,7 @@ import me.lucko.spark.common.monitor.net.NetworkMonitor; import me.lucko.spark.common.monitor.os.OperatingSystemInfo; import me.lucko.spark.common.monitor.ping.PingStatistics; import me.lucko.spark.common.monitor.tick.TickStatistics; -import me.lucko.spark.common.platform.world.WorldInfoProvider; +import me.lucko.spark.common.platform.world.AsyncWorldInfoProvider; import me.lucko.spark.common.platform.world.WorldStatisticsProvider; import me.lucko.spark.proto.SparkProtos.PlatformStatistics; import me.lucko.spark.proto.SparkProtos.SystemStatistics; @@ -188,8 +188,9 @@ public class PlatformStatisticsProvider { } try { - WorldInfoProvider worldInfo = this.platform.getPlugin().createWorldInfoProvider(); - WorldStatisticsProvider worldStatisticsProvider = new WorldStatisticsProvider(this.platform, worldInfo); + WorldStatisticsProvider worldStatisticsProvider = new WorldStatisticsProvider( + new AsyncWorldInfoProvider(this.platform, this.platform.getPlugin().createWorldInfoProvider()) + ); WorldStatistics worldStatistics = worldStatisticsProvider.getWorldStatistics(); if (worldStatistics != null) { builder.setWorld(worldStatistics); diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/AsyncWorldInfoProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/AsyncWorldInfoProvider.java new file mode 100644 index 0000000..82cddef --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/AsyncWorldInfoProvider.java @@ -0,0 +1,90 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. 
+ * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.platform.world; + +import me.lucko.spark.common.SparkPlatform; +import me.lucko.spark.common.SparkPlugin; + +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; +import java.util.function.Function; +import java.util.logging.Level; + +/** + * Async-friendly wrapper around {@link WorldInfoProvider}. + */ +public class AsyncWorldInfoProvider { + private static final int TIMEOUT_SECONDS = 5; + + private final SparkPlatform platform; + private final WorldInfoProvider provider; + + public AsyncWorldInfoProvider(SparkPlatform platform, WorldInfoProvider provider) { + this.platform = platform; + this.provider = provider == WorldInfoProvider.NO_OP ? null : provider; + } + + private CompletableFuture async(Function function) { + if (this.provider == null) { + return null; + } + + if (this.provider.mustCallSync()) { + SparkPlugin plugin = this.platform.getPlugin(); + return CompletableFuture.supplyAsync(() -> function.apply(this.provider), plugin::executeSync); + } else { + return CompletableFuture.completedFuture(function.apply(this.provider)); + } + } + + private T get(CompletableFuture future) { + if (future == null) { + return null; + } + + try { + return future.get(TIMEOUT_SECONDS, TimeUnit.SECONDS); + } catch (InterruptedException | ExecutionException e) { + throw new RuntimeException(e); + } catch (TimeoutException e) { + this.platform.getPlugin().log(Level.WARNING, "Timed out waiting for world statistics"); + return null; + } + } + + public CompletableFuture pollCounts() { + return async(WorldInfoProvider::pollCounts); + } + + public CompletableFuture>> pollChunks() { + return async(WorldInfoProvider::pollChunks); + } + + public WorldInfoProvider.CountsResult getCounts() { + return get(pollCounts()); + } + + public WorldInfoProvider.ChunksResult> getChunks() { + return get(pollChunks()); + } +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldInfoProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldInfoProvider.java index 9494816..7fb581d 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldInfoProvider.java +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldInfoProvider.java @@ -29,20 +29,37 @@ import java.util.Map; */ public interface WorldInfoProvider { - WorldInfoProvider NO_OP = () -> null; + WorldInfoProvider NO_OP = new WorldInfoProvider() { + @Override + public CountsResult pollCounts() { + return null; + } + + @Override + public ChunksResult> pollChunks() { + return null; + } + }; + + /** + * Polls for counts. + * + * @return the counts + */ + CountsResult pollCounts(); /** - * Polls for information. + * Polls for chunk information. 
* - * @return the information + * @return the chunk information */ - Result> poll(); + ChunksResult> pollChunks(); default boolean mustCallSync() { return true; } - final class Result { + final class ChunksResult> { private final Map> worlds = new HashMap<>(); public void put(String worldName, List chunks) { @@ -54,4 +71,34 @@ public interface WorldInfoProvider { } } + final class CountsResult { + private final int players; + private final int entities; + private final int tileEntities; + private final int chunks; + + public CountsResult(int players, int entities, int tileEntities, int chunks) { + this.players = players; + this.entities = entities; + this.tileEntities = tileEntities; + this.chunks = chunks; + } + + public int players() { + return this.players; + } + + public int entities() { + return this.entities; + } + + public int tileEntities() { + return this.tileEntities; + } + + public int chunks() { + return this.chunks; + } + } + } diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java index 80c35a6..7e63222 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/WorldStatisticsProvider.java @@ -20,8 +20,6 @@ package me.lucko.spark.common.platform.world; -import me.lucko.spark.common.SparkPlatform; -import me.lucko.spark.common.SparkPlugin; import me.lucko.spark.proto.SparkProtos.WorldStatistics; import java.util.ArrayList; @@ -30,46 +28,17 @@ import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicInteger; -import java.util.logging.Level; public class WorldStatisticsProvider { - private final SparkPlatform platform; - private final WorldInfoProvider provider; + private final AsyncWorldInfoProvider provider; - public WorldStatisticsProvider(SparkPlatform platform, WorldInfoProvider provider) { - this.platform = platform; + public WorldStatisticsProvider(AsyncWorldInfoProvider provider) { this.provider = provider; } public WorldStatistics getWorldStatistics() { - if (this.provider == WorldInfoProvider.NO_OP) { - return null; - } - - CompletableFuture>> future; - - if (this.provider.mustCallSync()) { - SparkPlugin plugin = this.platform.getPlugin(); - future = CompletableFuture.supplyAsync(this.provider::poll, plugin::executeSync); - } else { - future = CompletableFuture.completedFuture(this.provider.poll()); - } - - WorldInfoProvider.Result> result; - try { - result = future.get(5, TimeUnit.SECONDS); - } catch (InterruptedException | ExecutionException e) { - throw new RuntimeException(e); - } catch (TimeoutException e) { - this.platform.getPlugin().log(Level.WARNING, "Timed out waiting for world statistics"); - return null; - } - + WorldInfoProvider.ChunksResult> result = provider.getChunks(); if (result == null) { return null; } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java index 59e873c..e324fd3 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java +++ 
b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java @@ -120,7 +120,7 @@ public abstract class AbstractSampler implements Sampler { } @Override - public void stop() { + public void stop(boolean cancelled) { this.windowStatisticsCollector.stop(); } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java index 5d2026d..36a63f1 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java @@ -41,7 +41,7 @@ public interface Sampler { /** * Stops the sampler. */ - void stop(); + void stop(boolean cancelled); /** * Gets the time when the sampler started (unix timestamp in millis) diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerContainer.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerContainer.java index f56dee5..d55909c 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerContainer.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerContainer.java @@ -66,10 +66,10 @@ public class SamplerContainer implements AutoCloseable { /** * Stops the active sampler, if there is one. */ - public void stopActiveSampler() { + public void stopActiveSampler(boolean cancelled) { Sampler sampler = this.activeSampler.getAndSet(null); if (sampler != null) { - sampler.stop(); + sampler.stop(cancelled); } } @@ -79,7 +79,7 @@ public class SamplerContainer implements AutoCloseable { @Override public void close() { - stopActiveSampler(); + stopActiveSampler(true); } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerJob.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerJob.java index db1808c..d74b75f 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerJob.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerJob.java @@ -224,13 +224,15 @@ public class AsyncProfilerJob { } } - // delete the output file after reading + deleteOutputFile(); + } + + public void deleteOutputFile() { try { Files.deleteIfExists(this.outputFile); } catch (IOException e) { // ignore } - } private void readSegments(JfrReader reader, Predicate threadFilter, AsyncDataAggregator dataAggregator, int window) throws IOException { diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java index f2e7191..178f055 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java @@ -144,7 +144,7 @@ public class AsyncSampler extends AbstractSampler { } this.scheduler.schedule(() -> { - stop(); + stop(false); this.future.complete(this); }, delay, TimeUnit.MILLISECONDS); } @@ -153,13 +153,17 @@ public class AsyncSampler extends AbstractSampler { * Stops the profiler. 
*/ @Override - public void stop() { - super.stop(); + public void stop(boolean cancelled) { + super.stop(cancelled); synchronized (this.currentJobMutex) { this.currentJob.stop(); - this.windowStatisticsCollector.measureNow(this.currentJob.getWindow()); - this.currentJob.aggregate(this.dataAggregator); + if (!cancelled) { + this.windowStatisticsCollector.measureNow(this.currentJob.getWindow()); + this.currentJob.aggregate(this.dataAggregator); + } else { + this.currentJob.deleteOutputFile(); + } this.currentJob = null; } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java index 42a457d..72a37e8 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java @@ -94,13 +94,15 @@ public class JavaSampler extends AbstractSampler implements Runnable { } @Override - public void stop() { - super.stop(); + public void stop(boolean cancelled) { + super.stop(cancelled); this.task.cancel(false); - // collect statistics for the final window - this.windowStatisticsCollector.measureNow(this.lastWindow.get()); + if (!cancelled) { + // collect statistics for the final window + this.windowStatisticsCollector.measureNow(this.lastWindow.get()); + } } @Override @@ -111,7 +113,7 @@ public class JavaSampler extends AbstractSampler implements Runnable { long time = System.currentTimeMillis(); if (this.autoEndTime != -1 && this.autoEndTime <= time) { - stop(); + stop(false); this.future.complete(this); return; } @@ -120,7 +122,7 @@ public class JavaSampler extends AbstractSampler implements Runnable { ThreadInfo[] threadDumps = this.threadDumper.dumpThreads(this.threadBean); this.workerPool.execute(new InsertDataTask(threadDumps, window)); } catch (Throwable t) { - stop(); + stop(false); this.future.completeExceptionally(t); } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/window/WindowStatisticsCollector.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/window/WindowStatisticsCollector.java index 7da62fa..ce65013 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/window/WindowStatisticsCollector.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/window/WindowStatisticsCollector.java @@ -23,6 +23,8 @@ package me.lucko.spark.common.sampler.window; import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.monitor.cpu.CpuMonitor; import me.lucko.spark.common.monitor.tick.TickStatistics; +import me.lucko.spark.common.platform.world.AsyncWorldInfoProvider; +import me.lucko.spark.common.platform.world.WorldInfoProvider; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.util.RollingAverage; import me.lucko.spark.proto.SparkProtos; @@ -152,6 +154,19 @@ public class WindowStatisticsCollector { builder.setCpuProcess(CpuMonitor.processLoad1MinAvg()); builder.setCpuSystem(CpuMonitor.systemLoad1MinAvg()); + try { + AsyncWorldInfoProvider worldInfoProvider = new AsyncWorldInfoProvider(this.platform, this.platform.getPlugin().createWorldInfoProvider()); + WorldInfoProvider.CountsResult counts = worldInfoProvider.getCounts(); + if (counts != null) { + builder.setPlayers(counts.players()); + builder.setEntities(counts.entities()); + builder.setTileEntities(counts.tileEntities()); + builder.setChunks(counts.chunks()); + } + } catch (Exception e) { + e.printStackTrace(); + } + return 
diff --git a/spark-common/src/main/proto/spark/spark.proto b/spark-common/src/main/proto/spark/spark.proto
index be76bd7..f61e585 100644
--- a/spark-common/src/main/proto/spark/spark.proto
+++ b/spark-common/src/main/proto/spark/spark.proto
@@ -159,6 +159,12 @@ message WindowStatistics {
   double tps = 4;
   double mspt_median = 5;
   double mspt_max = 6;
+
+  // world
+  int32 players = 7;
+  int32 entities = 8;
+  int32 tile_entities = 9;
+  int32 chunks = 10;
 }
 
 message RollingAverageValues {
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricWorldInfoProvider.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricWorldInfoProvider.java
index f2f7b96..156db89 100644
--- a/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricWorldInfoProvider.java
+++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/FabricWorldInfoProvider.java
@@ -40,6 +40,7 @@
 import net.minecraft.server.MinecraftServer;
 import net.minecraft.server.world.ServerEntityManager;
 import net.minecraft.server.world.ServerWorld;
 import net.minecraft.util.math.ChunkPos;
+import net.minecraft.world.entity.EntityIndex;
 import net.minecraft.world.entity.EntityTrackingSection;
 import net.minecraft.world.entity.SectionedEntityCache;
 
@@ -72,8 +73,25 @@
     }
 
     @Override
-    public Result<FabricChunkInfo> poll() {
-        Result<FabricChunkInfo> data = new Result<>();
+    public CountsResult pollCounts() {
+        int players = this.server.getCurrentPlayerCount();
+        int entities = 0;
+        int chunks = 0;
+
+        for (ServerWorld world : this.server.getWorlds()) {
+            ServerEntityManager<Entity> entityManager = ((ServerWorldAccessor) world).getEntityManager();
+            EntityIndex<Entity> entityIndex = ((ServerEntityManagerAccessor) entityManager).getIndex();
+
+            entities += entityIndex.size();
+            chunks += world.getChunkManager().getLoadedChunkCount();
+        }
+
+        return new CountsResult(players, entities, -1, chunks);
+    }
+
+    @Override
+    public ChunksResult<FabricChunkInfo> pollChunks() {
+        ChunksResult<FabricChunkInfo> data = new ChunksResult<>();
 
         for (ServerWorld world : this.server.getWorlds()) {
             ServerEntityManager<Entity> entityManager = ((ServerWorldAccessor) world).getEntityManager();
@@ -95,8 +113,24 @@
     }
 
     @Override
-    public Result<FabricChunkInfo> poll() {
-        Result<FabricChunkInfo> data = new Result<>();
+    public CountsResult pollCounts() {
+        ClientWorld world = this.client.world;
+        if (world == null) {
+            return null;
+        }
+
+        ClientEntityManager<Entity> entityManager = ((ClientWorldAccessor) world).getEntityManager();
+        EntityIndex<Entity> entityIndex = ((ClientEntityManagerAccessor) entityManager).getIndex();
+
+        int entities = entityIndex.size();
+        int chunks = world.getChunkManager().getLoadedChunkCount();
+
+        return new CountsResult(-1, entities, -1, chunks);
+    }
+
+    @Override
+    public ChunksResult<FabricChunkInfo> pollChunks() {
+        ChunksResult<FabricChunkInfo> data = new ChunksResult<>();
 
         ClientWorld world = this.client.world;
         if (world == null) {
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientEntityManagerAccessor.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientEntityManagerAccessor.java
index 88c9521..994c9a3 100644
--- a/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientEntityManagerAccessor.java
+++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ClientEntityManagerAccessor.java
@@ -22,6 +22,7 @@ package me.lucko.spark.fabric.mixin;
 import net.minecraft.client.world.ClientEntityManager;
 import net.minecraft.entity.Entity;
+import net.minecraft.world.entity.EntityIndex;
 import net.minecraft.world.entity.SectionedEntityCache;
 
 import org.spongepowered.asm.mixin.Mixin;
@@ -33,4 +34,7 @@
     @Accessor
     SectionedEntityCache<Entity> getCache();
 
+    @Accessor
+    EntityIndex<Entity> getIndex();
+
 }
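
Fabric's API does not expose the EntityIndex, so spark reads it through Mixin @Accessor interfaces like the one above: at runtime, Mixin generates an implementation whose getter returns the target's private field, and callers simply cast the manager to the accessor interface. A minimal hypothetical accessor of the same shape; the explicit target field name "index" is illustrative, not confirmed by this patch:

    import net.minecraft.client.world.ClientEntityManager;
    import net.minecraft.entity.Entity;
    import net.minecraft.world.entity.EntityIndex;
    import org.spongepowered.asm.mixin.Mixin;
    import org.spongepowered.asm.mixin.gen.Accessor;

    // Mixin generates the body of getIndex() to read the (assumed) private
    // "index" field of ClientEntityManager.
    @Mixin(ClientEntityManager.class)
    interface IndexAccessorExample {
        @Accessor("index")
        EntityIndex<Entity> getIndex();
    }

    // Usage (as in FabricWorldInfoProvider above):
    //   EntityIndex<Entity> index = ((IndexAccessorExample) entityManager).getIndex();
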
diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerEntityManagerAccessor.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerEntityManagerAccessor.java
index 160a12b..2c67502 100644
--- a/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerEntityManagerAccessor.java
+++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/mixin/ServerEntityManagerAccessor.java
@@ -22,6 +22,7 @@ package me.lucko.spark.fabric.mixin;
 import net.minecraft.entity.Entity;
 import net.minecraft.server.world.ServerEntityManager;
+import net.minecraft.world.entity.EntityIndex;
 import net.minecraft.world.entity.SectionedEntityCache;
 
 import org.spongepowered.asm.mixin.Mixin;
@@ -33,4 +34,7 @@
     @Accessor
     SectionedEntityCache<Entity> getCache();
 
+    @Accessor
+    EntityIndex<Entity> getIndex();
+
 }
diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeWorldInfoProvider.java b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeWorldInfoProvider.java
index 1d65d6a..4750c08 100644
--- a/spark-forge/src/main/java/me/lucko/spark/forge/ForgeWorldInfoProvider.java
+++ b/spark-forge/src/main/java/me/lucko/spark/forge/ForgeWorldInfoProvider.java
@@ -34,6 +34,7 @@
 import net.minecraft.server.level.ServerLevel;
 import net.minecraft.world.entity.Entity;
 import net.minecraft.world.entity.EntityType;
 import net.minecraft.world.level.ChunkPos;
+import net.minecraft.world.level.entity.EntityLookup;
 import net.minecraft.world.level.entity.EntitySection;
 import net.minecraft.world.level.entity.EntitySectionStorage;
 import net.minecraft.world.level.entity.PersistentEntitySectionManager;
@@ -68,8 +69,25 @@
     }
 
     @Override
-    public Result<ForgeChunkInfo> poll() {
-        Result<ForgeChunkInfo> data = new Result<>();
+    public CountsResult pollCounts() {
+        int players = this.server.getPlayerCount();
+        int entities = 0;
+        int chunks = 0;
+
+        for (ServerLevel level : this.server.getAllLevels()) {
+            PersistentEntitySectionManager<Entity> entityManager = level.entityManager;
+            EntityLookup<Entity> entityIndex = entityManager.visibleEntityStorage;
+
+            entities += entityIndex.count();
+            chunks += level.getChunkSource().getLoadedChunksCount();
+        }
+
+        return new CountsResult(players, entities, -1, chunks);
+    }
+
+    @Override
+    public ChunksResult<ForgeChunkInfo> pollChunks() {
+        ChunksResult<ForgeChunkInfo> data = new ChunksResult<>();
 
         for (ServerLevel level : this.server.getAllLevels()) {
             PersistentEntitySectionManager<Entity> entityManager = level.entityManager;
@@ -91,8 +109,24 @@
     }
 
     @Override
-    public Result<ForgeChunkInfo> poll() {
-        Result<ForgeChunkInfo> data = new Result<>();
+    public CountsResult pollCounts() {
+        ClientLevel level = this.client.level;
+        if (level == null) {
+            return null;
+        }
+
+        TransientEntitySectionManager<Entity> entityManager = level.entityStorage;
+        EntityLookup<Entity> entityIndex = entityManager.entityStorage;
+
+        int entities = entityIndex.count();
+        int chunks = level.getChunkSource().getLoadedChunksCount();
+
+        return new CountsResult(-1, entities, -1, chunks);
+    }
+
+    @Override
+    public ChunksResult<ForgeChunkInfo> pollChunks() {
+        ChunksResult<ForgeChunkInfo> data = new ChunksResult<>();
 
         ClientLevel level = this.client.level;
         if (level == null) {
diff --git a/spark-forge/src/main/resources/META-INF/accesstransformer.cfg b/spark-forge/src/main/resources/META-INF/accesstransformer.cfg
index 39e9c1a..2699a0e 100644
--- a/spark-forge/src/main/resources/META-INF/accesstransformer.cfg
+++ b/spark-forge/src/main/resources/META-INF/accesstransformer.cfg
@@ -1,5 +1,7 @@
 public net.minecraft.server.level.ServerLevel f_143244_ # entityManager
 public net.minecraft.world.level.entity.PersistentEntitySectionManager f_157495_ # sectionStorage
+public net.minecraft.world.level.entity.PersistentEntitySectionManager f_157494_ # visibleEntityStorage
 public net.minecraft.client.multiplayer.ClientLevel f_171631_ # entityStorage
 public net.minecraft.world.level.entity.TransientEntitySectionManager f_157638_ # sectionStorage
-public net.minecraft.client.Minecraft f_91018_ # gameThread
\ No newline at end of file
+public net.minecraft.world.level.entity.TransientEntitySectionManager f_157637_ # entityStorage
+public net.minecraft.client.Minecraft f_91018_ # gameThread
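
Where Fabric goes through accessor mixins, Forge widens the fields directly: each accesstransformer.cfg line above makes a private field public at class-load time (per the trailing comments, f_157494_ is the obfuscated name of visibleEntityStorage, f_157637_ of the client entityStorage). The provider can then read the fields with plain Java. A sketch of the resulting call site, assuming the transformer has been applied:

    import net.minecraft.server.level.ServerLevel;
    import net.minecraft.world.entity.Entity;
    import net.minecraft.world.level.entity.EntityLookup;
    import net.minecraft.world.level.entity.PersistentEntitySectionManager;

    final class ForgeCountExample {
        // Direct field access; this compiles only because the access
        // transformer above made entityManager and visibleEntityStorage public.
        static int countEntities(ServerLevel level) {
            PersistentEntitySectionManager<Entity> manager = level.entityManager;
            EntityLookup<Entity> index = manager.visibleEntityStorage;
            return index.count();
        }
    }
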
diff --git a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7WorldInfoProvider.java b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7WorldInfoProvider.java
index fa6fa6b..df58028 100644
--- a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7WorldInfoProvider.java
+++ b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7WorldInfoProvider.java
@@ -20,6 +20,7 @@
 
 package me.lucko.spark.sponge;
 
+import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
 
 import me.lucko.spark.common.platform.world.AbstractChunkInfo;
@@ -44,8 +45,24 @@
     }
 
     @Override
-    public Result<Sponge7ChunkInfo> poll() {
-        Result<Sponge7ChunkInfo> data = new Result<>();
+    public CountsResult pollCounts() {
+        int players = this.server.getOnlinePlayers().size();
+        int entities = 0;
+        int tileEntities = 0;
+        int chunks = 0;
+
+        for (World world : this.server.getWorlds()) {
+            entities += world.getEntities().size();
+            tileEntities += world.getTileEntities().size();
+            chunks += Iterables.size(world.getLoadedChunks());
+        }
+
+        return new CountsResult(players, entities, tileEntities, chunks);
+    }
+
+    @Override
+    public ChunksResult<Sponge7ChunkInfo> pollChunks() {
+        ChunksResult<Sponge7ChunkInfo> data = new ChunksResult<>();
 
         for (World world : this.server.getWorlds()) {
             List<Chunk> chunks = Lists.newArrayList(world.getLoadedChunks());
diff --git a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8WorldInfoProvider.java b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8WorldInfoProvider.java
index bff4d6e..69b4515 100644
--- a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8WorldInfoProvider.java
+++ b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8WorldInfoProvider.java
@@ -20,6 +20,7 @@
 
 package me.lucko.spark.sponge;
 
+import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
 
 import me.lucko.spark.common.platform.world.AbstractChunkInfo;
@@ -45,8 +46,24 @@
     }
 
     @Override
-    public Result<Sponge8ChunkInfo> poll() {
-        Result<Sponge8ChunkInfo> data = new Result<>();
+    public CountsResult pollCounts() {
+        int players = this.server.onlinePlayers().size();
+        int entities = 0;
+        int tileEntities = 0;
+        int chunks = 0;
+
+        for (ServerWorld world : this.server.worldManager().worlds()) {
+            entities += world.entities().size();
+            tileEntities += world.blockEntities().size();
+            chunks += Iterables.size(world.loadedChunks());
+        }
+
+        return new CountsResult(players, entities, tileEntities, chunks);
+    }
+
+    @Override
+    public ChunksResult<Sponge8ChunkInfo> pollChunks() {
+        ChunksResult<Sponge8ChunkInfo> data = new ChunksResult<>();
 
         for (ServerWorld world : this.server.worldManager().worlds()) {
             List<WorldChunk> chunks = Lists.newArrayList(world.loadedChunks());
-- cgit
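
Both Sponge providers above count chunks with Guava's Iterables.size because loadedChunks() exposes an Iterable rather than a Collection; Iterables.size delegates to Collection.size() when it can and otherwise walks the iterator. A small self-contained illustration:

    import com.google.common.collect.Iterables;

    import java.util.Arrays;
    import java.util.List;

    final class IterablesSizeExample {
        public static void main(String[] args) {
            List<String> chunks = Arrays.asList("chunk-a", "chunk-b", "chunk-c");
            // For a Collection this short-circuits to size(); for a plain
            // Iterable it iterates, so it is O(n) in the worst case.
            System.out.println(Iterables.size(chunks)); // 3
        }
    }
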
From fc1e371d67551e9548491e9bf50534d91ce5d170 Mon Sep 17 00:00:00 2001
From: Luck
Date: Sun, 27 Nov 2022 23:38:21 +0000
Subject: Temporary solution to async-profiler JVM crashing issues (#271, #273, #274)
---
 .../java/me/lucko/spark/common/SparkPlatform.java  |  36 ++-----
 .../common/command/modules/SamplerModule.java      |   4 +-
 .../common/sampler/BackgroundSamplerManager.java   | 116 +++++++++++++++++++++
 .../spark/common/sampler/SamplerContainer.java     |   9 --
 .../me/lucko/spark/common/util/Configuration.java  |  60 +++++++++--
 5 files changed, 179 insertions(+), 46 deletions(-)
 create mode 100644 spark-common/src/main/java/me/lucko/spark/common/sampler/BackgroundSamplerManager.java
(limited to 'spark-common/src/main/java/me/lucko/spark/common/command')

diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
index 2574443..dae04ff 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
@@ -44,12 +44,9 @@
 import me.lucko.spark.common.monitor.net.NetworkMonitor;
 import me.lucko.spark.common.monitor.ping.PingStatistics;
 import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
 import me.lucko.spark.common.monitor.tick.TickStatistics;
-import me.lucko.spark.common.platform.PlatformInfo;
 import me.lucko.spark.common.platform.PlatformStatisticsProvider;
-import me.lucko.spark.common.sampler.Sampler;
-import me.lucko.spark.common.sampler.SamplerBuilder;
+import me.lucko.spark.common.sampler.BackgroundSamplerManager;
 import me.lucko.spark.common.sampler.SamplerContainer;
-import me.lucko.spark.common.sampler.ThreadGrouper;
 import me.lucko.spark.common.sampler.source.ClassSourceLookup;
 import me.lucko.spark.common.tick.TickHook;
 import me.lucko.spark.common.tick.TickReporter;
@@ -104,6 +101,7 @@
     private final ReentrantLock commandExecuteLock = new ReentrantLock(true);
     private final ActivityLog activityLog;
     private final SamplerContainer samplerContainer;
+    private final BackgroundSamplerManager backgroundSamplerManager;
     private final TickHook tickHook;
     private final TickReporter tickReporter;
     private final TickStatistics tickStatistics;
@@ -143,10 +141,8 @@
         this.activityLog = new ActivityLog(plugin.getPluginDirectory().resolve("activity.json"));
         this.activityLog.load();
 
-        this.samplerContainer = new SamplerContainer(this.configuration.getBoolean(
-                "backgroundProfiler",
-                plugin.getPlatformInfo().getType() == PlatformInfo.Type.SERVER
-        ));
+        this.samplerContainer = new SamplerContainer();
+        this.backgroundSamplerManager = new BackgroundSamplerManager(this, this.configuration);
 
         this.tickHook = plugin.createTickHook();
         this.tickReporter = plugin.createTickReporter();
@@ -187,14 +183,7 @@
         this.plugin.registerApi(api);
         SparkApi.register(api);
 
-        if (this.samplerContainer.isBackgroundProfilerEnabled()) {
-            this.plugin.log(Level.INFO, "Starting background profiler...");
-            try {
-                startBackgroundProfiler();
-            } catch (Throwable e) {
-                e.printStackTrace();
-            }
-        }
+        this.backgroundSamplerManager.initialise();
     }
 
     public void disable() {
@@ -255,6 +244,10 @@
         return this.samplerContainer;
     }
 
+    public BackgroundSamplerManager getBackgroundSamplerManager() {
+        return this.backgroundSamplerManager;
+    }
+
     public TickHook getTickHook() {
         return this.tickHook;
     }
@@ -287,17 +280,6 @@
         return this.serverNormalOperationStartTime;
     }
 
-    public void startBackgroundProfiler() {
-        Sampler sampler = new SamplerBuilder()
-                .background(true)
-                .threadDumper(this.plugin.getDefaultThreadDumper())
-                .threadGrouper(ThreadGrouper.BY_POOL)
-                .samplingInterval(this.configuration.getInteger("backgroundProfilerInterval", 10))
-                .start(this);
-
-        this.samplerContainer.setActiveSampler(sampler);
-    }
-
     public Path resolveSaveFile(String prefix, String extension) {
         Path pluginFolder = this.plugin.getPluginDirectory();
         try {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
index f576eac..cd00f0d 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
@@ -337,9 +337,7 @@
         handleUpload(platform, resp, sampler, comment, mergeMode, saveToFile);
 
         // if the previous sampler was running in the background, create a new one
-        if (platform.getSamplerContainer().isBackgroundProfilerEnabled()) {
-            platform.startBackgroundProfiler();
-
+        if (platform.getBackgroundSamplerManager().restartBackgroundSampler()) {
             resp.broadcastPrefixed(text()
                     .append(text("Restarted the background profiler. "))
                     .append(text("(If you don't want this to happen, run: /" + platform.getPlugin().getCommandName() + " profiler cancel)", DARK_GRAY))
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/BackgroundSamplerManager.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/BackgroundSamplerManager.java
new file mode 100644
index 0000000..d655739
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/BackgroundSamplerManager.java
@@ -0,0 +1,116 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.sampler;
+
+import me.lucko.spark.common.SparkPlatform;
+import me.lucko.spark.common.platform.PlatformInfo;
+import me.lucko.spark.common.util.Configuration;
+
+import java.util.logging.Level;
+
+public class BackgroundSamplerManager {
+
+    private static final String OPTION_ENABLED = "backgroundProfiler";
+    private static final String OPTION_ENGINE = "backgroundProfilerEngine";
+    private static final String OPTION_INTERVAL = "backgroundProfilerInterval";
+
+    private static final String MARKER_FAILED = "_marker_background_profiler_failed";
+
+    private final SparkPlatform platform;
+    private final Configuration configuration;
+    private final boolean enabled;
+
+    public BackgroundSamplerManager(SparkPlatform platform, Configuration configuration) {
+        this.platform = platform;
+        this.configuration = configuration;
+        this.enabled = this.configuration.getBoolean(
+                OPTION_ENABLED,
+                this.platform.getPlugin().getPlatformInfo().getType() == PlatformInfo.Type.SERVER
+        );
+    }
+
+    public void initialise() {
+        if (!this.enabled) {
+            return;
+        }
+
+        // are we enabling the background profiler by default for the first time?
+        boolean didEnableByDefault = false;
+        if (!this.configuration.contains(OPTION_ENABLED)) {
+            this.configuration.setBoolean(OPTION_ENABLED, true);
+            didEnableByDefault = true;
+        }
+
+        // did the background profiler fail to start on the previous attempt?
+        if (this.configuration.getBoolean(MARKER_FAILED, false)) {
+            this.platform.getPlugin().log(Level.WARNING, "It seems the background profiler failed to start when spark was last enabled. Sorry about that!");
+            this.platform.getPlugin().log(Level.WARNING, "In the future, spark will try to use the built-in Java profiling engine instead.");
+
+            this.configuration.remove(MARKER_FAILED);
+            this.configuration.setString(OPTION_ENGINE, "java");
+            this.configuration.save();
+        }
+
+        this.platform.getPlugin().log(Level.INFO, "Starting background profiler...");
+
+        if (didEnableByDefault) {
+            // set the failed marker and save before we try to start the profiler,
+            // then remove the marker afterwards if everything goes ok!
+            this.configuration.setBoolean(MARKER_FAILED, true);
+            this.configuration.save();
+        }
+
+        try {
+            startSampler();
+
+            if (didEnableByDefault) {
+                this.configuration.remove(MARKER_FAILED);
+                this.configuration.save();
+            }
+
+        } catch (Throwable e) {
+            e.printStackTrace();
+        }
+    }
+
+    public boolean restartBackgroundSampler() {
+        if (this.enabled) {
+            startSampler();
+            return true;
+        }
+        return false;
+    }
+
+    private void startSampler() {
+        boolean forceJavaEngine = this.configuration.getString(OPTION_ENGINE, "async").equals("java");
+
+        Sampler sampler = new SamplerBuilder()
+                .background(true)
+                .threadDumper(this.platform.getPlugin().getDefaultThreadDumper())
+                .threadGrouper(ThreadGrouper.BY_POOL)
+                .samplingInterval(this.configuration.getInteger(OPTION_INTERVAL, 10))
+                .forceJavaSampler(forceJavaEngine)
+                .start(this.platform);
+
+        this.platform.getSamplerContainer().setActiveSampler(sampler);
+    }
+
+}
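
The marker dance in initialise() is a classic crash-detection pattern: persist a "failed" flag before attempting a risky operation and clear it only after the operation succeeds, so a hard JVM crash (which skips every catch block) leaves the flag behind for the next startup to find. Stripped of the spark specifics, the pattern looks like this; java.util.prefs.Preferences is used purely as a stand-in persistent store:

    import java.util.prefs.Preferences;

    final class CrashMarkerExample {
        private static final Preferences PREFS = Preferences.userRoot().node("crash-marker-demo");

        static void runRiskyOperation(Runnable operation) {
            if (PREFS.getBoolean("failed", false)) {
                // the previous attempt died before clearing the marker: fall back
                System.out.println("Previous run crashed; choosing a safer code path.");
                return;
            }
            PREFS.putBoolean("failed", true);   // set the marker *before* the risky call...
            operation.run();                    // ...a hard crash here leaves it set on disk
            PREFS.putBoolean("failed", false);  // ...and clear it only on success
        }
    }
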
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerContainer.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerContainer.java
index d55909c..15b1029 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerContainer.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerContainer.java
@@ -28,11 +28,6 @@
 public class SamplerContainer implements AutoCloseable {
 
     private final AtomicReference<Sampler> activeSampler = new AtomicReference<>();
-    private final boolean backgroundProfilerEnabled;
-
-    public SamplerContainer(boolean backgroundProfilerEnabled) {
-        this.backgroundProfilerEnabled = backgroundProfilerEnabled;
-    }
 
     /**
      * Gets the active sampler, or null if a sampler is not active.
@@ -73,10 +68,6 @@
         }
     }
 
-    public boolean isBackgroundProfilerEnabled() {
-        return this.backgroundProfilerEnabled;
-    }
-
     @Override
    public void close() {
         stopActiveSampler(true);
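
SamplerContainer's stopActiveSampler (reworked in the previous commit) leans on AtomicReference.getAndSet(null): however many threads race to stop the sampler, exactly one of them wins the swap and observes the non-null value, so the stop logic runs at most once. A standalone illustration of the idiom, with a hypothetical Session in place of the sampler:

    import java.util.concurrent.atomic.AtomicReference;

    final class AtomicSwapExample {
        static final class Session {
            final String name;
            Session(String name) { this.name = name; }
        }

        private final AtomicReference<Session> active = new AtomicReference<>(new Session("demo"));

        // Safe to call from any number of threads: only one caller gets the
        // non-null reference back, so the close logic cannot run twice.
        void stopActive() {
            Session session = this.active.getAndSet(null);
            if (session != null) {
                System.out.println("closing " + session.name);
            }
        }
    }
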
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/Configuration.java b/spark-common/src/main/java/me/lucko/spark/common/util/Configuration.java
index ce63878..32f3bc6 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/util/Configuration.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/Configuration.java
@@ -20,32 +20,58 @@
 
 package me.lucko.spark.common.util;
 
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
 import com.google.gson.JsonElement;
 import com.google.gson.JsonObject;
-import com.google.gson.JsonParser;
 import com.google.gson.JsonPrimitive;
 
 import java.io.BufferedReader;
+import java.io.BufferedWriter;
 import java.io.IOException;
 import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Path;
 
 public final class Configuration {
-    private static final JsonParser PARSER = new JsonParser();
+    private static final Gson GSON = new GsonBuilder().setPrettyPrinting().create();
 
-    private final JsonObject root;
+    private final Path file;
+    private JsonObject root;
 
     public Configuration(Path file) {
+        this.file = file;
+        load();
+    }
+
+    public void load() {
         JsonObject root = null;
-        if (Files.exists(file)) {
-            try (BufferedReader reader = Files.newBufferedReader(file, StandardCharsets.UTF_8)) {
-                root = PARSER.parse(reader).getAsJsonObject();
+        if (Files.exists(this.file)) {
+            try (BufferedReader reader = Files.newBufferedReader(this.file, StandardCharsets.UTF_8)) {
+                root = GSON.fromJson(reader, JsonObject.class);
             } catch (IOException e) {
                 e.printStackTrace();
             }
         }
-        this.root = root != null ? root : new JsonObject();
+        if (root == null) {
+            root = new JsonObject();
+            root.addProperty("_header", "spark configuration file - https://spark.lucko.me/docs/Configuration");
+        }
+        this.root = root;
+    }
+
+    public void save() {
+        try {
+            Files.createDirectories(this.file.getParent());
+        } catch (IOException e) {
+            // ignore
+        }
+
+        try (BufferedWriter writer = Files.newBufferedWriter(this.file, StandardCharsets.UTF_8)) {
+            GSON.toJson(this.root, writer);
+        } catch (IOException e) {
+            e.printStackTrace();
+        }
+    }
 
     public String getString(String path, String def) {
@@ -77,4 +103,24 @@
         return val.isBoolean() ? val.getAsInt() : def;
     }
 
+    public void setString(String path, String value) {
+        this.root.add(path, new JsonPrimitive(value));
+    }
+
+    public void setBoolean(String path, boolean value) {
+        this.root.add(path, new JsonPrimitive(value));
+    }
+
+    public void setInteger(String path, int value) {
+        this.root.add(path, new JsonPrimitive(value));
+    }
+
+    public boolean contains(String path) {
+        return this.root.has(path);
+    }
+
+    public void remove(String path) {
+        this.root.remove(path);
+    }
+
 }
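
With this change Configuration becomes read-write: values set through the new setters live in the in-memory JsonObject until save() pretty-prints the whole tree back to disk (adding the "_header" property on first creation). A usage sketch against the API shown above; the file path and option values are hypothetical:

    import java.nio.file.Paths;

    import me.lucko.spark.common.util.Configuration;

    final class ConfigurationExample {
        public static void main(String[] args) {
            Configuration config = new Configuration(Paths.get("config", "spark", "config.json"));

            // read with defaults, write, then persist the whole tree
            boolean enabled = config.getBoolean("backgroundProfiler", true);
            if (!config.contains("backgroundProfilerEngine")) {
                config.setString("backgroundProfilerEngine", "async");
            }
            config.setBoolean("backgroundProfiler", enabled);
            config.save(); // pretty-printed JSON written back to disk
        }
    }
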
-- cgit