| field | value | date |
|---|---|---|
| author | lucko <git@lucko.me> | 2022-12-27 09:17:54 +0000 |
| committer | GitHub <noreply@github.com> | 2022-12-27 09:17:54 +0000 |
| commit | e5b278047ccb7bc6b301d787474c51d162911867 (patch) | |
| tree | 11bba64e8f28ce8b83adc05252b75f17e2ccbf6a /spark-common/src/main/java/me/lucko/spark/common/sampler | |
| parent | 4a16a1a2f4eb09f706b4a541e3d31618de29420b (diff) | |
| parent | 1075665def4a41cf0064255a6da1d1a652f5d473 (diff) | |
| download | spark-e5b278047ccb7bc6b301d787474c51d162911867.tar.gz spark-e5b278047ccb7bc6b301d787474c51d162911867.tar.bz2 spark-e5b278047ccb7bc6b301d787474c51d162911867.zip | |
Merge pull request #284 from embeddedt/forge-1.7.10
Align 1.7.10 with master
Diffstat (limited to 'spark-common/src/main/java/me/lucko/spark/common/sampler')
29 files changed, 2185 insertions, 397 deletions
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java
index ce466a0..e324fd3 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java
@@ -23,16 +23,22 @@ package me.lucko.spark.common.sampler;
 import me.lucko.spark.common.SparkPlatform;
 import me.lucko.spark.common.command.sender.CommandSender;
 import me.lucko.spark.common.monitor.memory.GarbageCollectorStatistics;
+import me.lucko.spark.common.platform.MetadataProvider;
 import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider;
 import me.lucko.spark.common.sampler.aggregator.DataAggregator;
 import me.lucko.spark.common.sampler.node.MergeMode;
 import me.lucko.spark.common.sampler.node.ThreadNode;
-import me.lucko.spark.common.util.ClassSourceLookup;
+import me.lucko.spark.common.sampler.source.ClassSourceLookup;
+import me.lucko.spark.common.sampler.source.SourceMetadata;
+import me.lucko.spark.common.sampler.window.ProtoTimeEncoder;
+import me.lucko.spark.common.sampler.window.WindowStatisticsCollector;

 import me.lucko.spark.proto.SparkSamplerProtos.SamplerData;
 import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata;

+import java.util.Collection;
 import java.util.Comparator;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.concurrent.CompletableFuture;
@@ -41,6 +47,9 @@ import java.util.concurrent.CompletableFuture;
  */
 public abstract class AbstractSampler implements Sampler {

+    /** The spark platform instance */
+    protected final SparkPlatform platform;
+
     /** The interval to wait between sampling, in microseconds */
     protected final int interval;
@@ -51,7 +60,13 @@ public abstract class AbstractSampler implements Sampler {
     protected long startTime = -1;

     /** The unix timestamp (in millis) when this sampler should automatically complete. */
-    protected final long endTime; // -1 for nothing
+    protected final long autoEndTime; // -1 for nothing
+
+    /** If the sampler is running in the background */
+    protected boolean background;
+
+    /** Collects statistics for each window in the sample */
+    protected final WindowStatisticsCollector windowStatisticsCollector;

     /** A future to encapsulate the completion of this sampler instance */
     protected final CompletableFuture<Sampler> future = new CompletableFuture<>();
@@ -59,10 +74,13 @@ public abstract class AbstractSampler implements Sampler {
     /** The garbage collector statistics when profiling started */
     protected Map<String, GarbageCollectorStatistics> initialGcStats;

-    protected AbstractSampler(int interval, ThreadDumper threadDumper, long endTime) {
-        this.interval = interval;
-        this.threadDumper = threadDumper;
-        this.endTime = endTime;
+    protected AbstractSampler(SparkPlatform platform, SamplerSettings settings) {
+        this.platform = platform;
+        this.interval = settings.interval();
+        this.threadDumper = settings.threadDumper();
+        this.autoEndTime = settings.autoEndTime();
+        this.background = settings.runningInBackground();
+        this.windowStatisticsCollector = new WindowStatisticsCollector(platform);
     }

     @Override
@@ -74,8 +92,13 @@ public abstract class AbstractSampler implements Sampler {
     }

     @Override
-    public long getEndTime() {
-        return this.endTime;
+    public long getAutoEndTime() {
+        return this.autoEndTime;
+    }
+
+    @Override
+    public boolean isRunningInBackground() {
+        return this.background;
     }

     @Override
@@ -91,6 +114,16 @@ public abstract class AbstractSampler implements Sampler {
         return this.initialGcStats;
     }

+    @Override
+    public void start() {
+        this.startTime = System.currentTimeMillis();
+    }
+
+    @Override
+    public void stop(boolean cancelled) {
+        this.windowStatisticsCollector.stop();
+    }
+
     protected void writeMetadataToProto(SamplerData.Builder proto, SparkPlatform platform, CommandSender creator, String comment, DataAggregator dataAggregator) {
         SamplerMetadata.Builder metadata = SamplerMetadata.newBuilder()
                 .setPlatformMetadata(platform.getPlugin().getPlatformInfo().toData().toProto())
@@ -105,6 +138,11 @@
             metadata.setComment(comment);
         }

+        int totalTicks = this.windowStatisticsCollector.getTotalTicks();
+        if (totalTicks != -1) {
+            metadata.setNumberOfTicks(totalTicks);
+        }
+
         try {
             metadata.setPlatformStatistics(platform.getStatisticsProvider().getPlatformStatistics(getInitialGcStats()));
         } catch (Exception e) {
@@ -119,27 +157,60 @@
         try {
             ServerConfigProvider serverConfigProvider = platform.getPlugin().createServerConfigProvider();
-            metadata.putAllServerConfigurations(serverConfigProvider.exportServerConfigurations());
+            if (serverConfigProvider != null) {
+                metadata.putAllServerConfigurations(serverConfigProvider.export());
+            }
         } catch (Exception e) {
             e.printStackTrace();
         }

+        try {
+            MetadataProvider extraMetadataProvider = platform.getPlugin().createExtraMetadataProvider();
+            if (extraMetadataProvider != null) {
+                metadata.putAllExtraPlatformMetadata(extraMetadataProvider.export());
+            }
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        Collection<SourceMetadata> knownSources = platform.getPlugin().getKnownSources();
+        for (SourceMetadata source : knownSources) {
+            metadata.putSources(source.getName().toLowerCase(Locale.ROOT), source.toProto());
+        }
+
         proto.setMetadata(metadata);
     }

-    protected void writeDataToProto(SamplerData.Builder proto, DataAggregator dataAggregator, Comparator<ThreadNode> outputOrder, MergeMode mergeMode, ClassSourceLookup classSourceLookup) {
+    protected void writeDataToProto(SamplerData.Builder proto, DataAggregator dataAggregator, MergeMode mergeMode, ClassSourceLookup classSourceLookup) {
         List<ThreadNode> data = dataAggregator.exportData();
-        data.sort(outputOrder);
+        data.sort(Comparator.comparing(ThreadNode::getThreadLabel));

         ClassSourceLookup.Visitor classSourceVisitor = ClassSourceLookup.createVisitor(classSourceLookup);

+        ProtoTimeEncoder timeEncoder = new ProtoTimeEncoder(data);
+        int[] timeWindows = timeEncoder.getKeys();
+        for (int timeWindow : timeWindows) {
+            proto.addTimeWindows(timeWindow);
+        }
+
+        this.windowStatisticsCollector.ensureHasStatisticsForAllWindows(timeWindows);
+        proto.putAllTimeWindowStatistics(this.windowStatisticsCollector.export());
+
         for (ThreadNode entry : data) {
-            proto.addThreads(entry.toProto(mergeMode));
+            proto.addThreads(entry.toProto(mergeMode, timeEncoder));
             classSourceVisitor.visit(entry);
         }

-        if (classSourceVisitor.hasMappings()) {
-            proto.putAllClassSources(classSourceVisitor.getMapping());
+        if (classSourceVisitor.hasClassSourceMappings()) {
+            proto.putAllClassSources(classSourceVisitor.getClassSourceMapping());
+        }
+
+        if (classSourceVisitor.hasMethodSourceMappings()) {
+            proto.putAllMethodSources(classSourceVisitor.getMethodSourceMapping());
+        }
+
+        if (classSourceVisitor.hasLineSourceMappings()) {
+            proto.putAllLineSources(classSourceVisitor.getLineSourceMapping());
         }
     }
 }
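The reworked writeDataToProto(...) above is the heart of the new rolling-window export: thread nodes are encoded against a ProtoTimeEncoder, the distinct window keys are written out via proto.addTimeWindows(...), and the WindowStatisticsCollector supplies per-window statistics. As a rough, self-contained sketch of the window-key idea only (the class and bucketing below are illustrative assumptions, not spark's actual ProtoTimeEncoder):

```java
import java.util.Map;
import java.util.TreeMap;
import java.util.concurrent.TimeUnit;
import java.util.stream.IntStream;

// Illustration: bucket sample timestamps into per-minute window keys, the kind of
// index a profile could expose as its list of time windows. Not spark's implementation.
public class WindowBucketSketch {
    static int windowKey(long timestampMillis) {
        return (int) TimeUnit.MILLISECONDS.toMinutes(timestampMillis);
    }

    public static void main(String[] args) {
        long now = System.currentTimeMillis();
        Map<Integer, Integer> samplesPerWindow = new TreeMap<>();
        // pretend we took one sample per second for three minutes
        IntStream.range(0, 180).forEach(i ->
                samplesPerWindow.merge(windowKey(now + i * 1000L), 1, Integer::sum));
        System.out.println("window keys: " + samplesPerWindow.keySet());
        System.out.println("samples per window: " + samplesPerWindow.values());
    }
}
```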
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/BackgroundSamplerManager.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/BackgroundSamplerManager.java
new file mode 100644
index 0000000..7e3b6b4
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/BackgroundSamplerManager.java
@@ -0,0 +1,115 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.sampler;
+
+import me.lucko.spark.common.SparkPlatform;
+import me.lucko.spark.common.platform.PlatformInfo;
+import me.lucko.spark.common.util.Configuration;
+
+import java.util.logging.Level;
+
+public class BackgroundSamplerManager {
+
+    private static final String OPTION_ENABLED = "backgroundProfiler";
+    private static final String OPTION_ENGINE = "backgroundProfilerEngine";
+    private static final String OPTION_INTERVAL = "backgroundProfilerInterval";
+
+    private static final String MARKER_FAILED = "_marker_background_profiler_failed";
+
+    private final SparkPlatform platform;
+    private final Configuration configuration;
+    private final boolean enabled;
+
+    public BackgroundSamplerManager(SparkPlatform platform, Configuration configuration) {
+        this.platform = platform;
+        this.configuration = configuration;
+
+        PlatformInfo.Type type = this.platform.getPlugin().getPlatformInfo().getType();
+        this.enabled = type != PlatformInfo.Type.CLIENT && this.configuration.getBoolean(OPTION_ENABLED, type == PlatformInfo.Type.SERVER);
+    }
+
+    public void initialise() {
+        if (!this.enabled) {
+            return;
+        }
+
+        // are we enabling the background profiler by default for the first time?
+        boolean didEnableByDefault = false;
+        if (!this.configuration.contains(OPTION_ENABLED)) {
+            this.configuration.setBoolean(OPTION_ENABLED, true);
+            didEnableByDefault = true;
+        }
+
+        // did the background profiler fail to start on the previous attempt?
+        if (this.configuration.getBoolean(MARKER_FAILED, false)) {
+            this.platform.getPlugin().log(Level.WARNING, "It seems the background profiler failed to start when spark was last enabled. Sorry about that!");
+            this.platform.getPlugin().log(Level.WARNING, "In the future, spark will try to use the built-in Java profiling engine instead.");
+
+            this.configuration.remove(MARKER_FAILED);
+            this.configuration.setString(OPTION_ENGINE, "java");
+            this.configuration.save();
+        }
+
+        this.platform.getPlugin().log(Level.INFO, "Starting background profiler...");
+
+        if (didEnableByDefault) {
+            // set the failed marker and save before we try to start the profiler,
+            // then remove the marker afterwards if everything goes ok!
+            this.configuration.setBoolean(MARKER_FAILED, true);
+            this.configuration.save();
+        }
+
+        try {
+            startSampler();
+
+            if (didEnableByDefault) {
+                this.configuration.remove(MARKER_FAILED);
+                this.configuration.save();
+            }
+
+        } catch (Throwable e) {
+            e.printStackTrace();
+        }
+    }
+
+    public boolean restartBackgroundSampler() {
+        if (this.enabled) {
+            startSampler();
+            return true;
+        }
+        return false;
+    }
+
+    private void startSampler() {
+        boolean forceJavaEngine = this.configuration.getString(OPTION_ENGINE, "async").equals("java");
+
+        Sampler sampler = new SamplerBuilder()
+                .background(true)
+                .threadDumper(this.platform.getPlugin().getDefaultThreadDumper())
+                .threadGrouper(ThreadGrouper.BY_POOL)
+                .samplingInterval(this.configuration.getInteger(OPTION_INTERVAL, 10))
+                .forceJavaSampler(forceJavaEngine)
+                .start(this.platform);
+
+        this.platform.getSamplerContainer().setActiveSampler(sampler);
+    }
+
+}
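BackgroundSamplerManager wraps the risky profiler start-up in a persisted failure marker: the marker is saved before startSampler() runs and only cleared once it returns, so a hard crash while bringing up the async engine leaves the marker behind and the next boot switches to the Java engine instead. A minimal standalone sketch of that technique, using a plain marker file rather than spark's Configuration class (the file name and helper method are hypothetical):

```java
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

// Sketch of the "failure marker" pattern: set a flag before a risky start-up step
// and clear it only on success, so a crash leaves evidence for the next boot.
public class FailureMarkerSketch {
    private static final Path MARKER = Path.of("profiler-start-failed.marker");

    public static void main(String[] args) throws IOException {
        if (Files.exists(MARKER)) {
            System.out.println("Previous start appears to have crashed - falling back");
            Files.delete(MARKER);
        }

        Files.createFile(MARKER);  // assume the worst before trying
        startRiskyProfiler();      // if this hard-crashes, the marker survives
        Files.delete(MARKER);      // reached only on success
    }

    private static void startRiskyProfiler() {
        // placeholder for e.g. loading a native profiler library
    }
}
```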
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
index 845043f..36a63f1 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
@@ -23,11 +23,9 @@ package me.lucko.spark.common.sampler;
 import me.lucko.spark.common.SparkPlatform;
 import me.lucko.spark.common.command.sender.CommandSender;
 import me.lucko.spark.common.sampler.node.MergeMode;
-import me.lucko.spark.common.sampler.node.ThreadNode;
-import me.lucko.spark.common.util.ClassSourceLookup;
+import me.lucko.spark.common.sampler.source.ClassSourceLookup;

 import me.lucko.spark.proto.SparkSamplerProtos.SamplerData;

-import java.util.Comparator;
 import java.util.concurrent.CompletableFuture;

 /**
@@ -43,7 +41,7 @@ public interface Sampler {
     /**
      * Stops the sampler.
      */
-    void stop();
+    void stop(boolean cancelled);

     /**
      * Gets the time when the sampler started (unix timestamp in millis)
@@ -57,7 +55,14 @@ public interface Sampler {
      *
      * @return the end time, or -1 if undefined
      */
-    long getEndTime();
+    long getAutoEndTime();
+
+    /**
+     * If this sampler is running in the background. (wasn't started by a specific user)
+     *
+     * @return true if the sampler is running in the background
+     */
+    boolean isRunningInBackground();

     /**
      * Gets a future to encapsulate the completion of the sampler
@@ -67,6 +72,6 @@ public interface Sampler {
     CompletableFuture<Sampler> getFuture();

     // Methods used to export the sampler data to the web viewer.
-    SamplerData toProto(SparkPlatform platform, CommandSender creator, Comparator<ThreadNode> outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup);
+    SamplerData toProto(SparkPlatform platform, CommandSender creator, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup);

 }
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java
index 88cf018..ec635ef 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java
@@ -38,7 +38,8 @@ public class SamplerBuilder {
     private boolean ignoreSleeping = false;
     private boolean ignoreNative = false;
     private boolean useAsyncProfiler = true;
-    private long timeout = -1;
+    private long autoEndTime = -1;
+    private boolean background = false;
     private ThreadDumper threadDumper = ThreadDumper.ALL;
     private ThreadGrouper threadGrouper = ThreadGrouper.BY_NAME;
@@ -57,7 +58,12 @@ public class SamplerBuilder {
         if (timeout <= 0) {
             throw new IllegalArgumentException("timeout > 0");
         }
-        this.timeout = System.currentTimeMillis() + unit.toMillis(timeout);
+        this.autoEndTime = System.currentTimeMillis() + unit.toMillis(timeout);
+        return this;
+    }
+
+    public SamplerBuilder background(boolean background) {
+        this.background = background;
         return this;
     }
@@ -93,15 +99,24 @@ public class SamplerBuilder {
     }

     public Sampler start(SparkPlatform platform) {
+        boolean onlyTicksOverMode = this.ticksOver != -1 && this.tickHook != null;
+        boolean canUseAsyncProfiler = this.useAsyncProfiler &&
+                !onlyTicksOverMode &&
+                !(this.ignoreSleeping || this.ignoreNative) &&
+                !(this.threadDumper instanceof ThreadDumper.Regex) &&
+                AsyncProfilerAccess.getInstance(platform).checkSupported(platform);
+
         int intervalMicros = (int) (this.samplingInterval * 1000d);
+        SamplerSettings settings = new SamplerSettings(intervalMicros, this.threadDumper, this.threadGrouper, this.autoEndTime, this.background);

         Sampler sampler;
-        if (this.ticksOver != -1 && this.tickHook != null) {
-            sampler = new JavaSampler(intervalMicros, this.threadDumper, this.threadGrouper, this.timeout, this.ignoreSleeping, this.ignoreNative, this.tickHook, this.ticksOver);
-        } else if (this.useAsyncProfiler && !(this.threadDumper instanceof ThreadDumper.Regex) && AsyncProfilerAccess.INSTANCE.checkSupported(platform)) {
-            sampler = new AsyncSampler(intervalMicros, this.threadDumper, this.threadGrouper, this.timeout);
+        if (canUseAsyncProfiler) {
+            sampler = new AsyncSampler(platform, settings);
+        } else if (onlyTicksOverMode) {
+            sampler = new JavaSampler(platform, settings, this.ignoreSleeping, this.ignoreNative, this.tickHook, this.ticksOver);
         } else {
-            sampler = new JavaSampler(intervalMicros, this.threadDumper, this.threadGrouper, this.timeout, this.ignoreSleeping, this.ignoreNative);
+            sampler = new JavaSampler(platform, settings, this.ignoreSleeping, this.ignoreNative);
         }

         sampler.start();
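SamplerBuilder.start(...) now decides the engine up front: async-profiler is chosen only when it was requested, the platform supports it, and none of the Java-only features (ticks-over mode, sleeping/native filtering, a regex thread dumper) are in use; otherwise the tick-gated or plain JavaSampler is used. The same precedence, written out as a small pure function for clarity (the enum and parameter names are mine, not spark's):

```java
// Mirrors the engine-selection logic in the SamplerBuilder.start(...) diff above.
public class EngineChoiceSketch {
    enum Engine { ASYNC, JAVA_TICKS_OVER, JAVA }

    static Engine choose(boolean asyncRequested, boolean asyncSupported, boolean onlyTicksOverMode,
                         boolean ignoreSleepingOrNative, boolean regexThreadDumper) {
        boolean canUseAsync = asyncRequested && asyncSupported && !onlyTicksOverMode
                && !ignoreSleepingOrNative && !regexThreadDumper;
        if (canUseAsync) {
            return Engine.ASYNC;
        }
        return onlyTicksOverMode ? Engine.JAVA_TICKS_OVER : Engine.JAVA;
    }

    public static void main(String[] args) {
        System.out.println(choose(true, true, false, false, false)); // ASYNC
        System.out.println(choose(true, true, true, false, false));  // JAVA_TICKS_OVER
        System.out.println(choose(true, true, false, true, false));  // JAVA
    }
}
```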
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerContainer.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerContainer.java
new file mode 100644
index 0000000..15b1029
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerContainer.java
@@ -0,0 +1,76 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.sampler;
+
+import java.util.concurrent.atomic.AtomicReference;
+
+/**
+ * Container for the active sampler.
+ */
+public class SamplerContainer implements AutoCloseable {
+
+    private final AtomicReference<Sampler> activeSampler = new AtomicReference<>();
+
+    /**
+     * Gets the active sampler, or null if a sampler is not active.
+     *
+     * @return the active sampler
+     */
+    public Sampler getActiveSampler() {
+        return this.activeSampler.get();
+    }
+
+    /**
+     * Sets the active sampler, throwing an exception if another sampler is already active.
+     *
+     * @param sampler the sampler
+     */
+    public void setActiveSampler(Sampler sampler) {
+        if (!this.activeSampler.compareAndSet(null, sampler)) {
+            throw new IllegalStateException("Attempted to set active sampler when another was already active!");
+        }
+    }
+
+    /**
+     * Unsets the active sampler, if the provided sampler is active.
+     *
+     * @param sampler the sampler
+     */
+    public void unsetActiveSampler(Sampler sampler) {
+        this.activeSampler.compareAndSet(sampler, null);
+    }
+
+    /**
+     * Stops the active sampler, if there is one.
+     */
+    public void stopActiveSampler(boolean cancelled) {
+        Sampler sampler = this.activeSampler.getAndSet(null);
+        if (sampler != null) {
+            sampler.stop(cancelled);
+        }
+    }
+
+    @Override
+    public void close() {
+        stopActiveSampler(true);
+    }
+
+}
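SamplerContainer relies on AtomicReference.compareAndSet so that activating a second sampler fails loudly and a stale reference cannot clear a slot it no longer owns. A tiny self-contained demonstration of that discipline, with strings standing in for samplers:

```java
import java.util.concurrent.atomic.AtomicReference;

// Demonstrates the compare-and-set ownership discipline used by SamplerContainer.
public class SlotSketch {
    private static final AtomicReference<String> ACTIVE = new AtomicReference<>();

    public static void main(String[] args) {
        if (!ACTIVE.compareAndSet(null, "sampler-A")) {
            throw new IllegalStateException("another sampler is already active");
        }
        ACTIVE.compareAndSet("sampler-B", null);   // stale owner: no effect
        System.out.println(ACTIVE.get());          // still "sampler-A"
        ACTIVE.compareAndSet("sampler-A", null);   // real owner clears the slot
        System.out.println(ACTIVE.get());          // null
    }
}
```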
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerSettings.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerSettings.java
new file mode 100644
index 0000000..6e55a43
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerSettings.java
@@ -0,0 +1,61 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.sampler;
+
+/**
+ * Base settings for all samplers
+ */
+public class SamplerSettings {
+
+    private final int interval;
+    private final ThreadDumper threadDumper;
+    private final ThreadGrouper threadGrouper;
+    private final long autoEndTime;
+    private final boolean runningInBackground;
+
+    public SamplerSettings(int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long autoEndTime, boolean runningInBackground) {
+        this.interval = interval;
+        this.threadDumper = threadDumper;
+        this.threadGrouper = threadGrouper;
+        this.autoEndTime = autoEndTime;
+        this.runningInBackground = runningInBackground;
+    }
+
+    public int interval() {
+        return this.interval;
+    }
+
+    public ThreadDumper threadDumper() {
+        return this.threadDumper;
+    }
+
+    public ThreadGrouper threadGrouper() {
+        return this.threadGrouper;
+    }
+
+    public long autoEndTime() {
+        return this.autoEndTime;
+    }
+
+    public boolean runningInBackground() {
+        return this.runningInBackground;
+    }
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java
index 9d54f50..fd0c413 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java
@@ -76,17 +76,29 @@ public interface ThreadDumper {
      * the game (server/client) thread.
      */
     final class GameThread implements Supplier<ThreadDumper> {
+        private Supplier<Thread> threadSupplier;
         private Specific dumper = null;

+        public GameThread() {
+
+        }
+
+        public GameThread(Supplier<Thread> threadSupplier) {
+            this.threadSupplier = threadSupplier;
+        }
+
         @Override
         public ThreadDumper get() {
+            if (this.dumper == null) {
+                setThread(this.threadSupplier.get());
+                this.threadSupplier = null;
+            }
+
             return Objects.requireNonNull(this.dumper, "dumper");
         }

-        public void ensureSetup() {
-            if (this.dumper == null) {
-                this.dumper = new Specific(new long[]{Thread.currentThread().getId()});
-            }
+        public void setThread(Thread thread) {
+            this.dumper = new Specific(new long[]{thread.getId()});
         }
     }
@@ -98,6 +110,10 @@
         private Set<Thread> threads;
         private Set<String> threadNamesLowerCase;

+        public Specific(Thread thread) {
+            this.ids = new long[]{thread.getId()};
+        }
+
         public Specific(long[] ids) {
             this.ids = ids;
         }
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/AbstractDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/AbstractDataAggregator.java
index ad9dee4..2c003e5 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/AbstractDataAggregator.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/AbstractDataAggregator.java
@@ -27,6 +27,7 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
+import java.util.function.IntPredicate;

 /**
  * Abstract implementation of {@link DataAggregator}.
@@ -52,6 +53,11 @@ public abstract class AbstractDataAggregator implements DataAggregator {
     }

     @Override
+    public void pruneData(IntPredicate timeWindowPredicate) {
+        this.threadData.values().removeIf(node -> node.removeTimeWindowsRecursively(timeWindowPredicate));
+    }
+
+    @Override
     public List<ThreadNode> exportData() {
         List<ThreadNode> data = new ArrayList<>(this.threadData.values());
         for (ThreadNode node : data) {
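The GameThread change above swaps the eager ensureSetup() call for a Supplier<Thread> that is resolved on the first get() and then discarded. A standalone sketch of the same one-shot lazy resolution (class and field names are illustrative only):

```java
import java.util.Objects;
import java.util.function.Supplier;

// One-shot lazy resolution: the supplier is consulted on first use, then dropped.
public class LazyThreadIdSketch {
    private Supplier<Thread> threadSupplier;
    private long[] resolvedIds;

    LazyThreadIdSketch(Supplier<Thread> threadSupplier) {
        this.threadSupplier = threadSupplier;
    }

    long[] get() {
        if (this.resolvedIds == null) {
            this.resolvedIds = new long[]{this.threadSupplier.get().getId()};
            this.threadSupplier = null; // not needed once resolved
        }
        return Objects.requireNonNull(this.resolvedIds, "resolvedIds");
    }

    public static void main(String[] args) {
        LazyThreadIdSketch sketch = new LazyThreadIdSketch(Thread::currentThread);
        System.out.println(sketch.get()[0] == sketch.get()[0]); // true - resolved once, reused
    }
}
```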
