author | Luck <git@lucko.me> | 2018-11-02 22:12:00 +0000
committer | Luck <git@lucko.me> | 2018-11-02 22:12:00 +0000
commit | 7a60c09a9783f25d297aad063e087354b95aaa21 (patch)
tree | 9ab42f0104f3d54c89b5dbe3894fc0dd76278812 /spark-common/src/main/java
parent | bd5056787953c0d59a3bab133d1c4eba7e2d398e (diff)
Add --include-line-numbers flag (#6)
Diffstat (limited to 'spark-common/src/main/java')
8 files changed, 75 insertions, 28 deletions
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/MonitoringModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/MonitoringModule.java
index 5d4d84c..cf43c18 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/MonitoringModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/MonitoringModule.java
@@ -29,7 +29,7 @@ import me.lucko.spark.monitor.TickMonitor;
 import me.lucko.spark.sampler.TickCounter;
 
 import java.util.ArrayList;
-import java.util.Collections;
+import java.util.Arrays;
 import java.util.List;
 import java.util.function.Consumer;
 
@@ -65,7 +65,7 @@ public class MonitoringModule<S> implements CommandModule<S> {
                     }
                 })
                 .tabCompleter((platform, sender, arguments) -> {
-                    List<String> opts = new ArrayList<>(Collections.singletonList("--threshold"));
+                    List<String> opts = new ArrayList<>(Arrays.asList("--threshold", "--without-gc"));
                     opts.removeAll(arguments);
 
                     return TabCompleter.create()
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
index 2b814e3..693ffd9 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
@@ -57,6 +57,7 @@ public class SamplerModule<S> implements CommandModule<S> {
                 .argumentUsage("not-combined", null)
                 .argumentUsage("interval", "interval millis")
                 .argumentUsage("only-ticks-over", "tick length millis")
+                .argumentUsage("include-line-numbers", null)
                 .executor((platform, sender, arguments) -> {
                     int timeoutSeconds = arguments.intFlag("timeout");
                     if (timeoutSeconds != -1 && timeoutSeconds <= 10) {
@@ -73,6 +74,8 @@ public class SamplerModule<S> implements CommandModule<S> {
                         intervalMillis = 4;
                     }
 
+                    boolean includeLineNumbers = arguments.boolFlag("include-line-numbers");
+
                     Set<String> threads = arguments.stringFlag("thread");
                     ThreadDumper threadDumper;
                     if (threads.isEmpty()) {
@@ -118,6 +121,7 @@ public class SamplerModule<S> implements CommandModule<S> {
                         builder.completeAfter(timeoutSeconds, TimeUnit.SECONDS);
                     }
                     builder.samplingInterval(intervalMillis);
+                    builder.includeLineNumbers(includeLineNumbers);
                     if (ticksOver != -1) {
                         builder.ticksOver(ticksOver, tickCounter);
                     }
@@ -160,7 +164,7 @@ public class SamplerModule<S> implements CommandModule<S> {
                 })
                 .tabCompleter((platform, sender, arguments) -> {
                     List<String> opts = new ArrayList<>(Arrays.asList("--timeout", "--interval",
-                            "--not-combined", "--only-ticks-over"));
+                            "--not-combined", "--only-ticks-over", "--include-line-numbers"));
                     opts.removeAll(arguments);
                     opts.add("--thread"); // allowed multiple times
 
diff --git a/spark-common/src/main/java/me/lucko/spark/sampler/Sampler.java b/spark-common/src/main/java/me/lucko/spark/sampler/Sampler.java
index 1931ca6..7ad7e7b 100644
--- a/spark-common/src/main/java/me/lucko/spark/sampler/Sampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/sampler/Sampler.java
@@ -79,16 +79,16 @@ public class Sampler implements Runnable {
     /** The unix timestamp (in millis) when this sampler should automatically complete.*/
     private final long endTime; // -1 for nothing
 
-    public Sampler(int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime) {
+    public Sampler(int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime, boolean includeLineNumbers) {
         this.threadDumper = threadDumper;
-        this.dataAggregator = new SimpleDataAggregator(this.workerPool, threadGrouper, interval);
+        this.dataAggregator = new SimpleDataAggregator(this.workerPool, threadGrouper, interval, includeLineNumbers);
         this.interval = interval;
         this.endTime = endTime;
     }
 
-    public Sampler(int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime, TickCounter tickCounter, int tickLengthThreshold) {
+    public Sampler(int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime, boolean includeLineNumbers, TickCounter tickCounter, int tickLengthThreshold) {
         this.threadDumper = threadDumper;
-        this.dataAggregator = new TickedDataAggregator(this.workerPool, tickCounter, threadGrouper, interval, tickLengthThreshold);
+        this.dataAggregator = new TickedDataAggregator(this.workerPool, tickCounter, threadGrouper, interval, includeLineNumbers, tickLengthThreshold);
         this.interval = interval;
         this.endTime = endTime;
     }
diff --git a/spark-common/src/main/java/me/lucko/spark/sampler/SamplerBuilder.java b/spark-common/src/main/java/me/lucko/spark/sampler/SamplerBuilder.java
index 2936c65..07449ec 100644
--- a/spark-common/src/main/java/me/lucko/spark/sampler/SamplerBuilder.java
+++ b/spark-common/src/main/java/me/lucko/spark/sampler/SamplerBuilder.java
@@ -28,6 +28,7 @@ import java.util.concurrent.TimeUnit;
 public class SamplerBuilder {
 
     private int samplingInterval = 4;
+    private boolean includeLineNumbers = false;
     private long timeout = -1;
     private ThreadDumper threadDumper = ThreadDumper.ALL;
     private ThreadGrouper threadGrouper = ThreadGrouper.BY_NAME;
@@ -67,12 +68,17 @@ public class SamplerBuilder {
         return this;
     }
 
+    public SamplerBuilder includeLineNumbers(boolean includeLineNumbers) {
+        this.includeLineNumbers = includeLineNumbers;
+        return this;
+    }
+
     public Sampler start() {
         Sampler sampler;
         if (this.ticksOver != -1 && this.tickCounter != null) {
-            sampler = new Sampler(this.samplingInterval, this.threadDumper, this.threadGrouper, this.timeout, this.tickCounter, this.ticksOver);
+            sampler = new Sampler(this.samplingInterval, this.threadDumper, this.threadGrouper, this.timeout, this.includeLineNumbers, this.tickCounter, this.ticksOver);
         } else {
-            sampler = new Sampler(this.samplingInterval, this.threadDumper, this.threadGrouper, this.timeout);
+            sampler = new Sampler(this.samplingInterval, this.threadDumper, this.threadGrouper, this.timeout, this.includeLineNumbers);
         }
         sampler.start();
 
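Note (illustration, not part of the patch): the sketch below shows how the new builder option ends up on a Sampler, using only the calls visible in the hunks above (samplingInterval, includeLineNumbers, completeAfter, start). The public no-arg SamplerBuilder constructor is an assumption — construction of the builder isn't shown in this diff — and the thread dumper/grouper are left at the defaults from the field initialisers.

```java
import java.util.concurrent.TimeUnit;

import me.lucko.spark.sampler.Sampler;
import me.lucko.spark.sampler.SamplerBuilder;

public class IncludeLineNumbersSketch {
    public static void main(String[] args) {
        // assumed: SamplerBuilder can be constructed directly (not shown in this diff)
        SamplerBuilder builder = new SamplerBuilder();
        builder.samplingInterval(4);                 // millis, same as the default field value
        builder.includeLineNumbers(true);            // the option added by this commit
        builder.completeAfter(30, TimeUnit.SECONDS); // optional timeout, as used in SamplerModule

        // ticksOver/tickCounter are not set, so start() takes the SimpleDataAggregator branch
        Sampler sampler = builder.start();
    }
}
```

With the flag left false (the default), nothing changes downstream: the aggregators pass includeLineNumbers straight through to AbstractNode#log, which then records NULL_LINE_NUMBER for every frame.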
diff --git a/spark-common/src/main/java/me/lucko/spark/sampler/aggregator/SimpleDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/sampler/aggregator/SimpleDataAggregator.java
index 6f01dd5..25e2071 100644
--- a/spark-common/src/main/java/me/lucko/spark/sampler/aggregator/SimpleDataAggregator.java
+++ b/spark-common/src/main/java/me/lucko/spark/sampler/aggregator/SimpleDataAggregator.java
@@ -46,10 +46,14 @@ public class SimpleDataAggregator implements DataAggregator {
     /** The interval to wait between sampling, in milliseconds */
     private final int interval;
 
-    public SimpleDataAggregator(ExecutorService workerPool, ThreadGrouper threadGrouper, int interval) {
+    /** If line numbers should be included in the output */
+    private final boolean includeLineNumbers;
+
+    public SimpleDataAggregator(ExecutorService workerPool, ThreadGrouper threadGrouper, int interval, boolean includeLineNumbers) {
         this.workerPool = workerPool;
         this.threadGrouper = threadGrouper;
         this.interval = interval;
+        this.includeLineNumbers = includeLineNumbers;
     }
 
     @Override
@@ -57,7 +61,7 @@ public class SimpleDataAggregator implements DataAggregator {
         try {
             String group = this.threadGrouper.getGroup(threadName);
             AbstractNode node = this.threadData.computeIfAbsent(group, ThreadNode::new);
-            node.log(stack, this.interval);
+            node.log(stack, this.interval, this.includeLineNumbers);
         } catch (Exception e) {
             e.printStackTrace();
         }
diff --git a/spark-common/src/main/java/me/lucko/spark/sampler/aggregator/TickedDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/sampler/aggregator/TickedDataAggregator.java
index d78a2a4..dc241a9 100644
--- a/spark-common/src/main/java/me/lucko/spark/sampler/aggregator/TickedDataAggregator.java
+++ b/spark-common/src/main/java/me/lucko/spark/sampler/aggregator/TickedDataAggregator.java
@@ -53,6 +53,9 @@ public class TickedDataAggregator implements DataAggregator {
     /** The interval to wait between sampling, in milliseconds */
     private final int interval;
 
+    /** If line numbers should be included in the output */
+    private final boolean includeLineNumbers;
+
     /** Tick durations under this threshold will not be inserted */
     private final int tickLengthThreshold;
 
@@ -65,11 +68,12 @@ public class TickedDataAggregator implements DataAggregator {
     private long currentTick = -1;
     private TickList currentData = new TickList(0);
 
-    public TickedDataAggregator(ExecutorService workerPool, TickCounter tickCounter, ThreadGrouper threadGrouper, int interval, int tickLengthThreshold) {
+    public TickedDataAggregator(ExecutorService workerPool, TickCounter tickCounter, ThreadGrouper threadGrouper, int interval, boolean includeLineNumbers, int tickLengthThreshold) {
         this.workerPool = workerPool;
         this.tickCounter = tickCounter;
         this.threadGrouper = threadGrouper;
         this.interval = interval;
+        this.includeLineNumbers = includeLineNumbers;
         this.tickLengthThreshold = tickLengthThreshold;
         // 50 millis in a tick, plus 10 so we have a bit of room to go over
         this.expectedSize = (50 / interval) + 10;
@@ -139,7 +143,7 @@ public class TickedDataAggregator implements DataAggregator {
             try {
                 String group = this.threadGrouper.getGroup(data.threadName);
                 AbstractNode node = this.threadData.computeIfAbsent(group, ThreadNode::new);
-                node.log(data.stack, this.interval);
+                node.log(data.stack, this.interval, this.includeLineNumbers);
             } catch (Exception e) {
                 e.printStackTrace();
             }
diff --git a/spark-common/src/main/java/me/lucko/spark/sampler/node/AbstractNode.java b/spark-common/src/main/java/me/lucko/spark/sampler/node/AbstractNode.java
index e660140..75632c4 100644
--- a/spark-common/src/main/java/me/lucko/spark/sampler/node/AbstractNode.java
+++ b/spark-common/src/main/java/me/lucko/spark/sampler/node/AbstractNode.java
@@ -53,30 +53,47 @@ public abstract class AbstractNode {
         return this.totalTime.longValue();
     }
 
-    private AbstractNode resolveChild(String className, String methodName) {
+    private AbstractNode resolveChild(String className, String methodName, int lineNumber) {
         return this.children.computeIfAbsent(
-                StackTraceNode.generateKey(className, methodName),
-                name -> new StackTraceNode(className, methodName)
+                StackTraceNode.generateKey(className, methodName, lineNumber),
+                name -> new StackTraceNode(className, methodName, lineNumber)
         );
     }
 
-    public void log(StackTraceElement[] elements, long time) {
-        log(elements, 0, time);
+    public void log(StackTraceElement[] elements, long time, boolean includeLineNumbers) {
+        log(elements, 0, time, includeLineNumbers);
     }
 
-    private void log(StackTraceElement[] elements, int skip, long time) {
+    private void log(StackTraceElement[] elements, int offset, long time, boolean includeLineNumbers) {
         this.totalTime.add(time);
 
-        if (skip >= MAX_STACK_DEPTH) {
+        if (offset >= MAX_STACK_DEPTH) {
             return;
         }
 
-        if (elements.length - skip == 0) {
+        if (elements.length - offset == 0) {
             return;
         }
-
-        StackTraceElement bottom = elements[elements.length - (skip + 1)];
-        resolveChild(bottom.getClassName(), bottom.getMethodName()).log(elements, skip + 1, time);
+
+        // the first element in the array is the top of the call stack, and the last is the root
+        // offset starts at 0.
+
+        // pointer is determined by subtracting the offset from the index of the last element
+        int pointer = (elements.length - 1) - offset;
+        StackTraceElement element = elements[pointer];
+
+        // the parent stack element is located at pointer+1.
+        // when the current offset is 0, we know the current pointer is at the last element in the
+        // array (the root) and therefore there is no parent.
+        StackTraceElement parent = offset == 0 ? null : elements[pointer + 1];
+
+        // get the line number of the parent element - the line which called "us"
+        int lineNumber = parent == null || !includeLineNumbers ? StackTraceNode.NULL_LINE_NUMBER : parent.getLineNumber();
+
+        // resolve a child element within the structure for the element at pointer
+        AbstractNode child = resolveChild(element.getClassName(), element.getMethodName(), lineNumber);
+        // call the log method on the found child, with an incremented offset.
+        child.log(elements, offset + 1, time, includeLineNumbers);
     }
 
     private Collection<? extends AbstractNode> getChildren() {
diff --git a/spark-common/src/main/java/me/lucko/spark/sampler/node/StackTraceNode.java b/spark-common/src/main/java/me/lucko/spark/sampler/node/StackTraceNode.java
index d161b42..ee9e985 100644
--- a/spark-common/src/main/java/me/lucko/spark/sampler/node/StackTraceNode.java
+++ b/spark-common/src/main/java/me/lucko/spark/sampler/node/StackTraceNode.java
@@ -31,34 +31,46 @@ import java.io.IOException;
 public final class StackTraceNode extends AbstractNode implements Comparable<StackTraceNode> {
 
     /**
+     * Magic number to denote "no present" line number for a node.
+     */
+    public static final int NULL_LINE_NUMBER = -1;
+
+    /**
      * Forms a key to represent the given node.
      *
      * @param className the name of the class
      * @param methodName the name of the method
+     * @param lineNumber the line number of the parent method call
      * @return the key
      */
-    static String generateKey(String className, String methodName) {
-        return className + "." + methodName;
+    static String generateKey(String className, String methodName, int lineNumber) {
+        return className + "." + methodName + "." + lineNumber;
     }
 
     /** The name of the class */
     private final String className;
     /** The name of the method */
    private final String methodName;
+    /** The line number of the invocation which created this node */
+    private final int lineNumber;
 
-    public StackTraceNode(String className, String methodName) {
+    public StackTraceNode(String className, String methodName, int lineNumber) {
         this.className = className;
         this.methodName = methodName;
+        this.lineNumber = lineNumber;
     }
 
     @Override
     protected void appendMetadata(JsonWriter writer) throws IOException {
         writer.name("cl").value(this.className);
         writer.name("m").value(this.methodName);
+        if (this.lineNumber != NULL_LINE_NUMBER) {
+            writer.name("ln").value(this.lineNumber);
+        }
     }
 
     private String key() {
-        return generateKey(this.className, this.methodName);
+        return generateKey(this.className, this.methodName, this.lineNumber);
     }
 
     @Override
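Note (illustration, not spark code): the new indexing logic in AbstractNode#log walks the StackTraceElement array from the root frame (the last element) towards the top (index 0), and tags each frame with the line number of its caller. A standalone sketch of that arithmetic:

```java
public class StackIndexingSketch {
    public static void main(String[] args) {
        StackTraceElement[] elements = Thread.currentThread().getStackTrace();

        // AbstractNode#log does this walk recursively, one offset per level of the node tree
        for (int offset = 0; offset < elements.length; offset++) {
            int pointer = (elements.length - 1) - offset;  // last element first (the root)
            StackTraceElement element = elements[pointer];

            // the caller sits one index closer to the end of the array; the root has no caller
            StackTraceElement parent = offset == 0 ? null : elements[pointer + 1];

            // the line number recorded for this frame is the call site inside the caller,
            // or -1 (StackTraceNode.NULL_LINE_NUMBER) when there is no caller or the flag is off
            int lineNumber = parent == null ? -1 : parent.getLineNumber();

            System.out.println(element.getClassName() + "." + element.getMethodName() + ":" + lineNumber);
        }
    }
}
```

Because the line number is baked into the child key by generateKey, two calls into the same method from different call sites now produce two sibling nodes instead of one merged node.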
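Note (hypothetical output, not taken from the commit): with line numbers enabled, StackTraceNode#appendMetadata writes an extra "ln" property alongside "cl" and "m". Assuming the JsonWriter in use is Gson's com.google.gson.stream.JsonWriter (its import isn't visible in this diff), the serialised shape looks like the sketch below; the class name, method name and line number are made-up values.

```java
import java.io.OutputStreamWriter;
import java.nio.charset.StandardCharsets;

import com.google.gson.stream.JsonWriter;

public class NodeMetadataSketch {
    public static void main(String[] args) throws Exception {
        try (JsonWriter writer = new JsonWriter(new OutputStreamWriter(System.out, StandardCharsets.UTF_8))) {
            writer.beginObject();
            writer.name("cl").value("com.example.Foo"); // hypothetical class name
            writer.name("m").value("bar");              // hypothetical method name
            writer.name("ln").value(42);                // only emitted when lineNumber != NULL_LINE_NUMBER
            writer.endObject();
        }
        // prints: {"cl":"com.example.Foo","m":"bar","ln":42}
    }
}
```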