author | Luck <git@lucko.me> | 2020-04-01 22:23:43 +0100 |
---|---|---|
committer | Luck <git@lucko.me> | 2020-04-01 22:23:53 +0100 |
commit | 242382646512acba6f5041ecbeab058160ec742d (patch) | |
tree | 7d25c4cee53074f655217ac45bf022c278d5eef8 | |
parent | 8e2369a64d20706cb68738f1d847d93422f71218 (diff) | |
Allow comments to be specified on sampler output
3 files changed, 24 insertions, 15 deletions
```diff
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
index 1959a34..3d8907c 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
@@ -20,6 +20,7 @@
 
 package me.lucko.spark.common.command.modules;
 
+import com.google.common.collect.Iterables;
 import me.lucko.spark.common.SparkPlatform;
 import me.lucko.spark.common.activitylog.ActivityLog.Activity;
 import me.lucko.spark.common.command.Command;
@@ -72,6 +73,7 @@ public class SamplerModule implements CommandModule {
                 .argumentUsage("stop", null)
                 .argumentUsage("cancel", null)
                 .argumentUsage("timeout", "timeout seconds")
+                .argumentUsage("comment", "comment")
                 .argumentUsage("thread", "thread name")
                 .argumentUsage("regex", null)
                 .argumentUsage("combine-all", null)
@@ -117,9 +119,10 @@
                 this.activeSampler.cancel();
                 resp.broadcastPrefixed(TextComponent.of("The active sampling operation has been stopped! Uploading results..."));
                 ThreadNodeOrder threadOrder = arguments.boolFlag("order-by-time") ? ThreadNodeOrder.BY_TIME : ThreadNodeOrder.BY_NAME;
+                String comment = Iterables.getFirst(arguments.stringFlag("comment"), null);
                 MethodDisambiguator methodDisambiguator = new MethodDisambiguator();
                 MergeMode mergeMode = arguments.boolFlag("separate-parent-calls") ? MergeMode.separateParentCalls(methodDisambiguator) : MergeMode.sameMethod(methodDisambiguator);
-                handleUpload(platform, resp, this.activeSampler, threadOrder, mergeMode);
+                handleUpload(platform, resp, this.activeSampler, threadOrder, comment, mergeMode);
                 this.activeSampler = null;
             }
             return;
@@ -142,7 +145,6 @@ public class SamplerModule implements CommandModule {
             intervalMillis = 4;
         }
 
-        boolean includeLineNumbers = arguments.boolFlag("include-line-numbers");
         boolean ignoreSleeping = arguments.boolFlag("ignore-sleeping");
 
         Set<String> threads = arguments.stringFlag("thread");
@@ -227,11 +229,12 @@
         // await the result
         if (timeoutSeconds != -1) {
             ThreadNodeOrder threadOrder = arguments.boolFlag("order-by-time") ? ThreadNodeOrder.BY_TIME : ThreadNodeOrder.BY_NAME;
+            String comment = Iterables.getFirst(arguments.stringFlag("comment"), null);
             MethodDisambiguator methodDisambiguator = new MethodDisambiguator();
             MergeMode mergeMode = arguments.boolFlag("separate-parent-calls") ? MergeMode.separateParentCalls(methodDisambiguator) : MergeMode.sameMethod(methodDisambiguator);
             future.thenAcceptAsync(s -> {
                 resp.broadcastPrefixed(TextComponent.of("The active sampling operation has completed! Uploading results..."));
-                handleUpload(platform, resp, s, threadOrder, mergeMode);
+                handleUpload(platform, resp, s, threadOrder, comment, mergeMode);
             });
         }
     })
@@ -241,12 +244,12 @@
         }
 
         if (arguments.contains("--stop") || arguments.contains("--upload")) {
-            return TabCompleter.completeForOpts(arguments, "--order-by-time", "--separate-parent-calls");
+            return TabCompleter.completeForOpts(arguments, "--order-by-time", "--separate-parent-calls", "--comment");
         }
 
         List<String> opts = new ArrayList<>(Arrays.asList("--info", "--stop", "--cancel", "--timeout",
                 "--regex", "--combine-all", "--not-combined", "--interval",
-                "--only-ticks-over", "--ignore-sleeping", "--order-by-time", "--separate-parent-calls"));
+                "--only-ticks-over", "--ignore-sleeping", "--order-by-time", "--separate-parent-calls", "--comment"));
         opts.removeAll(arguments);
         opts.add("--thread"); // allowed multiple times
@@ -258,9 +261,9 @@
         );
     }
 
-    private void handleUpload(SparkPlatform platform, CommandResponseHandler resp, Sampler sampler, ThreadNodeOrder threadOrder, MergeMode mergeMode) {
+    private void handleUpload(SparkPlatform platform, CommandResponseHandler resp, Sampler sampler, ThreadNodeOrder threadOrder, String comment, MergeMode mergeMode) {
         platform.getPlugin().executeAsync(() -> {
-            byte[] output = sampler.formCompressedDataPayload(resp.sender(), threadOrder, mergeMode);
+            byte[] output = sampler.formCompressedDataPayload(resp.sender(), threadOrder, comment, mergeMode);
             try {
                 String key = SparkPlatform.BYTEBIN_CLIENT.postContent(output, SPARK_SAMPLER_MEDIA_TYPE, false).key();
                 String url = SparkPlatform.VIEWER_URL + key;
```
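The new flag is read with Guava's Iterables.getFirst, so only the first --comment value is used and the comment stays null when the flag is absent. The sketch below is a minimal, self-contained illustration of that lookup pattern; the hard-coded sets stand in for whatever arguments.stringFlag("comment") would return and are not spark code.

```java
import com.google.common.collect.Iterables;

import java.util.Collections;
import java.util.Set;

public class CommentFlagLookup {
    public static void main(String[] args) {
        // Stand-ins for the values a stringFlag-style lookup might produce:
        // a single entry when --comment was supplied, an empty set otherwise.
        Set<String> supplied = Collections.singleton("baseline before entity-tick changes");
        Set<String> missing = Collections.emptySet();

        // Iterables.getFirst returns the first element of the iterable, or the
        // given default (null here) when it is empty - the same call used at
        // both upload sites in SamplerModule above.
        String comment = Iterables.getFirst(supplied, null);
        String absent = Iterables.getFirst(missing, null);

        System.out.println(comment); // baseline before entity-tick changes
        System.out.println(absent);  // null
    }
}
```

Passing the possibly-null value straight through to handleUpload keeps the upload path unchanged for callers that never supply a comment.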
```diff
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
index 6548d56..ec01da2 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
@@ -22,6 +22,7 @@
 package me.lucko.spark.common.sampler;
 
 import com.google.common.util.concurrent.ThreadFactoryBuilder;
+import com.google.protobuf.ByteString;
 import me.lucko.spark.common.command.sender.CommandSender;
 import me.lucko.spark.common.sampler.aggregator.DataAggregator;
 import me.lucko.spark.common.sampler.aggregator.SimpleDataAggregator;
@@ -161,16 +162,20 @@
         }
     }
 
-    private SamplerData toProto(CommandSender creator, Comparator<? super Map.Entry<String, ThreadNode>> outputOrder, MergeMode mergeMode) {
-        SamplerData.Builder proto = SamplerData.newBuilder();
-        proto.setMetadata(SamplerMetadata.newBuilder()
+    private SamplerData toProto(CommandSender creator, Comparator<? super Map.Entry<String, ThreadNode>> outputOrder, String comment, MergeMode mergeMode) {
+        final SamplerMetadata.Builder metadata = SamplerMetadata.newBuilder()
                 .setUser(creator.toData().toProto())
                 .setStartTime(this.startTime)
                 .setInterval(this.interval)
                 .setThreadDumper(this.threadDumper.getMetadata())
-                .setDataAggregator(this.dataAggregator.getMetadata())
-                .build()
-        );
+                .setDataAggregator(this.dataAggregator.getMetadata());
+
+        if (comment != null) {
+            metadata.setComment(comment);
+        }
+
+        SamplerData.Builder proto = SamplerData.newBuilder();
+        proto.setMetadata(metadata.build());
 
         List<Map.Entry<String, ThreadNode>> data = new ArrayList<>(this.dataAggregator.getData().entrySet());
         data.sort(outputOrder);
@@ -182,8 +187,8 @@
         return proto.build();
     }
 
-    public byte[] formCompressedDataPayload(CommandSender creator, Comparator<? super Map.Entry<String, ThreadNode>> outputOrder, MergeMode mergeMode) {
-        SamplerData proto = toProto(creator, outputOrder, mergeMode);
+    public byte[] formCompressedDataPayload(CommandSender creator, Comparator<? super Map.Entry<String, ThreadNode>> outputOrder, String comment, MergeMode mergeMode) {
+        SamplerData proto = toProto(creator, outputOrder, comment, mergeMode);
 
         ByteArrayOutputStream byteOut = new ByteArrayOutputStream();
         try (OutputStream out = new GZIPOutputStream(byteOut)) {
```
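The toProto refactor exists so the optional comment can be set conditionally: the metadata builder is kept in a local variable, setComment is only called when a comment was supplied, and the built message is attached to the SamplerData afterwards. The stand-alone sketch below mirrors that conditional-builder shape with hand-rolled types; Metadata and its Builder here are illustrative stand-ins, not spark's generated SamplerMetadata classes.

```java
public class ConditionalBuilderSketch {
    /** Illustrative stand-in for a generated protobuf message with an optional comment field. */
    static final class Metadata {
        final long startTime;
        final String comment; // "" when unset, mirroring proto3 string defaults

        private Metadata(long startTime, String comment) {
            this.startTime = startTime;
            this.comment = comment;
        }

        static Builder newBuilder() {
            return new Builder();
        }

        static final class Builder {
            private long startTime;
            private String comment = "";

            Builder setStartTime(long startTime) {
                this.startTime = startTime;
                return this;
            }

            Builder setComment(String comment) {
                this.comment = comment;
                return this;
            }

            Metadata build() {
                return new Metadata(this.startTime, this.comment);
            }
        }
    }

    /** Same shape as the refactored toProto: build in two steps so optional fields can be added. */
    static Metadata buildMetadata(long startTime, String comment) {
        Metadata.Builder metadata = Metadata.newBuilder()
                .setStartTime(startTime);

        // Only set the field when a comment was actually provided.
        if (comment != null) {
            metadata.setComment(comment);
        }

        return metadata.build();
    }

    public static void main(String[] args) {
        System.out.println(buildMetadata(System.currentTimeMillis(), "pre-optimisation run").comment);
        System.out.println(buildMetadata(System.currentTimeMillis(), null).comment.isEmpty()); // true
    }
}
```

Keeping the original single chained newBuilder() expression would have forced the comment to be passed unconditionally, for example with an empty-string fallback; splitting the builder into a local variable leaves the field genuinely unset when no comment is given.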
```diff
diff --git a/spark-common/src/main/proto/spark/spark.proto b/spark-common/src/main/proto/spark/spark.proto
index 1c7a002..4777a5c 100644
--- a/spark-common/src/main/proto/spark/spark.proto
+++ b/spark-common/src/main/proto/spark/spark.proto
@@ -43,6 +43,7 @@ message SamplerMetadata {
   int32 interval = 3;
   ThreadDumper thread_dumper = 4;
   DataAggregator data_aggregator = 5;
+  string comment = 6;
 
   message ThreadDumper {
     Type type = 1;
```
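On the protocol side this is a backwards-compatible addition: tag 6 was previously unused in SamplerMetadata, existing payloads simply leave the field unset, and a proto3 string field reads back as an empty string rather than null, so a viewer can treat a missing comment and an empty comment the same way. End to end, the value travels from the --comment flag handled in SamplerModule through handleUpload and formCompressedDataPayload into this metadata field; an invocation along the lines of "/spark sampler --stop --comment <text>" (assuming the sampler alias this module is registered under) would be the user-facing way to exercise it.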