author     Luck <git@lucko.me>  2020-01-02 19:58:53 +0000
committer  Luck <git@lucko.me>  2020-01-02 19:58:53 +0000
commit     e55c2884e2e16340920d062a917caa2ed6c755c8 (patch)
tree       20f822b6d0d64695e65bffdeeb7bd95a48ecc6ec /spark-common/src/main/java/me/lucko/spark
parent     03b6f817f89275e4d5c3d5b1868c3dcc861bf146 (diff)
Add --order-by-time option
Diffstat (limited to 'spark-common/src/main/java/me/lucko/spark')
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java  21
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java                  9
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadNodeOrder.java         53
3 files changed, 73 insertions(+), 10 deletions(-)
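
In short: the patch changes how thread entries are ordered in the uploaded sampler report. The default stays alphabetical by thread name (BY_NAME, matching the previous Map.Entry.comparingByKey() behaviour); passing the new flag when stopping or uploading the sampler switches to ordering by each thread's total sampled time (BY_TIME). A hypothetical invocation, assuming the usual /spark alias and the sampler subcommand handled by SamplerModule:

    /spark sampler --stop --order-by-time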
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
index f63d7ba..cd98aa4 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
@@ -31,6 +31,7 @@ import me.lucko.spark.common.sampler.Sampler;
import me.lucko.spark.common.sampler.SamplerBuilder;
import me.lucko.spark.common.sampler.ThreadDumper;
import me.lucko.spark.common.sampler.ThreadGrouper;
+import me.lucko.spark.common.sampler.ThreadNodeOrder;
import me.lucko.spark.common.sampler.TickCounter;
import net.kyori.text.TextComponent;
import net.kyori.text.event.ClickEvent;
@@ -77,6 +78,7 @@ public class SamplerModule implements CommandModule {
.argumentUsage("only-ticks-over", "tick length millis")
.argumentUsage("include-line-numbers", null)
.argumentUsage("ignore-sleeping", null)
+ .argumentUsage("order-by-time", null)
.executor((platform, sender, resp, arguments) -> {
if (arguments.boolFlag("info")) {
if (this.activeSampler == null) {
@@ -112,7 +114,8 @@ public class SamplerModule implements CommandModule {
} else {
this.activeSampler.cancel();
resp.broadcastPrefixed(TextComponent.of("The active sampling operation has been stopped! Uploading results..."));
- handleUpload(platform, resp, this.activeSampler);
+ ThreadNodeOrder threadOrder = arguments.boolFlag("order-by-time") ? ThreadNodeOrder.BY_TIME : ThreadNodeOrder.BY_NAME;
+ handleUpload(platform, resp, this.activeSampler, threadOrder);
this.activeSampler = null;
}
return;
@@ -220,20 +223,26 @@ public class SamplerModule implements CommandModule {
// await the result
if (timeoutSeconds != -1) {
+ ThreadNodeOrder threadOrder = arguments.boolFlag("order-by-time") ? ThreadNodeOrder.BY_TIME : ThreadNodeOrder.BY_NAME;
future.thenAcceptAsync(s -> {
resp.broadcastPrefixed(TextComponent.of("The active sampling operation has completed! Uploading results..."));
- handleUpload(platform, resp, s);
+ handleUpload(platform, resp, s, threadOrder);
});
}
})
.tabCompleter((platform, sender, arguments) -> {
- if (arguments.contains("--info") || arguments.contains("--stop") || arguments.contains("--upload") || arguments.contains("--cancel")) {
+ if (arguments.contains("--info") || arguments.contains("--cancel")) {
return Collections.emptyList();
}
+ if (arguments.contains("--stop") || arguments.contains("--upload")) {
+ return TabCompleter.completeForOpts(arguments, "--order-by-time");
+ }
+
List<String> opts = new ArrayList<>(Arrays.asList("--info", "--stop", "--cancel",
"--timeout", "--regex", "--combine-all", "--not-combined", "--interval",
- "--only-ticks-over", "--include-line-numbers", "--ignore-sleeping"));
+ "--only-ticks-over", "--include-line-numbers", "--ignore-sleeping",
+ "--order-by-time"));
opts.removeAll(arguments);
opts.add("--thread"); // allowed multiple times
@@ -245,9 +254,9 @@ public class SamplerModule implements CommandModule {
);
}
- private void handleUpload(SparkPlatform platform, CommandResponseHandler resp, Sampler sampler) {
+ private void handleUpload(SparkPlatform platform, CommandResponseHandler resp, Sampler sampler, ThreadNodeOrder threadOrder) {
platform.getPlugin().executeAsync(() -> {
- byte[] output = sampler.formCompressedDataPayload(resp.sender());
+ byte[] output = sampler.formCompressedDataPayload(resp.sender(), threadOrder);
try {
String key = SparkPlatform.BYTEBIN_CLIENT.postContent(output, SPARK_SAMPLER_MEDIA_TYPE, false).key();
String url = SparkPlatform.VIEWER_URL + key;
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
index 033a2d2..81a757a 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java
@@ -37,6 +37,7 @@ import java.lang.management.ManagementFactory;
import java.lang.management.ThreadInfo;
import java.lang.management.ThreadMXBean;
import java.util.ArrayList;
+import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
@@ -158,7 +159,7 @@ public class Sampler implements Runnable {
}
}
- private SamplerData toProto(CommandSender creator) {
+ private SamplerData toProto(CommandSender creator, Comparator<? super Map.Entry<String, ThreadNode>> outputOrder) {
SamplerData.Builder proto = SamplerData.newBuilder();
proto.setMetadata(SamplerMetadata.newBuilder()
.setUser(creator.toData().toProto())
@@ -170,7 +171,7 @@ public class Sampler implements Runnable {
);
List<Map.Entry<String, ThreadNode>> data = new ArrayList<>(this.dataAggregator.getData().entrySet());
- data.sort(Map.Entry.comparingByKey());
+ data.sort(outputOrder);
for (Map.Entry<String, ThreadNode> entry : data) {
proto.addThreads(entry.getValue().toProto());
@@ -179,10 +180,10 @@ public class Sampler implements Runnable {
return proto.build();
}
- public byte[] formCompressedDataPayload(CommandSender creator) {
+ public byte[] formCompressedDataPayload(CommandSender creator, Comparator<? super Map.Entry<String, ThreadNode>> outputOrder) {
ByteArrayOutputStream byteOut = new ByteArrayOutputStream();
try (OutputStream out = new GZIPOutputStream(byteOut)) {
- toProto(creator).writeTo(out);
+ toProto(creator, outputOrder).writeTo(out);
} catch (IOException e) {
throw new RuntimeException(e);
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadNodeOrder.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadNodeOrder.java
new file mode 100644
index 0000000..3765314
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadNodeOrder.java
@@ -0,0 +1,53 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.sampler;
+
+import me.lucko.spark.common.sampler.node.ThreadNode;
+
+import java.util.Comparator;
+import java.util.Map;
+
+/**
+ * Methods of ordering {@link ThreadNode}s in the output data.
+ */
+public enum ThreadNodeOrder implements Comparator<Map.Entry<String, ThreadNode>> {
+
+ /**
+ * Order by the name of the thread (alphabetically)
+ */
+ BY_NAME {
+ @Override
+ public int compare(Map.Entry<String, ThreadNode> o1, Map.Entry<String, ThreadNode> o2) {
+ return o1.getKey().compareTo(o2.getKey());
+ }
+ },
+
+ /**
+ * Order by the time taken by the thread
+ */
+ BY_TIME {
+ @Override
+ public int compare(Map.Entry<String, ThreadNode> o1, Map.Entry<String, ThreadNode> o2) {
+ return Double.compare(o1.getValue().getTotalTime(), o2.getValue().getTotalTime());
+ }
+ }
+
+}
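
For reference, a minimal standalone sketch of the same "enum implements Comparator" pattern used by ThreadNodeOrder above. SampleNode is a hypothetical stand-in for spark's ThreadNode (assumed here only to expose getTotalTime()); it is not the real class, and the sort direction simply follows the compare() definitions shown in the patch:

import java.util.AbstractMap.SimpleEntry;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.Map;

public class ThreadOrderDemo {

    // hypothetical stand-in for me.lucko.spark.common.sampler.node.ThreadNode
    static class SampleNode {
        private final double totalTime;
        SampleNode(double totalTime) { this.totalTime = totalTime; }
        double getTotalTime() { return this.totalTime; }
    }

    // each constant supplies its own compare(), so a constant can be passed anywhere a Comparator is expected
    enum Order implements Comparator<Map.Entry<String, SampleNode>> {
        BY_NAME {
            @Override
            public int compare(Map.Entry<String, SampleNode> o1, Map.Entry<String, SampleNode> o2) {
                return o1.getKey().compareTo(o2.getKey());
            }
        },
        BY_TIME {
            @Override
            public int compare(Map.Entry<String, SampleNode> o1, Map.Entry<String, SampleNode> o2) {
                return Double.compare(o1.getValue().getTotalTime(), o2.getValue().getTotalTime());
            }
        }
    }

    public static void main(String[] args) {
        List<Map.Entry<String, SampleNode>> data = new ArrayList<>();
        data.add(new SimpleEntry<>("Server thread", new SampleNode(1520.0)));
        data.add(new SimpleEntry<>("Async Chat Thread", new SampleNode(12.5)));
        data.add(new SimpleEntry<>("Netty IO #1", new SampleNode(340.0)));

        // sorts by total time as defined above (smallest first)
        data.sort(Order.BY_TIME);
        data.forEach(e -> System.out.println(e.getKey() + " -> " + e.getValue().getTotalTime()));
    }
}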