author     Luck <git@lucko.me>  2018-11-04 01:05:41 +0000
committer  Luck <git@lucko.me>  2018-11-04 01:05:41 +0000
commit     320d6a28b60873c8e8163b27ed1389978aed4ee6 (patch)
tree       ba130d567e58883458411d115a6eac1b8688220a /spark-common/src/main/java/me/lucko
parent     9e4c0edc47707fbcad34305b3cd723b08f1ab4d6 (diff)
download   spark-320d6a28b60873c8e8163b27ed1389978aed4ee6.tar.gz
           spark-320d6a28b60873c8e8163b27ed1389978aed4ee6.tar.bz2
           spark-320d6a28b60873c8e8163b27ed1389978aed4ee6.zip
some misc refactoring
Diffstat (limited to 'spark-common/src/main/java/me/lucko')
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java                      4
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapModule.java          8
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java       8
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/http/Bytebin.java                       74
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/http/HttpClient.java                   113
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/sampler/Sampler.java                            4
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/sampler/ThreadDumper.java                       9
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/sampler/ThreadGrouper.java                     19
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/util/BytebinClient.java                       145
9 files changed, 167 insertions, 217 deletions
diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
index 24bacc6..f73e3e4 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
@@ -31,6 +31,7 @@ import me.lucko.spark.common.command.tabcomplete.CompletionSupplier;
import me.lucko.spark.common.command.tabcomplete.TabCompleter;
import me.lucko.spark.sampler.ThreadDumper;
import me.lucko.spark.sampler.TickCounter;
+import me.lucko.spark.util.BytebinClient;
import java.util.ArrayList;
import java.util.Arrays;
@@ -47,6 +48,9 @@ public abstract class SparkPlatform<S> {
/** The URL of the viewer frontend */
public static final String VIEWER_URL = "https://sparkprofiler.github.io/#";
+ /** The bytebin instance used by the platform */
+ public static final BytebinClient BYTEBIN_CLIENT = new BytebinClient("https://bytebin.lucko.me/", "spark-plugin");
+
/** The prefix used in all messages */
private static final String PREFIX = "&8[&fspark&8] &7";
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapModule.java
index 8752443..318ce25 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapModule.java
@@ -23,13 +23,15 @@ package me.lucko.spark.common.command.modules;
import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.command.Command;
import me.lucko.spark.common.command.CommandModule;
-import me.lucko.spark.common.http.Bytebin;
import me.lucko.spark.memory.HeapDump;
+import okhttp3.MediaType;
+
import java.io.IOException;
import java.util.function.Consumer;
public class HeapModule<S> implements CommandModule<S> {
+ private static final MediaType JSON_TYPE = MediaType.parse("application/json; charset=utf-8");
@Override
public void registerCommands(Consumer<Command<S>> consumer) {
@@ -50,9 +52,9 @@ public class HeapModule<S> implements CommandModule<S> {
byte[] output = heapDump.formCompressedDataPayload();
try {
- String pasteId = Bytebin.postCompressedContent(output);
+ String key = SparkPlatform.BYTEBIN_CLIENT.postGzippedContent(output, JSON_TYPE);
platform.sendPrefixedMessage("&bHeap dump output:");
- platform.sendLink(SparkPlatform.VIEWER_URL + pasteId);
+ platform.sendLink(SparkPlatform.VIEWER_URL + key);
} catch (IOException e) {
platform.sendPrefixedMessage("&cAn error occurred whilst uploading the data.");
e.printStackTrace();
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
index 693ffd9..5fd8b5b 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
@@ -25,13 +25,14 @@ import me.lucko.spark.common.command.Command;
import me.lucko.spark.common.command.CommandModule;
import me.lucko.spark.common.command.tabcomplete.CompletionSupplier;
import me.lucko.spark.common.command.tabcomplete.TabCompleter;
-import me.lucko.spark.common.http.Bytebin;
import me.lucko.spark.sampler.Sampler;
import me.lucko.spark.sampler.SamplerBuilder;
import me.lucko.spark.sampler.ThreadDumper;
import me.lucko.spark.sampler.ThreadGrouper;
import me.lucko.spark.sampler.TickCounter;
+import okhttp3.MediaType;
+
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
@@ -42,6 +43,7 @@ import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;
public class SamplerModule<S> implements CommandModule<S> {
+ private static final MediaType JSON_TYPE = MediaType.parse("application/json; charset=utf-8");
/** Guards {@link #activeSampler} */
private final Object[] activeSamplerMutex = new Object[0];
@@ -236,9 +238,9 @@ public class SamplerModule<S> implements CommandModule<S> {
platform.runAsync(() -> {
byte[] output = sampler.formCompressedDataPayload();
try {
- String pasteId = Bytebin.postCompressedContent(output);
+ String key = SparkPlatform.BYTEBIN_CLIENT.postGzippedContent(output, JSON_TYPE);
platform.sendPrefixedMessage("&bSampling results:");
- platform.sendLink(SparkPlatform.VIEWER_URL + pasteId);
+ platform.sendLink(SparkPlatform.VIEWER_URL + key);
} catch (IOException e) {
platform.sendPrefixedMessage("&cAn error occurred whilst uploading the results.");
e.printStackTrace();
diff --git a/spark-common/src/main/java/me/lucko/spark/common/http/Bytebin.java b/spark-common/src/main/java/me/lucko/spark/common/http/Bytebin.java
deleted file mode 100644
index a017fdb..0000000
--- a/spark-common/src/main/java/me/lucko/spark/common/http/Bytebin.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * This file is part of spark.
- *
- * Copyright (c) lucko (Luck) <luck@lucko.me>
- * Copyright (c) contributors
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-package me.lucko.spark.common.http;
-
-import com.google.gson.Gson;
-import com.google.gson.JsonObject;
-
-import okhttp3.MediaType;
-import okhttp3.Request;
-import okhttp3.RequestBody;
-import okhttp3.Response;
-import okhttp3.ResponseBody;
-
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.nio.charset.StandardCharsets;
-
-/**
- * Utility for uploading JSON data to bytebin.
- */
-public final class Bytebin {
-
- /** Media type for JSON data */
- private static final MediaType JSON_TYPE = MediaType.parse("application/json; charset=utf-8");
- /** The URL used to upload sampling data */
- private static final String UPLOAD_ENDPOINT = "https://bytebin.lucko.me/post";
-
- public static String postCompressedContent(byte[] buf) throws IOException {
- RequestBody body = RequestBody.create(JSON_TYPE, buf);
-
- Request.Builder requestBuilder = new Request.Builder()
- .url(UPLOAD_ENDPOINT)
- .header("Content-Encoding", "gzip")
- .post(body);
-
- Request request = requestBuilder.build();
- try (Response response = HttpClient.makeCall(request)) {
- try (ResponseBody responseBody = response.body()) {
- if (responseBody == null) {
- throw new RuntimeException("No response");
- }
-
- try (InputStream inputStream = responseBody.byteStream()) {
- try (BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8))) {
- JsonObject object = new Gson().fromJson(reader, JsonObject.class);
- return object.get("key").getAsString();
- }
- }
- }
- }
- }
-
- private Bytebin() {}
-}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/http/HttpClient.java b/spark-common/src/main/java/me/lucko/spark/common/http/HttpClient.java
deleted file mode 100644
index 61db597..0000000
--- a/spark-common/src/main/java/me/lucko/spark/common/http/HttpClient.java
+++ /dev/null
@@ -1,113 +0,0 @@
-/*
- * This file is part of LuckPerms, licensed under the MIT License.
- *
- * Copyright (c) lucko (Luck) <luck@lucko.me>
- * Copyright (c) contributors
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in all
- * copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- * SOFTWARE.
- */
-
-package me.lucko.spark.common.http;
-
-import okhttp3.Interceptor;
-import okhttp3.OkHttpClient;
-import okhttp3.Request;
-import okhttp3.Response;
-import okhttp3.ResponseBody;
-
-import java.io.IOException;
-import java.net.Proxy;
-import java.net.ProxySelector;
-import java.net.SocketAddress;
-import java.net.URI;
-import java.util.Collections;
-import java.util.List;
-
-/**
- * Utility class for making http requests.
- */
-public final class HttpClient {
- private static OkHttpClient client = null;
-
- private static synchronized OkHttpClient getClient() {
- if (client == null) {
- client = new OkHttpClient.Builder()
- .proxySelector(new NullSafeProxySelector())
- .addInterceptor(new UserAgentInterceptor())
- .build();
- }
- return client;
- }
-
- public static Response makeCall(Request request) throws IOException {
- Response response = getClient().newCall(request).execute();
- if (!response.isSuccessful()) {
- throw exceptionForUnsuccessfulResponse(response);
- }
- return response;
- }
-
- private static RuntimeException exceptionForUnsuccessfulResponse(Response response) {
- String msg = "";
- try (ResponseBody responseBody = response.body()) {
- if (responseBody != null) {
- msg = responseBody.string();
- }
- } catch (IOException e) {
- // ignore
- }
- return new RuntimeException("Got response: " + response.code() + " - " + response.message() + " - " + msg);
- }
-
- private static final class UserAgentInterceptor implements Interceptor {
- @Override
- public Response intercept(Chain chain) throws IOException {
- Request orig = chain.request();
- Request modified = orig.newBuilder()
- .header("User-Agent", "spark-plugin")
- .build();
-
- return chain.proceed(modified);
- }
- }
-
- // sometimes ProxySelector#getDefault returns null, and okhttp doesn't like that
- private static final class NullSafeProxySelector extends ProxySelector {
- private static final List<Proxy> DIRECT = Collections.singletonList(Proxy.NO_PROXY);
-
- @Override
- public List<Proxy> select(URI uri) {
- ProxySelector def = ProxySelector.getDefault();
- if (def == null) {
- return DIRECT;
- }
- return def.select(uri);
- }
-
- @Override
- public void connectFailed(URI uri, SocketAddress sa, IOException ioe) {
- ProxySelector def = ProxySelector.getDefault();
- if (def != null) {
- def.connectFailed(uri, sa, ioe);
- }
- }
- }
-
- private HttpClient() {}
-}
\ No newline at end of file
diff --git a/spark-common/src/main/java/me/lucko/spark/sampler/Sampler.java b/spark-common/src/main/java/me/lucko/spark/sampler/Sampler.java
index 7ad7e7b..5758d85 100644
--- a/spark-common/src/main/java/me/lucko/spark/sampler/Sampler.java
+++ b/spark-common/src/main/java/me/lucko/spark/sampler/Sampler.java
@@ -99,7 +99,7 @@ public class Sampler implements Runnable {
public void start() {
this.startTime = System.currentTimeMillis();
this.dataAggregator.start();
- this.task = workerPool.scheduleAtFixedRate(this, 0, interval, TimeUnit.MILLISECONDS);
+ this.task = this.workerPool.scheduleAtFixedRate(this, 0, this.interval, TimeUnit.MILLISECONDS);
}
public long getStartTime() {
@@ -118,7 +118,7 @@ public class Sampler implements Runnable {
}
public void cancel() {
- task.cancel(false);
+ this.task.cancel(false);
}
@Override
diff --git a/spark-common/src/main/java/me/lucko/spark/sampler/ThreadDumper.java b/spark-common/src/main/java/me/lucko/spark/sampler/ThreadDumper.java
index af963c6..940b1c6 100644
--- a/spark-common/src/main/java/me/lucko/spark/sampler/ThreadDumper.java
+++ b/spark-common/src/main/java/me/lucko/spark/sampler/ThreadDumper.java
@@ -44,14 +44,7 @@ public interface ThreadDumper {
/**
* Implementation of {@link ThreadDumper} that generates data for all threads.
*/
- ThreadDumper ALL = new All();
-
- final class All implements ThreadDumper {
- @Override
- public ThreadInfo[] dumpThreads(ThreadMXBean threadBean) {
- return threadBean.dumpAllThreads(false, false);
- }
- }
+ ThreadDumper ALL = threadBean -> threadBean.dumpAllThreads(false, false);
/**
* Implementation of {@link ThreadDumper} that generates data for a specific set of threads.
diff --git a/spark-common/src/main/java/me/lucko/spark/sampler/ThreadGrouper.java b/spark-common/src/main/java/me/lucko/spark/sampler/ThreadGrouper.java
index 0707df6..72cd4dc 100644
--- a/spark-common/src/main/java/me/lucko/spark/sampler/ThreadGrouper.java
+++ b/spark-common/src/main/java/me/lucko/spark/sampler/ThreadGrouper.java
@@ -40,33 +40,24 @@ public interface ThreadGrouper {
/**
* Implementation of {@link ThreadGrouper} that just groups by thread name.
*/
- ThreadGrouper BY_NAME = new ByName();
-
- final class ByName implements ThreadGrouper {
- @Override
- public String getGroup(String threadName) {
- return threadName;
- }
- }
+ ThreadGrouper BY_NAME = threadName -> threadName;
/**
* Implementation of {@link ThreadGrouper} that attempts to group by the name of the pool
* the thread originated from.
*/
- ThreadGrouper BY_POOL = new ByPool();
-
- final class ByPool implements ThreadGrouper {
- private static final Pattern THREAD_POOL_PATTERN = Pattern.compile("^(.*)[-#] ?\\d+$");
+ ThreadGrouper BY_POOL = new ThreadGrouper() {
+ private final Pattern pattern = Pattern.compile("^(.*)[-#] ?\\d+$");
@Override
public String getGroup(String threadName) {
- Matcher matcher = THREAD_POOL_PATTERN.matcher(threadName);
+ Matcher matcher = this.pattern.matcher(threadName);
if (!matcher.matches()) {
return threadName;
}
return matcher.group(1).trim() + " (Combined)";
}
- }
+ };
}
diff --git a/spark-common/src/main/java/me/lucko/spark/util/BytebinClient.java b/spark-common/src/main/java/me/lucko/spark/util/BytebinClient.java
new file mode 100644
index 0000000..01f63f7
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/util/BytebinClient.java
@@ -0,0 +1,145 @@
+/*
+ * This file is part of bytebin, licensed under the MIT License.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+package me.lucko.spark.util;
+
+import okhttp3.MediaType;
+import okhttp3.OkHttpClient;
+import okhttp3.Request;
+import okhttp3.RequestBody;
+import okhttp3.Response;
+
+import java.io.IOException;
+import java.net.Proxy;
+import java.net.ProxySelector;
+import java.net.SocketAddress;
+import java.net.URI;
+import java.util.Collections;
+import java.util.List;
+
+/**
+ * Utility for posting content to bytebin.
+ */
+public class BytebinClient {
+
+ /** The bytebin URL */
+ private final String url;
+ /** The client user agent */
+ private final String userAgent;
+ /** The http client */
+ protected final OkHttpClient okHttp;
+
+ /**
+ * Creates a new bytebin instance
+ *
+ * @param url the bytebin url
+ * @param userAgent the client user agent string
+ */
+ public BytebinClient(String url, String userAgent) {
+ if (url.endsWith("/")) {
+ this.url = url + "post";
+ } else {
+ this.url = url + "/post";
+ }
+ this.userAgent = userAgent;
+ this.okHttp = new OkHttpClient.Builder()
+ .proxySelector(new NullSafeProxySelector())
+ .build();
+ }
+
+ /**
+ * Posts content to bytebin.
+ *
+ * @param buf the content
+ * @param contentType the type of the content
+ * @return the key of the resultant content
+ * @throws IOException if an error occurs
+ */
+ public String postContent(byte[] buf, MediaType contentType) throws IOException {
+ RequestBody body = RequestBody.create(contentType, buf);
+
+ Request.Builder requestBuilder = new Request.Builder()
+ .header("User-Agent", this.userAgent)
+ .url(this.url)
+ .post(body);
+
+ Request request = requestBuilder.build();
+ try (Response response = makeHttpRequest(request)) {
+ return response.header("Location");
+ }
+ }
+
+ /**
+ * Posts GZIP compressed content to bytebin.
+ *
+ * @param buf the compressed content
+ * @param contentType the type of the content
+ * @return the key of the resultant content
+ * @throws IOException if an error occurs
+ */
+ public String postGzippedContent(byte[] buf, MediaType contentType) throws IOException {
+ RequestBody body = RequestBody.create(contentType, buf);
+
+ Request.Builder requestBuilder = new Request.Builder()
+ .url(this.url)
+ .header("User-Agent", this.userAgent)
+ .header("Content-Encoding", "gzip")
+ .post(body);
+
+ Request request = requestBuilder.build();
+ try (Response response = makeHttpRequest(request)) {
+ return response.header("Location");
+ }
+ }
+
+ protected Response makeHttpRequest(Request request) throws IOException {
+ Response response = this.okHttp.newCall(request).execute();
+ if (!response.isSuccessful()) {
+ throw new RuntimeException("Request was unsuccessful: " + response.code() + " - " + response.message());
+ }
+ return response;
+ }
+
+ // sometimes ProxySelector#getDefault returns null, and okhttp doesn't like that
+ private static final class NullSafeProxySelector extends ProxySelector {
+ private static final List<Proxy> DIRECT = Collections.singletonList(Proxy.NO_PROXY);
+
+ @Override
+ public List<Proxy> select(URI uri) {
+ ProxySelector def = ProxySelector.getDefault();
+ if (def == null) {
+ return DIRECT;
+ }
+ return def.select(uri);
+ }
+
+ @Override
+ public void connectFailed(URI uri, SocketAddress sa, IOException ioe) {
+ ProxySelector def = ProxySelector.getDefault();
+ if (def != null) {
+ def.connectFailed(uri, sa, ioe);
+ }
+ }
+ }
+}
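
For reference, a minimal sketch of how the new BytebinClient might be called, mirroring the usage added to HeapModule and SamplerModule above. The example class name, payload bytes, and JSON media type are illustrative assumptions, not part of the commit.

import me.lucko.spark.util.BytebinClient;
import okhttp3.MediaType;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.zip.GZIPOutputStream;

public class BytebinClientExample {
    // same media type the modified command modules pass to the client
    private static final MediaType JSON_TYPE = MediaType.parse("application/json; charset=utf-8");

    public static void main(String[] args) throws IOException {
        // same instance configuration as SparkPlatform.BYTEBIN_CLIENT in this commit
        BytebinClient client = new BytebinClient("https://bytebin.lucko.me/", "spark-plugin");

        // gzip-compress an example JSON payload; callers are expected to pass
        // already-compressed bytes to postGzippedContent
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        try (GZIPOutputStream gzip = new GZIPOutputStream(buf)) {
            gzip.write("{\"example\":true}".getBytes(StandardCharsets.UTF_8));
        }

        // the returned key is appended to the viewer URL, as in HeapModule/SamplerModule
        String key = client.postGzippedContent(buf.toByteArray(), JSON_TYPE);
        System.out.println("https://sparkprofiler.github.io/#" + key);
    }
}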