author     Luck <git@lucko.me>    2018-11-04 01:05:41 +0000
committer  Luck <git@lucko.me>    2018-11-04 01:05:41 +0000
commit     320d6a28b60873c8e8163b27ed1389978aed4ee6 (patch)
tree       ba130d567e58883458411d115a6eac1b8688220a /spark-common/src/main/java/me/lucko/spark/common
parent     9e4c0edc47707fbcad34305b3cd723b08f1ab4d6 (diff)
some misc refactoring
Diffstat (limited to 'spark-common/src/main/java/me/lucko/spark/common')
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java                   4
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapModule.java      8
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java   8
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/http/Bytebin.java                   74
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/http/HttpClient.java               113
5 files changed, 14 insertions(+), 193 deletions(-)
diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
index 24bacc6..f73e3e4 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
@@ -31,6 +31,7 @@ import me.lucko.spark.common.command.tabcomplete.CompletionSupplier;
import me.lucko.spark.common.command.tabcomplete.TabCompleter;
import me.lucko.spark.sampler.ThreadDumper;
import me.lucko.spark.sampler.TickCounter;
+import me.lucko.spark.util.BytebinClient;
import java.util.ArrayList;
import java.util.Arrays;
@@ -47,6 +48,9 @@ public abstract class SparkPlatform<S> {
/** The URL of the viewer frontend */
public static final String VIEWER_URL = "https://sparkprofiler.github.io/#";
+ /** The bytebin instance used by the platform */
+ public static final BytebinClient BYTEBIN_CLIENT = new BytebinClient("https://bytebin.lucko.me/", "spark-plugin");
+
/** The prefix used in all messages */
private static final String PREFIX = "&8[&fspark&8] &7";
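
Note: the BytebinClient referenced by the new constant lives in me.lucko.spark.util, outside the path this listing is limited to, so only its call sites appear in this diff. A minimal skeleton of the API those call sites imply follows — the constructor arguments and the postGzippedContent(byte[], MediaType) signature come from the diff itself; the field names and everything else are assumptions, with the upload logic sketched after the deleted Bytebin class further down:

    package me.lucko.spark.util;

    import okhttp3.MediaType;

    import java.io.IOException;

    // Sketch only: the real class is not part of this diff.
    public class BytebinClient {
        private final String url;        // e.g. "https://bytebin.lucko.me/"
        private final String userAgent;  // e.g. "spark-plugin"

        public BytebinClient(String url, String userAgent) {
            this.url = url;
            this.userAgent = userAgent;
        }

        /** Uploads gzip-compressed data and returns the key bytebin assigns to it. */
        public String postGzippedContent(byte[] buf, MediaType contentType) throws IOException {
            // filled in after the deleted Bytebin class below
            throw new UnsupportedOperationException("sketch");
        }
    }
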
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapModule.java
index 8752443..318ce25 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapModule.java
@@ -23,13 +23,15 @@ package me.lucko.spark.common.command.modules;
import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.command.Command;
import me.lucko.spark.common.command.CommandModule;
-import me.lucko.spark.common.http.Bytebin;
import me.lucko.spark.memory.HeapDump;
+import okhttp3.MediaType;
+
import java.io.IOException;
import java.util.function.Consumer;
public class HeapModule<S> implements CommandModule<S> {
+ private static final MediaType JSON_TYPE = MediaType.parse("application/json; charset=utf-8");
@Override
public void registerCommands(Consumer<Command<S>> consumer) {
@@ -50,9 +52,9 @@ public class HeapModule<S> implements CommandModule<S> {
byte[] output = heapDump.formCompressedDataPayload();
try {
- String pasteId = Bytebin.postCompressedContent(output);
+ String key = SparkPlatform.BYTEBIN_CLIENT.postGzippedContent(output, JSON_TYPE);
platform.sendPrefixedMessage("&bHeap dump output:");
- platform.sendLink(SparkPlatform.VIEWER_URL + pasteId);
+ platform.sendLink(SparkPlatform.VIEWER_URL + key);
} catch (IOException e) {
platform.sendPrefixedMessage("&cAn error occurred whilst uploading the data.");
e.printStackTrace();
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
index 693ffd9..5fd8b5b 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
@@ -25,13 +25,14 @@ import me.lucko.spark.common.command.Command;
import me.lucko.spark.common.command.CommandModule;
import me.lucko.spark.common.command.tabcomplete.CompletionSupplier;
import me.lucko.spark.common.command.tabcomplete.TabCompleter;
-import me.lucko.spark.common.http.Bytebin;
import me.lucko.spark.sampler.Sampler;
import me.lucko.spark.sampler.SamplerBuilder;
import me.lucko.spark.sampler.ThreadDumper;
import me.lucko.spark.sampler.ThreadGrouper;
import me.lucko.spark.sampler.TickCounter;
+import okhttp3.MediaType;
+
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
@@ -42,6 +43,7 @@ import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;
public class SamplerModule<S> implements CommandModule<S> {
+ private static final MediaType JSON_TYPE = MediaType.parse("application/json; charset=utf-8");
/** Guards {@link #activeSampler} */
private final Object[] activeSamplerMutex = new Object[0];
@@ -236,9 +238,9 @@ public class SamplerModule<S> implements CommandModule<S> {
platform.runAsync(() -> {
byte[] output = sampler.formCompressedDataPayload();
try {
- String pasteId = Bytebin.postCompressedContent(output);
+ String key = SparkPlatform.BYTEBIN_CLIENT.postGzippedContent(output, JSON_TYPE);
platform.sendPrefixedMessage("&bSampling results:");
- platform.sendLink(SparkPlatform.VIEWER_URL + pasteId);
+ platform.sendLink(SparkPlatform.VIEWER_URL + key);
} catch (IOException e) {
platform.sendPrefixedMessage("&cAn error occurred whilst uploading the results.");
e.printStackTrace();
diff --git a/spark-common/src/main/java/me/lucko/spark/common/http/Bytebin.java b/spark-common/src/main/java/me/lucko/spark/common/http/Bytebin.java
deleted file mode 100644
index a017fdb..0000000
--- a/spark-common/src/main/java/me/lucko/spark/common/http/Bytebin.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * This file is part of spark.
- *
- * Copyright (c) lucko (Luck) <luck@lucko.me>
- * Copyright (c) contributors
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-package me.lucko.spark.common.http;
-
-import com.google.gson.Gson;
-import com.google.gson.JsonObject;
-
-import okhttp3.MediaType;
-import okhttp3.Request;
-import okhttp3.RequestBody;
-import okhttp3.Response;
-import okhttp3.ResponseBody;
-
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.nio.charset.StandardCharsets;
-
-/**
- * Utility for uploading JSON data to bytebin.
- */
-public final class Bytebin {
-
- /** Media type for JSON data */
- private static final MediaType JSON_TYPE = MediaType.parse("application/json; charset=utf-8");
- /** The URL used to upload sampling data */
- private static final String UPLOAD_ENDPOINT = "https://bytebin.lucko.me/post";
-
- public static String postCompressedContent(byte[] buf) throws IOException {
- RequestBody body = RequestBody.create(JSON_TYPE, buf);
-
- Request.Builder requestBuilder = new Request.Builder()
- .url(UPLOAD_ENDPOINT)
- .header("Content-Encoding", "gzip")
- .post(body);
-
- Request request = requestBuilder.build();
- try (Response response = HttpClient.makeCall(request)) {
- try (ResponseBody responseBody = response.body()) {
- if (responseBody == null) {
- throw new RuntimeException("No response");
- }
-
- try (InputStream inputStream = responseBody.byteStream()) {
- try (BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8))) {
- JsonObject object = new Gson().fromJson(reader, JsonObject.class);
- return object.get("key").getAsString();
- }
- }
- }
- }
- }
-
- private Bytebin() {}
-}
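
Note: the class deleted above is essentially what the shared BytebinClient now has to provide. A possible body for the postGzippedContent stub in the skeleton sketched earlier, assuming the same OkHttp + Gson flow, with the media type supplied by the caller and the endpoint and User-Agent taken from the constructor; makeHttpRequest is an assumed helper that executes the call and rejects unsuccessful responses, in the spirit of HttpClient#makeCall deleted below:

    // Needs okhttp3.{Request, RequestBody, Response, ResponseBody}, com.google.gson.{Gson, JsonObject},
    // java.io.{BufferedReader, InputStream, InputStreamReader} and java.nio.charset.StandardCharsets.
    public String postGzippedContent(byte[] buf, MediaType contentType) throws IOException {
        RequestBody body = RequestBody.create(contentType, buf);

        Request request = new Request.Builder()
                .url(this.url + "post")              // assumed: base URL ends with a slash, as in the constant above
                .header("User-Agent", this.userAgent)
                .header("Content-Encoding", "gzip")  // callers pass data that is already gzip-compressed
                .post(body)
                .build();

        try (Response response = makeHttpRequest(request);  // assumed helper, see note above
             ResponseBody responseBody = response.body()) {
            if (responseBody == null) {
                throw new RuntimeException("No response");
            }

            try (InputStream inputStream = responseBody.byteStream();
                 BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8))) {
                // bytebin replies with a JSON object containing the key of the stored content
                JsonObject object = new Gson().fromJson(reader, JsonObject.class);
                return object.get("key").getAsString();
            }
        }
    }
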
diff --git a/spark-common/src/main/java/me/lucko/spark/common/http/HttpClient.java b/spark-common/src/main/java/me/lucko/spark/common/http/HttpClient.java
deleted file mode 100644
index 61db597..0000000
--- a/spark-common/src/main/java/me/lucko/spark/common/http/HttpClient.java
+++ /dev/null
@@ -1,113 +0,0 @@
-/*
- * This file is part of LuckPerms, licensed under the MIT License.
- *
- * Copyright (c) lucko (Luck) <luck@lucko.me>
- * Copyright (c) contributors
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in all
- * copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- * SOFTWARE.
- */
-
-package me.lucko.spark.common.http;
-
-import okhttp3.Interceptor;
-import okhttp3.OkHttpClient;
-import okhttp3.Request;
-import okhttp3.Response;
-import okhttp3.ResponseBody;
-
-import java.io.IOException;
-import java.net.Proxy;
-import java.net.ProxySelector;
-import java.net.SocketAddress;
-import java.net.URI;
-import java.util.Collections;
-import java.util.List;
-
-/**
- * Utility class for making http requests.
- */
-public final class HttpClient {
- private static OkHttpClient client = null;
-
- private static synchronized OkHttpClient getClient() {
- if (client == null) {
- client = new OkHttpClient.Builder()
- .proxySelector(new NullSafeProxySelector())
- .addInterceptor(new UserAgentInterceptor())
- .build();
- }
- return client;
- }
-
- public static Response makeCall(Request request) throws IOException {
- Response response = getClient().newCall(request).execute();
- if (!response.isSuccessful()) {
- throw exceptionForUnsuccessfulResponse(response);
- }
- return response;
- }
-
- private static RuntimeException exceptionForUnsuccessfulResponse(Response response) {
- String msg = "";
- try (ResponseBody responseBody = response.body()) {
- if (responseBody != null) {
- msg = responseBody.string();
- }
- } catch (IOException e) {
- // ignore
- }
- return new RuntimeException("Got response: " + response.code() + " - " + response.message() + " - " + msg);
- }
-
- private static final class UserAgentInterceptor implements Interceptor {
- @Override
- public Response intercept(Chain chain) throws IOException {
- Request orig = chain.request();
- Request modified = orig.newBuilder()
- .header("User-Agent", "spark-plugin")
- .build();
-
- return chain.proceed(modified);
- }
- }
-
- // sometimes ProxySelector#getDefault returns null, and okhttp doesn't like that
- private static final class NullSafeProxySelector extends ProxySelector {
- private static final List<Proxy> DIRECT = Collections.singletonList(Proxy.NO_PROXY);
-
- @Override
- public List<Proxy> select(URI uri) {
- ProxySelector def = ProxySelector.getDefault();
- if (def == null) {
- return DIRECT;
- }
- return def.select(uri);
- }
-
- @Override
- public void connectFailed(URI uri, SocketAddress sa, IOException ioe) {
- ProxySelector def = ProxySelector.getDefault();
- if (def != null) {
- def.connectFailed(uri, sa, ioe);
- }
- }
- }
-
- private HttpClient() {}
-}
\ No newline at end of file
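
Note: beyond executing the request, the deleted HttpClient contributed two things: a hard-coded "spark-plugin" User-Agent (now passed to the BytebinClient constructor instead, per the SparkPlatform change above) and a null-safe ProxySelector, because ProxySelector.getDefault() can return null and OkHttp does not accept that. Presumably both concerns move into the shared client; a minimal sketch of that wiring under that assumption:

    // Sketch only: how the OkHttpClient inside the shared BytebinClient might be built,
    // keeping the null-safe proxy handling from the class deleted above.
    // Needs okhttp3.OkHttpClient plus java.net.{Proxy, ProxySelector, SocketAddress, URI},
    // java.io.IOException and java.util.{Collections, List}.
    private static OkHttpClient buildHttpClient() {
        return new OkHttpClient.Builder()
                .proxySelector(new NullSafeProxySelector())
                .build();
    }

    // same workaround as before: ProxySelector#getDefault sometimes returns null, which OkHttp rejects
    private static final class NullSafeProxySelector extends ProxySelector {
        private static final List<Proxy> DIRECT = Collections.singletonList(Proxy.NO_PROXY);

        @Override
        public List<Proxy> select(URI uri) {
            ProxySelector def = ProxySelector.getDefault();
            return def == null ? DIRECT : def.select(uri);
        }

        @Override
        public void connectFailed(URI uri, SocketAddress sa, IOException ioe) {
            ProxySelector def = ProxySelector.getDefault();
            if (def != null) {
                def.connectFailed(uri, sa, ioe);
            }
        }
    }
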