about summary refs log tree commit diff
path: root/spark-common/src/main/java
diff options
context:
space:
mode:
Diffstat (limited to 'spark-common/src/main/java')
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java               |   9
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java                 |   5
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/api/AbstractStatistic.java      |  85
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/api/GarbageCollectorInfo.java   |  69
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/api/SparkApi.java               | 189
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java |   8
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuMonitor.java     |  12
-rw-r--r--  spark-common/src/main/java/me/lucko/spark/common/util/RollingAverage.java        |  20
8 files changed, 378 insertions(+), 19 deletions(-)
diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
index c9aa030..099ad2d 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
@@ -24,6 +24,7 @@ import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import me.lucko.spark.common.activitylog.ActivityLog;
+import me.lucko.spark.common.api.SparkApi;
import me.lucko.spark.common.command.Arguments;
import me.lucko.spark.common.command.Command;
import me.lucko.spark.common.command.CommandModule;
@@ -89,6 +90,8 @@ public class SparkPlatform {
private Map<String, GarbageCollectorStatistics> startupGcStatistics = ImmutableMap.of();
private long serverNormalOperationStartTime;
+ private SparkApi api;
+
public SparkPlatform(SparkPlugin plugin) {
this.plugin = plugin;
@@ -131,6 +134,10 @@ public class SparkPlatform {
this.startupGcStatistics = GarbageCollectorStatistics.pollStats();
this.serverNormalOperationStartTime = System.currentTimeMillis();
});
+
+ this.api = new SparkApi(this);
+ this.plugin.registerApi(this.api);
+ SparkApi.register(this.api);
}
public void disable() {
@@ -144,6 +151,8 @@ public class SparkPlatform {
for (CommandModule module : this.commandModules) {
module.close();
}
+
+ SparkApi.unregister();
}
public SparkPlugin getPlugin() {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java
index 171367e..216f23f 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java
@@ -20,6 +20,7 @@
package me.lucko.spark.common;
+import me.lucko.spark.api.Spark;
import me.lucko.spark.common.command.sender.CommandSender;
import me.lucko.spark.common.platform.PlatformInfo;
import me.lucko.spark.common.sampler.ThreadDumper;
@@ -107,4 +108,8 @@ public interface SparkPlugin {
*/
PlatformInfo getPlatformInfo();
+ default void registerApi(Spark api) {
+
+ }
+
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/api/AbstractStatistic.java b/spark-common/src/main/java/me/lucko/spark/common/api/AbstractStatistic.java
new file mode 100644
index 0000000..49a6ccb
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/api/AbstractStatistic.java
@@ -0,0 +1,85 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.api;
+
+import me.lucko.spark.api.statistic.Statistic;
+import me.lucko.spark.api.statistic.StatisticWindow;
+import me.lucko.spark.api.statistic.types.DoubleStatistic;
+import me.lucko.spark.api.statistic.types.GenericStatistic;
+
+import org.checkerframework.checker.nullness.qual.NonNull;
+
+import java.lang.reflect.Array;
+import java.util.Arrays;
+
+public abstract class AbstractStatistic<W extends Enum<W> & StatisticWindow> implements Statistic<W> {
+ private final String name;
+ protected final W[] windows;
+
+ protected AbstractStatistic(String name, Class<W> enumClass) {
+ this.name = name;
+ this.windows = enumClass.getEnumConstants();
+ }
+
+ @Override
+ public @NonNull String name() {
+ return this.name;
+ }
+
+ @Override
+ public W[] getWindows() {
+ return Arrays.copyOf(this.windows, this.windows.length);
+ }
+
+ public static abstract class Double<W extends Enum<W> & StatisticWindow> extends AbstractStatistic<W> implements DoubleStatistic<W> {
+ public Double(String name, Class<W> enumClass) {
+ super(name, enumClass);
+ }
+
+ @Override
+ public double[] poll() {
+ double[] values = new double[this.windows.length];
+ for (int i = 0; i < values.length; i++) {
+ values[i] = poll(this.windows[i]);
+ }
+ return values;
+ }
+ }
+
+ public static abstract class Generic<T, W extends Enum<W> & StatisticWindow> extends AbstractStatistic<W> implements GenericStatistic<T, W> {
+ private final Class<T> typeClass;
+
+ public Generic(String name, Class<T> typeClass, Class<W> enumClass) {
+ super(name, enumClass);
+ this.typeClass = typeClass;
+ }
+
+ @SuppressWarnings("unchecked")
+ @Override
+ public T[] poll() {
+ T[] values = (T[]) Array.newInstance(this.typeClass, this.windows.length);
+ for (int i = 0; i < values.length; i++) {
+ values[i] = poll(this.windows[i]);
+ }
+ return values;
+ }
+ }
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/api/GarbageCollectorInfo.java b/spark-common/src/main/java/me/lucko/spark/common/api/GarbageCollectorInfo.java
new file mode 100644
index 0000000..8d289aa
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/api/GarbageCollectorInfo.java
@@ -0,0 +1,69 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.api;
+
+import me.lucko.spark.api.gc.GarbageCollector;
+import me.lucko.spark.common.monitor.memory.GarbageCollectorStatistics;
+
+import org.checkerframework.checker.nullness.qual.NonNull;
+
+public class GarbageCollectorInfo implements GarbageCollector {
+ private final String name;
+ private final long totalCollections;
+ private final long totalTime;
+ private final double averageTime;
+ private final long averageFrequency;
+
+ public GarbageCollectorInfo(String name, GarbageCollectorStatistics stats, long serverUptime) {
+ this.name = name;
+ this.totalCollections = stats.getCollectionCount();
+ this.totalTime = stats.getCollectionTime();
+
+ double totalTimeDouble = this.totalTime;
+ this.averageTime = this.totalCollections == 0 ? 0 : totalTimeDouble / this.totalCollections;
+ this.averageFrequency = this.totalCollections == 0 ? 0 : (long) ((serverUptime - totalTimeDouble) / this.totalCollections);
+ }
+
+ @Override
+ public @NonNull String name() {
+ return this.name;
+ }
+
+ @Override
+ public long totalCollections() {
+ return this.totalCollections;
+ }
+
+ @Override
+ public long totalTime() {
+ return this.totalTime;
+ }
+
+ @Override
+ public double avgTime() {
+ return this.averageTime;
+ }
+
+ @Override
+ public long avgFrequency() {
+ return this.averageFrequency;
+ }
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/api/SparkApi.java b/spark-common/src/main/java/me/lucko/spark/common/api/SparkApi.java
new file mode 100644
index 0000000..5ac41fc
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/api/SparkApi.java
@@ -0,0 +1,189 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.api;
+
+import com.google.common.collect.ImmutableMap;
+
+import me.lucko.spark.api.Spark;
+import me.lucko.spark.api.SparkProvider;
+import me.lucko.spark.api.gc.GarbageCollector;
+import me.lucko.spark.api.statistic.misc.DoubleAverageInfo;
+import me.lucko.spark.api.statistic.types.DoubleStatistic;
+import me.lucko.spark.api.statistic.types.GenericStatistic;
+import me.lucko.spark.api.statistic.StatisticWindow;
+import me.lucko.spark.common.SparkPlatform;
+import me.lucko.spark.common.monitor.cpu.CpuMonitor;
+import me.lucko.spark.common.monitor.memory.GarbageCollectorStatistics;
+import me.lucko.spark.common.monitor.tick.TickStatistics;
+
+import org.checkerframework.checker.nullness.qual.NonNull;
+import org.checkerframework.checker.nullness.qual.Nullable;
+
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+import java.sql.Ref;
+import java.util.HashMap;
+import java.util.Map;
+
+public class SparkApi implements Spark {
+ private static final Method SINGLETON_SET_METHOD;
+
+ static {
+ try {
+ SINGLETON_SET_METHOD = SparkProvider.class.getDeclaredMethod("set", Spark.class);
+ SINGLETON_SET_METHOD.setAccessible(true);
+ } catch (ReflectiveOperationException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ private final SparkPlatform platform;
+
+ public SparkApi(SparkPlatform platform) {
+ this.platform = platform;
+ }
+
+ @Override
+ public @NonNull DoubleStatistic<StatisticWindow.CpuUsage> cpuProcess() {
+ return new AbstractStatistic.Double<StatisticWindow.CpuUsage>(
+ "CPU Process Usage", StatisticWindow.CpuUsage.class
+ ) {
+ @Override
+ public double poll(StatisticWindow.@NonNull CpuUsage window) {
+ switch (window) {
+ case SECONDS_10:
+ return CpuMonitor.processLoad10SecAvg();
+ case MINUTES_1:
+ return CpuMonitor.processLoad1MinAvg();
+ case MINUTES_15:
+ return CpuMonitor.processLoad15MinAvg();
+ default:
+ throw new AssertionError(window);
+ }
+ }
+ };
+ }
+
+ @Override
+ public @NonNull DoubleStatistic<StatisticWindow.CpuUsage> cpuSystem() {
+ return new AbstractStatistic.Double<StatisticWindow.CpuUsage>(
+ "CPU System Usage", StatisticWindow.CpuUsage.class
+ ) {
+ @Override
+ public double poll(StatisticWindow.@NonNull CpuUsage window) {
+ switch (window) {
+ case SECONDS_10:
+ return CpuMonitor.systemLoad10SecAvg();
+ case MINUTES_1:
+ return CpuMonitor.systemLoad1MinAvg();
+ case MINUTES_15:
+ return CpuMonitor.systemLoad15MinAvg();
+ default:
+ throw new AssertionError(window);
+ }
+ }
+ };
+ }
+
+ @Override
+ public @Nullable DoubleStatistic<StatisticWindow.TicksPerSecond> tps() {
+ TickStatistics stats = this.platform.getTickStatistics();
+ if (stats == null) {
+ return null;
+ }
+
+ return new AbstractStatistic.Double<StatisticWindow.TicksPerSecond>(
+ "Ticks Per Second", StatisticWindow.TicksPerSecond.class
+ ) {
+ @Override
+ public double poll(StatisticWindow.@NonNull TicksPerSecond window) {
+ switch (window) {
+ case SECONDS_5:
+ return stats.tps5Sec();
+ case SECONDS_10:
+ return stats.tps10Sec();
+ case MINUTES_1:
+ return stats.tps1Min();
+ case MINUTES_5:
+ return stats.tps5Min();
+ case MINUTES_15:
+ return stats.tps15Min();
+ default:
+ throw new AssertionError(window);
+ }
+ }
+ };
+ }
+
+ @Override
+ public @Nullable GenericStatistic<DoubleAverageInfo, StatisticWindow.MillisPerTick> mspt() {
+ TickStatistics stats = this.platform.getTickStatistics();
+ if (stats == null || !stats.isDurationSupported()) {
+ return null;
+ }
+
+ return new AbstractStatistic.Generic<DoubleAverageInfo, StatisticWindow.MillisPerTick>(
+ "Milliseconds Per Tick", DoubleAverageInfo.class, StatisticWindow.MillisPerTick.class
+ ) {
+ @Override
+ public DoubleAverageInfo poll(StatisticWindow.@NonNull MillisPerTick window) {
+ switch (window) {
+ case SECONDS_10:
+ return stats.duration10Sec();
+ case MINUTES_1:
+ return stats.duration1Min();
+ default:
+ throw new AssertionError(window);
+ }
+ }
+ };
+ }
+
+ @Override
+ public @NonNull Map<String, GarbageCollector> gc() {
+ long serverUptime = System.currentTimeMillis() - this.platform.getServerNormalOperationStartTime();
+ Map<String, GarbageCollectorStatistics> stats = GarbageCollectorStatistics.pollStatsSubtractInitial(
+ this.platform.getStartupGcStatistics()
+ );
+
+ Map<String, GarbageCollector> map = new HashMap<>(stats.size());
+ for (Map.Entry<String, GarbageCollectorStatistics> entry : stats.entrySet()) {
+ map.put(entry.getKey(), new GarbageCollectorInfo(entry.getKey(), entry.getValue(), serverUptime));
+ }
+ return ImmutableMap.copyOf(map);
+ }
+
+ public static void register(Spark spark) {
+ try {
+ SINGLETON_SET_METHOD.invoke(null, spark);
+ } catch (ReflectiveOperationException e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void unregister() {
+ try {
+ SINGLETON_SET_METHOD.invoke(null, new Object[]{null});
+ } catch (ReflectiveOperationException e) {
+ e.printStackTrace();
+ }
+ }
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java
index 547131c..d6c0e10 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java
@@ -351,13 +351,13 @@ public class HealthModule implements CommandModule {
public static TextComponent formatTickDurations(RollingAverage average) {
return text()
- .append(formatTickDuration(average.getMin()))
+ .append(formatTickDuration(average.min()))
.append(text('/', GRAY))
- .append(formatTickDuration(average.getMedian()))
+ .append(formatTickDuration(average.median()))
.append(text('/', GRAY))
- .append(formatTickDuration(average.getPercentile(MSPT_95_PERCENTILE)))
+ .append(formatTickDuration(average.percentile(MSPT_95_PERCENTILE)))
.append(text('/', GRAY))
- .append(formatTickDuration(average.getMax()))
+ .append(formatTickDuration(average.max()))
.build();
}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuMonitor.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuMonitor.java
index 9c75dde..43e1f90 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuMonitor.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuMonitor.java
@@ -97,15 +97,15 @@ public enum CpuMonitor {
}
public static double systemLoad10SecAvg() {
- return SYSTEM_AVERAGE_10_SEC.getAverage();
+ return SYSTEM_AVERAGE_10_SEC.mean();
}
public static double systemLoad1MinAvg() {
- return SYSTEM_AVERAGE_1_MIN.getAverage();
+ return SYSTEM_AVERAGE_1_MIN.mean();
}
public static double systemLoad15MinAvg() {
- return SYSTEM_AVERAGE_15_MIN.getAverage();
+ return SYSTEM_AVERAGE_15_MIN.mean();
}
/**
@@ -128,15 +128,15 @@ public enum CpuMonitor {
}
public static double processLoad10SecAvg() {
- return PROCESS_AVERAGE_10_SEC.getAverage();
+ return PROCESS_AVERAGE_10_SEC.mean();
}
public static double processLoad1MinAvg() {
- return PROCESS_AVERAGE_1_MIN.getAverage();
+ return PROCESS_AVERAGE_1_MIN.mean();
}
public static double processLoad15MinAvg() {
- return PROCESS_AVERAGE_15_MIN.getAverage();
+ return PROCESS_AVERAGE_15_MIN.mean();
}
/**
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/RollingAverage.java b/spark-common/src/main/java/me/lucko/spark/common/util/RollingAverage.java
index 2c6219a..87c41a4 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/util/RollingAverage.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/RollingAverage.java
@@ -20,13 +20,15 @@
package me.lucko.spark.common.util;
+import me.lucko.spark.api.statistic.misc.DoubleAverageInfo;
+
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.util.ArrayDeque;
import java.util.Arrays;
import java.util.Queue;
-public class RollingAverage {
+public class RollingAverage implements DoubleAverageInfo {
private final Queue<BigDecimal> samples;
private final int windowSize;
@@ -47,7 +49,8 @@ public class RollingAverage {
}
}
- public double getAverage() {
+ @Override
+ public double mean() {
synchronized (this) {
if (this.samples.isEmpty()) {
return 0;
@@ -57,7 +60,8 @@ public class RollingAverage {
}
}
- public double getMax() {
+ @Override
+ public double max() {
synchronized (this) {
BigDecimal max = null;
for (BigDecimal sample : this.samples) {
@@ -69,7 +73,8 @@ public class RollingAverage {
}
}
- public double getMin() {
+ @Override
+ public double min() {
synchronized (this) {
BigDecimal min = null;
for (BigDecimal sample : this.samples) {
@@ -81,11 +86,8 @@ public class RollingAverage {
}
}
- public double getMedian() {
- return getPercentile(0.50d);
- }
-
- public double getPercentile(double percentile) {
+ @Override
+ public double percentile(double percentile) {
if (percentile < 0 || percentile > 1) {
throw new IllegalArgumentException("Invalid percentile " + percentile);
}