| author | Luck <git@lucko.me> | 2024-07-29 18:33:08 +0100 |
|---|---|---|
| committer | Luck <git@lucko.me> | 2024-07-29 18:33:08 +0100 |
| commit | 60d54cc4df05e3328f8b8d64ea3b44d5d22c9ed7 (patch) | |
| tree | 2bf8fcf914ac57466549d35dcd89ef96d3a2d65f | |
| parent | 4c0149b6a15fa887328bbd88c8055c2138cc4d72 (diff) | |
| download | spark-60d54cc4df05e3328f8b8d64ea3b44d5d22c9ed7.tar.gz spark-60d54cc4df05e3328f8b8d64ea3b44d5d22c9ed7.tar.bz2 spark-60d54cc4df05e3328f8b8d64ea3b44d5d22c9ed7.zip | |
Add some unit tests
45 files changed, 1905 insertions, 32 deletions
```diff
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index bc3588a..5f3cea9 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -16,7 +16,12 @@ env:
 
 jobs:
   build-gradle:
-    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        os: [ubuntu-latest, windows-latest, macos-latest]
+
+    runs-on: ${{ matrix.os }}
+
     steps:
     - name: Checkout repository
       uses: actions/checkout@v3
@@ -34,11 +39,12 @@ jobs:
     - name: Setup Gradle
       uses: gradle/gradle-build-action@v2
 
-    - name: Run build with Gradle wrapper
-      run: ./gradlew build
+    - name: Run build and tests with Gradle wrapper
+      run: ./gradlew test build
 
     - name: Upload all artifacts
       uses: actions/upload-artifact@v3
+      if: matrix.os == 'ubuntu-latest'
       with:
         name: jars
         path: |
```

```diff
diff --git a/build.gradle b/build.gradle
index 6dc9600..5d15ff2 100644
--- a/build.gradle
+++ b/build.gradle
@@ -2,6 +2,9 @@ plugins {
     id 'org.cadixdev.licenser' version '0.6.1' apply false
 }
 
+import org.gradle.api.tasks.testing.logging.TestExceptionFormat
+import org.gradle.api.tasks.testing.logging.TestLogEvent
+
 allprojects {
     group = 'me.lucko'
     version = '1.10-SNAPSHOT'
@@ -25,6 +28,16 @@ subprojects {
         options.release = 8
     }
 
+    tasks.withType(Test).configureEach {
+        testLogging {
+            events = [TestLogEvent.PASSED, TestLogEvent.FAILED, TestLogEvent.SKIPPED]
+            exceptionFormat = TestExceptionFormat.FULL
+            showExceptions = true
+            showCauses = true
+            showStackTraces = true
+        }
+    }
+
     processResources {
         duplicatesStrategy = DuplicatesStrategy.INCLUDE
     }
```

```diff
diff --git a/spark-common/build.gradle b/spark-common/build.gradle
index 0eaf711..5bc4bda 100644
--- a/spark-common/build.gradle
+++ b/spark-common/build.gradle
@@ -20,17 +20,17 @@ dependencies {
         exclude(module: 'slf4j-api')
     }
 
-    api('net.kyori:adventure-api:4.13.1') {
+    api('net.kyori:adventure-api:4.17.0') {
         exclude(module: 'adventure-bom')
         exclude(module: 'checker-qual')
         exclude(module: 'annotations')
     }
-    api('net.kyori:adventure-text-serializer-gson:4.13.1') {
+    api('net.kyori:adventure-text-serializer-gson:4.17.0') {
         exclude(module: 'adventure-bom')
         exclude(module: 'adventure-api')
         exclude(module: 'gson')
     }
-    api('net.kyori:adventure-text-serializer-legacy:4.13.1') {
+    api('net.kyori:adventure-text-serializer-legacy:4.17.0') {
         exclude(module: 'adventure-bom')
         exclude(module: 'adventure-api')
     }
@@ -40,6 +40,22 @@ dependencies {
     compileOnly 'com.google.code.gson:gson:2.7'
     compileOnly 'com.google.guava:guava:19.0'
     compileOnly 'org.checkerframework:checker-qual:3.44.0'
+
+    testImplementation 'org.junit.jupiter:junit-jupiter-api:5.11.0-M2'
+    testImplementation 'org.junit.jupiter:junit-jupiter-engine:5.11.0-M2'
+    testImplementation 'org.junit.jupiter:junit-jupiter-params:5.11.0-M2'
+    // testImplementation "org.testcontainers:junit-jupiter:1.19.8"
+    // testImplementation 'org.mockito:mockito-core:5.12.0'
+    // testImplementation 'org.mockito:mockito-junit-jupiter:5.12.0'
+
+    testImplementation 'com.google.code.gson:gson:2.7'
+    testImplementation 'com.google.guava:guava:19.0'
+    testImplementation 'org.checkerframework:checker-qual:3.44.0'
+
+    testImplementation('net.kyori:adventure-text-serializer-ansi:4.17.0') {
+        exclude(module: 'adventure-bom')
+        exclude(module: 'adventure-api')
+    }
 }
 
 protobuf {
@@ -56,3 +72,8 @@ protobuf {
         }
     }
 }
+
+test {
+    useJUnitPlatform {}
+    systemProperty('net.kyori.ansi.colorLevel', 'indexed16')
+}
```

```diff
diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
index 733510d..5e25d91 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
@@ -52,10 +52,12 @@ import me.lucko.spark.common.sampler.source.ClassSourceLookup;
 import me.lucko.spark.common.tick.TickHook;
 import me.lucko.spark.common.tick.TickReporter;
 import me.lucko.spark.common.util.BytebinClient;
-import me.lucko.spark.common.util.Configuration;
 import me.lucko.spark.common.util.SparkStaticLogger;
 import me.lucko.spark.common.util.TemporaryFiles;
 import me.lucko.spark.common.util.classfinder.ClassFinder;
+import me.lucko.spark.common.util.config.Configuration;
+import me.lucko.spark.common.util.config.FileConfiguration;
+import me.lucko.spark.common.util.config.RuntimeConfiguration;
 import me.lucko.spark.common.ws.TrustedKeyStore;
 import net.kyori.adventure.text.Component;
 import net.kyori.adventure.text.event.ClickEvent;
@@ -71,6 +73,7 @@ import java.util.Collections;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.concurrent.CompletableFuture;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicReference;
 import java.util.concurrent.locks.ReentrantLock;
@@ -121,7 +124,11 @@ public class SparkPlatform {
         SparkStaticLogger.setLogger(plugin::log);
 
         this.temporaryFiles = new TemporaryFiles(this.plugin.getPluginDirectory().resolve("tmp"));
-        this.configuration = new Configuration(this.plugin.getPluginDirectory().resolve("config.json"));
+        this.configuration = Configuration.combining(
+                RuntimeConfiguration.SYSTEM_PROPERTIES,
+                RuntimeConfiguration.ENVIRONMENT_VARIABLES,
+                new FileConfiguration(this.plugin.getPluginDirectory().resolve("config.json"))
+        );
 
         this.viewerUrl = this.configuration.getString("viewerUrl", "https://spark.lucko.me/");
         String bytebinUrl = this.configuration.getString("bytebinUrl", "https://spark-usercontent.lucko.me/");
@@ -330,7 +337,8 @@ public class SparkPlatform {
         return !getAvailableCommands(sender).isEmpty();
     }
 
-    public void executeCommand(CommandSender sender, String[] args) {
+    public CompletableFuture<Void> executeCommand(CommandSender sender, String[] args) {
+        CompletableFuture<Void> future = new CompletableFuture<>();
         AtomicReference<Thread> executorThread = new AtomicReference<>();
         AtomicReference<Thread> timeoutThread = new AtomicReference<>();
         AtomicBoolean completed = new AtomicBoolean(false);
@@ -341,9 +349,11 @@ public class SparkPlatform {
             this.commandExecuteLock.lock();
             try {
                 executeCommand0(sender, args);
+                future.complete(null);
             } catch (Exception e) {
                 this.plugin.log(Level.SEVERE, "Exception occurred whilst executing a spark command");
                 e.printStackTrace();
+                future.completeExceptionally(e);
             } finally {
                 this.commandExecuteLock.unlock();
                 executorThread.set(null);
@@ -393,6 +403,8 @@ public class SparkPlatform {
                 timeoutThread.set(null);
             }
         });
+
+        return future;
     }
 
     private void executeCommand0(CommandSender sender, String[] args) {
```
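The two changes to SparkPlatform above are what make the new unit tests practical: configuration can be layered in from system properties without a `config.json` on disk, and a caller can block on command completion instead of sleeping. A hedged sketch of how a test might use both — `pluginDirectory`, `platform` and `sender` are assumed fixtures; only the names shown in the diff are taken from the source:

```java
// Layer order mirrors the constructor change above: system properties
// override environment variables, which override values from config.json.
Configuration config = Configuration.combining(
        RuntimeConfiguration.SYSTEM_PROPERTIES,
        RuntimeConfiguration.ENVIRONMENT_VARIABLES,
        new FileConfiguration(pluginDirectory.resolve("config.json"))
);
String viewerUrl = config.getString("viewerUrl", "https://spark.lucko.me/");

// executeCommand now returns a future, so a test can await completion;
// join() rethrows anything captured via completeExceptionally above.
platform.executeCommand(sender, new String[]{"health"}).join();
```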
```diff
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
index 9e2647a..334e416 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
@@ -59,6 +59,7 @@ import java.util.Set;
 import java.util.concurrent.CompletableFuture;
 import java.util.concurrent.TimeUnit;
 import java.util.function.Consumer;
+import java.util.function.Supplier;
 
 import static net.kyori.adventure.text.Component.empty;
 import static net.kyori.adventure.text.Component.space;
@@ -208,7 +209,7 @@ public class SamplerModule implements CommandModule {
             }
         }
 
-        ThreadGrouper threadGrouper;
+        Supplier<ThreadGrouper> threadGrouper;
         if (arguments.boolFlag("combine-all")) {
             threadGrouper = ThreadGrouper.AS_ONE;
         } else if (arguments.boolFlag("not-combined")) {
```
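Switching the parsed flag from a `ThreadGrouper` to a `Supplier<ThreadGrouper>` means every profiler start materialises a fresh grouper instead of sharing one mutable instance; as the ThreadGrouper diff further down shows, `ByPool` and `AsOne` both carry per-run caches that should not leak between runs. A minimal sketch of the resulting call pattern:

```java
// The flag still selects a strategy, but it is now a factory...
Supplier<ThreadGrouper> threadGrouper = ThreadGrouper.BY_POOL;

// ...and each sampler gets its own instance, with an empty cache.
ThreadGrouper grouper = threadGrouper.get();
```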
```diff
diff --git a/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java b/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java
index 364edd6..4d34d4a 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java
@@ -20,6 +20,7 @@
 
 package me.lucko.spark.common.heapdump;
 
+import com.google.common.annotations.VisibleForTesting;
 import me.lucko.spark.common.SparkPlatform;
 import me.lucko.spark.common.command.sender.CommandSender;
 import me.lucko.spark.proto.SparkHeapProtos.HeapData;
@@ -123,6 +124,11 @@ public final class HeapDumpSummary {
         this.entries = entries;
     }
 
+    @VisibleForTesting
+    List<Entry> getEntries() {
+        return this.entries;
+    }
+
     public HeapData toProto(SparkPlatform platform, CommandSender.Data creator) {
         HeapMetadata.Builder metadata = HeapMetadata.newBuilder()
                 .setPlatformMetadata(platform.getPlugin().getPlatformInfo().toData().toProto())
@@ -186,6 +192,16 @@ public final class HeapDumpSummary {
                     .setType(this.type)
                     .build();
         }
+
+        @Override
+        public String toString() {
+            return "Entry{" +
+                    "order=" + this.order +
+                    ", instances=" + this.instances +
+                    ", bytes=" + this.bytes +
+                    ", type='" + this.type + '\'' +
+                    '}';
+        }
     }
 
     public interface DiagnosticCommandMXBean {
```

```diff
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/MacosSysctl.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/MacosSysctl.java
new file mode 100644
index 0000000..c279f31
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/MacosSysctl.java
@@ -0,0 +1,67 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.monitor;
+
+import org.checkerframework.checker.nullness.qual.NonNull;
+
+import java.io.BufferedReader;
+import java.io.InputStreamReader;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Locale;
+
+/**
+ * Utility for reading from sysctl on macOS systems.
+ */
+public enum MacosSysctl {
+
+    SYSCTL("sysctl", "-a"),;
+
+    private static final boolean SUPPORTED = System.getProperty("os.name").toLowerCase(Locale.ROOT).replace(" ", "").equals("macosx");
+
+    private final String[] cmdArgs;
+
+    MacosSysctl(String... cmdArgs) {
+        this.cmdArgs = cmdArgs;
+    }
+
+    public @NonNull List<String> read() {
+        if (SUPPORTED) {
+            ProcessBuilder process = new ProcessBuilder(this.cmdArgs).redirectErrorStream(true);
+            try (BufferedReader buf = new BufferedReader(new InputStreamReader(process.start().getInputStream()))) {
+                List<String> lines = new ArrayList<>();
+
+                String line;
+                while ((line = buf.readLine()) != null) {
+                    lines.add(line);
+                }
+
+                return lines;
+            } catch (Exception e) {
+                // ignore
+            }
+        }
+
+        return Collections.emptyList();
+    }
+}
```
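With JUnit Jupiter now on the test classpath (see the spark-common/build.gradle hunk above), the new utility is easy to cover. A hypothetical test sketch, relying only on the `@NonNull` contract visible in the code:

```java
package me.lucko.spark.common.monitor;

import org.junit.jupiter.api.Test;

import java.util.List;

import static org.junit.jupiter.api.Assertions.assertNotNull;

class MacosSysctlTest {

    @Test
    void readNeverReturnsNull() {
        // On macOS this is real `sysctl -a` output; on every other platform
        // (or if the process fails) the contract is an empty, non-null list.
        List<String> lines = MacosSysctl.SYSCTL.read();
        assertNotNull(lines);
    }
}
```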
```diff
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuInfo.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuInfo.java
index 9954bd5..07875cc 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuInfo.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/cpu/CpuInfo.java
@@ -21,6 +21,7 @@
 package me.lucko.spark.common.monitor.cpu;
 
 import me.lucko.spark.common.monitor.LinuxProc;
+import me.lucko.spark.common.monitor.MacosSysctl;
 import me.lucko.spark.common.monitor.WindowsWmic;
 
 import java.util.regex.Pattern;
@@ -52,6 +53,12 @@ public enum CpuInfo {
             }
         }
 
+        for (String line : MacosSysctl.SYSCTL.read()) {
+            if (line.startsWith("machdep.cpu.brand_string:")) {
+                return line.substring("machdep.cpu.brand_string:".length()).trim();
+            }
+        }
+
         return "";
     }
```

```diff
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkInterfaceInfo.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkInterfaceInfo.java
index eed695e..c2ba1da 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkInterfaceInfo.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkInterfaceInfo.java
@@ -20,6 +20,7 @@
 
 package me.lucko.spark.common.monitor.net;
 
+import com.google.common.annotations.VisibleForTesting;
 import com.google.common.collect.ImmutableMap;
 import me.lucko.spark.common.monitor.LinuxProc;
 import org.checkerframework.checker.nullness.qual.NonNull;
@@ -200,7 +201,8 @@ public final class NetworkInterfaceInfo {
 
     private static final Pattern PROC_NET_DEV_PATTERN = Pattern.compile("^\\s*(\\w+):([\\d\\s]+)$");
 
-    private static @NonNull Map<String, NetworkInterfaceInfo> read(List<String> output) {
+    @VisibleForTesting
+    static @NonNull Map<String, NetworkInterfaceInfo> read(List<String> output) {
         // Inter-|   Receive                                                |  Transmit
         //  face |bytes    packets errs drop fifo frame compressed multicast|bytes    packets errs drop fifo colls carrier compressed
         //     lo: 2776770   11307    0    0    0     0          0         0  2776770   11307    0    0    0     0       0          0
```

```diff
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/BackgroundSamplerManager.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/BackgroundSamplerManager.java
index 4e9ca9e..1889304 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/BackgroundSamplerManager.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/BackgroundSamplerManager.java
@@ -22,8 +22,9 @@ package me.lucko.spark.common.sampler;
 
 import me.lucko.spark.common.SparkPlatform;
 import me.lucko.spark.common.platform.PlatformInfo;
-import me.lucko.spark.common.util.Configuration;
+import me.lucko.spark.common.util.config.Configuration;
 
+import java.util.function.Supplier;
 import java.util.logging.Level;
 
 public class BackgroundSamplerManager {
@@ -103,7 +104,7 @@ public class BackgroundSamplerManager {
     private void startSampler() {
         boolean forceJavaEngine = this.configuration.getString(OPTION_ENGINE, "async").equals("java");
 
-        ThreadGrouper threadGrouper = ThreadGrouper.parseConfigSetting(this.configuration.getString(OPTION_THREAD_GROUPER, "by-pool"));
+        Supplier<ThreadGrouper> threadGrouper = ThreadGrouper.parseConfigSetting(this.configuration.getString(OPTION_THREAD_GROUPER, "by-pool"));
 
         ThreadDumper threadDumper = ThreadDumper.parseConfigSetting(this.configuration.getString(OPTION_THREAD_DUMPER, "default"));
         if (threadDumper == null) {
             threadDumper = this.platform.getPlugin().getDefaultThreadDumper();
```
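Relaxing `read(List<String>)` to package-private under `@VisibleForTesting` is what lets a test feed in canned `/proc/net/dev` output rather than requiring a Linux host. A hypothetical sketch using the sample quoted in the parser's own comment (that the returned map is keyed by interface name is an assumption):

```java
package me.lucko.spark.common.monitor.net;

import org.junit.jupiter.api.Test;

import java.util.Arrays;
import java.util.List;
import java.util.Map;

import static org.junit.jupiter.api.Assertions.assertTrue;

class NetworkInterfaceInfoTest {

    @Test
    void parsesProcNetDevOutput() {
        // The /proc/net/dev sample from the comment in read() above.
        List<String> output = Arrays.asList(
                "Inter-|   Receive                                                |  Transmit",
                " face |bytes    packets errs drop fifo frame compressed multicast|bytes    packets errs drop fifo colls carrier compressed",
                "    lo: 2776770   11307    0    0    0     0          0         0  2776770   11307    0    0    0     0       0          0"
        );

        Map<String, NetworkInterfaceInfo> interfaces = NetworkInterfaceInfo.read(output);
        assertTrue(interfaces.containsKey("lo"));
    }
}
```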
"async").equals("java"); - ThreadGrouper threadGrouper = ThreadGrouper.parseConfigSetting(this.configuration.getString(OPTION_THREAD_GROUPER, "by-pool")); + Supplier<ThreadGrouper> threadGrouper = ThreadGrouper.parseConfigSetting(this.configuration.getString(OPTION_THREAD_GROUPER, "by-pool")); ThreadDumper threadDumper = ThreadDumper.parseConfigSetting(this.configuration.getString(OPTION_THREAD_DUMPER, "default")); if (threadDumper == null) { threadDumper = this.platform.getPlugin().getDefaultThreadDumper(); diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java index b6895ce..3046d92 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java @@ -28,6 +28,7 @@ import me.lucko.spark.common.sampler.java.JavaSampler; import me.lucko.spark.common.tick.TickHook; import java.util.concurrent.TimeUnit; +import java.util.function.Supplier; /** * Builds {@link Sampler} instances. @@ -44,7 +45,7 @@ public class SamplerBuilder { private long autoEndTime = -1; private boolean background = false; private ThreadDumper threadDumper = ThreadDumper.ALL; - private ThreadGrouper threadGrouper = ThreadGrouper.BY_NAME; + private Supplier<ThreadGrouper> threadGrouper = ThreadGrouper.BY_NAME; private int ticksOver = -1; private TickHook tickHook = null; @@ -80,7 +81,7 @@ public class SamplerBuilder { return this; } - public SamplerBuilder threadGrouper(ThreadGrouper threadGrouper) { + public SamplerBuilder threadGrouper(Supplier<ThreadGrouper> threadGrouper) { this.threadGrouper = threadGrouper; return this; } @@ -131,7 +132,7 @@ public class SamplerBuilder { this.samplingInterval ); - SamplerSettings settings = new SamplerSettings(interval, this.threadDumper, this.threadGrouper, this.autoEndTime, this.background); + SamplerSettings settings = new SamplerSettings(interval, this.threadDumper, this.threadGrouper.get(), this.autoEndTime, this.background); Sampler sampler; if (this.mode == SamplerMode.ALLOCATION) { diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java index b6cfbea..c8d5b3c 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java @@ -26,6 +26,7 @@ import java.util.Collections; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; +import java.util.function.Supplier; import java.util.regex.Matcher; import java.util.regex.Pattern; @@ -64,7 +65,7 @@ public interface ThreadGrouper { * @param setting the config setting * @return the thread grouper */ - static ThreadGrouper parseConfigSetting(String setting) { + static Supplier<ThreadGrouper> parseConfigSetting(String setting) { switch (setting) { case "as-one": return AS_ONE; @@ -76,9 +77,14 @@ public interface ThreadGrouper { } /** + * Supplier for {@link ByName} thread groupers. + */ + Supplier<ThreadGrouper> BY_NAME = ByName::new; + + /** * Implementation of {@link ThreadGrouper} that just groups by thread name. 
```diff
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java
index b6cfbea..c8d5b3c 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java
@@ -26,6 +26,7 @@ import java.util.Collections;
 import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
+import java.util.function.Supplier;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
@@ -64,7 +65,7 @@ public interface ThreadGrouper {
      * @param setting the config setting
      * @return the thread grouper
      */
-    static ThreadGrouper parseConfigSetting(String setting) {
+    static Supplier<ThreadGrouper> parseConfigSetting(String setting) {
         switch (setting) {
             case "as-one":
                 return AS_ONE;
@@ -76,9 +77,14 @@ public interface ThreadGrouper {
     }
 
     /**
+     * Supplier for {@link ByName} thread groupers.
+     */
+    Supplier<ThreadGrouper> BY_NAME = ByName::new;
+
+    /**
      * Implementation of {@link ThreadGrouper} that just groups by thread name.
      */
-    ThreadGrouper BY_NAME = new ThreadGrouper() {
+    class ByName implements ThreadGrouper {
         @Override
         public String getGroup(long threadId, String threadName) {
             return threadName;
@@ -93,7 +99,12 @@ public interface ThreadGrouper {
         public SamplerMetadata.DataAggregator.ThreadGrouper asProto() {
             return SamplerMetadata.DataAggregator.ThreadGrouper.BY_NAME;
         }
-    };
+    }
+
+    /**
+     * Supplier for {@link ByPool} thread groupers.
+     */
+    Supplier<ThreadGrouper> BY_POOL = ByPool::new;
 
     /**
      * Implementation of {@link ThreadGrouper} that attempts to group by the name of the pool
@@ -102,8 +113,8 @@ public interface ThreadGrouper {
      * <p>The regex pattern used to match pools expects a digit at the end of the thread name,
      * separated from the pool name with any of one or more of ' ', '-', or '#'.</p>
      */
-    ThreadGrouper BY_POOL = new ThreadGrouper() {
-        private /* static */ final Pattern pattern = Pattern.compile("^(.*?)[-# ]+\\d+$");
+    class ByPool implements ThreadGrouper {
+        private static final Pattern PATTERN = Pattern.compile("^(.*?)[-# ]+\\d+$");
 
         // thread id -> group
         private final Map<Long, String> cache = new ConcurrentHashMap<>();
@@ -117,7 +128,7 @@ public interface ThreadGrouper {
                 return cached;
             }
 
-            Matcher matcher = this.pattern.matcher(threadName);
+            Matcher matcher = PATTERN.matcher(threadName);
             if (!matcher.matches()) {
                 return threadName;
             }
@@ -141,13 +152,18 @@ public interface ThreadGrouper {
         public SamplerMetadata.DataAggregator.ThreadGrouper asProto() {
             return SamplerMetadata.DataAggregator.ThreadGrouper.BY_POOL;
         }
-    };
+    }
+
+    /**
+     * Supplier for {@link AsOne} thread groupers.
+     */
+    Supplier<ThreadGrouper> AS_ONE = AsOne::new;
 
     /**
      * Implementation of {@link ThreadGrouper} which groups all threads as one, under
      * the name "All".
      */
-    ThreadGrouper AS_ONE = new ThreadGrouper() {
+    class AsOne implements ThreadGrouper {
         private final Set<Long> seen = ConcurrentHashMap.newKeySet();
 
         @Override
```
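Turning the anonymous `BY_NAME`/`BY_POOL`/`AS_ONE` singletons into the named classes `ByName`, `ByPool`, and `AsOne` (with the old constants becoming `Supplier` references) is what makes the groupers unit-testable in isolation. A hypothetical test of the pool regex shown above:

```java
package me.lucko.spark.common.sampler;

import org.junit.jupiter.api.Test;

import static org.junit.jupiter.api.Assertions.assertEquals;

class ThreadGrouperTest {

    @Test
    void byPoolGroupsNumberedThreadNames() {
        ThreadGrouper grouper = ThreadGrouper.BY_POOL.get();

        // "pool name + separator + digits" collapses to the pool name...
        assertEquals("Craft Scheduler Thread", grouper.getGroup(1, "Craft Scheduler Thread - 1"));

        // ...while a name without a trailing number is returned unchanged.
        assertEquals("Server thread", grouper.getGroup(2, "Server thread"));
    }
}
```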
