author    | Luck <git@lucko.me> | 2024-07-29 22:54:12 +0100
committer | Luck <git@lucko.me> | 2024-07-29 22:54:12 +0100
commit    | 1fb00b191cb3efd5b44d18fd2730ac6683626d67 (patch)
tree      | 04d02538d8afbf9e9867c81e3768ebe4bc66c5d0
parent    | 6e7cc883d2d716bfcfcf871956f2acb995474d2d (diff)
Add more automated tests
11 files changed, 255 insertions, 25 deletions
diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
index 11c419c..919fb95 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
@@ -317,6 +317,10 @@ public class SparkPlatform {
         return this.serverNormalOperationStartTime;
     }
 
+    public boolean hasEnabled() {
+        return this.enabled.get();
+    }
+
     public Path resolveSaveFile(String prefix, String extension) {
         Path pluginFolder = this.plugin.getPluginDirectory();
         try {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java b/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java
index 4d34d4a..52140d5 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java
@@ -20,12 +20,12 @@
 
 package me.lucko.spark.common.heapdump;
 
-import com.google.common.annotations.VisibleForTesting;
 import me.lucko.spark.common.SparkPlatform;
 import me.lucko.spark.common.command.sender.CommandSender;
 import me.lucko.spark.proto.SparkHeapProtos.HeapData;
 import me.lucko.spark.proto.SparkHeapProtos.HeapEntry;
 import me.lucko.spark.proto.SparkHeapProtos.HeapMetadata;
+import org.jetbrains.annotations.VisibleForTesting;
 import org.objectweb.asm.Type;
 
 import javax.management.JMX;
diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkInterfaceInfo.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkInterfaceInfo.java
index c2ba1da..01bd3a7 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkInterfaceInfo.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/net/NetworkInterfaceInfo.java
@@ -20,10 +20,10 @@
 
 package me.lucko.spark.common.monitor.net;
 
-import com.google.common.annotations.VisibleForTesting;
 import com.google.common.collect.ImmutableMap;
 import me.lucko.spark.common.monitor.LinuxProc;
 import org.checkerframework.checker.nullness.qual.NonNull;
+import org.jetbrains.annotations.VisibleForTesting;
 
 import java.util.Arrays;
 import java.util.Collections;
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java
index 7dcb131..84aaa95 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncProfilerAccess.java
@@ -73,14 +73,10 @@ public class AsyncProfilerAccess {
 
         try {
             profiler = load(platform);
-
             if (isEventSupported(profiler, ProfilingEvent.ALLOC, false)) {
                 allocationProfilingEvent = ProfilingEvent.ALLOC;
             }
-
-            if (isEventSupported(profiler, ProfilingEvent.CPU, false)) {
-                profilingEvent = ProfilingEvent.CPU;
-            } else if (isEventSupported(profiler, ProfilingEvent.WALL, true)) {
+            if (isEventSupported(profiler, ProfilingEvent.WALL, true)) {
                 profilingEvent = ProfilingEvent.WALL;
             }
         } catch (Exception e) {
@@ -213,8 +209,7 @@ public class AsyncProfilerAccess {
         return false;
     }
 
-    enum ProfilingEvent {
-        CPU(Events.CPU),
+    public enum ProfilingEvent {
         WALL(Events.WALL),
         ALLOC(Events.ALLOC);
 
diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/JavaVersion.java b/spark-common/src/main/java/me/lucko/spark/common/util/JavaVersion.java
index 1dad75b..23120ea 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/util/JavaVersion.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/util/JavaVersion.java
@@ -20,9 +20,9 @@
 
 package me.lucko.spark.common.util;
 
-import com.google.common.annotations.VisibleForTesting;
+import org.jetbrains.annotations.VisibleForTesting;
 
-public class JavaVersion {
+public enum JavaVersion {
+    ;
 
     private static final int JAVA_VERSION;
diff --git a/spark-common/src/test/java/me/lucko/spark/common/SparkPlatformTest.java b/spark-common/src/test/java/me/lucko/spark/common/SparkPlatformTest.java
index ec3638f..10da849 100644
--- a/spark-common/src/test/java/me/lucko/spark/common/SparkPlatformTest.java
+++ b/spark-common/src/test/java/me/lucko/spark/common/SparkPlatformTest.java
@@ -20,27 +20,21 @@
 
 package me.lucko.spark.common;
 
-import me.lucko.spark.test.plugin.TestCommandSender;
 import me.lucko.spark.test.plugin.TestSparkPlugin;
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.io.TempDir;
 
 import java.nio.file.Path;
 
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
 public class SparkPlatformTest {
 
     @Test
     public void testEnableDisable(@TempDir Path directory) {
-        System.setProperty("spark.backgroundProfiler", "false");
-
-        SparkPlatform platform = new SparkPlatform(new TestSparkPlugin(directory));
-        platform.enable();
-
-        platform.executeCommand(TestCommandSender.INSTANCE, new String[]{"help"}).join();
-        platform.executeCommand(TestCommandSender.INSTANCE, new String[]{"profiler", "info"}).join();
-        platform.executeCommand(TestCommandSender.INSTANCE, new String[]{"health"}).join();
-
-        platform.disable();
+        try (TestSparkPlugin plugin = new TestSparkPlugin(directory)) {
+            assertTrue(plugin.platform().hasEnabled());
+        }
     }
 
 }
diff --git a/spark-common/src/test/java/me/lucko/spark/common/heapdump/HeapDumpSummaryTest.java b/spark-common/src/test/java/me/lucko/spark/common/heapdump/HeapDumpSummaryTest.java
index 42492d1..b2bb384 100644
--- a/spark-common/src/test/java/me/lucko/spark/common/heapdump/HeapDumpSummaryTest.java
+++ b/spark-common/src/test/java/me/lucko/spark/common/heapdump/HeapDumpSummaryTest.java
@@ -20,9 +20,14 @@
 
 package me.lucko.spark.common.heapdump;
 
+import me.lucko.spark.proto.SparkHeapProtos;
 import me.lucko.spark.test.TestClass;
+import me.lucko.spark.test.plugin.TestCommandSender;
+import me.lucko.spark.test.plugin.TestSparkPlugin;
 import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.io.TempDir;
 
+import java.nio.file.Path;
 import java.util.List;
 
 import static org.junit.jupiter.api.Assertions.assertEquals;
@@ -31,7 +36,7 @@ import static org.junit.jupiter.api.Assertions.assertNotNull;
 
 public class HeapDumpSummaryTest {
 
     @Test
-    public void testHeapDumpSummary() throws Exception {
+    public void testHeapDumpSummary(@TempDir Path directory) throws Exception {
         TestClass testClass1 = new TestClass();
         TestClass testClass2 = new TestClass();
@@ -42,6 +47,17 @@ public class HeapDumpSummaryTest {
         assertNotNull(thisClassEntry);
         assertEquals(2, thisClassEntry.getInstances());
         assertEquals(32, thisClassEntry.getBytes());
+
+        SparkHeapProtos.HeapData proto;
+        try (TestSparkPlugin plugin = new TestSparkPlugin(directory)) {
+            proto = dump.toProto(plugin.platform(), TestCommandSender.INSTANCE.toData());
+        }
+        assertNotNull(proto);
+
+        SparkHeapProtos.HeapEntry protoEntry = proto.getEntriesList().stream().filter(entry -> entry.getType().equals(TestClass.class.getName())).findAny().orElse(null);
+        assertNotNull(protoEntry);
+        assertEquals(2, protoEntry.getInstances());
+        assertEquals(32, protoEntry.getSize());
     }
 
 }
diff --git a/spark-common/src/test/java/me/lucko/spark/common/platform/PlatformStatisticsProviderTest.java b/spark-common/src/test/java/me/lucko/spark/common/platform/PlatformStatisticsProviderTest.java
new file mode 100644
index 0000000..6726461
--- /dev/null
+++ b/spark-common/src/test/java/me/lucko/spark/common/platform/PlatformStatisticsProviderTest.java
@@ -0,0 +1,30 @@
+package me.lucko.spark.common.platform;
+
+import me.lucko.spark.proto.SparkProtos;
+import me.lucko.spark.test.plugin.TestSparkPlugin;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.io.TempDir;
+
+import java.nio.file.Path;
+
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+
+public class PlatformStatisticsProviderTest {
+
+    @Test
+    public void testSystemStatistics(@TempDir Path directory) {
+        try (TestSparkPlugin plugin = new TestSparkPlugin(directory)) {
+            SparkProtos.SystemStatistics systemStatistics = new PlatformStatisticsProvider(plugin.platform()).getSystemStatistics();
+            assertNotNull(systemStatistics);
+        }
+    }
+
+    @Test
+    public void testPlatformStatistics(@TempDir Path directory) {
+        try (TestSparkPlugin plugin = new TestSparkPlugin(directory)) {
+            SparkProtos.PlatformStatistics platformStatistics = new PlatformStatisticsProvider(plugin.platform()).getPlatformStatistics(null, true);
+            assertNotNull(platformStatistics);
+        }
+    }
+
+}
diff --git a/spark-common/src/test/java/me/lucko/spark/common/sampler/SamplerTest.java b/spark-common/src/test/java/me/lucko/spark/common/sampler/SamplerTest.java
new file mode 100644
index 0000000..7db14a3
--- /dev/null
+++ b/spark-common/src/test/java/me/lucko/spark/common/sampler/SamplerTest.java
@@ -0,0 +1,105 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.sampler;
+
+import me.lucko.spark.common.sampler.async.AsyncSampler;
+import me.lucko.spark.common.sampler.java.JavaSampler;
+import me.lucko.spark.common.sampler.node.MergeMode;
+import me.lucko.spark.common.sampler.source.ClassSourceLookup;
+import me.lucko.spark.common.util.MethodDisambiguator;
+import me.lucko.spark.proto.SparkSamplerProtos;
+import me.lucko.spark.test.TestClass2;
+import me.lucko.spark.test.plugin.TestCommandSender;
+import me.lucko.spark.test.plugin.TestSparkPlugin;
+import org.junit.jupiter.api.io.TempDir;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.EnumSource;
+
+import java.nio.file.Path;
+import java.util.List;
+import java.util.Locale;
+import java.util.concurrent.TimeUnit;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertInstanceOf;
+import static org.junit.jupiter.api.Assertions.assertNotEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assumptions.assumeTrue;
+
+public class SamplerTest {
+
+    @ParameterizedTest
+    @EnumSource
+    public void testSampler(ProfilerType profilerType, @TempDir Path directory) {
+        if (profilerType == ProfilerType.ASYNC) {
+            String os = System.getProperty("os.name").toLowerCase(Locale.ROOT).replace(" ", "");
+            assumeTrue(os.equals("linux") || os.equals("macosx"), "async profiler is only supported on Linux and macOS");
+        }
+
+        Thread thread = new Thread(new TestClass2(), "Test Thread");
+        thread.start();
+
+        try (TestSparkPlugin plugin = new TestSparkPlugin(directory)) {
+            Sampler sampler = new SamplerBuilder()
+                    .threadDumper(new ThreadDumper.Specific(thread))
+                    .threadGrouper(ThreadGrouper.BY_POOL)
+                    .samplingInterval(10)
+                    .forceJavaSampler(profilerType == ProfilerType.JAVA)
+                    .completeAfter(2, TimeUnit.SECONDS)
+                    .start(plugin.platform());
+
+            assertInstanceOf(profilerType.expectedClass, sampler);
+
+            assertNotEquals(-1, sampler.getAutoEndTime());
+            sampler.getFuture().join();
+
+            Sampler.ExportProps exportProps = new Sampler.ExportProps()
+                    .creator(TestCommandSender.INSTANCE.toData())
+                    .mergeMode(() -> MergeMode.sameMethod(new MethodDisambiguator(plugin.platform().createClassFinder())))
+                    .classSourceLookup(() -> ClassSourceLookup.create(plugin.platform()));
+
+            SparkSamplerProtos.SamplerData proto = sampler.toProto(plugin.platform(), exportProps);
+            assertNotNull(proto);
+
+            List<SparkSamplerProtos.ThreadNode> threads = proto.getThreadsList();
+            assertEquals(1, threads.size());
+
+            SparkSamplerProtos.ThreadNode protoThread = threads.get(0);
+            assertEquals("Test Thread", protoThread.getName());
+            assertTrue(protoThread.getChildrenList().stream().anyMatch(n -> n.getClassName().equals("me.lucko.spark.test.TestClass2") && n.getMethodName().equals("test")));
+            assertTrue(protoThread.getChildrenList().stream().anyMatch(n -> n.getClassName().equals("me.lucko.spark.test.TestClass2") && n.getMethodName().equals("testA")));
+            assertTrue(protoThread.getChildrenList().stream().anyMatch(n -> n.getClassName().equals("me.lucko.spark.test.TestClass2") && n.getMethodName().equals("testB")));
+        }
+    }
+
+    public enum ProfilerType {
+        JAVA(JavaSampler.class),
+        ASYNC(AsyncSampler.class);
+
+        private final Class<? extends Sampler> expectedClass;
+
+        ProfilerType(Class<? extends Sampler> expectedClass) {
+            this.expectedClass = expectedClass;
+        }
+    }
+
+}
diff --git a/spark-common/src/test/java/me/lucko/spark/test/TestClass2.java b/spark-common/src/test/java/me/lucko/spark/test/TestClass2.java
new file mode 100644
index 0000000..17ee88e
--- /dev/null
+++ b/spark-common/src/test/java/me/lucko/spark/test/TestClass2.java
@@ -0,0 +1,52 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.test;
+
+public class TestClass2 implements Runnable {
+
+    @Override
+    public void run() {
+        while (true) {
+            try {
+                test();
+            } catch (InterruptedException e) {
+                return;
+            }
+        }
+    }
+
+    public void test() throws InterruptedException {
+        for (int i = 0; i < 10; i++) {
+            testA();
+            Thread.sleep(100);
+            testB();
+        }
+    }
+
+    public void testA() throws InterruptedException {
+        Thread.sleep(100);
+    }
+
+    public void testB() throws InterruptedException {
+        Thread.sleep(100);
+
+    }
+}
diff --git a/spark-common/src/test/java/me/lucko/spark/test/plugin/TestSparkPlugin.java b/spark-common/src/test/java/me/lucko/spark/test/plugin/TestSparkPlugin.java
index 423e3e3..866a2b6 100644
--- a/spark-common/src/test/java/me/lucko/spark/test/plugin/TestSparkPlugin.java
+++ b/spark-common/src/test/java/me/lucko/spark/test/plugin/TestSparkPlugin.java
@@ -20,26 +20,55 @@
 
 package me.lucko.spark.test.plugin;
 
+import me.lucko.spark.common.SparkPlatform;
 import me.lucko.spark.common.SparkPlugin;
 import me.lucko.spark.common.command.sender.CommandSender;
 import me.lucko.spark.common.platform.PlatformInfo;
+import me.lucko.spark.common.util.classfinder.ClassFinder;
+import me.lucko.spark.common.util.classfinder.FallbackClassFinder;
 
 import java.nio.file.Path;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
 import java.util.concurrent.Executors;
 import java.util.concurrent.ScheduledExecutorService;
 import java.util.logging.Level;
 import java.util.logging.Logger;
 import java.util.stream.Stream;
 
-public class TestSparkPlugin implements SparkPlugin {
+public class TestSparkPlugin implements SparkPlugin, AutoCloseable {
 
     private static final Logger LOGGER = Logger.getLogger("spark-test");
     private static final ScheduledExecutorService EXECUTOR_SERVICE = Executors.newScheduledThreadPool(16);
 
     private final Path directory;
+    private final Map<String, String> props;
 
-    public TestSparkPlugin(Path directory) {
+    private final SparkPlatform platform;
+
+    public TestSparkPlugin(Path directory, Map<String, String> config) {
         this.directory = directory;
+        this.props = new HashMap<>(config);
+        this.props.putIfAbsent("backgroundProfiler", "false");
+
+        this.props.forEach((k, v) -> System.setProperty("spark." + k, v));
+        this.platform = new SparkPlatform(this);
+        this.platform.enable();
+    }
+
+    public TestSparkPlugin(Path directory) {
+        this(directory, Collections.emptyMap());
+    }
+
+    public SparkPlatform platform() {
+        return this.platform;
+    }
+
+    @Override
+    public void close() {
+        this.platform.disable();
+        this.props.keySet().forEach((k) -> System.clearProperty("spark." + k));
     }
 
     @Override
@@ -101,4 +130,9 @@ public class TestSparkPlugin implements SparkPlugin {
             }
         };
     }
+
+    @Override
+    public ClassFinder createClassFinder() {
+        return FallbackClassFinder.INSTANCE;
+    }
 }
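The reworked TestSparkPlugin fixture above also gains a two-argument constructor, TestSparkPlugin(Path, Map<String, String>), which exposes each config entry to SparkPlatform as a "spark.<key>" system property and clears it again in close(); none of the tests in this commit exercise that overload directly. A minimal sketch of how a follow-up test might use it (the test class name and the explicit config override here are hypothetical, not part of this commit):

```java
package me.lucko.spark.common;

import me.lucko.spark.test.plugin.TestSparkPlugin;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;

import java.nio.file.Path;
import java.util.Map;

import static org.junit.jupiter.api.Assertions.assertTrue;

// Hypothetical example, not part of the commit: exercises the new
// TestSparkPlugin(Path, Map) constructor added in this change.
public class SparkPlatformConfigTest {

    @Test
    public void testEnableWithConfigOverride(@TempDir Path directory) {
        // Each entry becomes a "spark.<key>" system property for the lifetime
        // of the try block; the fixture clears it again in close().
        // "backgroundProfiler" already defaults to "false" in the fixture, so
        // passing it explicitly only demonstrates the override mechanism.
        Map<String, String> config = Map.of("backgroundProfiler", "false");

        try (TestSparkPlugin plugin = new TestSparkPlugin(directory, config)) {
            assertTrue(plugin.platform().hasEnabled());
        }
    }
}
```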