| author | embeddedt <42941056+embeddedt@users.noreply.github.com> | 2022-07-11 12:17:35 -0400 |
|---|---|---|
| committer | embeddedt <42941056+embeddedt@users.noreply.github.com> | 2022-07-11 12:17:35 -0400 |
| commit | 9e477ace0acb3ba3f8d48841922b9b1eb2d2bf1e (patch) | |
| tree | 799200e997f98da276792f16b6f12e3c6f1483b5 /spark-common | |
| parent | ecc3714e6441ace0eb78156b2b4475ca050280db (diff) | |
| parent | a10f966a443d56845a5efb1e65232e6b87eabb96 (diff) | |
| download | spark-9e477ace0acb3ba3f8d48841922b9b1eb2d2bf1e.tar.gz spark-9e477ace0acb3ba3f8d48841922b9b1eb2d2bf1e.tar.bz2 spark-9e477ace0acb3ba3f8d48841922b9b1eb2d2bf1e.zip | |
Merge remote-tracking branch 'lucko/master' into forge-1.7.10
Diffstat (limited to 'spark-common')
24 files changed, 994 insertions, 119 deletions
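Two themes dominate this merge as far as spark-common is concerned: a refactor of the server-config export (the enum-driven `AbstractServerConfigProvider` gives way to pluggable `ConfigParser` implementations plus an `ExcludedConfigFilter`), and a new world/entity statistics API (`WorldInfoProvider`, `ChunkInfo`, `CountMap`). As a quick illustration of how the extracted filter behaves, here is a minimal sketch; `ExcludedConfigFilter` and its path syntax come from the diff below, while the demo class and the JSON content are made up:

```java
import com.google.gson.Gson;
import com.google.gson.JsonElement;

import java.util.Arrays;

import me.lucko.spark.common.platform.serverconfig.ExcludedConfigFilter;

public class FilterDemo {
    public static void main(String[] args) {
        // "database.password" removes one nested key; "web*" removes every
        // top-level key starting with "web" (wildcard handling per the diff).
        ExcludedConfigFilter filter = new ExcludedConfigFilter(
                Arrays.asList("database.password", "web*"));

        JsonElement json = new Gson().fromJson(
                "{\"database\":{\"host\":\"localhost\",\"password\":\"secret\"},"
                        + "\"webhook-url\":\"https://example.com\",\"port\":25565}",
                JsonElement.class);

        System.out.println(filter.apply(json));
        // {"database":{"host":"localhost"},"port":25565}
    }
}
```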
```diff
diff --git a/spark-common/build.gradle b/spark-common/build.gradle
index 554eec2..bc493f3 100644
--- a/spark-common/build.gradle
+++ b/spark-common/build.gradle
@@ -15,15 +15,18 @@ dependencies {
     implementation 'com.squareup.okio:okio:1.17.3'
     implementation 'net.bytebuddy:byte-buddy-agent:1.11.0'
     implementation 'org.tukaani:xz:1.8'
-    api('net.kyori:adventure-api:4.9.3') {
+    api('net.kyori:adventure-api:4.11.0') {
+        exclude(module: 'adventure-bom')
         exclude(module: 'checker-qual')
         exclude(module: 'annotations')
     }
-    api('net.kyori:adventure-text-serializer-gson:4.9.3') {
+    api('net.kyori:adventure-text-serializer-gson:4.11.0') {
+        exclude(module: 'adventure-bom')
         exclude(module: 'adventure-api')
         exclude(module: 'gson')
     }
-    api('net.kyori:adventure-text-serializer-legacy:4.9.3') {
+    api('net.kyori:adventure-text-serializer-legacy:4.11.0') {
+        exclude(module: 'adventure-bom')
         exclude(module: 'adventure-api')
     }
     implementation('net.kyori:adventure-text-feature-pagination:4.0.0-SNAPSHOT') {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java
index b817df1..1116b04 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java
@@ -25,6 +25,7 @@ import me.lucko.spark.common.command.sender.CommandSender;
 import me.lucko.spark.common.monitor.ping.PlayerPingProvider;
 import me.lucko.spark.common.platform.PlatformInfo;
 import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
 import me.lucko.spark.common.sampler.ThreadDumper;
 import me.lucko.spark.common.tick.TickHook;
 import me.lucko.spark.common.tick.TickReporter;
@@ -75,6 +76,15 @@ public interface SparkPlugin {
     void executeAsync(Runnable task);
 
     /**
+     * Executes the given {@link Runnable} on the server/client main thread.
+     *
+     * @param task the task
+     */
+    default void executeSync(Runnable task) {
+        throw new UnsupportedOperationException();
+    }
+
+    /**
      * Print to the plugin logger.
      *
      * @param level the log level
@@ -143,6 +153,15 @@ public interface SparkPlugin {
     }
 
     /**
+     * Creates a world info provider.
+     *
+     * @return the world info provider function
+     */
+    default WorldInfoProvider createWorldInfoProvider() {
+        return WorldInfoProvider.NO_OP;
+    }
+
+    /**
      * Gets information for the platform.
      *
      * @return information about the platform
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/ActivityLogModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/ActivityLogModule.java
index b777f3e..6252ac7 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/ActivityLogModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/ActivityLogModule.java
@@ -50,6 +50,7 @@ import static net.kyori.adventure.text.format.TextDecoration.BOLD;
 public class ActivityLogModule implements CommandModule, RowRenderer<Activity> {
 
     private final Pagination.Builder pagination = Pagination.builder()
+            .width(45)
             .renderer(new Renderer() {
                 @Override
                 public Component renderEmpty() {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
index 970d062..fd5cd67 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java
@@ -266,7 +266,7 @@ public class SamplerModule implements CommandModule {
         if (this.activeSampler == null) {
             resp.replyPrefixed(text("There isn't an active profiler running."));
         } else {
-            long timeout = this.activeSampler.getEndTime();
+            long timeout = this.activeSampler.getAutoEndTime();
             if (timeout == -1) {
                 resp.replyPrefixed(text("There is an active profiler currently running, with no defined timeout."));
             } else {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java
index f35bbbe..49cfed5 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java
@@ -30,8 +30,11 @@ import me.lucko.spark.common.monitor.net.NetworkInterfaceAverages;
 import me.lucko.spark.common.monitor.net.NetworkMonitor;
 import me.lucko.spark.common.monitor.ping.PingStatistics;
 import me.lucko.spark.common.monitor.tick.TickStatistics;
+import me.lucko.spark.common.platform.world.WorldInfoProvider;
+import me.lucko.spark.common.platform.world.WorldStatisticsProvider;
 import me.lucko.spark.proto.SparkProtos.PlatformStatistics;
 import me.lucko.spark.proto.SparkProtos.SystemStatistics;
+import me.lucko.spark.proto.SparkProtos.WorldStatistics;
 
 import java.lang.management.ManagementFactory;
 import java.lang.management.MemoryUsage;
@@ -182,6 +185,18 @@ public class PlatformStatisticsProvider {
             builder.setPlayerCount(playerCount);
         }
 
+        try {
+            WorldInfoProvider worldInfo = this.platform.getPlugin().createWorldInfoProvider();
+            WorldStatisticsProvider worldStatisticsProvider = new WorldStatisticsProvider(this.platform, worldInfo);
+            WorldStatistics worldStatistics = worldStatisticsProvider.getWorldStatistics();
+            if (worldStatistics != null) {
+                builder.setWorld(worldStatistics);
+            }
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+
+
         return builder.build();
     }
 
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java
index ead2131..559ae95 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/AbstractServerConfigProvider.java
@@ -22,45 +22,39 @@ package me.lucko.spark.common.platform.serverconfig;
 
 import com.google.common.collect.ImmutableMap;
 import com.google.gson.JsonElement;
-import com.google.gson.JsonObject;
 
-import java.io.IOException;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
-import java.util.Deque;
-import java.util.LinkedList;
+import java.util.List;
 import java.util.Map;
-import java.util.stream.Collectors;
 
 /**
  * Abstract implementation of {@link ServerConfigProvider}.
  *
  * <p>This implementation is able to delete hidden paths from
  * the configurations before they are sent to the viewer.</p>
- *
- * @param <T> the file type
  */
-public abstract class AbstractServerConfigProvider<T extends Enum<T>> implements ServerConfigProvider {
-    private final Map<String, T> files;
-    private final Collection<String> hiddenPaths;
+public abstract class AbstractServerConfigProvider implements ServerConfigProvider {
+    private final Map<String, ConfigParser> files;
+    private final ExcludedConfigFilter hiddenPathFilters;
 
-    protected AbstractServerConfigProvider(Map<String, T> files, Collection<String> hiddenPaths) {
+    protected AbstractServerConfigProvider(Map<String, ConfigParser> files, Collection<String> hiddenPaths) {
         this.files = files;
-        this.hiddenPaths = hiddenPaths;
+        this.hiddenPathFilters = new ExcludedConfigFilter(hiddenPaths);
     }
 
     @Override
     public final Map<String, JsonElement> loadServerConfigurations() {
         ImmutableMap.Builder<String, JsonElement> builder = ImmutableMap.builder();
 
-        this.files.forEach((path, type) -> {
+        this.files.forEach((path, parser) -> {
             try {
-                JsonElement json = load(path, type);
-                if (json != null) {
-                    delete(json, this.hiddenPaths);
-                    builder.put(path, json);
+                JsonElement json = parser.load(path, this.hiddenPathFilters);
+                if (json == null) {
+                    return;
                 }
+                builder.put(path, json);
             } catch (Exception e) {
                 e.printStackTrace();
             }
@@ -69,68 +63,11 @@ public abstract class AbstractServerConfigProvider<T extends Enum<T>> implements
         return builder.build();
     }
 
-    /**
-     * Loads a file from the system.
-     *
-     * @param path the name of the file to load
-     * @param type the type of the file
-     * @return the loaded file
-     * @throws IOException if an error occurs performing i/o
-     */
-    protected abstract JsonElement load(String path, T type) throws IOException;
-
-    /**
-     * Deletes the given paths from the json element.
-     *
-     * @param json the json element
-     * @param paths the paths to delete
-     */
-    private static void delete(JsonElement json, Collection<String> paths) {
-        for (String path : paths) {
-            Deque<String> pathDeque = new LinkedList<>(Arrays.asList(path.split("\\.")));
-            delete(json, pathDeque);
-        }
-    }
-
-    private static void delete(JsonElement json, Deque<String> path) {
-        if (path.isEmpty()) {
-            return;
-        }
-        if (!json.isJsonObject()) {
-            return;
-        }
-
-        JsonObject jsonObject = json.getAsJsonObject();
-        String expected = path.removeFirst().replace("<dot>", ".");
-
-        Collection<String> keys;
-        if (expected.equals("*")) {
-            keys = jsonObject.entrySet().stream()
-                    .map(Map.Entry::getKey)
-                    .collect(Collectors.toList());
-        } else if (expected.endsWith("*")) {
-            String pattern = expected.substring(0, expected.length() - 1);
-            keys = jsonObject.entrySet().stream()
-                    .map(Map.Entry::getKey)
-                    .filter(key -> key.startsWith(pattern))
-                    .collect(Collectors.toList());
-        } else if (jsonObject.has(expected)) {
-            keys = Collections.singletonList(expected);
-        } else {
-            keys = Collections.emptyList();
-        }
-
-        for (String key : keys) {
-            if (path.isEmpty()) {
-                jsonObject.remove(key);
-            } else {
-                Deque<String> pathCopy = keys.size() > 1
-                        ? new LinkedList<>(path)
-                        : path;
-
-                delete(jsonObject.get(key), pathCopy);
-            }
-        }
+    protected static List<String> getSystemPropertyList(String property) {
+        String value = System.getProperty(property);
+        return value == null
+                ? Collections.emptyList()
+                : Arrays.asList(value.split(","));
     }
 }
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ConfigParser.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ConfigParser.java
new file mode 100644
index 0000000..675a32e
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ConfigParser.java
@@ -0,0 +1,48 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform.serverconfig;
+
+import com.google.gson.JsonElement;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.Map;
+
+public interface ConfigParser {
+
+    JsonElement load(String file, ExcludedConfigFilter filter) throws IOException;
+
+    default Map<String, Object> parse(Path file) throws IOException {
+        if (!Files.exists(file)) {
+            return null;
+        }
+
+        try (BufferedReader reader = Files.newBufferedReader(file, StandardCharsets.UTF_8)) {
+            return this.parse(reader);
+        }
+    }
+
+    Map<String, Object> parse(BufferedReader reader) throws IOException;
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ExcludedConfigFilter.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ExcludedConfigFilter.java
new file mode 100644
index 0000000..c11c7f8
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ExcludedConfigFilter.java
@@ -0,0 +1,97 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform.serverconfig;
+
+import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
+
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Deque;
+import java.util.LinkedList;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+/**
+ * Filtered excluded paths from {@link JsonElement}s (parsed configuration files).
+ */
+public class ExcludedConfigFilter {
+    private final Collection<String> pathsToExclude;
+
+    public ExcludedConfigFilter(Collection<String> pathsToExclude) {
+        this.pathsToExclude = pathsToExclude;
+    }
+
+    /**
+     * Deletes the excluded paths from the json element.
+     *
+     * @param json the json element
+     */
+    public JsonElement apply(JsonElement json) {
+        for (String path : this.pathsToExclude) {
+            Deque<String> pathDeque = new LinkedList<>(Arrays.asList(path.split("\\.")));
+            delete(json, pathDeque);
+        }
+        return json;
+    }
+
+    private static void delete(JsonElement json, Deque<String> path) {
+        if (path.isEmpty()) {
+            return;
+        }
+        if (!json.isJsonObject()) {
+            return;
+        }
+
+        JsonObject jsonObject = json.getAsJsonObject();
+        String expected = path.removeFirst().replace("<dot>", ".");
+
+        Collection<String> keys;
+        if (expected.equals("*")) {
+            keys = jsonObject.entrySet().stream()
+                    .map(Map.Entry::getKey)
+                    .collect(Collectors.toList());
+        } else if (expected.endsWith("*")) {
+            String pattern = expected.substring(0, expected.length() - 1);
+            keys = jsonObject.entrySet().stream()
+                    .map(Map.Entry::getKey)
+                    .filter(key -> key.startsWith(pattern))
+                    .collect(Collectors.toList());
+        } else if (jsonObject.has(expected)) {
+            keys = Collections.singletonList(expected);
+        } else {
+            keys = Collections.emptyList();
+        }
+
+        for (String key : keys) {
+            if (path.isEmpty()) {
+                jsonObject.remove(key);
+            } else {
+                Deque<String> pathCopy = keys.size() > 1
+                        ? new LinkedList<>(path)
+                        : path;
+
+                delete(jsonObject.get(key), pathCopy);
+            }
+        }
+    }
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesFileReader.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesConfigParser.java
index 8fc89d7..344ba1c 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesFileReader.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/PropertiesConfigParser.java
@@ -20,25 +20,38 @@
 
 package me.lucko.spark.common.platform.serverconfig;
 
-import java.io.FilterReader;
+import com.google.gson.Gson;
+import com.google.gson.JsonElement;
+
+import java.io.BufferedReader;
 import java.io.IOException;
-import java.io.Reader;
+import java.nio.file.Paths;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Properties;
 
 /**
- * A {@link Reader} that can parse a .properties file.
+ * A {@link ConfigParser} that can parse a .properties file.
  */
-public class PropertiesFileReader extends FilterReader {
+public enum PropertiesConfigParser implements ConfigParser {
+    INSTANCE;
+
+    private static final Gson GSON = new Gson();
+
+    @Override
+    public JsonElement load(String file, ExcludedConfigFilter filter) throws IOException {
+        Map<String, Object> values = this.parse(Paths.get(file));
+        if (values == null) {
+            return null;
+        }
 
-    public PropertiesFileReader(Reader in) {
-        super(in);
+        return filter.apply(GSON.toJsonTree(values));
     }
 
-    public Map<String, Object> readProperties() throws IOException {
+    @Override
+    public Map<String, Object> parse(BufferedReader reader) throws IOException {
         Properties properties = new Properties();
-        properties.load(this);
+        properties.load(reader);
 
         Map<String, Object> values = new HashMap<>();
         properties.forEach((k, v) -> {
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java
index 1fc2391..c66305f 100644
--- a/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/serverconfig/ServerConfigProvider.java
@@ -23,8 +23,8 @@ package me.lucko.spark.common.platform.serverconfig;
 
 import com.google.gson.JsonElement;
 
 import java.util.Collections;
+import java.util.LinkedHashMap;
 import java.util.Map;
-import java.util.stream.Collectors;
 
 /**
  * Function to export server configuration files for access within the spark viewer.
@@ -43,12 +43,9 @@ public interface ServerConfigProvider {
     Map<String, JsonElement> loadServerConfigurations();
 
     default Map<String, String> exportServerConfigurations() {
-        return loadServerConfigurations().entrySet()
-                .stream()
-                .collect(Collectors.toMap(
-                        Map.Entry::getKey,
-                        e -> e.getValue().toString()
-                ));
+        Map<String, String> map = new LinkedHashMap<>();
+        loadServerConfigurations().forEach((key, value) -> map.put(key, value.toString()));
+        return map;
     }
 
     /**
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/AbstractChunkInfo.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/AbstractChunkInfo.java
new file mode 100644
index 0000000..80026cd
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/AbstractChunkInfo.java
@@ -0,0 +1,55 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform.world;
+
+public abstract class AbstractChunkInfo<E> implements ChunkInfo<E> {
+    private final int x;
+    private final int z;
+
+    protected AbstractChunkInfo(int x, int z) {
+        this.x = x;
+        this.z = z;
+    }
+
+    @Override
+    public int getX() {
+        return this.x;
+    }
+
+    @Override
+    public int getZ() {
+        return this.z;
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+        if (obj == this) return true;
+        if (!(obj instanceof AbstractChunkInfo)) return false;
+        AbstractChunkInfo<?> that = (AbstractChunkInfo<?>) obj;
+        return this.x == that.x && this.z == that.z;
+    }
+
+    @Override
+    public int hashCode() {
+        return this.x ^ this.z;
+    }
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/ChunkInfo.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/ChunkInfo.java
new file mode 100644
index 0000000..2193a50
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/ChunkInfo.java
@@ -0,0 +1,44 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform.world;
+
+/**
+ * Information about a given chunk.
+ *
+ * @param <E> the type used to describe entities
+ */
+public interface ChunkInfo<E> {
+
+    int getX();
+
+    int getZ();
+
+    CountMap<E> getEntityCounts();
+
+    /**
+     * Converts entity type {@link E} to a string.
+     *
+     * @param type the entity type
+     * @return a string
+     */
+    String entityTypeName(E type);
+
+}
diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/world/CountMap.java b/spark-common/src/main/java/me/lucko/spark/common/platform/world/CountMap.java
new file mode 100644
index 0000000..3083266
--- /dev/null
+++ b/spark-common/src/main/java/me/lucko/spark/common/platform/world/CountMap.java
@@ -0,0 +1,110 @@
+/*
+ * This file is part of spark.
+ *
+ * Copyright (c) lucko (Luck) <luck@lucko.me>
+ * Copyright (c) contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package me.lucko.spark.common.platform.world;
+
+import java.util.EnumMap;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicInteger;
+
+/**
+ * A map of (key) -> count.
+ *
+ * @param <T> the key type
+ */
+public interface CountMap<T> {
+
+    /**
+     * Increment the counter for the given key
+     *
+     * @param key the key
+     */
+    void increment(T key);
+
+    /**
+     * Add to the counter for the given key
+     *
+     * @param key the key
+     */
+    void add(T key, int delta);
+
+    AtomicInteger total();
+
+    Map<T, AtomicInteger> asMap();
+
+    /**
+     * A simple {@link CountMap} backed by the provided {@link Map}
+     *
+     * @param <T> the key type
+     */
+    class Simple<T> implements CountMap<T> {
+        private final Map<T, AtomicInteger> counts;
+        private final AtomicInteger total;
+
+        public Simple(Map<T, AtomicInteger> counts) {
+            this.counts = counts;
+            this.total = new AtomicInteger();
+        }
+
+        @Override
+        public void increment(T key) {
+            AtomicInteger counter = this.counts.get(key);
+            if (counter == null) {
+                counter = new AtomicInteger();
+                this.counts.put(key, counter);
+            }
+            counter.incrementAndGet();
+            this.total.incrementAndGet();
+        }
+
+        @Override
+        public void add(T key, int delta) {
+            AtomicInteger counter = this.counts.get(key);
+            if (counter == null) {
+                counter = new AtomicInteger();
+                this.counts.put(key, counter);
+            }
+            counter.addAndGet(delta);
+            this.total.addAndGet(delta);
+        }
+
+        @Override
+        public AtomicInteger total() {
+            return this.total;
+        }
+
+        @Override
+        public Map<T, AtomicInteger> asMap() {
+            return this.counts;
+        }
+    }
+
+    /**
+     * A {@link CountMap} backed by an {@link EnumMap}.
+     *
```
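The diff view cuts off inside `CountMap.java` (the `EnumMap`-backed variant is not shown), but the visible parts are enough to show how the new counting API is used. A small usage sketch of the `HashMap`-backed `CountMap.Simple`; the entity ids are examples only:

```java
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;

import me.lucko.spark.common.platform.world.CountMap;

public class CountMapDemo {
    public static void main(String[] args) {
        // Simple wraps any Map<T, AtomicInteger> and tracks a running total.
        CountMap<String> counts = new CountMap.Simple<>(new HashMap<>());

        counts.increment("minecraft:zombie");
        counts.increment("minecraft:zombie");
        counts.add("minecraft:sheep", 5);

        System.out.println(counts.total()); // 7
        counts.asMap().forEach((type, n) ->
                System.out.println(type + " -> " + n)); // e.g. minecraft:zombie -> 2
    }
}
```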

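For completeness, a sketch of what a parser looks like against the new `ConfigParser` interface. This `JsonConfigParser` is hypothetical and not part of the commit (spark's real parsers, such as `PropertiesConfigParser` above, follow the same enum-singleton shape):

```java
package me.lucko.spark.common.platform.serverconfig;

import com.google.gson.Gson;
import com.google.gson.JsonElement;
import com.google.gson.reflect.TypeToken;

import java.io.BufferedReader;
import java.io.IOException;
import java.nio.file.Paths;
import java.util.Map;

// Hypothetical parser for .json configs, mirroring PropertiesConfigParser above.
public enum JsonConfigParser implements ConfigParser {
    INSTANCE;

    private static final Gson GSON = new Gson();

    @Override
    public JsonElement load(String file, ExcludedConfigFilter filter) throws IOException {
        // parse(Path) is the default method inherited from ConfigParser;
        // it returns null when the file does not exist.
        Map<String, Object> values = this.parse(Paths.get(file));
        if (values == null) {
            return null;
        }
        return filter.apply(GSON.toJsonTree(values));
    }

    @Override
    public Map<String, Object> parse(BufferedReader reader) throws IOException {
        return GSON.fromJson(reader, new TypeToken<Map<String, Object>>(){}.getType());
    }
}
```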