16 | 16 | package com.github.benmanes.caffeine.cache; |
17 | 17 |
18 | 18 | import static com.google.common.base.Preconditions.checkState; |
| 19 | +import static java.util.concurrent.CompletableFuture.completedFuture; |
19 | 20 | import static java.util.function.Function.identity; |
20 | 21 | import static org.slf4j.event.Level.WARN; |
21 | 22 |
| 23 | +import java.time.Duration; |
22 | 24 | import java.util.List; |
23 | 25 | import java.util.Map; |
24 | | -import java.util.concurrent.CompletableFuture; |
25 | 26 | import java.util.concurrent.ThreadLocalRandom; |
26 | | -import java.util.function.BiConsumer; |
| 27 | +import java.util.function.Consumer; |
27 | 28 |
28 | 29 | import org.testng.annotations.Listeners; |
29 | 30 | import org.testng.annotations.Test; |
30 | 31 |
31 | 32 | import com.github.benmanes.caffeine.cache.testing.CacheContext; |
32 | 33 | import com.github.benmanes.caffeine.cache.testing.CacheProvider; |
33 | 34 | import com.github.benmanes.caffeine.cache.testing.CacheSpec; |
| 35 | +import com.github.benmanes.caffeine.cache.testing.CacheSpec.CacheExpiry; |
34 | 36 | import com.github.benmanes.caffeine.cache.testing.CacheSpec.CacheWeigher; |
35 | 37 | import com.github.benmanes.caffeine.cache.testing.CacheSpec.Expire; |
| 38 | +import com.github.benmanes.caffeine.cache.testing.CacheSpec.Implementation; |
36 | 39 | import com.github.benmanes.caffeine.cache.testing.CacheSpec.Listener; |
37 | 40 | import com.github.benmanes.caffeine.cache.testing.CacheSpec.Maximum; |
38 | 41 | import com.github.benmanes.caffeine.cache.testing.CacheSpec.Population; |
|
56 | 59 | public final class MultiThreadedTest { |
57 | 60 |
58 | 61 | @Test(dataProvider = "caches") |
59 | | - @CacheSpec(maximumSize = Maximum.DISABLED, stats = Stats.DISABLED, |
60 | | - population = Population.EMPTY, expireAfterAccess = Expire.DISABLED, |
61 | | - expireAfterWrite = Expire.DISABLED, removalListener = Listener.DISABLED, |
62 | | - refreshAfterWrite = { Expire.DISABLED, Expire.ONE_MILLISECOND }, |
63 | | - keys = ReferenceType.STRONG, values = ReferenceType.STRONG, |
64 | | - evictionListener = Listener.DISABLED) |
65 | | - public void concurrent_unbounded(LoadingCache<Int, Int> cache, CacheContext context) { |
66 | | - Threads.runTest(cache, operations()); |
| 62 | + @CacheSpec(implementation = Implementation.Caffeine, |
| 63 | + maximumSize = {Maximum.DISABLED, Maximum.FULL}, |
| 64 | + weigher = {CacheWeigher.DISABLED, CacheWeigher.RANDOM}, |
| 65 | + stats = Stats.DISABLED, population = Population.EMPTY, |
| 66 | + expireAfterAccess = {Expire.DISABLED, Expire.ONE_MILLISECOND}, |
| 67 | + expireAfterWrite = {Expire.DISABLED, Expire.ONE_MILLISECOND}, |
| 68 | + refreshAfterWrite = {Expire.DISABLED, Expire.ONE_MILLISECOND}, |
| 69 | + expiry = {CacheExpiry.DISABLED, CacheExpiry.CREATE, CacheExpiry.WRITE, CacheExpiry.ACCESS}, |
| 70 | + expiryTime = Expire.ONE_MILLISECOND, keys = ReferenceType.STRONG, |
| 71 | + removalListener = Listener.DISABLED, evictionListener = Listener.DISABLED) |
| 72 | + public void concurrent(Cache<Int, Int> cache, CacheContext context) { |
| 73 | + Threads.runTest(operations(cache, context)); |
67 | 74 | } |
68 | 75 |
69 | | - @Test(dataProvider = "caches") |
70 | | - @CacheSpec(maximumSize = Maximum.FULL, weigher = {CacheWeigher.DISABLED, CacheWeigher.RANDOM}, |
71 | | - stats = Stats.DISABLED, population = Population.EMPTY, removalListener = Listener.DISABLED, |
72 | | - refreshAfterWrite = { Expire.DISABLED, Expire.ONE_MILLISECOND }, |
73 | | - keys = ReferenceType.STRONG, values = ReferenceType.STRONG, |
74 | | - evictionListener = Listener.DISABLED) |
75 | | - public void concurrent_bounded(LoadingCache<Int, Int> cache, CacheContext context) { |
76 | | - Threads.runTest(cache, operations()); |
77 | | - } |
78 | | - |
79 | | - @Test(dataProvider = "caches") |
80 | | - @CacheSpec(maximumSize = Maximum.DISABLED, stats = Stats.DISABLED, |
81 | | - population = Population.EMPTY, expireAfterAccess = Expire.DISABLED, |
82 | | - expireAfterWrite = Expire.DISABLED, removalListener = Listener.DISABLED, |
83 | | - refreshAfterWrite = { Expire.DISABLED, Expire.ONE_MILLISECOND }, |
84 | | - keys = ReferenceType.STRONG, values = ReferenceType.STRONG, |
85 | | - evictionListener = Listener.DISABLED) |
86 | | - public void async_concurrent_unbounded( |
87 | | - AsyncLoadingCache<Int, Int> cache, CacheContext context) { |
88 | | - Threads.runTest(cache, asyncOperations()); |
89 | | - } |
90 | | - |
91 | | - @Test(dataProvider = "caches") |
92 | | - @CacheSpec(maximumSize = Maximum.FULL, weigher = {CacheWeigher.DISABLED, CacheWeigher.RANDOM}, |
93 | | - stats = Stats.DISABLED, population = Population.EMPTY, removalListener = Listener.DISABLED, |
94 | | - refreshAfterWrite = { Expire.DISABLED, Expire.ONE_MILLISECOND }, |
95 | | - keys = ReferenceType.STRONG, values = ReferenceType.STRONG, |
96 | | - evictionListener = Listener.DISABLED) |
97 | | - public void async_concurrent_bounded( |
98 | | - AsyncLoadingCache<Int, Int> cache, CacheContext context) { |
99 | | - Threads.runTest(cache, asyncOperations()); |
100 | | - } |
101 | | - |
102 | | - @SuppressWarnings({"CollectionToArray", "FutureReturnValueIgnored", "MethodReferenceUsage", |
103 | | - "PMD.OptimizableToArrayCall", "rawtypes", "ReturnValueIgnored", "SelfEquals", |
104 | | - "SizeGreaterThanOrEqualsZero"}) |
105 | | - private static ImmutableList<BiConsumer<LoadingCache<Int, Int>, Int>> operations() { |
106 | | - return ImmutableList.of( |
107 | | - // LoadingCache |
108 | | - (cache, key) -> { cache.get(key); }, |
109 | | - (cache, key) -> { cache.getAll(List.of(key)); }, |
110 | | - (cache, key) -> { cache.refresh(key); }, |
111 | | - |
112 | | - // Cache |
113 | | - (cache, key) -> { cache.getIfPresent(key); }, |
114 | | - (cache, key) -> { cache.get(key, identity()); }, |
115 | | - (cache, key) -> { cache.getAllPresent(List.of(key)); }, |
116 | | - (cache, key) -> { cache.put(key, key); }, |
117 | | - (cache, key) -> { cache.putAll(Map.of(key, key)); }, |
118 | | - (cache, key) -> { cache.invalidate(key); }, |
119 | | - (cache, key) -> { cache.invalidateAll(List.of(key)); }, |
120 | | - (cache, key) -> { |
| 76 | + @SuppressWarnings({"CollectionToArray", "CollectionUndefinedEquality", "FutureReturnValueIgnored", |
| 77 | + "MethodReferenceUsage", "PMD.OptimizableToArrayCall", "rawtypes", "ReturnValueIgnored", |
| 78 | + "SelfEquals", "SizeGreaterThanOrEqualsZero", "unchecked"}) |
| 79 | + private static ImmutableList<Consumer<Int>> operations( |
| 80 | + Cache<Int, Int> cache, CacheContext context) { |
| 81 | + var builder = new ImmutableList.Builder<Consumer<Int>>(); |
| 82 | + builder.add( |
| 83 | + key -> { cache.getIfPresent(key); }, |
| 84 | + key -> { cache.get(key, identity()); }, |
| 85 | + key -> { cache.getAllPresent(List.of(key)); }, |
| 86 | + key -> { cache.put(key, key); }, |
| 87 | + key -> { cache.putAll(Map.of(key, key)); }, |
| 88 | + key -> { cache.invalidate(key); }, |
| 89 | + key -> { cache.invalidateAll(List.of(key)); }, |
| 90 | + key -> { |
121 | 91 | int random = ThreadLocalRandom.current().nextInt(); |
122 | 92 | // expensive so do it less frequently |
123 | 93 | if ((random & 255) == 0) { |
124 | 94 | cache.invalidateAll(); |
125 | 95 | } |
126 | 96 | }, |
127 | | - (cache, key) -> { checkState(cache.estimatedSize() >= 0); }, |
128 | | - (cache, key) -> { cache.stats(); }, |
129 | | - (cache, key) -> { cache.cleanUp(); }, |
130 | | - |
131 | | - // Map |
132 | | - (cache, key) -> { cache.asMap().containsKey(key); }, |
133 | | - (cache, key) -> { cache.asMap().containsValue(key); }, |
134 | | - (cache, key) -> { cache.asMap().isEmpty(); }, |
135 | | - (cache, key) -> { checkState(cache.asMap().size() >= 0); }, |
136 | | - (cache, key) -> { cache.asMap().get(key); }, |
137 | | - (cache, key) -> { cache.asMap().put(key, key); }, |
138 | | - (cache, key) -> { cache.asMap().putAll(Map.of(key, key)); }, |
139 | | - (cache, key) -> { cache.asMap().putIfAbsent(key, key); }, |
140 | | - (cache, key) -> { cache.asMap().remove(key); }, |
141 | | - (cache, key) -> { cache.asMap().remove(key, key); }, |
142 | | - (cache, key) -> { cache.asMap().replace(key, key); }, |
143 | | - (cache, key) -> { cache.asMap().computeIfAbsent(key, k -> k); }, |
144 | | - (cache, key) -> { cache.asMap().computeIfPresent(key, (k, v) -> v); }, |
145 | | - (cache, key) -> { cache.asMap().compute(key, (k, v) -> v); }, |
146 | | - (cache, key) -> { cache.asMap().merge(key, key, (k, v) -> v); }, |
147 | | - (cache, key) -> { // expensive so do it less frequently |
| 97 | + key -> { checkState(cache.estimatedSize() >= 0); }, |
| 98 | + key -> { cache.stats(); }, |
| 99 | + key -> { cache.cleanUp(); }, |
| 100 | + key -> { cache.hashCode(); }, |
| 101 | + key -> { cache.equals(cache); }, |
| 102 | + key -> { cache.toString(); }, |
| 103 | + key -> { |
148 | 104 | int random = ThreadLocalRandom.current().nextInt(); |
| 105 | + // expensive so do it less frequently |
149 | 106 | if ((random & 255) == 0) { |
150 | | - cache.asMap().clear(); |
| 107 | + SerializableTester.reserialize(cache); |
151 | 108 | } |
152 | 109 | }, |
153 | | - (cache, key) -> { cache.asMap().keySet().toArray(new Object[cache.asMap().size()]); }, |
154 | | - (cache, key) -> { cache.asMap().values().toArray(new Object[cache.asMap().size()]); }, |
155 | | - (cache, key) -> { cache.asMap().entrySet().toArray(new Map.Entry[cache.asMap().size()]); }, |
156 | | - (cache, key) -> { cache.hashCode(); }, |
157 | | - (cache, key) -> { cache.equals(cache); }, |
158 | | - (cache, key) -> { cache.toString(); }, |
159 | | - (cache, key) -> { |
| 110 | + |
| 111 | + // Map |
| 112 | + key -> { cache.asMap().containsKey(key); }, |
| 113 | + key -> { cache.asMap().containsValue(key); }, |
| 114 | + key -> { cache.asMap().isEmpty(); }, |
| 115 | + key -> { checkState(cache.asMap().size() >= 0); }, |
| 116 | + key -> { cache.asMap().get(key); }, |
| 117 | + key -> { cache.asMap().put(key, key); }, |
| 118 | + key -> { cache.asMap().putAll(Map.of(key, key)); }, |
| 119 | + key -> { cache.asMap().putIfAbsent(key, key); }, |
| 120 | + key -> { cache.asMap().remove(key); }, |
| 121 | + key -> { cache.asMap().remove(key, key); }, |
| 122 | + key -> { cache.asMap().replace(key, key); }, |
| 123 | + key -> { cache.asMap().computeIfAbsent(key, k -> k); }, |
| 124 | + key -> { cache.asMap().computeIfPresent(key, (k, v) -> v); }, |
| 125 | + key -> { cache.asMap().compute(key, (k, v) -> v); }, |
| 126 | + key -> { cache.asMap().merge(key, key, (k, v) -> v); }, |
| 127 | + key -> { // expensive so do it less frequently |
160 | 128 | int random = ThreadLocalRandom.current().nextInt(); |
161 | | - // expensive so do it less frequently |
162 | 129 | if ((random & 255) == 0) { |
163 | | - SerializableTester.reserialize(cache); |
| 130 | + cache.asMap().clear(); |
164 | 131 | } |
165 | | - }); |
166 | | - } |
| 132 | + }, |
| 133 | + key -> { cache.asMap().keySet().toArray(new Object[cache.asMap().size()]); }, |
| 134 | + key -> { cache.asMap().values().toArray(new Object[cache.asMap().size()]); }, |
| 135 | + key -> { cache.asMap().entrySet().toArray(new Map.Entry[cache.asMap().size()]); }, |
| 136 | + key -> { cache.asMap().hashCode(); }, |
| 137 | + key -> { cache.asMap().equals(cache.asMap()); }, |
| 138 | + key -> { cache.asMap().toString(); }); |
| 139 | + |
| 140 | + if (cache instanceof LoadingCache<?, ?>) { |
| 141 | + var loadingCache = (LoadingCache<Int, Int>) cache; |
| 142 | + builder.add( |
| 143 | + key -> { loadingCache.get(key); }, |
| 144 | + key -> { loadingCache.getAll(List.of(key)); }, |
| 145 | + key -> { loadingCache.refresh(key); }); |
| 146 | + } |
| 147 | + |
| 148 | + if (context.isAsync()) { |
| 149 | + var asyncCache = (AsyncCache<Int, Int>) context.asyncCache(); |
| 150 | + builder.add( |
| 151 | + key -> { asyncCache.getIfPresent(key); }, |
| 152 | + key -> { asyncCache.get(key, k -> key); }, |
| 153 | + key -> { asyncCache.get(key, (k, e) -> completedFuture(key)); }, |
| 154 | + key -> { asyncCache.put(key, completedFuture(key)); }, |
| 155 | + |
| 156 | + // Map |
| 157 | + key -> { asyncCache.asMap().containsKey(key); }, |
| 158 | + key -> { asyncCache.asMap().containsValue(key.toFuture()); }, |
| 159 | + key -> { asyncCache.asMap().isEmpty(); }, |
| 160 | + key -> { checkState(asyncCache.asMap().size() >= 0); }, |
| 161 | + key -> { asyncCache.asMap().get(key); }, |
| 162 | + key -> { asyncCache.asMap().put(key, completedFuture(null)); }, |
| 163 | + key -> { asyncCache.asMap().putAll(Map.of(key, completedFuture(null))); }, |
| 164 | + key -> { asyncCache.asMap().putIfAbsent(key, completedFuture(null)); }, |
| 165 | + key -> { asyncCache.asMap().remove(key); }, |
| 166 | + key -> { asyncCache.asMap().remove(key, key); }, |
| 167 | + key -> { asyncCache.asMap().replace(key, completedFuture(null)); }, |
| 168 | + key -> { asyncCache.asMap().computeIfAbsent(key, k -> completedFuture(null)); }, |
| 169 | + key -> { asyncCache.asMap().computeIfPresent(key, (k, v) -> v); }, |
| 170 | + key -> { asyncCache.asMap().compute(key, (k, v) -> v); }, |
| 171 | + key -> { asyncCache.asMap().merge(key, key.toFuture(), (k, v) -> v); }, |
| 172 | + key -> { // expensive so do it less frequently |
| 173 | + int random = ThreadLocalRandom.current().nextInt(); |
| 174 | + if ((random & 255) == 0) { |
| 175 | + asyncCache.asMap().clear(); |
| 176 | + } |
| 177 | + }, |
| 178 | + key -> { asyncCache.asMap().keySet().toArray(new Object[cache.asMap().size()]); }, |
| 179 | + key -> { asyncCache.asMap().values().toArray(new Object[cache.asMap().size()]); }, |
| 180 | + key -> { asyncCache.asMap().entrySet().toArray(new Map.Entry[cache.asMap().size()]); }, |
| 181 | + key -> { asyncCache.asMap().hashCode(); }, |
| 182 | + key -> { asyncCache.asMap().equals(asyncCache.asMap()); }, |
| 183 | + key -> { asyncCache.asMap().toString(); }); |
| 184 | + |
| 185 | + if (asyncCache instanceof AsyncLoadingCache<?, ?>) { |
| 186 | + var asyncLoadingCache = (AsyncLoadingCache<Int, Int>) asyncCache; |
| 187 | + builder.add( |
| 188 | + key -> { asyncLoadingCache.get(key); }, |
| 189 | + key -> { asyncLoadingCache.getAll(List.of(key)); }); |
| 190 | + } |
| 191 | + } |
| 192 | + |
| 193 | + cache.policy().expireVariably().ifPresent(policy -> { |
| 194 | + var duration = Duration.ofDays(1); |
| 195 | + builder.add( |
| 196 | + key -> { policy.put(key, key, duration); }, |
| 197 | + key -> { policy.putIfAbsent(key, key, duration); }, |
| 198 | + key -> { policy.compute(key, (k, v) -> v, duration); }); |
| 199 | + }); |
167 | 200 |
168 | | - @SuppressWarnings({"CheckReturnValue", "FutureReturnValueIgnored", "MethodReferenceUsage"}) |
169 | | - private static ImmutableList<BiConsumer<AsyncLoadingCache<Int, Int>, Int>> asyncOperations() { |
170 | | - return ImmutableList.of( |
171 | | - (cache, key) -> { cache.getIfPresent(key); }, |
172 | | - (cache, key) -> { cache.get(key, k -> key); }, |
173 | | - (cache, key) -> { cache.get(key, (k, e) -> CompletableFuture.completedFuture(key)); }, |
174 | | - (cache, key) -> { cache.get(key); }, |
175 | | - (cache, key) -> { cache.getAll(List.of(key)); }, |
176 | | - (cache, key) -> { cache.put(key, CompletableFuture.completedFuture(key)); }); |
| 201 | + return builder.build(); |
177 | 202 | } |
178 | 203 | } |