Generalize parameterized tests
Many parameterized tests included a configuration option to specify the backing
cache implementation (Caffeine or Guava), mostly for cases that exercise
Caffeine-specific features. The test generator now infers this by inspecting the
argument types, allowing the tests to be more generic by constraining only their
behavior.
ben-manes committed Sep 26, 2021
1 parent b85c8ec commit 65298df
Showing 29 changed files with 1,050 additions and 1,094 deletions.
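As context for the change, here is a minimal sketch of the kind of inference the commit message describes: a generator inspects a test method's parameter types and only emits Caffeine-backed configurations when a Caffeine-specific type is requested. This is an illustrative assumption, not the project's actual CacheGenerator/CacheProvider code; the helper name inferImplementations and the chosen marker types are hypothetical.

import java.lang.reflect.Method;
import java.lang.reflect.Parameter;
import java.util.EnumSet;
import java.util.Set;

import com.github.benmanes.caffeine.cache.Policy;
import com.github.benmanes.caffeine.cache.testing.CacheSpec.Implementation;

final class ImplementationInference {
  // Toy inference: if the test asks for a parameter type that only Caffeine can supply,
  // restrict the generated configurations to Caffeine; otherwise run against both backends.
  static Set<Implementation> inferImplementations(Method testMethod) {
    Set<Class<?>> caffeineOnly = Set.of(Policy.Eviction.class, Policy.FixedExpiration.class);
    for (Parameter parameter : testMethod.getParameters()) {
      if (caffeineOnly.stream().anyMatch(type -> type.isAssignableFrom(parameter.getType()))) {
        return EnumSet.of(Implementation.Caffeine);
      }
    }
    return EnumSet.allOf(Implementation.class);
  }
}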
@@ -81,9 +81,8 @@ public interface LoadingCache<K extends Object, V extends Object> extends Cache<
* @throws NullPointerException if the specified collection is null or contains a null element
* @throws CompletionException if a checked exception was thrown while loading the value
* @throws RuntimeException or Error if the {@link CacheLoader} does so, if
* {@link CacheLoader#loadAll} returns {@code null}, returns a map containing null keys or
* values, or fails to return an entry for each requested key. In all cases, the mapping
* is left unestablished
* {@link CacheLoader#loadAll} returns {@code null}, or returns a map containing null keys
* or values. In all cases, the mapping is left unestablished.
*/
Map<K, V> getAll(Iterable<? extends K> keys);
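The revised wording above is the full behavioral contract for a failed bulk load; a minimal usage sketch of that contract (the key names and the exception type are illustrative) might look like:

import java.util.List;

import com.github.benmanes.caffeine.cache.Caffeine;
import com.github.benmanes.caffeine.cache.LoadingCache;

final class GetAllContractExample {
  public static void main(String[] args) {
    LoadingCache<String, String> cache = Caffeine.newBuilder()
        .build(key -> { throw new IllegalStateException("loader failure"); });
    try {
      cache.getAll(List.of("a", "b")); // the loader's RuntimeException is propagated
    } catch (IllegalStateException expected) { /* ignored */ }
    // In all cases the mapping is left unestablished
    assert cache.getIfPresent("a") == null;
  }
}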

@@ -56,7 +56,6 @@
import com.github.benmanes.caffeine.cache.testing.CacheSpec.Implementation;
import com.github.benmanes.caffeine.cache.testing.CacheSpec.Listener;
import com.github.benmanes.caffeine.cache.testing.CacheSpec.Population;
import com.github.benmanes.caffeine.cache.testing.CacheSpec.ReferenceType;
import com.github.benmanes.caffeine.cache.testing.CacheValidationListener;
import com.github.benmanes.caffeine.cache.testing.CheckNoStats;
import com.github.benmanes.caffeine.testing.ConcurrentTestHarness;
@@ -921,8 +920,8 @@ public void computeIfAbsent_nullValue(Map<Int, Int> map, CacheContext context) {
assertThat(map).hasSize(context.initialSize());
}

@CacheSpec
@Test(dataProvider = "caches")
@CacheSpec(implementation = Implementation.Caffeine)
public void computeIfAbsent_recursive(Map<Int, Int> map, CacheContext context) {
var mappingFunction = new Function<Int, Int>() {
@Override public Int apply(Int key) {
@@ -935,8 +934,8 @@ public void computeIfAbsent_recursive(Map<Int, Int> map, CacheContext context) {
} catch (StackOverflowError | IllegalStateException e) { /* ignored */ }
}

@CacheSpec
@Test(dataProvider = "caches")
@CacheSpec(implementation = Implementation.Caffeine)
public void computeIfAbsent_pingpong(Map<Int, Int> map, CacheContext context) {
var mappingFunction = new Function<Int, Int>() {
@Override public Int apply(Int key) {
@@ -953,8 +952,8 @@ public void computeIfAbsent_pingpong(Map<Int, Int> map, CacheContext context) {
@CacheSpec(removalListener = { Listener.DEFAULT, Listener.REJECTING })
public void computeIfAbsent_error(Map<Int, Int> map, CacheContext context) {
try {
map.computeIfAbsent(context.absentKey(), key -> { throw new Error(); });
} catch (Error expected) {}
map.computeIfAbsent(context.absentKey(), key -> { throw new ExpectedError(); });
} catch (ExpectedError expected) {}
assertThat(map).containsExactlyEntriesIn(context.original());
assertThat(context).stats().hits(0).misses(1).success(0).failures(1);
assertThat(map.computeIfAbsent(context.absentKey(), key -> key)).isEqualTo(context.absentKey());
@@ -1083,8 +1082,8 @@ public void computeIfPresent_pingpong(Map<Int, Int> map, CacheContext context) {
@CacheSpec(population = { Population.SINGLETON, Population.PARTIAL, Population.FULL })
public void computeIfPresent_error(Map<Int, Int> map, CacheContext context) {
try {
map.computeIfPresent(context.firstKey(), (key, value) -> { throw new Error(); });
} catch (Error expected) {}
map.computeIfPresent(context.firstKey(), (key, value) -> { throw new ExpectedError(); });
} catch (ExpectedError expected) {}
assertThat(map).isEqualTo(context.original());
assertThat(context).stats().hits(0).misses(0).success(0).failures(1);
assertThat(map.computeIfPresent(context.firstKey(), (k, v) -> k.negate()))
@@ -1101,9 +1100,7 @@ public void computeIfPresent_absent(Map<Int, Int> map, CacheContext context) {
}

@Test(dataProvider = "caches")
@CacheSpec(
implementation = Implementation.Caffeine,
population = { Population.SINGLETON, Population.PARTIAL, Population.FULL }, values = ReferenceType.WEAK)
@CacheSpec(population = { Population.SINGLETON, Population.PARTIAL, Population.FULL })
public void computeIfPresent_present_sameValue(Map<Int, Int> map, CacheContext context) {
var expectedMap = new HashMap<Int, Int>();
for (Int key : context.firstMiddleLastKeys()) {
@@ -1211,8 +1208,8 @@ public void compute_remove(Map<Int, Int> map, CacheContext context) {
assertThat(context).removalNotifications().withCause(EXPLICIT).hasSize(count).exclusively();
}

@CacheSpec
@Test(dataProvider = "caches")
@CacheSpec(implementation = Implementation.Caffeine)
public void compute_recursive(Map<Int, Int> map, CacheContext context) {
var mappingFunction = new BiFunction<Int, Int, Int>() {
@Override public Int apply(Int key, Int value) {
@@ -1226,7 +1223,7 @@ public void compute_recursive(Map<Int, Int> map, CacheContext context) {
}

@Test(dataProvider = "caches")
@CacheSpec(population = Population.EMPTY, implementation = Implementation.Caffeine)
@CacheSpec(population = Population.EMPTY)
public void compute_pingpong(Map<Int, Int> map, CacheContext context) {
var key1 = Int.valueOf(1);
var key2 = Int.valueOf(2);
@@ -1432,8 +1429,8 @@ public void merge_pingpong(Map<Int, Int> map, CacheContext context) {
public void merge_error(Map<Int, Int> map, CacheContext context) {
try {
map.merge(context.firstKey(), context.original().get(context.firstKey()),
(oldValue, value) -> { throw new Error(); });
} catch (Error expected) {}
(oldValue, value) -> { throw new ExpectedError(); });
} catch (ExpectedError expected) {}
assertThat(map).containsExactlyEntriesIn(context.original());
assertThat(context).stats().hits(0).misses(0).success(0).failures(1);
}
@@ -2209,8 +2206,8 @@ public void entrySpliterator_estimateSize(Map<Int, Int> map, CacheContext contex

@CheckNoStats
@Test(dataProvider = "caches")
@CacheSpec(implementation = Implementation.Caffeine,
population = { Population.SINGLETON, Population.PARTIAL, Population.FULL })
@CacheSpec(population = { Population.SINGLETON, Population.PARTIAL, Population.FULL })
public void writeThroughEntry(Map<Int, Int> map, CacheContext context) {
var entry = map.entrySet().iterator().next();

@@ -2222,8 +2218,7 @@ public void writeThroughEntry(Map<Int, Int> map, CacheContext context) {

@CheckNoStats
@Test(dataProvider = "caches", expectedExceptions = NullPointerException.class)
@CacheSpec(implementation = Implementation.Caffeine,
population = { Population.SINGLETON, Population.PARTIAL, Population.FULL })
@CacheSpec(population = { Population.SINGLETON, Population.PARTIAL, Population.FULL })
public void writeThroughEntry_null(Map<Int, Int> map, CacheContext context) {
map.entrySet().iterator().next().setValue(null);
}
@@ -2237,4 +2232,7 @@ public void writeThroughEntry_serialize(Map<Int, Int> map, CacheContext context)
var copy = SerializableTester.reserialize(entry);
assertThat(entry).isEqualTo(copy);
}

@SuppressWarnings("serial")
static final class ExpectedError extends Error {}
}
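The tests above switch from throwing a bare Error to the dedicated ExpectedError added here, presumably so the catch blocks cannot mask an unrelated failure (for example an AssertionError raised inside the try block). A self-contained sketch of that idiom, using a plain ConcurrentHashMap rather than the project's test harness:

import java.util.concurrent.ConcurrentHashMap;

final class DedicatedErrorExample {
  @SuppressWarnings("serial")
  static final class ExpectedError extends Error {}

  public static void main(String[] args) {
    var map = new ConcurrentHashMap<Integer, Integer>();
    try {
      map.computeIfAbsent(1, key -> { throw new ExpectedError(); });
    } catch (ExpectedError expected) {
      // only the deliberately thrown error is swallowed; anything else still surfaces
    }
    // the mapping is not established when the mapping function throws
    assert !map.containsKey(1);
  }
}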
@@ -53,7 +53,6 @@
import com.github.benmanes.caffeine.cache.testing.CacheContext;
import com.github.benmanes.caffeine.cache.testing.CacheProvider;
import com.github.benmanes.caffeine.cache.testing.CacheSpec;
import com.github.benmanes.caffeine.cache.testing.CacheSpec.Implementation;
import com.github.benmanes.caffeine.cache.testing.CacheSpec.Listener;
import com.github.benmanes.caffeine.cache.testing.CacheSpec.Population;
import com.github.benmanes.caffeine.cache.testing.CacheValidationListener;
@@ -760,8 +759,8 @@ public void computeIfAbsent_nullValue(AsyncCache<Int, Int> cache, CacheContext c
assertThat(cache).hasSize(context.initialSize());
}

@CacheSpec
@Test(dataProvider = "caches")
@CacheSpec(implementation = Implementation.Caffeine)
public void computeIfAbsent_recursive(AsyncCache<Int, Int> cache, CacheContext context) {
var mappingFunction = new Function<Int, CompletableFuture<Int>>() {
@Override public CompletableFuture<Int> apply(Int key) {
@@ -774,8 +773,8 @@ public void computeIfAbsent_recursive(AsyncCache<Int, Int> cache, CacheContext c
} catch (StackOverflowError | IllegalStateException e) { /* ignored */ }
}

@CacheSpec
@Test(dataProvider = "caches")
@CacheSpec(implementation = Implementation.Caffeine)
public void computeIfAbsent_pingpong(AsyncCache<Int, Int> cache, CacheContext context) {
var mappingFunction = new Function<Int, CompletableFuture<Int>>() {
@Override public CompletableFuture<Int> apply(Int key) {
@@ -1007,8 +1006,8 @@ public void compute_remove(AsyncCache<Int, Int> cache, CacheContext context) {
assertThat(context).removalNotifications().withCause(EXPLICIT).hasSize(count).exclusively();
}

@CacheSpec
@Test(dataProvider = "caches")
@CacheSpec(implementation = Implementation.Caffeine)
public void compute_recursive(AsyncCache<Int, Int> cache, CacheContext context) {
var mappingFunction = new BiFunction<Int, CompletableFuture<Int>, CompletableFuture<Int>>() {
@Override public CompletableFuture<Int> apply(Int key, CompletableFuture<Int> value) {
@@ -1021,8 +1020,8 @@ public void compute_recursive(AsyncCache<Int, Int> cache, CacheContext context)
} catch (StackOverflowError | IllegalStateException e) { /* ignored */ }
}

@CacheSpec(population = Population.EMPTY, implementation = Implementation.Caffeine)
@Test(dataProvider = "caches")
@CacheSpec(population = Population.EMPTY)
public void compute_pingpong(AsyncCache<Int, Int> cache, CacheContext context) {
var key1 = Int.valueOf(1);
var key2 = Int.valueOf(2);
@@ -52,9 +52,7 @@
import com.github.benmanes.caffeine.cache.testing.CacheProvider;
import com.github.benmanes.caffeine.cache.testing.CacheSpec;
import com.github.benmanes.caffeine.cache.testing.CacheSpec.CacheExecutor;
import com.github.benmanes.caffeine.cache.testing.CacheSpec.Compute;
import com.github.benmanes.caffeine.cache.testing.CacheSpec.ExecutorFailure;
import com.github.benmanes.caffeine.cache.testing.CacheSpec.Implementation;
import com.github.benmanes.caffeine.cache.testing.CacheSpec.Listener;
import com.github.benmanes.caffeine.cache.testing.CacheSpec.Population;
import com.github.benmanes.caffeine.cache.testing.CacheValidationListener;
@@ -481,8 +479,7 @@ public void getAllFunction_present_ordered_exceeds(
}

@Test(dataProvider = "caches")
@CacheSpec(implementation = Implementation.Caffeine, compute = Compute.ASYNC,
removalListener = { Listener.DEFAULT, Listener.REJECTING })
@CacheSpec(removalListener = { Listener.DEFAULT, Listener.REJECTING })
public void getAllFunction_badLoader(AsyncCache<Int, Int> cache, CacheContext context) {
assertThat(cache.getAll(context.absentKeys(), keysToLoad -> { throw new LoadAllException(); }))
.failsWith(CompletionException.class).hasCauseThat().isInstanceOf(LoadAllException.class);
@@ -721,8 +718,7 @@ public void getAllBifunction_present_ordered_exceeds(
}

@Test(dataProvider = "caches")
@CacheSpec(implementation = Implementation.Caffeine, compute = Compute.ASYNC,
removalListener = { Listener.DEFAULT, Listener.REJECTING })
@CacheSpec(removalListener = { Listener.DEFAULT, Listener.REJECTING })
public void getAllBifunction_badLoader(AsyncCache<Int, Int> cache, CacheContext context) {
try {
cache.getAll(context.absentKeys(), (keysToLoad, executor) ->
@@ -50,11 +50,9 @@
import com.github.benmanes.caffeine.cache.testing.CacheSpec.CacheExecutor;
import com.github.benmanes.caffeine.cache.testing.CacheSpec.Compute;
import com.github.benmanes.caffeine.cache.testing.CacheSpec.ExecutorFailure;
import com.github.benmanes.caffeine.cache.testing.CacheSpec.Implementation;
import com.github.benmanes.caffeine.cache.testing.CacheSpec.Listener;
import com.github.benmanes.caffeine.cache.testing.CacheSpec.Loader;
import com.github.benmanes.caffeine.cache.testing.CacheSpec.Population;
import com.github.benmanes.caffeine.cache.testing.CacheSpec.ReferenceType;
import com.github.benmanes.caffeine.cache.testing.CacheValidationListener;
import com.github.benmanes.caffeine.cache.testing.CheckNoStats;
import com.github.benmanes.caffeine.testing.Int;
@@ -274,8 +272,7 @@ public void getAllPresent_ordered_exceeds(
}

@Test(dataProvider = "caches")
@CacheSpec(implementation = Implementation.Caffeine, compute = Compute.ASYNC,
removalListener = { Listener.DEFAULT, Listener.REJECTING })
@CacheSpec(compute = Compute.ASYNC, removalListener = { Listener.DEFAULT, Listener.REJECTING })
public void getAll_badLoader(CacheContext context) {
var loader = new AsyncCacheLoader<Int, Int>() {
@Override public CompletableFuture<Int> asyncLoad(Int key, Executor executor) {
@@ -318,8 +315,8 @@ public void put_replace(AsyncLoadingCache<Int, Int> cache, CacheContext context)
/* --------------- refresh --------------- */

@Test(dataProvider = "caches")
@CacheSpec(implementation = Implementation.Caffeine, population = Population.EMPTY,
executor = CacheExecutor.THREADED, compute = Compute.ASYNC, values = ReferenceType.STRONG)
@CacheSpec(population = Population.EMPTY,
compute = Compute.ASYNC, executor = CacheExecutor.THREADED)
public void refresh(CacheContext context) {
var done = new AtomicBoolean();
var cache = context.buildAsync((Int key) -> {
@@ -341,9 +338,9 @@ public void refresh(CacheContext context) {
await().untilAsserted(() -> assertThat(cache).containsEntry(key, key.negate()));
}

@Test(dataProvider = "caches", timeOut = 5000) // Issue #69
@CacheSpec(implementation = Implementation.Caffeine, population = Population.EMPTY,
executor = CacheExecutor.THREADED, compute = Compute.ASYNC, values = ReferenceType.STRONG)
@Test(dataProvider = "caches", timeOut = 5_000) // Issue #69
@CacheSpec(population = Population.EMPTY,
compute = Compute.ASYNC, executor = CacheExecutor.THREADED)
public void refresh_deadlock(CacheContext context) {
var future = new CompletableFuture<Int>();
var cache = context.buildAsync((Int k, Executor e) -> future);