fix variable expiration calculation when nearing overflow
A test run failed because the starting time is chosen from a random seed, and the
calculation overflows when that start is slightly more than 60s from the maximum
value. The expiration tests now include a known bad seed to verify the calculation,
and their time advancements are set to trigger the scenario.
ben-manes committed Jan 11, 2025
1 parent 3ff2445 commit 2d93f2b
Showing 6 changed files with 196 additions and 103 deletions.
@@ -1056,7 +1056,7 @@ boolean evictEntry(Node<K, V> node, RemovalCause cause, long now) {
expired |= ((now - n.getWriteTime()) >= expiresAfterWriteNanos());
}
if (expiresVariable()) {
expired |= (n.getVariableTime() <= now);
expired |= ((now - node.getVariableTime()) >= 0);
}
if (!expired) {
resurrect[0] = true;
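
For context on the one-line change in evictEntry above: nanosecond timestamps wrap when they pass Long.MAX_VALUE, so the old ordering check can report a fresh entry as expired once its expiration time overflows. The subtraction form compares the wrapped difference, whose sign still reflects the true ordering. A minimal, self-contained sketch of that behavior (the class and variable names are illustrative, not taken from the cache internals):

import java.util.concurrent.TimeUnit;

public class OverflowComparisonSketch {
  public static void main(String[] args) {
    // A clock reading 30 seconds before the maximum representable tick.
    long now = Long.MAX_VALUE - TimeUnit.SECONDS.toNanos(30);

    // A fresh entry that should expire one minute from now; the sum wraps negative.
    long variableTime = now + TimeUnit.MINUTES.toNanos(1);

    // Naive ordering: the wrapped (negative) timestamp looks earlier than now,
    // so a brand-new entry would be treated as already expired.
    System.out.println(variableTime <= now);        // true (wrong)

    // Subtraction keeps the sign of the true difference (-60s here) under
    // two's-complement arithmetic, so the entry correctly survives.
    System.out.println((now - variableTime) >= 0);  // false (correct)
  }
}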

Large diffs are not rendered by default.

@@ -73,6 +73,7 @@
import com.github.benmanes.caffeine.cache.testing.CacheSpec.Listener;
import com.github.benmanes.caffeine.cache.testing.CacheSpec.Loader;
import com.github.benmanes.caffeine.cache.testing.CacheSpec.Population;
import com.github.benmanes.caffeine.cache.testing.CacheSpec.StartTime;
import com.github.benmanes.caffeine.cache.testing.CacheValidationListener;
import com.github.benmanes.caffeine.cache.testing.CheckMaxLogLevel;
import com.github.benmanes.caffeine.cache.testing.CheckNoEvictions;
@@ -174,7 +175,8 @@ public void getAll_absent(LoadingCache<Int, Int> cache, CacheContext context) {

@Test(dataProvider = "caches")
@CacheSpec(population = Population.FULL,
expiryTime = Expire.ONE_MINUTE, expiry = CacheExpiry.CREATE)
expiryTime = Expire.ONE_MINUTE, expiry = CacheExpiry.CREATE,
startTime = {StartTime.RANDOM, StartTime.ONE_MINUTE_FROM_MAX})
public void put_replace(Cache<Int, Int> cache, CacheContext context) {
context.ticker().advance(Duration.ofSeconds(30));

@@ -198,7 +200,8 @@ public void put_replace(Cache<Int, Int> cache, CacheContext context) {

@Test(dataProvider = "caches")
@CacheSpec(population = Population.FULL,
expiryTime = Expire.ONE_MINUTE, expiry = CacheExpiry.CREATE)
expiryTime = Expire.ONE_MINUTE, expiry = CacheExpiry.CREATE,
startTime = {StartTime.RANDOM, StartTime.ONE_MINUTE_FROM_MAX})
public void put_replace(AsyncCache<Int, Int> cache, CacheContext context) {
var future = context.absentValue().toFuture();
context.ticker().advance(Duration.ofSeconds(30));
@@ -223,7 +226,8 @@ public void put_replace(AsyncCache<Int, Int> cache, CacheContext context) {

@Test(dataProvider = "caches")
@CacheSpec(population = Population.FULL,
expiryTime = Expire.ONE_MINUTE, expiry = CacheExpiry.CREATE)
expiryTime = Expire.ONE_MINUTE, expiry = CacheExpiry.CREATE,
startTime = {StartTime.RANDOM, StartTime.ONE_MINUTE_FROM_MAX})
public void put_replace(Map<Int, Int> map, CacheContext context) {
context.ticker().advance(Duration.ofSeconds(30));

@@ -247,7 +251,8 @@ public void put_replace(Map<Int, Int> map, CacheContext context) {

@Test(dataProvider = "caches")
@CacheSpec(population = Population.FULL,
expiryTime = Expire.ONE_MINUTE, expiry = CacheExpiry.CREATE)
expiryTime = Expire.ONE_MINUTE, expiry = CacheExpiry.CREATE,
startTime = {StartTime.RANDOM, StartTime.ONE_MINUTE_FROM_MAX})
public void putAll_replace(Cache<Int, Int> cache, CacheContext context) {
context.ticker().advance(Duration.ofSeconds(30));

@@ -272,11 +277,12 @@ public void putAll_replace(Cache<Int, Int> cache, CacheContext context) {

@Test(dataProvider = "caches")
@CacheSpec(population = Population.FULL,
expiryTime = Expire.ONE_MINUTE, expiry = CacheExpiry.CREATE)
expiryTime = Expire.ONE_MINUTE, expiry = CacheExpiry.CREATE,
startTime = {StartTime.RANDOM, StartTime.ONE_MINUTE_FROM_MAX})
public void replace_updated(Map<Int, Int> map, CacheContext context) {
context.ticker().advance(Duration.ofSeconds(30));
assertThat(map.replace(context.firstKey(), context.absentValue())).isNotNull();
context.ticker().advance(Duration.ofSeconds(30));
context.ticker().advance(Duration.ofSeconds(45));

context.cleanUp();
assertThat(map).isExhaustivelyEmpty();
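
A rough sketch of the arithmetic behind the longer second advance in replace_updated above, assuming the ONE_MINUTE_FROM_MAX seed and that the CREATE-based expiry leaves the original expiration untouched when an entry is replaced (both are readings of this diff, not statements from the commit):

import java.util.concurrent.TimeUnit;

public class AdvanceArithmeticSketch {
  public static void main(String[] args) {
    long start = Long.MAX_VALUE - TimeUnit.MINUTES.toNanos(1); // StartTime.ONE_MINUTE_FROM_MAX
    long expiresAt = start + TimeUnit.MINUTES.toNanos(1);      // == Long.MAX_VALUE for populated entries

    long oldAdvances = start + TimeUnit.SECONDS.toNanos(60);   // 30s + 30s: lands exactly on the maximum
    long newAdvances = start + TimeUnit.SECONDS.toNanos(75);   // 30s + 45s: wraps past Long.MAX_VALUE

    System.out.println(oldAdvances == Long.MAX_VALUE);         // true: never crosses the overflow point
    System.out.println(newAdvances < 0);                       // true: the clock has wrapped
    System.out.println((newAdvances - expiresAt) >= 0);        // true: the fixed check still reports expiry
    System.out.println(expiresAt <= newAdvances);              // false: the old check would miss it
  }
}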
@@ -311,12 +317,13 @@ public void replace_expiryFails_async(AsyncCache<Int, Int> cache, CacheContext c

@Test(dataProvider = "caches")
@CacheSpec(population = Population.FULL,
expiryTime = Expire.ONE_MINUTE, expiry = CacheExpiry.CREATE)
expiryTime = Expire.ONE_MINUTE, expiry = CacheExpiry.CREATE,
startTime = {StartTime.RANDOM, StartTime.ONE_MINUTE_FROM_MAX})
public void replaceConditionally_updated(Map<Int, Int> map, CacheContext context) {
Int key = context.firstKey();
context.ticker().advance(Duration.ofSeconds(30));
assertThat(map.replace(key, context.original().get(key), context.absentValue())).isTrue();
context.ticker().advance(Duration.ofSeconds(30));
context.ticker().advance(Duration.ofSeconds(45));

context.cleanUp();
assertThat(map).isExhaustivelyEmpty();
@@ -1031,7 +1038,8 @@ public void putIfAbsent_excessiveDuration(Cache<Int, Int> cache,

@Test(dataProvider = "caches")
@CacheSpec(population = Population.FULL,
expiry = CacheExpiry.WRITE, expiryTime = Expire.ONE_MINUTE)
expiry = CacheExpiry.WRITE, expiryTime = Expire.ONE_MINUTE,
startTime = {StartTime.RANDOM, StartTime.ONE_MINUTE_FROM_MAX})
public void putIfAbsent_insert(Cache<Int, Int> cache,
CacheContext context, VarExpiration<Int, Int> expireAfterVar) {
Int key = context.absentKey();
@@ -1049,7 +1057,8 @@ public void putIfAbsent_insert(Cache<Int, Int> cache,

@Test(dataProvider = "caches")
@CacheSpec(population = Population.FULL,
expiry = CacheExpiry.WRITE, expiryTime = Expire.ONE_MINUTE)
expiry = CacheExpiry.WRITE, expiryTime = Expire.ONE_MINUTE,
startTime = {StartTime.RANDOM, StartTime.ONE_MINUTE_FROM_MAX})
public void putIfAbsent_present(Cache<Int, Int> cache,
CacheContext context, VarExpiration<Int, Int> expireAfterVar) {
Int key = context.firstKey();
@@ -1136,7 +1145,8 @@ public void put_nullDuration(Cache<Int, Int> cache,

@Test(dataProvider = "caches")
@CacheSpec(population = Population.FULL,
expiry = CacheExpiry.WRITE, expiryTime = Expire.ONE_MINUTE)
expiry = CacheExpiry.WRITE, expiryTime = Expire.ONE_MINUTE,
startTime = {StartTime.RANDOM, StartTime.ONE_MINUTE_FROM_MAX})
public void put_insert(Cache<Int, Int> cache,
CacheContext context, VarExpiration<Int, Int> expireAfterVar) {
Int key = context.absentKey();
@@ -1154,7 +1164,8 @@ public void put_replace(Cache<Int, Int> cache,

@Test(dataProvider = "caches")
@CacheSpec(population = Population.FULL,
expiry = CacheExpiry.WRITE, expiryTime = Expire.ONE_MINUTE)
expiry = CacheExpiry.WRITE, expiryTime = Expire.ONE_MINUTE,
startTime = {StartTime.RANDOM, StartTime.ONE_MINUTE_FROM_MAX})
public void put_replace(Cache<Int, Int> cache,
CacheContext context, VarExpiration<Int, Int> expireAfterVar) {
Int key = context.firstKey();
@@ -59,6 +59,7 @@
import com.github.benmanes.caffeine.cache.testing.CacheSpec.Maximum;
import com.github.benmanes.caffeine.cache.testing.CacheSpec.Population;
import com.github.benmanes.caffeine.cache.testing.CacheSpec.ReferenceType;
import com.github.benmanes.caffeine.cache.testing.CacheSpec.StartTime;
import com.github.benmanes.caffeine.cache.testing.CacheSpec.Stats;
import com.github.benmanes.caffeine.cache.testing.GuavaCacheFromContext.GuavaLoadingCache;
import com.github.benmanes.caffeine.cache.testing.GuavaCacheFromContext.SingleLoader;
@@ -98,6 +99,7 @@ public final class CacheContext {
final Population population;
final Maximum maximumSize;
final Scheduler scheduler;
final StartTime startTime;
final Expire afterAccess;
final Expire afterWrite;
final Expire expiryTime;
@@ -132,7 +134,7 @@ public CacheContext(CacheContext context) {
context.keyStrength, context.valueStrength, context.cacheExecutor, context.cacheScheduler,
context.removalListenerType, context.evictionListenerType, context.population,
context.isAsyncLoader, context.compute, context.loader, context.implementation,
context.expiryTime);
context.startTime, context.expiryTime);
}

@SuppressWarnings({"NullAway.Init", "PMD.ExcessiveParameterList", "TooManyParameters"})
@@ -141,7 +143,7 @@ public CacheContext(InitialCapacity initialCapacity, Stats stats, CacheWeigher c
Expire refresh, ReferenceType keyStrength, ReferenceType valueStrength,
CacheExecutor cacheExecutor, CacheScheduler cacheScheduler, Listener removalListenerType,
Listener evictionListenerType, Population population, boolean isAsyncLoader, Compute compute,
Loader loader, Implementation implementation, Expire expiryTime) {
Loader loader, Implementation implementation, StartTime startTime, Expire expiryTime) {
this.initialCapacity = requireNonNull(initialCapacity);
this.stats = requireNonNull(stats);
this.weigher = cacheWeigher.create();
@@ -163,13 +165,14 @@ this.population = requireNonNull(population);
this.population = requireNonNull(population);
this.loader = requireNonNull(loader);
this.isAsyncLoader = isAsyncLoader;
this.ticker = new SerializableFakeTicker();
this.implementation = requireNonNull(implementation);
this.original = new LinkedHashMap<>();
this.initialSize = -1;
this.compute = compute;
this.expiryType = expiryType;
this.expiryTime = expiryTime;
this.compute = requireNonNull(compute);
this.expiryType = requireNonNull(expiryType);
this.expiryTime = requireNonNull(expiryTime);
this.startTime = requireNonNull(startTime);
this.ticker = new SerializableFakeTicker(startTime.create());
this.expiry = (expiryType == CacheExpiry.DISABLED) ? null : expiryType.createExpiry(expiryTime);
}

@@ -536,9 +539,9 @@ static final class SerializableFakeTicker extends FakeTicker implements Serializ

final long startTime;

public SerializableFakeTicker() {
startTime = ThreadLocalRandom.current().nextLong(Long.MIN_VALUE, Long.MAX_VALUE);
public SerializableFakeTicker(long startTime) {
advance(Duration.ofNanos(startTime));
this.startTime = startTime;
}
}
}
@@ -38,6 +38,7 @@
import com.github.benmanes.caffeine.cache.testing.CacheSpec.Maximum;
import com.github.benmanes.caffeine.cache.testing.CacheSpec.Population;
import com.github.benmanes.caffeine.cache.testing.CacheSpec.ReferenceType;
import com.github.benmanes.caffeine.cache.testing.CacheSpec.StartTime;
import com.github.benmanes.caffeine.cache.testing.CacheSpec.Stats;
import com.github.benmanes.caffeine.testing.Int;
import com.google.common.collect.ImmutableList;
@@ -98,6 +99,7 @@ private Set<List<Object>> combinations() {
var weigher = Sets.immutableEnumSet(Arrays.asList(cacheSpec.weigher()));
var executor = Sets.immutableEnumSet(Arrays.asList(cacheSpec.executor()));
var scheduler = Sets.immutableEnumSet(Arrays.asList(cacheSpec.scheduler()));
var startTime = Sets.immutableEnumSet(Arrays.asList(cacheSpec.startTime()));
var population = Sets.immutableEnumSet(Arrays.asList(cacheSpec.population()));
var maximumSize = Sets.immutableEnumSet(Arrays.asList(cacheSpec.maximumSize()));
var implementations = filterTypes(options.implementation(), cacheSpec.implementation());
@@ -135,7 +137,7 @@ private Set<List<Object>> combinations() {
return Sets.cartesianProduct(initialCapacity, statistics, weigher, maximumSize, expiry,
expireAfterAccess, expireAfterWrite, refreshAfterWrite, keys, values, executor, scheduler,
removalListener, evictionListener, population, asyncLoader, computations, loaders,
implementations);
implementations, startTime);
}

/** Returns the set of options filtered if a specific type is specified. */
@@ -170,6 +172,7 @@ private CacheContext newCacheContext(List<Object> combination) {
(Compute) combination.get(index++),
(Loader) combination.get(index++),
(Implementation) combination.get(index++),
(StartTime) combination.get(index),
cacheSpec.expiryTime());
}

@@ -42,6 +42,7 @@
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.TimeUnit;
import java.util.function.BiFunction;
import java.util.function.LongSupplier;
import java.util.function.Supplier;

import org.mockito.Mockito;
@@ -795,7 +796,7 @@ public TrackingExecutor create() {

/* --------------- Scheduler --------------- */

/** The executors retrieved from a supplier, each resulting in a new combination. */
/** The schedulers retrieved from a supplier, each resulting in a new combination. */
CacheScheduler[] scheduler() default {
CacheScheduler.DISABLED,
};
@@ -818,6 +819,29 @@ public Scheduler create() {
}
}

/* --------------- Ticker --------------- */

/** The starting time retrieved from a supplier, each resulting in a new combination. */
StartTime[] startTime() default {
StartTime.RANDOM,
};

/** The starting time that the ticker can be configured with. */
enum StartTime {
RANDOM(() -> ThreadLocalRandom.current().nextLong(Long.MIN_VALUE, Long.MAX_VALUE)),
ONE_MINUTE_FROM_MAX(() -> Long.MAX_VALUE - TimeUnit.MINUTES.toNanos(1));

private final LongSupplier startTime;

StartTime(LongSupplier startTime) {
this.startTime = requireNonNull(startTime);
}

public long create() {
return startTime.getAsLong();
}
}

/* --------------- Populated --------------- */

/**

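As a closing note on the harness changes: the new StartTime.ONE_MINUTE_FROM_MAX seed pins the ticker exactly one minute before Long.MAX_VALUE, and SerializableFakeTicker now advances a fresh ticker to whatever seed the spec selects. A small usage sketch, assuming the ticker in question is Guava's com.google.common.testing.FakeTicker (the import is not visible in this diff):

import java.time.Duration;
import java.util.concurrent.TimeUnit;
import com.google.common.testing.FakeTicker;

public class SeededTickerSketch {
  public static void main(String[] args) {
    // The "known bad" seed from the commit message: one minute shy of overflow.
    long seed = Long.MAX_VALUE - TimeUnit.MINUTES.toNanos(1);

    // Mirrors SerializableFakeTicker(long startTime): advance a fresh ticker to the seed.
    FakeTicker ticker = new FakeTicker();
    ticker.advance(Duration.ofNanos(seed));
    System.out.println(ticker.read() == seed);             // true

    // One more minute of test time reaches the last representable tick;
    // any further advance wraps the clock and exercises the fixed comparison.
    ticker.advance(Duration.ofMinutes(1));
    System.out.println(ticker.read() == Long.MAX_VALUE);   // true
  }
}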