forked from graphql-java/java-dataloader
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathValueCache.java
More file actions
161 lines (149 loc) · 7.06 KB
/
ValueCache.java
File metadata and controls
161 lines (149 loc) · 7.06 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
package org.dataloader;
import org.dataloader.annotations.PublicSpi;
import org.dataloader.impl.CompletableFutureKit;
import org.dataloader.impl.NoOpValueCache;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletableFuture;
/**
* The {@link ValueCache} is used by data loaders that use caching and want a long-lived or external cache
* of values. The {@link ValueCache} is used as a place to cache values when they come back from an async
* cache store.
* <p>
* It differs from {@link CacheMap} which is in fact a cache of promised values aka {@link CompletableFuture}<V>'s.
* <p>
* {@link ValueCache} is more suited to be a wrapper of a long-lived or externally cached values. {@link CompletableFuture}s can't
* be easily placed in an external cache outside the JVM say, hence the need for the {@link ValueCache}.
* <p>
* {@link DataLoader}s use a two stage cache strategy if caching is enabled. If the {@link CacheMap} already has a promise for a value,
* that promise is used. If not, then the {@link ValueCache} is asked for a value; if it has one, then that is returned (and cached as a promise in the {@link CacheMap}).
* <p>
* If there is no value then the key is queued and loaded via the {@link BatchLoader} calls. The returned values will then be stored in
* the {@link ValueCache} and the promises to those values are also stored in the {@link CacheMap}.
* <p>
* The default implementation is a no-op store which replies with the key always missing and doesn't
* store any actual results. This is to avoid duplicating the stored data between the {@link CacheMap}
* out of the box.
* <p>
* The API signature uses {@link CompletableFuture}s because the backing implementation MAY be a remote external cache
* and hence exceptions may happen in retrieving values, and they may take time to complete.
*
* @param <K> the type of cache keys
* @param <V> the type of cache values
*
* @author <a href="https://github.com/craig-day">Craig Day</a>
* @author <a href="https://github.com/bbakerman/">Brad Baker</a>
*/
@PublicSpi
public interface ValueCache<K, V> {

    /**
     * Creates a new value cache, using the default no-op implementation.
     *
     * @param <K> the type of cache keys
     * @param <V> the type of cache values
     *
     * @return the default no-op cache store
     */
    static <K, V> ValueCache<K, V> defaultValueCache() {
        //noinspection unchecked
        return (ValueCache<K, V>) NoOpValueCache.NOOP;
    }

    /**
     * Gets the specified key from the value cache. If the key is not present, then the implementation MUST return an exceptionally completed future
     * and not null, because null is a valid cacheable value. An exceptionally completed future will cause {@link DataLoader} to load the key via batch loading
     * instead.
     *
     * @param key the key to retrieve
     *
     * @return a future containing the cached value (which may be null) or an exceptionally completed future if the key does
     * not exist in the cache.
     */
    CompletableFuture<V> get(K key);

    /**
     * Gets the specified keys from the value cache, in a batch call. If your underlying cache cannot do batch caching retrieval
     * then do not implement this method, and it will delegate back to {@link #get(Object)} for you.
     * <p>
     * Each item in the returned list of values is a {@link Try}. If the key could not be found then a failed Try will be returned, otherwise
     * a successful Try containing the cached value is returned.
     * <p>
     * You MUST return a List that is the same size as the keys passed in. The code will assert if you do not.
     * <p>
     * If your cache does not have anything in it at all, and you want to quickly short-circuit this method and avoid any object allocation
     * then throw {@link ValueCachingNotSupported} and the code will know there is nothing in cache at this time.
     *
     * @param keys the list of keys to get cached values for.
     *
     * @return a future containing a list of {@link Try} cached values for each key passed in.
     *
     * @throws ValueCachingNotSupported if this cache wants to short-circuit this method completely
     */
    default CompletableFuture<List<Try<V>>> getValues(List<K> keys) throws ValueCachingNotSupported {
        List<CompletableFuture<Try<V>>> cacheLookups = new ArrayList<>(keys.size());
        for (K key : keys) {
            // Try.tryFuture turns an exceptionally completed single-key lookup into a
            // failed Try instead of failing the whole batch future
            CompletableFuture<Try<V>> cacheTry = Try.tryFuture(get(key));
            cacheLookups.add(cacheTry);
        }
        return CompletableFutureKit.allOf(cacheLookups);
    }

    /**
     * Stores the value with the specified key, or updates it if the key already exists.
     *
     * @param key   the key to store
     * @param value the value to store
     *
     * @return a future containing the stored value for fluent composition
     */
    CompletableFuture<V> set(K key, V value);

    /**
     * Stores the values with the specified keys, or updates them if the keys already exist. If your underlying cache can't do batch cache setting
     * then do not implement this method, and it will delegate back to {@link #set(Object, Object)} for you.
     *
     * @param keys   the keys to store
     * @param values the values to store
     *
     * @return a future containing the stored values for fluent composition
     *
     * @throws ValueCachingNotSupported if this cache wants to short-circuit this method completely
     */
    default CompletableFuture<List<V>> setValues(List<K> keys, List<V> values) throws ValueCachingNotSupported {
        // presize to the known number of entries, matching getValues above
        List<CompletableFuture<V>> cacheSets = new ArrayList<>(keys.size());
        for (int i = 0; i < keys.size(); i++) {
            K k = keys.get(i);
            V v = values.get(i);
            CompletableFuture<V> setCall = set(k, v);
            // a failed set is swallowed via Try.tryFuture; the original value is
            // still passed through so callers can compose on it
            CompletableFuture<V> set = Try.tryFuture(setCall).thenApply(ignored -> v);
            cacheSets.add(set);
        }
        return CompletableFutureKit.allOf(cacheSets);
    }

    /**
     * Deletes the entry with the specified key from the value cache, if it exists.
     * <p>
     * NOTE: Your implementation MUST not throw exceptions, rather it should return a CompletableFuture that has completed exceptionally. Failure
     * to do this may cause the {@link DataLoader} code to not run properly.
     *
     * @param key the key to delete
     *
     * @return a void future for error handling and fluent composition
     */
    CompletableFuture<Void> delete(K key);

    /**
     * Clears all entries from the value cache.
     * <p>
     * NOTE: Your implementation MUST not throw exceptions, rather it should return a CompletableFuture that has completed exceptionally. Failure
     * to do this may cause the {@link DataLoader} code to not run properly.
     *
     * @return a void future for error handling and fluent composition
     */
    CompletableFuture<Void> clear();

    /**
     * This special exception can be used to short-circuit a caching method.
     * <p>
     * Stack-trace filling is suppressed because the exception is used as a cheap
     * control-flow signal, not as an error report.
     */
    class ValueCachingNotSupported extends UnsupportedOperationException {
        @Override
        public Throwable fillInStackTrace() {
            return this;
        }
    }
}