8000 Upgrading back to dataloader 3.2 by bbakerman · Pull Request #2507 · graphql-java/graphql-java · GitHub
[go: up one dir, main page]

Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -89,7 +89,7 @@ dependencies {
compileOnly 'org.jetbrains:annotations:20.1.0'
implementation 'org.antlr:antlr4-runtime:' + antlrVersion
implementation 'org.slf4j:slf4j-api:' + slf4jVersion
api 'com.graphql-java:java-dataloader:2.2.3'
api 'com.graphql-java:java-dataloader:3.1.0'
api 'org.reactivestreams:reactive-streams:' + reactiveStreamsVersion
antlr 'org.antlr:antlr4:' + antlrVersion
implementation 'com.google.guava:guava:30.0-jre'
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,133 @@
package graphql.execution.instrumentation

import graphql.ExecutionInput
import graphql.GraphQL
import graphql.TestUtil
import graphql.schema.DataFetcher
import graphql.schema.DataFetchingEnvironment
import org.dataloader.BatchLoader
import org.dataloader.DataLoader
import org.dataloader.DataLoaderFactory
import org.dataloader.DataLoaderOptions
import org.dataloader.DataLoaderRegistry
import org.dataloader.ValueCache
import spock.lang.Specification

import java.util.concurrent.CompletableFuture

// Verifies that java-dataloader 3.x ValueCache implementations whose get/set
// complete asynchronously (off-thread, after a delay) still integrate correctly
// with graphql-java's dispatch: cached keys resolve from the cache, missing keys
// fall through to the batch loader.
class DataLoaderCacheCanBeAsyncTest extends Specification {

// Minimal schema: Query.user(id) resolves a User via the "users" DataLoader.
def sdl = """
type Query {
user(id : ID) : User
}

type User {
id : ID
name : String
}
"""

// In-memory stand-in for an external async cache (e.g. Redis). Each get/set
// completes on a ForkJoin pool thread after a random delay so completion
// order is nondeterministic — exercising the async ValueCache code path.
static class CustomValueCache implements ValueCache<String, Object> {
Map<String, Object> store = [:]

// Random delay in [min, max) milliseconds.
int getRandomNumber(int min, int max) {
Random random = new Random()
return random.nextInt(max - min) + min
}

@Override
CompletableFuture get(String key) {
return CompletableFuture.supplyAsync({
Thread.sleep(getRandomNumber(100, 500))
if (store.containsKey(key)) {
return store.get(key)
}
// ValueCache contract: an exceptionally-completed future signals a
// cache miss, so the DataLoader falls back to the batch loader.
throw new RuntimeException("Key Missing")
})
}

@Override
CompletableFuture set(String key, Object value) {
return CompletableFuture.supplyAsync({
Thread.sleep(getRandomNumber(100, 500))
store.put(key, value)
return value
})
}

// delete/clear are not exercised by this test; complete immediately.
@Override
CompletableFuture<Void> delete(String key) {
return CompletableFuture.completedFuture(null)
}

@Override
CompletableFuture<Void> clear() {
return CompletableFuture.completedFuture(null)
}
}

DataLoaderRegistry registry
GraphQL graphQL

// Builds a fresh GraphQL instance and registry per test: a batch loader that
// fabricates users from their keys, wrapped in a DataLoader backed by
// CustomValueCache pre-seeded so key "a" is a cache hit.
void setup() {

// Groovy closure coerced to BatchLoader; returns [id, name] maps per key.
BatchLoader userBatchLoader = { List<String> keys ->
return CompletableFuture.supplyAsync({ ->
Thread.sleep(100)
def users = []
for (String k : keys) {
users.add([id: k, name: k + "Name"])
}
users
})
}


def valueCache = new CustomValueCache()
// Pre-seed "a" so its resolved value must come from the value cache,
// not the batch loader (asserted below via the "cachedA" fields).
valueCache.store.put("a", [id: "cachedA", name: "cachedAName"])

DataLoaderOptions options = DataLoaderOptions.newOptions().setValueCache(valueCache).setCachingEnabled(true)
DataLoader userDataLoader = DataLoaderFactory.newDataLoader(userBatchLoader, options)

registry = DataLoaderRegistry.newRegistry()
.register("users", userDataLoader)
.build()

// DataFetcher delegates to the per-request DataLoader from the environment.
DataFetcher userDF = { DataFetchingEnvironment env ->
def id = env.getArgument("id")
def loader = env.getDataLoader("users")
return loader.load(id)
}

def schema = TestUtil.schema(sdl, [Query: [user: userDF]])
graphQL = GraphQL.newGraphQL(schema).build()

}

def "can execute data loader calls"() {
def query = '''
query {
a: user(id : "a") {
id name
}
b: user(id : "b") {
id name
}
c: user(id : "c") {
id name
}
}
'''
def executionInput = ExecutionInput.newExecutionInput(query).dataLoaderRegistry(registry).build()

when:
def er = graphQL.execute(executionInput)
then:
er.errors.isEmpty()
// "a" comes from the pre-seeded value cache; "b"/"c" from the batch loader.
er.data == [a: [id: "cachedA", name: "cachedAName"],
b: [id: "b", name: "bName"],
c: [id: "c", name: "cName"],
]
}
}
48 changes: 21 additions & 27 deletions src/test/groovy/readme/DataLoaderBatchingExamples.java
Original file line number Diff line number Diff line change
Expand Up @@ -12,10 +12,11 @@
import org.dataloader.BatchLoaderContextProvider;
import org.dataloader.BatchLoaderEnvironment;
import org.dataloader.BatchLoaderWithContext;
import org.dataloader.CacheMap;
import org.dataloader.DataLoader;
import org.dataloader.DataLoaderFactory;
import org.dataloader.DataLoaderOptions;
import org.dataloader.DataLoaderRegistry;
import org.dataloader.ValueCache;

import java.util.List;
import java.util.Map;
Expand Down Expand Up @@ -105,7 +106,7 @@ public Object get(DataFetchingEnvironment environment) {
//
// Since data loaders are stateful, they are created per execution request.
//
DataLoader<String, Object> characterDataLoader = DataLoader.newDataLoader(characterBatchLoader);
DataLoader<String, Object> characterDataLoader = DataLoaderFactory.newDataLoader(characterBatchLoader);

//
// DataLoaderRegistry is a place to register all data loaders in that needs to be dispatched together
Expand All @@ -130,18 +131,19 @@ public boolean containsKey(String key) {
return false;
}

public Object getValue(String key) {
public CompletableFuture<Object> getValue(String key) {
return null;
}

public CacheMap<String, Object> setValue(String key, Object value) {
public CompletableFuture<Object> setValue(String key, Object value) {
return null;
}

public void clearKey(String key) {
public CompletableFuture<Void> clearKey(String key) {
return null;
}

public CacheMap<String, Object> clearAll() {
public CompletableFuture<Void> clearAll() {
return null;
}
}
Expand All @@ -157,39 +159,31 @@ public CompletionStage<List<Object>> load(List<String> keys) {

private void changeCachingSolutionOfDataLoader() {

CacheMap<String, Object> crossRequestCacheMap = new CacheMap<String, Object>() {
@Override
public boolean containsKey(String key) {
return redisIntegration.containsKey(key);
}

ValueCache<String, Object> crossRequestValueCache = new ValueCache<String, Object>() {
@Override
public Object get(String key) {
public CompletableFuture<Object> get(String key) {
return redisIntegration.getValue(key);
}

@Override
public CacheMap<String, Object> set(String key, Object value) {
redisIntegration.setValue(key, value);
return this;
public CompletableFuture<Object> set(String key, Object value) {
return redisIntegration.setValue(key, value);
}

@Override
public CacheMap<String, Object> delete(String key) {
redisIntegration.clearKey(key);
return this;
public CompletableFuture<Void> delete(String key) {
return redisIntegration.clearKey(key);
}

@Override
public CacheMap<String, Object> clear() {
redisIntegration.clearAll();
return this;
public CompletableFuture<Void> clear() {
return redisIntegration.clearAll();
}
};

DataLoaderOptions options = DataLoaderOptions.newOptions().setCacheMap(crossRequestCacheMap);
DataLoaderOptions options = DataLoaderOptions.newOptions().setValueCache(crossRequestValueCache);

DataLoader<String, Object> dataLoader = DataLoader.newDataLoader(batchLoader, options);
DataLoader<String, Object> dataLoader = DataLoaderFactory.newDataLoader(batchLoader, options);
}

private void doNotUseAsyncInYouDataFetcher() {
Expand All @@ -201,7 +195,7 @@ public CompletionStage<List<Object>> load(List<String> keys) {
}
};

DataLoader<String, Object> characterDataLoader = DataLoader.newDataLoader(batchLoader);
DataLoader<String, Object> characterDataLoader = DataLoaderFactory.newDataLoader(batchLoader);

// .... later in your data fetcher

Expand Down Expand Up @@ -229,7 +223,7 @@ public CompletionStage<List<Object>> load(List<String> keys) {
}
};

DataLoader<String, Object> characterDataLoader = DataLoader.newDataLoader(batchLoader);
DataLoader<String, Object> characterDataLoader = DataLoaderFactory.newDataLoader(batchLoader);

// .... later in your data fetcher

Expand Down Expand Up @@ -277,7 +271,7 @@ public Object getContext() {
// this creates an overall context for the dataloader
//
DataLoaderOptions loaderOptions = DataLoaderOptions.newOptions().setBatchLoaderContextProvider(contextProvider);
DataLoader<String, Object> characterDataLoader = DataLoader.newDataLoader(batchLoaderWithCtx, loaderOptions);
DataLoader<String, Object> characterDataLoader = DataLoaderFactory.newDataLoader(batchLoaderWithCtx, loaderOptions);

// .... later in your data fetcher

Expand Down
0