SchemaMappingInspector @BatchMapping method support
Closes gh-673
rstoyanchev committed Apr 26, 2023
1 parent c7c75da commit 1a5dacc
Showing 2 changed files with 87 additions and 20 deletions.
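For orientation before the diffs: the commit teaches SchemaMappingInspector to recognize schema fields backed by @BatchMapping controller methods, so they are no longer reported as unmapped. Below is a minimal sketch of such a controller, modeled on the BatchMappingBookController test fixture added in this commit; the Book and Author records here are illustrative stand-ins rather than the framework's test types.

import java.util.List;
import java.util.Map;

import reactor.core.publisher.Mono;

import org.springframework.graphql.data.method.annotation.BatchMapping;
import org.springframework.graphql.data.method.annotation.QueryMapping;
import org.springframework.stereotype.Controller;

@Controller
class BookController {

    // Query field "books"
    @QueryMapping
    public List<Book> books() {
        return List.of();
    }

    // Batch-loads Book.author for all books in one call; after this commit the
    // inspector can see the mapping and its value type instead of flagging the
    // "author" field as unmapped.
    @BatchMapping
    public Mono<Map<Book, Author>> author(List<Book> books) {
        return Mono.empty();
    }

    // Illustrative domain types (stand-ins for the test's Book and Author)
    record Book(String id, String name) {}

    record Author(String id, String firstName, String lastName) {}
}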
@@ -28,6 +28,7 @@
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Callable;
+import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executor;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
@@ -349,8 +350,7 @@ public void configure(RuntimeWiring.Builder runtimeWiringBuilder) {
            info, this.argumentResolvers, this.validationHelper, this.exceptionResolver, this.executor);
}
else {
-    String dataLoaderKey = registerBatchLoader(info);
-    dataFetcher = new BatchMappingDataFetcher(dataLoaderKey);
+    dataFetcher = registerBatchLoader(info);
}
runtimeWiringBuilder.type(info.getCoordinates().getTypeName(), typeBuilder ->
        typeBuilder.dataFetcher(info.getCoordinates().getFieldName(), dataFetcher));
@@ -502,38 +502,49 @@ private String formatMappings(Class<?> handlerType, Collection<MappingInfo> info
        .collect(Collectors.joining("\n\t", "\n\t" + formattedType + ":" + "\n\t", ""));
}

-private String registerBatchLoader(MappingInfo info) {
+private DataFetcher<Object> registerBatchLoader(MappingInfo info) {
    if (!info.isBatchMapping()) {
        throw new IllegalArgumentException("Not a @BatchMapping method: " + info);
    }

    String dataLoaderKey = info.getCoordinates().toString();
    BatchLoaderRegistry registry = obtainApplicationContext().getBean(BatchLoaderRegistry.class);
+    BatchLoaderRegistry.RegistrationSpec<Object, Object> registration = registry.forName(dataLoaderKey);
+    if (info.getMaxBatchSize() > 0) {
+        registration.withOptions(options -> options.setMaxBatchSize(info.getMaxBatchSize()));
+    }

    HandlerMethod handlerMethod = info.getHandlerMethod();
    BatchLoaderHandlerMethod invocable = new BatchLoaderHandlerMethod(handlerMethod, this.executor);

    MethodParameter returnType = handlerMethod.getReturnType();
    Class<?> clazz = returnType.getParameterType();
-    Class<?> nestedClass = (clazz.equals(Callable.class) ? returnType.nested().getNestedParameterType() : clazz);

-    BatchLoaderRegistry.RegistrationSpec<Object, Object> registration = registry.forName(dataLoaderKey);
-    if (info.getMaxBatchSize() > 0) {
-        registration.withOptions(options -> options.setMaxBatchSize(info.getMaxBatchSize()));
+    if (clazz.equals(Callable.class)) {
+        returnType = returnType.nested();
+        clazz = returnType.getNestedParameterType();
    }

-    if (clazz.equals(Flux.class) || Collection.class.isAssignableFrom(nestedClass)) {
+    if (clazz.equals(Flux.class) || Collection.class.isAssignableFrom(clazz)) {
        registration.registerBatchLoader(invocable::invokeForIterable);
+        ResolvableType valueType = ResolvableType.forMethodParameter(returnType.nested());
+        return new BatchMappingDataFetcher(info, valueType, dataLoaderKey);
    }
-    else if (clazz.equals(Mono.class) || nestedClass.equals(Map.class)) {
-        registration.registerMappedBatchLoader(invocable::invokeForMap);
+
+    if (clazz.equals(Mono.class)) {
+        returnType = returnType.nested();
+        clazz = returnType.getNestedParameterType();
    }
-    else {
-        throw new IllegalStateException("@BatchMapping method is expected to return " +
-                "Flux<V>, List<V>, Mono<Map<K, V>>, or Map<K, V>: " + handlerMethod);
+
+    if (Map.class.isAssignableFrom(clazz)) {
+        registration.registerMappedBatchLoader(invocable::invokeForMap);
+        ResolvableType valueType = ResolvableType.forMethodParameter(returnType.nested(1));
+        return new BatchMappingDataFetcher(info, valueType, dataLoaderKey);
    }

-    return dataLoaderKey;
+    throw new IllegalStateException(
+            "@BatchMapping method is expected to return " +
+            "Mono<Map<K, V>>, Map<K, V>, Flux<V>, or Collection<V>: " + handlerMethod);
}

/**
@@ -715,20 +726,34 @@ public String toString() {
}


-static class BatchMappingDataFetcher implements DataFetcher<Object> {
+static class BatchMappingDataFetcher implements DataFetcher<Object>, SelfDescribingDataFetcher<Object> {

+    private final MappingInfo info;
+
+    private final ResolvableType returnType;
+
    private final String dataLoaderKey;

-    BatchMappingDataFetcher(String dataLoaderKey) {
+    BatchMappingDataFetcher(MappingInfo info, ResolvableType valueType, String dataLoaderKey) {
+        this.info = info;
+        this.returnType = ResolvableType.forClassWithGenerics(CompletableFuture.class, valueType);
        this.dataLoaderKey = dataLoaderKey;
    }

+    @Override
+    public String getDescription() {
+        return "@BatchMapping " + this.info.getHandlerMethod().getShortLogMessage();
+    }
+
+    @Override
+    public ResolvableType getReturnType() {
+        return this.returnType;
+    }
+
    @Override
    public Object get(DataFetchingEnvironment env) {
        DataLoader<?, ?> dataLoader = env.getDataLoaderRegistry().getDataLoader(this.dataLoaderKey);
-        if (dataLoader == null) {
-            throw new IllegalStateException("No DataLoader for key '" + this.dataLoaderKey + "'");
-        }
+        Assert.state(dataLoader != null, "No DataLoader for key '" + this.dataLoaderKey + "'");
        return dataLoader.load(env.getSource());
    }
}
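The trickiest part of the first file's change is how the batch value type V is resolved from the handler's return signature before it is handed to BatchMappingDataFetcher, which then advertises it as CompletableFuture<V> via getReturnType(). Here is a standalone sketch of that MethodParameter nesting for the Mono<Map<K, V>> case, using a hypothetical sample method rather than Spring GraphQL internals. The hunks that follow are the accompanying test changes.

import java.lang.reflect.Method;
import java.util.List;
import java.util.Map;

import reactor.core.publisher.Mono;

import org.springframework.core.MethodParameter;
import org.springframework.core.ResolvableType;

class ValueTypeResolutionSketch {

    // Hypothetical batch handler shape: Mono<Map<K, V>> author(List<K> keys)
    static Mono<Map<String, Integer>> sample(List<String> keys) {
        return Mono.empty();
    }

    public static void main(String[] args) throws Exception {
        Method method = ValueTypeResolutionSketch.class.getDeclaredMethod("sample", List.class);
        MethodParameter returnType = new MethodParameter(method, -1); // -1 selects the return type

        // Step inside Mono<...> to point at Map<K, V>, as registerBatchLoader does
        returnType = returnType.nested();

        // nested(1) goes one level deeper and picks generic index 1, i.e. the Map
        // value type V; this is the type the data fetcher later wraps in CompletableFuture
        ResolvableType valueType = ResolvableType.forMethodParameter(returnType.nested(1));
        System.out.println(valueType.resolve()); // expected: class java.lang.Integer
    }
}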
@@ -19,6 +19,7 @@
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
+import java.util.Map;
import java.util.Set;
import java.util.concurrent.CompletableFuture;

@@ -30,13 +31,15 @@
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;
import reactor.core.publisher.Flux;
+import reactor.core.publisher.Mono;

import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.data.domain.OffsetScrollPosition;
import org.springframework.data.domain.Window;
import org.springframework.graphql.Author;
import org.springframework.graphql.Book;
import org.springframework.graphql.data.method.annotation.Argument;
+import org.springframework.graphql.data.method.annotation.BatchMapping;
import org.springframework.graphql.data.method.annotation.MutationMapping;
import org.springframework.graphql.data.method.annotation.QueryMapping;
import org.springframework.graphql.data.method.annotation.SchemaMapping;
@@ -309,6 +312,29 @@ void reportIsEmptyWhenFieldHasDataFetcherMapping() {
        assertThatReport(report).hasUnmappedFieldCount(0).hasSkippedTypeCount(0);
    }

+    @Test
+    void reportIsEmptyWhenFieldHasBatchMapping() {
+        String schema = """
+            type Query {
+                books: [Book]
+            }
+            type Book {
+                id: ID
+                name: String
+                author: Author
+            }
+            type Author {
+                id: ID
+                firstName: String
+                lastName: String
+            }
+            """;
+        SchemaMappingInspector.Report report = inspectSchema(schema, BatchMappingBookController.class);
+        assertThatReport(report).hasUnmappedFieldCount(0).hasSkippedTypeCount(0);
+    }
+
    @Test
    void reportHasUnmappedField() {
        String schema = """
@@ -527,6 +553,7 @@ private RuntimeWiring createRuntimeWiring(Class<?>... controllerTypes) {
    for (Class<?> controllerType : controllerTypes) {
        context.registerBean(controllerType);
    }
+    context.registerBean(BatchLoaderRegistry.class, () -> new DefaultBatchLoaderRegistry());
    context.refresh();

    AnnotatedControllerConfigurer configurer = new AnnotatedControllerConfigurer();
@@ -604,6 +631,21 @@ public Flux<List<Book>> bookSearch(@Argument String author) {
    }


+@Controller
+private static class BatchMappingBookController {
+
+    @QueryMapping
+    public List<Book> books() {
+        return Collections.emptyList();
+    }
+
+    @BatchMapping
+    public Mono<Map<Book, Author>> author(List<Book> books) {
+        return Mono.empty();
+    }
+}
+
+
@Controller
static class TeamController {
    @QueryMapping
@@ -684,7 +726,7 @@ public SchemaInspectionReportAssert hasUnmappedDataFetcherCount(int expected) {
public SchemaInspectionReportAssert hasSkippedTypeCount(int expected) {
    isNotNull();
    if (this.actual.skippedTypes().size() != expected) {
-        failWithMessage("Expected %s skipped types, found %d.", expected, this.actual.skippedTypes());
+        failWithMessage("Expected %s skipped types, found %s.", expected, this.actual.skippedTypes());
    }
    return this;
}
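One practical consequence visible in the test changes above: registering @BatchMapping methods requires a BatchLoaderRegistry bean in the application context, which is why the test now registers a DefaultBatchLoaderRegistry. A minimal sketch of providing one in plain Spring configuration follows; Spring Boot users typically get this bean from auto-configuration, and the configuration class name here is illustrative.

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.graphql.execution.BatchLoaderRegistry;
import org.springframework.graphql.execution.DefaultBatchLoaderRegistry;

@Configuration
public class BatchLoadingConfig {

    // AnnotatedControllerConfigurer looks this bean up by type when it registers
    // batch loaders for @BatchMapping methods
    @Bean
    public BatchLoaderRegistry batchLoaderRegistry() {
        return new DefaultBatchLoaderRegistry();
    }
}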
