Allow adaption on class level and make FieldDataFetcher plugable #1532

Merged 1 commit on Sep 2, 2022
@@ -49,63 +49,66 @@ public static Optional<AdaptTo> getAdaptTo(Field field, Annotations annotations)
* @return Potentially a AdaptTo model
*/
public static Optional<AdaptTo> getAdaptTo(Reference r, Annotations annotations) {
Type type = getAdaptTo(annotations);
if (type != null) {
String scalarName = getScalarName(type);
Reference reference = Scalars.getScalar(scalarName);
AdaptTo adaptTo = new AdaptTo(reference);
// Check the way to create this (deserializeMethod)
// First check if the user supplied a way
String deserializeMethod = getDeserializeMethod(annotations);
if (deserializeMethod != null) {
adaptTo.setDeserializeMethod(deserializeMethod);
} else {
// Auto detect this.
String className = r.getClassName();
if (!r.getType().equals(ReferenceType.SCALAR)) { // mapping to scalar stays on default NONE
ClassInfo classInfo = ScanningContext.getIndex().getClassByName(DotName.createSimple(className));
if (classInfo != null) {
// Get Parameter type
Type parameter = Type.create(DotName.createSimple(reference.getClassName()), Type.Kind.CLASS);

// Check if we can use a constructor
MethodInfo constructor = classInfo.method(CONTRUCTOR_METHOD_NAME, parameter);
if (constructor != null) {
adaptTo.setDeserializeMethod(CONTRUCTOR_METHOD_NAME); // Create new instance with a contructor
} else {
// Check if we can use setValue
MethodInfo setValueMethod = classInfo.method(SET_VALUE_METHOD_NAME, parameter);
if (setValueMethod != null) {
adaptTo.setDeserializeMethod(SET_VALUE_METHOD_NAME);

if (r.isAdaptingTo()) {
return Optional.of(r.getAdaptTo());
} else {
Type type = getAdaptTo(annotations);
if (type != null) {
String scalarName = getScalarName(type);
Reference reference = Scalars.getScalar(scalarName);
AdaptTo adaptTo = new AdaptTo(reference);
// Check the way to create this (deserializeMethod)
// First check if the user supplied a way
String deserializeMethod = getDeserializeMethod(annotations);
if (deserializeMethod != null) {
adaptTo.setDeserializeMethod(deserializeMethod);
} else {
// Auto detect this.
String className = r.getClassName();
if (!r.getType().equals(ReferenceType.SCALAR)) { // mapping to scalar stays on default NONE
ClassInfo classInfo = ScanningContext.getIndex().getClassByName(DotName.createSimple(className));
if (classInfo != null) {
// Get Parameter type
Type parameter = Type.create(DotName.createSimple(reference.getClassName()), Type.Kind.CLASS);

// Check if we can use a constructor
MethodInfo constructor = classInfo.method(CONTRUCTOR_METHOD_NAME, parameter);
if (constructor != null) {
adaptTo.setDeserializeMethod(CONTRUCTOR_METHOD_NAME); // Create new instance with a contructor
} else {
// Check if we can use static fromXXXXX
String staticFromMethodName = FROM + scalarName;
MethodInfo staticFromMethod = classInfo.method(staticFromMethodName, parameter);
if (staticFromMethod != null) {
adaptTo.setDeserializeMethod(staticFromMethodName);
// Check if we can use setValue
MethodInfo setValueMethod = classInfo.method(SET_VALUE_METHOD_NAME, parameter);
if (setValueMethod != null) {
adaptTo.setDeserializeMethod(SET_VALUE_METHOD_NAME);
} else {
// Check if we can use static getInstance
MethodInfo staticGetInstance = classInfo.method(GET_INSTANCE_METHOD_NAME, parameter);
if (staticGetInstance != null) {
adaptTo.setDeserializeMethod(GET_INSTANCE_METHOD_NAME);
// Check if we can use static fromXXXXX
String staticFromMethodName = FROM + scalarName;
MethodInfo staticFromMethod = classInfo.method(staticFromMethodName, parameter);
if (staticFromMethod != null) {
adaptTo.setDeserializeMethod(staticFromMethodName);
} else {
// Check if we can use static getInstance
MethodInfo staticGetInstance = classInfo.method(GET_INSTANCE_METHOD_NAME, parameter);
if (staticGetInstance != null) {
adaptTo.setDeserializeMethod(GET_INSTANCE_METHOD_NAME);
}
}
}
}
}

}
}
}
}

// Get serializeMethod (default to toString)
String serializeMethod = getSerializeMethod(annotations);
if (serializeMethod != null) {
adaptTo.setSerializeMethod(serializeMethod);
}
// Get serializeMethod (default to toString)
String serializeMethod = getSerializeMethod(annotations);
if (serializeMethod != null) {
adaptTo.setSerializeMethod(serializeMethod);
}

return Optional.of(adaptTo);
} else {
// TODO: Support other than Scalar mapping
return Optional.of(adaptTo);
}
}
return Optional.empty();
}
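The auto-detection above tries, in order: a constructor that takes the scalar type, a setValue method, a static fromXxx method named after the scalar, and a static getInstance method; serialization still defaults to toString. A minimal sketch of a type the constructor path would pick up (the class and field names are hypothetical, not part of this PR):

// Hypothetical example: a type adapted to the String scalar. AdaptToHelper would
// detect the single-argument constructor and use it as the deserialize method;
// toString remains the default serialize method.
public class Isbn {

    private final String value;

    public Isbn(String value) { // constructor taking the scalar type, auto-detected
        this.value = value;
    }

    @Override
    public String toString() {
        return value;
    }
}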
@@ -59,8 +59,9 @@ public static Optional<AdaptWith> getAdaptWith(Direction direction, ReferenceCreator referenceCreator, Reference r,
*/
public static Optional<AdaptWith> getAdaptWith(Direction direction, ReferenceCreator referenceCreator, Reference r,
Annotations annotations, AdapterType adapterType) {

if (adapterType != null) {
if (r.isAdaptingWith()) {
return Optional.of(r.getAdaptWith());
} else if (adapterType != null) {
Type type = adapterType.type;
AdaptWith adaptWith = adapterType.adaptWith;
if (type.kind().equals(Type.Kind.CLASS)) {
@@ -50,7 +50,7 @@
import io.smallrye.graphql.execution.Classes;
import io.smallrye.graphql.execution.datafetcher.BatchDataFetcher;
import io.smallrye.graphql.execution.datafetcher.CollectionCreator;
import io.smallrye.graphql.execution.datafetcher.FieldDataFetcher;
import io.smallrye.graphql.execution.datafetcher.PlugableDataFetcher;
import io.smallrye.graphql.execution.error.ErrorInfoMap;
import io.smallrye.graphql.execution.event.EventEmitter;
import io.smallrye.graphql.execution.resolver.InterfaceOutputRegistry;
@@ -642,7 +642,7 @@ private GraphQLFieldDefinition createGraphQLFieldDefinitionFromField(Reference o
GraphQLFieldDefinition graphQLFieldDefinition = fieldBuilder.build();

// DataFetcher
FieldDataFetcher<?> datafetcher = new FieldDataFetcher<>(field, getTypeForField(field), owner);
PlugableDataFetcher<?> datafetcher = dataFetcherFactory.getFieldDataFetcher(field, getTypeForField(field), owner);
this.codeRegistryBuilder.dataFetcher(FieldCoordinates.coordinates(owner.getName(), graphQLFieldDefinition.getName()),
datafetcher);

@@ -14,12 +14,15 @@
import graphql.schema.DataFetcher;
import io.smallrye.graphql.execution.datafetcher.CompletionStageDataFetcher;
import io.smallrye.graphql.execution.datafetcher.DefaultDataFetcher;
import io.smallrye.graphql.execution.datafetcher.FieldDataFetcher;
import io.smallrye.graphql.execution.datafetcher.MultiDataFetcher;
import io.smallrye.graphql.execution.datafetcher.PlugableBatchableDataFetcher;
import io.smallrye.graphql.execution.datafetcher.PlugableDataFetcher;
import io.smallrye.graphql.execution.datafetcher.PublisherDataFetcher;
import io.smallrye.graphql.execution.datafetcher.UniDataFetcher;
import io.smallrye.graphql.schema.model.Field;
import io.smallrye.graphql.schema.model.Operation;
import io.smallrye.graphql.schema.model.Reference;
import io.smallrye.graphql.schema.model.Type;
import io.smallrye.graphql.schema.model.Wrapper;
import io.smallrye.graphql.spi.DataFetcherService;
@@ -51,6 +54,16 @@ public <T> DataFetcher<T> getDataFetcher(Operation operation, Type type) {
return (DataFetcher<T>) get(operation, type);
}

public <T> PlugableDataFetcher<T> getFieldDataFetcher(Field field, Type type, Reference owner) {
for (DataFetcherService dfe : dataFetcherServices) {
PlugableDataFetcher df = dfe.getFieldDataFetcher(field, type, owner);
if (df != null) {
return (PlugableDataFetcher) df;
}
}
return new FieldDataFetcher<>(field, type, owner);
}
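The new getFieldDataFetcher above asks every registered DataFetcherService for a field-level fetcher first and only falls back to the default FieldDataFetcher when none is supplied. A minimal sketch of a custom fetcher such a service could return, assuming a hypothetical LoggingFieldDataFetcher that simply decorates the default one:

import graphql.schema.DataFetchingEnvironment;
import io.smallrye.graphql.execution.datafetcher.FieldDataFetcher;
import io.smallrye.graphql.execution.datafetcher.PlugableDataFetcher;
import io.smallrye.graphql.schema.model.Field;
import io.smallrye.graphql.schema.model.Reference;
import io.smallrye.graphql.schema.model.Type;

// Hypothetical sketch, not part of this PR: logs each resolved field and then
// delegates to the default FieldDataFetcher. A DataFetcherService implementation
// could return an instance of this from getFieldDataFetcher(field, type, owner).
public class LoggingFieldDataFetcher<T> implements PlugableDataFetcher<T> {

    private final FieldDataFetcher<T> delegate;
    private final String fieldName;

    public LoggingFieldDataFetcher(Field field, Type type, Reference owner) {
        this.delegate = new FieldDataFetcher<>(field, type, owner);
        this.fieldName = field.getName();
    }

    @Override
    public T get(DataFetchingEnvironment dfe) throws Exception {
        System.out.println("Resolving field " + fieldName);
        return delegate.get(dfe);
    }
}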

public <K, T> BatchLoaderWithContext<K, T> getSourceBatchLoader(Operation operation, Type type) {
return (BatchLoaderWithContext<K, T>) get(operation, type);
}
Expand Down Expand Up @@ -85,10 +98,10 @@ private <V> V get(Operation operation, Type type) {
return (V) getOtherFieldDataFetcher(operation, type);
}

public PlugableDataFetcher getCompletionStageDataFetcher(Operation operation, Type type) {
private PlugableBatchableDataFetcher getCompletionStageDataFetcher(Operation operation, Type type) {

for (DataFetcherService dfe : dataFetcherServices) {
PlugableDataFetcher df = dfe.getCompletionStageDataFetcher(operation, type);
PlugableBatchableDataFetcher df = dfe.getCompletionStageDataFetcher(operation, type);
if (df != null) {
return df;
}
@@ -97,10 +110,10 @@ public PlugableDataFetcher getCompletionStageDataFetcher(Operation operation, Type type) {
return new CompletionStageDataFetcher(operation, type);
}

public PlugableDataFetcher getUniDataFetcher(Operation operation, Type type) {
private PlugableBatchableDataFetcher getUniDataFetcher(Operation operation, Type type) {

for (DataFetcherService dfe : dataFetcherServices) {
PlugableDataFetcher df = dfe.getUniDataFetcher(operation, type);
PlugableBatchableDataFetcher df = dfe.getUniDataFetcher(operation, type);
if (df != null) {
return df;
}
@@ -109,10 +122,10 @@ public PlugableDataFetcher getUniDataFetcher(Operation operation, Type type) {
return new UniDataFetcher(operation, type);
}

public PlugableDataFetcher getPublisherDataFetcher(Operation operation, Type type) {
private PlugableBatchableDataFetcher getPublisherDataFetcher(Operation operation, Type type) {

for (DataFetcherService dfe : dataFetcherServices) {
PlugableDataFetcher df = dfe.getPublisherDataFetcher(operation, type);
PlugableBatchableDataFetcher df = dfe.getPublisherDataFetcher(operation, type);
if (df != null) {
return df;
}
@@ -121,10 +134,10 @@ public PlugableDataFetcher getPublisherDataFetcher(Operation operation, Type type) {
return new PublisherDataFetcher(operation, type);
}

public PlugableDataFetcher getMultiDataFetcher(Operation operation, Type type) {
private PlugableBatchableDataFetcher getMultiDataFetcher(Operation operation, Type type) {

for (DataFetcherService dfe : dataFetcherServices) {
PlugableDataFetcher df = dfe.getMultiDataFetcher(operation, type);
PlugableBatchableDataFetcher df = dfe.getMultiDataFetcher(operation, type);
if (df != null) {
return df;
}
@@ -133,10 +146,10 @@ public PlugableDataFetcher getMultiDataFetcher(Operation operation, Type type) {
return new MultiDataFetcher(operation, type);
}

public PlugableDataFetcher getOtherWrappedDataFetcher(Operation operation, Type type) {
private PlugableBatchableDataFetcher getOtherWrappedDataFetcher(Operation operation, Type type) {

for (DataFetcherService dfe : dataFetcherServices) {
PlugableDataFetcher df = dfe.getOtherWrappedDataFetcher(operation, type);
PlugableBatchableDataFetcher df = dfe.getOtherWrappedDataFetcher(operation, type);
if (df != null) {
return df;
}
@@ -145,17 +158,17 @@ public PlugableDataFetcher getOtherWrappedDataFetcher(Operation operation, Type type) {
return getDefaultDataFetcher(operation, type);
}

public PlugableDataFetcher getOtherFieldDataFetcher(Operation operation, Type type) {
private PlugableBatchableDataFetcher getOtherFieldDataFetcher(Operation operation, Type type) {

for (DataFetcherService dfe : dataFetcherServices) {
PlugableDataFetcher df = dfe.getOtherFieldDataFetcher(operation, type);
PlugableBatchableDataFetcher df = dfe.getOtherFieldDataFetcher(operation, type);
if (df != null) {
return df;
}
}

for (DataFetcherService dfe : dataFetcherServices) {
PlugableDataFetcher df = dfe.getDefaultDataFetcher(operation, type);
PlugableBatchableDataFetcher df = dfe.getDefaultDataFetcher(operation, type);
if (df != null) {
return df;
}
@@ -164,7 +177,7 @@ public PlugableDataFetcher getOtherFieldDataFetcher(Operation operation, Type type) {
return new DefaultDataFetcher(operation, type);
}

public PlugableDataFetcher getDefaultDataFetcher(Operation operation, Type type) {
private PlugableBatchableDataFetcher getDefaultDataFetcher(Operation operation, Type type) {
return getOtherFieldDataFetcher(operation, type);
}

@@ -28,7 +28,7 @@
* @param <K>
* @param <T>
*/
public abstract class AbstractDataFetcher<K, T> implements PlugableDataFetcher<K, T> {
public abstract class AbstractDataFetcher<K, T> implements PlugableBatchableDataFetcher<K, T> {

protected Operation operation;
protected Type type;
@@ -6,7 +6,6 @@

import graphql.GraphQLException;
import graphql.TrivialDataFetcher;
import graphql.schema.DataFetcher;
import graphql.schema.DataFetchingEnvironment;
import io.smallrye.graphql.execution.context.SmallRyeContextManager;
import io.smallrye.graphql.execution.datafetcher.helper.FieldHelper;
@@ -29,7 +28,7 @@
* different
* subtype of the owner class for each call).
*/
public class FieldDataFetcher<T> implements DataFetcher<T>, TrivialDataFetcher<T> {
public class FieldDataFetcher<T> implements PlugableDataFetcher<T>, TrivialDataFetcher<T> {

private final FieldHelper fieldHelper;
private final Field field;
@@ -0,0 +1,12 @@
package io.smallrye.graphql.execution.datafetcher;

import org.dataloader.BatchLoaderWithContext;

/**
* Allows DataFetchers to be plugged
*
* @author Phillip Kruger ([email protected])
*/
public interface PlugableBatchableDataFetcher<K, T> extends PlugableDataFetcher<T>, BatchLoaderWithContext<K, T> {

}
@@ -1,14 +1,12 @@
package io.smallrye.graphql.execution.datafetcher;

import org.dataloader.BatchLoaderWithContext;

import graphql.schema.DataFetcher;

/**
* Allows DataFetchers to be plugged
*
* @author Phillip Kruger ([email protected])
*/
public interface PlugableDataFetcher<K, T> extends DataFetcher<T>, BatchLoaderWithContext<K, T> {
public interface PlugableDataFetcher<T> extends DataFetcher<T> {

}
@@ -59,8 +59,23 @@ protected boolean shouldTransform(Field field) {
* @return if adaption is needed
*/
protected boolean shouldAdapt(Field field) {
return field.getReference().isAdaptingWith() || field.isAdaptingWith()
|| (field.hasWrapper() && field.getWrapper().isMap());
return shouldAdaptWith(field) || shouldAutoAdaptWithMap(field) || shouldAdaptTo(field);
}

protected boolean shouldAutoAdaptWithMap(Field field) {
return field.hasWrapper() && field.getWrapper().isMap();
}

protected boolean shouldAdaptWith(Field field) {
return field.getReference().isAdaptingWith() || field.isAdaptingWith();
}

protected boolean shouldAdaptTo(Field field) {
return field.getReference().isAdaptingTo()
&& field.getReference().getAdaptTo().getDeserializeMethod() != null
||
field.isAdaptingTo()
&& field.getAdaptTo().getDeserializeMethod() != null;
}
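The refactor above splits shouldAdapt into one predicate per adaption flavour: an explicit AdaptWith on the field or its class, the automatic Map wrapper case, and an AdaptTo with a resolved deserialize method. For the automatic Map case, a plain Map field should be enough; a hypothetical sketch (names are illustrative only):

import java.util.Map;

// Hypothetical POJO: the attributes field carries a Map wrapper, so
// shouldAutoAdaptWithMap(field) is expected to return true and the value is
// adapted automatically, without an adapter declared on the field or its class.
public class Product {
    public String name;
    public Map<String, String> attributes;
}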

public Object transformOrAdapt(Object val, Field field, DataFetchingEnvironment dfe)
@@ -243,18 +243,6 @@ private Object transformInput(Field field, Object object) throws AbstractDataFetcherException {
}
}

private boolean shouldAdaptTo(Field field) {
return field.getReference().isAdaptingTo()
&& field.getReference().getAdaptTo().getDeserializeMethod() != null
||
field.isAdaptingTo()
&& field.getAdaptTo().getDeserializeMethod() != null;
}

private boolean shouldAdaptWith(Field field) {
return field.getReference().isAdaptingWith() || field.isAdaptingWith();
}

private String getCreateMethodName(Field field) {
if (field.getReference().isAdaptingTo()) {
return field.getReference().getAdaptTo().getDeserializeMethod();
@@ -79,6 +79,8 @@ Object singleAdapting(Object argumentValue, Field field, DataFetchingEnvironment
log.transformError(ex);
throw new TransformException(ex, field, argumentValue);
}
} else if (field.isAdaptingTo()) {
return argumentValue.toString();
} else if (field.hasWrapper() && field.getWrapper().isMap()) {
Object key = null;
Map<String, Object> arguments = dfe.getArguments();