From a78e922e1825b381eba4e8c6fd72cfcb2383eccf Mon Sep 17 00:00:00 2001 From: Ches Martin Date: Fri, 22 Nov 2019 10:04:41 +0700 Subject: [PATCH 1/3] Add Spotless Maven plugin for Google formatting --- CONTRIBUTING.md | 18 +++++++++++++++--- pom.xml | 14 ++++++++++++++ 2 files changed, 29 insertions(+), 3 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index fcee6f3a39..06476c0156 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -310,11 +310,23 @@ Please submit a **pull request** to initiate the code review process. We use [pr ### Java -We conform to the [java google style guide](https://google.github.io/styleguide/javaguide.html) +We conform to the [Google Java Style Guide]. Maven can helpfully take care of +that for you before you commit: -If using Intellij please import the code styles: -https://github.com/google/styleguide/blob/gh-pages/intellij-java-google-style.xml + + $ mvn spotless:apply + +Formatting will be checked automatically during the `verify` phase. The check can +also be run on its own, or skipped temporarily: + + $ mvn spotless:check # Check is automatic upon `mvn verify` + $ mvn verify -Dspotless.check.skip + +If you're using IntelliJ, you can import [these code style settings][G +IntelliJ] if you'd like to use the IDE's reformat function as you work. ### Go Make sure you apply `go fmt`. + +[Google Java Style Guide]: https://google.github.io/styleguide/javaguide.html +[G IntelliJ]: https://github.com/google/styleguide/blob/gh-pages/intellij-java-google-style.xml diff --git a/pom.xml b/pom.xml index d8008853a0..440bcb812a 100644 --- a/pom.xml +++ b/pom.xml @@ -267,6 +267,20 @@ + <plugin> + <groupId>com.diffplug.spotless</groupId> + <artifactId>spotless-maven-plugin</artifactId> + <version>1.26.0</version> + <configuration> + <java> + <googleJavaFormat> + <version>1.7</version> + </googleJavaFormat> + + + </java> + </configuration> + </plugin> org.apache.maven.plugins maven-compiler-plugin From 705b1a1fddbe0161dbaae7fe2898997562c5a691 Mon Sep 17 00:00:00 2001 From: Ches Martin Date: Fri, 22 Nov 2019 10:20:47 +0700 Subject: [PATCH 2/3] Run auto format with Spotless --- .../main/java/feast/core/CoreApplication.java | 2 +- .../config/CoreGRpcServerBuilderConfig.java | 2 +- .../feast/core/config/FeastProperties.java | 3 - .../core/config/FeatureStreamConfig.java | 27 +- .../java/feast/core/config/JPAConfig.java | 3 +- .../java/feast/core/config/JobConfig.java | 28 +- .../java/feast/core/config/WebMvcConfig.java | 12 +- .../feast/core/dao/FeatureSetRepository.java | 4 +- .../feast/core/dao/JobInfoRepository.java | 3 +- .../feast/core/dao/MetricsRepository.java | 3 +- .../java/feast/core/dao/StoreRepository.java | 4 +- .../core/exception/JobExecutionException.java | 4 +- .../exception/JobMonitoringException.java | 4 +- .../core/exception/TopicExistsException.java | 4 +- .../java/feast/core/grpc/CoreServiceImpl.java | 35 +- .../feast/core/http/HealthController.java | 11 +- .../main/java/feast/core/job/JobManager.java | 2 +- .../main/java/feast/core/job/JobMonitor.java | 3 - .../java/feast/core/job/NoopJobMonitor.java | 3 - core/src/main/java/feast/core/job/Runner.java | 2 +- .../feast/core/job/ScheduledJobMonitor.java | 13 +- .../core/job/dataflow/DataflowJobManager.java | 30 +- .../core/job/dataflow/DataflowJobMonitor.java | 21 +- .../job/dataflow/DataflowJobStateMapper.java | 2 +- .../java/feast/core/job/direct/DirectJob.java | 2 - .../core/job/direct/DirectJobRegistry.java | 5 +- .../core/job/direct/DirectJobStateMapper.java | 1 + .../job/direct/DirectRunnerJobManager.java | 24 +- .../job/direct/DirectRunnerJobMonitor.java | 4 - core/src/main/java/feast/core/log/Action.java | 4 +- .../main/java/feast/core/log/AuditLogger.java | 10 +- .../main/java/feast/core/log/Resource.java
| 4 +- .../core/model/AbstractTimestampEntity.java | 5 +- .../java/feast/core/model/FeatureSet.java | 41 +- .../src/main/java/feast/core/model/Field.java | 3 +- .../main/java/feast/core/model/JobInfo.java | 20 +- .../main/java/feast/core/model/Metrics.java | 10 +- .../main/java/feast/core/model/Source.java | 24 +- .../src/main/java/feast/core/model/Store.java | 19 +- .../core/service/JobCoordinatorService.java | 37 +- .../feast/core/service/JobStatusService.java | 108 +-- .../java/feast/core/service/SpecService.java | 118 +-- .../java/feast/core/util/PackageUtil.java | 10 +- .../java/feast/core/util/PipelineUtil.java | 10 +- .../java/feast/core/util/TypeConversion.java | 6 +- .../core/validators/FeatureSetValidator.java | 7 +- .../java/feast/core/validators/Matchers.java | 3 +- .../java/feast/core/CoreApplicationTest.java | 231 +++--- .../feast/core/http/HealthControllerTest.java | 13 +- .../core/job/ScheduledJobMonitorTest.java | 25 +- .../job/dataflow/DataflowJobManagerTest.java | 71 +- .../job/dataflow/DataflowJobMonitorTest.java | 20 +- .../dataflow/DataflowJobStateMapperTest.java | 2 +- .../direct/DirectRunnerJobManagerTest.java | 46 +- .../service/JobCoordinatorServiceTest.java | 174 ++-- .../feast/core/service/SpecServiceTest.java | 247 +++--- .../feast/core/util/TypeConversionTest.java | 14 +- .../feast/core/validators/MatchersTest.java | 17 +- .../main/java/feast/ingestion/ImportJob.java | 47 +- .../ingestion/options/ImportOptions.java | 14 +- .../ingestion/transform/ReadFromSource.java | 13 +- .../transform/ValidateFeatureRows.java | 35 +- .../WriteFailedElementToBigQuery.java | 7 +- .../ingestion/transform/WriteToStore.java | 27 +- .../fn/KafkaRecordToFeatureRowDoFn.java | 9 +- .../transform/fn/ValidateFeatureRowDoFn.java | 38 +- .../WriteDeadletterRowMetricsDoFn.java | 18 +- .../metrics/WriteMetricsTransform.java | 56 +- .../metrics/WriteRowMetricsDoFn.java | 36 +- .../java/feast/ingestion/utils/JsonUtil.java | 3 +- .../java/feast/ingestion/utils/SpecUtil.java | 4 +- .../feast/ingestion/values/FailedElement.java | 1 - .../ingestion/values/FeatureSetSpec.java | 4 +- .../serving/bigquery/GetTableDestination.java | 25 +- .../redis/FeatureRowToRedisMutationDoFn.java | 23 +- .../store/serving/redis/RedisCustomIO.java | 23 +- .../src/test/java/feast/FeastMatchers.java | 4 +- .../test/java/feast/ToOrderedFeatureRows.java | 6 +- .../java/feast/ingestion/ImportJobTest.java | 124 +-- .../transform/ValidateFeatureRowsTest.java | 88 +- .../feast/ingestion/util/JsonUtilTest.java | 2 +- .../feast/ingestion/util/StoreUtilTest.java | 2 +- .../serving/redis/RedisCustomIOTest.java | 61 +- .../src/test/java/feast/test/TestUtil.java | 154 ++-- .../com/gojek/feast/v1alpha1/FeastClient.java | 2 +- .../java/feast/serving/FeastProperties.java | 1 - .../configuration/ContextClosedHandler.java | 4 +- .../configuration/InstrumentationConfig.java | 2 +- .../configuration/JobServiceConfig.java | 8 +- .../ServingApiConfiguration.java | 3 +- .../configuration/ServingServiceConfig.java | 41 +- .../configuration/SpecServiceConfig.java | 16 +- .../ServingServiceGRpcController.java | 10 +- .../ServingServiceRestController.java | 4 +- .../service/BigQueryServingService.java | 203 +++-- .../serving/service/CachedSpecService.java | 61 +- .../serving/service/CoreSpecService.java | 4 +- .../feast/serving/service/NoopJobService.java | 4 +- .../service/RedisBackedJobService.java | 1 - .../serving/service/RedisServingService.java | 20 +- .../java/feast/serving/util/BigQueryUtil.java | 36 +- 
.../main/java/feast/serving/util/Metrics.java | 55 +- .../feast/serving/util/RequestHelper.java | 7 +- .../util/mappers/ResponseJSONMapper.java | 4 +- .../ServingServiceGRpcControllerTest.java | 35 +- .../service/CachedSpecServiceTest.java | 73 +- .../service/RedisServingServiceTest.java | 752 ++++++++++-------- .../util/mappers/YamlToProtoMapperTest.java | 34 +- 108 files changed, 1926 insertions(+), 1838 deletions(-) diff --git a/core/src/main/java/feast/core/CoreApplication.java b/core/src/main/java/feast/core/CoreApplication.java index 993d2c49ef..c28887d59e 100644 --- a/core/src/main/java/feast/core/CoreApplication.java +++ b/core/src/main/java/feast/core/CoreApplication.java @@ -34,4 +34,4 @@ public class CoreApplication { public static void main(String[] args) { SpringApplication.run(CoreApplication.class, args); } -} \ No newline at end of file +} diff --git a/core/src/main/java/feast/core/config/CoreGRpcServerBuilderConfig.java b/core/src/main/java/feast/core/config/CoreGRpcServerBuilderConfig.java index 3d2d158c0d..84912d0a74 100644 --- a/core/src/main/java/feast/core/config/CoreGRpcServerBuilderConfig.java +++ b/core/src/main/java/feast/core/config/CoreGRpcServerBuilderConfig.java @@ -8,7 +8,7 @@ @Component public class CoreGRpcServerBuilderConfig extends GRpcServerBuilderConfigurer { @Override - public void configure(ServerBuilder serverBuilder){ + public void configure(ServerBuilder serverBuilder) { serverBuilder.addService(ProtoReflectionService.newInstance()); } } diff --git a/core/src/main/java/feast/core/config/FeastProperties.java b/core/src/main/java/feast/core/config/FeastProperties.java index d285abff34..681157a683 100644 --- a/core/src/main/java/feast/core/config/FeastProperties.java +++ b/core/src/main/java/feast/core/config/FeastProperties.java @@ -41,6 +41,3 @@ public static class MetricsProperties { private int port; } } - - - diff --git a/core/src/main/java/feast/core/config/FeatureStreamConfig.java b/core/src/main/java/feast/core/config/FeatureStreamConfig.java index 4034468172..10036b2e15 100644 --- a/core/src/main/java/feast/core/config/FeatureStreamConfig.java +++ b/core/src/main/java/feast/core/config/FeatureStreamConfig.java @@ -27,8 +27,7 @@ public class FeatureStreamConfig { @Bean public Source getDefaultSource(FeastProperties feastProperties) { StreamProperties streamProperties = feastProperties.getStream(); - SourceType featureStreamType = SourceType - .valueOf(streamProperties.getType().toUpperCase()); + SourceType featureStreamType = SourceType.valueOf(streamProperties.getType().toUpperCase()); switch (featureStreamType) { case KAFKA: String bootstrapServers = streamProperties.getOptions().get("bootstrapServers"); @@ -38,25 +37,31 @@ public Source getDefaultSource(FeastProperties feastProperties) { map.put(AdminClientConfig.REQUEST_TIMEOUT_MS_CONFIG, "1000"); AdminClient client = AdminClient.create(map); - NewTopic newTopic = new NewTopic(topicName, - Integer.valueOf(streamProperties.getOptions().getOrDefault("numPartitions", "1")), - Short.valueOf(streamProperties.getOptions().getOrDefault("replicationFactor", "1"))); - CreateTopicsResult createTopicsResult = client - .createTopics(Collections.singleton(newTopic)); + NewTopic newTopic = + new NewTopic( + topicName, + Integer.valueOf(streamProperties.getOptions().getOrDefault("numPartitions", "1")), + Short.valueOf( + streamProperties.getOptions().getOrDefault("replicationFactor", "1"))); + CreateTopicsResult createTopicsResult = + client.createTopics(Collections.singleton(newTopic)); try { 
createTopicsResult.values().get(topicName).get(); } catch (InterruptedException | ExecutionException e) { if (e.getCause().getClass().equals(TopicExistsException.class)) { - log.warn(Strings - .lenientFormat( + log.warn( + Strings.lenientFormat( "Unable to create topic %s in the feature stream, topic already exists, using existing topic.", topicName)); } else { throw new RuntimeException(e.getMessage(), e); } } - KafkaSourceConfig sourceConfig = KafkaSourceConfig.newBuilder() - .setBootstrapServers(bootstrapServers).setTopic(topicName).build(); + KafkaSourceConfig sourceConfig = + KafkaSourceConfig.newBuilder() + .setBootstrapServers(bootstrapServers) + .setTopic(topicName) + .build(); return new Source(featureStreamType, sourceConfig, true); default: throw new RuntimeException("Unsupported source stream, only [KAFKA] is supported"); diff --git a/core/src/main/java/feast/core/config/JPAConfig.java b/core/src/main/java/feast/core/config/JPAConfig.java index 8e98897fd7..6b8b5e5e22 100644 --- a/core/src/main/java/feast/core/config/JPAConfig.java +++ b/core/src/main/java/feast/core/config/JPAConfig.java @@ -17,14 +17,13 @@ package feast.core.config; +import javax.persistence.EntityManagerFactory; import lombok.extern.slf4j.Slf4j; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.orm.jpa.JpaTransactionManager; import org.springframework.transaction.PlatformTransactionManager; -import javax.persistence.EntityManagerFactory; - /** Configuration of JPA related services and beans for the core application. */ @Configuration @Slf4j diff --git a/core/src/main/java/feast/core/config/JobConfig.java b/core/src/main/java/feast/core/config/JobConfig.java index e44036700b..4342352bae 100644 --- a/core/src/main/java/feast/core/config/JobConfig.java +++ b/core/src/main/java/feast/core/config/JobConfig.java @@ -42,9 +42,7 @@ import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; -/** - * Beans for job management - */ +/** Beans for job management */ @Slf4j @Configuration public class JobConfig { @@ -57,9 +55,7 @@ public class JobConfig { @Bean @Autowired public JobManager getJobManager( - FeastProperties feastProperties, - DirectJobRegistry directJobRegistry) - throws Exception { + FeastProperties feastProperties, DirectJobRegistry directJobRegistry) throws Exception { JobProperties jobProperties = feastProperties.getJobs(); Runner runner = Runner.fromString(jobProperties.getRunner()); @@ -95,21 +91,17 @@ public JobManager getJobManager( throw new IllegalStateException("Unable to initialize DataflowJobManager", e); } case DIRECT: - return new DirectRunnerJobManager(jobProperties.getOptions(), directJobRegistry, - jobProperties.getMetrics()); + return new DirectRunnerJobManager( + jobProperties.getOptions(), directJobRegistry, jobProperties.getMetrics()); default: throw new IllegalArgumentException("Unsupported runner: " + jobProperties.getRunner()); } } - /** - * Get a Job Monitor given the runner type and dataflow configuration. - */ + /** Get a Job Monitor given the runner type and dataflow configuration. 
*/ @Bean public JobMonitor getJobMonitor( - FeastProperties feastProperties, - DirectJobRegistry directJobRegistry) - throws Exception { + FeastProperties feastProperties, DirectJobRegistry directJobRegistry) throws Exception { JobProperties jobProperties = feastProperties.getJobs(); Runner runner = Runner.fromString(jobProperties.getRunner()); @@ -132,8 +124,8 @@ public JobMonitor getJobMonitor( JacksonFactory.getDefaultInstance(), credential); - return new DataflowJobMonitor(dataflow, jobOptions.get("project"), - jobOptions.get("region")); + return new DataflowJobMonitor( + dataflow, jobOptions.get("project"), jobOptions.get("region")); } catch (IOException e) { log.error( "Unable to find credential required for Dataflow monitoring API: {}", e.getMessage()); @@ -149,9 +141,7 @@ public JobMonitor getJobMonitor( } } - /** - * Get a direct job registry - */ + /** Get a direct job registry */ @Bean public DirectJobRegistry directJobRegistry() { return new DirectJobRegistry(); diff --git a/core/src/main/java/feast/core/config/WebMvcConfig.java b/core/src/main/java/feast/core/config/WebMvcConfig.java index 2098245b21..cbbaf12850 100644 --- a/core/src/main/java/feast/core/config/WebMvcConfig.java +++ b/core/src/main/java/feast/core/config/WebMvcConfig.java @@ -17,21 +17,19 @@ package feast.core.config; +import java.util.List; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.http.converter.HttpMessageConverter; import org.springframework.http.converter.protobuf.ProtobufJsonFormatHttpMessageConverter; import org.springframework.web.servlet.config.annotation.WebMvcConfigurer; -import java.util.List; - -/** - * Configuration for the spring web MVC layer - */ +/** Configuration for the spring web MVC layer */ @Configuration public class WebMvcConfig implements WebMvcConfigurer { /** * Get a json-protobuf converter. + * * @return ProtobufJsonFormatHttpMessageConverter */ @Bean @@ -39,9 +37,7 @@ ProtobufJsonFormatHttpMessageConverter getProtobufHttpMessageConverter() { return new ProtobufJsonFormatHttpMessageConverter(); } - /** - * Register json-protobuf converter. - */ + /** Register json-protobuf converter. */ @Override public void configureMessageConverters(List> converters) { converters.add(getProtobufHttpMessageConverter()); diff --git a/core/src/main/java/feast/core/dao/FeatureSetRepository.java b/core/src/main/java/feast/core/dao/FeatureSetRepository.java index 1e8ef99129..11a1c308bd 100644 --- a/core/src/main/java/feast/core/dao/FeatureSetRepository.java +++ b/core/src/main/java/feast/core/dao/FeatureSetRepository.java @@ -6,7 +6,7 @@ import org.springframework.data.jpa.repository.Query; /** JPA repository supplying FeatureSet objects keyed by id. 
*/ -public interface FeatureSetRepository extends JpaRepository { +public interface FeatureSetRepository extends JpaRepository { // Find feature set by name and version FeatureSet findFeatureSetByNameAndVersion(String name, Integer version); @@ -18,6 +18,6 @@ public interface FeatureSetRepository extends JpaRepository List findByName(String name); // find all versions of featureSets with names matching the regex - @Query(nativeQuery=true, value="SELECT * FROM feature_sets WHERE name LIKE ?1") + @Query(nativeQuery = true, value = "SELECT * FROM feature_sets WHERE name LIKE ?1") List findByNameWithWildcard(String name); } diff --git a/core/src/main/java/feast/core/dao/JobInfoRepository.java b/core/src/main/java/feast/core/dao/JobInfoRepository.java index 06381aa557..949e900636 100644 --- a/core/src/main/java/feast/core/dao/JobInfoRepository.java +++ b/core/src/main/java/feast/core/dao/JobInfoRepository.java @@ -28,5 +28,6 @@ @Repository public interface JobInfoRepository extends JpaRepository { List findByStatusNotIn(Collection statuses); + List findBySourceIdAndStoreName(String sourceId, String storeName); -} \ No newline at end of file +} diff --git a/core/src/main/java/feast/core/dao/MetricsRepository.java b/core/src/main/java/feast/core/dao/MetricsRepository.java index bbf5bfad12..24c6d05036 100644 --- a/core/src/main/java/feast/core/dao/MetricsRepository.java +++ b/core/src/main/java/feast/core/dao/MetricsRepository.java @@ -18,11 +18,10 @@ package feast.core.dao; import feast.core.model.Metrics; +import java.util.List; import org.springframework.data.jpa.repository.JpaRepository; import org.springframework.stereotype.Repository; -import java.util.List; - @Repository public interface MetricsRepository extends JpaRepository { List findByJobInfo_Id(String id); diff --git a/core/src/main/java/feast/core/dao/StoreRepository.java b/core/src/main/java/feast/core/dao/StoreRepository.java index 7df7af3ad7..70560320da 100644 --- a/core/src/main/java/feast/core/dao/StoreRepository.java +++ b/core/src/main/java/feast/core/dao/StoreRepository.java @@ -1,9 +1,7 @@ package feast.core.dao; -import feast.core.model.FeatureSet; import feast.core.model.Store; import org.springframework.data.jpa.repository.JpaRepository; /** JPA repository supplying Store objects keyed by id. */ -public interface StoreRepository extends JpaRepository { -} +public interface StoreRepository extends JpaRepository {} diff --git a/core/src/main/java/feast/core/exception/JobExecutionException.java b/core/src/main/java/feast/core/exception/JobExecutionException.java index 694d97c1ab..75a635e324 100644 --- a/core/src/main/java/feast/core/exception/JobExecutionException.java +++ b/core/src/main/java/feast/core/exception/JobExecutionException.java @@ -17,9 +17,7 @@ package feast.core.exception; -/** - * Exception thrown when a request for job execution fails. - */ +/** Exception thrown when a request for job execution fails. */ public class JobExecutionException extends RuntimeException { public JobExecutionException() { super(); diff --git a/core/src/main/java/feast/core/exception/JobMonitoringException.java b/core/src/main/java/feast/core/exception/JobMonitoringException.java index d55c725641..705c8ebac2 100644 --- a/core/src/main/java/feast/core/exception/JobMonitoringException.java +++ b/core/src/main/java/feast/core/exception/JobMonitoringException.java @@ -17,9 +17,7 @@ package feast.core.exception; -/** - * Exception thrown when error happen during job monitoring. 
- */ +/** Exception thrown when error happen during job monitoring. */ public class JobMonitoringException extends RuntimeException { public JobMonitoringException() { diff --git a/core/src/main/java/feast/core/exception/TopicExistsException.java b/core/src/main/java/feast/core/exception/TopicExistsException.java index 9d6ffe1c3c..4416563651 100644 --- a/core/src/main/java/feast/core/exception/TopicExistsException.java +++ b/core/src/main/java/feast/core/exception/TopicExistsException.java @@ -17,9 +17,7 @@ package feast.core.exception; -/** - * Exception thrown when creation of a topic in the stream fails because it already exists. - */ +/** Exception thrown when creation of a topic in the stream fails because it already exists. */ public class TopicExistsException extends RuntimeException { public TopicExistsException() { super(); diff --git a/core/src/main/java/feast/core/grpc/CoreServiceImpl.java b/core/src/main/java/feast/core/grpc/CoreServiceImpl.java index 81a77a6f45..744c449307 100644 --- a/core/src/main/java/feast/core/grpc/CoreServiceImpl.java +++ b/core/src/main/java/feast/core/grpc/CoreServiceImpl.java @@ -52,17 +52,13 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.transaction.annotation.Transactional; -/** - * Implementation of the feast core GRPC service. - */ +/** Implementation of the feast core GRPC service. */ @Slf4j @GRpcService public class CoreServiceImpl extends CoreServiceImplBase { - @Autowired - private SpecService specService; - @Autowired - private JobCoordinatorService jobCoordinatorService; + @Autowired private SpecService specService; + @Autowired private JobCoordinatorService jobCoordinatorService; @Override public void getFeastCoreVersion( @@ -127,8 +123,7 @@ public void applyFeatureSet( .filter( sub -> { String subString = sub.getName(); - if (!subString.contains(".*")) - { + if (!subString.contains(".*")) { subString = subString.replace("*", ".*"); } Pattern p = Pattern.compile(subString); @@ -150,8 +145,8 @@ public void applyFeatureSet( // We use the request featureSet source because it contains the information // about whether to default to the default feature stream or not SourceProto.Source source = response.getFeatureSet().getSource(); - jobCoordinatorService - .startOrUpdateJob(Lists.newArrayList(featureSetSpecs), source, store); + jobCoordinatorService.startOrUpdateJob( + Lists.newArrayList(featureSetSpecs), source, store); } } responseObserver.onNext(response); @@ -164,8 +159,8 @@ public void applyFeatureSet( @Override @Transactional - public void updateStore(UpdateStoreRequest request, - StreamObserver responseObserver) { + public void updateStore( + UpdateStoreRequest request, StreamObserver responseObserver) { try { UpdateStoreResponse response = specService.updateStore(request); responseObserver.onNext(response); @@ -176,13 +171,13 @@ public void updateStore(UpdateStoreRequest request, Store store = response.getStore(); for (Subscription subscription : store.getSubscriptionsList()) { featureSetSpecs.addAll( - specService.listFeatureSets( - ListFeatureSetsRequest.Filter.newBuilder() - .setFeatureSetName(subscription.getName()) - .setFeatureSetVersion(subscription.getVersion()) - .build()) - .getFeatureSetsList() - ); + specService + .listFeatureSets( + ListFeatureSetsRequest.Filter.newBuilder() + .setFeatureSetName(subscription.getName()) + .setFeatureSetVersion(subscription.getVersion()) + .build()) + .getFeatureSetsList()); } if (featureSetSpecs.size() == 0) { return; diff --git 
a/core/src/main/java/feast/core/http/HealthController.java b/core/src/main/java/feast/core/http/HealthController.java index e718332070..563c7bad42 100644 --- a/core/src/main/java/feast/core/http/HealthController.java +++ b/core/src/main/java/feast/core/http/HealthController.java @@ -17,6 +17,11 @@ package feast.core.http; +import static org.springframework.http.HttpStatus.INTERNAL_SERVER_ERROR; + +import java.sql.Connection; +import java.sql.SQLException; +import javax.sql.DataSource; import lombok.extern.slf4j.Slf4j; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpStatus; @@ -25,12 +30,6 @@ import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RestController; -import javax.sql.DataSource; -import java.sql.Connection; -import java.sql.SQLException; - -import static org.springframework.http.HttpStatus.INTERNAL_SERVER_ERROR; - /** Web http for pod health-check endpoints. */ @Slf4j @RestController diff --git a/core/src/main/java/feast/core/job/JobManager.java b/core/src/main/java/feast/core/job/JobManager.java index 3e1d22d245..bed7f265b7 100644 --- a/core/src/main/java/feast/core/job/JobManager.java +++ b/core/src/main/java/feast/core/job/JobManager.java @@ -26,6 +26,7 @@ public interface JobManager { /** * Get Runner Type + * * @return runner type */ Runner getRunnerType(); @@ -54,5 +55,4 @@ public interface JobManager { * @param extId runner specific job id. */ void abortJob(String extId); - } diff --git a/core/src/main/java/feast/core/job/JobMonitor.java b/core/src/main/java/feast/core/job/JobMonitor.java index f77b9df942..8829241340 100644 --- a/core/src/main/java/feast/core/job/JobMonitor.java +++ b/core/src/main/java/feast/core/job/JobMonitor.java @@ -19,8 +19,6 @@ import feast.core.model.JobInfo; import feast.core.model.JobStatus; -import feast.core.model.Metrics; -import java.util.List; public interface JobMonitor { @@ -31,5 +29,4 @@ public interface JobMonitor { * @return job status. 
*/ JobStatus getJobStatus(JobInfo job); - } diff --git a/core/src/main/java/feast/core/job/NoopJobMonitor.java b/core/src/main/java/feast/core/job/NoopJobMonitor.java index f73782298d..e078730b03 100644 --- a/core/src/main/java/feast/core/job/NoopJobMonitor.java +++ b/core/src/main/java/feast/core/job/NoopJobMonitor.java @@ -19,9 +19,6 @@ import feast.core.model.JobInfo; import feast.core.model.JobStatus; -import feast.core.model.Metrics; -import java.util.Collections; -import java.util.List; public class NoopJobMonitor implements JobMonitor { diff --git a/core/src/main/java/feast/core/job/Runner.java b/core/src/main/java/feast/core/job/Runner.java index ede26ce863..98e008f7a9 100644 --- a/core/src/main/java/feast/core/job/Runner.java +++ b/core/src/main/java/feast/core/job/Runner.java @@ -16,7 +16,7 @@ public String getName() { } public static Runner fromString(String runner) { - for(Runner r: Runner.values()) { + for (Runner r : Runner.values()) { if (r.getName().equals(runner)) { return r; } diff --git a/core/src/main/java/feast/core/job/ScheduledJobMonitor.java b/core/src/main/java/feast/core/job/ScheduledJobMonitor.java index 6772cb5ca2..96b070b2b8 100644 --- a/core/src/main/java/feast/core/job/ScheduledJobMonitor.java +++ b/core/src/main/java/feast/core/job/ScheduledJobMonitor.java @@ -24,14 +24,11 @@ import feast.core.log.Resource; import feast.core.model.JobInfo; import feast.core.model.JobStatus; -import feast.core.model.Metrics; +import java.util.Collection; import lombok.extern.slf4j.Slf4j; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.scheduling.annotation.Scheduled; import org.springframework.stereotype.Component; - -import java.util.Collection; -import java.util.List; import org.springframework.transaction.annotation.Transactional; @Slf4j @@ -42,16 +39,14 @@ public class ScheduledJobMonitor { private final JobInfoRepository jobInfoRepository; @Autowired - public ScheduledJobMonitor( - JobMonitor jobMonitor, - JobInfoRepository jobInfoRepository) { + public ScheduledJobMonitor(JobMonitor jobMonitor, JobInfoRepository jobInfoRepository) { this.jobMonitor = jobMonitor; this.jobInfoRepository = jobInfoRepository; } - // TODO: Keep receiving the following exception with these arguments below - // Caused by: java.lang.IllegalStateException: Encountered invalid @Scheduled method 'pollStatusAndMetrics': Circular placeholder reference .. in property definitions + // Caused by: java.lang.IllegalStateException: Encountered invalid @Scheduled method + // 'pollStatusAndMetrics': Circular placeholder reference .. 
in property definitions // @Scheduled( // fixedDelayString = "${feast.jobs.monitor.fixedDelay}", // initialDelayString = "${feast.jobs.monitor.initialDelay}") diff --git a/core/src/main/java/feast/core/job/dataflow/DataflowJobManager.java b/core/src/main/java/feast/core/job/dataflow/DataflowJobManager.java index 89184f4f01..28fc680e53 100644 --- a/core/src/main/java/feast/core/job/dataflow/DataflowJobManager.java +++ b/core/src/main/java/feast/core/job/dataflow/DataflowJobManager.java @@ -72,7 +72,6 @@ public Runner getRunnerType() { return RUNNER_TYPE; } - @Override public String startJob(String name, List featureSets, Store sink) { return submitDataflowJob(name, featureSets, sink, false); @@ -91,8 +90,8 @@ public String updateJob(JobInfo jobInfo) { for (FeatureSet featureSet : jobInfo.getFeatureSets()) { featureSetSpecs.add(featureSet.toProto()); } - return submitDataflowJob(jobInfo.getId(), featureSetSpecs, jobInfo.getStore().toProto(), - true); + return submitDataflowJob( + jobInfo.getId(), featureSetSpecs, jobInfo.getStore().toProto(), true); } catch (InvalidProtocolBufferException e) { throw new RuntimeException(String.format("Unable to update job %s", jobInfo.getId()), e); } @@ -127,8 +126,8 @@ public void abortJob(String dataflowJobId) { } } - private String submitDataflowJob(String jobName, List featureSets, Store sink, - boolean update) { + private String submitDataflowJob( + String jobName, List featureSets, Store sink, boolean update) { try { ImportOptions pipelineOptions = getPipelineOptions(jobName, featureSets, sink, update); DataflowPipelineJob pipelineResult = runPipeline(pipelineOptions); @@ -140,9 +139,9 @@ private String submitDataflowJob(String jobName, List featureSet } } - private ImportOptions getPipelineOptions(String jobName, List featureSets, - Store sink, - boolean update) throws IOException { + private ImportOptions getPipelineOptions( + String jobName, List featureSets, Store sink, boolean update) + throws IOException { String[] args = TypeConversion.convertMapToArgs(defaultOptions); ImportOptions pipelineOptions = PipelineOptionsFactory.fromArgs(args).as(ImportOptions.class); Printer printer = JsonFormat.printer(); @@ -156,8 +155,8 @@ private ImportOptions getPipelineOptions(String jobName, List fe pipelineOptions.setUpdate(update); pipelineOptions.setRunner(DataflowRunner.class); pipelineOptions.setJobName(jobName); - pipelineOptions - .setFilesToStage(detectClassPathResourcesToStage(DataflowRunner.class.getClassLoader())); + pipelineOptions.setFilesToStage( + detectClassPathResourcesToStage(DataflowRunner.class.getClassLoader())); if (metrics.isEnabled()) { pipelineOptions.setMetricsExporterType(metrics.getType()); @@ -169,10 +168,8 @@ private ImportOptions getPipelineOptions(String jobName, List fe return pipelineOptions; } - public DataflowPipelineJob runPipeline(ImportOptions pipelineOptions) - throws IOException { - return (DataflowPipelineJob) ImportJob - .runPipeline(pipelineOptions); + public DataflowPipelineJob runPipeline(ImportOptions pipelineOptions) throws IOException { + return (DataflowPipelineJob) ImportJob.runPipeline(pipelineOptions); } private String waitForJobToRun(DataflowPipelineJob pipelineResult) @@ -181,8 +178,9 @@ private String waitForJobToRun(DataflowPipelineJob pipelineResult) while (true) { State state = pipelineResult.getState(); if (state.isTerminal()) { - String dataflowDashboardUrl = String - .format("https://console.cloud.google.com/dataflow/jobsDetail/locations/%s/jobs/%s", + String dataflowDashboardUrl = + String.format( 
+ "https://console.cloud.google.com/dataflow/jobsDetail/locations/%s/jobs/%s", location, pipelineResult.getJobId()); throw new RuntimeException( String.format( diff --git a/core/src/main/java/feast/core/job/dataflow/DataflowJobMonitor.java b/core/src/main/java/feast/core/job/dataflow/DataflowJobMonitor.java index c8cfaeda4b..09feb61049 100644 --- a/core/src/main/java/feast/core/job/dataflow/DataflowJobMonitor.java +++ b/core/src/main/java/feast/core/job/dataflow/DataflowJobMonitor.java @@ -21,16 +21,10 @@ import com.google.api.services.dataflow.Dataflow; import com.google.api.services.dataflow.model.Job; -import com.google.api.services.dataflow.model.JobMetrics; -import com.google.api.services.dataflow.model.MetricUpdate; import feast.core.job.JobMonitor; import feast.core.job.Runner; import feast.core.model.JobInfo; import feast.core.model.JobStatus; -import feast.core.model.Metrics; -import java.util.Collections; -import java.util.List; -import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; @Slf4j @@ -65,12 +59,19 @@ public JobStatus getJobStatus(JobInfo jobInfo) { } try { - Job job = dataflow.projects().locations().jobs().get(projectId, location, jobInfo.getExtId()) - .execute(); + Job job = + dataflow + .projects() + .locations() + .jobs() + .get(projectId, location, jobInfo.getExtId()) + .execute(); return jobStateMaper.map(job.getCurrentState()); } catch (Exception e) { - log.error("Unable to retrieve status of a dataflow job with id : {}\ncause: {}", - jobInfo.getExtId(), e.getMessage()); + log.error( + "Unable to retrieve status of a dataflow job with id : {}\ncause: {}", + jobInfo.getExtId(), + e.getMessage()); } return JobStatus.UNKNOWN; } diff --git a/core/src/main/java/feast/core/job/dataflow/DataflowJobStateMapper.java b/core/src/main/java/feast/core/job/dataflow/DataflowJobStateMapper.java index 46f0dabbad..59deb1cb95 100644 --- a/core/src/main/java/feast/core/job/dataflow/DataflowJobStateMapper.java +++ b/core/src/main/java/feast/core/job/dataflow/DataflowJobStateMapper.java @@ -52,9 +52,9 @@ public class DataflowJobStateMapper { DATAFLOW_TO_FEAST_JOB_STATUS.put(JOB_STATE_DONE, JobStatus.COMPLETED); } - /** * Map a string containing Dataflow's JobState into Feast's JobStatus + * * @param jobState Dataflow JobState * @return JobStatus. * @throws IllegalArgumentException if jobState is invalid. diff --git a/core/src/main/java/feast/core/job/direct/DirectJob.java b/core/src/main/java/feast/core/job/direct/DirectJob.java index 10480c51c6..3483045fd2 100644 --- a/core/src/main/java/feast/core/job/direct/DirectJob.java +++ b/core/src/main/java/feast/core/job/direct/DirectJob.java @@ -5,7 +5,6 @@ import lombok.Getter; import org.apache.beam.sdk.PipelineResult; - @Getter @AllArgsConstructor public class DirectJob { @@ -23,4 +22,3 @@ public void abort() throws IOException { } } } - diff --git a/core/src/main/java/feast/core/job/direct/DirectJobRegistry.java b/core/src/main/java/feast/core/job/direct/DirectJobRegistry.java index 99f478644a..70d6696967 100644 --- a/core/src/main/java/feast/core/job/direct/DirectJobRegistry.java +++ b/core/src/main/java/feast/core/job/direct/DirectJobRegistry.java @@ -19,6 +19,7 @@ public DirectJobRegistry() { /** * Add the given job to the registry. 
+ * * @param job containing the job id, */ public void add(DirectJob job) { @@ -48,9 +49,7 @@ public void remove(String id) { jobs.remove(id); } - /** - * Kill all child jobs when the registry is garbage collected - */ + /** Kill all child jobs when the registry is garbage collected */ @Override public void finalize() { for (DirectJob job : this.jobs.values()) { diff --git a/core/src/main/java/feast/core/job/direct/DirectJobStateMapper.java b/core/src/main/java/feast/core/job/direct/DirectJobStateMapper.java index 763340b605..fa9a5f347b 100644 --- a/core/src/main/java/feast/core/job/direct/DirectJobStateMapper.java +++ b/core/src/main/java/feast/core/job/direct/DirectJobStateMapper.java @@ -22,6 +22,7 @@ public class DirectJobStateMapper { /** * Map a dataflow job state to Feast's JobStatus + * * @param jobState beam PipelineResult State * @return JobStatus */ diff --git a/core/src/main/java/feast/core/job/direct/DirectRunnerJobManager.java b/core/src/main/java/feast/core/job/direct/DirectRunnerJobManager.java index a9b968bcb9..dff265a35b 100644 --- a/core/src/main/java/feast/core/job/direct/DirectRunnerJobManager.java +++ b/core/src/main/java/feast/core/job/direct/DirectRunnerJobManager.java @@ -51,8 +51,9 @@ public class DirectRunnerJobManager implements JobManager { private final DirectJobRegistry jobs; private MetricsProperties metrics; - - public DirectRunnerJobManager(Map defaultOptions, DirectJobRegistry jobs, + public DirectRunnerJobManager( + Map defaultOptions, + DirectJobRegistry jobs, MetricsProperties metricsProperties) { this.defaultOptions = defaultOptions; this.jobs = jobs; @@ -72,8 +73,8 @@ public Runner getRunnerType() { * @param sinkSpec Store to sink features to */ @Override - public String startJob(String name, List featureSetSpecs, - StoreProto.Store sinkSpec) { + public String startJob( + String name, List featureSetSpecs, StoreProto.Store sinkSpec) { try { ImportOptions pipelineOptions = getPipelineOptions(featureSetSpecs, sinkSpec); PipelineResult pipelineResult = runPipeline(pipelineOptions); @@ -86,8 +87,8 @@ public String startJob(String name, List featureSetSpecs, } } - private ImportOptions getPipelineOptions(List featureSetSpecs, - StoreProto.Store sink) + private ImportOptions getPipelineOptions( + List featureSetSpecs, StoreProto.Store sink) throws InvalidProtocolBufferException { String[] args = TypeConversion.convertMapToArgs(defaultOptions); ImportOptions pipelineOptions = PipelineOptionsFactory.fromArgs(args).as(ImportOptions.class); @@ -112,11 +113,11 @@ private ImportOptions getPipelineOptions(List featureSetSpecs, } /** - * Stops an existing job and restarts a new job in its place as a proxy for job updates. - * Note that since we do not maintain a consumer group across the two jobs and the old job - * is not drained, some data may be lost. + * Stops an existing job and restarts a new job in its place as a proxy for job updates. Note that + * since we do not maintain a consumer group across the two jobs and the old job is not drained, + * some data may be lost. * - * As a rule of thumb, direct jobs in feast should only be used for testing. + *
<p>
As a rule of thumb, direct jobs in feast should only be used for testing. * * @param jobInfo jobInfo of target job to change * @return jobId of the job @@ -154,8 +155,7 @@ public void abortJob(String extId) { jobs.remove(extId); } - public PipelineResult runPipeline(ImportOptions pipelineOptions) - throws IOException { + public PipelineResult runPipeline(ImportOptions pipelineOptions) throws IOException { return ImportJob.runPipeline(pipelineOptions); } } diff --git a/core/src/main/java/feast/core/job/direct/DirectRunnerJobMonitor.java b/core/src/main/java/feast/core/job/direct/DirectRunnerJobMonitor.java index e0b8a8ff64..ace86d305b 100644 --- a/core/src/main/java/feast/core/job/direct/DirectRunnerJobMonitor.java +++ b/core/src/main/java/feast/core/job/direct/DirectRunnerJobMonitor.java @@ -3,9 +3,6 @@ import feast.core.job.JobMonitor; import feast.core.model.JobInfo; import feast.core.model.JobStatus; -import feast.core.model.Metrics; -import java.util.ArrayList; -import java.util.List; import lombok.extern.slf4j.Slf4j; @Slf4j @@ -27,5 +24,4 @@ public JobStatus getJobStatus(JobInfo job) { } return jobStateMapper.map(directJob.getPipelineResult().getState()); } - } diff --git a/core/src/main/java/feast/core/log/Action.java b/core/src/main/java/feast/core/log/Action.java index c020434ee6..7b0bd0ad30 100644 --- a/core/src/main/java/feast/core/log/Action.java +++ b/core/src/main/java/feast/core/log/Action.java @@ -1,8 +1,6 @@ package feast.core.log; -/** - * Actions taken for audit logging purposes - */ +/** Actions taken for audit logging purposes */ public enum Action { // Job-related actions SUBMIT, diff --git a/core/src/main/java/feast/core/log/AuditLogger.java b/core/src/main/java/feast/core/log/AuditLogger.java index bd66720660..c65171a0f1 100644 --- a/core/src/main/java/feast/core/log/AuditLogger.java +++ b/core/src/main/java/feast/core/log/AuditLogger.java @@ -18,13 +18,12 @@ package feast.core.log; import com.google.common.base.Strings; -import lombok.extern.log4j.Log4j2; -import org.apache.logging.log4j.Level; -import org.apache.logging.log4j.message.ObjectMessage; - import java.util.Date; import java.util.Map; import java.util.TreeMap; +import lombok.extern.log4j.Log4j2; +import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.message.ObjectMessage; @Log4j2 public class AuditLogger { @@ -39,7 +38,8 @@ public class AuditLogger { * @param detail additional detail. Supports string formatting. * @param args arguments to the detail string */ - public static void log(Resource resource, String id, Action action, String detail, Object... args) { + public static void log( + Resource resource, String id, Action action, String detail, Object... 
args) { Map map = new TreeMap<>(); map.put("timestamp", new Date().toString()); map.put("resource", resource.toString()); diff --git a/core/src/main/java/feast/core/log/Resource.java b/core/src/main/java/feast/core/log/Resource.java index d57af53bbb..1fce2fd265 100644 --- a/core/src/main/java/feast/core/log/Resource.java +++ b/core/src/main/java/feast/core/log/Resource.java @@ -1,8 +1,6 @@ package feast.core.log; -/** - * Resources interacted with, for audit logging purposes - */ +/** Resources interacted with, for audit logging purposes */ public enum Resource { FEATURE, FEATURE_GROUP, diff --git a/core/src/main/java/feast/core/model/AbstractTimestampEntity.java b/core/src/main/java/feast/core/model/AbstractTimestampEntity.java index 52297c8b17..d77dd9d807 100644 --- a/core/src/main/java/feast/core/model/AbstractTimestampEntity.java +++ b/core/src/main/java/feast/core/model/AbstractTimestampEntity.java @@ -17,10 +17,9 @@ package feast.core.model; -import lombok.Data; - -import javax.persistence.*; import java.util.Date; +import javax.persistence.*; +import lombok.Data; /** * Base object class ensuring that all objects stored in the registry have an auto-generated diff --git a/core/src/main/java/feast/core/model/FeatureSet.java b/core/src/main/java/feast/core/model/FeatureSet.java index 947a0c6ca2..ff22160477 100644 --- a/core/src/main/java/feast/core/model/FeatureSet.java +++ b/core/src/main/java/feast/core/model/FeatureSet.java @@ -65,7 +65,11 @@ public FeatureSet() { super(); } - public FeatureSet(String name, int version, long maxAgeSeconds, List entities, + public FeatureSet( + String name, + int version, + long maxAgeSeconds, + List entities, List features, Source source) { this.id = String.format("%s:%s", name, version); @@ -82,18 +86,15 @@ public static FeatureSet fromProto(FeatureSetSpec featureSetSpec) { String id = String.format("%s:%d", featureSetSpec.getName(), featureSetSpec.getVersion()); List features = new ArrayList<>(); for (FeatureSpec feature : featureSetSpec.getFeaturesList()) { - features.add(new Field(id, - feature.getName(), - feature.getValueType())); + features.add(new Field(id, feature.getName(), feature.getValueType())); } List entities = new ArrayList<>(); for (EntitySpec entity : featureSetSpec.getEntitiesList()) { - entities.add(new Field(id, - entity.getName(), - entity.getValueType())); + entities.add(new Field(id, entity.getName(), entity.getValueType())); } - return new FeatureSet(featureSetSpec.getName(), + return new FeatureSet( + featureSetSpec.getName(), featureSetSpec.getVersion(), featureSetSpec.getMaxAge().getSeconds(), entities, @@ -104,18 +105,20 @@ public static FeatureSet fromProto(FeatureSetSpec featureSetSpec) { public FeatureSetSpec toProto() throws InvalidProtocolBufferException { List entitySpecs = new ArrayList<>(); for (Field entity : entities) { - entitySpecs.add(EntitySpec.newBuilder() - .setName(entity.getName()) - .setValueType(ValueType.Enum.valueOf(entity.getType())) - .build()); + entitySpecs.add( + EntitySpec.newBuilder() + .setName(entity.getName()) + .setValueType(ValueType.Enum.valueOf(entity.getType())) + .build()); } List featureSpecs = new ArrayList<>(); for (Field feature : features) { - featureSpecs.add(FeatureSpec.newBuilder() - .setName(feature.getName()) - .setValueType(ValueType.Enum.valueOf(feature.getType())) - .build()); + featureSpecs.add( + FeatureSpec.newBuilder() + .setName(feature.getName()) + .setValueType(ValueType.Enum.valueOf(feature.getType())) + .build()); } return FeatureSetSpec.newBuilder() 
.setName(name) @@ -134,8 +137,10 @@ public FeatureSetSpec toProto() throws InvalidProtocolBufferException { * @return boolean denoting if the source or schema have changed. */ public boolean equalTo(FeatureSet other) throws InvalidProtocolBufferException { - return name.equals(other.getName()) && entities.equals(other.entities) && features - .equals(other.features) && source.equalTo(other.getSource()) + return name.equals(other.getName()) + && entities.equals(other.entities) + && features.equals(other.features) + && source.equalTo(other.getSource()) && maxAgeSeconds == other.maxAgeSeconds; } diff --git a/core/src/main/java/feast/core/model/Field.java b/core/src/main/java/feast/core/model/Field.java index fe618c3e9e..7f0feb3e55 100644 --- a/core/src/main/java/feast/core/model/Field.java +++ b/core/src/main/java/feast/core/model/Field.java @@ -58,8 +58,7 @@ public boolean equals(Object o) { return false; } Field field = (Field) o; - return name.equals(field.getName()) && - type.equals(field.getType()); + return name.equals(field.getName()) && type.equals(field.getType()); } @Override diff --git a/core/src/main/java/feast/core/model/JobInfo.java b/core/src/main/java/feast/core/model/JobInfo.java index e2e61fee08..b38c0c5cb7 100644 --- a/core/src/main/java/feast/core/model/JobInfo.java +++ b/core/src/main/java/feast/core/model/JobInfo.java @@ -17,8 +17,6 @@ package feast.core.model; -import com.google.api.Metric; -import feast.core.SourceProto.SourceType; import java.util.List; import javax.persistence.CascadeType; import javax.persistence.Column; @@ -36,9 +34,7 @@ import lombok.Getter; import lombok.Setter; -/** - * Contains information about a run job. - */ +/** Contains information about a run job. */ @AllArgsConstructor @Getter @Setter @@ -47,8 +43,7 @@ public class JobInfo extends AbstractTimestampEntity { // Internal job name. Generated by feast ingestion upon invocation. - @Id - private String id; + @Id private String id; // External job id, generated by the runner and retrieved by feast. // Used internally for job management. 
@@ -69,7 +64,6 @@ public class JobInfo extends AbstractTimestampEntity { @JoinColumn(name = "store_name") private Store store; - // FeatureSets populated by the job @ManyToMany @JoinTable( @@ -89,8 +83,14 @@ public JobInfo() { super(); } - public JobInfo(String id, String extId, String runner, Source source, Store sink, - List featureSets, JobStatus jobStatus) { + public JobInfo( + String id, + String extId, + String runner, + Source source, + Store sink, + List featureSets, + JobStatus jobStatus) { this.id = id; this.extId = extId; this.source = source; diff --git a/core/src/main/java/feast/core/model/Metrics.java b/core/src/main/java/feast/core/model/Metrics.java index ce6bea489a..7ec770b988 100644 --- a/core/src/main/java/feast/core/model/Metrics.java +++ b/core/src/main/java/feast/core/model/Metrics.java @@ -34,7 +34,7 @@ @Setter @Entity @Table(name = "metrics") -public class Metrics extends AbstractTimestampEntity{ +public class Metrics extends AbstractTimestampEntity { @Id @GeneratedValue(strategy = GenerationType.AUTO) @@ -44,14 +44,10 @@ public class Metrics extends AbstractTimestampEntity{ @JoinColumn(name = "job_id") private JobInfo jobInfo; - /** - * Metrics name - */ + /** Metrics name */ private String name; - /** - * Metrics value - */ + /** Metrics value */ private double value; /** diff --git a/core/src/main/java/feast/core/model/Source.java b/core/src/main/java/feast/core/model/Source.java index 1f2e538209..6f817e3b56 100644 --- a/core/src/main/java/feast/core/model/Source.java +++ b/core/src/main/java/feast/core/model/Source.java @@ -46,8 +46,9 @@ public Source() { public Source(SourceType type, KafkaSourceConfig config, boolean isDefault) { if (config.getBootstrapServers().isEmpty() || config.getTopic().isEmpty()) { - throw Status.INVALID_ARGUMENT.withDescription( - "Unsupported source options. Kafka source requires bootstrap servers and topic to be specified.") + throw Status.INVALID_ARGUMENT + .withDescription( + "Unsupported source options. 
Kafka source requires bootstrap servers and topic to be specified.") .asRuntimeException(); } this.type = type.toString(); @@ -87,13 +88,14 @@ public static Source fromProto(SourceProto.Source sourceProto) { * @return SourceProto.Source */ public SourceProto.Source toProto() { - Builder builder = SourceProto.Source.newBuilder() - .setType(SourceType.valueOf(type)); + Builder builder = SourceProto.Source.newBuilder().setType(SourceType.valueOf(type)); switch (SourceType.valueOf(type)) { case KAFKA: - KafkaSourceConfig config = KafkaSourceConfig.newBuilder() - .setBootstrapServers(bootstrapServers) - .setTopic(topics).build(); + KafkaSourceConfig config = + KafkaSourceConfig.newBuilder() + .setBootstrapServers(bootstrapServers) + .setTopic(topics) + .build(); return builder.setKafkaSourceConfig(config).build(); case UNRECOGNIZED: default: @@ -118,8 +120,7 @@ public String getId() { public Message getOptions() { switch (SourceType.valueOf(type)) { case KAFKA: - return KafkaSourceConfig - .newBuilder() + return KafkaSourceConfig.newBuilder() .setBootstrapServers(bootstrapServers) .setTopic(topics) .build(); @@ -166,8 +167,7 @@ public boolean equalTo(Source other) { switch (SourceType.valueOf(type)) { case KAFKA: - return bootstrapServers.equals(other.bootstrapServers) && - topics.equals(other.topics); + return bootstrapServers.equals(other.bootstrapServers) && topics.equals(other.topics); case UNRECOGNIZED: default: return false; @@ -184,5 +184,3 @@ private String generateId() { } } } - - diff --git a/core/src/main/java/feast/core/model/Store.java b/core/src/main/java/feast/core/model/Store.java index 93dd6362e6..588da9dc3c 100644 --- a/core/src/main/java/feast/core/model/Store.java +++ b/core/src/main/java/feast/core/model/Store.java @@ -20,7 +20,6 @@ import lombok.AllArgsConstructor; import lombok.Getter; import lombok.Setter; -import com.google.protobuf.util.JsonFormat; @Getter @Setter @@ -70,16 +69,17 @@ public static Store fromProto(StoreProto.Store storeProto) throws IllegalArgumen default: throw new IllegalArgumentException("Invalid store provided"); } - return new Store(storeProto.getName(), storeProto.getType().toString(), - config, String.join(",", subs)); + return new Store( + storeProto.getName(), storeProto.getType().toString(), config, String.join(",", subs)); } public StoreProto.Store toProto() throws InvalidProtocolBufferException { List subscriptionProtos = getSubscriptions(); - Builder storeProtoBuilder = StoreProto.Store.newBuilder() - .setName(name) - .setType(StoreType.valueOf(type)) - .addAllSubscriptions(subscriptionProtos); + Builder storeProtoBuilder = + StoreProto.Store.newBuilder() + .setName(name) + .setType(StoreType.valueOf(type)) + .addAllSubscriptions(subscriptionProtos); switch (StoreType.valueOf(type)) { case REDIS: RedisConfig redisConfig = RedisConfig.parseFrom(config); @@ -110,9 +110,6 @@ private Subscription convertStringToSubscription(String sub) { return Subscription.newBuilder().build(); } String[] split = sub.split(":"); - return Subscription.newBuilder() - .setName(split[0]) - .setVersion(split[1]) - .build(); + return Subscription.newBuilder().setName(split[0]).setVersion(split[1]).build(); } } diff --git a/core/src/main/java/feast/core/service/JobCoordinatorService.java b/core/src/main/java/feast/core/service/JobCoordinatorService.java index 2cd5cd6778..1cd3515026 100644 --- a/core/src/main/java/feast/core/service/JobCoordinatorService.java +++ b/core/src/main/java/feast/core/service/JobCoordinatorService.java @@ -1,10 +1,8 @@ package 
feast.core.service; import com.google.common.base.Strings; -import feast.core.FeatureSetProto; import feast.core.FeatureSetProto.FeatureSetSpec; import feast.core.SourceProto; -import feast.core.SourceProto.SourceType; import feast.core.StoreProto; import feast.core.dao.JobInfoRepository; import feast.core.exception.JobExecutionException; @@ -36,8 +34,7 @@ public class JobCoordinatorService { private JobManager jobManager; @Autowired - public JobCoordinatorService( - JobInfoRepository jobInfoRepository, JobManager jobManager) { + public JobCoordinatorService(JobInfoRepository jobInfoRepository, JobManager jobManager) { this.jobInfoRepository = jobInfoRepository; this.jobManager = jobManager; } @@ -47,9 +44,8 @@ public JobCoordinatorService( * there has been no change in the featureSet, and there is a running job for the featureSet, this * method will do nothing. */ - public JobInfo startOrUpdateJob(List featureSetSpecs, - SourceProto.Source sourceSpec, - StoreProto.Store store) { + public JobInfo startOrUpdateJob( + List featureSetSpecs, SourceProto.Source sourceSpec, StoreProto.Store store) { Source source = Source.fromProto(sourceSpec); Optional job = getJob(source.getId(), store.getName()); if (job.isPresent()) { @@ -66,17 +62,14 @@ public JobInfo startOrUpdateJob(List featureSetSpecs, return updateJob(job.get(), featureSetSpecs, store); } } else { - return startJob(createJobId(source.getId(), store.getName()), - featureSetSpecs, sourceSpec, store); + return startJob( + createJobId(source.getId(), store.getName()), featureSetSpecs, sourceSpec, store); } } - /** - * Get the non-terminal job associated with the given featureSet name and store name, if any. - */ + /** Get the non-terminal job associated with the given featureSet name and store name, if any. */ private Optional getJob(String sourceId, String storeName) { - List jobs = - jobInfoRepository.findBySourceIdAndStoreName(sourceId, storeName); + List jobs = jobInfoRepository.findBySourceIdAndStoreName(sourceId, storeName); if (jobs.isEmpty()) { return Optional.empty(); } @@ -85,11 +78,11 @@ private Optional getJob(String sourceId, String storeName) { .findFirst(); } - /** - * Start or update the job to ingest data to the sink. - */ + /** Start or update the job to ingest data to the sink. 
*/ private JobInfo startJob( - String jobId, List featureSetSpecs, SourceProto.Source source, + String jobId, + List featureSetSpecs, + SourceProto.Source source, StoreProto.Store sinkSpec) { try { AuditLogger.log( @@ -144,9 +137,7 @@ private JobInfo startJob( } } - /** - * Update the given job - */ + /** Update the given job */ private JobInfo updateJob( JobInfo jobInfo, List featureSetSpecs, StoreProto.Store store) { jobInfo.setFeatureSets( @@ -183,9 +174,7 @@ public void abortJob(String id) { jobInfoRepository.saveAndFlush(job); } - /** - * Update a given job's status - */ + /** Update a given job's status */ public void updateJobStatus(String jobId, JobStatus status) { Optional jobRecordOptional = jobInfoRepository.findById(jobId); if (jobRecordOptional.isPresent()) { diff --git a/core/src/main/java/feast/core/service/JobStatusService.java b/core/src/main/java/feast/core/service/JobStatusService.java index e47ebd39d3..0307afa0e1 100644 --- a/core/src/main/java/feast/core/service/JobStatusService.java +++ b/core/src/main/java/feast/core/service/JobStatusService.java @@ -3,62 +3,62 @@ import lombok.extern.slf4j.Slf4j; import org.springframework.stereotype.Service; - @Slf4j @Service public class JobStatusService { -// -// private JobInfoRepository jobInfoRepository; -// private MetricsRepository metricsRepository; -// -// @Autowired -// public JobStatusService( -// JobInfoRepository jobInfoRepository, -// MetricsRepository metricsRepository) { -// this.jobInfoRepository = jobInfoRepository; -// this.metricsRepository = metricsRepository; -// } -// -// /** -// * Lists all jobs registered to the db, sorted by provided orderBy -// * -// * @param orderBy list order -// * @return list of JobDetails -// */ -// @Transactional -// public List listJobs(Sort orderBy) { -// List jobs = jobInfoRepository.findAll(orderBy); -// return jobs.stream().map(JobInfo::getJobDetail).collect(Collectors.toList()); -// } -// -// /** -// * Lists all jobs registered to the db, sorted chronologically by creation time -// * -// * @return list of JobDetails -// */ -// @Transactional -// public List listJobs() { -// return listJobs(Sort.by(Sort.Direction.ASC, "created")); -// } -// -// /** -// * Gets information regarding a single job. 
-// * -// * @param id feast-internal job id -// * @return JobDetail for that job -// */ -// @Transactional -// public JobDetail getJob(String id) { -// Optional job = jobInfoRepository.findById(id); -// if (!job.isPresent()) { -// throw new RetrievalException(Strings.lenientFormat("Unable to retrieve job with id %s", id)); -// } -// JobDetail.Builder jobDetailBuilder = job.get().getJobDetail().toBuilder(); -// List metrics = metricsRepository.findByJobInfo_Id(id); -// for (Metrics metric : metrics) { -// jobDetailBuilder.putMetrics(metric.getName(), metric.getValue()); -// } -// return jobDetailBuilder.build(); -// } + // + // private JobInfoRepository jobInfoRepository; + // private MetricsRepository metricsRepository; + // + // @Autowired + // public JobStatusService( + // JobInfoRepository jobInfoRepository, + // MetricsRepository metricsRepository) { + // this.jobInfoRepository = jobInfoRepository; + // this.metricsRepository = metricsRepository; + // } + // + // /** + // * Lists all jobs registered to the db, sorted by provided orderBy + // * + // * @param orderBy list order + // * @return list of JobDetails + // */ + // @Transactional + // public List listJobs(Sort orderBy) { + // List jobs = jobInfoRepository.findAll(orderBy); + // return jobs.stream().map(JobInfo::getJobDetail).collect(Collectors.toList()); + // } + // + // /** + // * Lists all jobs registered to the db, sorted chronologically by creation time + // * + // * @return list of JobDetails + // */ + // @Transactional + // public List listJobs() { + // return listJobs(Sort.by(Sort.Direction.ASC, "created")); + // } + // + // /** + // * Gets information regarding a single job. + // * + // * @param id feast-internal job id + // * @return JobDetail for that job + // */ + // @Transactional + // public JobDetail getJob(String id) { + // Optional job = jobInfoRepository.findById(id); + // if (!job.isPresent()) { + // throw new RetrievalException(Strings.lenientFormat("Unable to retrieve job with id %s", + // id)); + // } + // JobDetail.Builder jobDetailBuilder = job.get().getJobDetail().toBuilder(); + // List metrics = metricsRepository.findByJobInfo_Id(id); + // for (Metrics metric : metrics) { + // jobDetailBuilder.putMetrics(metric.getName(), metric.getValue()); + // } + // return jobDetailBuilder.build(); + // } } diff --git a/core/src/main/java/feast/core/service/SpecService.java b/core/src/main/java/feast/core/service/SpecService.java index a2bde4a5b4..6811ae8668 100644 --- a/core/src/main/java/feast/core/service/SpecService.java +++ b/core/src/main/java/feast/core/service/SpecService.java @@ -64,8 +64,8 @@ public class SpecService { private final StoreRepository storeRepository; private final Source defaultSource; - private final Pattern versionPattern = Pattern - .compile("^(?[\\>\\<\\=]{0,2})(?\\d*)$"); + private final Pattern versionPattern = + Pattern.compile("^(?[\\>\\<\\=]{0,2})(?\\d*)$"); @Autowired public SpecService( @@ -78,9 +78,9 @@ public SpecService( } /** - * Get a feature set matching the feature name and version provided in the filter. The name - * is required. If the version is provided then it will be used for the lookup. If the version - * is omitted then the latest version will be returned. + * Get a feature set matching the feature name and version provided in the filter. The name is + * required. If the version is provided then it will be used for the lookup. If the version is + * omitted then the latest version will be returned. 
* * @param GetFeatureSetRequest containing the name and version of the feature set * @return GetFeatureSetResponse containing a single feature set @@ -88,46 +88,49 @@ public SpecService( public GetFeatureSetResponse getFeatureSet(GetFeatureSetRequest request) throws InvalidProtocolBufferException { - // Validate input arguments - checkValidCharacters(request.getName(), "featureSetName"); - if (request.getName().isEmpty()) { - throw io.grpc.Status.INVALID_ARGUMENT - .withDescription("No feature set name provided") - .asRuntimeException(); - } - if (request.getVersion() < 0){ - throw io.grpc.Status.INVALID_ARGUMENT - .withDescription("Version number cannot be less than 0") - .asRuntimeException(); - } + // Validate input arguments + checkValidCharacters(request.getName(), "featureSetName"); + if (request.getName().isEmpty()) { + throw io.grpc.Status.INVALID_ARGUMENT + .withDescription("No feature set name provided") + .asRuntimeException(); + } + if (request.getVersion() < 0) { + throw io.grpc.Status.INVALID_ARGUMENT + .withDescription("Version number cannot be less than 0") + .asRuntimeException(); + } - FeatureSet featureSet; + FeatureSet featureSet; - // Filter the list based on version - if (request.getVersion() == 0){ - featureSet = featureSetRepository.findFirstFeatureSetByNameOrderByVersionDesc(request.getName()); - } else { - featureSet = featureSetRepository.findFeatureSetByNameAndVersion(request.getName(), request.getVersion()); - } + // Filter the list based on version + if (request.getVersion() == 0) { + featureSet = + featureSetRepository.findFirstFeatureSetByNameOrderByVersionDesc(request.getName()); + } else { + featureSet = + featureSetRepository.findFeatureSetByNameAndVersion( + request.getName(), request.getVersion()); + } - if (featureSet == null){ - throw io.grpc.Status.NOT_FOUND - .withDescription("Feature set could not be found") - .asRuntimeException(); - } + if (featureSet == null) { + throw io.grpc.Status.NOT_FOUND + .withDescription("Feature set could not be found") + .asRuntimeException(); + } - // Only a single item in list, return successfully - return GetFeatureSetResponse.newBuilder().setFeatureSet(featureSet.toProto()).build(); + // Only a single item in list, return successfully + return GetFeatureSetResponse.newBuilder().setFeatureSet(featureSet.toProto()).build(); } /** * Get featureSets matching the feature name and version provided in the filter. If the feature * name is not provided, the method will return all featureSets currently registered to Feast. * - * The feature set name in the filter accepts any valid regex string. All matching featureSets + *
<p>
The feature set name in the filter accepts any valid regex string. All matching featureSets * will be returned. * - * The version filter is optional; If not provided, this method will return all featureSet + *
<p>
The version filter is optional; If not provided, this method will return all featureSet * versions of the featureSet name provided. Valid version filters should optionally contain a * comparator (<, <=, >, etc) and a version number, e.g. 10, <10, >=1 * @@ -143,8 +146,10 @@ public ListFeatureSetsResponse listFeatureSets(ListFeatureSetsRequest.Filter fil featureSets = featureSetRepository.findAll(); } else { featureSets = featureSetRepository.findByNameWithWildcard(name.replace('*', '%')); - featureSets = featureSets.stream().filter(getVersionFilter(filter.getFeatureSetVersion())) - .collect(Collectors.toList()); + featureSets = + featureSets.stream() + .filter(getVersionFilter(filter.getFeatureSetVersion())) + .collect(Collectors.toList()); } ListFeatureSetsResponse.Builder response = ListFeatureSetsResponse.newBuilder(); for (FeatureSet featureSet : featureSets) { @@ -170,12 +175,14 @@ public ListStoresResponse listStores(ListStoresRequest.Filter filter) { } return responseBuilder.build(); } - Store store = storeRepository.findById(name) - .orElseThrow(() -> new RetrievalException(String.format("Store with name '%s' not found", - name))); - return ListStoresResponse.newBuilder() - .addStore(store.toProto()) - .build(); + Store store = + storeRepository + .findById(name) + .orElseThrow( + () -> + new RetrievalException( + String.format("Store with name '%s' not found", name))); + return ListStoresResponse.newBuilder().addStore(store.toProto()).build(); } catch (InvalidProtocolBufferException e) { throw io.grpc.Status.NOT_FOUND .withDescription("Unable to retrieve stores") @@ -189,7 +196,7 @@ public ListStoresResponse listStores(ListStoresRequest.Filter filter) { * to. If there is a change in the featureSet's schema or source, the featureSet version will be * incremented. * - * This function is idempotent. If no changes are detected in the incoming featureSet's schema, + *
<p>
This function is idempotent. If no changes are detected in the incoming featureSet's schema, * this method will update the incoming featureSet spec with the latest version stored in the * repository, and return that. * @@ -198,8 +205,8 @@ public ListStoresResponse listStores(ListStoresRequest.Filter filter) { public ApplyFeatureSetResponse applyFeatureSet(FeatureSetSpec newFeatureSetSpec) throws InvalidProtocolBufferException { FeatureSetValidator.validateSpec(newFeatureSetSpec); - List existingFeatureSets = featureSetRepository - .findByName(newFeatureSetSpec.getName()); + List existingFeatureSets = + featureSetRepository.findByName(newFeatureSetSpec.getName()); if (existingFeatureSets.size() == 0) { newFeatureSetSpec = newFeatureSetSpec.toBuilder().setVersion(1).build(); } else { @@ -214,9 +221,7 @@ public ApplyFeatureSetResponse applyFeatureSet(FeatureSetSpec newFeatureSetSpec) .setStatus(Status.NO_CHANGE) .build(); } - newFeatureSetSpec = newFeatureSetSpec.toBuilder() - .setVersion(latest.getVersion() + 1) - .build(); + newFeatureSetSpec = newFeatureSetSpec.toBuilder().setVersion(latest.getVersion() + 1).build(); } FeatureSet featureSet = FeatureSet.fromProto(newFeatureSetSpec); if (newFeatureSetSpec.getSource() == SourceProto.Source.getDefaultInstance()) { @@ -267,10 +272,11 @@ private Predicate getVersionFilter(String versionFilter) { if (!match.matches()) { throw io.grpc.Status.INVALID_ARGUMENT - .withDescription(String.format( - "Invalid version string '%s' provided. Version string may either " - + "be a fixed version, e.g. 10, or contain a comparator, e.g. >10.", - versionFilter)) + .withDescription( + String.format( + "Invalid version string '%s' provided. Version string may either " + + "be a fixed version, e.g. 10, or contain a comparator, e.g. >10.", + versionFilter)) .asRuntimeException(); } @@ -289,12 +295,12 @@ private Predicate getVersionFilter(String versionFilter) { return v -> v.getVersion() == versionNumber; default: throw io.grpc.Status.INVALID_ARGUMENT - .withDescription(String.format( - "Invalid comparator '%s' provided. Version string may either " - + "be a fixed version, e.g. 10, or contain a comparator, e.g. >10.", - comparator)) + .withDescription( + String.format( + "Invalid comparator '%s' provided. Version string may either " + + "be a fixed version, e.g. 10, or contain a comparator, e.g. >10.", + comparator)) .asRuntimeException(); } } - } diff --git a/core/src/main/java/feast/core/util/PackageUtil.java b/core/src/main/java/feast/core/util/PackageUtil.java index 3ee3744d96..eff4705aed 100644 --- a/core/src/main/java/feast/core/util/PackageUtil.java +++ b/core/src/main/java/feast/core/util/PackageUtil.java @@ -61,7 +61,8 @@ public static String resolveSpringBootPackageClasspath(URL url) throws IOExcepti if (Files.notExists(Paths.get(extractedJarPath))) { LOG.info( "Extracting '{}' to '{}' so we can get a local file path for the resource.", - jarPath, extractedJarPath); + jarPath, + extractedJarPath); extractJar(jarPath, extractedJarPath); } path = path.replace(".jar/BOOT-INF/", "/BOOT-INF/"); @@ -71,7 +72,8 @@ public static String resolveSpringBootPackageClasspath(URL url) throws IOExcepti } // TODO: extractJar() currently is quite slow because it only uses a single core to extract the - // jar. Extracting a jar packaged by Spring boot, for example, can take more than 5 minutes. One + // jar. Extracting a jar packaged by Spring boot, for example, can take more than 5 minutes. + // One // way to speed it up is to parallelize the extraction. 
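The TODO above describes the speed-up only in words; a hypothetical shape of that parallel extraction (not part of this patch, names and structure are illustrative) could fan the per-entry work out over a fixed thread pool, with each task opening its own JarFile so no handle is shared between threads:

    import java.io.InputStream;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;
    import java.nio.file.StandardCopyOption;
    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.List;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.Future;
    import java.util.jar.JarEntry;
    import java.util.jar.JarFile;

    class ParallelJarExtractorSketch {
      static void extract(String jarPath, String destDirPath) throws Exception {
        ExecutorService pool =
            Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());
        try (JarFile jar = new JarFile(jarPath)) {
          List<Future<?>> pending = new ArrayList<>();
          for (JarEntry entry : Collections.list(jar.entries())) {
            if (entry.isDirectory()) {
              Files.createDirectories(Paths.get(destDirPath, entry.getName()));
              continue;
            }
            String entryName = entry.getName();
            pending.add(
                pool.submit(
                    () -> {
                      Path target = Paths.get(destDirPath, entryName);
                      Files.createDirectories(target.getParent());
                      // Re-open the jar per task so threads never share a JarFile handle.
                      try (JarFile taskJar = new JarFile(jarPath);
                          InputStream in =
                              taskJar.getInputStream(taskJar.getJarEntry(entryName))) {
                        Files.copy(in, target, StandardCopyOption.REPLACE_EXISTING);
                      }
                      return null;
                    }));
          }
          for (Future<?> result : pending) {
            result.get(); // propagate the first extraction failure, if any
          }
        } finally {
          pool.shutdownNow();
        }
      }
    }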
/** @@ -79,9 +81,9 @@ public static String resolveSpringBootPackageClasspath(URL url) throws IOExcepti * *
<p>
Adapted from: https://stackoverflow.com/a/1529707/3949303 * - * @param jarPath File path of the jar file to extract. + * @param jarPath File path of the jar file to extract. * @param destDirPath Destination directory to extract the jar content, will be created if not - * exists. + * exists. * @throws IOException If error occured when reading or writing files. */ public static void extractJar(String jarPath, String destDirPath) throws IOException { diff --git a/core/src/main/java/feast/core/util/PipelineUtil.java b/core/src/main/java/feast/core/util/PipelineUtil.java index 9d08aee12a..959f417520 100644 --- a/core/src/main/java/feast/core/util/PipelineUtil.java +++ b/core/src/main/java/feast/core/util/PipelineUtil.java @@ -15,16 +15,16 @@ public class PipelineUtil { /** * Attempts to detect all the resources the class loader has access to. This does not recurse to * class loader parents stopping it from pulling in resources from the system class loader. - *
<p>
- * This method extends this implemention https://github.com/apache/beam/blob/01726e9c62313749f9ea7c93063a1178abd1a8db/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/PipelineResources.java#L51 + * + *
<p>
This method extends this implemention + * https://github.com/apache/beam/blob/01726e9c62313749f9ea7c93063a1178abd1a8db/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/PipelineResources.java#L51 * to support URL that starts with "jar:file:", usually coming from a packaged Spring Boot jar. * * @param classLoader The URLClassLoader to use to detect resources to stage. * @return A list of absolute paths to the resources the class loader uses. * @throws IllegalArgumentException If either the class loader is not a URLClassLoader or one of - * the resources the class loader exposes is not a file - * resource. - * @throws IOException If there is an error in reading or writing files. + * the resources the class loader exposes is not a file resource. + * @throws IOException If there is an error in reading or writing files. */ public static List detectClassPathResourcesToStage(ClassLoader classLoader) throws IOException { diff --git a/core/src/main/java/feast/core/util/TypeConversion.java b/core/src/main/java/feast/core/util/TypeConversion.java index 0c047cd882..3a2b7bdddc 100644 --- a/core/src/main/java/feast/core/util/TypeConversion.java +++ b/core/src/main/java/feast/core/util/TypeConversion.java @@ -20,7 +20,6 @@ import com.google.common.base.Strings; import com.google.gson.Gson; import com.google.gson.reflect.TypeToken; - import java.lang.reflect.Type; import java.util.*; import java.util.Map.Entry; @@ -61,8 +60,7 @@ public static Map convertJsonStringToMap(String jsonString) { if (jsonString == null || jsonString.equals("") || jsonString.equals("{}")) { return Collections.emptyMap(); } - Type stringMapType = new TypeToken>() { - }.getType(); + Type stringMapType = new TypeToken>() {}.getType(); return gson.fromJson(jsonString, stringMapType); } @@ -90,6 +88,6 @@ public static String[] convertMapToArgs(Map map) { for (Entry arg : map.entrySet()) { args.add(Strings.lenientFormat("--%s=%s", arg.getKey(), arg.getValue())); } - return args.toArray(new String[]{}); + return args.toArray(new String[] {}); } } diff --git a/core/src/main/java/feast/core/validators/FeatureSetValidator.java b/core/src/main/java/feast/core/validators/FeatureSetValidator.java index 3aa365eebf..50924dcadd 100644 --- a/core/src/main/java/feast/core/validators/FeatureSetValidator.java +++ b/core/src/main/java/feast/core/validators/FeatureSetValidator.java @@ -22,13 +22,14 @@ public static void validateSpec(FeatureSetSpec featureSetSpec) { } } - private static void checkUniqueColumns(List entitySpecs, List featureSpecs) { + private static void checkUniqueColumns( + List entitySpecs, List featureSpecs) { List names = entitySpecs.stream().map(EntitySpec::getName).collect(Collectors.toList()); featureSpecs.stream().map(f -> names.add(f.getName())); HashSet nameSet = Sets.newHashSet(names); if (nameSet.size() != names.size()) { - throw new IllegalArgumentException(String.format( - "fields within a featureset must be unique.")); + throw new IllegalArgumentException( + String.format("fields within a featureset must be unique.")); } } } diff --git a/core/src/main/java/feast/core/validators/Matchers.java b/core/src/main/java/feast/core/validators/Matchers.java index f67a5bb380..edd3554831 100644 --- a/core/src/main/java/feast/core/validators/Matchers.java +++ b/core/src/main/java/feast/core/validators/Matchers.java @@ -24,7 +24,8 @@ public class Matchers { private static Pattern UPPER_SNAKE_CASE_REGEX = Pattern.compile("^[A-Z0-9]+(_[A-Z0-9]+)*$"); private static Pattern LOWER_SNAKE_CASE_REGEX = 
Pattern.compile("^[a-z0-9]+(_[a-z0-9]+)*$"); private static Pattern VALID_CHARACTERS_REGEX = Pattern.compile("^[a-zA-Z0-9\\-_]*$"); - private static Pattern VALID_CHARACTERS_FSET_FILTER_REGEX = Pattern.compile("^[a-zA-Z0-9\\-_*]*$"); + private static Pattern VALID_CHARACTERS_FSET_FILTER_REGEX = + Pattern.compile("^[a-zA-Z0-9\\-_*]*$"); private static String ERROR_MESSAGE_TEMPLATE = "invalid value for field %s: %s"; diff --git a/core/src/test/java/feast/core/CoreApplicationTest.java b/core/src/test/java/feast/core/CoreApplicationTest.java index bf2872c73a..b91bfc91c4 100644 --- a/core/src/test/java/feast/core/CoreApplicationTest.java +++ b/core/src/test/java/feast/core/CoreApplicationTest.java @@ -1,46 +1,46 @@ package feast.core; // -//import static feast.core.config.StorageConfig.DEFAULT_SERVING_ID; -//import static feast.core.config.StorageConfig.DEFAULT_WAREHOUSE_ID; -//import static org.junit.Assert.assertEquals; -//import static org.mockito.ArgumentMatchers.any; -//import static org.mockito.Mockito.when; -// -//import feast.core.config.ImportJobDefaults; -//import feast.core.job.JobManager; -//import feast.core.model.StorageInfo; -//import feast.core.service.SpecService; -//import feast.core.stream.FeatureStream; -//import feast.specs.EntitySpecProto.EntitySpec; -//import feast.specs.FeatureSpecProto.FeatureSpec; -//import feast.specs.StorageSpecProto.StorageSpec; -//import feast.types.ValueProto.ValueType; -//import io.grpc.ManagedChannel; -//import io.grpc.ManagedChannelBuilder; -//import java.io.IOException; -//import java.nio.file.Files; -//import java.nio.file.Paths; -//import java.util.Collections; -//import java.util.HashMap; -//import java.util.List; -//import java.util.Map; -//import org.junit.Test; -//import org.junit.runner.RunWith; -//import org.mockito.ArgumentMatchers; -//import org.mockito.Mockito; -//import org.mockito.stubbing.Answer; -//import org.springframework.beans.factory.annotation.Autowired; -//import org.springframework.boot.test.context.SpringBootTest; -//import org.springframework.boot.test.context.TestConfiguration; -//import org.springframework.context.annotation.Bean; -//import org.springframework.test.annotation.DirtiesContext; -//import org.springframework.test.context.junit4.SpringRunner; -// -///** +// import static feast.core.config.StorageConfig.DEFAULT_SERVING_ID; +// import static feast.core.config.StorageConfig.DEFAULT_WAREHOUSE_ID; +// import static org.junit.Assert.assertEquals; +// import static org.mockito.ArgumentMatchers.any; +// import static org.mockito.Mockito.when; +// +// import feast.core.config.ImportJobDefaults; +// import feast.core.job.JobManager; +// import feast.core.model.StorageInfo; +// import feast.core.service.SpecService; +// import feast.core.stream.FeatureStream; +// import feast.specs.EntitySpecProto.EntitySpec; +// import feast.specs.FeatureSpecProto.FeatureSpec; +// import feast.specs.StorageSpecProto.StorageSpec; +// import feast.types.ValueProto.ValueType; +// import io.grpc.ManagedChannel; +// import io.grpc.ManagedChannelBuilder; +// import java.io.IOException; +// import java.nio.file.Files; +// import java.nio.file.Paths; +// import java.util.Collections; +// import java.util.HashMap; +// import java.util.List; +// import java.util.Map; +// import org.junit.Test; +// import org.junit.runner.RunWith; +// import org.mockito.ArgumentMatchers; +// import org.mockito.Mockito; +// import org.mockito.stubbing.Answer; +// import org.springframework.beans.factory.annotation.Autowired; +// import 
org.springframework.boot.test.context.SpringBootTest; +// import org.springframework.boot.test.context.TestConfiguration; +// import org.springframework.context.annotation.Bean; +// import org.springframework.test.annotation.DirtiesContext; +// import org.springframework.test.context.junit4.SpringRunner; +// +/// ** // * Starts the application context with some properties // */ -//@RunWith(SpringRunner.class) -//@SpringBootTest(properties = { +// @RunWith(SpringRunner.class) +// @SpringBootTest(properties = { // "feast.jobs.workspace=${java.io.tmpdir}/${random.uuid}", // "spring.datasource.url=jdbc:h2:mem:testdb", // "feast.store.warehouse.type=FILE.JSON", @@ -50,79 +50,82 @@ // "feast.store.errors.type=STDERR", // "feast.stream.type=kafka", // "feast.stream.options={\"bootstrapServers\":\"localhost:8081\"}" -//}) -//@DirtiesContext +// }) +// @DirtiesContext public class CoreApplicationTest { -// -// @Autowired -// SpecService specService; -// @Autowired -// ImportJobDefaults jobDefaults; -// @Autowired -// JobManager jobManager; -// @Autowired -// FeatureStream featureStream; -// -// @Test -// public void test_withProperties_systemServingAndWarehouseStoresRegistered() throws IOException { -// Files.createDirectory(Paths.get(jobDefaults.getWorkspace())); -// -// List warehouseStorageInfo = specService -// .getStorage(Collections.singletonList(DEFAULT_WAREHOUSE_ID)); -// assertEquals(warehouseStorageInfo.size(), 1); -// assertEquals(warehouseStorageInfo.get(0).getStorageSpec(), StorageSpec.newBuilder() -// .setId(DEFAULT_WAREHOUSE_ID).setType("FILE.JSON").putOptions("path", "/tmp/foobar") -// .build()); -// -// List servingStorageInfo = specService -// .getStorage(Collections.singletonList(DEFAULT_SERVING_ID)); -// assertEquals(servingStorageInfo.size(), 1); -// assertEquals(servingStorageInfo.get(0).getStorageSpec(), StorageSpec.newBuilder() -// .setId(DEFAULT_SERVING_ID).setType("REDIS") -// .putOptions("host", "localhost") -// .putOptions("port", "1234") -// .build()); -// -// ManagedChannelBuilder channelBuilder = ManagedChannelBuilder.forAddress("localhost", 6565); -// ManagedChannel channel = channelBuilder.usePlaintext(true).build(); -// CoreServiceGrpc.CoreServiceBlockingStub coreService = CoreServiceGrpc.newBlockingStub(channel); -// -// EntitySpec entitySpec = EntitySpec.newBuilder().setName("test").build(); -// FeatureSpec featureSpec = FeatureSpec.newBuilder() -// .setId("test.int64") -// .setName("int64") -// .setEntity("test") -// .setValueType(ValueType.Enum.INT64) -// .setOwner("hermione@example.com") -// .setDescription("Test is a test") -// .setUri("http://example.com/test.int64").build(); -// -// when(featureStream.generateTopicName(ArgumentMatchers.anyString())).thenReturn("my-topic"); -// when(featureStream.getType()).thenReturn("kafka"); -// -// coreService.applyEntity(entitySpec); -// -// Map args = new HashMap<>(); -// when(jobManager.startJob(any(), any())).thenAnswer((Answer) invocation -> { -// args.put(0, invocation.getArgument(0)); -// args.put(1, invocation.getArgument(1)); -// return "externalJobId1234"; -// }); -// -// coreService.applyFeature(featureSpec); -// } -// -// @TestConfiguration -// public static class MockProvider { -// -// @Bean -// public JobManager jobManager() { -// return Mockito.mock(JobManager.class); -// } -// -// @Bean -// public FeatureStream featureStream() { -// return Mockito.mock(FeatureStream.class); -// } -// } -} \ No newline at end of file + // + // @Autowired + // SpecService specService; + // @Autowired + // ImportJobDefaults 
jobDefaults; + // @Autowired + // JobManager jobManager; + // @Autowired + // FeatureStream featureStream; + // + // @Test + // public void test_withProperties_systemServingAndWarehouseStoresRegistered() throws IOException + // { + // Files.createDirectory(Paths.get(jobDefaults.getWorkspace())); + // + // List warehouseStorageInfo = specService + // .getStorage(Collections.singletonList(DEFAULT_WAREHOUSE_ID)); + // assertEquals(warehouseStorageInfo.size(), 1); + // assertEquals(warehouseStorageInfo.get(0).getStorageSpec(), StorageSpec.newBuilder() + // .setId(DEFAULT_WAREHOUSE_ID).setType("FILE.JSON").putOptions("path", "/tmp/foobar") + // .build()); + // + // List servingStorageInfo = specService + // .getStorage(Collections.singletonList(DEFAULT_SERVING_ID)); + // assertEquals(servingStorageInfo.size(), 1); + // assertEquals(servingStorageInfo.get(0).getStorageSpec(), StorageSpec.newBuilder() + // .setId(DEFAULT_SERVING_ID).setType("REDIS") + // .putOptions("host", "localhost") + // .putOptions("port", "1234") + // .build()); + // + // ManagedChannelBuilder channelBuilder = ManagedChannelBuilder.forAddress("localhost", + // 6565); + // ManagedChannel channel = channelBuilder.usePlaintext(true).build(); + // CoreServiceGrpc.CoreServiceBlockingStub coreService = + // CoreServiceGrpc.newBlockingStub(channel); + // + // EntitySpec entitySpec = EntitySpec.newBuilder().setName("test").build(); + // FeatureSpec featureSpec = FeatureSpec.newBuilder() + // .setId("test.int64") + // .setName("int64") + // .setEntity("test") + // .setValueType(ValueType.Enum.INT64) + // .setOwner("hermione@example.com") + // .setDescription("Test is a test") + // .setUri("http://example.com/test.int64").build(); + // + // when(featureStream.generateTopicName(ArgumentMatchers.anyString())).thenReturn("my-topic"); + // when(featureStream.getType()).thenReturn("kafka"); + // + // coreService.applyEntity(entitySpec); + // + // Map args = new HashMap<>(); + // when(jobManager.startJob(any(), any())).thenAnswer((Answer) invocation -> { + // args.put(0, invocation.getArgument(0)); + // args.put(1, invocation.getArgument(1)); + // return "externalJobId1234"; + // }); + // + // coreService.applyFeature(featureSpec); + // } + // + // @TestConfiguration + // public static class MockProvider { + // + // @Bean + // public JobManager jobManager() { + // return Mockito.mock(JobManager.class); + // } + // + // @Bean + // public FeatureStream featureStream() { + // return Mockito.mock(FeatureStream.class); + // } + // } +} diff --git a/core/src/test/java/feast/core/http/HealthControllerTest.java b/core/src/test/java/feast/core/http/HealthControllerTest.java index 030033b789..70e6e632e0 100644 --- a/core/src/test/java/feast/core/http/HealthControllerTest.java +++ b/core/src/test/java/feast/core/http/HealthControllerTest.java @@ -1,15 +1,14 @@ package feast.core.http; -import org.junit.Test; -import org.springframework.http.HttpStatus; -import org.springframework.http.ResponseEntity; +import static org.junit.Assert.assertEquals; +import static org.mockito.Mockito.*; -import javax.sql.DataSource; import java.sql.Connection; import java.sql.SQLException; - -import static org.junit.Assert.assertEquals; -import static org.mockito.Mockito.*; +import javax.sql.DataSource; +import org.junit.Test; +import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; public class HealthControllerTest { @Test diff --git a/core/src/test/java/feast/core/job/ScheduledJobMonitorTest.java 
b/core/src/test/java/feast/core/job/ScheduledJobMonitorTest.java index 631e5a257a..b5c2bab89d 100644 --- a/core/src/test/java/feast/core/job/ScheduledJobMonitorTest.java +++ b/core/src/test/java/feast/core/job/ScheduledJobMonitorTest.java @@ -24,22 +24,15 @@ import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; -import com.google.common.collect.Lists; -import feast.core.SourceProto; import feast.core.SourceProto.KafkaSourceConfig; import feast.core.SourceProto.SourceType; import feast.core.dao.JobInfoRepository; -import feast.core.dao.MetricsRepository; import feast.core.model.JobInfo; import feast.core.model.JobStatus; -import feast.core.model.Metrics; import feast.core.model.Source; import feast.core.model.Store; -import java.util.ArrayList; -import java.util.Arrays; import java.util.Collection; import java.util.Collections; -import java.util.List; import org.junit.Before; import org.junit.Test; import org.mockito.ArgumentCaptor; @@ -50,11 +43,9 @@ public class ScheduledJobMonitorTest { ScheduledJobMonitor scheduledJobMonitor; - @Mock - JobMonitor jobMonitor; + @Mock JobMonitor jobMonitor; - @Mock - JobInfoRepository jobInfoRepository; + @Mock JobInfoRepository jobInfoRepository; @Before public void setUp() { @@ -64,9 +55,14 @@ public void setUp() { @Test public void getJobStatus_shouldUpdateJobInfoForRunningJob() { - Source source = new Source(SourceType.KAFKA, - KafkaSourceConfig.newBuilder().setBootstrapServers("kafka:9092") - .setTopic("feast-topic").build(), true); + Source source = + new Source( + SourceType.KAFKA, + KafkaSourceConfig.newBuilder() + .setBootstrapServers("kafka:9092") + .setTopic("feast-topic") + .build(), + true); JobInfo job = new JobInfo( "jobId", @@ -100,5 +96,4 @@ public void getJobStatus_shouldNotUpdateJobInfoForTerminalJob() { verify(jobInfoRepository, never()).save(any(JobInfo.class)); } - } diff --git a/core/src/test/java/feast/core/job/dataflow/DataflowJobManagerTest.java b/core/src/test/java/feast/core/job/dataflow/DataflowJobManagerTest.java index a2200f805f..2eb4e53450 100644 --- a/core/src/test/java/feast/core/job/dataflow/DataflowJobManagerTest.java +++ b/core/src/test/java/feast/core/job/dataflow/DataflowJobManagerTest.java @@ -55,11 +55,9 @@ public class DataflowJobManagerTest { - @Rule - public final ExpectedException expectedException = ExpectedException.none(); + @Rule public final ExpectedException expectedException = ExpectedException.none(); - @Mock - private Dataflow dataflow; + @Mock private Dataflow dataflow; private Map defaults; private DataflowJobManager dfJobManager; @@ -78,23 +76,22 @@ public void setUp() { @Test public void shouldStartJobWithCorrectPipelineOptions() throws IOException { - StoreProto.Store store = StoreProto.Store.newBuilder() - .setName("SERVING") - .setType(StoreType.REDIS) - .setRedisConfig(RedisConfig.newBuilder().setHost("localhost").setPort(6379).build()) - .build(); + StoreProto.Store store = + StoreProto.Store.newBuilder() + .setName("SERVING") + .setType(StoreType.REDIS) + .setRedisConfig(RedisConfig.newBuilder().setHost("localhost").setPort(6379).build()) + .build(); - FeatureSetSpec featureSetSpec = FeatureSetSpec.newBuilder() - .setName("featureSet") - .setVersion(1) - .build(); + FeatureSetSpec featureSetSpec = + FeatureSetSpec.newBuilder().setName("featureSet").setVersion(1).build(); Printer printer = JsonFormat.printer(); String expectedExtJobId = "feast-job-0"; String jobName = "job"; - ImportOptions expectedPipelineOptions = PipelineOptionsFactory.fromArgs("") - 
.as(ImportOptions.class); + ImportOptions expectedPipelineOptions = + PipelineOptionsFactory.fromArgs("").as(ImportOptions.class); expectedPipelineOptions.setRunner(DataflowRunner.class); expectedPipelineOptions.setProject("project"); expectedPipelineOptions.setRegion("region"); @@ -102,11 +99,10 @@ public void shouldStartJobWithCorrectPipelineOptions() throws IOException { expectedPipelineOptions.setAppName("DataflowJobManager"); expectedPipelineOptions.setJobName(jobName); expectedPipelineOptions.setStoreJson(Lists.newArrayList(printer.print(store))); - expectedPipelineOptions - .setFeatureSetSpecJson(Lists.newArrayList(printer.print(featureSetSpec))); + expectedPipelineOptions.setFeatureSetSpecJson( + Lists.newArrayList(printer.print(featureSetSpec))); - ArgumentCaptor captor = ArgumentCaptor - .forClass(ImportOptions.class); + ArgumentCaptor captor = ArgumentCaptor.forClass(ImportOptions.class); DataflowPipelineJob mockPipelineResult = Mockito.mock(DataflowPipelineJob.class); when(mockPipelineResult.getState()).thenReturn(State.RUNNING); @@ -118,33 +114,36 @@ public void shouldStartJobWithCorrectPipelineOptions() throws IOException { verify(dfJobManager, times(1)).runPipeline(captor.capture()); ImportOptions actualPipelineOptions = captor.getValue(); - expectedPipelineOptions.setOptionsId(actualPipelineOptions.getOptionsId()); // avoid comparing this value + expectedPipelineOptions.setOptionsId( + actualPipelineOptions.getOptionsId()); // avoid comparing this value // We only check that we are calling getFilesToStage() manually, because the automatic approach - // throws an error: https://github.com/gojek/feast/pull/291 i.e. do not check for the actual files that are staged - assertThat("filesToStage in pipelineOptions should not be null, job manager should set it.", actualPipelineOptions.getFilesToStage() != null); - assertThat("filesToStage in pipelineOptions should contain at least 1 item", actualPipelineOptions.getFilesToStage().size() > 0); + // throws an error: https://github.com/gojek/feast/pull/291 i.e. 
do not check for the actual + // files that are staged + assertThat( + "filesToStage in pipelineOptions should not be null, job manager should set it.", + actualPipelineOptions.getFilesToStage() != null); + assertThat( + "filesToStage in pipelineOptions should contain at least 1 item", + actualPipelineOptions.getFilesToStage().size() > 0); // Assume the files that are staged are correct expectedPipelineOptions.setFilesToStage(actualPipelineOptions.getFilesToStage()); - assertThat(actualPipelineOptions.toString(), - equalTo(expectedPipelineOptions.toString())); + assertThat(actualPipelineOptions.toString(), equalTo(expectedPipelineOptions.toString())); assertThat(jobId, equalTo(expectedExtJobId)); } - @Test public void shouldThrowExceptionWhenJobStateTerminal() throws IOException { - StoreProto.Store store = StoreProto.Store.newBuilder() - .setName("SERVING") - .setType(StoreType.REDIS) - .setRedisConfig(RedisConfig.newBuilder().setHost("localhost").setPort(6379).build()) - .build(); - - FeatureSetSpec featureSetSpec = FeatureSetSpec.newBuilder() - .setName("featureSet") - .setVersion(1) - .build(); + StoreProto.Store store = + StoreProto.Store.newBuilder() + .setName("SERVING") + .setType(StoreType.REDIS) + .setRedisConfig(RedisConfig.newBuilder().setHost("localhost").setPort(6379).build()) + .build(); + + FeatureSetSpec featureSetSpec = + FeatureSetSpec.newBuilder().setName("featureSet").setVersion(1).build(); dfJobManager = Mockito.spy(dfJobManager); diff --git a/core/src/test/java/feast/core/job/dataflow/DataflowJobMonitorTest.java b/core/src/test/java/feast/core/job/dataflow/DataflowJobMonitorTest.java index 32cd663ae4..f68a8b916b 100644 --- a/core/src/test/java/feast/core/job/dataflow/DataflowJobMonitorTest.java +++ b/core/src/test/java/feast/core/job/dataflow/DataflowJobMonitorTest.java @@ -98,7 +98,8 @@ public void getJobStatus_shouldReturnUnknownStateForInvalidDataflowJobState() th public void getJobStatus_shouldReturnUnknownStateWhenExceptionHappen() throws IOException { String jobId = "myJobId"; - when(jobService.get(projectId, location, jobId)).thenThrow(new RuntimeException("some thing wrong")); + when(jobService.get(projectId, location, jobId)) + .thenThrow(new RuntimeException("some thing wrong")); JobInfo jobInfo = mock(JobInfo.class); when(jobInfo.getExtId()).thenReturn(jobId); @@ -108,11 +109,16 @@ public void getJobStatus_shouldReturnUnknownStateWhenExceptionHappen() throws IO @Test public void test() { - Field field = Field.newBuilder() - .setName("Hello") - .setValue(Value.newBuilder().setBoolListVal(BoolList.newBuilder().addAllVal( - Lists.newArrayList(true,false,true,true)).build())) - .build(); + Field field = + Field.newBuilder() + .setName("Hello") + .setValue( + Value.newBuilder() + .setBoolListVal( + BoolList.newBuilder() + .addAllVal(Lists.newArrayList(true, false, true, true)) + .build())) + .build(); field.getName(); } -} \ No newline at end of file +} diff --git a/core/src/test/java/feast/core/job/dataflow/DataflowJobStateMapperTest.java b/core/src/test/java/feast/core/job/dataflow/DataflowJobStateMapperTest.java index 09208ae9f0..86729b4b4d 100644 --- a/core/src/test/java/feast/core/job/dataflow/DataflowJobStateMapperTest.java +++ b/core/src/test/java/feast/core/job/dataflow/DataflowJobStateMapperTest.java @@ -27,4 +27,4 @@ public class DataflowJobStateMapperTest { public void shouldThrowIllegalArgumentExceptionForInvalidString() { mapper.map("INVALID_STATE"); } -} \ No newline at end of file +} diff --git 
a/core/src/test/java/feast/core/job/direct/DirectRunnerJobManagerTest.java b/core/src/test/java/feast/core/job/direct/DirectRunnerJobManagerTest.java index 2dbd53105e..a75565f2dc 100644 --- a/core/src/test/java/feast/core/job/direct/DirectRunnerJobManagerTest.java +++ b/core/src/test/java/feast/core/job/direct/DirectRunnerJobManagerTest.java @@ -33,11 +33,9 @@ import org.mockito.Mockito; public class DirectRunnerJobManagerTest { - @Rule - public final ExpectedException expectedException = ExpectedException.none(); + @Rule public final ExpectedException expectedException = ExpectedException.none(); - @Mock - private DirectJobRegistry directJobRegistry; + @Mock private DirectJobRegistry directJobRegistry; private DirectRunnerJobManager drJobManager; private Map defaults; @@ -55,35 +53,33 @@ public void setUp() { @Test public void shouldStartDirectJobAndRegisterPipelineResult() throws IOException { - StoreProto.Store store = StoreProto.Store.newBuilder() - .setName("SERVING") - .setType(StoreType.REDIS) - .setRedisConfig(RedisConfig.newBuilder().setHost("localhost").setPort(6379).build()) - .build(); + StoreProto.Store store = + StoreProto.Store.newBuilder() + .setName("SERVING") + .setType(StoreType.REDIS) + .setRedisConfig(RedisConfig.newBuilder().setHost("localhost").setPort(6379).build()) + .build(); - FeatureSetSpec featureSetSpec = FeatureSetSpec.newBuilder() - .setName("featureSet") - .setVersion(1) - .build(); + FeatureSetSpec featureSetSpec = + FeatureSetSpec.newBuilder().setName("featureSet").setVersion(1).build(); Printer printer = JsonFormat.printer(); - ImportOptions expectedPipelineOptions = PipelineOptionsFactory.fromArgs("") - .as(ImportOptions.class); + ImportOptions expectedPipelineOptions = + PipelineOptionsFactory.fromArgs("").as(ImportOptions.class); expectedPipelineOptions.setAppName("DirectRunnerJobManager"); expectedPipelineOptions.setRunner(DirectRunner.class); expectedPipelineOptions.setBlockOnRun(false); expectedPipelineOptions.setProject(""); expectedPipelineOptions.setStoreJson(Lists.newArrayList(printer.print(store))); expectedPipelineOptions.setProject(""); - expectedPipelineOptions - .setFeatureSetSpecJson(Lists.newArrayList(printer.print(featureSetSpec))); + expectedPipelineOptions.setFeatureSetSpecJson( + Lists.newArrayList(printer.print(featureSetSpec))); String expectedJobId = "feast-job-0"; - ArgumentCaptor pipelineOptionsCaptor = ArgumentCaptor - .forClass(ImportOptions.class); - ArgumentCaptor directJobCaptor = ArgumentCaptor - .forClass(DirectJob.class); + ArgumentCaptor pipelineOptionsCaptor = + ArgumentCaptor.forClass(ImportOptions.class); + ArgumentCaptor directJobCaptor = ArgumentCaptor.forClass(DirectJob.class); PipelineResult mockPipelineResult = Mockito.mock(PipelineResult.class); doReturn(mockPipelineResult).when(drJobManager).runPipeline(any()); @@ -94,10 +90,10 @@ public void shouldStartDirectJobAndRegisterPipelineResult() throws IOException { ImportOptions actualPipelineOptions = pipelineOptionsCaptor.getValue(); DirectJob jobStarted = directJobCaptor.getValue(); - expectedPipelineOptions.setOptionsId(actualPipelineOptions.getOptionsId()); // avoid comparing this value + expectedPipelineOptions.setOptionsId( + actualPipelineOptions.getOptionsId()); // avoid comparing this value - assertThat(actualPipelineOptions.toString(), - equalTo(expectedPipelineOptions.toString())); + assertThat(actualPipelineOptions.toString(), equalTo(expectedPipelineOptions.toString())); assertThat(jobStarted.getPipelineResult(), equalTo(mockPipelineResult)); 
assertThat(jobStarted.getJobId(), equalTo(expectedJobId)); assertThat(jobId, equalTo(expectedJobId)); @@ -111,4 +107,4 @@ public void shouldAbortJobThenRemoveFromRegistry() throws IOException { verify(job, times(1)).abort(); verify(directJobRegistry, times(1)).remove("job"); } -} \ No newline at end of file +} diff --git a/core/src/test/java/feast/core/service/JobCoordinatorServiceTest.java b/core/src/test/java/feast/core/service/JobCoordinatorServiceTest.java index 47d3e8486d..8091c148cd 100644 --- a/core/src/test/java/feast/core/service/JobCoordinatorServiceTest.java +++ b/core/src/test/java/feast/core/service/JobCoordinatorServiceTest.java @@ -8,7 +8,6 @@ import com.google.common.collect.Lists; import feast.core.FeatureSetProto.FeatureSetSpec; -import feast.core.SourceProto; import feast.core.SourceProto.KafkaSourceConfig; import feast.core.SourceProto.SourceType; import feast.core.StoreProto; @@ -30,12 +29,9 @@ public class JobCoordinatorServiceTest { - @Rule - public final ExpectedException exception = ExpectedException.none(); - @Mock - JobInfoRepository jobInfoRepository; - @Mock - JobManager jobManager; + @Rule public final ExpectedException exception = ExpectedException.none(); + @Mock JobInfoRepository jobInfoRepository; + @Mock JobManager jobManager; private JobCoordinatorService jobCoordinatorService; private JobInfo existingJob; @@ -45,23 +41,37 @@ public class JobCoordinatorServiceTest { public void setUp() { initMocks(this); - Store store = Store.fromProto(StoreProto.Store.newBuilder() - .setName("SERVING") - .setType(StoreType.REDIS) - .setRedisConfig(RedisConfig.newBuilder().setHost("localhost").setPort(6379)) - .build()); - defaultSource = new Source(SourceType.KAFKA, - KafkaSourceConfig.newBuilder().setBootstrapServers("kafka:9092").setTopic("feast-topic") - .build(), true); + Store store = + Store.fromProto( + StoreProto.Store.newBuilder() + .setName("SERVING") + .setType(StoreType.REDIS) + .setRedisConfig(RedisConfig.newBuilder().setHost("localhost").setPort(6379)) + .build()); + defaultSource = + new Source( + SourceType.KAFKA, + KafkaSourceConfig.newBuilder() + .setBootstrapServers("kafka:9092") + .setTopic("feast-topic") + .build(), + true); FeatureSet featureSet1 = new FeatureSet(); featureSet1.setId("featureSet1:1"); featureSet1.setSource(defaultSource); FeatureSet featureSet2 = new FeatureSet(); featureSet2.setId("featureSet2:1"); featureSet2.setSource(defaultSource); - existingJob = new JobInfo("extid", "name", "DirectRunner", defaultSource, store, - Lists.newArrayList(featureSet1, featureSet2), Lists.newArrayList(), - JobStatus.RUNNING); + existingJob = + new JobInfo( + "extid", + "name", + "DirectRunner", + defaultSource, + store, + Lists.newArrayList(featureSet1, featureSet2), + Lists.newArrayList(), + JobStatus.RUNNING); when(jobInfoRepository.findBySourceIdAndStoreName(defaultSource.getId(), "SERVING")) .thenReturn(Lists.newArrayList(existingJob)); @@ -71,82 +81,98 @@ public void setUp() { @Test public void shouldNotStartOrUpdateJobIfNoChanges() { - FeatureSetSpec featureSet1 = FeatureSetSpec.newBuilder() - .setName("featureSet1") - .setVersion(1) - .setSource(defaultSource.toProto()) - .build(); - FeatureSetSpec featureSet2 = FeatureSetSpec.newBuilder() - .setName("featureSet2") - .setVersion(1) - .setSource(defaultSource.toProto()) - .build(); - StoreProto.Store store = StoreProto.Store.newBuilder() - .setName("SERVING") - .setType(StoreType.REDIS) - .setRedisConfig(RedisConfig.newBuilder().setHost("localhost").setPort(6379)) - .build(); - JobInfo jobInfo 
= jobCoordinatorService - .startOrUpdateJob(Lists.newArrayList(featureSet1, featureSet2), - defaultSource.toProto(), store); + FeatureSetSpec featureSet1 = + FeatureSetSpec.newBuilder() + .setName("featureSet1") + .setVersion(1) + .setSource(defaultSource.toProto()) + .build(); + FeatureSetSpec featureSet2 = + FeatureSetSpec.newBuilder() + .setName("featureSet2") + .setVersion(1) + .setSource(defaultSource.toProto()) + .build(); + StoreProto.Store store = + StoreProto.Store.newBuilder() + .setName("SERVING") + .setType(StoreType.REDIS) + .setRedisConfig(RedisConfig.newBuilder().setHost("localhost").setPort(6379)) + .build(); + JobInfo jobInfo = + jobCoordinatorService.startOrUpdateJob( + Lists.newArrayList(featureSet1, featureSet2), defaultSource.toProto(), store); assertThat(jobInfo, equalTo(existingJob)); } @Test public void shouldStartJobIfNotExists() { - FeatureSetSpec featureSet = FeatureSetSpec.newBuilder() - .setName("featureSet") - .setVersion(1) - .setSource(defaultSource.toProto()) - .build(); - StoreProto.Store store = StoreProto.Store.newBuilder() - .setName("SERVING") - .setType(StoreType.REDIS) - .setRedisConfig(RedisConfig.newBuilder().setHost("localhost").setPort(6379)) - .build(); + FeatureSetSpec featureSet = + FeatureSetSpec.newBuilder() + .setName("featureSet") + .setVersion(1) + .setSource(defaultSource.toProto()) + .build(); + StoreProto.Store store = + StoreProto.Store.newBuilder() + .setName("SERVING") + .setType(StoreType.REDIS) + .setRedisConfig(RedisConfig.newBuilder().setHost("localhost").setPort(6379)) + .build(); String jobId = "featureSet-to-SERVING"; String extJobId = "extId123"; - when(jobCoordinatorService.createJobId("featureSet", "SERVING")) - .thenReturn(jobId); - when(jobManager.startJob(jobId, Lists.newArrayList(featureSet), store)) - .thenReturn(extJobId); + when(jobCoordinatorService.createJobId("featureSet", "SERVING")).thenReturn(jobId); + when(jobManager.startJob(jobId, Lists.newArrayList(featureSet), store)).thenReturn(extJobId); when(jobManager.getRunnerType()).thenReturn(Runner.DIRECT); FeatureSet expectedFeatureSet = new FeatureSet(); expectedFeatureSet.setId("featureSet:1"); - JobInfo expectedJobInfo = new JobInfo(jobId, extJobId, "DirectRunner", - defaultSource, Store.fromProto(store), Lists.newArrayList(expectedFeatureSet), - JobStatus.RUNNING); + JobInfo expectedJobInfo = + new JobInfo( + jobId, + extJobId, + "DirectRunner", + defaultSource, + Store.fromProto(store), + Lists.newArrayList(expectedFeatureSet), + JobStatus.RUNNING); when(jobInfoRepository.save(expectedJobInfo)).thenReturn(expectedJobInfo); - JobInfo jobInfo = jobCoordinatorService - .startOrUpdateJob(Lists.newArrayList(featureSet), defaultSource.toProto(), - store); + JobInfo jobInfo = + jobCoordinatorService.startOrUpdateJob( + Lists.newArrayList(featureSet), defaultSource.toProto(), store); assertThat(jobInfo, equalTo(expectedJobInfo)); } @Test public void shouldUpdateJobIfAlreadyExistsButThereIsAChange() { - FeatureSetSpec featureSet = FeatureSetSpec.newBuilder() - .setName("featureSet1") - .setVersion(1) - .setSource(defaultSource.toProto()) - .build(); - StoreProto.Store store = StoreProto.Store.newBuilder() - .setName("SERVING") - .setType(StoreType.REDIS) - .setRedisConfig(RedisConfig.newBuilder().setHost("localhost").setPort(6379)) - .build(); + FeatureSetSpec featureSet = + FeatureSetSpec.newBuilder() + .setName("featureSet1") + .setVersion(1) + .setSource(defaultSource.toProto()) + .build(); + StoreProto.Store store = + StoreProto.Store.newBuilder() + 
.setName("SERVING") + .setType(StoreType.REDIS) + .setRedisConfig(RedisConfig.newBuilder().setHost("localhost").setPort(6379)) + .build(); String extId = "extId123"; - JobInfo modifiedJob = new JobInfo(existingJob.getId(), existingJob.getExtId(), - existingJob.getRunner(), defaultSource, Store.fromProto(store), - Lists.newArrayList(FeatureSet.fromProto(featureSet)), JobStatus.RUNNING); + JobInfo modifiedJob = + new JobInfo( + existingJob.getId(), + existingJob.getExtId(), + existingJob.getRunner(), + defaultSource, + Store.fromProto(store), + Lists.newArrayList(FeatureSet.fromProto(featureSet)), + JobStatus.RUNNING); when(jobManager.updateJob(modifiedJob)).thenReturn(extId); JobInfo expectedJobInfo = modifiedJob; expectedJobInfo.setExtId(extId); when(jobInfoRepository.save(expectedJobInfo)).thenReturn(expectedJobInfo); - JobInfo jobInfo = jobCoordinatorService - .startOrUpdateJob(Lists.newArrayList(featureSet), defaultSource.toProto(), - store); + JobInfo jobInfo = + jobCoordinatorService.startOrUpdateJob( + Lists.newArrayList(featureSet), defaultSource.toProto(), store); assertThat(jobInfo, equalTo(expectedJobInfo)); } - -} \ No newline at end of file +} diff --git a/core/src/test/java/feast/core/service/SpecServiceTest.java b/core/src/test/java/feast/core/service/SpecServiceTest.java index cea80dbebe..025cee9e2c 100644 --- a/core/src/test/java/feast/core/service/SpecServiceTest.java +++ b/core/src/test/java/feast/core/service/SpecServiceTest.java @@ -68,14 +68,11 @@ public class SpecServiceTest { - @Mock - private FeatureSetRepository featureSetRepository; + @Mock private FeatureSetRepository featureSetRepository; - @Mock - private StoreRepository storeRepository; + @Mock private StoreRepository storeRepository; - @Rule - public final ExpectedException expectedException = ExpectedException.none(); + @Rule public final ExpectedException expectedException = ExpectedException.none(); private SpecService specService; private List featureSets; @@ -85,9 +82,14 @@ public class SpecServiceTest { @Before public void setUp() { initMocks(this); - defaultSource = new Source(SourceType.KAFKA, - KafkaSourceConfig.newBuilder().setBootstrapServers("kafka:9092").setTopic("my-topic") - .build(), true); + defaultSource = + new Source( + SourceType.KAFKA, + KafkaSourceConfig.newBuilder() + .setBootstrapServers("kafka:9092") + .setTopic("my-topic") + .build(), + true); FeatureSet featureSet1v1 = newDummyFeatureSet("f1", 1); FeatureSet featureSet1v2 = newDummyFeatureSet("f1", 2); @@ -95,18 +97,13 @@ public void setUp() { FeatureSet featureSet2v1 = newDummyFeatureSet("f2", 1); featureSets = Arrays.asList(featureSet1v1, featureSet1v2, featureSet1v3, featureSet2v1); - when(featureSetRepository.findAll()) - .thenReturn(featureSets); - when(featureSetRepository.findByName("f1")) - .thenReturn(featureSets.subList(0, 3)); + when(featureSetRepository.findAll()).thenReturn(featureSets); + when(featureSetRepository.findByName("f1")).thenReturn(featureSets.subList(0, 3)); when(featureSetRepository.findFirstFeatureSetByNameOrderByVersionDesc("f1")) .thenReturn(featureSet1v3); - when(featureSetRepository.findByNameWithWildcard("f1")) - .thenReturn(featureSets.subList(0, 3)); - when(featureSetRepository.findByName("asd")) - .thenReturn(Lists.newArrayList()); - when(featureSetRepository.findByNameWithWildcard("f%")) - .thenReturn(featureSets); + when(featureSetRepository.findByNameWithWildcard("f1")).thenReturn(featureSets.subList(0, 3)); + when(featureSetRepository.findByName("asd")).thenReturn(Lists.newArrayList()); + 
when(featureSetRepository.findByNameWithWildcard("f%")).thenReturn(featureSets); Store store1 = newDummyStore("SERVING"); Store store2 = newDummyStore("WAREHOUSE"); @@ -115,121 +112,106 @@ public void setUp() { when(storeRepository.findById("SERVING")).thenReturn(Optional.of(store1)); when(storeRepository.findById("NOTFOUND")).thenReturn(Optional.empty()); - - specService = new SpecService(featureSetRepository, storeRepository, defaultSource); } @Test public void shouldGetAllFeatureSetsIfNoFilterProvided() throws InvalidProtocolBufferException { - ListFeatureSetsResponse actual = specService - .listFeatureSets(Filter.newBuilder().setFeatureSetName("").build()); + ListFeatureSetsResponse actual = + specService.listFeatureSets(Filter.newBuilder().setFeatureSetName("").build()); List list = new ArrayList<>(); for (FeatureSet featureSet : featureSets) { FeatureSetSpec toProto = featureSet.toProto(); list.add(toProto); } - ListFeatureSetsResponse expected = ListFeatureSetsResponse - .newBuilder() - .addAllFeatureSets( - list) - .build(); + ListFeatureSetsResponse expected = + ListFeatureSetsResponse.newBuilder().addAllFeatureSets(list).build(); assertThat(actual, equalTo(expected)); } @Test public void shouldGetAllFeatureSetsMatchingNameIfNoVersionProvided() throws InvalidProtocolBufferException { - ListFeatureSetsResponse actual = specService - .listFeatureSets(Filter.newBuilder().setFeatureSetName("f1").build()); - List expectedFeatureSets = featureSets.stream() - .filter(fs -> fs.getName().equals("f1")) - .collect(Collectors.toList()); + ListFeatureSetsResponse actual = + specService.listFeatureSets(Filter.newBuilder().setFeatureSetName("f1").build()); + List expectedFeatureSets = + featureSets.stream().filter(fs -> fs.getName().equals("f1")).collect(Collectors.toList()); List list = new ArrayList<>(); for (FeatureSet expectedFeatureSet : expectedFeatureSets) { FeatureSetSpec toProto = expectedFeatureSet.toProto(); list.add(toProto); } - ListFeatureSetsResponse expected = ListFeatureSetsResponse - .newBuilder() - .addAllFeatureSets( - list) - .build(); + ListFeatureSetsResponse expected = + ListFeatureSetsResponse.newBuilder().addAllFeatureSets(list).build(); assertThat(actual, equalTo(expected)); } @Test public void shouldGetAllFeatureSetsMatchingNameWithWildcardSearch() throws InvalidProtocolBufferException { - ListFeatureSetsResponse actual = specService - .listFeatureSets(Filter.newBuilder().setFeatureSetName("f*").build()); - List expectedFeatureSets = featureSets.stream() - .filter(fs -> fs.getName().startsWith("f")) - .collect(Collectors.toList()); + ListFeatureSetsResponse actual = + specService.listFeatureSets(Filter.newBuilder().setFeatureSetName("f*").build()); + List expectedFeatureSets = + featureSets.stream() + .filter(fs -> fs.getName().startsWith("f")) + .collect(Collectors.toList()); List list = new ArrayList<>(); for (FeatureSet expectedFeatureSet : expectedFeatureSets) { FeatureSetSpec toProto = expectedFeatureSet.toProto(); list.add(toProto); } - ListFeatureSetsResponse expected = ListFeatureSetsResponse - .newBuilder() - .addAllFeatureSets( - list) - .build(); + ListFeatureSetsResponse expected = + ListFeatureSetsResponse.newBuilder().addAllFeatureSets(list).build(); assertThat(actual, equalTo(expected)); } @Test public void shouldGetAllFeatureSetsMatchingVersionIfNoComparator() throws InvalidProtocolBufferException { - ListFeatureSetsResponse actual = specService - .listFeatureSets( + ListFeatureSetsResponse actual = + specService.listFeatureSets( 
Filter.newBuilder().setFeatureSetName("f1").setFeatureSetVersion("1").build()); - List expectedFeatureSets = featureSets.stream() - .filter(fs -> fs.getName().equals("f1")) - .filter(fs -> fs.getVersion() == 1) - .collect(Collectors.toList()); + List expectedFeatureSets = + featureSets.stream() + .filter(fs -> fs.getName().equals("f1")) + .filter(fs -> fs.getVersion() == 1) + .collect(Collectors.toList()); List list = new ArrayList<>(); for (FeatureSet expectedFeatureSet : expectedFeatureSets) { FeatureSetSpec toProto = expectedFeatureSet.toProto(); list.add(toProto); } - ListFeatureSetsResponse expected = ListFeatureSetsResponse - .newBuilder() - .addAllFeatureSets( - list) - .build(); + ListFeatureSetsResponse expected = + ListFeatureSetsResponse.newBuilder().addAllFeatureSets(list).build(); assertThat(actual, equalTo(expected)); } @Test public void shouldGetAllFeatureSetsGivenVersionWithComparator() throws InvalidProtocolBufferException { - ListFeatureSetsResponse actual = specService - .listFeatureSets( + ListFeatureSetsResponse actual = + specService.listFeatureSets( Filter.newBuilder().setFeatureSetName("f1").setFeatureSetVersion(">1").build()); - List expectedFeatureSets = featureSets.stream() - .filter(fs -> fs.getName().equals("f1")) - .filter(fs -> fs.getVersion() > 1) - .collect(Collectors.toList()); + List expectedFeatureSets = + featureSets.stream() + .filter(fs -> fs.getName().equals("f1")) + .filter(fs -> fs.getVersion() > 1) + .collect(Collectors.toList()); List list = new ArrayList<>(); for (FeatureSet expectedFeatureSet : expectedFeatureSets) { FeatureSetSpec toProto = expectedFeatureSet.toProto(); list.add(toProto); } - ListFeatureSetsResponse expected = ListFeatureSetsResponse - .newBuilder() - .addAllFeatureSets( - list) - .build(); + ListFeatureSetsResponse expected = + ListFeatureSetsResponse.newBuilder().addAllFeatureSets(list).build(); assertThat(actual, equalTo(expected)); } @Test public void shouldGetLatestFeatureSetGivenMissingVersionFilter() throws InvalidProtocolBufferException { - GetFeatureSetResponse actual = specService - .getFeatureSet(GetFeatureSetRequest.newBuilder().setName("f1").build()); + GetFeatureSetResponse actual = + specService.getFeatureSet(GetFeatureSetRequest.newBuilder().setName("f1").build()); FeatureSet expected = featureSets.get(2); assertThat(actual.getFeatureSet(), equalTo(expected.toProto())); } @@ -239,8 +221,9 @@ public void shouldGetSpecificFeatureSetGivenSpecificVersionFilter() throws InvalidProtocolBufferException { when(featureSetRepository.findFeatureSetByNameAndVersion("f1", 2)) .thenReturn(featureSets.get(1)); - GetFeatureSetResponse actual = specService - .getFeatureSet(GetFeatureSetRequest.newBuilder().setName("f1").setVersion(2).build()); + GetFeatureSetResponse actual = + specService.getFeatureSet( + GetFeatureSetRequest.newBuilder().setName("f1").setVersion(2).build()); FeatureSet expected = featureSets.get(1); assertThat(actual.getFeatureSet(), equalTo(expected.toProto())); } @@ -254,11 +237,11 @@ public void shouldThrowExceptionGivenMissingFeatureSetName() } @Test - public void shouldThrowExceptionGivenMissingFeatureSet() - throws InvalidProtocolBufferException { + public void shouldThrowExceptionGivenMissingFeatureSet() throws InvalidProtocolBufferException { expectedException.expect(StatusRuntimeException.class); expectedException.expectMessage("NOT_FOUND: Feature set could not be found"); - specService.getFeatureSet(GetFeatureSetRequest.newBuilder().setName("f1000").setVersion(2).build()); + 
specService.getFeatureSet( + GetFeatureSetRequest.newBuilder().setName("f1000").setVersion(2).build()); } @Test @@ -272,8 +255,8 @@ public void shouldThrowRetrievalExceptionGivenInvalidFeatureSetVersionComparator @Test public void shouldReturnAllStoresIfNoNameProvided() throws InvalidProtocolBufferException { - ListStoresResponse actual = specService - .listStores(ListStoresRequest.Filter.newBuilder().build()); + ListStoresResponse actual = + specService.listStores(ListStoresRequest.Filter.newBuilder().build()); ListStoresResponse.Builder expected = ListStoresResponse.newBuilder(); for (Store expectedStore : stores) { expected.addStore(expectedStore.toProto()); @@ -283,10 +266,10 @@ public void shouldReturnAllStoresIfNoNameProvided() throws InvalidProtocolBuffer @Test public void shouldReturnStoreWithName() throws InvalidProtocolBufferException { - ListStoresResponse actual = specService - .listStores(ListStoresRequest.Filter.newBuilder().setName("SERVING").build()); - List expectedStores = stores.stream().filter(s -> s.getName().equals("SERVING")) - .collect(Collectors.toList()); + ListStoresResponse actual = + specService.listStores(ListStoresRequest.Filter.newBuilder().setName("SERVING").build()); + List expectedStores = + stores.stream().filter(s -> s.getName().equals("SERVING")).collect(Collectors.toList()); ListStoresResponse.Builder expected = ListStoresResponse.newBuilder(); for (Store expectedStore : expectedStores) { expected.addStore(expectedStore.toProto()); @@ -298,20 +281,16 @@ public void shouldReturnStoreWithName() throws InvalidProtocolBufferException { public void shouldThrowRetrievalExceptionIfNoStoresFoundWithName() { expectedException.expect(RetrievalException.class); expectedException.expectMessage("Store with name 'NOTFOUND' not found"); - specService - .listStores(ListStoresRequest.Filter.newBuilder().setName("NOTFOUND").build()); + specService.listStores(ListStoresRequest.Filter.newBuilder().setName("NOTFOUND").build()); } @Test public void applyFeatureSetShouldReturnFeatureSetWithLatestVersionIfFeatureSetHasNotChanged() throws InvalidProtocolBufferException { - FeatureSetSpec incomingFeatureSet = featureSets.get(2) - .toProto() - .toBuilder() - .clearVersion() - .build(); - ApplyFeatureSetResponse applyFeatureSetResponse = specService - .applyFeatureSet(incomingFeatureSet); + FeatureSetSpec incomingFeatureSet = + featureSets.get(2).toProto().toBuilder().clearVersion().build(); + ApplyFeatureSetResponse applyFeatureSetResponse = + specService.applyFeatureSet(incomingFeatureSet); verify(featureSetRepository, times(0)).save(ArgumentMatchers.any(FeatureSet.class)); assertThat(applyFeatureSetResponse.getStatus(), equalTo(Status.NO_CHANGE)); @@ -322,18 +301,13 @@ public void applyFeatureSetShouldReturnFeatureSetWithLatestVersionIfFeatureSetHa public void applyFeatureSetShouldApplyFeatureSetWithInitVersionIfNotExists() throws InvalidProtocolBufferException { when(featureSetRepository.findByName("f2")).thenReturn(Lists.newArrayList()); - FeatureSetSpec incomingFeatureSet = newDummyFeatureSet("f2", 1) - .toProto() - .toBuilder() - .clearVersion() - .build(); - ApplyFeatureSetResponse applyFeatureSetResponse = specService - .applyFeatureSet(incomingFeatureSet); + FeatureSetSpec incomingFeatureSet = + newDummyFeatureSet("f2", 1).toProto().toBuilder().clearVersion().build(); + ApplyFeatureSetResponse applyFeatureSetResponse = + specService.applyFeatureSet(incomingFeatureSet); verify(featureSetRepository).saveAndFlush(ArgumentMatchers.any(FeatureSet.class)); - FeatureSetSpec 
expected = incomingFeatureSet.toBuilder() - .setVersion(1) - .setSource(defaultSource.toProto()) - .build(); + FeatureSetSpec expected = + incomingFeatureSet.toBuilder().setVersion(1).setSource(defaultSource.toProto()).build(); assertThat(applyFeatureSetResponse.getStatus(), equalTo(Status.CREATED)); assertThat(applyFeatureSetResponse.getFeatureSet(), equalTo(expected)); } @@ -341,16 +315,18 @@ public void applyFeatureSetShouldApplyFeatureSetWithInitVersionIfNotExists() @Test public void applyFeatureSetShouldIncrementFeatureSetVersionIfAlreadyExists() throws InvalidProtocolBufferException { - FeatureSetSpec incomingFeatureSet = featureSets.get(2).toProto().toBuilder() - .clearVersion() - .addFeatures(FeatureSpec.newBuilder().setName("feature2").setValueType(Enum.STRING)) - .build(); - FeatureSetSpec expected = incomingFeatureSet.toBuilder() - .setVersion(4) - .setSource(defaultSource.toProto()) - .build(); - ApplyFeatureSetResponse applyFeatureSetResponse = specService - .applyFeatureSet(incomingFeatureSet); + FeatureSetSpec incomingFeatureSet = + featureSets + .get(2) + .toProto() + .toBuilder() + .clearVersion() + .addFeatures(FeatureSpec.newBuilder().setName("feature2").setValueType(Enum.STRING)) + .build(); + FeatureSetSpec expected = + incomingFeatureSet.toBuilder().setVersion(4).setSource(defaultSource.toProto()).build(); + ApplyFeatureSetResponse applyFeatureSetResponse = + specService.applyFeatureSet(incomingFeatureSet); verify(featureSetRepository).saveAndFlush(ArgumentMatchers.any(FeatureSet.class)); assertThat(applyFeatureSetResponse.getStatus(), equalTo(Status.CREATED)); assertThat(applyFeatureSetResponse.getFeatureSet(), equalTo(expected)); @@ -359,18 +335,20 @@ public void applyFeatureSetShouldIncrementFeatureSetVersionIfAlreadyExists() @Test public void shouldUpdateStoreIfConfigChanges() throws InvalidProtocolBufferException { when(storeRepository.findById("SERVING")).thenReturn(Optional.of(stores.get(0))); - StoreProto.Store newStore = StoreProto.Store.newBuilder() - .setName("SERVING") - .setType(StoreType.REDIS) - .setRedisConfig(RedisConfig.newBuilder()) - .addSubscriptions(Subscription.newBuilder().setName("a").setVersion(">1")) - .build(); - UpdateStoreResponse actual = specService - .updateStore(UpdateStoreRequest.newBuilder().setStore(newStore).build()); - UpdateStoreResponse expected = UpdateStoreResponse.newBuilder() - .setStore(newStore) - .setStatus(UpdateStoreResponse.Status.UPDATED) - .build(); + StoreProto.Store newStore = + StoreProto.Store.newBuilder() + .setName("SERVING") + .setType(StoreType.REDIS) + .setRedisConfig(RedisConfig.newBuilder()) + .addSubscriptions(Subscription.newBuilder().setName("a").setVersion(">1")) + .build(); + UpdateStoreResponse actual = + specService.updateStore(UpdateStoreRequest.newBuilder().setStore(newStore).build()); + UpdateStoreResponse expected = + UpdateStoreResponse.newBuilder() + .setStore(newStore) + .setStatus(UpdateStoreResponse.Status.UPDATED) + .build(); ArgumentCaptor argumentCaptor = ArgumentCaptor.forClass(Store.class); verify(storeRepository, times(1)).save(argumentCaptor.capture()); assertThat(argumentCaptor.getValue().toProto(), equalTo(newStore)); @@ -380,12 +358,14 @@ public void shouldUpdateStoreIfConfigChanges() throws InvalidProtocolBufferExcep @Test public void shouldDoNothingIfNoChange() throws InvalidProtocolBufferException { when(storeRepository.findById("SERVING")).thenReturn(Optional.of(stores.get(0))); - UpdateStoreResponse actual = specService - 
.updateStore(UpdateStoreRequest.newBuilder().setStore(stores.get(0).toProto()).build()); - UpdateStoreResponse expected = UpdateStoreResponse.newBuilder() - .setStore(stores.get(0).toProto()) - .setStatus(UpdateStoreResponse.Status.NO_CHANGE) - .build(); + UpdateStoreResponse actual = + specService.updateStore( + UpdateStoreRequest.newBuilder().setStore(stores.get(0).toProto()).build()); + UpdateStoreResponse expected = + UpdateStoreResponse.newBuilder() + .setStore(stores.get(0).toProto()) + .setStatus(UpdateStoreResponse.Status.NO_CHANGE) + .build(); verify(storeRepository, times(0)).save(ArgumentMatchers.any()); assertThat(actual, equalTo(expected)); } @@ -393,8 +373,8 @@ public void shouldDoNothingIfNoChange() throws InvalidProtocolBufferException { private FeatureSet newDummyFeatureSet(String name, int version) { Field feature = new Field(name, "feature", Enum.INT64); Field entity = new Field(name, "entity", Enum.STRING); - return new FeatureSet(name, version, 100L, Arrays.asList(entity), Arrays.asList(feature), - defaultSource); + return new FeatureSet( + name, version, 100L, Arrays.asList(entity), Arrays.asList(feature), defaultSource); } private Store newDummyStore(String name) { @@ -407,4 +387,3 @@ private Store newDummyStore(String name) { return store; } } - diff --git a/core/src/test/java/feast/core/util/TypeConversionTest.java b/core/src/test/java/feast/core/util/TypeConversionTest.java index decd26514d..07b4f9c7b1 100644 --- a/core/src/test/java/feast/core/util/TypeConversionTest.java +++ b/core/src/test/java/feast/core/util/TypeConversionTest.java @@ -17,16 +17,15 @@ package feast.core.util; -import com.google.protobuf.Timestamp; -import org.junit.Test; - -import java.util.*; - import static com.jayway.jsonpath.matchers.JsonPathMatchers.hasJsonPath; import static org.hamcrest.Matchers.equalTo; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; +import com.google.protobuf.Timestamp; +import java.util.*; +import org.junit.Test; + public class TypeConversionTest { @Test public void convertTimeStampShouldCorrectlyConvertDateToProtobufTimestamp() { @@ -68,7 +67,8 @@ public void convertJsonStringToMapShouldReturnEmptyMapForEmptyJson() { public void convertMapToJsonStringShouldReturnJsonStringForGivenMap() { Map input = new HashMap<>(); input.put("key", "value"); - assertThat(TypeConversion.convertMapToJsonString(input), hasJsonPath("$.key", equalTo("value"))); + assertThat( + TypeConversion.convertMapToJsonString(input), hasJsonPath("$.key", equalTo("value"))); } @Test @@ -77,7 +77,7 @@ public void convertJsonStringToArgsShouldReturnCorrectListOfArgs() { input.put("key", "value"); input.put("key2", "value2"); - String[] expected = new String[]{"--key=value", "--key2=value2"}; + String[] expected = new String[] {"--key=value", "--key2=value2"}; String[] actual = TypeConversion.convertMapToArgs(input); assertThat(actual.length, equalTo(expected.length)); assertTrue(Arrays.asList(actual).containsAll(Arrays.asList(expected))); diff --git a/core/src/test/java/feast/core/validators/MatchersTest.java b/core/src/test/java/feast/core/validators/MatchersTest.java index 6f01a4e26a..f167d67f4f 100644 --- a/core/src/test/java/feast/core/validators/MatchersTest.java +++ b/core/src/test/java/feast/core/validators/MatchersTest.java @@ -17,17 +17,16 @@ package feast.core.validators; +import static feast.core.validators.Matchers.checkLowerSnakeCase; +import static feast.core.validators.Matchers.checkUpperSnakeCase; + import com.google.common.base.Strings; import 
org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; -import static feast.core.validators.Matchers.checkLowerSnakeCase; -import static feast.core.validators.Matchers.checkUpperSnakeCase; - public class MatchersTest { - @Rule - public final ExpectedException exception = ExpectedException.none(); + @Rule public final ExpectedException exception = ExpectedException.none(); @Test public void checkUpperSnakeCaseShouldPassForLegitUpperSnakeCase() { @@ -44,7 +43,8 @@ public void checkUpperSnakeCaseShouldPassForLegitUpperSnakeCaseWithNumbers() { @Test public void checkUpperSnakeCaseShouldThrowIllegalArgumentExceptionWithFieldForInvalidString() { exception.expect(IllegalArgumentException.class); - exception.expectMessage(Strings.lenientFormat( + exception.expectMessage( + Strings.lenientFormat( "invalid value for field %s: %s", "someField", "argument must be in upper snake case, and cannot include any special characters.")); @@ -61,11 +61,12 @@ public void checkLowerSnakeCaseShouldPassForLegitLowerSnakeCase() { @Test public void checkLowerSnakeCaseShouldThrowIllegalArgumentExceptionWithFieldForInvalidString() { exception.expect(IllegalArgumentException.class); - exception.expectMessage(Strings.lenientFormat( + exception.expectMessage( + Strings.lenientFormat( "invalid value for field %s: %s", "someField", "argument must be in lower snake case, and cannot include any special characters.")); String in = "Invalid_feature name"; checkLowerSnakeCase(in, "someField"); } -} \ No newline at end of file +} diff --git a/ingestion/src/main/java/feast/ingestion/ImportJob.java b/ingestion/src/main/java/feast/ingestion/ImportJob.java index 0154872de0..7ea5f589ec 100644 --- a/ingestion/src/main/java/feast/ingestion/ImportJob.java +++ b/ingestion/src/main/java/feast/ingestion/ImportJob.java @@ -30,12 +30,10 @@ public class ImportJob { // Tag for main output containing Feature Row that has been successfully processed. - private static final TupleTag FEATURE_ROW_OUT = new TupleTag() { - }; + private static final TupleTag FEATURE_ROW_OUT = new TupleTag() {}; // Tag for deadletter output containing elements and error messages from invalid input/transform. - private static final TupleTag DEADLETTER_OUT = new TupleTag() { - }; + private static final TupleTag DEADLETTER_OUT = new TupleTag() {}; private static final Logger log = org.slf4j.LoggerFactory.getLogger(ImportJob.class); /** @@ -74,12 +72,14 @@ public static PipelineResult runPipeline(ImportOptions options) SpecUtil.getSubscribedFeatureSets(store.getSubscriptionsList(), featureSetSpecs); // Generate tags by key - Map featureSetSpecsByKey = subscribedFeatureSets.stream() - .map(fs -> { - String id = String.format("%s:%s", fs.getName(), fs.getVersion()); - return Pair.of(id, fs); - }) - .collect(Collectors.toMap(Pair::getLeft, Pair::getRight)); + Map featureSetSpecsByKey = + subscribedFeatureSets.stream() + .map( + fs -> { + String id = String.format("%s:%s", fs.getName(), fs.getVersion()); + return Pair.of(id, fs); + }) + .collect(Collectors.toMap(Pair::getLeft, Pair::getRight)); // TODO: make the source part of the job initialisation options Source source = subscribedFeatureSets.get(0).getSource(); @@ -100,20 +100,23 @@ public static PipelineResult runPipeline(ImportOptions options) } // Step 2. 
Validate incoming FeatureRows - PCollectionTuple validatedRows = convertedFeatureRows - .get(FEATURE_ROW_OUT) - .apply(ValidateFeatureRows.newBuilder() - .setFeatureSetSpecs(featureSetSpecsByKey) - .setSuccessTag(FEATURE_ROW_OUT) - .setFailureTag(DEADLETTER_OUT) - .build()); + PCollectionTuple validatedRows = + convertedFeatureRows + .get(FEATURE_ROW_OUT) + .apply( + ValidateFeatureRows.newBuilder() + .setFeatureSetSpecs(featureSetSpecsByKey) + .setSuccessTag(FEATURE_ROW_OUT) + .setFailureTag(DEADLETTER_OUT) + .build()); // Step 3. Write FeatureRow to the corresponding Store. validatedRows .get(FEATURE_ROW_OUT) .apply( "WriteFeatureRowToStore", - WriteToStore.newBuilder().setFeatureSetSpecs(featureSetSpecsByKey) + WriteToStore.newBuilder() + .setFeatureSetSpecs(featureSetSpecsByKey) .setStore(store) .build()); @@ -130,7 +133,8 @@ public static PipelineResult runPipeline(ImportOptions options) validatedRows .get(DEADLETTER_OUT) - .apply("WriteFailedElements_ValidateRows", + .apply( + "WriteFailedElements_ValidateRows", WriteFailedElementToBigQuery.newBuilder() .setJsonSchema(ResourceUtil.getDeadletterTableSchemaJson()) .setTableSpec(options.getDeadLetterTableSpec()) @@ -138,8 +142,9 @@ public static PipelineResult runPipeline(ImportOptions options) } // Step 5. Write metrics to a metrics sink. - validatedRows - .apply("WriteMetrics", WriteMetricsTransform.newBuilder() + validatedRows.apply( + "WriteMetrics", + WriteMetricsTransform.newBuilder() .setStoreName(store.getName()) .setSuccessTag(FEATURE_ROW_OUT) .setFailureTag(DEADLETTER_OUT) diff --git a/ingestion/src/main/java/feast/ingestion/options/ImportOptions.java b/ingestion/src/main/java/feast/ingestion/options/ImportOptions.java index 1417d22a4d..e379dc7a4e 100644 --- a/ingestion/src/main/java/feast/ingestion/options/ImportOptions.java +++ b/ingestion/src/main/java/feast/ingestion/options/ImportOptions.java @@ -61,31 +61,25 @@ public interface ImportOptions extends PipelineOptions, DataflowPipelineOptions, /** * @param deadLetterTableSpec (Optional) BigQuery table for storing elements that failed to be - * processed. Table spec must follow this format - * PROJECT_ID:DATASET_ID.PROJECT_ID + * processed. Table spec must follow this format PROJECT_ID:DATASET_ID.PROJECT_ID */ void setDeadLetterTableSpec(String deadLetterTableSpec); // TODO: expound - @Description( - "MetricsAccumulator exporter type to instantiate." - ) + @Description("MetricsAccumulator exporter type to instantiate.") @Default.String("none") String getMetricsExporterType(); void setMetricsExporterType(String metricsExporterType); - @Description( - "Host to write the metrics to. Required if the metrics exporter is set to StatsD." - ) + @Description("Host to write the metrics to. Required if the metrics exporter is set to StatsD.") @Default.String("localhost") String getStatsdHost(); void setStatsdHost(String StatsdHost); @Description( - "Port on StatsD server to write metrics to. Required if the metrics exporter is set to StatsD." - ) + "Port on StatsD server to write metrics to. 
Required if the metrics exporter is set to StatsD.") @Default.Integer(8125) int getStatsdPort(); diff --git a/ingestion/src/main/java/feast/ingestion/transform/ReadFromSource.java b/ingestion/src/main/java/feast/ingestion/transform/ReadFromSource.java index 469952708d..bbaed02011 100644 --- a/ingestion/src/main/java/feast/ingestion/transform/ReadFromSource.java +++ b/ingestion/src/main/java/feast/ingestion/transform/ReadFromSource.java @@ -2,14 +2,11 @@ import com.google.auto.value.AutoValue; import com.google.common.base.Preconditions; -import com.google.common.collect.Lists; import feast.core.SourceProto.Source; import feast.core.SourceProto.SourceType; import feast.ingestion.transform.fn.KafkaRecordToFeatureRowDoFn; import feast.ingestion.values.FailedElement; import feast.types.FeatureRowProto.FeatureRow; -import java.util.List; -import java.util.Map; import org.apache.beam.sdk.io.kafka.KafkaIO; import org.apache.beam.sdk.transforms.PTransform; import org.apache.beam.sdk.transforms.ParDo; @@ -74,10 +71,12 @@ public PCollectionTuple expand(PBegin input) { .withReadCommitted() .commitOffsetsInFinalize()) .apply( - "KafkaRecordToFeatureRow", ParDo.of(KafkaRecordToFeatureRowDoFn.newBuilder() - .setSuccessTag(getSuccessTag()) - .setFailureTag(getFailureTag()) - .build()) + "KafkaRecordToFeatureRow", + ParDo.of( + KafkaRecordToFeatureRowDoFn.newBuilder() + .setSuccessTag(getSuccessTag()) + .setFailureTag(getFailureTag()) + .build()) .withOutputTags(getSuccessTag(), TupleTagList.of(getFailureTag()))); } diff --git a/ingestion/src/main/java/feast/ingestion/transform/ValidateFeatureRows.java b/ingestion/src/main/java/feast/ingestion/transform/ValidateFeatureRows.java index a2bd23077f..f9261035ea 100644 --- a/ingestion/src/main/java/feast/ingestion/transform/ValidateFeatureRows.java +++ b/ingestion/src/main/java/feast/ingestion/transform/ValidateFeatureRows.java @@ -2,12 +2,11 @@ import com.google.auto.value.AutoValue; import feast.core.FeatureSetProto; -import feast.ingestion.values.FeatureSetSpec; import feast.ingestion.transform.fn.ValidateFeatureRowDoFn; import feast.ingestion.values.FailedElement; +import feast.ingestion.values.FeatureSetSpec; import feast.types.FeatureRowProto.FeatureRow; import java.util.Map; -import java.util.Map.Entry; import java.util.stream.Collectors; import org.apache.beam.sdk.transforms.PTransform; import org.apache.beam.sdk.transforms.ParDo; @@ -18,8 +17,8 @@ import org.apache.commons.lang3.tuple.Pair; @AutoValue -public abstract class ValidateFeatureRows extends - PTransform, PCollectionTuple> { +public abstract class ValidateFeatureRows + extends PTransform, PCollectionTuple> { public abstract Map getFeatureSetSpecs(); @@ -34,7 +33,8 @@ public static Builder newBuilder() { @AutoValue.Builder public abstract static class Builder { - public abstract Builder setFeatureSetSpecs(Map featureSetSpec); + public abstract Builder setFeatureSetSpecs( + Map featureSetSpec); public abstract Builder setSuccessTag(TupleTag successTag); @@ -46,18 +46,19 @@ public abstract static class Builder { @Override public PCollectionTuple expand(PCollection input) { - Map featureSetSpecs = getFeatureSetSpecs().entrySet().stream() - .map(e -> Pair.of(e.getKey(), new FeatureSetSpec(e.getValue()))) - .collect(Collectors.toMap(Pair::getLeft, Pair::getRight)); - - return input.apply("ValidateFeatureRows", - ParDo.of(ValidateFeatureRowDoFn.newBuilder() - .setFeatureSetSpecs(featureSetSpecs) - .setSuccessTag(getSuccessTag()) - .setFailureTag(getFailureTag()) - .build()) + Map featureSetSpecs = + 
getFeatureSetSpecs().entrySet().stream() + .map(e -> Pair.of(e.getKey(), new FeatureSetSpec(e.getValue()))) + .collect(Collectors.toMap(Pair::getLeft, Pair::getRight)); + + return input.apply( + "ValidateFeatureRows", + ParDo.of( + ValidateFeatureRowDoFn.newBuilder() + .setFeatureSetSpecs(featureSetSpecs) + .setSuccessTag(getSuccessTag()) + .setFailureTag(getFailureTag()) + .build()) .withOutputTags(getSuccessTag(), TupleTagList.of(getFailureTag()))); } - - } diff --git a/ingestion/src/main/java/feast/ingestion/transform/WriteFailedElementToBigQuery.java b/ingestion/src/main/java/feast/ingestion/transform/WriteFailedElementToBigQuery.java index fc66a8afae..e82e0c1e8c 100644 --- a/ingestion/src/main/java/feast/ingestion/transform/WriteFailedElementToBigQuery.java +++ b/ingestion/src/main/java/feast/ingestion/transform/WriteFailedElementToBigQuery.java @@ -27,12 +27,15 @@ public static Builder newBuilder() { public abstract static class Builder { /** - * @param tableSpec Table spec should follow the format "PROJECT_ID:DATASET_ID.TABLE_ID". Table will be created if not exists. + * @param tableSpec Table spec should follow the format "PROJECT_ID:DATASET_ID.TABLE_ID". Table + * will be created if not exists. */ public abstract Builder setTableSpec(String tableSpec); /** - * @param jsonSchema JSON string describing the schema of the table. + * @param jsonSchema JSON string describing the schema + * of the table. */ public abstract Builder setJsonSchema(String jsonSchema); diff --git a/ingestion/src/main/java/feast/ingestion/transform/WriteToStore.java b/ingestion/src/main/java/feast/ingestion/transform/WriteToStore.java index 201e739c06..806a5f8b57 100644 --- a/ingestion/src/main/java/feast/ingestion/transform/WriteToStore.java +++ b/ingestion/src/main/java/feast/ingestion/transform/WriteToStore.java @@ -2,7 +2,6 @@ import com.google.api.services.bigquery.model.TableDataInsertAllResponse.InsertErrors; import com.google.api.services.bigquery.model.TableRow; -import com.google.api.services.bigquery.model.TimePartitioning; import com.google.auto.value.AutoValue; import feast.core.FeatureSetProto.FeatureSetSpec; import feast.core.StoreProto.Store; @@ -63,7 +62,6 @@ public PDone expand(PCollection input) { switch (storeType) { case REDIS: - RedisConfig redisConfig = getStore().getRedisConfig(); input .apply( @@ -74,22 +72,21 @@ public PDone expand(PCollection input) { RedisCustomIO.write(redisConfig.getHost(), redisConfig.getPort())); break; case BIGQUERY: - BigQueryConfig bigqueryConfig = getStore().getBigqueryConfig(); WriteResult bigqueryWriteResult = - input - .apply( - "WriteTableRowToBigQuery", - BigQueryIO.write() - .to(new GetTableDestination(bigqueryConfig.getProjectId(), - bigqueryConfig.getDatasetId())) - .withFormatFunction(new FeatureRowToTableRow(options.getJobName())) - .withCreateDisposition(CreateDisposition.CREATE_NEVER) - .withWriteDisposition(WriteDisposition.WRITE_APPEND) - .withExtendedErrorInfo() - .withMethod(Method.STREAMING_INSERTS) - .withFailedInsertRetryPolicy(InsertRetryPolicy.retryTransientErrors())); + input.apply( + "WriteTableRowToBigQuery", + BigQueryIO.write() + .to( + new GetTableDestination( + bigqueryConfig.getProjectId(), bigqueryConfig.getDatasetId())) + .withFormatFunction(new FeatureRowToTableRow(options.getJobName())) + .withCreateDisposition(CreateDisposition.CREATE_NEVER) + .withWriteDisposition(WriteDisposition.WRITE_APPEND) + .withExtendedErrorInfo() + .withMethod(Method.STREAMING_INSERTS) + 
.withFailedInsertRetryPolicy(InsertRetryPolicy.retryTransientErrors())); if (options.getDeadLetterTableSpec() != null) { bigqueryWriteResult diff --git a/ingestion/src/main/java/feast/ingestion/transform/fn/KafkaRecordToFeatureRowDoFn.java b/ingestion/src/main/java/feast/ingestion/transform/fn/KafkaRecordToFeatureRowDoFn.java index 7d400f3810..f9975490d4 100644 --- a/ingestion/src/main/java/feast/ingestion/transform/fn/KafkaRecordToFeatureRowDoFn.java +++ b/ingestion/src/main/java/feast/ingestion/transform/fn/KafkaRecordToFeatureRowDoFn.java @@ -2,23 +2,18 @@ import com.google.auto.value.AutoValue; import com.google.protobuf.InvalidProtocolBufferException; -import feast.ingestion.transform.ReadFromSource; import feast.ingestion.transform.ReadFromSource.Builder; import feast.ingestion.values.FailedElement; -import feast.ingestion.values.Field; import feast.types.FeatureRowProto.FeatureRow; -import feast.types.FieldProto; -import feast.types.ValueProto.Value.ValCase; import java.util.Base64; -import java.util.Map; import org.apache.beam.sdk.io.kafka.KafkaRecord; import org.apache.beam.sdk.transforms.DoFn; import org.apache.beam.sdk.values.TupleTag; import org.apache.commons.lang3.exception.ExceptionUtils; @AutoValue -public abstract class KafkaRecordToFeatureRowDoFn extends - DoFn, FeatureRow> { +public abstract class KafkaRecordToFeatureRowDoFn + extends DoFn, FeatureRow> { public abstract TupleTag getSuccessTag(); diff --git a/ingestion/src/main/java/feast/ingestion/transform/fn/ValidateFeatureRowDoFn.java b/ingestion/src/main/java/feast/ingestion/transform/fn/ValidateFeatureRowDoFn.java index 3eff57004e..777b721d3b 100644 --- a/ingestion/src/main/java/feast/ingestion/transform/fn/ValidateFeatureRowDoFn.java +++ b/ingestion/src/main/java/feast/ingestion/transform/fn/ValidateFeatureRowDoFn.java @@ -1,9 +1,9 @@ package feast.ingestion.transform.fn; import com.google.auto.value.AutoValue; +import feast.ingestion.values.FailedElement; import feast.ingestion.values.FailedElement.Builder; import feast.ingestion.values.FeatureSetSpec; -import feast.ingestion.values.FailedElement; import feast.ingestion.values.Field; import feast.types.FeatureRowProto.FeatureRow; import feast.types.FieldProto; @@ -41,8 +41,8 @@ public abstract static class Builder { public void processElement(ProcessContext context) { String error = null; FeatureRow featureRow = context.element(); - FeatureSetSpec featureSetSpec = getFeatureSetSpecs() - .getOrDefault(featureRow.getFeatureSet(), null); + FeatureSetSpec featureSetSpec = + getFeatureSetSpecs().getOrDefault(featureRow.getFeatureSet(), null); if (featureSetSpec != null) { for (FieldProto.Field field : featureRow.getFieldsList()) { @@ -57,40 +57,36 @@ public void processElement(ProcessContext context) { // If value is set in the FeatureRow, make sure the value type matches // that defined in FeatureSetSpec if (!field.getValue().getValCase().equals(ValCase.VAL_NOT_SET)) { - int expectedTypeFieldNumber = - fieldSpec.getType().getNumber(); + int expectedTypeFieldNumber = fieldSpec.getType().getNumber(); int actualTypeFieldNumber = field.getValue().getValCase().getNumber(); if (expectedTypeFieldNumber != actualTypeFieldNumber) { error = String.format( "FeatureRow contains field '%s' with invalid type '%s'. Feast expects the field type to match that in FeatureSet '%s'. 
Please check the FeatureRow data.", - field.getName(), - field.getValue().getValCase(), - fieldSpec.getType()); + field.getName(), field.getValue().getValCase(), fieldSpec.getType()); break; } } } } else { - error = String.format( - "FeatureRow contains invalid feature set id %s. Please check that the feature rows are being published to the correct topic on the feature stream.", - featureRow.getFeatureSet()); + error = + String.format( + "FeatureRow contains invalid feature set id %s. Please check that the feature rows are being published to the correct topic on the feature stream.", + featureRow.getFeatureSet()); } if (error != null) { - FailedElement.Builder failedElement = FailedElement.newBuilder() - .setTransformName("ValidateFeatureRow") - .setJobName(context.getPipelineOptions().getJobName()) - .setPayload(featureRow.toString()) - .setErrorMessage(error); + FailedElement.Builder failedElement = + FailedElement.newBuilder() + .setTransformName("ValidateFeatureRow") + .setJobName(context.getPipelineOptions().getJobName()) + .setPayload(featureRow.toString()) + .setErrorMessage(error); if (featureSetSpec != null) { String[] split = featureSetSpec.getId().split(":"); - failedElement = failedElement - .setFeatureSetName(split[0]) - .setFeatureSetVersion(split[1]); + failedElement = failedElement.setFeatureSetName(split[0]).setFeatureSetVersion(split[1]); } - context.output( - getFailureTag(), failedElement.build()); + context.output(getFailureTag(), failedElement.build()); } else { context.output(getSuccessTag(), featureRow); } diff --git a/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteDeadletterRowMetricsDoFn.java b/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteDeadletterRowMetricsDoFn.java index 884bd7a173..382063236b 100644 --- a/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteDeadletterRowMetricsDoFn.java +++ b/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteDeadletterRowMetricsDoFn.java @@ -9,11 +9,10 @@ import org.slf4j.Logger; @AutoValue -public abstract class WriteDeadletterRowMetricsDoFn extends - DoFn { +public abstract class WriteDeadletterRowMetricsDoFn extends DoFn { - private static final Logger log = org.slf4j.LoggerFactory - .getLogger(WriteDeadletterRowMetricsDoFn.class); + private static final Logger log = + org.slf4j.LoggerFactory.getLogger(WriteDeadletterRowMetricsDoFn.class); private final String INGESTION_JOB_NAME_KEY = "ingestion_job_name"; private final String METRIC_PREFIX = "feast_ingestion"; @@ -43,23 +42,20 @@ public abstract static class Builder { public abstract Builder setStatsdPort(int statsdPort); public abstract WriteDeadletterRowMetricsDoFn build(); - } @Setup public void setup() { - statsd = new NonBlockingStatsDClient( - METRIC_PREFIX, - getStatsdHost(), - getStatsdPort() - ); + statsd = new NonBlockingStatsDClient(METRIC_PREFIX, getStatsdHost(), getStatsdPort()); } @ProcessElement public void processElement(ProcessContext c) { FailedElement ignored = c.element(); try { - statsd.count("deadletter_row_count", 1, + statsd.count( + "deadletter_row_count", + 1, STORE_TAG_KEY + ":" + getStoreName(), FEATURE_SET_NAME_TAG_KEY + ":" + ignored.getFeatureSetName(), FEATURE_SET_VERSION_TAG_KEY + ":" + ignored.getFeatureSetVersion(), diff --git a/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteMetricsTransform.java b/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteMetricsTransform.java index 755089d363..5a460df0ee 100644 --- 
a/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteMetricsTransform.java +++ b/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteMetricsTransform.java @@ -10,8 +10,6 @@ import org.apache.beam.sdk.values.PCollectionTuple; import org.apache.beam.sdk.values.PDone; import org.apache.beam.sdk.values.TupleTag; -import org.slf4j.Logger; - @AutoValue public abstract class WriteMetricsTransform extends PTransform { @@ -30,6 +28,7 @@ public static Builder newBuilder() { public abstract static class Builder { public abstract Builder setStoreName(String storeName); + public abstract Builder setSuccessTag(TupleTag successTag); public abstract Builder setFailureTag(TupleTag failureTag); @@ -39,36 +38,43 @@ public abstract static class Builder { @Override public PDone expand(PCollectionTuple input) { - ImportOptions options = input.getPipeline().getOptions() - .as(ImportOptions.class); + ImportOptions options = input.getPipeline().getOptions().as(ImportOptions.class); switch (options.getMetricsExporterType()) { case "statsd": + input + .get(getFailureTag()) + .apply( + "WriteDeadletterMetrics", + ParDo.of( + WriteDeadletterRowMetricsDoFn.newBuilder() + .setStatsdHost(options.getStatsdHost()) + .setStatsdPort(options.getStatsdPort()) + .setStoreName(getStoreName()) + .build())); - input.get(getFailureTag()) - .apply("WriteDeadletterMetrics", ParDo.of( - WriteDeadletterRowMetricsDoFn.newBuilder() - .setStatsdHost(options.getStatsdHost()) - .setStatsdPort(options.getStatsdPort()) - .setStoreName(getStoreName()) - .build())); - - input.get(getSuccessTag()) - .apply("WriteRowMetrics", ParDo - .of(WriteRowMetricsDoFn.newBuilder() - .setStatsdHost(options.getStatsdHost()) - .setStatsdPort(options.getStatsdPort()) - .setStoreName(getStoreName()) - .build())); + input + .get(getSuccessTag()) + .apply( + "WriteRowMetrics", + ParDo.of( + WriteRowMetricsDoFn.newBuilder() + .setStatsdHost(options.getStatsdHost()) + .setStatsdPort(options.getStatsdPort()) + .setStoreName(getStoreName()) + .build())); return PDone.in(input.getPipeline()); case "none": default: - input.get(getSuccessTag()).apply("Noop", - ParDo.of(new DoFn() { - @ProcessElement - public void processElement(ProcessContext c) { - } - })); + input + .get(getSuccessTag()) + .apply( + "Noop", + ParDo.of( + new DoFn() { + @ProcessElement + public void processElement(ProcessContext c) {} + })); return PDone.in(input.getPipeline()); } } diff --git a/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteRowMetricsDoFn.java b/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteRowMetricsDoFn.java index e0bcf48cea..ba65b56b0f 100644 --- a/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteRowMetricsDoFn.java +++ b/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteRowMetricsDoFn.java @@ -29,8 +29,11 @@ public abstract class WriteRowMetricsDoFn extends DoFn { public abstract int getStatsdPort(); - public static WriteRowMetricsDoFn create(String newStoreName, FeatureSetSpec newFeatureSetSpec, - String newStatsdHost, int newStatsdPort) { + public static WriteRowMetricsDoFn create( + String newStoreName, + FeatureSetSpec newFeatureSetSpec, + String newStatsdHost, + int newStatsdPort) { return newBuilder() .setStoreName(newStoreName) .setStatsdHost(newStatsdHost) @@ -58,11 +61,7 @@ public abstract static class Builder { @Setup public void setup() { - statsd = new NonBlockingStatsDClient( - METRIC_PREFIX, - getStatsdHost(), - getStatsdPort() - ); + statsd = new NonBlockingStatsDClient(METRIC_PREFIX, 
getStatsdHost(), getStatsdPort()); } @ProcessElement @@ -76,13 +75,17 @@ public void processElement(ProcessContext c) { String featureSetName = split[0]; String featureSetVersion = split[1]; - statsd.histogram("feature_row_lag_ms", System.currentTimeMillis() - eventTimestamp, + statsd.histogram( + "feature_row_lag_ms", + System.currentTimeMillis() - eventTimestamp, STORE_TAG_KEY + ":" + getStoreName(), FEATURE_SET_NAME_TAG_KEY + ":" + featureSetName, FEATURE_SET_VERSION_TAG_KEY + ":" + featureSetVersion, INGESTION_JOB_NAME_KEY + ":" + c.getPipelineOptions().getJobName()); - statsd.histogram("feature_row_event_time_epoch_ms", eventTimestamp, + statsd.histogram( + "feature_row_event_time_epoch_ms", + eventTimestamp, STORE_TAG_KEY + ":" + getStoreName(), FEATURE_SET_NAME_TAG_KEY + ":" + featureSetName, FEATURE_SET_VERSION_TAG_KEY + ":" + featureSetVersion, @@ -90,14 +93,18 @@ public void processElement(ProcessContext c) { for (Field field : row.getFieldsList()) { if (!field.getValue().getValCase().equals(ValCase.VAL_NOT_SET)) { - statsd.histogram("feature_value_lag_ms", System.currentTimeMillis() - eventTimestamp, + statsd.histogram( + "feature_value_lag_ms", + System.currentTimeMillis() - eventTimestamp, STORE_TAG_KEY + ":" + getStoreName(), FEATURE_SET_NAME_TAG_KEY + ":" + featureSetName, FEATURE_SET_VERSION_TAG_KEY + ":" + featureSetVersion, FEATURE_TAG_KEY + ":" + field.getName(), INGESTION_JOB_NAME_KEY + ":" + c.getPipelineOptions().getJobName()); } else { - statsd.count("feature_value_missing_count", 1, + statsd.count( + "feature_value_missing_count", + 1, STORE_TAG_KEY + ":" + getStoreName(), FEATURE_SET_NAME_TAG_KEY + ":" + featureSetName, FEATURE_SET_VERSION_TAG_KEY + ":" + featureSetVersion, @@ -106,14 +113,15 @@ public void processElement(ProcessContext c) { } } - statsd.count("feature_row_ingested_count", 1, + statsd.count( + "feature_row_ingested_count", + 1, STORE_TAG_KEY + ":" + getStoreName(), FEATURE_SET_NAME_TAG_KEY + ":" + featureSetName, FEATURE_SET_VERSION_TAG_KEY + ":" + featureSetVersion, INGESTION_JOB_NAME_KEY + ":" + c.getPipelineOptions().getJobName()); - } catch ( - StatsDClientException e) { + } catch (StatsDClientException e) { log.warn("Unable to push metrics to server", e); } } diff --git a/ingestion/src/main/java/feast/ingestion/utils/JsonUtil.java b/ingestion/src/main/java/feast/ingestion/utils/JsonUtil.java index 4f6c9d0438..6634d7e8bf 100644 --- a/ingestion/src/main/java/feast/ingestion/utils/JsonUtil.java +++ b/ingestion/src/main/java/feast/ingestion/utils/JsonUtil.java @@ -37,8 +37,7 @@ public static Map convertJsonStringToMap(String jsonString) { if (jsonString == null || jsonString.equals("") || jsonString.equals("{}")) { return Collections.emptyMap(); } - Type stringMapType = new TypeToken>() { - }.getType(); + Type stringMapType = new TypeToken>() {}.getType(); return gson.fromJson(jsonString, stringMapType); } } diff --git a/ingestion/src/main/java/feast/ingestion/utils/SpecUtil.java b/ingestion/src/main/java/feast/ingestion/utils/SpecUtil.java index 689b284f4f..e955f235db 100644 --- a/ingestion/src/main/java/feast/ingestion/utils/SpecUtil.java +++ b/ingestion/src/main/java/feast/ingestion/utils/SpecUtil.java @@ -16,9 +16,7 @@ public class SpecUtil { - /** - * Get only feature set specs that matches the subscription - */ + /** Get only feature set specs that matches the subscription */ public static List getSubscribedFeatureSets( List subscriptions, List featureSetSpecs) { List subscribed = new ArrayList<>(); diff --git 
a/ingestion/src/main/java/feast/ingestion/values/FailedElement.java b/ingestion/src/main/java/feast/ingestion/values/FailedElement.java index 2d808ec1c5..ec1a6afcb1 100644 --- a/ingestion/src/main/java/feast/ingestion/values/FailedElement.java +++ b/ingestion/src/main/java/feast/ingestion/values/FailedElement.java @@ -2,7 +2,6 @@ import com.google.auto.value.AutoValue; import javax.annotation.Nullable; -import javax.validation.constraints.Null; import org.apache.beam.sdk.schemas.AutoValueSchema; import org.apache.beam.sdk.schemas.annotations.DefaultSchema; import org.joda.time.Instant; diff --git a/ingestion/src/main/java/feast/ingestion/values/FeatureSetSpec.java b/ingestion/src/main/java/feast/ingestion/values/FeatureSetSpec.java index 56becec0d0..ef1e38f5ec 100644 --- a/ingestion/src/main/java/feast/ingestion/values/FeatureSetSpec.java +++ b/ingestion/src/main/java/feast/ingestion/values/FeatureSetSpec.java @@ -7,8 +7,8 @@ import java.util.Map; /** - * This class represents {@link feast.core.FeatureSetProto.FeatureSetSpec} but - * contains fields directly accessible by name for feature validation purposes. + * This class represents {@link feast.core.FeatureSetProto.FeatureSetSpec} but contains fields + * directly accessible by name for feature validation purposes. * *
<p>
The use for this class is mainly for validating the Fields in FeatureRow. */ diff --git a/ingestion/src/main/java/feast/store/serving/bigquery/GetTableDestination.java b/ingestion/src/main/java/feast/store/serving/bigquery/GetTableDestination.java index 14517ffd81..cf02713ca9 100644 --- a/ingestion/src/main/java/feast/store/serving/bigquery/GetTableDestination.java +++ b/ingestion/src/main/java/feast/store/serving/bigquery/GetTableDestination.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package feast.store.serving.bigquery; import com.google.api.services.bigquery.model.TimePartitioning; @@ -6,8 +22,8 @@ import org.apache.beam.sdk.transforms.SerializableFunction; import org.apache.beam.sdk.values.ValueInSingleWindow; -public class GetTableDestination implements - SerializableFunction, TableDestination> { +public class GetTableDestination + implements SerializableFunction, TableDestination> { private String projectId; private String datasetId; @@ -28,8 +44,7 @@ public TableDestination apply(ValueInSingleWindow input) { return new TableDestination( String.format("%s:%s.%s_v%s", projectId, datasetId, split[0], split[1]), - String - .format("Feast table for %s", input.getValue().getFeatureSet()), + String.format("Feast table for %s", input.getValue().getFeatureSet()), timePartitioning); } -} \ No newline at end of file +} diff --git a/ingestion/src/main/java/feast/store/serving/redis/FeatureRowToRedisMutationDoFn.java b/ingestion/src/main/java/feast/store/serving/redis/FeatureRowToRedisMutationDoFn.java index d54b4dbcb3..d8f4f85dea 100644 --- a/ingestion/src/main/java/feast/store/serving/redis/FeatureRowToRedisMutationDoFn.java +++ b/ingestion/src/main/java/feast/store/serving/redis/FeatureRowToRedisMutationDoFn.java @@ -33,9 +33,9 @@ public class FeatureRowToRedisMutationDoFn extends DoFn { - private static final Logger log = org.slf4j.LoggerFactory - .getLogger(FeatureRowToRedisMutationDoFn.class); - private Map featureSetSpecs; + private static final Logger log = + org.slf4j.LoggerFactory.getLogger(FeatureRowToRedisMutationDoFn.class); + private Map featureSetSpecs; public FeatureRowToRedisMutationDoFn(Map featureSetSpecs) { this.featureSetSpecs = featureSetSpecs; @@ -43,11 +43,12 @@ public FeatureRowToRedisMutationDoFn(Map featureSetSpecs private RedisKey getKey(FeatureRow featureRow) { FeatureSetSpec featureSetSpec = featureSetSpecs.get(featureRow.getFeatureSet()); - Set entityNames = featureSetSpec.getEntitiesList().stream() - .map(EntitySpec::getName).collect(Collectors.toSet()); + Set entityNames = + featureSetSpec.getEntitiesList().stream() + .map(EntitySpec::getName) + .collect(Collectors.toSet()); - Builder redisKeyBuilder = RedisKey.newBuilder() - .setFeatureSet(featureRow.getFeatureSet()); + Builder redisKeyBuilder = RedisKey.newBuilder().setFeatureSet(featureRow.getFeatureSet()); for (Field field : featureRow.getFieldsList()) { if 
(entityNames.contains(field.getName())) { redisKeyBuilder.addEntities(field); @@ -56,16 +57,14 @@ private RedisKey getKey(FeatureRow featureRow) { return redisKeyBuilder.build(); } - /** - * Output a redis mutation object for every feature in the feature row. - */ + /** Output a redis mutation object for every feature in the feature row. */ @ProcessElement public void processElement(ProcessContext context) { FeatureRow featureRow = context.element(); try { RedisKey key = getKey(featureRow); - RedisMutation redisMutation = new RedisMutation(Method.SET, key.toByteArray(), - featureRow.toByteArray(), null, null); + RedisMutation redisMutation = + new RedisMutation(Method.SET, key.toByteArray(), featureRow.toByteArray(), null, null); context.output(redisMutation); } catch (Exception e) { log.error(e.getMessage(), e); diff --git a/ingestion/src/main/java/feast/store/serving/redis/RedisCustomIO.java b/ingestion/src/main/java/feast/store/serving/redis/RedisCustomIO.java index d5b6c695d6..c575e03dbd 100644 --- a/ingestion/src/main/java/feast/store/serving/redis/RedisCustomIO.java +++ b/ingestion/src/main/java/feast/store/serving/redis/RedisCustomIO.java @@ -39,8 +39,7 @@ public class RedisCustomIO { private static final Logger log = LoggerFactory.getLogger(RedisCustomIO.class); - private RedisCustomIO() { - } + private RedisCustomIO() {} public static Write write(String host, int port) { return new Write(host, port); @@ -54,9 +53,7 @@ public enum Method { */ APPEND, - /** - * Use SET command. If key already holds a value, it is overwritten. - */ + /** Use SET command. If key already holds a value, it is overwritten. */ SET, /** @@ -95,14 +92,16 @@ public static class RedisMutation { private Method method; private byte[] key; private byte[] value; - @Nullable - private Long expiryMillis; - @Nullable - private Long score; + @Nullable private Long expiryMillis; + @Nullable private Long score; public RedisMutation() {} - public RedisMutation(Method method, byte[] key, byte[] value, @Nullable Long expiryMillis, + public RedisMutation( + Method method, + byte[] key, + byte[] value, + @Nullable Long expiryMillis, @Nullable Long score) { this.method = method; this.key = key; @@ -154,9 +153,7 @@ public void setScore(@Nullable Long score) { } } - /** - * ServingStoreWrite data to a Redis server. - */ + /** ServingStoreWrite data to a Redis server. */ public static class Write extends PTransform, PDone> { private WriteDoFn dofn; diff --git a/ingestion/src/test/java/feast/FeastMatchers.java b/ingestion/src/test/java/feast/FeastMatchers.java index eb469b5c93..442c8f85ee 100644 --- a/ingestion/src/test/java/feast/FeastMatchers.java +++ b/ingestion/src/test/java/feast/FeastMatchers.java @@ -23,9 +23,7 @@ public class FeastMatchers { - /** - * Can be used with the PAssert.that(..).satisfies(fn) method. - */ + /** Can be used with the PAssert.that(..).satisfies(fn) method. 
*/ public static SerializableFunction, Void> hasCount(long count) { return (Iterable iterable) -> { Assert.assertEquals(count, Lists.newArrayList(iterable).size()); diff --git a/ingestion/src/test/java/feast/ToOrderedFeatureRows.java b/ingestion/src/test/java/feast/ToOrderedFeatureRows.java index f18ee8bb9a..b691cf9d77 100644 --- a/ingestion/src/test/java/feast/ToOrderedFeatureRows.java +++ b/ingestion/src/test/java/feast/ToOrderedFeatureRows.java @@ -19,9 +19,9 @@ import com.google.common.collect.Lists; import com.google.common.primitives.UnsignedBytes; -import feast.types.FieldProto.Field; import feast.types.FeatureRowExtendedProto.FeatureRowExtended; import feast.types.FeatureRowProto.FeatureRow; +import feast.types.FieldProto.Field; import java.util.List; import org.apache.beam.sdk.transforms.MapElements; import org.apache.beam.sdk.transforms.PTransform; @@ -47,7 +47,7 @@ public PCollection expand(PCollection input) { MapElements.into(TypeDescriptor.of(FeatureRow.class)).via(FeatureRowExtended::getRow)) .apply( "normalize rows", - MapElements.into(TypeDescriptor.of(FeatureRow.class)).via( - ToOrderedFeatureRows::orderedFeatureRow)); + MapElements.into(TypeDescriptor.of(FeatureRow.class)) + .via(ToOrderedFeatureRows::orderedFeatureRow)); } } diff --git a/ingestion/src/test/java/feast/ingestion/ImportJobTest.java b/ingestion/src/test/java/feast/ingestion/ImportJobTest.java index 5bf9a98377..6a5940918c 100644 --- a/ingestion/src/test/java/feast/ingestion/ImportJobTest.java +++ b/ingestion/src/test/java/feast/ingestion/ImportJobTest.java @@ -54,6 +54,7 @@ public class ImportJobTest { @SuppressWarnings("UnstableApiUsage") private static final String ZOOKEEPER_DATA_DIR = Files.createTempDir().getAbsolutePath(); + private static final String ZOOKEEPER_HOST = "localhost"; private static final int ZOOKEEPER_PORT = 2182; @@ -67,8 +68,14 @@ public class ImportJobTest { @BeforeClass public static void setup() throws IOException, InterruptedException { - LocalKafka.start(KAFKA_HOST, KAFKA_PORT, KAFKA_REPLICATION_FACTOR, true, ZOOKEEPER_HOST, - ZOOKEEPER_PORT, ZOOKEEPER_DATA_DIR); + LocalKafka.start( + KAFKA_HOST, + KAFKA_PORT, + KAFKA_REPLICATION_FACTOR, + true, + ZOOKEEPER_HOST, + ZOOKEEPER_PORT, + ZOOKEEPER_DATA_DIR); LocalRedis.start(REDIS_PORT); } @@ -82,31 +89,50 @@ public static void tearDown() { public void runPipeline_ShouldWriteToRedisCorrectlyGivenValidSpecAndFeatureRow() throws IOException, InterruptedException { FeatureSetSpec spec = - FeatureSetSpec.newBuilder().setName("feature_set").setVersion(3) - .addEntities(EntitySpec.newBuilder() - .setName("entity_id_primary").setValueType(Enum.INT32).build()) - .addEntities(EntitySpec.newBuilder() - .setName("entity_id_secondary").setValueType(Enum.STRING).build()) - .addFeatures(FeatureSpec.newBuilder() - .setName("feature_1").setValueType(Enum.STRING_LIST).build()) - .addFeatures(FeatureSpec.newBuilder() - .setName("feature_2").setValueType(Enum.STRING).build()) - .addFeatures(FeatureSpec.newBuilder() - .setName("feature_3").setValueType(Enum.INT64).build()) - .setSource(Source.newBuilder() - .setType(SourceType.KAFKA).setKafkaSourceConfig( - KafkaSourceConfig.newBuilder() - .setBootstrapServers(KAFKA_HOST + ":" + KAFKA_PORT) - .setTopic(KAFKA_TOPIC).build()) - .build()) + FeatureSetSpec.newBuilder() + .setName("feature_set") + .setVersion(3) + .addEntities( + EntitySpec.newBuilder() + .setName("entity_id_primary") + .setValueType(Enum.INT32) + .build()) + .addEntities( + EntitySpec.newBuilder() + .setName("entity_id_secondary") + 
.setValueType(Enum.STRING) + .build()) + .addFeatures( + FeatureSpec.newBuilder() + .setName("feature_1") + .setValueType(Enum.STRING_LIST) + .build()) + .addFeatures( + FeatureSpec.newBuilder().setName("feature_2").setValueType(Enum.STRING).build()) + .addFeatures( + FeatureSpec.newBuilder().setName("feature_3").setValueType(Enum.INT64).build()) + .setSource( + Source.newBuilder() + .setType(SourceType.KAFKA) + .setKafkaSourceConfig( + KafkaSourceConfig.newBuilder() + .setBootstrapServers(KAFKA_HOST + ":" + KAFKA_PORT) + .setTopic(KAFKA_TOPIC) + .build()) + .build()) .build(); Store redis = - Store.newBuilder().setName(StoreType.REDIS.toString()).setType(StoreType.REDIS) - .setRedisConfig(RedisConfig.newBuilder() - .setHost(REDIS_HOST).setPort(REDIS_PORT).build()) - .addSubscriptions(Subscription.newBuilder() - .setName(spec.getName()).setVersion(String.valueOf(spec.getVersion())).build()) + Store.newBuilder() + .setName(StoreType.REDIS.toString()) + .setType(StoreType.REDIS) + .setRedisConfig( + RedisConfig.newBuilder().setHost(REDIS_HOST).setPort(REDIS_PORT).build()) + .addSubscriptions( + Subscription.newBuilder() + .setName(spec.getName()) + .setVersion(String.valueOf(spec.getVersion())) + .build()) .build(); ImportOptions options = PipelineOptionsFactory.create().as(ImportOptions.class); @@ -124,12 +150,14 @@ public void runPipeline_ShouldWriteToRedisCorrectlyGivenValidSpecAndFeatureRow() Map expected = new HashMap<>(); LOGGER.info("Generating test data ..."); - IntStream.range(0, inputSize).forEach(i -> { - FeatureRow randomRow = TestUtil.createRandomFeatureRow(spec); - RedisKey redisKey = TestUtil.createRedisKey(spec, randomRow); - input.add(randomRow); - expected.put(redisKey, randomRow); - }); + IntStream.range(0, inputSize) + .forEach( + i -> { + FeatureRow randomRow = TestUtil.createRandomFeatureRow(spec); + RedisKey redisKey = TestUtil.createRedisKey(spec, randomRow); + input.add(randomRow); + expected.put(redisKey, randomRow); + }); LOGGER.info("Starting Import Job with the following options: {}", options.toString()); PipelineResult pipelineResult = ImportJob.runPipeline(options); @@ -137,24 +165,30 @@ public void runPipeline_ShouldWriteToRedisCorrectlyGivenValidSpecAndFeatureRow() Assert.assertEquals(pipelineResult.getState(), State.RUNNING); LOGGER.info("Publishing {} Feature Row messages to Kafka ...", input.size()); - TestUtil.publishFeatureRowsToKafka(KAFKA_BOOTSTRAP_SERVERS, KAFKA_TOPIC, input, - ByteArraySerializer.class, KAFKA_PUBLISH_TIMEOUT_SEC); + TestUtil.publishFeatureRowsToKafka( + KAFKA_BOOTSTRAP_SERVERS, + KAFKA_TOPIC, + input, + ByteArraySerializer.class, + KAFKA_PUBLISH_TIMEOUT_SEC); Thread.sleep(Duration.ofSeconds(IMPORT_JOB_RUN_DURATION_SEC).toMillis()); LOGGER.info("Validating the actual values written to Redis ..."); Jedis jedis = new Jedis(REDIS_HOST, REDIS_PORT); - expected.forEach((key, expectedValue) -> { - byte[] actualByteValue = jedis.get(key.toByteArray()); - Assert.assertNotNull("Key not found in Redis: " + key, actualByteValue); - FeatureRow actualValue = null; - try { - actualValue = FeatureRow.parseFrom(actualByteValue); - } catch (InvalidProtocolBufferException e) { - Assert.fail(String - .format("Actual Redis value cannot be parsed as FeatureRow, key: %s, value :%s", - key, new String(actualByteValue, StandardCharsets.UTF_8))); - } - Assert.assertEquals(expectedValue, actualValue); - }); + expected.forEach( + (key, expectedValue) -> { + byte[] actualByteValue = jedis.get(key.toByteArray()); + Assert.assertNotNull("Key not found in Redis: " 
+ key, actualByteValue); + FeatureRow actualValue = null; + try { + actualValue = FeatureRow.parseFrom(actualByteValue); + } catch (InvalidProtocolBufferException e) { + Assert.fail( + String.format( + "Actual Redis value cannot be parsed as FeatureRow, key: %s, value :%s", + key, new String(actualByteValue, StandardCharsets.UTF_8))); + } + Assert.assertEquals(expectedValue, actualValue); + }); } } diff --git a/ingestion/src/test/java/feast/ingestion/transform/ValidateFeatureRowsTest.java b/ingestion/src/test/java/feast/ingestion/transform/ValidateFeatureRowsTest.java index e4d1e76640..107821d2eb 100644 --- a/ingestion/src/test/java/feast/ingestion/transform/ValidateFeatureRowsTest.java +++ b/ingestion/src/test/java/feast/ingestion/transform/ValidateFeatureRowsTest.java @@ -5,9 +5,6 @@ import feast.core.FeatureSetProto.EntitySpec; import feast.core.FeatureSetProto.FeatureSetSpec; import feast.core.FeatureSetProto.FeatureSpec; -import feast.core.SourceProto.KafkaSourceConfig; -import feast.core.SourceProto.Source; -import feast.core.SourceProto.SourceType; import feast.ingestion.values.FailedElement; import feast.test.TestUtil; import feast.types.FeatureRowProto.FeatureRow; @@ -27,39 +24,53 @@ import org.junit.Test; public class ValidateFeatureRowsTest { - @Rule - public transient TestPipeline p = TestPipeline.create(); + @Rule public transient TestPipeline p = TestPipeline.create(); - private static final TupleTag SUCCESS_TAG = new TupleTag() { - }; + private static final TupleTag SUCCESS_TAG = new TupleTag() {}; - private static final TupleTag FAILURE_TAG = new TupleTag() { - }; + private static final TupleTag FAILURE_TAG = new TupleTag() {}; @Test public void shouldWriteSuccessAndFailureTagsCorrectly() { - FeatureSetSpec fs1 = FeatureSetSpec.newBuilder().setName("feature_set").setVersion(1) - .addEntities(EntitySpec.newBuilder() - .setName("entity_id_primary").setValueType(Enum.INT32).build()) - .addEntities(EntitySpec.newBuilder() - .setName("entity_id_secondary").setValueType(Enum.STRING).build()) - .addFeatures(FeatureSpec.newBuilder() - .setName("feature_1").setValueType(Enum.STRING).build()) - .addFeatures(FeatureSpec.newBuilder() - .setName("feature_2").setValueType(Enum.INT64).build()) - .build(); - - FeatureSetSpec fs2 = FeatureSetSpec.newBuilder().setName("feature_set").setVersion(2) - .addEntities(EntitySpec.newBuilder() - .setName("entity_id_primary").setValueType(Enum.INT32).build()) - .addEntities(EntitySpec.newBuilder() - .setName("entity_id_secondary").setValueType(Enum.STRING).build()) - .addFeatures(FeatureSpec.newBuilder() - .setName("feature_1").setValueType(Enum.STRING).build()) - .addFeatures(FeatureSpec.newBuilder() - .setName("feature_2").setValueType(Enum.INT64).build()) - .build(); + FeatureSetSpec fs1 = + FeatureSetSpec.newBuilder() + .setName("feature_set") + .setVersion(1) + .addEntities( + EntitySpec.newBuilder() + .setName("entity_id_primary") + .setValueType(Enum.INT32) + .build()) + .addEntities( + EntitySpec.newBuilder() + .setName("entity_id_secondary") + .setValueType(Enum.STRING) + .build()) + .addFeatures( + FeatureSpec.newBuilder().setName("feature_1").setValueType(Enum.STRING).build()) + .addFeatures( + FeatureSpec.newBuilder().setName("feature_2").setValueType(Enum.INT64).build()) + .build(); + FeatureSetSpec fs2 = + FeatureSetSpec.newBuilder() + .setName("feature_set") + .setVersion(2) + .addEntities( + EntitySpec.newBuilder() + .setName("entity_id_primary") + .setValueType(Enum.INT32) + .build()) + .addEntities( + EntitySpec.newBuilder() + 
.setName("entity_id_secondary") + .setValueType(Enum.STRING) + .build()) + .addFeatures( + FeatureSpec.newBuilder().setName("feature_1").setValueType(Enum.STRING).build()) + .addFeatures( + FeatureSpec.newBuilder().setName("feature_2").setValueType(Enum.INT64).build()) + .build(); Map featureSetSpecs = new HashMap<>(); featureSetSpecs.put("feature_set:1", fs1); @@ -76,16 +87,19 @@ public void shouldWriteSuccessAndFailureTagsCorrectly() { input.add(FeatureRow.newBuilder().setFeatureSet("invalid").build()); - PCollectionTuple output = p.apply(Create.of(input)).setCoder(ProtoCoder.of(FeatureRow.class)) - .apply(ValidateFeatureRows.newBuilder() - .setFailureTag(FAILURE_TAG) - .setSuccessTag(SUCCESS_TAG) - .setFeatureSetSpecs(featureSetSpecs) - .build()); + PCollectionTuple output = + p.apply(Create.of(input)) + .setCoder(ProtoCoder.of(FeatureRow.class)) + .apply( + ValidateFeatureRows.newBuilder() + .setFailureTag(FAILURE_TAG) + .setSuccessTag(SUCCESS_TAG) + .setFeatureSetSpecs(featureSetSpecs) + .build()); PAssert.that(output.get(SUCCESS_TAG)).containsInAnyOrder(expected); PAssert.that(output.get(FAILURE_TAG).apply(Count.globally())).containsInAnyOrder(1L); p.run(); } -} \ No newline at end of file +} diff --git a/ingestion/src/test/java/feast/ingestion/util/JsonUtilTest.java b/ingestion/src/test/java/feast/ingestion/util/JsonUtilTest.java index 6ffe4ebf82..fa5e2bc029 100644 --- a/ingestion/src/test/java/feast/ingestion/util/JsonUtilTest.java +++ b/ingestion/src/test/java/feast/ingestion/util/JsonUtilTest.java @@ -42,4 +42,4 @@ public void convertJsonStringToMapShouldReturnEmptyMapForEmptyJson() { Map expected = Collections.emptyMap(); assertThat(JsonUtil.convertJsonStringToMap(input), equalTo(expected)); } -} \ No newline at end of file +} diff --git a/ingestion/src/test/java/feast/ingestion/util/StoreUtilTest.java b/ingestion/src/test/java/feast/ingestion/util/StoreUtilTest.java index efc8e9ac0c..d0ecd578e2 100644 --- a/ingestion/src/test/java/feast/ingestion/util/StoreUtilTest.java +++ b/ingestion/src/test/java/feast/ingestion/util/StoreUtilTest.java @@ -25,4 +25,4 @@ public void setupBigQuery_shouldCreateTable_givenFeatureSetSpec() { BigQuery mockedBigquery = Mockito.mock(BigQuery.class); StoreUtil.setupBigQuery(featureSetSpec, "project-1", "dataset_1", mockedBigquery); } -} \ No newline at end of file +} diff --git a/ingestion/src/test/java/feast/store/serving/redis/RedisCustomIOTest.java b/ingestion/src/test/java/feast/store/serving/redis/RedisCustomIOTest.java index d0c8743493..362dc2a299 100644 --- a/ingestion/src/test/java/feast/store/serving/redis/RedisCustomIOTest.java +++ b/ingestion/src/test/java/feast/store/serving/redis/RedisCustomIOTest.java @@ -26,14 +26,12 @@ public class RedisCustomIOTest { - @Rule - public transient TestPipeline p = TestPipeline.create(); + @Rule public transient TestPipeline p = TestPipeline.create(); private static int REDIS_PORT = 51234; private static Redis redis; private static Jedis jedis; - @BeforeClass public static void setUp() throws IOException { redis = new RedisServer(REDIS_PORT); @@ -49,31 +47,46 @@ public static void teardown() { @Test public void shouldWriteToRedis() { HashMap kvs = new LinkedHashMap<>(); - kvs.put(RedisKey.newBuilder().setFeatureSet("fs:1") - .addEntities(field("entity", 1, Enum.INT64)).build(), - FeatureRow.newBuilder().setFeatureSet("fs:1") + kvs.put( + RedisKey.newBuilder() + .setFeatureSet("fs:1") + .addEntities(field("entity", 1, Enum.INT64)) + .build(), + FeatureRow.newBuilder() + .setFeatureSet("fs:1") 
.addFields(field("entity", 1, Enum.INT64)) - .addFields(field("feature", "one", Enum.STRING)).build()); - kvs.put(RedisKey.newBuilder().setFeatureSet("fs:1") - .addEntities(field("entity", 2, Enum.INT64)).build(), - FeatureRow.newBuilder().setFeatureSet("fs:1") + .addFields(field("feature", "one", Enum.STRING)) + .build()); + kvs.put( + RedisKey.newBuilder() + .setFeatureSet("fs:1") + .addEntities(field("entity", 2, Enum.INT64)) + .build(), + FeatureRow.newBuilder() + .setFeatureSet("fs:1") .addFields(field("entity", 2, Enum.INT64)) - .addFields(field("feature", "two", Enum.STRING)).build()); + .addFields(field("feature", "two", Enum.STRING)) + .build()); - List featureRowWrites = kvs.entrySet().stream() - .map(kv -> new RedisMutation(Method.SET, kv.getKey().toByteArray(), - kv.getValue().toByteArray(), - null, null) - ) - .collect(Collectors.toList()); + List featureRowWrites = + kvs.entrySet().stream() + .map( + kv -> + new RedisMutation( + Method.SET, + kv.getKey().toByteArray(), + kv.getValue().toByteArray(), + null, + null)) + .collect(Collectors.toList()); - p.apply(Create.of(featureRowWrites)) - .apply(RedisCustomIO.write("localhost", REDIS_PORT)); + p.apply(Create.of(featureRowWrites)).apply(RedisCustomIO.write("localhost", REDIS_PORT)); p.run(); - kvs.forEach((key, value) -> { - byte[] actual = jedis.get(key.toByteArray()); - assertThat(actual, equalTo(value.toByteArray())); - }); + kvs.forEach( + (key, value) -> { + byte[] actual = jedis.get(key.toByteArray()); + assertThat(actual, equalTo(value.toByteArray())); + }); } -} \ No newline at end of file +} diff --git a/ingestion/src/test/java/feast/test/TestUtil.java b/ingestion/src/test/java/feast/test/TestUtil.java index b0fa4cbd10..163624f2af 100644 --- a/ingestion/src/test/java/feast/test/TestUtil.java +++ b/ingestion/src/test/java/feast/test/TestUtil.java @@ -66,15 +66,21 @@ public static class LocalKafka { /** * Start local Kafka and (optionally) Zookeeper * - * @param kafkaHost e.g. localhost - * @param kafkaPort e.g. 60001 + * @param kafkaHost e.g. localhost + * @param kafkaPort e.g. 60001 * @param kafkaReplicationFactor e.g. 1 - * @param zookeeperHost e.g. localhost - * @param zookeeperPort e.g. 60002 - * @param zookeeperDataDir e.g. "/tmp" or "Files.createTempDir().getAbsolutePath()" + * @param zookeeperHost e.g. localhost + * @param zookeeperPort e.g. 60002 + * @param zookeeperDataDir e.g. "/tmp" or "Files.createTempDir().getAbsolutePath()" */ - public static void start(String kafkaHost, int kafkaPort, short kafkaReplicationFactor, - boolean startZookeper, String zookeeperHost, int zookeeperPort, String zookeeperDataDir) + public static void start( + String kafkaHost, + int kafkaPort, + short kafkaReplicationFactor, + boolean startZookeper, + String zookeeperHost, + int zookeeperPort, + String zookeeperDataDir) throws InterruptedException { if (startZookeper) { LocalZookeeper.start(zookeeperPort, zookeeperDataDir); @@ -104,14 +110,18 @@ public static void stop() { /** * Publish test Feature Row messages to a running Kafka broker * - * @param bootstrapServers e.g. localhost:9092 - * @param topic e.g. my_topic - * @param messages e.g. list of Feature Row - * @param valueSerializer in Feast this valueSerializer should be "ByteArraySerializer.class" + * @param bootstrapServers e.g. localhost:9092 + * @param topic e.g. my_topic + * @param messages e.g. 
list of Feature Row + * @param valueSerializer in Feast this valueSerializer should be "ByteArraySerializer.class" * @param publishTimeoutSec duration to wait for publish operation (of each message) to succeed */ - public static void publishFeatureRowsToKafka(String bootstrapServers, String topic, - List messages, Class valueSerializer, long publishTimeoutSec) { + public static void publishFeatureRowsToKafka( + String bootstrapServers, + String topic, + List messages, + Class valueSerializer, + long publishTimeoutSec) { Long defaultKey = 1L; Properties prop = new Properties(); prop.put("bootstrap.servers", bootstrapServers); @@ -119,15 +129,16 @@ public static void publishFeatureRowsToKafka(String bootstrapServers, String top prop.put("value.serializer", valueSerializer); Producer producer = new KafkaProducer<>(prop); - messages.forEach(featureRow -> { - ProducerRecord record = new ProducerRecord<>(topic, defaultKey, - featureRow.toByteArray()); - try { - producer.send(record).get(publishTimeoutSec, TimeUnit.SECONDS); - } catch (InterruptedException | ExecutionException | TimeoutException e) { - e.printStackTrace(); - } - }); + messages.forEach( + featureRow -> { + ProducerRecord record = + new ProducerRecord<>(topic, defaultKey, featureRow.toByteArray()); + try { + producer.send(record).get(publishTimeoutSec, TimeUnit.SECONDS); + } catch (InterruptedException | ExecutionException | TimeoutException e) { + e.printStackTrace(); + } + }); } /** @@ -144,32 +155,38 @@ public static FeatureRow createRandomFeatureRow(FeatureSetSpec spec) { /** * Create a Feature Row with random value according to the FeatureSetSpec. * - *

<p>The Feature Row created contains fields according to the entities and features - * defined in FeatureSetSpec, matching the value type of the field, with randomized value for - * testing. + * <p>
The Feature Row created contains fields according to the entities and features defined in + * FeatureSetSpec, matching the value type of the field, with randomized value for testing. * - * @param spec {@link FeatureSetSpec} + * @param spec {@link FeatureSetSpec} * @param randomStringSize number of characters for the generated random string * @return {@link FeatureRow} */ public static FeatureRow createRandomFeatureRow(FeatureSetSpec spec, int randomStringSize) { - Builder builder = FeatureRow.newBuilder() - .setFeatureSet(spec.getName() + ":" + spec.getVersion()) - .setEventTimestamp(Timestamps.fromMillis(System.currentTimeMillis())); + Builder builder = + FeatureRow.newBuilder() + .setFeatureSet(spec.getName() + ":" + spec.getVersion()) + .setEventTimestamp(Timestamps.fromMillis(System.currentTimeMillis())); - spec.getEntitiesList().forEach(field -> { - builder.addFields(Field.newBuilder() - .setName(field.getName()) - .setValue(createRandomValue(field.getValueType(), randomStringSize)) - .build()); - }); + spec.getEntitiesList() + .forEach( + field -> { + builder.addFields( + Field.newBuilder() + .setName(field.getName()) + .setValue(createRandomValue(field.getValueType(), randomStringSize)) + .build()); + }); - spec.getFeaturesList().forEach(field -> { - builder.addFields(Field.newBuilder() - .setName(field.getName()) - .setValue(createRandomValue(field.getValueType(), randomStringSize)) - .build()); - }); + spec.getFeaturesList() + .forEach( + field -> { + builder.addFields( + Field.newBuilder() + .setName(field.getName()) + .setValue(createRandomValue(field.getValueType(), randomStringSize)) + .build()); + }); return builder.build(); } @@ -177,7 +194,7 @@ public static FeatureRow createRandomFeatureRow(FeatureSetSpec spec, int randomS /** * Create a random Feast {@link Value} of {@link ValueType.Enum}. * - * @param type {@link ValueType.Enum} + * @param type {@link ValueType.Enum} * @param randomStringSize number of characters for the generated random string * @return {@link Value} */ @@ -212,14 +229,17 @@ public static Value createRandomValue(ValueType.Enum type, int randomStringSize) builder.setBoolVal(random.nextBoolean()); break; case BYTES_LIST: - builder.setBytesListVal(BytesList.newBuilder() - .addVal(ByteString - .copyFrom(RandomStringUtils.randomAlphanumeric(randomStringSize).getBytes())) - .build()); + builder.setBytesListVal( + BytesList.newBuilder() + .addVal( + ByteString.copyFrom( + RandomStringUtils.randomAlphanumeric(randomStringSize).getBytes())) + .build()); break; case STRING_LIST: builder.setStringListVal( - StringList.newBuilder().addVal(RandomStringUtils.randomAlphanumeric(randomStringSize)) + StringList.newBuilder() + .addVal(RandomStringUtils.randomAlphanumeric(randomStringSize)) .build()); break; case INT32_LIST: @@ -244,20 +264,23 @@ public static Value createRandomValue(ValueType.Enum type, int randomStringSize) /** * Create {@link RedisKey} from {@link FeatureSetSpec} and {@link FeatureRow}. * - *

<p>The entities in the created {@link RedisKey} will contain the value with matching - * field name in the {@link FeatureRow} + * <p>
The entities in the created {@link RedisKey} will contain the value with matching field name + * in the {@link FeatureRow} * * @param spec {@link FeatureSetSpec} - * @param row {@link FeatureSetSpec} + * @param row {@link FeatureSetSpec} * @return {@link RedisKey} */ public static RedisKey createRedisKey(FeatureSetSpec spec, FeatureRow row) { - RedisKey.Builder builder = RedisKey.newBuilder() - .setFeatureSet(spec.getName() + ":" + spec.getVersion()); - spec.getEntitiesList().forEach(entityField -> row.getFieldsList().stream() - .filter(rowField -> rowField.getName().equals(entityField.getName())).findFirst() - .ifPresent( - builder::addEntities)); + RedisKey.Builder builder = + RedisKey.newBuilder().setFeatureSet(spec.getName() + ":" + spec.getVersion()); + spec.getEntitiesList() + .forEach( + entityField -> + row.getFieldsList().stream() + .filter(rowField -> rowField.getName().equals(entityField.getName())) + .findFirst() + .ifPresent(builder::addEntities)); return builder.build(); } @@ -266,15 +289,15 @@ private static class LocalZookeeper { static void start(int zookeeperPort, String zookeeperDataDir) { final ZooKeeperServerMain zookeeper = new ZooKeeperServerMain(); final ServerConfig serverConfig = new ServerConfig(); - serverConfig.parse(new String[]{String.valueOf(zookeeperPort), zookeeperDataDir}); + serverConfig.parse(new String[] {String.valueOf(zookeeperPort), zookeeperDataDir}); new Thread( - () -> { - try { - zookeeper.runFromConfig(serverConfig); - } catch (IOException e) { - e.printStackTrace(); - } - }) + () -> { + try { + zookeeper.runFromConfig(serverConfig); + } catch (IOException e) { + e.printStackTrace(); + } + }) .start(); } } @@ -288,8 +311,7 @@ static void start(int zookeeperPort, String zookeeperDataDir) { * @return Field object */ public static Field field(String name, Object value, ValueType.Enum valueType) { - Field.Builder fieldBuilder = Field.newBuilder() - .setName(name); + Field.Builder fieldBuilder = Field.newBuilder().setName(name); switch (valueType) { case INT32: return fieldBuilder.setValue(Value.newBuilder().setInt32Val((int) value)).build(); diff --git a/sdk/java/src/main/java/com/gojek/feast/v1alpha1/FeastClient.java b/sdk/java/src/main/java/com/gojek/feast/v1alpha1/FeastClient.java index 9f93f56c9c..2e8b946d9d 100644 --- a/sdk/java/src/main/java/com/gojek/feast/v1alpha1/FeastClient.java +++ b/sdk/java/src/main/java/com/gojek/feast/v1alpha1/FeastClient.java @@ -1,10 +1,10 @@ package com.gojek.feast.v1alpha1; +import feast.serving.ServingAPIProto.FeatureSetRequest; import feast.serving.ServingAPIProto.GetFeastServingInfoRequest; import feast.serving.ServingAPIProto.GetFeastServingInfoResponse; import feast.serving.ServingAPIProto.GetOnlineFeaturesRequest; import feast.serving.ServingAPIProto.GetOnlineFeaturesRequest.EntityRow; -import feast.serving.ServingAPIProto.FeatureSetRequest; import feast.serving.ServingAPIProto.GetOnlineFeaturesResponse; import feast.serving.ServingServiceGrpc; import io.grpc.ManagedChannel; diff --git a/serving/src/main/java/feast/serving/FeastProperties.java b/serving/src/main/java/feast/serving/FeastProperties.java index 73cc91479f..c856e7cb68 100644 --- a/serving/src/main/java/feast/serving/FeastProperties.java +++ b/serving/src/main/java/feast/serving/FeastProperties.java @@ -44,5 +44,4 @@ public static class TracingProperties { private String tracerName; private String serviceName; } - } diff --git a/serving/src/main/java/feast/serving/configuration/ContextClosedHandler.java 
b/serving/src/main/java/feast/serving/configuration/ContextClosedHandler.java index aa4faee6fc..01f3a6446b 100644 --- a/serving/src/main/java/feast/serving/configuration/ContextClosedHandler.java +++ b/serving/src/main/java/feast/serving/configuration/ContextClosedHandler.java @@ -9,12 +9,10 @@ @Component public class ContextClosedHandler implements ApplicationListener { - @Autowired - ScheduledExecutorService executor; + @Autowired ScheduledExecutorService executor; @Override public void onApplicationEvent(ContextClosedEvent event) { executor.shutdown(); } } - diff --git a/serving/src/main/java/feast/serving/configuration/InstrumentationConfig.java b/serving/src/main/java/feast/serving/configuration/InstrumentationConfig.java index ead8e67b73..368f6065c9 100644 --- a/serving/src/main/java/feast/serving/configuration/InstrumentationConfig.java +++ b/serving/src/main/java/feast/serving/configuration/InstrumentationConfig.java @@ -3,8 +3,8 @@ import feast.serving.FeastProperties; import io.opentracing.Tracer; import io.opentracing.noop.NoopTracerFactory; -import io.prometheus.client.hotspot.DefaultExports; import io.prometheus.client.exporter.MetricsServlet; +import io.prometheus.client.hotspot.DefaultExports; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.web.servlet.ServletRegistrationBean; import org.springframework.context.annotation.Bean; diff --git a/serving/src/main/java/feast/serving/configuration/JobServiceConfig.java b/serving/src/main/java/feast/serving/configuration/JobServiceConfig.java index de80ff14e6..288ffcd11e 100644 --- a/serving/src/main/java/feast/serving/configuration/JobServiceConfig.java +++ b/serving/src/main/java/feast/serving/configuration/JobServiceConfig.java @@ -7,7 +7,6 @@ import feast.serving.service.JobService; import feast.serving.service.NoopJobService; import feast.serving.service.RedisBackedJobService; -import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import redis.clients.jedis.Jedis; @@ -16,8 +15,7 @@ public class JobServiceConfig { @Bean - public JobService jobService(Store jobStore, - CachedSpecService specService) { + public JobService jobService(Store jobStore, CachedSpecService specService) { if (!specService.getStore().getType().equals(StoreType.BIGQUERY)) { return new NoopJobService(); } @@ -34,8 +32,8 @@ public JobService jobService(Store jobStore, default: throw new IllegalArgumentException( String.format( - "Unsupported store type '%s' for job store name '%s'", jobStore.getType(), - jobStore.getName())); + "Unsupported store type '%s' for job store name '%s'", + jobStore.getType(), jobStore.getName())); } } } diff --git a/serving/src/main/java/feast/serving/configuration/ServingApiConfiguration.java b/serving/src/main/java/feast/serving/configuration/ServingApiConfiguration.java index a3babf36b4..4b78303435 100644 --- a/serving/src/main/java/feast/serving/configuration/ServingApiConfiguration.java +++ b/serving/src/main/java/feast/serving/configuration/ServingApiConfiguration.java @@ -10,8 +10,7 @@ @Configuration public class ServingApiConfiguration implements WebMvcConfigurer { - @Autowired - private ProtobufJsonFormatHttpMessageConverter protobufConverter; + @Autowired private ProtobufJsonFormatHttpMessageConverter protobufConverter; @Bean ProtobufJsonFormatHttpMessageConverter protobufHttpMessageConverter() { diff --git 
a/serving/src/main/java/feast/serving/configuration/ServingServiceConfig.java b/serving/src/main/java/feast/serving/configuration/ServingServiceConfig.java index ac5263ec7a..194b695b4c 100644 --- a/serving/src/main/java/feast/serving/configuration/ServingServiceConfig.java +++ b/serving/src/main/java/feast/serving/configuration/ServingServiceConfig.java @@ -30,41 +30,43 @@ @Configuration public class ServingServiceConfig { - @Bean(name="JobStore") + @Bean(name = "JobStore") public Store jobStoreDefinition(FeastProperties feastProperties) { JobProperties jobProperties = feastProperties.getJobs(); if (feastProperties.getJobs().getStoreType().equals("")) { return Store.newBuilder().build(); } Map options = jobProperties.getStoreOptions(); - Builder storeDefinitionBuilder = Store.newBuilder() - .setType(StoreType.valueOf(jobProperties.getStoreType())); + Builder storeDefinitionBuilder = + Store.newBuilder().setType(StoreType.valueOf(jobProperties.getStoreType())); return setStoreConfig(storeDefinitionBuilder, options); } private Store setStoreConfig(Store.Builder builder, Map options) { switch (builder.getType()) { case REDIS: - RedisConfig redisConfig = RedisConfig.newBuilder() - .setHost(options.get("host")) - .setPort(Integer.parseInt(options.get("port"))) - .build(); + RedisConfig redisConfig = + RedisConfig.newBuilder() + .setHost(options.get("host")) + .setPort(Integer.parseInt(options.get("port"))) + .build(); return builder.setRedisConfig(redisConfig).build(); case BIGQUERY: - BigQueryConfig bqConfig = BigQueryConfig.newBuilder() - .setProjectId(options.get("projectId")) - .setDatasetId(options.get("datasetId")) - .build(); + BigQueryConfig bqConfig = + BigQueryConfig.newBuilder() + .setProjectId(options.get("projectId")) + .setDatasetId(options.get("datasetId")) + .build(); return builder.setBigqueryConfig(bqConfig).build(); case CASSANDRA: default: - throw new IllegalArgumentException(String.format( - "Unsupported store %s provided, only REDIS or BIGQUERY are currently supported.", - builder.getType())); + throw new IllegalArgumentException( + String.format( + "Unsupported store %s provided, only REDIS or BIGQUERY are currently supported.", + builder.getType())); } } - @Bean public ServingService servingService( FeastProperties feastProperties, @@ -81,8 +83,7 @@ public ServingService servingService( poolConfig.setMaxTotal(feastProperties.getStore().getRedisPoolMaxSize()); poolConfig.setMaxIdle(feastProperties.getStore().getRedisPoolMaxIdle()); JedisPool jedisPool = - new JedisPool( - poolConfig, redisConfig.getHost(), redisConfig.getPort()); + new JedisPool(poolConfig, redisConfig.getHost(), redisConfig.getPort()); servingService = new RedisServingService(jedisPool, specService, tracer); break; case BIGQUERY: @@ -103,7 +104,8 @@ public ServingService servingService( + jobStagingLocation); } if (jobService.getClass() == NoopJobService.class) { - throw new IllegalArgumentException("Unable to instantiate jobService for BigQuery store."); + throw new IllegalArgumentException( + "Unable to instantiate jobService for BigQuery store."); } servingService = new BigQueryServingService( @@ -120,7 +122,8 @@ public ServingService servingService( case INVALID: throw new IllegalArgumentException( String.format( - "Unsupported store type '%s' for store name '%s'", store.getType(), store.getName())); + "Unsupported store type '%s' for store name '%s'", + store.getType(), store.getName())); } return servingService; diff --git a/serving/src/main/java/feast/serving/configuration/SpecServiceConfig.java 
b/serving/src/main/java/feast/serving/configuration/SpecServiceConfig.java index 9c7c74f3ca..d8ec606ba0 100644 --- a/serving/src/main/java/feast/serving/configuration/SpecServiceConfig.java +++ b/serving/src/main/java/feast/serving/configuration/SpecServiceConfig.java @@ -21,7 +21,6 @@ public class SpecServiceConfig { private int feastCorePort; private static final int CACHE_REFRESH_RATE_MINUTES = 1; - @Autowired public SpecServiceConfig(FeastProperties feastProperties) { feastCoreHost = feastProperties.getCoreHost(); @@ -31,12 +30,14 @@ public SpecServiceConfig(FeastProperties feastProperties) { @Bean public ScheduledExecutorService cachedSpecServiceScheduledExecutorService( CachedSpecService cachedSpecStorage) { - ScheduledExecutorService scheduledExecutorService = Executors - .newSingleThreadScheduledExecutor(); + ScheduledExecutorService scheduledExecutorService = + Executors.newSingleThreadScheduledExecutor(); // reload all specs including new ones periodically - scheduledExecutorService - .scheduleAtFixedRate(cachedSpecStorage::scheduledPopulateCache, CACHE_REFRESH_RATE_MINUTES, - CACHE_REFRESH_RATE_MINUTES, TimeUnit.MINUTES); + scheduledExecutorService.scheduleAtFixedRate( + cachedSpecStorage::scheduledPopulateCache, + CACHE_REFRESH_RATE_MINUTES, + CACHE_REFRESH_RATE_MINUTES, + TimeUnit.MINUTES); return scheduledExecutorService; } @@ -45,8 +46,7 @@ public CachedSpecService specService(FeastProperties feastProperties) { CoreSpecService coreService = new CoreSpecService(feastCoreHost, feastCorePort); Path path = Paths.get(feastProperties.getStore().getConfigPath()); - CachedSpecService cachedSpecStorage = - new CachedSpecService(coreService, path); + CachedSpecService cachedSpecStorage = new CachedSpecService(coreService, path); try { cachedSpecStorage.populateCache(); } catch (Exception e) { diff --git a/serving/src/main/java/feast/serving/controller/ServingServiceGRpcController.java b/serving/src/main/java/feast/serving/controller/ServingServiceGRpcController.java index 1a51bb5cda..c0b60131d6 100644 --- a/serving/src/main/java/feast/serving/controller/ServingServiceGRpcController.java +++ b/serving/src/main/java/feast/serving/controller/ServingServiceGRpcController.java @@ -36,19 +36,19 @@ public ServingServiceGRpcController( } @Override - public void getFeastServingInfo(GetFeastServingInfoRequest request, + public void getFeastServingInfo( + GetFeastServingInfoRequest request, StreamObserver responseObserver) { GetFeastServingInfoResponse feastServingInfo = servingService.getFeastServingInfo(request); - feastServingInfo = feastServingInfo.toBuilder() - .setVersion(version) - .build(); + feastServingInfo = feastServingInfo.toBuilder().setVersion(version).build(); responseObserver.onNext(feastServingInfo); responseObserver.onCompleted(); } @Override public void getOnlineFeatures( - GetOnlineFeaturesRequest request, StreamObserver responseObserver) { + GetOnlineFeaturesRequest request, + StreamObserver responseObserver) { Span span = tracer.buildSpan("getOnlineFeatures").start(); try (Scope scope = tracer.scopeManager().activate(span, false)) { RequestHelper.validateOnlineRequest(request); diff --git a/serving/src/main/java/feast/serving/controller/ServingServiceRestController.java b/serving/src/main/java/feast/serving/controller/ServingServiceRestController.java index 2eeb901463..75c0a96791 100644 --- a/serving/src/main/java/feast/serving/controller/ServingServiceRestController.java +++ b/serving/src/main/java/feast/serving/controller/ServingServiceRestController.java @@ -34,8 +34,8 
@@ public ServingServiceRestController( @RequestMapping(value = "/api/v1/info", produces = "application/json") public GetFeastServingInfoResponse getInfo() { - GetFeastServingInfoResponse feastServingInfo = servingService - .getFeastServingInfo(GetFeastServingInfoRequest.getDefaultInstance()); + GetFeastServingInfoResponse feastServingInfo = + servingService.getFeastServingInfo(GetFeastServingInfoRequest.getDefaultInstance()); return feastServingInfo.toBuilder().setVersion(version).build(); } diff --git a/serving/src/main/java/feast/serving/service/BigQueryServingService.java b/serving/src/main/java/feast/serving/service/BigQueryServingService.java index be8ccf7fee..71caf45373 100644 --- a/serving/src/main/java/feast/serving/service/BigQueryServingService.java +++ b/serving/src/main/java/feast/serving/service/BigQueryServingService.java @@ -78,9 +78,7 @@ public BigQueryServingService( this.storage = storage; } - /** - * {@inheritDoc} - */ + /** {@inheritDoc} */ @Override public GetFeastServingInfoResponse getFeastServingInfo( GetFeastServingInfoRequest getFeastServingInfoRequest) { @@ -90,27 +88,23 @@ public GetFeastServingInfoResponse getFeastServingInfo( .build(); } - /** - * {@inheritDoc} - */ + /** {@inheritDoc} */ @Override public GetOnlineFeaturesResponse getOnlineFeatures(GetOnlineFeaturesRequest getFeaturesRequest) { throw Status.UNIMPLEMENTED.withDescription("Method not implemented").asRuntimeException(); } - /** - * {@inheritDoc} - */ + /** {@inheritDoc} */ @Override public GetBatchFeaturesResponse getBatchFeatures(GetBatchFeaturesRequest getFeaturesRequest) { Timer getBatchFeaturesTimer = requestLatency.labels("getBatchFeatures").startTimer(); List featureSetSpecs = getFeaturesRequest.getFeatureSetsList().stream() - .map(featureSet -> { + .map( + featureSet -> { requestCount.labels(featureSet.getName()).inc(); return specService.getFeatureSet(featureSet.getName(), featureSet.getVersion()); - } - ) + }) .collect(Collectors.toList()); if (getFeaturesRequest.getFeatureSetsList().size() != featureSetSpecs.size()) { @@ -122,16 +116,17 @@ public GetBatchFeaturesResponse getBatchFeatures(GetBatchFeaturesRequest getFeat Table entityTable = loadEntities(getFeaturesRequest.getDatasetSource()); String entityTableName = entityTable.getTableId().getTable(); - //TODO: add expiration to temp tables -// entityTable = entityTable.toBuilder().setExpirationTime(TABLE_EXPIRATION_TIME).build(); -// entityTable.update(TableOption.fields(TableField.EXPIRATION_TIME)); + // TODO: add expiration to temp tables + // entityTable = entityTable.toBuilder().setExpirationTime(TABLE_EXPIRATION_TIME).build(); + // entityTable.update(TableOption.fields(TableField.EXPIRATION_TIME)); FieldValueList timestampLimits = getTimestampLimits(entityTableName); Schema entityTableSchema = entityTable.getDefinition().getSchema(); - List entityNames = entityTableSchema.getFields().stream() - .map(Field::getName) - .filter(name -> !name.equals("event_timestamp")) - .collect(Collectors.toList()); + List entityNames = + entityTableSchema.getFields().stream() + .map(Field::getName) + .filter(name -> !name.equals("event_timestamp")) + .collect(Collectors.toList()); String query; try { @@ -160,82 +155,82 @@ public GetBatchFeaturesResponse getBatchFeatures(GetBatchFeaturesRequest getFeat jobService.upsert(feastJob); new Thread( - () -> { - QueryJobConfiguration queryConfig; - Job queryJob; + () -> { + QueryJobConfiguration queryConfig; + Job queryJob; - try { - queryConfig = - QueryJobConfiguration.newBuilder(query) - 
.setDefaultDataset(DatasetId.of(projectId, datasetId)) - .build(); - queryJob = bigquery.create(JobInfo.of(queryConfig)); - jobService.upsert( - ServingAPIProto.Job.newBuilder() - .setId(feastJobId) - .setType(JobType.JOB_TYPE_DOWNLOAD) - .setStatus(JobStatus.JOB_STATUS_RUNNING) - .build()); - queryJob.waitFor(); - } catch (BigQueryException | InterruptedException e) { - jobService.upsert( - ServingAPIProto.Job.newBuilder() - .setId(feastJobId) - .setType(JobType.JOB_TYPE_DOWNLOAD) - .setStatus(JobStatus.JOB_STATUS_DONE) - .setError(e.getMessage()) - .build()); - return; - } + try { + queryConfig = + QueryJobConfiguration.newBuilder(query) + .setDefaultDataset(DatasetId.of(projectId, datasetId)) + .build(); + queryJob = bigquery.create(JobInfo.of(queryConfig)); + jobService.upsert( + ServingAPIProto.Job.newBuilder() + .setId(feastJobId) + .setType(JobType.JOB_TYPE_DOWNLOAD) + .setStatus(JobStatus.JOB_STATUS_RUNNING) + .build()); + queryJob.waitFor(); + } catch (BigQueryException | InterruptedException e) { + jobService.upsert( + ServingAPIProto.Job.newBuilder() + .setId(feastJobId) + .setType(JobType.JOB_TYPE_DOWNLOAD) + .setStatus(JobStatus.JOB_STATUS_DONE) + .setError(e.getMessage()) + .build()); + return; + } - try { - queryConfig = queryJob.getConfiguration(); - String exportTableDestinationUri = - String.format("%s/%s/*.avro", jobStagingLocation, feastJobId); + try { + queryConfig = queryJob.getConfiguration(); + String exportTableDestinationUri = + String.format("%s/%s/*.avro", jobStagingLocation, feastJobId); - // Hardcode the format to Avro for now - ExtractJobConfiguration extractConfig = - ExtractJobConfiguration.of( - queryConfig.getDestinationTable(), exportTableDestinationUri, "Avro"); - Job extractJob = bigquery.create(JobInfo.of(extractConfig)); - extractJob.waitFor(); - } catch (BigQueryException | InterruptedException e) { - jobService.upsert( - ServingAPIProto.Job.newBuilder() - .setId(feastJobId) - .setType(JobType.JOB_TYPE_DOWNLOAD) - .setStatus(JobStatus.JOB_STATUS_DONE) - .setError(e.getMessage()) - .build()); - return; - } + // Hardcode the format to Avro for now + ExtractJobConfiguration extractConfig = + ExtractJobConfiguration.of( + queryConfig.getDestinationTable(), exportTableDestinationUri, "Avro"); + Job extractJob = bigquery.create(JobInfo.of(extractConfig)); + extractJob.waitFor(); + } catch (BigQueryException | InterruptedException e) { + jobService.upsert( + ServingAPIProto.Job.newBuilder() + .setId(feastJobId) + .setType(JobType.JOB_TYPE_DOWNLOAD) + .setStatus(JobStatus.JOB_STATUS_DONE) + .setError(e.getMessage()) + .build()); + return; + } - String scheme = jobStagingLocation.substring(0, jobStagingLocation.indexOf("://")); - String stagingLocationNoScheme = - jobStagingLocation.substring(jobStagingLocation.indexOf("://") + 3); - String bucket = stagingLocationNoScheme.split("/")[0]; - List prefixParts = new ArrayList<>(); - prefixParts.add( - stagingLocationNoScheme.contains("/") && !stagingLocationNoScheme.endsWith("/") - ? 
stagingLocationNoScheme.substring(stagingLocationNoScheme.indexOf("/") + 1) - : ""); - prefixParts.add(feastJobId); - String prefix = String.join("/", prefixParts) + "/"; + String scheme = jobStagingLocation.substring(0, jobStagingLocation.indexOf("://")); + String stagingLocationNoScheme = + jobStagingLocation.substring(jobStagingLocation.indexOf("://") + 3); + String bucket = stagingLocationNoScheme.split("/")[0]; + List prefixParts = new ArrayList<>(); + prefixParts.add( + stagingLocationNoScheme.contains("/") && !stagingLocationNoScheme.endsWith("/") + ? stagingLocationNoScheme.substring(stagingLocationNoScheme.indexOf("/") + 1) + : ""); + prefixParts.add(feastJobId); + String prefix = String.join("/", prefixParts) + "/"; - List fileUris = new ArrayList<>(); - for (Blob blob : storage.list(bucket, BlobListOption.prefix(prefix)).iterateAll()) { - fileUris.add(String.format("%s://%s/%s", scheme, blob.getBucket(), blob.getName())); - } + List fileUris = new ArrayList<>(); + for (Blob blob : storage.list(bucket, BlobListOption.prefix(prefix)).iterateAll()) { + fileUris.add(String.format("%s://%s/%s", scheme, blob.getBucket(), blob.getName())); + } - jobService.upsert( - ServingAPIProto.Job.newBuilder() - .setId(feastJobId) - .setType(JobType.JOB_TYPE_DOWNLOAD) - .setStatus(JobStatus.JOB_STATUS_DONE) - .addAllFileUris(fileUris) - .setDataFormat(DataFormat.DATA_FORMAT_AVRO) - .build()); - }) + jobService.upsert( + ServingAPIProto.Job.newBuilder() + .setId(feastJobId) + .setType(JobType.JOB_TYPE_DOWNLOAD) + .setStatus(JobStatus.JOB_STATUS_DONE) + .addAllFileUris(fileUris) + .setDataFormat(DataFormat.DATA_FORMAT_AVRO) + .build()); + }) .start(); getBatchFeaturesTimer.observeDuration(); @@ -243,15 +238,14 @@ public GetBatchFeaturesResponse getBatchFeatures(GetBatchFeaturesRequest getFeat } private FieldValueList getTimestampLimits(String entityTableName) { - QueryJobConfiguration getTimestampLimitsQuery = QueryJobConfiguration - .newBuilder(getTimestampLimitQuery(projectId, datasetId, entityTableName)) - .setDefaultDataset(DatasetId.of(projectId, datasetId)).build(); + QueryJobConfiguration getTimestampLimitsQuery = + QueryJobConfiguration.newBuilder( + getTimestampLimitQuery(projectId, datasetId, entityTableName)) + .setDefaultDataset(DatasetId.of(projectId, datasetId)) + .build(); try { - Job job = bigquery - .create(JobInfo.of(getTimestampLimitsQuery)); - TableResult getTimestampLimitsQueryResult = job - .waitFor() - .getQueryResults(); + Job job = bigquery.create(JobInfo.of(getTimestampLimitsQuery)); + TableResult getTimestampLimitsQueryResult = job.waitFor().getQueryResults(); FieldValueList result = null; for (FieldValueList fields : getTimestampLimitsQueryResult.getValues()) { result = fields; @@ -268,9 +262,7 @@ private FieldValueList getTimestampLimits(String entityTableName) { } } - /** - * {@inheritDoc} - */ + /** {@inheritDoc} */ @Override public GetJobResponse getJob(GetJobRequest getJobRequest) { Optional job = jobService.get(getJobRequest.getJob().getId()); @@ -295,12 +287,11 @@ private Table loadEntities(DatasetSource datasetSource) { .withDescription("Invalid file format, only avro supported") .asRuntimeException(); } - LoadJobConfiguration loadJobConfiguration = LoadJobConfiguration.of(tableId, - datasetSource.getFileSource().getFileUrisList(), - FormatOptions.avro()); - loadJobConfiguration = loadJobConfiguration.toBuilder() - .setUseAvroLogicalTypes(true) - .build(); + LoadJobConfiguration loadJobConfiguration = + LoadJobConfiguration.of( + tableId, 
datasetSource.getFileSource().getFileUrisList(), FormatOptions.avro()); + loadJobConfiguration = + loadJobConfiguration.toBuilder().setUseAvroLogicalTypes(true).build(); Job job = bigquery.create(JobInfo.of(loadJobConfiguration)); job.waitFor(); Table entityTable = bigquery.getTable(tableId); diff --git a/serving/src/main/java/feast/serving/service/CachedSpecService.java b/serving/src/main/java/feast/serving/service/CachedSpecService.java index dd1be0bf7d..65dbe3ab11 100644 --- a/serving/src/main/java/feast/serving/service/CachedSpecService.java +++ b/serving/src/main/java/feast/serving/service/CachedSpecService.java @@ -26,9 +26,7 @@ import java.util.concurrent.ExecutionException; import lombok.extern.slf4j.Slf4j; -/** - * In-memory cache of specs. - */ +/** In-memory cache of specs. */ @Slf4j public class CachedSpecService { @@ -41,14 +39,18 @@ public class CachedSpecService { private final LoadingCache featureSetSpecCache; private Store store; - private static Gauge featureSetsCount = Gauge.build().name("feature_set_count") - .subsystem("feast_serving") - .help("number of feature sets served by this instance") - .register(); - private static Gauge cacheLastUpdated = Gauge.build().name("cache_last_updated") - .subsystem("feast_serving") - .help("epoch time of the last time the cache was updated") - .register(); + private static Gauge featureSetsCount = + Gauge.build() + .name("feature_set_count") + .subsystem("feast_serving") + .help("number of feature sets served by this instance") + .register(); + private static Gauge cacheLastUpdated = + Gauge.build() + .name("cache_last_updated") + .subsystem("feast_serving") + .help("epoch time of the last time the cache was updated") + .register(); public CachedSpecService(CoreSpecService coreService, Path configPath) { this.configPath = configPath; @@ -56,9 +58,7 @@ public CachedSpecService(CoreSpecService coreService, Path configPath) { this.store = updateStore(readConfig(configPath)); Map featureSetSpecs = getFeatureSetSpecMap(); - featureSetSpecCacheLoader = - CacheLoader.from( - (String key) -> featureSetSpecs.get(key)); + featureSetSpecCacheLoader = CacheLoader.from((String key) -> featureSetSpecs.get(key)); featureSetSpecCache = CacheBuilder.newBuilder().maximumSize(MAX_SPEC_COUNT).build(featureSetSpecCacheLoader); } @@ -85,11 +85,13 @@ public FeatureSetSpec getFeatureSet(String name, int version) { return featureSetSpecCache.get(id); } catch (InvalidCacheLoadException e) { // if not found, try to retrieve from core - ListFeatureSetsRequest request = ListFeatureSetsRequest.newBuilder() - .setFilter(Filter.newBuilder() - .setFeatureSetName(name) - .setFeatureSetVersion(String.valueOf(version))) - .build(); + ListFeatureSetsRequest request = + ListFeatureSetsRequest.newBuilder() + .setFilter( + Filter.newBuilder() + .setFeatureSetName(name) + .setFeatureSetVersion(String.valueOf(version))) + .build(); ListFeatureSetsResponse featureSets = coreService.listFeatureSets(request); if (featureSets.getFeatureSetsList().size() == 0) { throw new SpecRetrievalException( @@ -130,18 +132,19 @@ private Map getFeatureSetSpecMap() { for (Subscription subscription : this.store.getSubscriptionsList()) { try { - ListFeatureSetsResponse featureSetsResponse = coreService - .listFeatureSets(ListFeatureSetsRequest.newBuilder() - .setFilter( - ListFeatureSetsRequest.Filter.newBuilder() - .setFeatureSetName(subscription.getName()) - .setFeatureSetVersion(subscription.getVersion()) - ).build()); + ListFeatureSetsResponse featureSetsResponse = + 
coreService.listFeatureSets( + ListFeatureSetsRequest.newBuilder() + .setFilter( + ListFeatureSetsRequest.Filter.newBuilder() + .setFeatureSetName(subscription.getName()) + .setFeatureSetVersion(subscription.getVersion())) + .build()); for (FeatureSetSpec featureSetSpec : featureSetsResponse.getFeatureSetsList()) { - featureSetSpecs - .put(String.format("%s:%s", featureSetSpec.getName(), featureSetSpec.getVersion()), - featureSetSpec); + featureSetSpecs.put( + String.format("%s:%s", featureSetSpec.getName(), featureSetSpec.getVersion()), + featureSetSpec); } } catch (StatusRuntimeException e) { throw new RuntimeException( diff --git a/serving/src/main/java/feast/serving/service/CoreSpecService.java b/serving/src/main/java/feast/serving/service/CoreSpecService.java index f37ae61963..f9f37f48e7 100644 --- a/serving/src/main/java/feast/serving/service/CoreSpecService.java +++ b/serving/src/main/java/feast/serving/service/CoreSpecService.java @@ -9,9 +9,7 @@ import io.grpc.ManagedChannelBuilder; import lombok.extern.slf4j.Slf4j; -/** - * Client for spec retrieval from core. - */ +/** Client for spec retrieval from core. */ @Slf4j public class CoreSpecService { private final CoreServiceGrpc.CoreServiceBlockingStub blockingStub; diff --git a/serving/src/main/java/feast/serving/service/NoopJobService.java b/serving/src/main/java/feast/serving/service/NoopJobService.java index 8cecb54f09..28601671aa 100644 --- a/serving/src/main/java/feast/serving/service/NoopJobService.java +++ b/serving/src/main/java/feast/serving/service/NoopJobService.java @@ -12,7 +12,5 @@ public Optional get(String id) { } @Override - public void upsert(Job job) { - - } + public void upsert(Job job) {} } diff --git a/serving/src/main/java/feast/serving/service/RedisBackedJobService.java b/serving/src/main/java/feast/serving/service/RedisBackedJobService.java index d50541f4fa..79e52ecd9e 100644 --- a/serving/src/main/java/feast/serving/service/RedisBackedJobService.java +++ b/serving/src/main/java/feast/serving/service/RedisBackedJobService.java @@ -1,6 +1,5 @@ package feast.serving.service; -import com.google.protobuf.InvalidProtocolBufferException; import com.google.protobuf.util.JsonFormat; import feast.serving.ServingAPIProto.Job; import feast.serving.ServingAPIProto.Job.Builder; diff --git a/serving/src/main/java/feast/serving/service/RedisServingService.java b/serving/src/main/java/feast/serving/service/RedisServingService.java index 118b1367bd..b9368bb297 100644 --- a/serving/src/main/java/feast/serving/service/RedisServingService.java +++ b/serving/src/main/java/feast/serving/service/RedisServingService.java @@ -17,8 +17,8 @@ package feast.serving.service; import static feast.serving.util.Metrics.missingKeyCount; -import static feast.serving.util.Metrics.requestLatency; import static feast.serving.util.Metrics.requestCount; +import static feast.serving.util.Metrics.requestLatency; import static feast.serving.util.Metrics.staleKeyCount; import com.google.common.collect.Maps; @@ -28,6 +28,7 @@ import feast.core.FeatureSetProto.EntitySpec; import feast.core.FeatureSetProto.FeatureSetSpec; import feast.serving.ServingAPIProto.FeastServingType; +import feast.serving.ServingAPIProto.FeatureSetRequest; import feast.serving.ServingAPIProto.GetBatchFeaturesRequest; import feast.serving.ServingAPIProto.GetBatchFeaturesResponse; import feast.serving.ServingAPIProto.GetFeastServingInfoRequest; @@ -36,7 +37,6 @@ import feast.serving.ServingAPIProto.GetJobResponse; import feast.serving.ServingAPIProto.GetOnlineFeaturesRequest; 
import feast.serving.ServingAPIProto.GetOnlineFeaturesRequest.EntityRow; -import feast.serving.ServingAPIProto.FeatureSetRequest; import feast.serving.ServingAPIProto.GetOnlineFeaturesResponse; import feast.serving.ServingAPIProto.GetOnlineFeaturesResponse.FieldValues; import feast.storage.RedisProto.RedisKey; @@ -67,9 +67,7 @@ public RedisServingService(JedisPool jedisPool, CachedSpecService specService, T this.tracer = tracer; } - /** - * {@inheritDoc} - */ + /** {@inheritDoc} */ @Override public GetFeastServingInfoResponse getFeastServingInfo( GetFeastServingInfoRequest getFeastServingInfoRequest) { @@ -78,9 +76,7 @@ public GetFeastServingInfoResponse getFeastServingInfo( .build(); } - /** - * {@inheritDoc} - */ + /** {@inheritDoc} */ @Override public GetOnlineFeaturesResponse getOnlineFeatures(GetOnlineFeaturesRequest request) { try (Scope scope = tracer.buildSpan("Redis-getOnlineFeatures").startActive(true)) { @@ -180,8 +176,9 @@ private RedisKey makeRedisKey( if (!fieldsMap.containsKey(entityName)) { throw Status.INVALID_ARGUMENT - .withDescription(String - .format("Entity row fields \"%s\" does not contain required entity field \"%s\"", + .withDescription( + String.format( + "Entity row fields \"%s\" does not contain required entity field \"%s\"", fieldsMap.keySet().toString(), entityName)) .asRuntimeException(); } @@ -200,8 +197,7 @@ private void sendAndProcessMultiGet( throws InvalidProtocolBufferException { List jedisResps = sendMultiGet(redisKeys); - Timer processResponseTimer = requestLatency.labels("processResponse") - .startTimer(); + Timer processResponseTimer = requestLatency.labels("processResponse").startTimer(); try (Scope scope = tracer.buildSpan("Redis-processResponse").startActive(true)) { String featureSetId = String.format("%s:%d", featureSetRequest.getName(), featureSetRequest.getVersion()); diff --git a/serving/src/main/java/feast/serving/util/BigQueryUtil.java b/serving/src/main/java/feast/serving/util/BigQueryUtil.java index 390bea19b5..27b3105ac9 100644 --- a/serving/src/main/java/feast/serving/util/BigQueryUtil.java +++ b/serving/src/main/java/feast/serving/util/BigQueryUtil.java @@ -31,8 +31,8 @@ public static class FeatureSetInfo { List features; } - public static String getTimestampLimitQuery(String projectId, String datasetId, - String leftTableName) { + public static String getTimestampLimitQuery( + String projectId, String datasetId, String leftTableName) { return String.format( "SELECT DATETIME(MAX(event_timestamp)) as max, DATETIME(MIN(event_timestamp)) as min FROM `%s.%s.%s`", projectId, datasetId, leftTableName); @@ -46,11 +46,10 @@ public static String createQuery( String bigqueryDataset, String leftTableName, String minTimestamp, - String maxTimestamp) throws IOException { + String maxTimestamp) + throws IOException { - if (featureSets == null - || featureSetSpecs == null - || bigqueryDataset.isEmpty()) { + if (featureSets == null || featureSetSpecs == null || bigqueryDataset.isEmpty()) { return ""; } @@ -63,12 +62,26 @@ public static String createQuery( FeatureSetSpec spec = featureSetSpecs.get(i); FeatureSetRequest request = featureSets.get(i); Duration maxAge = getMaxAge(request, spec); - List fsEntities = spec.getEntitiesList().stream().map(EntitySpec::getName) - .collect(Collectors.toList()); + List fsEntities = + spec.getEntitiesList().stream().map(EntitySpec::getName).collect(Collectors.toList()); String id = String.format("%s:%s", spec.getName(), spec.getVersion()); - featureSetInfos.add(new FeatureSetInfo(id, spec.getName(), 
spec.getVersion(), maxAge.getSeconds(), fsEntities, request.getFeatureNamesList())); + featureSetInfos.add( + new FeatureSetInfo( + id, + spec.getName(), + spec.getVersion(), + maxAge.getSeconds(), + fsEntities, + request.getFeatureNamesList())); } - return createQueryForFeatureSets(featureSetInfos, entities, projectId, bigqueryDataset, leftTableName, minTimestamp, maxTimestamp); + return createQueryForFeatureSets( + featureSetInfos, + entities, + projectId, + bigqueryDataset, + leftTableName, + minTimestamp, + maxTimestamp); } public static String createQueryForFeatureSets( @@ -78,7 +91,8 @@ public static String createQueryForFeatureSets( String datasetId, String leftTableName, String minTimestamp, - String maxTimestamp) throws IOException { + String maxTimestamp) + throws IOException { PebbleTemplate template = engine.getTemplate(FEATURESET_TEMPLATE_NAME); Map context = new HashMap<>(); diff --git a/serving/src/main/java/feast/serving/util/Metrics.java b/serving/src/main/java/feast/serving/util/Metrics.java index a4463cf689..f5102ea4c0 100644 --- a/serving/src/main/java/feast/serving/util/Metrics.java +++ b/serving/src/main/java/feast/serving/util/Metrics.java @@ -2,36 +2,39 @@ import io.prometheus.client.Counter; import io.prometheus.client.Histogram; -import io.prometheus.client.Summary; public class Metrics { - public static final Histogram requestLatency = Histogram.build() - .buckets(2, 4, 6, 8, 10, 15, 20, 25, 30, 35, 50) - .name("request_latency_ms") - .subsystem("feast_serving") - .help("Request latency in milliseconds.") - .labelNames("method") - .register(); + public static final Histogram requestLatency = + Histogram.build() + .buckets(2, 4, 6, 8, 10, 15, 20, 25, 30, 35, 50) + .name("request_latency_ms") + .subsystem("feast_serving") + .help("Request latency in milliseconds.") + .labelNames("method") + .register(); - public static final Counter requestCount = Counter.build() - .name("request_feature_count") - .subsystem("feast_serving") - .help("number of feature rows requested") - .labelNames("feature_set_name") - .register(); + public static final Counter requestCount = + Counter.build() + .name("request_feature_count") + .subsystem("feast_serving") + .help("number of feature rows requested") + .labelNames("feature_set_name") + .register(); - public static final Counter missingKeyCount = Counter.build() - .name("missing_feature_count") - .subsystem("feast_serving") - .help("number requested feature rows that were not found") - .labelNames("feature_set_name") - .register(); + public static final Counter missingKeyCount = + Counter.build() + .name("missing_feature_count") + .subsystem("feast_serving") + .help("number requested feature rows that were not found") + .labelNames("feature_set_name") + .register(); - public static final Counter staleKeyCount = Counter.build() - .name("stale_feature_count") - .subsystem("feast_serving") - .help("number requested feature rows that were stale") - .labelNames("feature_set_name") - .register(); + public static final Counter staleKeyCount = + Counter.build() + .name("stale_feature_count") + .subsystem("feast_serving") + .help("number requested feature rows that were stale") + .labelNames("feature_set_name") + .register(); } diff --git a/serving/src/main/java/feast/serving/util/RequestHelper.java b/serving/src/main/java/feast/serving/util/RequestHelper.java index 445e781eda..a6945d2dba 100644 --- a/serving/src/main/java/feast/serving/util/RequestHelper.java +++ b/serving/src/main/java/feast/serving/util/RequestHelper.java @@ -1,6 +1,5 @@ 
package feast.serving.util; -import feast.serving.ServingAPIProto.DataFormat; import feast.serving.ServingAPIProto.GetBatchFeaturesRequest; import feast.serving.ServingAPIProto.GetOnlineFeaturesRequest; import io.grpc.Status; @@ -10,14 +9,16 @@ public class RequestHelper { public static void validateOnlineRequest(GetOnlineFeaturesRequest request) { // EntityDataSetRow shall not be empty if (request.getEntityRowsCount() <= 0) { - throw Status.INVALID_ARGUMENT.withDescription("Entity value must be provided") + throw Status.INVALID_ARGUMENT + .withDescription("Entity value must be provided") .asRuntimeException(); } } public static void validateBatchRequest(GetBatchFeaturesRequest getFeaturesRequest) { if (!getFeaturesRequest.hasDatasetSource()) { - throw Status.INVALID_ARGUMENT.withDescription("Dataset source must be provided") + throw Status.INVALID_ARGUMENT + .withDescription("Dataset source must be provided") .asRuntimeException(); } diff --git a/serving/src/main/java/feast/serving/util/mappers/ResponseJSONMapper.java b/serving/src/main/java/feast/serving/util/mappers/ResponseJSONMapper.java index 550601a687..b83d66e91c 100644 --- a/serving/src/main/java/feast/serving/util/mappers/ResponseJSONMapper.java +++ b/serving/src/main/java/feast/serving/util/mappers/ResponseJSONMapper.java @@ -10,7 +10,8 @@ // ResponseJSONMapper maps GRPC Response types to more human readable JSON responses public class ResponseJSONMapper { - public static List> mapGetOnlineFeaturesResponse(GetOnlineFeaturesResponse response) { + public static List> mapGetOnlineFeaturesResponse( + GetOnlineFeaturesResponse response) { return response.getFieldValuesList().stream() .map(fieldValue -> convertFieldValuesToMap(fieldValue)) .collect(Collectors.toList()); @@ -55,5 +56,4 @@ private static Object extractValue(Value value) { return null; } } - } diff --git a/serving/src/test/java/feast/serving/controller/ServingServiceGRpcControllerTest.java b/serving/src/test/java/feast/serving/controller/ServingServiceGRpcControllerTest.java index 329dc21e07..d464efa429 100644 --- a/serving/src/test/java/feast/serving/controller/ServingServiceGRpcControllerTest.java +++ b/serving/src/test/java/feast/serving/controller/ServingServiceGRpcControllerTest.java @@ -5,9 +5,9 @@ import com.google.common.collect.Lists; import com.google.protobuf.Timestamp; import feast.serving.FeastProperties; +import feast.serving.ServingAPIProto.FeatureSetRequest; import feast.serving.ServingAPIProto.GetOnlineFeaturesRequest; import feast.serving.ServingAPIProto.GetOnlineFeaturesRequest.EntityRow; -import feast.serving.ServingAPIProto.FeatureSetRequest; import feast.serving.ServingAPIProto.GetOnlineFeaturesResponse; import feast.serving.service.ServingService; import feast.types.ValueProto.Value; @@ -22,11 +22,9 @@ public class ServingServiceGRpcControllerTest { - @Mock - private ServingService mockServingService; + @Mock private ServingService mockServingService; - @Mock - private StreamObserver mockStreamObserver; + @Mock private StreamObserver mockStreamObserver; private GetOnlineFeaturesRequest validRequest; @@ -36,17 +34,20 @@ public class ServingServiceGRpcControllerTest { public void setUp() { initMocks(this); - validRequest = GetOnlineFeaturesRequest.newBuilder() - .addFeatureSets(FeatureSetRequest.newBuilder() - .setName("featureSet") - .setVersion(1) - .addAllFeatureNames(Lists.newArrayList("feature1", "feature2")) - .build()) - .addEntityRows(EntityRow.newBuilder() - .setEntityTimestamp(Timestamp.newBuilder().setSeconds(100)) - .putFields("entity1", 
Value.newBuilder().setInt64Val(1).build()) - .putFields("entity2", Value.newBuilder().setInt64Val(1).build())) - .build(); + validRequest = + GetOnlineFeaturesRequest.newBuilder() + .addFeatureSets( + FeatureSetRequest.newBuilder() + .setName("featureSet") + .setVersion(1) + .addAllFeatureNames(Lists.newArrayList("feature1", "feature2")) + .build()) + .addEntityRows( + EntityRow.newBuilder() + .setEntityTimestamp(Timestamp.newBuilder().setSeconds(100)) + .putFields("entity1", Value.newBuilder().setInt64Val(1).build()) + .putFields("entity2", Value.newBuilder().setInt64Val(1).build())) + .build(); Tracer tracer = Configuration.fromEnv("dummy").getTracer(); FeastProperties feastProperties = new FeastProperties(); @@ -66,4 +67,4 @@ public void shouldCallOnErrorIfEntityDatasetIsNotSet() { service.getOnlineFeatures(missingEntityName, mockStreamObserver); Mockito.verify(mockStreamObserver).onError(Mockito.any(StatusRuntimeException.class)); } -} \ No newline at end of file +} diff --git a/serving/src/test/java/feast/serving/service/CachedSpecServiceTest.java b/serving/src/test/java/feast/serving/service/CachedSpecServiceTest.java index 2b422b609c..31dc88fab7 100644 --- a/serving/src/test/java/feast/serving/service/CachedSpecServiceTest.java +++ b/serving/src/test/java/feast/serving/service/CachedSpecServiceTest.java @@ -34,11 +34,9 @@ public class CachedSpecServiceTest { private File configFile; private Store store; - @Rule - public final ExpectedException expectedException = ExpectedException.none(); + @Rule public final ExpectedException expectedException = ExpectedException.none(); - @Mock - CoreSpecService coreService; + @Mock CoreSpecService coreService; private Map featureSetSpecs; private CachedSpecService cachedSpecService; @@ -47,27 +45,30 @@ public class CachedSpecServiceTest { public void setUp() throws IOException { initMocks(this); - configFile = File.createTempFile( "serving", ".yml"); - String yamlString = "name: SERVING\n" - + "type: REDIS\n" - + "redis_config:\n" - + " host: localhost\n" - + " port: 6379\n" - + "subscriptions:\n" - + "- name: fs1\n" - + " version: \">0\"\n" - + "- name: fs2\n" - + " version: \">0\""; + configFile = File.createTempFile("serving", ".yml"); + String yamlString = + "name: SERVING\n" + + "type: REDIS\n" + + "redis_config:\n" + + " host: localhost\n" + + " port: 6379\n" + + "subscriptions:\n" + + "- name: fs1\n" + + " version: \">0\"\n" + + "- name: fs2\n" + + " version: \">0\""; BufferedWriter writer = new BufferedWriter(new FileWriter(configFile)); writer.write(yamlString); writer.close(); - store = Store.newBuilder().setName("SERVING") - .setType(StoreType.REDIS) - .setRedisConfig(RedisConfig.newBuilder().setHost("localhost").setPort(6379)) - .addSubscriptions(Subscription.newBuilder().setName("fs1").setVersion(">0").build()) - .addSubscriptions(Subscription.newBuilder().setName("fs2").setVersion(">0").build()) - .build(); + store = + Store.newBuilder() + .setName("SERVING") + .setType(StoreType.REDIS) + .setRedisConfig(RedisConfig.newBuilder().setHost("localhost").setPort(6379)) + .addSubscriptions(Subscription.newBuilder().setName("fs1").setVersion(">0").build()) + .addSubscriptions(Subscription.newBuilder().setName("fs2").setVersion(">0").build()) + .build(); when(coreService.updateStore(UpdateStoreRequest.newBuilder().setStore(store).build())) .thenReturn(UpdateStoreResponse.newBuilder().setStore(store).build()); @@ -77,20 +78,26 @@ public void setUp() throws IOException { featureSetSpecs.put("fs1:2", 
FeatureSetSpec.newBuilder().setName("fs1").setVersion(2).build()); featureSetSpecs.put("fs2:1", FeatureSetSpec.newBuilder().setName("fs2").setVersion(1).build()); - List fs1FeatureSets = Lists - .newArrayList(featureSetSpecs.get("fs1:1"), featureSetSpecs.get("fs1:2")); + List fs1FeatureSets = + Lists.newArrayList(featureSetSpecs.get("fs1:1"), featureSetSpecs.get("fs1:2")); List fs2FeatureSets = Lists.newArrayList(featureSetSpecs.get("fs2:1")); - when(coreService.listFeatureSets(ListFeatureSetsRequest - .newBuilder() - .setFilter(ListFeatureSetsRequest.Filter.newBuilder().setFeatureSetName("fs1") - .setFeatureSetVersion(">0").build()) - .build())) + when(coreService.listFeatureSets( + ListFeatureSetsRequest.newBuilder() + .setFilter( + ListFeatureSetsRequest.Filter.newBuilder() + .setFeatureSetName("fs1") + .setFeatureSetVersion(">0") + .build()) + .build())) .thenReturn(ListFeatureSetsResponse.newBuilder().addAllFeatureSets(fs1FeatureSets).build()); - when(coreService.listFeatureSets(ListFeatureSetsRequest - .newBuilder() - .setFilter(ListFeatureSetsRequest.Filter.newBuilder().setFeatureSetName("fs2") - .setFeatureSetVersion(">0").build()) - .build())) + when(coreService.listFeatureSets( + ListFeatureSetsRequest.newBuilder() + .setFilter( + ListFeatureSetsRequest.Filter.newBuilder() + .setFeatureSetName("fs2") + .setFeatureSetVersion(">0") + .build()) + .build())) .thenReturn(ListFeatureSetsResponse.newBuilder().addAllFeatureSets(fs2FeatureSets).build()); cachedSpecService = new CachedSpecService(coreService, configFile.toPath()); diff --git a/serving/src/test/java/feast/serving/service/RedisServingServiceTest.java b/serving/src/test/java/feast/serving/service/RedisServingServiceTest.java index 84c3f96035..4575c49153 100644 --- a/serving/src/test/java/feast/serving/service/RedisServingServiceTest.java +++ b/serving/src/test/java/feast/serving/service/RedisServingServiceTest.java @@ -35,17 +35,13 @@ public class RedisServingServiceTest { - @Mock - JedisPool jedisPool; + @Mock JedisPool jedisPool; - @Mock - Jedis jedis; + @Mock Jedis jedis; - @Mock - CachedSpecService specService; + @Mock CachedSpecService specService; - @Mock - Tracer tracer; + @Mock Tracer tracer; private RedisServingService redisServingService; private byte[][] redisKeyList; @@ -55,194 +51,217 @@ public void setUp() { initMocks(this); redisServingService = new RedisServingService(jedisPool, specService, tracer); - redisKeyList = Lists.newArrayList( - RedisKey.newBuilder().setFeatureSet("featureSet:1") - .addAllEntities(Lists.newArrayList( - Field.newBuilder().setName("entity1").setValue(intValue(1)).build(), - Field.newBuilder().setName("entity2").setValue(strValue("a")).build() - )).build(), - RedisKey.newBuilder().setFeatureSet("featureSet:1") - .addAllEntities(Lists.newArrayList( - Field.newBuilder().setName("entity1").setValue(intValue(2)).build(), - Field.newBuilder().setName("entity2").setValue(strValue("b")).build() - )).build() - ).stream() - .map(AbstractMessageLite::toByteArray) - .collect(Collectors.toList()) - .toArray(new byte[0][0]); + redisKeyList = + Lists.newArrayList( + RedisKey.newBuilder() + .setFeatureSet("featureSet:1") + .addAllEntities( + Lists.newArrayList( + Field.newBuilder().setName("entity1").setValue(intValue(1)).build(), + Field.newBuilder().setName("entity2").setValue(strValue("a")).build())) + .build(), + RedisKey.newBuilder() + .setFeatureSet("featureSet:1") + .addAllEntities( + Lists.newArrayList( + Field.newBuilder().setName("entity1").setValue(intValue(2)).build(), + 
Field.newBuilder().setName("entity2").setValue(strValue("b")).build())) + .build()) + .stream() + .map(AbstractMessageLite::toByteArray) + .collect(Collectors.toList()) + .toArray(new byte[0][0]); } @Test public void shouldReturnResponseWithValuesIfKeysPresent() { - GetOnlineFeaturesRequest request = GetOnlineFeaturesRequest.newBuilder() - .addFeatureSets(FeatureSetRequest.newBuilder() - .setName("featureSet") - .setVersion(1) - .addAllFeatureNames(Lists.newArrayList("feature1", "feature2")) - .build()) - .addEntityRows(EntityRow.newBuilder() - .setEntityTimestamp(Timestamp.newBuilder().setSeconds(100)) - .putFields("entity1", intValue(1)) - .putFields("entity2", strValue("a"))) - .addEntityRows(EntityRow.newBuilder() - .setEntityTimestamp(Timestamp.newBuilder().setSeconds(100)) - .putFields("entity1", intValue(2)) - .putFields("entity2", strValue("b"))) - .build(); - - List featureRows = Lists.newArrayList( - FeatureRow.newBuilder() - .setEventTimestamp(Timestamp.newBuilder().setSeconds(100)) - .addAllFields(Lists - .newArrayList( - Field.newBuilder().setName("entity1").setValue(intValue(1)).build(), - Field.newBuilder().setName("entity2").setValue(strValue("a")).build(), - Field.newBuilder().setName("feature1").setValue(intValue(1)).build(), - Field.newBuilder().setName("feature2").setValue(intValue(1)).build())) - .setFeatureSet("featureSet:1") - .build(), - FeatureRow.newBuilder() - .setEventTimestamp(Timestamp.newBuilder().setSeconds(100)) - .addAllFields(Lists - .newArrayList( - Field.newBuilder().setName("entity1").setValue(intValue(2)).build(), - Field.newBuilder().setName("entity2").setValue(strValue("b")).build(), - Field.newBuilder().setName("feature1").setValue(intValue(2)).build(), - Field.newBuilder().setName("feature2").setValue(intValue(2)).build())) - .setFeatureSet("featureSet:1") - .build() - ); - - List featureRowBytes = featureRows.stream() - .map(AbstractMessageLite::toByteArray) - .collect(Collectors.toList()); + GetOnlineFeaturesRequest request = + GetOnlineFeaturesRequest.newBuilder() + .addFeatureSets( + FeatureSetRequest.newBuilder() + .setName("featureSet") + .setVersion(1) + .addAllFeatureNames(Lists.newArrayList("feature1", "feature2")) + .build()) + .addEntityRows( + EntityRow.newBuilder() + .setEntityTimestamp(Timestamp.newBuilder().setSeconds(100)) + .putFields("entity1", intValue(1)) + .putFields("entity2", strValue("a"))) + .addEntityRows( + EntityRow.newBuilder() + .setEntityTimestamp(Timestamp.newBuilder().setSeconds(100)) + .putFields("entity1", intValue(2)) + .putFields("entity2", strValue("b"))) + .build(); + + List featureRows = + Lists.newArrayList( + FeatureRow.newBuilder() + .setEventTimestamp(Timestamp.newBuilder().setSeconds(100)) + .addAllFields( + Lists.newArrayList( + Field.newBuilder().setName("entity1").setValue(intValue(1)).build(), + Field.newBuilder().setName("entity2").setValue(strValue("a")).build(), + Field.newBuilder().setName("feature1").setValue(intValue(1)).build(), + Field.newBuilder().setName("feature2").setValue(intValue(1)).build())) + .setFeatureSet("featureSet:1") + .build(), + FeatureRow.newBuilder() + .setEventTimestamp(Timestamp.newBuilder().setSeconds(100)) + .addAllFields( + Lists.newArrayList( + Field.newBuilder().setName("entity1").setValue(intValue(2)).build(), + Field.newBuilder().setName("entity2").setValue(strValue("b")).build(), + Field.newBuilder().setName("feature1").setValue(intValue(2)).build(), + Field.newBuilder().setName("feature2").setValue(intValue(2)).build())) + .setFeatureSet("featureSet:1") + 
.build()); + + List featureRowBytes = + featureRows.stream().map(AbstractMessageLite::toByteArray).collect(Collectors.toList()); when(specService.getFeatureSet("featureSet", 1)).thenReturn(getFeatureSetSpec()); when(jedisPool.getResource()).thenReturn(jedis); when(jedis.mget(redisKeyList)).thenReturn(featureRowBytes); when(tracer.buildSpan(ArgumentMatchers.any())).thenReturn(Mockito.mock(SpanBuilder.class)); - GetOnlineFeaturesResponse expected = GetOnlineFeaturesResponse.newBuilder() - .addFieldValues(FieldValues.newBuilder() - .putFields("entity1", intValue(1)) - .putFields("entity2", strValue("a")) - .putFields("featureSet:1:feature1", intValue(1)) - .putFields("featureSet:1:feature2", intValue(1))) - .addFieldValues(FieldValues.newBuilder() - .putFields("entity1", intValue(2)) - .putFields("entity2", strValue("b")) - .putFields("featureSet:1:feature1", intValue(2)) - .putFields("featureSet:1:feature2", intValue(2))) - .build(); + GetOnlineFeaturesResponse expected = + GetOnlineFeaturesResponse.newBuilder() + .addFieldValues( + FieldValues.newBuilder() + .putFields("entity1", intValue(1)) + .putFields("entity2", strValue("a")) + .putFields("featureSet:1:feature1", intValue(1)) + .putFields("featureSet:1:feature2", intValue(1))) + .addFieldValues( + FieldValues.newBuilder() + .putFields("entity1", intValue(2)) + .putFields("entity2", strValue("b")) + .putFields("featureSet:1:feature1", intValue(2)) + .putFields("featureSet:1:feature2", intValue(2))) + .build(); GetOnlineFeaturesResponse actual = redisServingService.getOnlineFeatures(request); - assertThat(responseToMapList(actual), containsInAnyOrder(responseToMapList(expected).toArray())); + assertThat( + responseToMapList(actual), containsInAnyOrder(responseToMapList(expected).toArray())); } @Test public void shouldReturnResponseWithValuesWhenFeatureSetSpecHasUnspecifiedMaxAge() { - GetOnlineFeaturesRequest request = GetOnlineFeaturesRequest.newBuilder() - .addFeatureSets(FeatureSetRequest.newBuilder() - .setName("featureSet") - .setVersion(1) - .addAllFeatureNames(Lists.newArrayList("feature1", "feature2")) - .build()) - .addEntityRows(EntityRow.newBuilder() - .setEntityTimestamp(Timestamp.newBuilder().setSeconds(100)) - .putFields("entity1", intValue(1)) - .putFields("entity2", strValue("a"))) - .addEntityRows(EntityRow.newBuilder() - .setEntityTimestamp(Timestamp.newBuilder().setSeconds(100)) - .putFields("entity1", intValue(2)) - .putFields("entity2", strValue("b"))) - .build(); - - List featureRows = Lists.newArrayList( - FeatureRow.newBuilder() - .setEventTimestamp(Timestamp.newBuilder().setSeconds(2)) // much older timestamp - .addAllFields(Lists - .newArrayList( - Field.newBuilder().setName("entity1").setValue(intValue(1)).build(), - Field.newBuilder().setName("entity2").setValue(strValue("a")).build(), - Field.newBuilder().setName("feature1").setValue(intValue(1)).build(), - Field.newBuilder().setName("feature2").setValue(intValue(1)).build())) - .setFeatureSet("featureSet:1") - .build(), - FeatureRow.newBuilder() - .setEventTimestamp(Timestamp.newBuilder().setSeconds(15)) // much older timestamp - .addAllFields(Lists - .newArrayList( - Field.newBuilder().setName("entity1").setValue(intValue(2)).build(), - Field.newBuilder().setName("entity2").setValue(strValue("b")).build(), - Field.newBuilder().setName("feature1").setValue(intValue(2)).build(), - Field.newBuilder().setName("feature2").setValue(intValue(2)).build())) - .setFeatureSet("featureSet:1") - .build() - ); - - List featureRowBytes = featureRows.stream() - 
.map(AbstractMessageLite::toByteArray) - .collect(Collectors.toList()); + GetOnlineFeaturesRequest request = + GetOnlineFeaturesRequest.newBuilder() + .addFeatureSets( + FeatureSetRequest.newBuilder() + .setName("featureSet") + .setVersion(1) + .addAllFeatureNames(Lists.newArrayList("feature1", "feature2")) + .build()) + .addEntityRows( + EntityRow.newBuilder() + .setEntityTimestamp(Timestamp.newBuilder().setSeconds(100)) + .putFields("entity1", intValue(1)) + .putFields("entity2", strValue("a"))) + .addEntityRows( + EntityRow.newBuilder() + .setEntityTimestamp(Timestamp.newBuilder().setSeconds(100)) + .putFields("entity1", intValue(2)) + .putFields("entity2", strValue("b"))) + .build(); + + List featureRows = + Lists.newArrayList( + FeatureRow.newBuilder() + .setEventTimestamp(Timestamp.newBuilder().setSeconds(2)) // much older timestamp + .addAllFields( + Lists.newArrayList( + Field.newBuilder().setName("entity1").setValue(intValue(1)).build(), + Field.newBuilder().setName("entity2").setValue(strValue("a")).build(), + Field.newBuilder().setName("feature1").setValue(intValue(1)).build(), + Field.newBuilder().setName("feature2").setValue(intValue(1)).build())) + .setFeatureSet("featureSet:1") + .build(), + FeatureRow.newBuilder() + .setEventTimestamp(Timestamp.newBuilder().setSeconds(15)) // much older timestamp + .addAllFields( + Lists.newArrayList( + Field.newBuilder().setName("entity1").setValue(intValue(2)).build(), + Field.newBuilder().setName("entity2").setValue(strValue("b")).build(), + Field.newBuilder().setName("feature1").setValue(intValue(2)).build(), + Field.newBuilder().setName("feature2").setValue(intValue(2)).build())) + .setFeatureSet("featureSet:1") + .build()); + + List featureRowBytes = + featureRows.stream().map(AbstractMessageLite::toByteArray).collect(Collectors.toList()); when(specService.getFeatureSet("featureSet", 1)).thenReturn(getFeatureSetSpecWithNoMaxAge()); when(jedisPool.getResource()).thenReturn(jedis); when(jedis.mget(redisKeyList)).thenReturn(featureRowBytes); when(tracer.buildSpan(ArgumentMatchers.any())).thenReturn(Mockito.mock(SpanBuilder.class)); - GetOnlineFeaturesResponse expected = GetOnlineFeaturesResponse.newBuilder() - .addFieldValues(FieldValues.newBuilder() - .putFields("entity1", intValue(1)) - .putFields("entity2", strValue("a")) - .putFields("featureSet:1:feature1", intValue(1)) - .putFields("featureSet:1:feature2", intValue(1))) - .addFieldValues(FieldValues.newBuilder() - .putFields("entity1", intValue(2)) - .putFields("entity2", strValue("b")) - .putFields("featureSet:1:feature1", intValue(2)) - .putFields("featureSet:1:feature2", intValue(2))) - .build(); + GetOnlineFeaturesResponse expected = + GetOnlineFeaturesResponse.newBuilder() + .addFieldValues( + FieldValues.newBuilder() + .putFields("entity1", intValue(1)) + .putFields("entity2", strValue("a")) + .putFields("featureSet:1:feature1", intValue(1)) + .putFields("featureSet:1:feature2", intValue(1))) + .addFieldValues( + FieldValues.newBuilder() + .putFields("entity1", intValue(2)) + .putFields("entity2", strValue("b")) + .putFields("featureSet:1:feature1", intValue(2)) + .putFields("featureSet:1:feature2", intValue(2))) + .build(); GetOnlineFeaturesResponse actual = redisServingService.getOnlineFeatures(request); - assertThat(responseToMapList(actual), containsInAnyOrder(responseToMapList(expected).toArray())); + assertThat( + responseToMapList(actual), containsInAnyOrder(responseToMapList(expected).toArray())); } @Test public void 
shouldReturnResponseWithUnsetValuesIfKeysNotPresent() { // some keys not present, should have empty values - GetOnlineFeaturesRequest request = GetOnlineFeaturesRequest.newBuilder() - .addFeatureSets(FeatureSetRequest.newBuilder() - .setName("featureSet") - .setVersion(1) - .addAllFeatureNames(Lists.newArrayList("feature1", "feature2")) - .build()) - .addEntityRows(EntityRow.newBuilder() - .setEntityTimestamp(Timestamp.newBuilder().setSeconds(100)) - .putFields("entity1", intValue(1)) - .putFields("entity2", strValue("a"))) - .addEntityRows(EntityRow.newBuilder() - .setEntityTimestamp(Timestamp.newBuilder().setSeconds(100)) - .putFields("entity1", intValue(2)) - .putFields("entity2", strValue("b"))) - .build(); - - List featureRows = Lists.newArrayList( - FeatureRow.newBuilder() - .setEventTimestamp(Timestamp.newBuilder().setSeconds(100)) - .addAllFields(Lists - .newArrayList( - Field.newBuilder().setName("entity1").setValue(intValue(1)).build(), - Field.newBuilder().setName("entity2").setValue(strValue("a")).build(), - Field.newBuilder().setName("feature1").setValue(intValue(1)).build(), - Field.newBuilder().setName("feature2").setValue(intValue(1)).build())) - .setFeatureSet("featureSet:1") - .build(), - FeatureRow.newBuilder() - .setEventTimestamp(Timestamp.newBuilder()) - .addAllFields(Lists - .newArrayList( - Field.newBuilder().setName("entity1").setValue(intValue(2)).build(), - Field.newBuilder().setName("entity2").setValue(strValue("b")).build(), - Field.newBuilder().setName("feature1").build(), - Field.newBuilder().setName("feature2").build())) - .setFeatureSet("featureSet:1") - .build() - ); + GetOnlineFeaturesRequest request = + GetOnlineFeaturesRequest.newBuilder() + .addFeatureSets( + FeatureSetRequest.newBuilder() + .setName("featureSet") + .setVersion(1) + .addAllFeatureNames(Lists.newArrayList("feature1", "feature2")) + .build()) + .addEntityRows( + EntityRow.newBuilder() + .setEntityTimestamp(Timestamp.newBuilder().setSeconds(100)) + .putFields("entity1", intValue(1)) + .putFields("entity2", strValue("a"))) + .addEntityRows( + EntityRow.newBuilder() + .setEntityTimestamp(Timestamp.newBuilder().setSeconds(100)) + .putFields("entity1", intValue(2)) + .putFields("entity2", strValue("b"))) + .build(); + + List featureRows = + Lists.newArrayList( + FeatureRow.newBuilder() + .setEventTimestamp(Timestamp.newBuilder().setSeconds(100)) + .addAllFields( + Lists.newArrayList( + Field.newBuilder().setName("entity1").setValue(intValue(1)).build(), + Field.newBuilder().setName("entity2").setValue(strValue("a")).build(), + Field.newBuilder().setName("feature1").setValue(intValue(1)).build(), + Field.newBuilder().setName("feature2").setValue(intValue(1)).build())) + .setFeatureSet("featureSet:1") + .build(), + FeatureRow.newBuilder() + .setEventTimestamp(Timestamp.newBuilder()) + .addAllFields( + Lists.newArrayList( + Field.newBuilder().setName("entity1").setValue(intValue(2)).build(), + Field.newBuilder().setName("entity2").setValue(strValue("b")).build(), + Field.newBuilder().setName("feature1").build(), + Field.newBuilder().setName("feature2").build())) + .setFeatureSet("featureSet:1") + .build()); List featureRowBytes = Lists.newArrayList(featureRows.get(0).toByteArray(), null); when(specService.getFeatureSet("featureSet", 1)).thenReturn(getFeatureSetSpec()); @@ -250,223 +269,250 @@ public void shouldReturnResponseWithUnsetValuesIfKeysNotPresent() { when(jedis.mget(redisKeyList)).thenReturn(featureRowBytes); 
when(tracer.buildSpan(ArgumentMatchers.any())).thenReturn(Mockito.mock(SpanBuilder.class)); - GetOnlineFeaturesResponse expected = GetOnlineFeaturesResponse.newBuilder() - .addFieldValues(FieldValues.newBuilder() - .putFields("entity1", intValue(1)) - .putFields("entity2", strValue("a")) - .putFields("featureSet:1:feature1", intValue(1)) - .putFields("featureSet:1:feature2", intValue(1))) - .addFieldValues(FieldValues.newBuilder() - .putFields("entity1", intValue(2)) - .putFields("entity2", strValue("b")) - .putFields("featureSet:1:feature1", Value.newBuilder().build()) - .putFields("featureSet:1:feature2", Value.newBuilder().build())) - .build(); + GetOnlineFeaturesResponse expected = + GetOnlineFeaturesResponse.newBuilder() + .addFieldValues( + FieldValues.newBuilder() + .putFields("entity1", intValue(1)) + .putFields("entity2", strValue("a")) + .putFields("featureSet:1:feature1", intValue(1)) + .putFields("featureSet:1:feature2", intValue(1))) + .addFieldValues( + FieldValues.newBuilder() + .putFields("entity1", intValue(2)) + .putFields("entity2", strValue("b")) + .putFields("featureSet:1:feature1", Value.newBuilder().build()) + .putFields("featureSet:1:feature2", Value.newBuilder().build())) + .build(); GetOnlineFeaturesResponse actual = redisServingService.getOnlineFeatures(request); - assertThat(responseToMapList(actual), containsInAnyOrder(responseToMapList(expected).toArray())); + assertThat( + responseToMapList(actual), containsInAnyOrder(responseToMapList(expected).toArray())); } @Test public void shouldReturnResponseWithUnsetValuesIfMaxAgeIsExceeded() { // keys present, but too stale comp. to maxAge set in request - GetOnlineFeaturesRequest request = GetOnlineFeaturesRequest.newBuilder() - .addFeatureSets(FeatureSetRequest.newBuilder() - .setName("featureSet") - .setVersion(1) - .setMaxAge(Duration.newBuilder().setSeconds(10)) - .addAllFeatureNames(Lists.newArrayList("feature1", "feature2")) - .build()) - .addEntityRows(EntityRow.newBuilder() - .setEntityTimestamp(Timestamp.newBuilder().setSeconds(100)) - .putFields("entity1", intValue(1)) - .putFields("entity2", strValue("a"))) - .addEntityRows(EntityRow.newBuilder() - .setEntityTimestamp(Timestamp.newBuilder().setSeconds(100)) - .putFields("entity1", intValue(2)) - .putFields("entity2", strValue("b"))) - .build(); - - List featureRows = Lists.newArrayList( - FeatureRow.newBuilder() - .setEventTimestamp(Timestamp.newBuilder().setSeconds(100)) - .addAllFields(Lists - .newArrayList( - Field.newBuilder().setName("entity1").setValue(intValue(1)).build(), - Field.newBuilder().setName("entity2").setValue(strValue("a")).build(), - Field.newBuilder().setName("feature1").setValue(intValue(1)).build(), - Field.newBuilder().setName("feature2").setValue(intValue(1)).build())) - .setFeatureSet("featureSet:1") - .build(), - FeatureRow.newBuilder() - .setEventTimestamp(Timestamp.newBuilder().setSeconds(50)) // this value should be nulled - .addAllFields(Lists - .newArrayList( - Field.newBuilder().setName("entity1").setValue(intValue(2)).build(), - Field.newBuilder().setName("entity2").setValue(strValue("b")).build(), - Field.newBuilder().setName("feature1").setValue(intValue(2)).build(), - Field.newBuilder().setName("feature2").setValue(intValue(2)).build())) - .setFeatureSet("featureSet:1") - .build() - ); - - List featureRowBytes = featureRows.stream() - .map(AbstractMessageLite::toByteArray) - .collect(Collectors.toList()); + GetOnlineFeaturesRequest request = + GetOnlineFeaturesRequest.newBuilder() + .addFeatureSets( + 
FeatureSetRequest.newBuilder() + .setName("featureSet") + .setVersion(1) + .setMaxAge(Duration.newBuilder().setSeconds(10)) + .addAllFeatureNames(Lists.newArrayList("feature1", "feature2")) + .build()) + .addEntityRows( + EntityRow.newBuilder() + .setEntityTimestamp(Timestamp.newBuilder().setSeconds(100)) + .putFields("entity1", intValue(1)) + .putFields("entity2", strValue("a"))) + .addEntityRows( + EntityRow.newBuilder() + .setEntityTimestamp(Timestamp.newBuilder().setSeconds(100)) + .putFields("entity1", intValue(2)) + .putFields("entity2", strValue("b"))) + .build(); + + List featureRows = + Lists.newArrayList( + FeatureRow.newBuilder() + .setEventTimestamp(Timestamp.newBuilder().setSeconds(100)) + .addAllFields( + Lists.newArrayList( + Field.newBuilder().setName("entity1").setValue(intValue(1)).build(), + Field.newBuilder().setName("entity2").setValue(strValue("a")).build(), + Field.newBuilder().setName("feature1").setValue(intValue(1)).build(), + Field.newBuilder().setName("feature2").setValue(intValue(1)).build())) + .setFeatureSet("featureSet:1") + .build(), + FeatureRow.newBuilder() + .setEventTimestamp( + Timestamp.newBuilder().setSeconds(50)) // this value should be nulled + .addAllFields( + Lists.newArrayList( + Field.newBuilder().setName("entity1").setValue(intValue(2)).build(), + Field.newBuilder().setName("entity2").setValue(strValue("b")).build(), + Field.newBuilder().setName("feature1").setValue(intValue(2)).build(), + Field.newBuilder().setName("feature2").setValue(intValue(2)).build())) + .setFeatureSet("featureSet:1") + .build()); + + List featureRowBytes = + featureRows.stream().map(AbstractMessageLite::toByteArray).collect(Collectors.toList()); when(specService.getFeatureSet("featureSet", 1)).thenReturn(getFeatureSetSpec()); when(jedisPool.getResource()).thenReturn(jedis); when(jedis.mget(redisKeyList)).thenReturn(featureRowBytes); when(tracer.buildSpan(ArgumentMatchers.any())).thenReturn(Mockito.mock(SpanBuilder.class)); - GetOnlineFeaturesResponse expected = GetOnlineFeaturesResponse.newBuilder() - .addFieldValues(FieldValues.newBuilder() - .putFields("entity1", intValue(1)) - .putFields("entity2", strValue("a")) - .putFields("featureSet:1:feature1", intValue(1)) - .putFields("featureSet:1:feature2", intValue(1))) - .addFieldValues(FieldValues.newBuilder() - .putFields("entity1", intValue(2)) - .putFields("entity2", strValue("b")) - .putFields("featureSet:1:feature1", Value.newBuilder().build()) - .putFields("featureSet:1:feature2", Value.newBuilder().build())) - .build(); + GetOnlineFeaturesResponse expected = + GetOnlineFeaturesResponse.newBuilder() + .addFieldValues( + FieldValues.newBuilder() + .putFields("entity1", intValue(1)) + .putFields("entity2", strValue("a")) + .putFields("featureSet:1:feature1", intValue(1)) + .putFields("featureSet:1:feature2", intValue(1))) + .addFieldValues( + FieldValues.newBuilder() + .putFields("entity1", intValue(2)) + .putFields("entity2", strValue("b")) + .putFields("featureSet:1:feature1", Value.newBuilder().build()) + .putFields("featureSet:1:feature2", Value.newBuilder().build())) + .build(); GetOnlineFeaturesResponse actual = redisServingService.getOnlineFeatures(request); - assertThat(responseToMapList(actual), containsInAnyOrder(responseToMapList(expected).toArray())); + assertThat( + responseToMapList(actual), containsInAnyOrder(responseToMapList(expected).toArray())); } - @Test public void shouldReturnResponseWithUnsetValuesIfDefaultMaxAgeIsExceeded() { // keys present, but too stale comp. 
to maxAge set in featureSetSpec - GetOnlineFeaturesRequest request = GetOnlineFeaturesRequest.newBuilder() - .addFeatureSets(FeatureSetRequest.newBuilder() - .setName("featureSet") - .setVersion(1) - .addAllFeatureNames(Lists.newArrayList("feature1", "feature2")) - .build()) - .addEntityRows(EntityRow.newBuilder() - .setEntityTimestamp(Timestamp.newBuilder().setSeconds(100)) - .putFields("entity1", intValue(1)) - .putFields("entity2", strValue("a"))) - .addEntityRows(EntityRow.newBuilder() - .setEntityTimestamp(Timestamp.newBuilder().setSeconds(100)) - .putFields("entity1", intValue(2)) - .putFields("entity2", strValue("b"))) - .build(); - - List featureRows = Lists.newArrayList( - FeatureRow.newBuilder() - .setEventTimestamp(Timestamp.newBuilder().setSeconds(100)) - .addAllFields(Lists - .newArrayList( - Field.newBuilder().setName("entity1").setValue(intValue(1)).build(), - Field.newBuilder().setName("entity2").setValue(strValue("a")).build(), - Field.newBuilder().setName("feature1").setValue(intValue(1)).build(), - Field.newBuilder().setName("feature2").setValue(intValue(1)).build())) - .setFeatureSet("featureSet:1") - .build(), - FeatureRow.newBuilder() - .setEventTimestamp(Timestamp.newBuilder().setSeconds(0)) // this value should be nulled - .addAllFields(Lists - .newArrayList( - Field.newBuilder().setName("entity1").setValue(intValue(2)).build(), - Field.newBuilder().setName("entity2").setValue(strValue("b")).build(), - Field.newBuilder().setName("feature1").setValue(intValue(2)).build(), - Field.newBuilder().setName("feature2").setValue(intValue(2)).build())) - .setFeatureSet("featureSet:1") - .build() - ); - - List featureRowBytes = featureRows.stream() - .map(AbstractMessageLite::toByteArray) - .collect(Collectors.toList()); + GetOnlineFeaturesRequest request = + GetOnlineFeaturesRequest.newBuilder() + .addFeatureSets( + FeatureSetRequest.newBuilder() + .setName("featureSet") + .setVersion(1) + .addAllFeatureNames(Lists.newArrayList("feature1", "feature2")) + .build()) + .addEntityRows( + EntityRow.newBuilder() + .setEntityTimestamp(Timestamp.newBuilder().setSeconds(100)) + .putFields("entity1", intValue(1)) + .putFields("entity2", strValue("a"))) + .addEntityRows( + EntityRow.newBuilder() + .setEntityTimestamp(Timestamp.newBuilder().setSeconds(100)) + .putFields("entity1", intValue(2)) + .putFields("entity2", strValue("b"))) + .build(); + + List featureRows = + Lists.newArrayList( + FeatureRow.newBuilder() + .setEventTimestamp(Timestamp.newBuilder().setSeconds(100)) + .addAllFields( + Lists.newArrayList( + Field.newBuilder().setName("entity1").setValue(intValue(1)).build(), + Field.newBuilder().setName("entity2").setValue(strValue("a")).build(), + Field.newBuilder().setName("feature1").setValue(intValue(1)).build(), + Field.newBuilder().setName("feature2").setValue(intValue(1)).build())) + .setFeatureSet("featureSet:1") + .build(), + FeatureRow.newBuilder() + .setEventTimestamp( + Timestamp.newBuilder().setSeconds(0)) // this value should be nulled + .addAllFields( + Lists.newArrayList( + Field.newBuilder().setName("entity1").setValue(intValue(2)).build(), + Field.newBuilder().setName("entity2").setValue(strValue("b")).build(), + Field.newBuilder().setName("feature1").setValue(intValue(2)).build(), + Field.newBuilder().setName("feature2").setValue(intValue(2)).build())) + .setFeatureSet("featureSet:1") + .build()); + + List featureRowBytes = + featureRows.stream().map(AbstractMessageLite::toByteArray).collect(Collectors.toList()); when(specService.getFeatureSet("featureSet", 
1)).thenReturn(getFeatureSetSpec()); when(jedisPool.getResource()).thenReturn(jedis); when(jedis.mget(redisKeyList)).thenReturn(featureRowBytes); when(tracer.buildSpan(ArgumentMatchers.any())).thenReturn(Mockito.mock(SpanBuilder.class)); - GetOnlineFeaturesResponse expected = GetOnlineFeaturesResponse.newBuilder() - .addFieldValues(FieldValues.newBuilder() - .putFields("entity1", intValue(1)) - .putFields("entity2", strValue("a")) - .putFields("featureSet:1:feature1", intValue(1)) - .putFields("featureSet:1:feature2", intValue(1))) - .addFieldValues(FieldValues.newBuilder() - .putFields("entity1", intValue(2)) - .putFields("entity2", strValue("b")) - .putFields("featureSet:1:feature1", Value.newBuilder().build()) - .putFields("featureSet:1:feature2", Value.newBuilder().build())) - .build(); + GetOnlineFeaturesResponse expected = + GetOnlineFeaturesResponse.newBuilder() + .addFieldValues( + FieldValues.newBuilder() + .putFields("entity1", intValue(1)) + .putFields("entity2", strValue("a")) + .putFields("featureSet:1:feature1", intValue(1)) + .putFields("featureSet:1:feature2", intValue(1))) + .addFieldValues( + FieldValues.newBuilder() + .putFields("entity1", intValue(2)) + .putFields("entity2", strValue("b")) + .putFields("featureSet:1:feature1", Value.newBuilder().build()) + .putFields("featureSet:1:feature2", Value.newBuilder().build())) + .build(); GetOnlineFeaturesResponse actual = redisServingService.getOnlineFeatures(request); - assertThat(responseToMapList(actual), containsInAnyOrder(responseToMapList(expected).toArray())); + assertThat( + responseToMapList(actual), containsInAnyOrder(responseToMapList(expected).toArray())); } - @Test public void shouldFilterOutUndesiredRows() { // requested rows less than the rows available in the featureset - GetOnlineFeaturesRequest request = GetOnlineFeaturesRequest.newBuilder() - .addFeatureSets(FeatureSetRequest.newBuilder() - .setName("featureSet") - .setVersion(1) - .addAllFeatureNames(Lists.newArrayList("feature1")) - .build()) - .addEntityRows(EntityRow.newBuilder() - .setEntityTimestamp(Timestamp.newBuilder().setSeconds(100)) - .putFields("entity1", intValue(1)) - .putFields("entity2", strValue("a"))) - .addEntityRows(EntityRow.newBuilder() - .setEntityTimestamp(Timestamp.newBuilder().setSeconds(100)) - .putFields("entity1", intValue(2)) - .putFields("entity2", strValue("b"))) - .build(); - - List featureRows = Lists.newArrayList( - FeatureRow.newBuilder() - .setEventTimestamp(Timestamp.newBuilder().setSeconds(100)) - .addAllFields(Lists - .newArrayList( - Field.newBuilder().setName("entity1").setValue(intValue(1)).build(), - Field.newBuilder().setName("entity2").setValue(strValue("a")).build(), - Field.newBuilder().setName("feature1").setValue(intValue(1)).build(), - Field.newBuilder().setName("feature2").setValue(intValue(1)).build())) - .setFeatureSet("featureSet:1") - .build(), - FeatureRow.newBuilder() - .setEventTimestamp(Timestamp.newBuilder().setSeconds(100)) - .addAllFields(Lists - .newArrayList( - Field.newBuilder().setName("entity1").setValue(intValue(2)).build(), - Field.newBuilder().setName("entity2").setValue(strValue("b")).build(), - Field.newBuilder().setName("feature1").setValue(intValue(2)).build(), - Field.newBuilder().setName("feature2").setValue(intValue(2)).build())) - .setFeatureSet("featureSet:1") - .build() - ); - - List featureRowBytes = featureRows.stream() - .map(AbstractMessageLite::toByteArray) - .collect(Collectors.toList()); + GetOnlineFeaturesRequest request = + GetOnlineFeaturesRequest.newBuilder() + 
.addFeatureSets( + FeatureSetRequest.newBuilder() + .setName("featureSet") + .setVersion(1) + .addAllFeatureNames(Lists.newArrayList("feature1")) + .build()) + .addEntityRows( + EntityRow.newBuilder() + .setEntityTimestamp(Timestamp.newBuilder().setSeconds(100)) + .putFields("entity1", intValue(1)) + .putFields("entity2", strValue("a"))) + .addEntityRows( + EntityRow.newBuilder() + .setEntityTimestamp(Timestamp.newBuilder().setSeconds(100)) + .putFields("entity1", intValue(2)) + .putFields("entity2", strValue("b"))) + .build(); + + List featureRows = + Lists.newArrayList( + FeatureRow.newBuilder() + .setEventTimestamp(Timestamp.newBuilder().setSeconds(100)) + .addAllFields( + Lists.newArrayList( + Field.newBuilder().setName("entity1").setValue(intValue(1)).build(), + Field.newBuilder().setName("entity2").setValue(strValue("a")).build(), + Field.newBuilder().setName("feature1").setValue(intValue(1)).build(), + Field.newBuilder().setName("feature2").setValue(intValue(1)).build())) + .setFeatureSet("featureSet:1") + .build(), + FeatureRow.newBuilder() + .setEventTimestamp(Timestamp.newBuilder().setSeconds(100)) + .addAllFields( + Lists.newArrayList( + Field.newBuilder().setName("entity1").setValue(intValue(2)).build(), + Field.newBuilder().setName("entity2").setValue(strValue("b")).build(), + Field.newBuilder().setName("feature1").setValue(intValue(2)).build(), + Field.newBuilder().setName("feature2").setValue(intValue(2)).build())) + .setFeatureSet("featureSet:1") + .build()); + + List featureRowBytes = + featureRows.stream().map(AbstractMessageLite::toByteArray).collect(Collectors.toList()); when(specService.getFeatureSet("featureSet", 1)).thenReturn(getFeatureSetSpec()); when(jedisPool.getResource()).thenReturn(jedis); when(jedis.mget(redisKeyList)).thenReturn(featureRowBytes); when(tracer.buildSpan(ArgumentMatchers.any())).thenReturn(Mockito.mock(SpanBuilder.class)); - GetOnlineFeaturesResponse expected = GetOnlineFeaturesResponse.newBuilder() - .addFieldValues(FieldValues.newBuilder() - .putFields("entity1", intValue(1)) - .putFields("entity2", strValue("a")) - .putFields("featureSet:1:feature1", intValue(1))) - .addFieldValues(FieldValues.newBuilder() - .putFields("entity1", intValue(2)) - .putFields("entity2", strValue("b")) - .putFields("featureSet:1:feature1", intValue(2))) - .build(); + GetOnlineFeaturesResponse expected = + GetOnlineFeaturesResponse.newBuilder() + .addFieldValues( + FieldValues.newBuilder() + .putFields("entity1", intValue(1)) + .putFields("entity2", strValue("a")) + .putFields("featureSet:1:feature1", intValue(1))) + .addFieldValues( + FieldValues.newBuilder() + .putFields("entity1", intValue(2)) + .putFields("entity2", strValue("b")) + .putFields("featureSet:1:feature1", intValue(2))) + .build(); GetOnlineFeaturesResponse actual = redisServingService.getOnlineFeatures(request); - assertThat(responseToMapList(actual), containsInAnyOrder(responseToMapList(expected).toArray())); + assertThat( + responseToMapList(actual), containsInAnyOrder(responseToMapList(expected).toArray())); } private List> responseToMapList(GetOnlineFeaturesResponse response) { - return response.getFieldValuesList().stream().map(FieldValues::getFieldsMap).collect(Collectors.toList()); + return response.getFieldValuesList().stream() + .map(FieldValues::getFieldsMap) + .collect(Collectors.toList()); } private Value intValue(int val) { @@ -476,6 +522,7 @@ private Value intValue(int val) { private Value strValue(String val) { return Value.newBuilder().setStringVal(val).build(); } + private 
FeatureSetSpec getFeatureSetSpec() { return FeatureSetSpec.newBuilder() .addEntities(EntitySpec.newBuilder().setName("entity1")) @@ -491,5 +538,4 @@ private FeatureSetSpec getFeatureSetSpecWithNoMaxAge() { .setMaxAge(Duration.newBuilder().setSeconds(0).setNanos(0).build()) .build(); } - -} \ No newline at end of file +} diff --git a/serving/src/test/java/feast/serving/util/mappers/YamlToProtoMapperTest.java b/serving/src/test/java/feast/serving/util/mappers/YamlToProtoMapperTest.java index 88d227d76a..0849ec98c8 100644 --- a/serving/src/test/java/feast/serving/util/mappers/YamlToProtoMapperTest.java +++ b/serving/src/test/java/feast/serving/util/mappers/YamlToProtoMapperTest.java @@ -14,23 +14,23 @@ public class YamlToProtoMapperTest { @Test public void shouldConvertYamlToProto() throws IOException { - String yaml = "name: test\n" - + "type: REDIS\n" - + "redis_config:\n" - + " host: localhost\n" - + " port: 6379\n" - + "subscriptions:\n" - + "- name: \"*\"\n" - + " version: \">0\"\n"; + String yaml = + "name: test\n" + + "type: REDIS\n" + + "redis_config:\n" + + " host: localhost\n" + + " port: 6379\n" + + "subscriptions:\n" + + "- name: \"*\"\n" + + " version: \">0\"\n"; Store store = YamlToProtoMapper.yamlToStoreProto(yaml); - Store expected = Store.newBuilder() - .setName("test") - .setType(StoreType.REDIS) - .setRedisConfig(RedisConfig.newBuilder().setHost("localhost").setPort(6379)) - .addSubscriptions(Subscription.newBuilder() - .setName("*") - .setVersion(">0")) - .build(); + Store expected = + Store.newBuilder() + .setName("test") + .setType(StoreType.REDIS) + .setRedisConfig(RedisConfig.newBuilder().setHost("localhost").setPort(6379)) + .addSubscriptions(Subscription.newBuilder().setName("*").setVersion(">0")) + .build(); assertThat(store, equalTo(expected)); } -} \ No newline at end of file +} From d7ea74a256268599d8899031000340f0d15a03c8 Mon Sep 17 00:00:00 2001 From: Ches Martin Date: Wed, 27 Nov 2019 14:19:50 +0700 Subject: [PATCH 3/3] Enable Apache license header in Spotless for .java --- .../main/java/feast/core/CoreApplication.java | 5 ++--- .../config/CoreGRpcServerBuilderConfig.java | 16 ++++++++++++++ .../feast/core/config/FeastProperties.java | 16 ++++++++++++++ .../core/config/FeatureStreamConfig.java | 16 ++++++++++++++ .../java/feast/core/config/JPAConfig.java | 5 ++--- .../java/feast/core/config/JobConfig.java | 5 ++--- .../java/feast/core/config/WebMvcConfig.java | 5 ++--- .../feast/core/dao/FeatureSetRepository.java | 16 ++++++++++++++ .../feast/core/dao/JobInfoRepository.java | 5 ++--- .../feast/core/dao/MetricsRepository.java | 5 ++--- .../java/feast/core/dao/StoreRepository.java | 16 ++++++++++++++ .../core/exception/JobExecutionException.java | 5 ++--- .../exception/JobMonitoringException.java | 5 ++--- .../core/exception/RegistrationException.java | 5 ++--- .../core/exception/RetrievalException.java | 5 ++--- .../core/exception/TopicExistsException.java | 5 ++--- .../java/feast/core/grpc/CoreServiceImpl.java | 5 ++--- .../feast/core/http/HealthController.java | 5 ++--- .../main/java/feast/core/job/JobManager.java | 5 ++--- .../main/java/feast/core/job/JobMonitor.java | 5 ++--- .../java/feast/core/job/NoopJobMonitor.java | 5 ++--- core/src/main/java/feast/core/job/Runner.java | 16 ++++++++++++++ .../feast/core/job/ScheduledJobMonitor.java | 5 ++--- .../core/job/dataflow/DataflowJobConfig.java | 5 ++--- .../core/job/dataflow/DataflowJobManager.java | 5 ++--- .../core/job/dataflow/DataflowJobMonitor.java | 5 ++--- .../core/job/dataflow/DataflowJobState.java | 5 
++--- .../job/dataflow/DataflowJobStateMapper.java | 5 ++--- .../core/job/dataflow/DataflowJobType.java | 5 ++--- .../java/feast/core/job/direct/DirectJob.java | 16 ++++++++++++++ .../core/job/direct/DirectJobRegistry.java | 16 ++++++++++++++ .../core/job/direct/DirectJobStateMapper.java | 16 ++++++++++++++ .../job/direct/DirectRunnerJobManager.java | 5 ++--- .../job/direct/DirectRunnerJobMonitor.java | 16 ++++++++++++++ core/src/main/java/feast/core/log/Action.java | 16 ++++++++++++++ .../main/java/feast/core/log/AuditLogger.java | 5 ++--- .../main/java/feast/core/log/Resource.java | 16 ++++++++++++++ .../core/model/AbstractTimestampEntity.java | 5 ++--- .../java/feast/core/model/FeatureSet.java | 16 ++++++++++++++ .../src/main/java/feast/core/model/Field.java | 16 ++++++++++++++ .../main/java/feast/core/model/JobInfo.java | 5 ++--- .../main/java/feast/core/model/JobStatus.java | 5 ++--- .../main/java/feast/core/model/Metrics.java | 5 ++--- .../main/java/feast/core/model/Source.java | 16 ++++++++++++++ .../src/main/java/feast/core/model/Store.java | 16 ++++++++++++++ .../core/service/JobCoordinatorService.java | 16 ++++++++++++++ .../feast/core/service/JobStatusService.java | 16 ++++++++++++++ .../java/feast/core/service/SpecService.java | 5 ++--- .../java/feast/core/util/PackageUtil.java | 16 ++++++++++++++ .../java/feast/core/util/PipelineUtil.java | 16 ++++++++++++++ .../java/feast/core/util/TypeConversion.java | 5 ++--- .../core/validators/FeatureSetValidator.java | 16 ++++++++++++++ .../java/feast/core/validators/Matchers.java | 5 ++--- .../java/feast/core/CoreApplicationTest.java | 16 ++++++++++++++ .../core/annotation/IntegrationTest.java | 5 ++--- .../feast/core/http/HealthControllerTest.java | 16 ++++++++++++++ .../core/job/ScheduledJobMonitorTest.java | 5 ++--- .../job/dataflow/DataflowJobManagerTest.java | 5 ++--- .../job/dataflow/DataflowJobMonitorTest.java | 5 ++--- .../dataflow/DataflowJobStateMapperTest.java | 5 ++--- .../direct/DirectRunnerJobManagerTest.java | 16 ++++++++++++++ .../service/JobCoordinatorServiceTest.java | 16 ++++++++++++++ .../feast/core/service/SpecServiceTest.java | 5 ++--- .../feast/core/util/TypeConversionTest.java | 5 ++--- .../feast/core/validators/MatchersTest.java | 5 ++--- .../main/java/feast/ingestion/ImportJob.java | 16 ++++++++++++++ .../coders/FailsafeFeatureRowCoder.java | 16 ++++++++++++++ .../ingestion/options/ImportOptions.java | 5 ++--- .../ingestion/transform/ReadFromSource.java | 16 ++++++++++++++ .../transform/ValidateFeatureRows.java | 16 ++++++++++++++ .../WriteFailedElementToBigQuery.java | 16 ++++++++++++++ .../ingestion/transform/WriteToStore.java | 16 ++++++++++++++ .../fn/KafkaRecordToFeatureRowDoFn.java | 16 ++++++++++++++ .../ingestion/transform/fn/LoggerDoFn.java | 5 ++--- .../transform/fn/ValidateFeatureRowDoFn.java | 16 ++++++++++++++ .../WriteDeadletterRowMetricsDoFn.java | 16 ++++++++++++++ .../metrics/WriteMetricsTransform.java | 16 ++++++++++++++ .../metrics/WriteRowMetricsDoFn.java | 16 ++++++++++++++ .../java/feast/ingestion/utils/DateUtil.java | 5 ++--- .../java/feast/ingestion/utils/JsonUtil.java | 7 +++--- .../feast/ingestion/utils/ResourceUtil.java | 16 ++++++++++++++ .../java/feast/ingestion/utils/SpecUtil.java | 16 ++++++++++++++ .../java/feast/ingestion/utils/StoreUtil.java | 16 ++++++++++++++ .../feast/ingestion/values/FailedElement.java | 16 ++++++++++++++ .../ingestion/values/FailsafeFeatureRow.java | 16 ++++++++++++++ .../ingestion/values/FeatureSetSpec.java | 16 ++++++++++++++ 
.../java/feast/ingestion/values/Field.java | 16 ++++++++++++++ .../bigquery/FeatureRowToTableRow.java | 16 ++++++++++++++ .../redis/FeatureRowToRedisMutationDoFn.java | 5 ++--- .../store/serving/redis/RedisCustomIO.java | 6 ++--- .../src/test/java/feast/FeastMatchers.java | 5 ++--- .../test/java/feast/ToOrderedFeatureRows.java | 5 ++--- .../java/feast/ingestion/ImportJobTest.java | 16 ++++++++++++++ .../transform/ValidateFeatureRowsTest.java | 16 ++++++++++++++ .../feast/ingestion/util/DateUtilTest.java | 5 ++--- .../feast/ingestion/util/JsonUtilTest.java | 5 ++--- .../feast/ingestion/util/StoreUtilTest.java | 16 ++++++++++++++ .../serving/redis/RedisCustomIOTest.java | 16 ++++++++++++++ .../src/test/java/feast/test/TestUtil.java | 16 ++++++++++++++ pom.xml | 22 +++++++++++++++++++ .../com/gojek/feast/v1alpha1/FeastClient.java | 16 ++++++++++++++ .../com/gojek/feast/v1alpha1/RequestUtil.java | 16 ++++++++++++++ .../java/com/gojek/feast/v1alpha1/Row.java | 16 ++++++++++++++ .../gojek/feast/v1alpha1/RequestUtilTest.java | 16 ++++++++++++++ .../java/feast/serving/FeastProperties.java | 16 ++++++++++++++ .../feast/serving/ServingApplication.java | 5 ++--- .../configuration/ContextClosedHandler.java | 16 ++++++++++++++ .../configuration/InstrumentationConfig.java | 16 ++++++++++++++ .../configuration/JobServiceConfig.java | 16 ++++++++++++++ .../ServingApiConfiguration.java | 16 ++++++++++++++ .../configuration/ServingServiceConfig.java | 16 ++++++++++++++ .../configuration/SpecServiceConfig.java | 16 ++++++++++++++ .../controller/HealthServiceController.java | 16 ++++++++++++++ .../ServingServiceGRpcController.java | 16 ++++++++++++++ .../ServingServiceRestController.java | 16 ++++++++++++++ .../exception/SpecRetrievalException.java | 5 ++--- .../service/BigQueryServingService.java | 16 ++++++++++++++ .../serving/service/CachedSpecService.java | 16 ++++++++++++++ .../serving/service/CoreSpecService.java | 16 ++++++++++++++ .../feast/serving/service/JobService.java | 16 ++++++++++++++ .../feast/serving/service/NoopJobService.java | 16 ++++++++++++++ .../service/RedisBackedJobService.java | 16 ++++++++++++++ .../serving/service/RedisServingService.java | 4 ++-- .../feast/serving/service/ServingService.java | 16 ++++++++++++++ .../java/feast/serving/util/BigQueryUtil.java | 16 ++++++++++++++ .../main/java/feast/serving/util/Metrics.java | 16 ++++++++++++++ .../feast/serving/util/RequestHelper.java | 16 ++++++++++++++ .../util/mappers/ResponseJSONMapper.java | 16 ++++++++++++++ .../util/mappers/YamlToProtoMapper.java | 16 ++++++++++++++ .../ServingServiceGRpcControllerTest.java | 16 ++++++++++++++ .../service/CachedSpecServiceTest.java | 16 ++++++++++++++ .../service/RedisServingServiceTest.java | 16 ++++++++++++++ .../util/mappers/YamlToProtoMapperTest.java | 16 ++++++++++++++ 133 files changed, 1393 insertions(+), 160 deletions(-) diff --git a/core/src/main/java/feast/core/CoreApplication.java b/core/src/main/java/feast/core/CoreApplication.java index c28887d59e..957fdf5015 100644 --- a/core/src/main/java/feast/core/CoreApplication.java +++ b/core/src/main/java/feast/core/CoreApplication.java @@ -1,5 +1,6 @@ /* - * Copyright 2018 The Feast Authors + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -12,9 +13,7 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* See the License for the specific language governing permissions and * limitations under the License. - * */ - package feast.core; import feast.core.config.FeastProperties; diff --git a/core/src/main/java/feast/core/config/CoreGRpcServerBuilderConfig.java b/core/src/main/java/feast/core/config/CoreGRpcServerBuilderConfig.java index 84912d0a74..e025c7b297 100644 --- a/core/src/main/java/feast/core/config/CoreGRpcServerBuilderConfig.java +++ b/core/src/main/java/feast/core/config/CoreGRpcServerBuilderConfig.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package feast.core.config; import io.grpc.ServerBuilder; diff --git a/core/src/main/java/feast/core/config/FeastProperties.java b/core/src/main/java/feast/core/config/FeastProperties.java index 681157a683..e57a594305 100644 --- a/core/src/main/java/feast/core/config/FeastProperties.java +++ b/core/src/main/java/feast/core/config/FeastProperties.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package feast.core.config; import java.util.Map; diff --git a/core/src/main/java/feast/core/config/FeatureStreamConfig.java b/core/src/main/java/feast/core/config/FeatureStreamConfig.java index 10036b2e15..ca8240d780 100644 --- a/core/src/main/java/feast/core/config/FeatureStreamConfig.java +++ b/core/src/main/java/feast/core/config/FeatureStreamConfig.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ package feast.core.config; import com.google.common.base.Strings; diff --git a/core/src/main/java/feast/core/config/JPAConfig.java b/core/src/main/java/feast/core/config/JPAConfig.java index 6b8b5e5e22..dada6c9d3a 100644 --- a/core/src/main/java/feast/core/config/JPAConfig.java +++ b/core/src/main/java/feast/core/config/JPAConfig.java @@ -1,5 +1,6 @@ /* - * Copyright 2018 The Feast Authors + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -12,9 +13,7 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. - * */ - package feast.core.config; import javax.persistence.EntityManagerFactory; diff --git a/core/src/main/java/feast/core/config/JobConfig.java b/core/src/main/java/feast/core/config/JobConfig.java index 4342352bae..c47bb78400 100644 --- a/core/src/main/java/feast/core/config/JobConfig.java +++ b/core/src/main/java/feast/core/config/JobConfig.java @@ -1,5 +1,6 @@ /* - * Copyright 2018 The Feast Authors + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -12,9 +13,7 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. - * */ - package feast.core.config; import com.google.api.client.googleapis.auth.oauth2.GoogleCredential; diff --git a/core/src/main/java/feast/core/config/WebMvcConfig.java b/core/src/main/java/feast/core/config/WebMvcConfig.java index cbbaf12850..5d6a6b8ece 100644 --- a/core/src/main/java/feast/core/config/WebMvcConfig.java +++ b/core/src/main/java/feast/core/config/WebMvcConfig.java @@ -1,5 +1,6 @@ /* - * Copyright 2018 The Feast Authors + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -12,9 +13,7 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. - * */ - package feast.core.config; import java.util.List; diff --git a/core/src/main/java/feast/core/dao/FeatureSetRepository.java b/core/src/main/java/feast/core/dao/FeatureSetRepository.java index 11a1c308bd..e8d37424da 100644 --- a/core/src/main/java/feast/core/dao/FeatureSetRepository.java +++ b/core/src/main/java/feast/core/dao/FeatureSetRepository.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ package feast.core.dao; import feast.core.model.FeatureSet; diff --git a/core/src/main/java/feast/core/dao/JobInfoRepository.java b/core/src/main/java/feast/core/dao/JobInfoRepository.java index 949e900636..6e5820eae7 100644 --- a/core/src/main/java/feast/core/dao/JobInfoRepository.java +++ b/core/src/main/java/feast/core/dao/JobInfoRepository.java @@ -1,5 +1,6 @@ /* - * Copyright 2018 The Feast Authors + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -12,9 +13,7 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. - * */ - package feast.core.dao; import feast.core.model.JobInfo; diff --git a/core/src/main/java/feast/core/dao/MetricsRepository.java b/core/src/main/java/feast/core/dao/MetricsRepository.java index 24c6d05036..c7bc483697 100644 --- a/core/src/main/java/feast/core/dao/MetricsRepository.java +++ b/core/src/main/java/feast/core/dao/MetricsRepository.java @@ -1,5 +1,6 @@ /* - * Copyright 2018 The Feast Authors + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -12,9 +13,7 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. - * */ - package feast.core.dao; import feast.core.model.Metrics; diff --git a/core/src/main/java/feast/core/dao/StoreRepository.java b/core/src/main/java/feast/core/dao/StoreRepository.java index 70560320da..a0015b9932 100644 --- a/core/src/main/java/feast/core/dao/StoreRepository.java +++ b/core/src/main/java/feast/core/dao/StoreRepository.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package feast.core.dao; import feast.core.model.Store; diff --git a/core/src/main/java/feast/core/exception/JobExecutionException.java b/core/src/main/java/feast/core/exception/JobExecutionException.java index 75a635e324..85eb199ae4 100644 --- a/core/src/main/java/feast/core/exception/JobExecutionException.java +++ b/core/src/main/java/feast/core/exception/JobExecutionException.java @@ -1,5 +1,6 @@ /* - * Copyright 2018 The Feast Authors + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -12,9 +13,7 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
- * */ - package feast.core.exception; /** Exception thrown when a request for job execution fails. */ diff --git a/core/src/main/java/feast/core/exception/JobMonitoringException.java b/core/src/main/java/feast/core/exception/JobMonitoringException.java index 705c8ebac2..380f68a778 100644 --- a/core/src/main/java/feast/core/exception/JobMonitoringException.java +++ b/core/src/main/java/feast/core/exception/JobMonitoringException.java @@ -1,5 +1,6 @@ /* - * Copyright 2018 The Feast Authors + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -12,9 +13,7 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. - * */ - package feast.core.exception; /** Exception thrown when error happen during job monitoring. */ diff --git a/core/src/main/java/feast/core/exception/RegistrationException.java b/core/src/main/java/feast/core/exception/RegistrationException.java index e6861b8012..ccb4d55cae 100644 --- a/core/src/main/java/feast/core/exception/RegistrationException.java +++ b/core/src/main/java/feast/core/exception/RegistrationException.java @@ -1,5 +1,6 @@ /* - * Copyright 2018 The Feast Authors + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -12,9 +13,7 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. - * */ - package feast.core.exception; /** Exception thrown when a spec is fails to be registered to the Feast metadata registry. */ diff --git a/core/src/main/java/feast/core/exception/RetrievalException.java b/core/src/main/java/feast/core/exception/RetrievalException.java index 82c9ba30ae..bd543048a9 100644 --- a/core/src/main/java/feast/core/exception/RetrievalException.java +++ b/core/src/main/java/feast/core/exception/RetrievalException.java @@ -1,5 +1,6 @@ /* - * Copyright 2018 The Feast Authors + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -12,9 +13,7 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. - * */ - package feast.core.exception; /** Exception thrown when retrieval of a spec from the registry fails. */ diff --git a/core/src/main/java/feast/core/exception/TopicExistsException.java b/core/src/main/java/feast/core/exception/TopicExistsException.java index 4416563651..abd4937c71 100644 --- a/core/src/main/java/feast/core/exception/TopicExistsException.java +++ b/core/src/main/java/feast/core/exception/TopicExistsException.java @@ -1,5 +1,6 @@ /* - * Copyright 2018 The Feast Authors + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -12,9 +13,7 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* See the License for the specific language governing permissions and * limitations under the License. - * */ - package feast.core.exception; /** Exception thrown when creation of a topic in the stream fails because it already exists. */ diff --git a/core/src/main/java/feast/core/grpc/CoreServiceImpl.java b/core/src/main/java/feast/core/grpc/CoreServiceImpl.java index 744c449307..5861742e00 100644 --- a/core/src/main/java/feast/core/grpc/CoreServiceImpl.java +++ b/core/src/main/java/feast/core/grpc/CoreServiceImpl.java @@ -1,5 +1,6 @@ /* - * Copyright 2018 The Feast Authors + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -12,9 +13,7 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. - * */ - package feast.core.grpc; import com.google.common.collect.Lists; diff --git a/core/src/main/java/feast/core/http/HealthController.java b/core/src/main/java/feast/core/http/HealthController.java index 563c7bad42..2451ed793e 100644 --- a/core/src/main/java/feast/core/http/HealthController.java +++ b/core/src/main/java/feast/core/http/HealthController.java @@ -1,5 +1,6 @@ /* - * Copyright 2018 The Feast Authors + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -12,9 +13,7 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. - * */ - package feast.core.http; import static org.springframework.http.HttpStatus.INTERNAL_SERVER_ERROR; diff --git a/core/src/main/java/feast/core/job/JobManager.java b/core/src/main/java/feast/core/job/JobManager.java index bed7f265b7..5147671c84 100644 --- a/core/src/main/java/feast/core/job/JobManager.java +++ b/core/src/main/java/feast/core/job/JobManager.java @@ -1,5 +1,6 @@ /* - * Copyright 2018 The Feast Authors + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -12,9 +13,7 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. - * */ - package feast.core.job; import feast.core.FeatureSetProto.FeatureSetSpec; diff --git a/core/src/main/java/feast/core/job/JobMonitor.java b/core/src/main/java/feast/core/job/JobMonitor.java index 8829241340..740f4bdb87 100644 --- a/core/src/main/java/feast/core/job/JobMonitor.java +++ b/core/src/main/java/feast/core/job/JobMonitor.java @@ -1,5 +1,6 @@ /* - * Copyright 2018 The Feast Authors + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -12,9 +13,7 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
- * */ - package feast.core.job; import feast.core.model.JobInfo; diff --git a/core/src/main/java/feast/core/job/NoopJobMonitor.java b/core/src/main/java/feast/core/job/NoopJobMonitor.java index e078730b03..c71010c242 100644 --- a/core/src/main/java/feast/core/job/NoopJobMonitor.java +++ b/core/src/main/java/feast/core/job/NoopJobMonitor.java @@ -1,5 +1,6 @@ /* - * Copyright 2018 The Feast Authors + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -12,9 +13,7 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. - * */ - package feast.core.job; import feast.core.model.JobInfo; diff --git a/core/src/main/java/feast/core/job/Runner.java b/core/src/main/java/feast/core/job/Runner.java index 98e008f7a9..637621be35 100644 --- a/core/src/main/java/feast/core/job/Runner.java +++ b/core/src/main/java/feast/core/job/Runner.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package feast.core.job; public enum Runner { diff --git a/core/src/main/java/feast/core/job/ScheduledJobMonitor.java b/core/src/main/java/feast/core/job/ScheduledJobMonitor.java index 96b070b2b8..cc87d5fcf6 100644 --- a/core/src/main/java/feast/core/job/ScheduledJobMonitor.java +++ b/core/src/main/java/feast/core/job/ScheduledJobMonitor.java @@ -1,5 +1,6 @@ /* - * Copyright 2018 The Feast Authors + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -12,9 +13,7 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. - * */ - package feast.core.job; import com.google.common.base.Strings; diff --git a/core/src/main/java/feast/core/job/dataflow/DataflowJobConfig.java b/core/src/main/java/feast/core/job/dataflow/DataflowJobConfig.java index e3bd71a817..a9bbf345d1 100644 --- a/core/src/main/java/feast/core/job/dataflow/DataflowJobConfig.java +++ b/core/src/main/java/feast/core/job/dataflow/DataflowJobConfig.java @@ -1,5 +1,6 @@ /* - * Copyright 2018 The Feast Authors + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -12,9 +13,7 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
- * */ - package feast.core.job.dataflow; import lombok.Value; diff --git a/core/src/main/java/feast/core/job/dataflow/DataflowJobManager.java b/core/src/main/java/feast/core/job/dataflow/DataflowJobManager.java index 28fc680e53..4e4533c4c9 100644 --- a/core/src/main/java/feast/core/job/dataflow/DataflowJobManager.java +++ b/core/src/main/java/feast/core/job/dataflow/DataflowJobManager.java @@ -1,5 +1,6 @@ /* - * Copyright 2018 The Feast Authors + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -12,9 +13,7 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. - * */ - package feast.core.job.dataflow; import static feast.core.util.PipelineUtil.detectClassPathResourcesToStage; diff --git a/core/src/main/java/feast/core/job/dataflow/DataflowJobMonitor.java b/core/src/main/java/feast/core/job/dataflow/DataflowJobMonitor.java index 09feb61049..9394878548 100644 --- a/core/src/main/java/feast/core/job/dataflow/DataflowJobMonitor.java +++ b/core/src/main/java/feast/core/job/dataflow/DataflowJobMonitor.java @@ -1,5 +1,6 @@ /* - * Copyright 2018 The Feast Authors + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -12,9 +13,7 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. - * */ - package feast.core.job.dataflow; import static com.google.common.base.Preconditions.checkNotNull; diff --git a/core/src/main/java/feast/core/job/dataflow/DataflowJobState.java b/core/src/main/java/feast/core/job/dataflow/DataflowJobState.java index 6a1a059ba6..a5da7fd979 100644 --- a/core/src/main/java/feast/core/job/dataflow/DataflowJobState.java +++ b/core/src/main/java/feast/core/job/dataflow/DataflowJobState.java @@ -1,5 +1,6 @@ /* - * Copyright 2018 The Feast Authors + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -12,9 +13,7 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. - * */ - package feast.core.job.dataflow; public enum DataflowJobState { diff --git a/core/src/main/java/feast/core/job/dataflow/DataflowJobStateMapper.java b/core/src/main/java/feast/core/job/dataflow/DataflowJobStateMapper.java index 59deb1cb95..c94c84ce8e 100644 --- a/core/src/main/java/feast/core/job/dataflow/DataflowJobStateMapper.java +++ b/core/src/main/java/feast/core/job/dataflow/DataflowJobStateMapper.java @@ -1,5 +1,6 @@ /* - * Copyright 2018 The Feast Authors + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -12,9 +13,7 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* See the License for the specific language governing permissions and * limitations under the License. - * */ - package feast.core.job.dataflow; import static feast.core.job.dataflow.DataflowJobState.JOB_STATE_CANCELLED; diff --git a/core/src/main/java/feast/core/job/dataflow/DataflowJobType.java b/core/src/main/java/feast/core/job/dataflow/DataflowJobType.java index fc3edfd42f..e96bbd2141 100644 --- a/core/src/main/java/feast/core/job/dataflow/DataflowJobType.java +++ b/core/src/main/java/feast/core/job/dataflow/DataflowJobType.java @@ -1,5 +1,6 @@ /* - * Copyright 2018 The Feast Authors + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -12,9 +13,7 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. - * */ - package feast.core.job.dataflow; public enum DataflowJobType { diff --git a/core/src/main/java/feast/core/job/direct/DirectJob.java b/core/src/main/java/feast/core/job/direct/DirectJob.java index 3483045fd2..35c778f360 100644 --- a/core/src/main/java/feast/core/job/direct/DirectJob.java +++ b/core/src/main/java/feast/core/job/direct/DirectJob.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package feast.core.job.direct; import java.io.IOException; diff --git a/core/src/main/java/feast/core/job/direct/DirectJobRegistry.java b/core/src/main/java/feast/core/job/direct/DirectJobRegistry.java index 70d6696967..f7ded9fec7 100644 --- a/core/src/main/java/feast/core/job/direct/DirectJobRegistry.java +++ b/core/src/main/java/feast/core/job/direct/DirectJobRegistry.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ package feast.core.job.direct; import com.google.common.base.Strings; diff --git a/core/src/main/java/feast/core/job/direct/DirectJobStateMapper.java b/core/src/main/java/feast/core/job/direct/DirectJobStateMapper.java index fa9a5f347b..e0e521e6a4 100644 --- a/core/src/main/java/feast/core/job/direct/DirectJobStateMapper.java +++ b/core/src/main/java/feast/core/job/direct/DirectJobStateMapper.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package feast.core.job.direct; import feast.core.model.JobStatus; diff --git a/core/src/main/java/feast/core/job/direct/DirectRunnerJobManager.java b/core/src/main/java/feast/core/job/direct/DirectRunnerJobManager.java index dff265a35b..a09fd39495 100644 --- a/core/src/main/java/feast/core/job/direct/DirectRunnerJobManager.java +++ b/core/src/main/java/feast/core/job/direct/DirectRunnerJobManager.java @@ -1,5 +1,6 @@ /* - * Copyright 2018 The Feast Authors + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -12,9 +13,7 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. - * */ - package feast.core.job.direct; import com.google.common.base.Strings; diff --git a/core/src/main/java/feast/core/job/direct/DirectRunnerJobMonitor.java b/core/src/main/java/feast/core/job/direct/DirectRunnerJobMonitor.java index ace86d305b..50d02b2728 100644 --- a/core/src/main/java/feast/core/job/direct/DirectRunnerJobMonitor.java +++ b/core/src/main/java/feast/core/job/direct/DirectRunnerJobMonitor.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ package feast.core.job.direct; import feast.core.job.JobMonitor; diff --git a/core/src/main/java/feast/core/log/Action.java b/core/src/main/java/feast/core/log/Action.java index 7b0bd0ad30..3eb24c080a 100644 --- a/core/src/main/java/feast/core/log/Action.java +++ b/core/src/main/java/feast/core/log/Action.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package feast.core.log; /** Actions taken for audit logging purposes */ diff --git a/core/src/main/java/feast/core/log/AuditLogger.java b/core/src/main/java/feast/core/log/AuditLogger.java index c65171a0f1..5349b5548b 100644 --- a/core/src/main/java/feast/core/log/AuditLogger.java +++ b/core/src/main/java/feast/core/log/AuditLogger.java @@ -1,5 +1,6 @@ /* - * Copyright 2018 The Feast Authors + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -12,9 +13,7 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. - * */ - package feast.core.log; import com.google.common.base.Strings; diff --git a/core/src/main/java/feast/core/log/Resource.java b/core/src/main/java/feast/core/log/Resource.java index 1fce2fd265..d8e484b388 100644 --- a/core/src/main/java/feast/core/log/Resource.java +++ b/core/src/main/java/feast/core/log/Resource.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package feast.core.log; /** Resources interacted with, for audit logging purposes */ diff --git a/core/src/main/java/feast/core/model/AbstractTimestampEntity.java b/core/src/main/java/feast/core/model/AbstractTimestampEntity.java index d77dd9d807..cacaa51adb 100644 --- a/core/src/main/java/feast/core/model/AbstractTimestampEntity.java +++ b/core/src/main/java/feast/core/model/AbstractTimestampEntity.java @@ -1,5 +1,6 @@ /* - * Copyright 2018 The Feast Authors + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -12,9 +13,7 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. - * */ - package feast.core.model; import java.util.Date; diff --git a/core/src/main/java/feast/core/model/FeatureSet.java b/core/src/main/java/feast/core/model/FeatureSet.java index ff22160477..49607200fc 100644 --- a/core/src/main/java/feast/core/model/FeatureSet.java +++ b/core/src/main/java/feast/core/model/FeatureSet.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package feast.core.model; import com.google.protobuf.Duration; diff --git a/core/src/main/java/feast/core/model/Field.java b/core/src/main/java/feast/core/model/Field.java index 7f0feb3e55..22a44b2a5a 100644 --- a/core/src/main/java/feast/core/model/Field.java +++ b/core/src/main/java/feast/core/model/Field.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package feast.core.model; import feast.types.ValueProto.ValueType; diff --git a/core/src/main/java/feast/core/model/JobInfo.java b/core/src/main/java/feast/core/model/JobInfo.java index b38c0c5cb7..74d3402af5 100644 --- a/core/src/main/java/feast/core/model/JobInfo.java +++ b/core/src/main/java/feast/core/model/JobInfo.java @@ -1,5 +1,6 @@ /* - * Copyright 2018 The Feast Authors + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -12,9 +13,7 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
- * */ - package feast.core.model; import java.util.List; diff --git a/core/src/main/java/feast/core/model/JobStatus.java b/core/src/main/java/feast/core/model/JobStatus.java index 04a5d56c51..123b57a21b 100644 --- a/core/src/main/java/feast/core/model/JobStatus.java +++ b/core/src/main/java/feast/core/model/JobStatus.java @@ -1,5 +1,6 @@ /* - * Copyright 2018 The Feast Authors + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -12,9 +13,7 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. - * */ - package feast.core.model; import java.util.Arrays; diff --git a/core/src/main/java/feast/core/model/Metrics.java b/core/src/main/java/feast/core/model/Metrics.java index 7ec770b988..1e25222baf 100644 --- a/core/src/main/java/feast/core/model/Metrics.java +++ b/core/src/main/java/feast/core/model/Metrics.java @@ -1,5 +1,6 @@ /* - * Copyright 2018 The Feast Authors + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -12,9 +13,7 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. - * */ - package feast.core.model; import javax.persistence.Entity; diff --git a/core/src/main/java/feast/core/model/Source.java b/core/src/main/java/feast/core/model/Source.java index 6f817e3b56..6711aaf30f 100644 --- a/core/src/main/java/feast/core/model/Source.java +++ b/core/src/main/java/feast/core/model/Source.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package feast.core.model; import com.google.common.collect.Sets; diff --git a/core/src/main/java/feast/core/model/Store.java b/core/src/main/java/feast/core/model/Store.java index 588da9dc3c..9bfc27db1f 100644 --- a/core/src/main/java/feast/core/model/Store.java +++ b/core/src/main/java/feast/core/model/Store.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ package feast.core.model; import com.google.protobuf.InvalidProtocolBufferException; diff --git a/core/src/main/java/feast/core/service/JobCoordinatorService.java b/core/src/main/java/feast/core/service/JobCoordinatorService.java index 1cd3515026..fd60062952 100644 --- a/core/src/main/java/feast/core/service/JobCoordinatorService.java +++ b/core/src/main/java/feast/core/service/JobCoordinatorService.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package feast.core.service; import com.google.common.base.Strings; diff --git a/core/src/main/java/feast/core/service/JobStatusService.java b/core/src/main/java/feast/core/service/JobStatusService.java index 0307afa0e1..db6cd41ee8 100644 --- a/core/src/main/java/feast/core/service/JobStatusService.java +++ b/core/src/main/java/feast/core/service/JobStatusService.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package feast.core.service; import lombok.extern.slf4j.Slf4j; diff --git a/core/src/main/java/feast/core/service/SpecService.java b/core/src/main/java/feast/core/service/SpecService.java index 6811ae8668..77052eafb6 100644 --- a/core/src/main/java/feast/core/service/SpecService.java +++ b/core/src/main/java/feast/core/service/SpecService.java @@ -1,5 +1,6 @@ /* - * Copyright 2018 The Feast Authors + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -12,9 +13,7 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
- * */ - package feast.core.service; import static feast.core.validators.Matchers.checkValidCharacters; diff --git a/core/src/main/java/feast/core/util/PackageUtil.java b/core/src/main/java/feast/core/util/PackageUtil.java index eff4705aed..ef27332ac0 100644 --- a/core/src/main/java/feast/core/util/PackageUtil.java +++ b/core/src/main/java/feast/core/util/PackageUtil.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package feast.core.util; import java.io.File; diff --git a/core/src/main/java/feast/core/util/PipelineUtil.java b/core/src/main/java/feast/core/util/PipelineUtil.java index 959f417520..ef1b50d8c9 100644 --- a/core/src/main/java/feast/core/util/PipelineUtil.java +++ b/core/src/main/java/feast/core/util/PipelineUtil.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package feast.core.util; import static feast.core.util.PackageUtil.resolveSpringBootPackageClasspath; diff --git a/core/src/main/java/feast/core/util/TypeConversion.java b/core/src/main/java/feast/core/util/TypeConversion.java index 3a2b7bdddc..e01a551135 100644 --- a/core/src/main/java/feast/core/util/TypeConversion.java +++ b/core/src/main/java/feast/core/util/TypeConversion.java @@ -1,5 +1,6 @@ /* - * Copyright 2018 The Feast Authors + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -12,9 +13,7 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. - * */ - package feast.core.util; import com.google.common.base.Strings; diff --git a/core/src/main/java/feast/core/validators/FeatureSetValidator.java b/core/src/main/java/feast/core/validators/FeatureSetValidator.java index 50924dcadd..e14fde72cb 100644 --- a/core/src/main/java/feast/core/validators/FeatureSetValidator.java +++ b/core/src/main/java/feast/core/validators/FeatureSetValidator.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package feast.core.validators; import static feast.core.validators.Matchers.checkValidCharacters; diff --git a/core/src/main/java/feast/core/validators/Matchers.java b/core/src/main/java/feast/core/validators/Matchers.java index edd3554831..03bafafdbf 100644 --- a/core/src/main/java/feast/core/validators/Matchers.java +++ b/core/src/main/java/feast/core/validators/Matchers.java @@ -1,5 +1,6 @@ /* - * Copyright 2018 The Feast Authors + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -12,9 +13,7 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. - * */ - package feast.core.validators; import java.util.regex.Pattern; diff --git a/core/src/test/java/feast/core/CoreApplicationTest.java b/core/src/test/java/feast/core/CoreApplicationTest.java index b91bfc91c4..59c4dfdaaa 100644 --- a/core/src/test/java/feast/core/CoreApplicationTest.java +++ b/core/src/test/java/feast/core/CoreApplicationTest.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package feast.core; // // import static feast.core.config.StorageConfig.DEFAULT_SERVING_ID; diff --git a/core/src/test/java/feast/core/annotation/IntegrationTest.java b/core/src/test/java/feast/core/annotation/IntegrationTest.java index e816e9215f..f1617e32a6 100644 --- a/core/src/test/java/feast/core/annotation/IntegrationTest.java +++ b/core/src/test/java/feast/core/annotation/IntegrationTest.java @@ -1,5 +1,6 @@ /* - * Copyright 2018 The Feast Authors + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -12,9 +13,7 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
- * */ - package feast.core.annotation; public interface IntegrationTest {} diff --git a/core/src/test/java/feast/core/http/HealthControllerTest.java b/core/src/test/java/feast/core/http/HealthControllerTest.java index 70e6e632e0..2fcd622f34 100644 --- a/core/src/test/java/feast/core/http/HealthControllerTest.java +++ b/core/src/test/java/feast/core/http/HealthControllerTest.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package feast.core.http; import static org.junit.Assert.assertEquals; diff --git a/core/src/test/java/feast/core/job/ScheduledJobMonitorTest.java b/core/src/test/java/feast/core/job/ScheduledJobMonitorTest.java index b5c2bab89d..24d1747ce4 100644 --- a/core/src/test/java/feast/core/job/ScheduledJobMonitorTest.java +++ b/core/src/test/java/feast/core/job/ScheduledJobMonitorTest.java @@ -1,5 +1,6 @@ /* - * Copyright 2018 The Feast Authors + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -12,9 +13,7 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. - * */ - package feast.core.job; import static org.hamcrest.core.IsEqual.equalTo; diff --git a/core/src/test/java/feast/core/job/dataflow/DataflowJobManagerTest.java b/core/src/test/java/feast/core/job/dataflow/DataflowJobManagerTest.java index 2eb4e53450..c2c47a8d03 100644 --- a/core/src/test/java/feast/core/job/dataflow/DataflowJobManagerTest.java +++ b/core/src/test/java/feast/core/job/dataflow/DataflowJobManagerTest.java @@ -1,5 +1,6 @@ /* - * Copyright 2018 The Feast Authors + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -12,9 +13,7 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. - * */ - package feast.core.job.dataflow; import static org.hamcrest.MatcherAssert.assertThat; diff --git a/core/src/test/java/feast/core/job/dataflow/DataflowJobMonitorTest.java b/core/src/test/java/feast/core/job/dataflow/DataflowJobMonitorTest.java index f68a8b916b..1311fcbdfc 100644 --- a/core/src/test/java/feast/core/job/dataflow/DataflowJobMonitorTest.java +++ b/core/src/test/java/feast/core/job/dataflow/DataflowJobMonitorTest.java @@ -1,5 +1,6 @@ /* - * Copyright 2018 The Feast Authors + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -12,9 +13,7 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. - * */ - package feast.core.job.dataflow; import static org.hamcrest.Matchers.equalTo; diff --git a/core/src/test/java/feast/core/job/dataflow/DataflowJobStateMapperTest.java b/core/src/test/java/feast/core/job/dataflow/DataflowJobStateMapperTest.java index 86729b4b4d..af9e8b2c42 100644 --- a/core/src/test/java/feast/core/job/dataflow/DataflowJobStateMapperTest.java +++ b/core/src/test/java/feast/core/job/dataflow/DataflowJobStateMapperTest.java @@ -1,5 +1,6 @@ /* - * Copyright 2018 The Feast Authors + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -12,9 +13,7 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. - * */ - package feast.core.job.dataflow; import org.junit.Test; diff --git a/core/src/test/java/feast/core/job/direct/DirectRunnerJobManagerTest.java b/core/src/test/java/feast/core/job/direct/DirectRunnerJobManagerTest.java index a75565f2dc..f78060269c 100644 --- a/core/src/test/java/feast/core/job/direct/DirectRunnerJobManagerTest.java +++ b/core/src/test/java/feast/core/job/direct/DirectRunnerJobManagerTest.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package feast.core.job.direct; import static org.hamcrest.MatcherAssert.assertThat; diff --git a/core/src/test/java/feast/core/service/JobCoordinatorServiceTest.java b/core/src/test/java/feast/core/service/JobCoordinatorServiceTest.java index 8091c148cd..9bc641f92b 100644 --- a/core/src/test/java/feast/core/service/JobCoordinatorServiceTest.java +++ b/core/src/test/java/feast/core/service/JobCoordinatorServiceTest.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ package feast.core.service; import static org.hamcrest.MatcherAssert.assertThat; diff --git a/core/src/test/java/feast/core/service/SpecServiceTest.java b/core/src/test/java/feast/core/service/SpecServiceTest.java index 025cee9e2c..ef58506b1b 100644 --- a/core/src/test/java/feast/core/service/SpecServiceTest.java +++ b/core/src/test/java/feast/core/service/SpecServiceTest.java @@ -1,5 +1,6 @@ /* - * Copyright 2018 The Feast Authors + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -12,9 +13,7 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. - * */ - package feast.core.service; import static org.hamcrest.MatcherAssert.assertThat; diff --git a/core/src/test/java/feast/core/util/TypeConversionTest.java b/core/src/test/java/feast/core/util/TypeConversionTest.java index 07b4f9c7b1..75548f3465 100644 --- a/core/src/test/java/feast/core/util/TypeConversionTest.java +++ b/core/src/test/java/feast/core/util/TypeConversionTest.java @@ -1,5 +1,6 @@ /* - * Copyright 2018 The Feast Authors + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -12,9 +13,7 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. - * */ - package feast.core.util; import static com.jayway.jsonpath.matchers.JsonPathMatchers.hasJsonPath; diff --git a/core/src/test/java/feast/core/validators/MatchersTest.java b/core/src/test/java/feast/core/validators/MatchersTest.java index f167d67f4f..774e58c7a8 100644 --- a/core/src/test/java/feast/core/validators/MatchersTest.java +++ b/core/src/test/java/feast/core/validators/MatchersTest.java @@ -1,5 +1,6 @@ /* - * Copyright 2018 The Feast Authors + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -12,9 +13,7 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. - * */ - package feast.core.validators; import static feast.core.validators.Matchers.checkLowerSnakeCase; diff --git a/ingestion/src/main/java/feast/ingestion/ImportJob.java b/ingestion/src/main/java/feast/ingestion/ImportJob.java index 7ea5f589ec..fb719120d4 100644 --- a/ingestion/src/main/java/feast/ingestion/ImportJob.java +++ b/ingestion/src/main/java/feast/ingestion/ImportJob.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ package feast.ingestion; import com.google.protobuf.InvalidProtocolBufferException; diff --git a/ingestion/src/main/java/feast/ingestion/coders/FailsafeFeatureRowCoder.java b/ingestion/src/main/java/feast/ingestion/coders/FailsafeFeatureRowCoder.java index 6c0e9a3055..6985941ad3 100644 --- a/ingestion/src/main/java/feast/ingestion/coders/FailsafeFeatureRowCoder.java +++ b/ingestion/src/main/java/feast/ingestion/coders/FailsafeFeatureRowCoder.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package feast.ingestion.coders; import feast.ingestion.values.FailsafeFeatureRow; diff --git a/ingestion/src/main/java/feast/ingestion/options/ImportOptions.java b/ingestion/src/main/java/feast/ingestion/options/ImportOptions.java index e379dc7a4e..21df87e4b2 100644 --- a/ingestion/src/main/java/feast/ingestion/options/ImportOptions.java +++ b/ingestion/src/main/java/feast/ingestion/options/ImportOptions.java @@ -1,5 +1,6 @@ /* - * Copyright 2018 The Feast Authors + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -12,9 +13,7 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. - * */ - package feast.ingestion.options; import java.util.List; diff --git a/ingestion/src/main/java/feast/ingestion/transform/ReadFromSource.java b/ingestion/src/main/java/feast/ingestion/transform/ReadFromSource.java index bbaed02011..65e95b287d 100644 --- a/ingestion/src/main/java/feast/ingestion/transform/ReadFromSource.java +++ b/ingestion/src/main/java/feast/ingestion/transform/ReadFromSource.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ package feast.ingestion.transform; import com.google.auto.value.AutoValue; diff --git a/ingestion/src/main/java/feast/ingestion/transform/ValidateFeatureRows.java b/ingestion/src/main/java/feast/ingestion/transform/ValidateFeatureRows.java index f9261035ea..19b1f1f860 100644 --- a/ingestion/src/main/java/feast/ingestion/transform/ValidateFeatureRows.java +++ b/ingestion/src/main/java/feast/ingestion/transform/ValidateFeatureRows.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package feast.ingestion.transform; import com.google.auto.value.AutoValue; diff --git a/ingestion/src/main/java/feast/ingestion/transform/WriteFailedElementToBigQuery.java b/ingestion/src/main/java/feast/ingestion/transform/WriteFailedElementToBigQuery.java index e82e0c1e8c..cda590b21a 100644 --- a/ingestion/src/main/java/feast/ingestion/transform/WriteFailedElementToBigQuery.java +++ b/ingestion/src/main/java/feast/ingestion/transform/WriteFailedElementToBigQuery.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package feast.ingestion.transform; import com.google.api.services.bigquery.model.TableRow; diff --git a/ingestion/src/main/java/feast/ingestion/transform/WriteToStore.java b/ingestion/src/main/java/feast/ingestion/transform/WriteToStore.java index 806a5f8b57..e2f695871b 100644 --- a/ingestion/src/main/java/feast/ingestion/transform/WriteToStore.java +++ b/ingestion/src/main/java/feast/ingestion/transform/WriteToStore.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ package feast.ingestion.transform; import com.google.api.services.bigquery.model.TableDataInsertAllResponse.InsertErrors; diff --git a/ingestion/src/main/java/feast/ingestion/transform/fn/KafkaRecordToFeatureRowDoFn.java b/ingestion/src/main/java/feast/ingestion/transform/fn/KafkaRecordToFeatureRowDoFn.java index f9975490d4..25aafd6ee7 100644 --- a/ingestion/src/main/java/feast/ingestion/transform/fn/KafkaRecordToFeatureRowDoFn.java +++ b/ingestion/src/main/java/feast/ingestion/transform/fn/KafkaRecordToFeatureRowDoFn.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package feast.ingestion.transform.fn; import com.google.auto.value.AutoValue; diff --git a/ingestion/src/main/java/feast/ingestion/transform/fn/LoggerDoFn.java b/ingestion/src/main/java/feast/ingestion/transform/fn/LoggerDoFn.java index 7b318fd161..5bde9a24cb 100644 --- a/ingestion/src/main/java/feast/ingestion/transform/fn/LoggerDoFn.java +++ b/ingestion/src/main/java/feast/ingestion/transform/fn/LoggerDoFn.java @@ -1,5 +1,6 @@ /* - * Copyright 2018 The Feast Authors + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -12,9 +13,7 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. - * */ - package feast.ingestion.transform.fn; import com.google.protobuf.Message; diff --git a/ingestion/src/main/java/feast/ingestion/transform/fn/ValidateFeatureRowDoFn.java b/ingestion/src/main/java/feast/ingestion/transform/fn/ValidateFeatureRowDoFn.java index 777b721d3b..eeb5ce6732 100644 --- a/ingestion/src/main/java/feast/ingestion/transform/fn/ValidateFeatureRowDoFn.java +++ b/ingestion/src/main/java/feast/ingestion/transform/fn/ValidateFeatureRowDoFn.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ package feast.ingestion.transform.fn; import com.google.auto.value.AutoValue; diff --git a/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteDeadletterRowMetricsDoFn.java b/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteDeadletterRowMetricsDoFn.java index 382063236b..452bfb2377 100644 --- a/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteDeadletterRowMetricsDoFn.java +++ b/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteDeadletterRowMetricsDoFn.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package feast.ingestion.transform.metrics; import com.google.auto.value.AutoValue; diff --git a/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteMetricsTransform.java b/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteMetricsTransform.java index 5a460df0ee..43f314aa86 100644 --- a/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteMetricsTransform.java +++ b/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteMetricsTransform.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package feast.ingestion.transform.metrics; import com.google.auto.value.AutoValue; diff --git a/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteRowMetricsDoFn.java b/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteRowMetricsDoFn.java index ba65b56b0f..19a2444ee1 100644 --- a/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteRowMetricsDoFn.java +++ b/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteRowMetricsDoFn.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ package feast.ingestion.transform.metrics; import com.google.auto.value.AutoValue; diff --git a/ingestion/src/main/java/feast/ingestion/utils/DateUtil.java b/ingestion/src/main/java/feast/ingestion/utils/DateUtil.java index 07e15056d1..7a6ef42906 100644 --- a/ingestion/src/main/java/feast/ingestion/utils/DateUtil.java +++ b/ingestion/src/main/java/feast/ingestion/utils/DateUtil.java @@ -1,5 +1,6 @@ /* - * Copyright 2018 The Feast Authors + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -12,9 +13,7 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. - * */ - package feast.ingestion.utils; import com.google.protobuf.Timestamp; diff --git a/ingestion/src/main/java/feast/ingestion/utils/JsonUtil.java b/ingestion/src/main/java/feast/ingestion/utils/JsonUtil.java index 6634d7e8bf..8fc1991b6a 100644 --- a/ingestion/src/main/java/feast/ingestion/utils/JsonUtil.java +++ b/ingestion/src/main/java/feast/ingestion/utils/JsonUtil.java @@ -1,20 +1,19 @@ /* - * Copyright 2019 The Feast Authors + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. - * */ - package feast.ingestion.utils; import com.google.gson.Gson; diff --git a/ingestion/src/main/java/feast/ingestion/utils/ResourceUtil.java b/ingestion/src/main/java/feast/ingestion/utils/ResourceUtil.java index 9d735d2e65..92912c96a5 100644 --- a/ingestion/src/main/java/feast/ingestion/utils/ResourceUtil.java +++ b/ingestion/src/main/java/feast/ingestion/utils/ResourceUtil.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ package feast.ingestion.utils; import com.google.common.io.Resources; diff --git a/ingestion/src/main/java/feast/ingestion/utils/SpecUtil.java b/ingestion/src/main/java/feast/ingestion/utils/SpecUtil.java index e955f235db..132a2e93bf 100644 --- a/ingestion/src/main/java/feast/ingestion/utils/SpecUtil.java +++ b/ingestion/src/main/java/feast/ingestion/utils/SpecUtil.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package feast.ingestion.utils; import com.google.protobuf.InvalidProtocolBufferException; diff --git a/ingestion/src/main/java/feast/ingestion/utils/StoreUtil.java b/ingestion/src/main/java/feast/ingestion/utils/StoreUtil.java index ace686d9e5..d277972450 100644 --- a/ingestion/src/main/java/feast/ingestion/utils/StoreUtil.java +++ b/ingestion/src/main/java/feast/ingestion/utils/StoreUtil.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package feast.ingestion.utils; import static feast.types.ValueProto.ValueType; diff --git a/ingestion/src/main/java/feast/ingestion/values/FailedElement.java b/ingestion/src/main/java/feast/ingestion/values/FailedElement.java index ec1a6afcb1..a7fd162f35 100644 --- a/ingestion/src/main/java/feast/ingestion/values/FailedElement.java +++ b/ingestion/src/main/java/feast/ingestion/values/FailedElement.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ package feast.ingestion.values; import com.google.auto.value.AutoValue; diff --git a/ingestion/src/main/java/feast/ingestion/values/FailsafeFeatureRow.java b/ingestion/src/main/java/feast/ingestion/values/FailsafeFeatureRow.java index 7ca3a88f10..6035b8c762 100644 --- a/ingestion/src/main/java/feast/ingestion/values/FailsafeFeatureRow.java +++ b/ingestion/src/main/java/feast/ingestion/values/FailsafeFeatureRow.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package feast.ingestion.values; import com.google.common.base.MoreObjects; diff --git a/ingestion/src/main/java/feast/ingestion/values/FeatureSetSpec.java b/ingestion/src/main/java/feast/ingestion/values/FeatureSetSpec.java index ef1e38f5ec..8c6e804a06 100644 --- a/ingestion/src/main/java/feast/ingestion/values/FeatureSetSpec.java +++ b/ingestion/src/main/java/feast/ingestion/values/FeatureSetSpec.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package feast.ingestion.values; import static feast.ingestion.utils.SpecUtil.getFieldsByName; diff --git a/ingestion/src/main/java/feast/ingestion/values/Field.java b/ingestion/src/main/java/feast/ingestion/values/Field.java index 3550879ba3..9b4f570d7e 100644 --- a/ingestion/src/main/java/feast/ingestion/values/Field.java +++ b/ingestion/src/main/java/feast/ingestion/values/Field.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ package feast.ingestion.values; import feast.types.ValueProto.ValueType; diff --git a/ingestion/src/main/java/feast/store/serving/bigquery/FeatureRowToTableRow.java b/ingestion/src/main/java/feast/store/serving/bigquery/FeatureRowToTableRow.java index c685e4d95c..b89cf83291 100644 --- a/ingestion/src/main/java/feast/store/serving/bigquery/FeatureRowToTableRow.java +++ b/ingestion/src/main/java/feast/store/serving/bigquery/FeatureRowToTableRow.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package feast.store.serving.bigquery; import com.google.api.services.bigquery.model.TableRow; diff --git a/ingestion/src/main/java/feast/store/serving/redis/FeatureRowToRedisMutationDoFn.java b/ingestion/src/main/java/feast/store/serving/redis/FeatureRowToRedisMutationDoFn.java index d8f4f85dea..9bc503f987 100644 --- a/ingestion/src/main/java/feast/store/serving/redis/FeatureRowToRedisMutationDoFn.java +++ b/ingestion/src/main/java/feast/store/serving/redis/FeatureRowToRedisMutationDoFn.java @@ -1,5 +1,6 @@ /* - * Copyright 2018 The Feast Authors + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -12,9 +13,7 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. - * */ - package feast.store.serving.redis; import feast.core.FeatureSetProto.EntitySpec; diff --git a/ingestion/src/main/java/feast/store/serving/redis/RedisCustomIO.java b/ingestion/src/main/java/feast/store/serving/redis/RedisCustomIO.java index c575e03dbd..2e5d4c9452 100644 --- a/ingestion/src/main/java/feast/store/serving/redis/RedisCustomIO.java +++ b/ingestion/src/main/java/feast/store/serving/redis/RedisCustomIO.java @@ -1,5 +1,6 @@ /* - * Copyright 2018 The Feast Authors + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -12,10 +13,7 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
- * */ - -// package io.suryawirawan.henry.beam.redis.io; package feast.store.serving.redis; import org.apache.avro.reflect.Nullable; diff --git a/ingestion/src/test/java/feast/FeastMatchers.java b/ingestion/src/test/java/feast/FeastMatchers.java index 442c8f85ee..7c67afaf74 100644 --- a/ingestion/src/test/java/feast/FeastMatchers.java +++ b/ingestion/src/test/java/feast/FeastMatchers.java @@ -1,5 +1,6 @@ /* - * Copyright 2018 The Feast Authors + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -12,9 +13,7 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. - * */ - package feast; import com.google.common.collect.Lists; diff --git a/ingestion/src/test/java/feast/ToOrderedFeatureRows.java b/ingestion/src/test/java/feast/ToOrderedFeatureRows.java index b691cf9d77..db552693f9 100644 --- a/ingestion/src/test/java/feast/ToOrderedFeatureRows.java +++ b/ingestion/src/test/java/feast/ToOrderedFeatureRows.java @@ -1,5 +1,6 @@ /* - * Copyright 2018 The Feast Authors + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -12,9 +13,7 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. - * */ - package feast; import com.google.common.collect.Lists; diff --git a/ingestion/src/test/java/feast/ingestion/ImportJobTest.java b/ingestion/src/test/java/feast/ingestion/ImportJobTest.java index 6a5940918c..b50d839d57 100644 --- a/ingestion/src/test/java/feast/ingestion/ImportJobTest.java +++ b/ingestion/src/test/java/feast/ingestion/ImportJobTest.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package feast.ingestion; import com.google.common.io.Files; diff --git a/ingestion/src/test/java/feast/ingestion/transform/ValidateFeatureRowsTest.java b/ingestion/src/test/java/feast/ingestion/transform/ValidateFeatureRowsTest.java index 107821d2eb..7f2d717688 100644 --- a/ingestion/src/test/java/feast/ingestion/transform/ValidateFeatureRowsTest.java +++ b/ingestion/src/test/java/feast/ingestion/transform/ValidateFeatureRowsTest.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package feast.ingestion.transform; import static org.junit.Assert.*; diff --git a/ingestion/src/test/java/feast/ingestion/util/DateUtilTest.java b/ingestion/src/test/java/feast/ingestion/util/DateUtilTest.java index ed0402a07c..71d4e67bea 100644 --- a/ingestion/src/test/java/feast/ingestion/util/DateUtilTest.java +++ b/ingestion/src/test/java/feast/ingestion/util/DateUtilTest.java @@ -1,5 +1,6 @@ /* - * Copyright 2018 The Feast Authors + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -12,9 +13,7 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. - * */ - package feast.ingestion.util; import static org.hamcrest.MatcherAssert.assertThat; diff --git a/ingestion/src/test/java/feast/ingestion/util/JsonUtilTest.java b/ingestion/src/test/java/feast/ingestion/util/JsonUtilTest.java index fa5e2bc029..02af4d819f 100644 --- a/ingestion/src/test/java/feast/ingestion/util/JsonUtilTest.java +++ b/ingestion/src/test/java/feast/ingestion/util/JsonUtilTest.java @@ -1,5 +1,6 @@ /* - * Copyright 2018 The Feast Authors + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -12,9 +13,7 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. - * */ - package feast.ingestion.util; import static org.hamcrest.Matchers.equalTo; diff --git a/ingestion/src/test/java/feast/ingestion/util/StoreUtilTest.java b/ingestion/src/test/java/feast/ingestion/util/StoreUtilTest.java index d0ecd578e2..f031425989 100644 --- a/ingestion/src/test/java/feast/ingestion/util/StoreUtilTest.java +++ b/ingestion/src/test/java/feast/ingestion/util/StoreUtilTest.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ package feast.ingestion.util; import static feast.types.ValueProto.ValueType.Enum.INT32; diff --git a/ingestion/src/test/java/feast/store/serving/redis/RedisCustomIOTest.java b/ingestion/src/test/java/feast/store/serving/redis/RedisCustomIOTest.java index 362dc2a299..a35e63386d 100644 --- a/ingestion/src/test/java/feast/store/serving/redis/RedisCustomIOTest.java +++ b/ingestion/src/test/java/feast/store/serving/redis/RedisCustomIOTest.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package feast.store.serving.redis; import static feast.test.TestUtil.field; diff --git a/ingestion/src/test/java/feast/test/TestUtil.java b/ingestion/src/test/java/feast/test/TestUtil.java index 163624f2af..397d3c11ae 100644 --- a/ingestion/src/test/java/feast/test/TestUtil.java +++ b/ingestion/src/test/java/feast/test/TestUtil.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package feast.test; import com.google.protobuf.ByteString; diff --git a/pom.xml b/pom.xml index 440bcb812a..f8524afc88 100644 --- a/pom.xml +++ b/pom.xml @@ -273,6 +273,28 @@ 1.26.0 + + + + + 1.7 diff --git a/sdk/java/src/main/java/com/gojek/feast/v1alpha1/FeastClient.java b/sdk/java/src/main/java/com/gojek/feast/v1alpha1/FeastClient.java index 2e8b946d9d..b7a3e78ab1 100644 --- a/sdk/java/src/main/java/com/gojek/feast/v1alpha1/FeastClient.java +++ b/sdk/java/src/main/java/com/gojek/feast/v1alpha1/FeastClient.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ package com.gojek.feast.v1alpha1; import feast.serving.ServingAPIProto.FeatureSetRequest; diff --git a/sdk/java/src/main/java/com/gojek/feast/v1alpha1/RequestUtil.java b/sdk/java/src/main/java/com/gojek/feast/v1alpha1/RequestUtil.java index 082b1f3d2f..72fbe289f2 100644 --- a/sdk/java/src/main/java/com/gojek/feast/v1alpha1/RequestUtil.java +++ b/sdk/java/src/main/java/com/gojek/feast/v1alpha1/RequestUtil.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package com.gojek.feast.v1alpha1; import feast.serving.ServingAPIProto.FeatureSetRequest; diff --git a/sdk/java/src/main/java/com/gojek/feast/v1alpha1/Row.java b/sdk/java/src/main/java/com/gojek/feast/v1alpha1/Row.java index 78ea419f7c..77f9f29887 100644 --- a/sdk/java/src/main/java/com/gojek/feast/v1alpha1/Row.java +++ b/sdk/java/src/main/java/com/gojek/feast/v1alpha1/Row.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package com.gojek.feast.v1alpha1; import com.google.protobuf.ByteString; diff --git a/sdk/java/src/test/java/com/gojek/feast/v1alpha1/RequestUtilTest.java b/sdk/java/src/test/java/com/gojek/feast/v1alpha1/RequestUtilTest.java index 76503bd308..5f87ba0153 100644 --- a/sdk/java/src/test/java/com/gojek/feast/v1alpha1/RequestUtilTest.java +++ b/sdk/java/src/test/java/com/gojek/feast/v1alpha1/RequestUtilTest.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ package com.gojek.feast.v1alpha1; import static org.junit.jupiter.api.Assertions.assertEquals; diff --git a/serving/src/main/java/feast/serving/FeastProperties.java b/serving/src/main/java/feast/serving/FeastProperties.java index c856e7cb68..ebf5f408db 100644 --- a/serving/src/main/java/feast/serving/FeastProperties.java +++ b/serving/src/main/java/feast/serving/FeastProperties.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package feast.serving; // Feast configuration properties that maps Feast configuration from default application.yml file to diff --git a/serving/src/main/java/feast/serving/ServingApplication.java b/serving/src/main/java/feast/serving/ServingApplication.java index 3357996128..ae9bb87a0b 100644 --- a/serving/src/main/java/feast/serving/ServingApplication.java +++ b/serving/src/main/java/feast/serving/ServingApplication.java @@ -1,5 +1,6 @@ /* - * Copyright 2018 The Feast Authors + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -12,9 +13,7 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. - * */ - package feast.serving; import org.springframework.boot.SpringApplication; diff --git a/serving/src/main/java/feast/serving/configuration/ContextClosedHandler.java b/serving/src/main/java/feast/serving/configuration/ContextClosedHandler.java index 01f3a6446b..a4f6d64d84 100644 --- a/serving/src/main/java/feast/serving/configuration/ContextClosedHandler.java +++ b/serving/src/main/java/feast/serving/configuration/ContextClosedHandler.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ package feast.serving.configuration; import java.util.concurrent.ScheduledExecutorService; diff --git a/serving/src/main/java/feast/serving/configuration/InstrumentationConfig.java b/serving/src/main/java/feast/serving/configuration/InstrumentationConfig.java index 368f6065c9..2cd284829c 100644 --- a/serving/src/main/java/feast/serving/configuration/InstrumentationConfig.java +++ b/serving/src/main/java/feast/serving/configuration/InstrumentationConfig.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package feast.serving.configuration; import feast.serving.FeastProperties; diff --git a/serving/src/main/java/feast/serving/configuration/JobServiceConfig.java b/serving/src/main/java/feast/serving/configuration/JobServiceConfig.java index 288ffcd11e..6e02c3f383 100644 --- a/serving/src/main/java/feast/serving/configuration/JobServiceConfig.java +++ b/serving/src/main/java/feast/serving/configuration/JobServiceConfig.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package feast.serving.configuration; import feast.core.StoreProto.Store; diff --git a/serving/src/main/java/feast/serving/configuration/ServingApiConfiguration.java b/serving/src/main/java/feast/serving/configuration/ServingApiConfiguration.java index 4b78303435..539b25a0fc 100644 --- a/serving/src/main/java/feast/serving/configuration/ServingApiConfiguration.java +++ b/serving/src/main/java/feast/serving/configuration/ServingApiConfiguration.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ package feast.serving.configuration; import java.util.List; diff --git a/serving/src/main/java/feast/serving/configuration/ServingServiceConfig.java b/serving/src/main/java/feast/serving/configuration/ServingServiceConfig.java index 194b695b4c..089e28a7b0 100644 --- a/serving/src/main/java/feast/serving/configuration/ServingServiceConfig.java +++ b/serving/src/main/java/feast/serving/configuration/ServingServiceConfig.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package feast.serving.configuration; import com.google.cloud.bigquery.BigQuery; diff --git a/serving/src/main/java/feast/serving/configuration/SpecServiceConfig.java b/serving/src/main/java/feast/serving/configuration/SpecServiceConfig.java index d8ec606ba0..9626f79e5a 100644 --- a/serving/src/main/java/feast/serving/configuration/SpecServiceConfig.java +++ b/serving/src/main/java/feast/serving/configuration/SpecServiceConfig.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package feast.serving.configuration; import feast.serving.FeastProperties; diff --git a/serving/src/main/java/feast/serving/controller/HealthServiceController.java b/serving/src/main/java/feast/serving/controller/HealthServiceController.java index b9ae450303..b3b5dc7de4 100644 --- a/serving/src/main/java/feast/serving/controller/HealthServiceController.java +++ b/serving/src/main/java/feast/serving/controller/HealthServiceController.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ package feast.serving.controller; import feast.core.StoreProto.Store; diff --git a/serving/src/main/java/feast/serving/controller/ServingServiceGRpcController.java b/serving/src/main/java/feast/serving/controller/ServingServiceGRpcController.java index c0b60131d6..dd00ac1674 100644 --- a/serving/src/main/java/feast/serving/controller/ServingServiceGRpcController.java +++ b/serving/src/main/java/feast/serving/controller/ServingServiceGRpcController.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package feast.serving.controller; import feast.serving.FeastProperties; diff --git a/serving/src/main/java/feast/serving/controller/ServingServiceRestController.java b/serving/src/main/java/feast/serving/controller/ServingServiceRestController.java index 75c0a96791..b0e349fd6b 100644 --- a/serving/src/main/java/feast/serving/controller/ServingServiceRestController.java +++ b/serving/src/main/java/feast/serving/controller/ServingServiceRestController.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package feast.serving.controller; import static feast.serving.util.mappers.ResponseJSONMapper.mapGetOnlineFeaturesResponse; diff --git a/serving/src/main/java/feast/serving/exception/SpecRetrievalException.java b/serving/src/main/java/feast/serving/exception/SpecRetrievalException.java index 50e7999dac..fbcba969fa 100644 --- a/serving/src/main/java/feast/serving/exception/SpecRetrievalException.java +++ b/serving/src/main/java/feast/serving/exception/SpecRetrievalException.java @@ -1,5 +1,6 @@ /* - * Copyright 2018 The Feast Authors + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -12,9 +13,7 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. - * */ - package feast.serving.exception; /** Application-specific exception for any failure of retrieving feature/entity/storage spec. 
*/ diff --git a/serving/src/main/java/feast/serving/service/BigQueryServingService.java b/serving/src/main/java/feast/serving/service/BigQueryServingService.java index 71caf45373..f846345215 100644 --- a/serving/src/main/java/feast/serving/service/BigQueryServingService.java +++ b/serving/src/main/java/feast/serving/service/BigQueryServingService.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package feast.serving.service; import static feast.serving.util.BigQueryUtil.getTimestampLimitQuery; diff --git a/serving/src/main/java/feast/serving/service/CachedSpecService.java b/serving/src/main/java/feast/serving/service/CachedSpecService.java index 65dbe3ab11..5daefdd8f8 100644 --- a/serving/src/main/java/feast/serving/service/CachedSpecService.java +++ b/serving/src/main/java/feast/serving/service/CachedSpecService.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package feast.serving.service; import static feast.serving.util.mappers.YamlToProtoMapper.yamlToStoreProto; diff --git a/serving/src/main/java/feast/serving/service/CoreSpecService.java b/serving/src/main/java/feast/serving/service/CoreSpecService.java index f9f37f48e7..f5b344db3b 100644 --- a/serving/src/main/java/feast/serving/service/CoreSpecService.java +++ b/serving/src/main/java/feast/serving/service/CoreSpecService.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ package feast.serving.service; import feast.core.CoreServiceGrpc; diff --git a/serving/src/main/java/feast/serving/service/JobService.java b/serving/src/main/java/feast/serving/service/JobService.java index 2029afb153..96af5cd4d6 100644 --- a/serving/src/main/java/feast/serving/service/JobService.java +++ b/serving/src/main/java/feast/serving/service/JobService.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package feast.serving.service; import feast.serving.ServingAPIProto.Job; diff --git a/serving/src/main/java/feast/serving/service/NoopJobService.java b/serving/src/main/java/feast/serving/service/NoopJobService.java index 28601671aa..41fd561073 100644 --- a/serving/src/main/java/feast/serving/service/NoopJobService.java +++ b/serving/src/main/java/feast/serving/service/NoopJobService.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package feast.serving.service; import feast.serving.ServingAPIProto.Job; diff --git a/serving/src/main/java/feast/serving/service/RedisBackedJobService.java b/serving/src/main/java/feast/serving/service/RedisBackedJobService.java index 79e52ecd9e..3db943182b 100644 --- a/serving/src/main/java/feast/serving/service/RedisBackedJobService.java +++ b/serving/src/main/java/feast/serving/service/RedisBackedJobService.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ package feast.serving.service; import com.google.protobuf.util.JsonFormat; diff --git a/serving/src/main/java/feast/serving/service/RedisServingService.java b/serving/src/main/java/feast/serving/service/RedisServingService.java index b9368bb297..38a3ae63d9 100644 --- a/serving/src/main/java/feast/serving/service/RedisServingService.java +++ b/serving/src/main/java/feast/serving/service/RedisServingService.java @@ -1,5 +1,6 @@ /* - * Copyright 2019 The Feast Authors + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -13,7 +14,6 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package feast.serving.service; import static feast.serving.util.Metrics.missingKeyCount; diff --git a/serving/src/main/java/feast/serving/service/ServingService.java b/serving/src/main/java/feast/serving/service/ServingService.java index 3c8c075c15..83adcb73ba 100644 --- a/serving/src/main/java/feast/serving/service/ServingService.java +++ b/serving/src/main/java/feast/serving/service/ServingService.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package feast.serving.service; import feast.serving.ServingAPIProto.GetBatchFeaturesRequest; diff --git a/serving/src/main/java/feast/serving/util/BigQueryUtil.java b/serving/src/main/java/feast/serving/util/BigQueryUtil.java index 27b3105ac9..db0c90e449 100644 --- a/serving/src/main/java/feast/serving/util/BigQueryUtil.java +++ b/serving/src/main/java/feast/serving/util/BigQueryUtil.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ package feast.serving.util; import com.google.protobuf.Duration; diff --git a/serving/src/main/java/feast/serving/util/Metrics.java b/serving/src/main/java/feast/serving/util/Metrics.java index f5102ea4c0..ffd6d1a0d6 100644 --- a/serving/src/main/java/feast/serving/util/Metrics.java +++ b/serving/src/main/java/feast/serving/util/Metrics.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package feast.serving.util; import io.prometheus.client.Counter; diff --git a/serving/src/main/java/feast/serving/util/RequestHelper.java b/serving/src/main/java/feast/serving/util/RequestHelper.java index a6945d2dba..4127b6afef 100644 --- a/serving/src/main/java/feast/serving/util/RequestHelper.java +++ b/serving/src/main/java/feast/serving/util/RequestHelper.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package feast.serving.util; import feast.serving.ServingAPIProto.GetBatchFeaturesRequest; diff --git a/serving/src/main/java/feast/serving/util/mappers/ResponseJSONMapper.java b/serving/src/main/java/feast/serving/util/mappers/ResponseJSONMapper.java index b83d66e91c..14723efe7b 100644 --- a/serving/src/main/java/feast/serving/util/mappers/ResponseJSONMapper.java +++ b/serving/src/main/java/feast/serving/util/mappers/ResponseJSONMapper.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ package feast.serving.util.mappers; import feast.serving.ServingAPIProto.GetOnlineFeaturesResponse; diff --git a/serving/src/main/java/feast/serving/util/mappers/YamlToProtoMapper.java b/serving/src/main/java/feast/serving/util/mappers/YamlToProtoMapper.java index 9391cab293..00ad1fabb1 100644 --- a/serving/src/main/java/feast/serving/util/mappers/YamlToProtoMapper.java +++ b/serving/src/main/java/feast/serving/util/mappers/YamlToProtoMapper.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package feast.serving.util.mappers; import com.fasterxml.jackson.databind.ObjectMapper; diff --git a/serving/src/test/java/feast/serving/controller/ServingServiceGRpcControllerTest.java b/serving/src/test/java/feast/serving/controller/ServingServiceGRpcControllerTest.java index d464efa429..6dfc54ec2b 100644 --- a/serving/src/test/java/feast/serving/controller/ServingServiceGRpcControllerTest.java +++ b/serving/src/test/java/feast/serving/controller/ServingServiceGRpcControllerTest.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package feast.serving.controller; import static org.mockito.MockitoAnnotations.initMocks; diff --git a/serving/src/test/java/feast/serving/service/CachedSpecServiceTest.java b/serving/src/test/java/feast/serving/service/CachedSpecServiceTest.java index 31dc88fab7..5b295e9ee7 100644 --- a/serving/src/test/java/feast/serving/service/CachedSpecServiceTest.java +++ b/serving/src/test/java/feast/serving/service/CachedSpecServiceTest.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ package feast.serving.service; import static org.hamcrest.CoreMatchers.equalTo; diff --git a/serving/src/test/java/feast/serving/service/RedisServingServiceTest.java b/serving/src/test/java/feast/serving/service/RedisServingServiceTest.java index 4575c49153..890699db6d 100644 --- a/serving/src/test/java/feast/serving/service/RedisServingServiceTest.java +++ b/serving/src/test/java/feast/serving/service/RedisServingServiceTest.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package feast.serving.service; import static org.hamcrest.MatcherAssert.assertThat; diff --git a/serving/src/test/java/feast/serving/util/mappers/YamlToProtoMapperTest.java b/serving/src/test/java/feast/serving/util/mappers/YamlToProtoMapperTest.java index 0849ec98c8..9437aa0333 100644 --- a/serving/src/test/java/feast/serving/util/mappers/YamlToProtoMapperTest.java +++ b/serving/src/test/java/feast/serving/util/mappers/YamlToProtoMapperTest.java @@ -1,3 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package feast.serving.util.mappers; import static org.hamcrest.core.IsEqual.equalTo;